/****************************************************************************
 *
 *   (c) 2009-2016 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/


/**
 * @file
 *   @brief QGC Video Receiver
 *   @author Gus Grubba <mavlink@grubba.com>
 */

#include "VideoReceiver.h"
#include "SettingsManager.h"
#include "QGCApplication.h"
#include "VideoManager.h"

#include <QDebug>
#include <QUrl>
#include <QDir>
#include <QDateTime>
#include <QSysInfo>

QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

#if defined(QGC_GST_STREAMING)

static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))
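// Note: kVideoExtensions and kVideoMuxes above must stay index-aligned; the
// recording format setting is used as a single index into both lists when
// picking the muxer element and the output file extension.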

#endif


VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _sink(NULL)
    , _tee(NULL)
    , _pipeline(NULL)
    , _pipelineStopRec(NULL)
    , _videoSink(NULL)
    , _socket(NULL)
    , _serverPresent(false)
#endif
    , _videoSurface(NULL)
    , _videoRunning(false)
    , _showFullScreen(false)
{
    _videoSurface  = new VideoSurface;
#if defined(QGC_GST_STREAMING)
    _setVideoSink(_videoSurface->videoSink());
    _timer.setSingleShot(true);
    connect(&_timer, &QTimer::timeout, this, &VideoReceiver::_timeout);
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    stop();
    if(_socket) {
        delete _socket;
    }
    if (_videoSink) {
        gst_object_unref(_videoSink);
    }
#endif
    if(_videoSurface)
        delete _videoSurface;
}

#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_setVideoSink(GstElement* sink)
{
    if (_videoSink) {
        gst_object_unref(_videoSink);
        _videoSink = NULL;
    }
    if (sink) {
        _videoSink = sink;
        gst_object_ref_sink(_videoSink);
    }
}
#endif

//-----------------------------------------------------------------------------
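// Note: grabImage() only records the requested file name and notifies
// listeners via imageFileChanged(); the actual frame capture is assumed to be
// performed by whichever component handles that signal.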
void
VideoReceiver::grabImage(QString imageFile)
{
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
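// newPadCB() serves as the "pad-added" handler for elements that create their
// source pads dynamically (rtspsrc, tsdemux); it links the newly created pad
// to the sink pad of the element passed in through 'data'.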
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name;
    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    // gst_pad_get_pad_template_caps() returns a reference that must be released
    gst_caps_unref(p_caps);
    GstElement* sink = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, sink, "sink") == false)
        qCritical() << "newPadCB : failed to link elements\n";
    g_free(name);
}
#endif

//-----------------------------------------------------------------------------
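// The QTcpSocket used below is only a reachability probe for RTSP/TCP servers:
// once the probe connects (or fails), the socket is discarded and the
// GStreamer pipeline is started (or the probe retried) from a timer.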
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_connected()
{
    //-- Server showed up. Now we start the stream.
    _timer.stop();
    _socket->deleteLater();
    _socket = NULL;
    _serverPresent = true;
    start();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    _socket->deleteLater();
    _socket = NULL;
    //-- Try again in 5 seconds
    _timer.start(5000);
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_timeout()
{
    //-- If socket is live, we got no connection nor a socket error
    if(_socket) {
        delete _socket;
        _socket = NULL;
    }
    //-- RTSP will try to connect to the server. If it cannot connect,
    //   it will simply give up and never try again. Instead, we keep
    //   attempting a connection on this timer. Only once a connection is
    //   found to be working do we actually start the stream.
    QUrl url(_uri);
    _socket = new QTcpSocket;
    _socket->setProxy(QNetworkProxy::NoProxy);
    connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
    connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
    //qCDebug(VideoReceiverLog) << "Trying to connect to:" << url.host() << url.port();
    _socket->connectToHost(url.host(), url.port());
    _timer.start(5000);
}
#endif

//-----------------------------------------------------------------------------
// When we finish, our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
void
VideoReceiver::start()
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "start()";

    if (_uri.isEmpty()) {
        qCritical() << "VideoReceiver::start() failed because URI is not specified";
        return;
    }
    if (_videoSink == NULL) {
        qCritical() << "VideoReceiver::start() failed because video sink is not set";
        return;
    }
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }

    _starting = true;

    bool isUdp  = _uri.contains("udp://");
    bool isRtsp = _uri.contains("rtsp://");
    bool isTCP  = _uri.contains("tcp://");

    //-- For RTSP and TCP, check to see if server is there first
    if(!_serverPresent && (isRtsp || isTCP)) {
        _timer.start(100);
        return;
    }

    bool running = false;
    bool pipelineUp = false;

    GstElement*     dataSource  = NULL;
    GstCaps*        caps        = NULL;
    GstElement*     demux       = NULL;
    GstElement*     parser      = NULL;
    GstElement*     queue       = NULL;
    GstElement*     decoder     = NULL;
    GstElement*     queue1      = NULL;

    do {
        if ((_pipeline = gst_pipeline_new("receiver")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
            break;
        }

        if(isUdp) {
            dataSource = gst_element_factory_make("udpsrc", "udp-source");
        } else if(isTCP) {
            dataSource = gst_element_factory_make("tcpclientsrc", "tcpclient-source");
        } else {
            dataSource = gst_element_factory_make("rtspsrc", "rtsp-source");
        }

        if (!dataSource) {
            qCritical() << "VideoReceiver::start() failed. Error with data source for gst_element_factory_make()";
            break;
        }

        if(isUdp) {
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
            g_object_set(G_OBJECT(dataSource), "uri", qPrintable(_uri), "caps", caps, NULL);
        } else if(isTCP) {
            QUrl url(_uri);
            g_object_set(G_OBJECT(dataSource), "host", qPrintable(url.host()), "port", url.port(), NULL );
        } else {
            g_object_set(G_OBJECT(dataSource), "location", qPrintable(_uri), "latency", 17, "udp-reconnect", 1, "timeout", static_cast<guint64>(5000000), NULL);
        }

        // Currently we expect H264 regardless of transport: TCP streams arrive as MPEG-TS
        // and are demuxed, everything else arrives as RTP and is depayloaded.
        // Long term we may want this to be settable
        if (isTCP) {
            if ((demux = gst_element_factory_make("tsdemux", "mpeg2-ts-demuxer")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tsdemux')";
                break;
            }
        } else {
            if ((demux = gst_element_factory_make("rtph264depay", "rtp-h264-depacketizer")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('rtph264depay')";
                break;
            }
        }

        if ((parser = gst_element_factory_make("h264parse", "h264-parser")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('h264parse')";
            break;
        }

        if((_tee = gst_element_factory_make("tee", NULL)) == NULL)  {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
            break;
        }

        if((queue = gst_element_factory_make("queue", NULL)) == NULL)  {
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
            break;
        }

        if ((decoder = gst_element_factory_make("avdec_h264", "h264-decoder")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('avdec_h264')";
            break;
        }

        if ((queue1 = gst_element_factory_make("queue", NULL)) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue') [1]";
            break;
        }

        gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, NULL);
        pipelineUp = true;

        if(isUdp) {
            // Link the pipeline in front of the tee
            if(!gst_element_link_many(dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, NULL)) {
                qCritical() << "Unable to link UDP elements.";
                break;
            }
        } else if (isTCP) {
            if(!gst_element_link(dataSource, demux)) {
                qCritical() << "Unable to link TCP dataSource to Demux.";
                break;
            }
            if(!gst_element_link_many(parser, _tee, queue, decoder, queue1, _videoSink, NULL)) {
                qCritical() << "Unable to link TCP pipeline to parser.";
                break;
            }
            g_signal_connect(demux, "pad-added", G_CALLBACK(newPadCB), parser);
        } else {
            g_signal_connect(dataSource, "pad-added", G_CALLBACK(newPadCB), demux);
            if(!gst_element_link_many(demux, parser, _tee, queue, decoder, _videoSink, NULL)) {
                qCritical() << "Unable to link RTSP elements.";
                break;
            }
        }

        dataSource = demux = parser = queue = decoder = queue1 = NULL;

        GstBus* bus = NULL;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = NULL;
        }

        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (caps != NULL) {
        gst_caps_unref(caps);
        caps = NULL;
    }

    if (!running) {
        qCritical() << "VideoReceiver::start() failed";

        // In newer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != NULL) {
            gst_object_unref(_pipeline);
            _pipeline = NULL;
        }

        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
            if (decoder != NULL) {
                gst_object_unref(decoder);
                decoder = NULL;
            }

            if (parser != NULL) {
                gst_object_unref(parser);
                parser = NULL;
            }

            if (demux != NULL) {
                gst_object_unref(demux);
                demux = NULL;
            }

            if (dataSource != NULL) {
                gst_object_unref(dataSource);
                dataSource = NULL;
            }

            if (_tee != NULL) {
                gst_object_unref(_tee);
                _tee = NULL;
            }

            if (queue != NULL) {
                gst_object_unref(queue);
                queue = NULL;
            }
        }

        _running = false;
    } else {
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
    }
    _starting = false;
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stop()";
    if(!_streaming) {
        _shutdownPipeline();
    } else if (_pipeline != NULL && !_stopping) {
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        // Block until the EOS we just sent (or an error) reaches the bus,
        // so the pipeline has a chance to flush before being torn down.
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
        gst_object_unref(bus);
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
            qCritical() << "Error stopping pipeline!";
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
        gst_message_unref(message);
    }
#endif
}

//-----------------------------------------------------------------------------
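// Only stores the URI; it is picked up the next time start() builds the pipeline.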
void
VideoReceiver::setUri(const QString & uri)
{
    _uri = uri;
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    GstBus* bus = NULL;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = NULL;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    // Take _videoSink back out of the bin so destroying the pipeline does not
    // destroy the sink we still hold a reference to (see _setVideoSink).
    gst_bin_remove(GST_BIN(_pipeline), _videoSink);
    gst_object_unref(_pipeline);
    _pipeline = NULL;
    delete _sink;
    _sink = NULL;
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    _shutdownPipeline();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleEOS() {
    if(_stopping) {
        _shutdownPipeline();
        qCDebug(VideoReceiverLog) << "Stopped";
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
        qCritical() << "VideoReceiver: Unexpected EOS!";
        _shutdownPipeline();
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Note: this is installed as a "sync-message" handler, so it can be called
// from GStreamer's own threads. It therefore only emits Qt signals and leaves
// the actual handling to the connected slots, which run on the receiver's thread.
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != NULL && data != NULL);
    VideoReceiver* pThis = (VideoReceiver*)data;

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCritical() << error->message;
        g_error_free(error);
        pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        pThis->msgStateChangedReceived();
        break;
    default:
        break;
    }

    return TRUE;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_cleanupOldVideos()
{
    QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
    QDir videoDir = QDir(savePath);
    videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
    videoDir.setSorting(QDir::Time);
    //-- All the movie extensions we support
    QStringList nameFilters;
    for(uint32_t i = 0; i < NUM_MUXES; i++) {
        nameFilters << QString("*.") + QString(kVideoExtensions[i]);
    }
    videoDir.setNameFilters(nameFilters);
    //-- get the list of videos stored
    QFileInfoList vidList = videoDir.entryInfoList();
    if(!vidList.isEmpty()) {
        uint64_t total   = 0;
        //-- Settings are stored using MB
        uint64_t maxSize = (qgcApp()->toolbox()->settingsManager()->videoSettings()->maxVideoSize()->rawValue().toUInt() * 1024 * 1024);
        //-- Compute total used storage
        for(int i = 0; i < vidList.size(); i++) {
            total += vidList[i].size();
        }
        //-- Remove old movies until max size is satisfied.
        while(total >= maxSize && !vidList.isEmpty()) {
            total -= vidList.last().size();
            qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
            QFile file (vidList.last().filePath());
            file.remove();
            vidList.removeLast();
        }
    }
}
#endif

//-----------------------------------------------------------------------------
// When we finish, our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   |
//                                   |    +--------------_sink-------------------+
//                                   |    |                                      |
//   we are adding these elements->  +->teepad-->queue-->matroskamux-->_filesink |
//                                        |                                      |
//                                        +--------------------------------------+
void
VideoReceiver::startRecording(void)
{
#if defined(QGC_GST_STREAMING)

    qCDebug(VideoReceiverLog) << "startRecording()";
    // exit immediately if we are already recording
    if(_pipeline == NULL || _recording) {
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

    QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
    if(savePath.isEmpty()) {
        qgcApp()->showMessage(tr("Unable to record video. Video save path must be specified in Settings."));
        return;
    }

    uint32_t muxIdx = qgcApp()->toolbox()->settingsManager()->videoSettings()->recordingFormat()->rawValue().toUInt();
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

    _sink           = new Sink();
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", NULL);
    _sink->parse    = gst_element_factory_make("h264parse", NULL);
    _sink->mux      = gst_element_factory_make(kVideoMuxes[muxIdx], NULL);
    _sink->filesink = gst_element_factory_make("filesink", NULL);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->mux || !_sink->filesink || !_sink->parse) {
        qCritical() << "VideoReceiver::startRecording() failed to make _sink elements";
        return;
    }

    QString videoFile;
    videoFile = savePath + "/" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") + "." + kVideoExtensions[muxIdx];

    g_object_set(G_OBJECT(_sink->filesink), "location", qPrintable(videoFile), NULL);
    qCDebug(VideoReceiverLog) << "New video file:" << videoFile;

    // Keep our own references to the recording elements so they survive being
    // removed from the pipeline when the branch is detached later
    // (see _detachRecordingBranch/_shutdownRecordingBranch).
    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->parse);
    gst_object_ref(_sink->mux);
    gst_object_ref(_sink->filesink);

    gst_bin_add_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    gst_element_sync_state_with_parent(_sink->queue);
    gst_element_sync_state_with_parent(_sink->parse);
    gst_element_sync_state_with_parent(_sink->mux);
    gst_element_sync_state_with_parent(_sink->filesink);

    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stopRecording()";
    // exit immediately if we are not recording
    if(_pipeline == NULL || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for data block before unlinking
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, NULL);
#endif
}

//-----------------------------------------------------------------------------
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recording elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownRecordingBranch()
{
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->queue);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->parse);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->mux);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->filesink);

    gst_element_set_state(_pipelineStopRec, GST_STATE_NULL);
    gst_object_unref(_pipelineStopRec);
    _pipelineStopRec = NULL;

    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);
    gst_element_set_state(_sink->parse,     GST_STATE_NULL);
    gst_element_set_state(_sink->mux,       GST_STATE_NULL);
    gst_element_set_state(_sink->queue,     GST_STATE_NULL);

    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->parse);
    gst_object_unref(_sink->mux);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = NULL;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording Stopped";
}
#endif

//-----------------------------------------------------------------------------
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)

    // Also unlinks and unrefs
    gst_bin_remove_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);

    // Create temporary pipeline
    _pipelineStopRec = gst_pipeline_new("pipeStopRec");

    // Put our elements from the recording branch into the temporary pipeline
    gst_bin_add_many(GST_BIN(_pipelineStopRec), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    // Add handler for EOS event
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipelineStopRec));
    gst_bus_enable_sync_message_emission(bus);
    g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
    gst_object_unref(bus);

    if(gst_element_set_state(_pipelineStopRec, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        qCDebug(VideoReceiverLog) << "problem starting _pipelineStopRec";
    }

    // Send EOS at the beginning of the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording branch unlinked";
}
#endif

//-----------------------------------------------------------------------------
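// Pad probe installed by stopRecording() on the tee's request pad; it fires
// when the pad is idle, so the recording branch is detached between buffers.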
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != NULL && user_data != NULL) {
        VideoReceiver* pThis = (VideoReceiver*)user_data;
        // We will only act once
        if(g_atomic_int_compare_and_exchange(&pThis->_sink->removing, FALSE, TRUE)) {
            pThis->_detachRecordingBranch(info);
        }
    }
    return GST_PAD_PROBE_REMOVE;
}
#endif

//-----------------------------------------------------------------------------
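// Driven by _frameTimer (1 Hz): tracks whether frames are still arriving on
// the video surface, stops the stream if no frame has been seen for more than
// two seconds, and restarts it when not running and a URI is set.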
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
    if(_videoSurface) {
        if(stopping() || starting()) {
            return;
        }
        if(streaming()) {
            if(!_videoRunning) {
                _videoSurface->setLastFrame(0);
                _videoRunning = true;
                emit videoRunningChanged();
            }
        } else {
            if(_videoRunning) {
                _videoRunning = false;
                emit videoRunningChanged();
            }
        }
        if(_videoRunning) {
            time_t elapsed = 0;
            time_t lastFrame = _videoSurface->lastFrame();
            if(lastFrame != 0) {
                elapsed = time(0) - _videoSurface->lastFrame();
            }
            if(elapsed > 2 && _videoSurface) {
                stop();
            }
        } else {
            if(!running() && !_uri.isEmpty()) {
                start();
            }
        }
    }
#endif
}