/****************************************************************************
 *
 *   (c) 2009-2016 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/


/**
 * @file
 *   @brief QGC Video Receiver
 *   @author Gus Grubba <mavlink@grubba.com>
 */

#include "VideoReceiver.h"
#include "SettingsManager.h"
#include "QGCApplication.h"
#include "VideoManager.h"

#include <QDebug>
#include <QUrl>
#include <QDir>
#include <QDateTime>
#include <QSysInfo>

QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

#if defined(QGC_GST_STREAMING)

static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

#endif


VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _sink(NULL)
    , _tee(NULL)
    , _pipeline(NULL)
    , _pipelineStopRec(NULL)
    , _videoSink(NULL)
    , _socket(NULL)
    , _serverPresent(false)
#endif
    , _videoSurface(NULL)
    , _videoRunning(false)
    , _showFullScreen(false)
{
    _videoSurface  = new VideoSurface;
#if defined(QGC_GST_STREAMING)
    _setVideoSink(_videoSurface->videoSink());
    _timer.setSingleShot(true);
    connect(&_timer, &QTimer::timeout, this, &VideoReceiver::_timeout);
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    stop();
    if(_socket) {
        delete _socket;
    }
    if (_videoSink) {
        gst_object_unref(_videoSink);
    }
#endif
    if(_videoSurface)
        delete _videoSurface;
}

#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_setVideoSink(GstElement* sink)
{
    if (_videoSink) {
        gst_object_unref(_videoSink);
        _videoSink = NULL;
    }
    if (sink) {
        _videoSink = sink;
        gst_object_ref_sink(_videoSink);
    }
}
#endif

//-----------------------------------------------------------------------------
void
VideoReceiver::grabImage(QString imageFile)
{
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name;
    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    gst_caps_unref(p_caps);  // release the template caps reference returned above
    GstElement* sink = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, sink, "sink") == false)
        qCritical() << "newPadCB : failed to link elements\n";
    g_free(name);
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_connected()
{
    //-- Server showed up. Now we start the stream.
    _timer.stop();
    _socket->deleteLater();
    _socket = NULL;
    _serverPresent = true;
    start();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    _socket->deleteLater();
    _socket = NULL;
    //-- Try again in 5 seconds
    _timer.start(5000);
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_timeout()
{
    //-- If socket is live, we got no connection nor a socket error
    if(_socket) {
        delete _socket;
        _socket = NULL;
    }
    //-- RTSP will try to connect to the server. If it cannot connect,
    //   it will simply give up and never try again. Instead, we keep
    //   attempting a connection on this timer. Once a connection is
    //   found to be working, only then we actually start the stream.
    QUrl url(_uri);
    _socket = new QTcpSocket;
    connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
    connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
    //qCDebug(VideoReceiverLog) << "Trying to connect to:" << url.host() << url.port();
    _socket->connectToHost(url.host(), url.port());
    _timer.start(5000);
}
#endif

//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
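//
// As a rough, illustrative gst-launch-1.0 sketch of the UDP case only (the
// address and autovideosink are placeholders, not what this code sets up):
//
//    gst-launch-1.0 udpsrc uri=udp://0.0.0.0:5600 \
//        caps="application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264" \
//        ! rtph264depay ! h264parse ! tee ! queue ! avdec_h264 ! queue ! autovideosink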
void
VideoReceiver::start()
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "start()";

    if (_uri.isEmpty()) {
        qCritical() << "VideoReceiver::start() failed because URI is not specified";
        return;
    }
    if (_videoSink == NULL) {
        qCritical() << "VideoReceiver::start() failed because video sink is not set";
        return;
    }
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }

    _starting = true;

    bool isUdp  = _uri.contains("udp://");
    bool isRtsp = _uri.contains("rtsp://");
    bool isTCP  = _uri.contains("tcp://");

    //-- For RTSP and TCP, check to see if server is there first
    if(!_serverPresent && (isRtsp || isTCP)) {
        _timer.start(100);
        return;
    }

    bool running = false;
    bool pipelineUp = false;

    GstElement*     dataSource  = NULL;
    GstCaps*        caps        = NULL;
    GstElement*     demux       = NULL;
    GstElement*     parser      = NULL;
    GstElement*     queue       = NULL;
    GstElement*     decoder     = NULL;
    GstElement*     queue1      = NULL;

    do {
        if ((_pipeline = gst_pipeline_new("receiver")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
            break;
        }

        if(isUdp) {
            dataSource = gst_element_factory_make("udpsrc", "udp-source");
        } else if(isTCP) {
            dataSource = gst_element_factory_make("tcpclientsrc", "tcpclient-source");
        } else {
            dataSource = gst_element_factory_make("rtspsrc", "rtsp-source");
        }

        if (!dataSource) {
            qCritical() << "VideoReceiver::start() failed. Error with data source for gst_element_factory_make()";
            break;
        }

        if(isUdp) {
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
            g_object_set(G_OBJECT(dataSource), "uri", qPrintable(_uri), "caps", caps, NULL);
        } else if(isTCP) {
            QUrl url(_uri);
            g_object_set(G_OBJECT(dataSource), "host", qPrintable(url.host()), "port", url.port(), NULL );
        } else {
            g_object_set(G_OBJECT(dataSource), "location", qPrintable(_uri), "latency", 17, "udp-reconnect", 1, "timeout", static_cast<guint64>(5000000), NULL);
        }

        // Currently, we expect H264 when using anything except for TCP.  Long term we may want this to be settable
        if (isTCP) {
            if ((demux = gst_element_factory_make("tsdemux", "mpeg2-ts-demuxer")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tsdemux')";
                break;
            }
        } else {
            if ((demux = gst_element_factory_make("rtph264depay", "rtp-h264-depacketizer")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('rtph264depay')";
                break;
            }
        }

        if ((parser = gst_element_factory_make("h264parse", "h264-parser")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('h264parse')";
            break;
        }

        if((_tee = gst_element_factory_make("tee", NULL)) == NULL)  {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
            break;
        }

        if((queue = gst_element_factory_make("queue", NULL)) == NULL)  {
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
            break;
        }
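
        // A lower-latency variant along the lines of the TODO above might swap
        // this queue for a queue2 limited to a single buffer (untested sketch;
        // the latency benefit has not been verified here):
        //
        //     queue = gst_element_factory_make("queue2", NULL);
        //     g_object_set(G_OBJECT(queue), "max-size-buffers", 1, NULL);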

        if ((decoder = gst_element_factory_make("avdec_h264", "h264-decoder")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('avdec_h264')";
            break;
        }

        if ((queue1 = gst_element_factory_make("queue", NULL)) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue') [1]";
            break;
        }

        gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, NULL);
        pipelineUp = true;

        if(isUdp) {
            // Link the pipeline in front of the tee
            if(!gst_element_link_many(dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, NULL)) {
                qCritical() << "Unable to link UDP elements.";
                break;
            }
        } else if (isTCP) {
            if(!gst_element_link(dataSource, demux)) {
                qCritical() << "Unable to link TCP dataSource to Demux.";
                break;
            }
            if(!gst_element_link_many(parser, _tee, queue, decoder, queue1, _videoSink, NULL)) {
                qCritical() << "Unable to link TCP pipline to parser.";
                break;
            }
            g_signal_connect(demux, "pad-added", G_CALLBACK(newPadCB), parser);
        } else {
            g_signal_connect(dataSource, "pad-added", G_CALLBACK(newPadCB), demux);
            if(!gst_element_link_many(demux, parser, _tee, queue, decoder, _videoSink, NULL)) {
                qCritical() << "Unable to link RTSP elements.";
                break;
            }
        }

        dataSource = demux = parser = queue = decoder = queue1 = NULL;

        GstBus* bus = NULL;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = NULL;
        }

        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (caps != NULL) {
        gst_caps_unref(caps);
        caps = NULL;
    }

    if (!running) {
        qCritical() << "VideoReceiver::start() failed";

        // In newer GStreamer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != NULL) {
            gst_object_unref(_pipeline);
            _pipeline = NULL;
        }

        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
            if (decoder != NULL) {
                gst_object_unref(decoder);
                decoder = NULL;
            }

            if (parser != NULL) {
                gst_object_unref(parser);
                parser = NULL;
            }

            if (demux != NULL) {
                gst_object_unref(demux);
                demux = NULL;
            }

            if (dataSource != NULL) {
                gst_object_unref(dataSource);
                dataSource = NULL;
            }

            if (_tee != NULL) {
                gst_object_unref(_tee);
                _tee = NULL;
            }

            if (queue != NULL) {
                gst_object_unref(queue);
                queue = NULL;
            }
        }

        _running = false;
    } else {
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
    }
    _starting = false;
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stop()";
    if(!_streaming) {
        _shutdownPipeline();
    } else if (_pipeline != NULL && !_stopping) {
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
        gst_object_unref(bus);
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
            qCritical() << "Error stopping pipeline!";
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
        gst_message_unref(message);
    }
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::setUri(const QString & uri)
{
    _uri = uri;
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    GstBus* bus = NULL;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = NULL;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    gst_bin_remove(GST_BIN(_pipeline), _videoSink);
    gst_object_unref(_pipeline);
    _pipeline = NULL;
    delete _sink;
    _sink = NULL;
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    _shutdownPipeline();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleEOS() {
    if(_stopping) {
        _shutdownPipeline();
        qCDebug(VideoReceiverLog) << "Stopped";
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
        qCritical() << "VideoReceiver: Unexpected EOS!";
        _shutdownPipeline();
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != NULL && data != NULL);
    VideoReceiver* pThis = (VideoReceiver*)data;

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCritical() << error->message;
        g_error_free(error);
        pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        pThis->msgStateChangedReceived();
        break;
    default:
        break;
    }

    return TRUE;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_cleanupOldVideos()
{
    QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
    QDir videoDir = QDir(savePath);
    videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
    videoDir.setSorting(QDir::Time);
    //-- All the movie extensions we support
    QStringList nameFilters;
    for(uint32_t i = 0; i < NUM_MUXES; i++) {
        nameFilters << QString("*.") + QString(kVideoExtensions[i]);
    }
    videoDir.setNameFilters(nameFilters);
    //-- get the list of videos stored
    QFileInfoList vidList = videoDir.entryInfoList();
    if(!vidList.isEmpty()) {
        uint64_t total   = 0;
        //-- Settings are stored using MB
        uint64_t maxSize = (qgcApp()->toolbox()->settingsManager()->videoSettings()->maxVideoSize()->rawValue().toUInt() * 1024 * 1024);
        //-- Compute total used storage
        for(int i = 0; i < vidList.size(); i++) {
            total += vidList[i].size();
        }
        //-- Remove old movies until max size is satisfied.
        while(total >= maxSize && !vidList.isEmpty()) {
            total -= vidList.last().size();
            qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
            QFile file (vidList.last().filePath());
            file.remove();
            vidList.removeLast();
        }
    }
}
#endif

//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   |
//                                   |    +--------------_sink-------------------+
//                                   |    |                                      |
//   we are adding these elements->  +->teepad-->queue-->matroskamux-->_filesink |
//                                        |                                      |
//                                        +--------------------------------------+
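//
// In rough gst-launch-1.0 terms the added branch corresponds to (illustrative
// only; the muxer is picked from kVideoMuxes and the location is generated):
//
//    tee. ! queue ! h264parse ! matroskamux ! filesink location=<videoFile>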
void
VideoReceiver::startRecording(void)
{
#if defined(QGC_GST_STREAMING)

    qCDebug(VideoReceiverLog) << "startRecording()";
    // exit immediately if we are already recording
    if(_pipeline == NULL || _recording) {
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

    QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
    if(savePath.isEmpty()) {
        qgcApp()->showMessage(tr("Unable to record video. Video save path must be specified in Settings."));
        return;
    }

    uint32_t muxIdx = qgcApp()->toolbox()->settingsManager()->videoSettings()->recordingFormat()->rawValue().toUInt();
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

    _sink           = new Sink();
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", NULL);
    _sink->parse    = gst_element_factory_make("h264parse", NULL);
    _sink->mux      = gst_element_factory_make(kVideoMuxes[muxIdx], NULL);
    _sink->filesink = gst_element_factory_make("filesink", NULL);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->mux || !_sink->filesink || !_sink->parse) {
        qCritical() << "VideoReceiver::startRecording() failed to make _sink elements";
        return;
    }

    QString videoFile;
    videoFile = savePath + "/" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") + "." + kVideoExtensions[muxIdx];

    g_object_set(G_OBJECT(_sink->filesink), "location", qPrintable(videoFile), NULL);
    qCDebug(VideoReceiverLog) << "New video file:" << videoFile;

    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->parse);
    gst_object_ref(_sink->mux);
    gst_object_ref(_sink->filesink);

    gst_bin_add_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    gst_element_sync_state_with_parent(_sink->queue);
    gst_element_sync_state_with_parent(_sink->parse);
    gst_element_sync_state_with_parent(_sink->mux);
    gst_element_sync_state_with_parent(_sink->filesink);

    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stopRecording()";
    // exit immediately if we are not recording
    if(_pipeline == NULL || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for data block before unlinking
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, NULL);
#endif
}

//-----------------------------------------------------------------------------
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recording elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
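// (Muxers such as mp4mux and matroskamux only finalize their headers/index when
//  they receive EOS, so skipping this step could leave the recorded file
//  unseekable or unplayable.)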
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownRecordingBranch()
{
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->queue);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->parse);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->mux);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->filesink);

    gst_element_set_state(_pipelineStopRec, GST_STATE_NULL);
    gst_object_unref(_pipelineStopRec);
    _pipelineStopRec = NULL;

    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);
    gst_element_set_state(_sink->parse,     GST_STATE_NULL);
    gst_element_set_state(_sink->mux,       GST_STATE_NULL);
    gst_element_set_state(_sink->queue,     GST_STATE_NULL);

    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->parse);
    gst_object_unref(_sink->mux);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = NULL;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording Stopped";
}
#endif

//-----------------------------------------------------------------------------
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)

    // Also unlinks and unrefs
    gst_bin_remove_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);

    // Create temporary pipeline
    _pipelineStopRec = gst_pipeline_new("pipeStopRec");

    // Put our elements from the recording branch into the temporary pipeline
    gst_bin_add_many(GST_BIN(_pipelineStopRec), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    // Add handler for EOS event
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipelineStopRec));
    gst_bus_enable_sync_message_emission(bus);
    g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
    gst_object_unref(bus);

    if(gst_element_set_state(_pipelineStopRec, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        qCDebug(VideoReceiverLog) << "problem starting _pipelineStopRec";
    }

    // Send EOS at the beginning of the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording branch unlinked";
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != NULL && user_data != NULL) {
        VideoReceiver* pThis = (VideoReceiver*)user_data;
        // We will only act once
        if(g_atomic_int_compare_and_exchange(&pThis->_sink->removing, FALSE, TRUE)) {
            pThis->_detachRecordingBranch(info);
        }
    }
    return GST_PAD_PROBE_REMOVE;
}
#endif

//-----------------------------------------------------------------------------
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
    if(_videoSurface) {
        if(stopping() || starting()) {
            return;
        }
        if(streaming()) {
            if(!_videoRunning) {
                _videoSurface->setLastFrame(0);
                _videoRunning = true;
                emit videoRunningChanged();
            }
        } else {
            if(_videoRunning) {
                _videoRunning = false;
                emit videoRunningChanged();
            }
        }
        if(_videoRunning) {
            time_t elapsed = 0;
            time_t lastFrame = _videoSurface->lastFrame();
            if(lastFrame != 0) {
                elapsed = time(0) - _videoSurface->lastFrame();
            }
            if(elapsed > 2 && _videoSurface) {
                stop();
            }
        } else {
            if(!running() && !_uri.isEmpty()) {
                start();
            }
        }
    }
#endif
}