VideoReceiver.cc 40 KB
Newer Older
1 2
/****************************************************************************
 *
Gus Grubba's avatar
Gus Grubba committed
3
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
4 5 6 7 8
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/
Gus Grubba's avatar
Gus Grubba committed
9 10 11 12 13


/**
 * @file
 *   @brief QGC Video Receiver
Gus Grubba's avatar
Gus Grubba committed
14
 *   @author Gus Grubba <gus@auterion.com>
Gus Grubba's avatar
Gus Grubba committed
15 16 17
 */

#include "VideoReceiver.h"
18 19
#include "SettingsManager.h"
#include "QGCApplication.h"
20
#include "VideoManager.h"
21 22 23
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif
Gus Grubba's avatar
Gus Grubba committed
24
#include <QDebug>
25
#include <QUrl>
26 27
#include <QDir>
#include <QDateTime>
28
#include <QSysInfo>
29

30 31
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

32 33
#if defined(QGC_GST_STREAMING)

34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
// File extensions we write recordings with; each entry pairs positionally
// with the muxer in kVideoMuxes below (index i of one matches index i of the
// other), so the two tables must stay the same length and order.
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

// GStreamer muxer element names corresponding to kVideoExtensions.
static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

// Number of entries in kVideoMuxes (and, by the pairing above, kVideoExtensions).
#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

50 51 52
#endif


Gus Grubba's avatar
Gus Grubba committed
53 54
//-----------------------------------------------------------------------------
// Constructs the receiver and wires up all timers and message-handling
// signal/slot connections. GStreamer-specific members exist only when
// QGC_GST_STREAMING is compiled in.
VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _stop(true)
    , _sink(nullptr)
    , _tee(nullptr)
    , _pipeline(nullptr)
    , _videoSink(nullptr)
    , _lastFrameId(G_MAXUINT64)
    , _lastFrameTime(0)
    , _restart_time_ms(1389)
    , _socket(nullptr)
    , _serverPresent(false)
    , _tcpTestInterval_ms(5000)
    , _udpReconnect_us(5000000)
#endif
    , _videoRunning(false)
    , _showFullScreen(false)
    , _videoSettings(nullptr)
{
    _videoSettings = qgcApp()->toolbox()->settingsManager()->videoSettings();
#if defined(QGC_GST_STREAMING)
    // One-shot timer that asks the VideoManager to rebuild the stream after a failure.
    _restart_timer.setSingleShot(true);
    connect(&_restart_timer, &QTimer::timeout, this, &VideoReceiver::_restart_timeout);
    // One-shot timer used to probe RTSP/TCP servers before starting the pipeline.
    _tcp_timer.setSingleShot(true);
    connect(&_tcp_timer, &QTimer::timeout, this, &VideoReceiver::_tcp_timeout);
    // Pipeline bus messages are re-emitted as Qt signals and handled here.
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
    // Once-a-second watchdog that decides whether frames are still flowing.
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

// Destructor: tear the pipeline down first, then drop the video sink reference.
VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    stop();
    setVideoSink(nullptr);
#endif
}

100 101 102 103 104 105 106 107 108
//-----------------------------------------------------------------------------
// Request that a frame be captured to `imageFile`. This only records the
// target path and emits imageFileChanged(); the capture itself is presumably
// performed by whichever component listens for that signal — confirm against
// the caller (not visible in this file).
void
VideoReceiver::grabImage(QString imageFile)
{
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
109
#if defined(QGC_GST_STREAMING)
110 111
//-----------------------------------------------------------------------------
// "pad-added" handler: links a newly created src pad on `element` to the
// "sink" pad of the downstream element passed through `data`.
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    //g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    // FIX: gst_pad_get_pad_template_caps() returns a new reference in
    // GStreamer 1.x — it was previously leaked here.
    gst_caps_unref(p_caps);
    GstElement* sink = GST_ELEMENT(data);
    if (gst_element_link_pads(element, name, sink, "sink") == false) {
        qCCritical(VideoReceiverLog) << "Failed to link elements\n";
    }
    g_free(name);
}

125 126 127 128 129 130 131 132
// Answers a CAPS query during decodebin autoplugging with whatever the
// glupload element (passed via `data`) can accept on its sink pad, narrowed
// by the filter embedded in the query. Returns FALSE when the result is empty.
static gboolean
autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glupload = (GstElement* )data;

    GstPad* uploadSink = gst_element_get_static_pad(glupload, "sink");

    if (!uploadSink) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    GstCaps* filter;
    gst_query_parse_caps(query, &filter);

    GstCaps* acceptedCaps = gst_pad_query_caps(uploadSink, filter);
    gst_query_set_caps_result(query, acceptedCaps);

    const gboolean handled = !gst_caps_is_empty(acceptedCaps);

    gst_caps_unref(acceptedCaps);
    gst_object_unref(uploadSink);

    return handled;
}

// Forwards a CONTEXT query during autoplugging straight to the sink pad of
// the video sink element passed via `data`.
static gboolean
autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glsink = (GstElement* )data;

    GstPad* sinkPad = gst_element_get_static_pad(glsink, "sink");

    if (!sinkPad) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    const gboolean handled = gst_pad_query(sinkPad, query);

    gst_object_unref(sinkPad);

    return handled;
}

// "autoplug-query" dispatcher: routes CAPS and CONTEXT queries to the helpers
// above; every other query type is reported as unhandled.
static gboolean
autoplugQueryCB(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:
        return autoplugQueryCaps(bin, pad, element, query, data);
    case GST_QUERY_CONTEXT:
        return autoplugQueryContext(bin, pad, element, query, data);
    default:
        return FALSE;
    }
}

196 197 198 199 200 201 202 203 204 205 206 207 208
//-----------------------------------------------------------------------------
// Exposes `pad` on the enclosing bin through a ghost pad so that elements
// outside the bin can link to it.
static void
_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);

    GstPad* ghostpad = gst_ghost_pad_new(name, pad);

    g_free(name);

    gst_pad_set_active(ghostpad, TRUE);

    if (!gst_element_add_pad(GST_ELEMENT_PARENT(element), ghostpad)) {
        qCCritical(VideoReceiverLog) << "Failed to add ghost pad to source";
        // FIX: on failure gst_element_add_pad() does not take ownership of
        // the (floating) ghost pad, so release it here to avoid a leak.
        gst_object_unref(ghostpad);
    }
}

// If the freshly added pad carries RTP data, splice an rtpjitterbuffer in
// front of the downstream element before performing the usual pad link via
// newPadCB(). On any partial failure we fall back to linking the original pad.
static void
_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
{
    gboolean padCarriesRtp = FALSE;

    GstCaps* rtpFilter = gst_caps_from_string("application/x-rtp");

    if (rtpFilter != nullptr) {
        GstCaps* padCaps = gst_pad_query_caps(pad, nullptr);

        if (padCaps != nullptr) {
            if (!gst_caps_is_any(padCaps) && gst_caps_can_intersect(padCaps, rtpFilter)) {
                padCarriesRtp = TRUE;
            }
            gst_caps_unref(padCaps);
            padCaps = nullptr;
        }

        gst_caps_unref(rtpFilter);
        rtpFilter = nullptr;
    }

    if (padCarriesRtp) {
        GstElement* jitterBuffer = gst_element_factory_make("rtpjitterbuffer", nullptr);

        if (jitterBuffer != nullptr) {
            gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), jitterBuffer);

            gst_element_sync_state_with_parent(jitterBuffer);

            GstPad* bufferSink = gst_element_get_static_pad(jitterBuffer, "sink");

            if (bufferSink != nullptr) {
                const GstPadLinkReturn linkResult = gst_pad_link(pad, bufferSink);

                gst_object_unref(bufferSink);
                bufferSink = nullptr;

                if (linkResult == GST_PAD_LINK_OK) {
                    // Continue the downstream link from the jitter buffer's
                    // src pad instead of the original pad.
                    pad = gst_element_get_static_pad(jitterBuffer, "src");
                    element = jitterBuffer;
                } else {
                    qCDebug(VideoReceiverLog) << "Partially failed - gst_pad_link()";
                }
            } else {
                qCDebug(VideoReceiverLog) << "Partially failed - gst_element_get_static_pad()";
            }
        } else {
            qCDebug(VideoReceiverLog) << "Partially failed - gst_element_factory_make('rtpjitterbuffer')";
        }
    }

    newPadCB(element, pad, data);
}

// Pad iterator callback for gst_element_foreach_src_pad(): sets bit 0 of the
// int pointed to by `user_data` when at least one src pad exists, and bit 1
// when that pad can produce RTP data. Always returns TRUE to keep iterating.
static gboolean
_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
    int* flags = (int*)user_data;

    *flags |= 1;

    GstCaps* rtpFilter = gst_caps_from_string("application/x-rtp");

    if (rtpFilter != nullptr) {
        GstCaps* padCaps = gst_pad_query_caps(pad, nullptr);

        if (padCaps != nullptr) {
            if (!gst_caps_is_any(padCaps) && gst_caps_can_intersect(padCaps, rtpFilter)) {
                *flags |= 2;
            }
            gst_caps_unref(padCaps);
            padCaps = nullptr;
        }

        gst_caps_unref(rtpFilter);
        rtpFilter = nullptr;
    }

    return TRUE;
}

//-----------------------------------------------------------------------------
// Builds the "sourcebin" for the given URI: a data source element
// (tcpclientsrc / rtspsrc / udpsrc), an optional rtpjitterbuffer, and a
// demuxer/parser, all wrapped in a bin whose pads are exposed through
// _wrapWithGhostPad(). Returns the bin, or nullptr on any failure (with all
// partially constructed elements released).
GstElement*
VideoReceiver::_makeSource(const QString& uri)
{
    if (uri.isEmpty()) {
        qCCritical(VideoReceiverLog) << "Failed because URI is not specified";
        return nullptr;
    }

    bool isTaisync  = uri.contains("tsusb://");
    bool isUdp264   = uri.contains("udp://");
    bool isRtsp     = uri.contains("rtsp://");
    bool isUdp265   = uri.contains("udp265://");
    bool isTcpMPEGTS= uri.contains("tcp://");
    bool isUdpMPEGTS= uri.contains("mpegts://");

    GstElement* source  = nullptr;
    GstElement* buffer  = nullptr;
    GstElement* parser  = nullptr;
    GstElement* bin     = nullptr;
    GstElement* srcbin  = nullptr;

    do {
        QUrl url(uri);

        if(isTcpMPEGTS) {
            if ((source = gst_element_factory_make("tcpclientsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "host", qPrintable(url.host()), "port", url.port(), nullptr);
            }
        } else if (isRtsp) {
            if ((source = gst_element_factory_make("rtspsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "location", qPrintable(uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
            }
        } else if(isUdp264 || isUdp265 || isUdpMPEGTS || isTaisync) {
            if ((source = gst_element_factory_make("udpsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "uri", QString("udp://%1:%2").arg(qPrintable(url.host()), QString::number(url.port())).toUtf8().data(), nullptr);

                GstCaps* caps = nullptr;

                if(isUdp264) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                } else if (isUdp265) {
                    // FIX: this branch previously re-tested `isUdp264`, making
                    // the H265 caps unreachable for udp265:// streams.
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                }

                if (caps != nullptr) {
                    g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
                    gst_caps_unref(caps);
                    caps = nullptr;
                }
            }
        } else {
            qCDebug(VideoReceiverLog) << "URI is not recognized";
        }

        if (!source) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make() for data source failed";
            break;
        }

        // FIXME: AV: Android does not determine MPEG2-TS via parsebin - have to explicitly state which demux to use
        if (isTcpMPEGTS || isUdpMPEGTS) {
            if ((parser = gst_element_factory_make("tsdemux", "parser")) == nullptr) {
                // FIX: was qCritical(VideoReceiverLog), which passes the logging
                // category where a printf-style format string is expected.
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('tsdemux') failed";
                break;
            }
        } else {
            if ((parser = gst_element_factory_make("parsebin", "parser")) == nullptr) {
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('parsebin') failed";
                break;
            }
        }

        if ((bin = gst_bin_new("sourcebin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sourcebin') failed";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);

        // Probe the source's src pads (if any exist yet) to decide whether we
        // can link statically and whether an RTP jitter buffer is needed.
        int probeRes = 0;

        gst_element_foreach_src_pad(source, _padProbe, &probeRes);

        if (probeRes & 1) {
            if (probeRes & 2) {
                if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
                    qCCritical(VideoReceiverLog) << "gst_element_factory_make('rtpjitterbuffer') failed";
                    break;
                }

                gst_bin_add(GST_BIN(bin), buffer);

                if (!gst_element_link_many(source, buffer, parser, nullptr)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            } else {
                if (!gst_element_link(source, parser)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            }
        } else {
            // No pads yet (e.g. rtspsrc) — link dynamically when they appear.
            g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
        }

        g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);

        // Ownership of the elements has passed to `bin`; hand the bin out.
        source = buffer = parser = nullptr;

        srcbin = bin;
        bin = nullptr;
    } while(0);

    // Release anything we still own after a failure.
    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    if (parser != nullptr) {
        gst_object_unref(parser);
        parser = nullptr;
    }

    if (buffer != nullptr) {
        gst_object_unref(buffer);
        buffer = nullptr;
    }

    if (source != nullptr) {
        gst_object_unref(source);
        source = nullptr;
    }

    return srcbin;
}

439 440
//-----------------------------------------------------------------------------
void
441
VideoReceiver::_restart_timeout()
442
{
443
    qgcApp()->toolbox()->videoManager()->restartVideo();
444
}
445
#endif
446

447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508
//-----------------------------------------------------------------------------
// Probe timer tick: (re)try a plain TCP connection to the stream server.
// A live _socket here means the previous attempt neither connected nor
// errored, so it is discarded and a fresh attempt is started. On success
// _connected() runs; on error _socketError() re-arms this timer.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_tcp_timeout()
{
    //-- If socket is live, we got no connection nor a socket error
    delete _socket;
    _socket = nullptr;

    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        //-- RTSP will try to connect to the server. If it cannot connect,
        //   it will simply give up and never try again. Instead, we keep
        //   attempting a connection on this timer. Once a connection is
        //   found to be working, only then we actually start the stream.
        QUrl url(_uri);
        //-- If RTSP and no port is defined, set default RTSP port (554)
        if(_uri.contains("rtsp://") && url.port() <= 0) {
            url.setPort(554);
        }
        _socket = new QTcpSocket;
        QNetworkProxy tempProxy;
        tempProxy.setType(QNetworkProxy::DefaultProxy);
        _socket->setProxy(tempProxy);
        // NOTE(review): QTcpSocket::error as a signal is deprecated since
        // Qt 5.15 in favour of errorOccurred — confirm the Qt version in use.
        connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
        connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
        _socket->connectToHost(url.host(), static_cast<uint16_t>(url.port()));
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

//-----------------------------------------------------------------------------
// The probe socket reached the server: the endpoint is alive, so drop the
// probe socket, mark the server present, and start the real pipeline.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_connected()
{
    //-- Server showed up. Now we start the stream.
    _tcp_timer.stop();
    _socket->deleteLater();
    _socket = nullptr;
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        _serverPresent = true;
        start();
    }
}
#endif

//-----------------------------------------------------------------------------
// The probe socket failed to connect: dispose of it and, while streaming is
// still enabled, schedule another probe attempt via _tcp_timer.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    _socket->deleteLater();
    _socket = nullptr;
    //-- Try again in a while
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

509
//-----------------------------------------------------------------------------
510 511 512 513 514 515 516 517
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
518 519
void
VideoReceiver::start()
Gus Grubba's avatar
Gus Grubba committed
520
{
521
    qCDebug(VideoReceiverLog) << "Starting " << _uri;
Gus Grubba's avatar
Gus Grubba committed
522
    if(qgcApp()->runningUnitTests()) {
523 524
        return;
    }
525 526
    if(!_videoSettings->streamEnabled()->rawValue().toBool() ||
       !_videoSettings->streamConfigured()) {
527
        qCDebug(VideoReceiverLog) << "Stream not enabled/configured";
528 529
        return;
    }
530

531
#if defined(QGC_GST_STREAMING)
532
    _stop = false;
533

534 535
    QString uri = _uri;

536
#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
537
    //-- Taisync on iOS or Android sends a raw h.264 stream
538 539 540
    if (qgcApp()->toolbox()->videoManager()->isTaisync()) {
        uri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
    }
541 542
#endif

543
    if (uri.isEmpty()) {
544
        qCDebug(VideoReceiverLog) << "Failed because URI is not specified";
Gus Grubba's avatar
Gus Grubba committed
545 546
        return;
    }
547

548 549
    bool useTcpConnection = uri.contains("rtsp://") || uri.contains("tcp://");

Gus Grubba's avatar
Gus Grubba committed
550
    if (_videoSink == nullptr) {
551
        qCWarning(VideoReceiverLog) << "Failed because video sink is not set";
Gus Grubba's avatar
Gus Grubba committed
552 553
        return;
    }
554 555 556 557
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }
Gus Grubba's avatar
Gus Grubba committed
558

559
    _starting = true;
560

561 562 563 564 565 566
    //-- For RTSP and TCP, check to see if server is there first
    if(!_serverPresent && useTcpConnection) {
        _tcp_timer.start(100);
        return;
    }

567 568 569
    _lastFrameId = G_MAXUINT64;
    _lastFrameTime = 0;

570
    bool running    = false;
571
    bool pipelineUp = false;
Gus Grubba's avatar
Gus Grubba committed
572

573 574 575
    GstElement* source  = nullptr;
    GstElement* queue   = nullptr;
    GstElement* decoder = nullptr;
576

Gus Grubba's avatar
Gus Grubba committed
577
    do {
Gus Grubba's avatar
Gus Grubba committed
578
        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
579
            qCCritical(VideoReceiverLog) << "gst_pipeline_new() failed";
Gus Grubba's avatar
Gus Grubba committed
580 581 582
            break;
        }

583 584
        g_object_set(_pipeline, "message-forward", TRUE, nullptr);

585
        if ((source = _makeSource(uri)) == nullptr) {
586
            qCCritical(VideoReceiverLog) << "_makeSource() failed";
Gus Grubba's avatar
Gus Grubba committed
587 588 589
            break;
        }

Gus Grubba's avatar
Gus Grubba committed
590
        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
591
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('tee') failed";
592 593
            break;
        }
Gus Grubba's avatar
Gus Grubba committed
594

Gus Grubba's avatar
Gus Grubba committed
595
        if((queue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
596 597
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
598
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('queue') failed";
599 600
            break;
        }
601

602
        if ((decoder = gst_element_factory_make("decodebin", "decoder")) == nullptr) {
603
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('decodebin') failed";
604 605 606
            break;
        }

607 608
        gst_bin_add_many(GST_BIN(_pipeline), source, _tee, queue, decoder, _videoSink, nullptr);

609
        pipelineUp = true;
610

611 612 613
        g_signal_connect(source, "pad-added", G_CALLBACK(newPadCB), _tee);

        if(!gst_element_link_many(_tee, queue, decoder, nullptr)) {
614
            qCCritical(VideoReceiverLog) << "Unable to receiver pipeline.";
615
            break;
616 617
        }

618 619 620
        g_signal_connect(decoder, "pad-added", G_CALLBACK(newPadCB), _videoSink);
        g_signal_connect(decoder, "autoplug-query", G_CALLBACK(autoplugQueryCB), _videoSink);

621
        source = queue = decoder = nullptr;
Gus Grubba's avatar
Gus Grubba committed
622

Gus Grubba's avatar
Gus Grubba committed
623
        GstBus* bus = nullptr;
Gus Grubba's avatar
Gus Grubba committed
624

Gus Grubba's avatar
Gus Grubba committed
625
        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
626 627 628
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
Gus Grubba's avatar
Gus Grubba committed
629
            bus = nullptr;
630
        }
Gus Grubba's avatar
Gus Grubba committed
631

632
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-paused");
Gus Grubba's avatar
Gus Grubba committed
633 634 635 636 637
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (!running) {
638
        qCCritical(VideoReceiverLog) << "Failed";
Gus Grubba's avatar
Gus Grubba committed
639

640
        // In newer versions, the pipeline will clean up all references that are added to it
Gus Grubba's avatar
Gus Grubba committed
641
        if (_pipeline != nullptr) {
642
            gst_bin_remove(GST_BIN(_pipeline), _videoSink);
643
            gst_object_unref(_pipeline);
Gus Grubba's avatar
Gus Grubba committed
644
            _pipeline = nullptr;
Gus Grubba's avatar
Gus Grubba committed
645 646
        }

647 648
        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
Gus Grubba's avatar
Gus Grubba committed
649
            if (decoder != nullptr) {
650
                gst_object_unref(decoder);
Gus Grubba's avatar
Gus Grubba committed
651
                decoder = nullptr;
652
            }
Gus Grubba's avatar
Gus Grubba committed
653

654 655 656 657 658
            if (queue != nullptr) {
                gst_object_unref(queue);
                queue = nullptr;
            }

659 660 661
            if (source != nullptr) {
                gst_object_unref(source);
                source = nullptr;
662
            }
663

Gus Grubba's avatar
Gus Grubba committed
664
            if (_tee != nullptr) {
665
                gst_object_unref(_tee);
666
                _tee = nullptr;
667
            }
668

Gus Grubba's avatar
Gus Grubba committed
669
        }
670 671 672

        _running = false;
    } else {
673
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-playing");
674 675
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
Gus Grubba's avatar
Gus Grubba committed
676
    }
677
    _starting = false;
678
#endif
Gus Grubba's avatar
Gus Grubba committed
679 680
}

681 682 683
//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
Gus Grubba's avatar
Gus Grubba committed
684
{
685
    if(qgcApp() && qgcApp()->runningUnitTests()) {
686 687
        return;
    }
688
#if defined(QGC_GST_STREAMING)
689
    _stop = true;
690
    qCDebug(VideoReceiverLog) << "Stopping";
691 692
    if(!_streaming) {
        _shutdownPipeline();
Gus Grubba's avatar
Gus Grubba committed
693
    } else if (_pipeline != nullptr && !_stopping) {
694
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
695 696
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        gst_bus_disable_sync_message_emission(bus);
697 698
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
699
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
700
        gst_object_unref(bus);
701 702
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
703
            qCCritical(VideoReceiverLog) << "Error stopping pipeline!";
704 705 706
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
707
        gst_message_unref(message);
Gus Grubba's avatar
Gus Grubba committed
708
    }
709
#endif
Gus Grubba's avatar
Gus Grubba committed
710 711
}

712 713 714
//-----------------------------------------------------------------------------
void
VideoReceiver::setUri(const QString & uri)
Gus Grubba's avatar
Gus Grubba committed
715 716 717 718
{
    _uri = uri;
}

719
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Releases every GStreamer resource owned by the receiver and resets all
// state flags. Safe to call when no pipeline exists.
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    // Stop listening to bus messages before tearing the pipeline down.
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
    if (bus != nullptr) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = nullptr;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    // Detach the video sink so destroying the pipeline does not destroy it.
    // NOTE(review): relies on another reference to _videoSink being held
    // elsewhere (see setVideoSink) — confirm.
    gst_bin_remove(GST_BIN(_pipeline), _videoSink);
    _tee = nullptr;
    gst_object_unref(_pipeline);
    _pipeline = nullptr;
    delete _sink;
    _sink = nullptr;
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
#endif
748

749
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// A pipeline error was reported on the bus: stop everything and schedule an
// automatic restart through _restart_timer.
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    stop();
    _restart_timer.start(_restart_time_ms);
}
#endif

759
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// End-of-stream handling. During a deliberate stop() this completes the
// shutdown (closing the recording branch first if one is being removed);
// outside of a stop, an EOS while removing the recording branch just closes
// that branch, and any other EOS is unexpected and treated as an error.
void
VideoReceiver::_handleEOS() {
    if(_stopping) {
        if(_recording && _sink->removing) {
            _shutdownRecordingBranch();
        }
        _shutdownPipeline();
        qCDebug(VideoReceiverLog) << "Stopped";
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
        qCWarning(VideoReceiverLog) << "Unexpected EOS!";
        _handleError();
    }
}
#endif
Gus Grubba's avatar
Gus Grubba committed
777

778
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Tracks whether the pipeline has actually reached the PLAYING state.
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        //qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif

789
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// GStreamer bus callback: translates bus messages into the receiver's Qt
// signals (msgErrorReceived / msgEOSReceived / msgStateChangedReceived).
// EOS from inner bins arrives wrapped in a GstBinForwarded element message
// because "message-forward" is set on the pipeline.
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    VideoReceiver* receiver = static_cast<VideoReceiver*>(data);

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR: {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCCritical(VideoReceiverLog) << error->message;
        g_error_free(error);
        receiver->msgErrorReceived();
        break;
    }
    case GST_MESSAGE_EOS:
        receiver->msgEOSReceived();
        break;
    case GST_MESSAGE_STATE_CHANGED:
        receiver->msgStateChangedReceived();
        break;
    case GST_MESSAGE_ELEMENT: {
        const GstStructure* s = gst_message_get_structure (msg);

        if (gst_structure_has_name (s, "GstBinForwarded")) {
            GstMessage* forward_msg = NULL;
            gst_structure_get (s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);
            if (forward_msg != nullptr) {
                if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
                    receiver->msgEOSReceived();
                }
                gst_message_unref(forward_msg);
                forward_msg = nullptr;
            }
        }
        break;
    }
    default:
        break;
    }

    return TRUE;
}
#endif
838

839
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Deletes the oldest recordings in the video save path until total usage
// drops below the configured storage limit. No-op when the limit is disabled.
void
VideoReceiver::_cleanupOldVideos()
{
    //-- Only perform cleanup if storage limit is enabled
    if(_videoSettings->enableStorageLimit()->rawValue().toBool()) {
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        QDir videoDir = QDir(savePath);
        videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
        videoDir.setSorting(QDir::Time);
        //-- All the movie extensions we support
        QStringList nameFilters;
        // FIX: iterate by the extension table's own size instead of NUM_MUXES.
        // The tables happen to be the same length today, but coupling them
        // invites an out-of-bounds read if one of them changes.
        for(uint32_t i = 0; i < sizeof(kVideoExtensions) / sizeof(kVideoExtensions[0]); i++) {
            nameFilters << QString("*.") + QString(kVideoExtensions[i]);
        }
        videoDir.setNameFilters(nameFilters);
        //-- get the list of videos stored
        QFileInfoList vidList = videoDir.entryInfoList();
        if(!vidList.isEmpty()) {
            uint64_t total   = 0;
            //-- Settings are stored using MB
            // FIX: widen before multiplying so limits >= 4096 MB do not
            // overflow 32-bit arithmetic.
            uint64_t maxSize = static_cast<uint64_t>(_videoSettings->maxVideoSize()->rawValue().toUInt()) * 1024 * 1024;
            //-- Compute total used storage
            for(int i = 0; i < vidList.size(); i++) {
                total += vidList[i].size();
            }
            //-- Remove old movies until max size is satisfied.
            while(total >= maxSize && !vidList.isEmpty()) {
                total -= vidList.last().size();
                qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
                QFile file (vidList.last().filePath());
                file.remove();
                vidList.removeLast();
            }
        }
    }
}
#endif
878

879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902 903 904
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::setVideoSink(GstElement* videoSink)
{
    // Swapping the sink under a live pipeline is not supported.
    if(_pipeline != nullptr) {
        qCDebug(VideoReceiverLog) << "Video receiver pipeline is active, video sink change is not possible";
        return;
    }

    // Drop our reference to any previously configured sink.
    if (_videoSink != nullptr) {
        gst_object_unref(_videoSink);
        _videoSink = nullptr;
    }

    if (videoSink == nullptr) {
        return;
    }

    _videoSink = videoSink;
    gst_object_ref(_videoSink);

    // Install a buffer probe on the sink pad so frame arrivals can be
    // tracked (see _videoSinkProbe / _noteVideoSinkFrame).
    GstPad* pad = gst_element_get_static_pad(_videoSink, "sink");

    if (pad == nullptr) {
        qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink";
        return;
    }

    gst_pad_add_probe(pad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER), _videoSinkProbe, this, nullptr);
    gst_object_unref(pad);
}
#endif

911
//-----------------------------------------------------------------------------
912 913 914 915
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
916
//                         source-->tee
917
//                                   |
Andrew Voznytsa's avatar
Andrew Voznytsa committed
918 919 920 921 922
//                                   |    +---------_sink----------+
//                                   |    |                        |
//   we are adding these elements->  +->teepad-->queue-->_filesink |
//                                        |                        |
//                                        +------------------------+
923
#if defined(QGC_GST_STREAMING)
924 925 926 927 928 929 930 931 932 933
GstElement*
VideoReceiver::_makeFileSink(const QString& videoFile, unsigned format)
{
    // Builds the recording sink bin used by startRecording():
    //   ghost "sink" pad --> mux (request pad "video_%u") --> filesink
    // `format` indexes kVideoMuxes/kVideoExtensions (caller validates range).
    // Returns the assembled bin, or nullptr on any failure.
    GstElement* fileSink = nullptr;
    GstElement* mux = nullptr;
    GstElement* sink = nullptr;
    GstElement* bin = nullptr;
    // While true, mux/sink are free-standing and must be unreffed on error;
    // once added to the bin, the bin owns them and unreffing the bin suffices.
    bool releaseElements = true;

    do{
        if ((mux = gst_element_factory_make(kVideoMuxes[format], nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('" << kVideoMuxes[format] << "') failed";
            break;
        }

        if ((sink = gst_element_factory_make("filesink", nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('filesink') failed";
            break;
        }

        g_object_set(static_cast<gpointer>(sink), "location", qPrintable(videoFile), nullptr);

        if ((bin = gst_bin_new("sinkbin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sinkbin') failed";
            break;
        }

        GstPadTemplate* padTemplate;

        // Muxers expose video inputs as request pads named "video_%u".
        if ((padTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mux), "video_%u")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_class_get_pad_template(mux) failed";
            break;
        }

        // FIXME: AV: pad handling is potentially leaking (and other similar places too!)
        GstPad* pad;

        if ((pad = gst_element_request_pad(mux, padTemplate, nullptr, nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_request_pad(mux) failed";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), mux, sink, nullptr);

        // Ownership of mux/sink transferred to the bin above.
        releaseElements = false;

        // Expose the mux's request pad on the bin so outside elements can link to it.
        GstPad* ghostpad = gst_ghost_pad_new("sink", pad);

        gst_element_add_pad(bin, ghostpad);

        gst_object_unref(pad);
        pad = nullptr;

        if (!gst_element_link(mux, sink)) {
            qCCritical(VideoReceiverLog) << "gst_element_link() failed";
            break;
        }

        fileSink = bin;
        bin = nullptr;
    } while(0);

    // Error cleanup: free whatever was created before ownership moved to the bin.
    if (releaseElements) {
        if (sink != nullptr) {
            gst_object_unref(sink);
            sink = nullptr;
        }

        if (mux != nullptr) {
            gst_object_unref(mux);
            mux = nullptr;
        }
    }

    // bin is only non-null here if a step failed after its creation;
    // unreffing it also releases any children it already owns.
    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    return fileSink;
}
1005
#endif
1006

1007
void
1008
VideoReceiver::startRecording(const QString &videoFile)
1009
{
1010
#if defined(QGC_GST_STREAMING)
1011

1012
    qCDebug(VideoReceiverLog) << "Starting recording";
1013
    // exit immediately if we are already recording
Gus Grubba's avatar
Gus Grubba committed
1014
    if(_pipeline == nullptr || _recording) {
1015 1016 1017 1018
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

1019
    uint32_t muxIdx = _videoSettings->recordingFormat()->rawValue().toUInt();
1020 1021 1022 1023 1024 1025 1026 1027
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

Tomaz Canabrava's avatar
Tomaz Canabrava committed
1028 1029 1030 1031
    QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
    if(savePath.isEmpty()) {
        qgcApp()->showMessage(tr("Unabled to record video. Video save path must be specified in Settings."));
        return;
1032
    }
1033

Tomaz Canabrava's avatar
Tomaz Canabrava committed
1034 1035 1036
    _videoFile = savePath + "/"
                + (videoFile.isEmpty() ? QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") : videoFile)
                + "." + kVideoExtensions[muxIdx];
1037 1038

    qCDebug(VideoReceiverLog) << "New video file:" << _videoFile;
1039

1040
    emit videoFileChanged();
1041

1042 1043 1044 1045 1046 1047 1048
    _sink           = new Sink();
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", nullptr);
    _sink->filesink = _makeFileSink(_videoFile, muxIdx);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->filesink) {
1049
        qCCritical(VideoReceiverLog) << "Failed to make _sink elements";
1050 1051
        return;
    }
1052 1053 1054 1055

    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->filesink);

1056
    gst_bin_add(GST_BIN(_pipeline), _sink->queue);
1057 1058 1059

    gst_element_sync_state_with_parent(_sink->queue);

1060 1061 1062
    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
1063 1064
    // Once we have this valid frame, we attach the filesink.
    // Attaching it here would cause the filesink to fail to preroll and to stall the pipeline for a few seconds.
1065
    GstPad* probepad = gst_element_get_static_pad(_sink->queue, "src");
1066
    gst_pad_add_probe(probepad, GST_PAD_PROBE_TYPE_BUFFER, _keyframeWatch, this, nullptr); // to drop the buffer
1067 1068 1069
    gst_object_unref(probepad);

    // Link the recording branch to the pipeline
1070 1071 1072 1073
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

1074 1075 1076 1077 1078 1079 1080
//    // Add the filesink once we have a valid I-frame
//    gst_bin_add(GST_BIN(_pipeline), _sink->filesink);
//    if (!gst_element_link(_sink->queue, _sink->filesink)) {
//        qCritical() << "Failed to link queue and file sink";
//    }
//    gst_element_sync_state_with_parent(_sink->filesink);

1081 1082
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording");

1083 1084 1085
    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
DonLakeFlyer's avatar
DonLakeFlyer committed
1086 1087
#else
    Q_UNUSED(videoFile)
1088 1089 1090
#endif
}

1091 1092 1093
//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "Stopping recording";
    if(_pipeline != nullptr && _recording) {
        // Defer the detach until the tee pad is idle so we never interrupt a
        // buffer mid-flight; _unlinkCallBack performs the actual unlink.
        gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, nullptr);
    } else {
        // Not recording (or no pipeline at all) - nothing to tear down.
        qCDebug(VideoReceiverLog) << "Not recording!";
    }
#endif
}

1107
//-----------------------------------------------------------------------------
1108 1109 1110 1111 1112 1113
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recoring elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
1114 1115
void
VideoReceiver::_shutdownRecordingBranch()
{
    // Called once EOS has propagated through the recording branch and the
    // video file is finalized; dismantle the branch elements.
    gst_bin_remove(GST_BIN(_pipeline), _sink->queue);
    gst_bin_remove(GST_BIN(_pipeline), _sink->filesink);

    gst_element_set_state(_sink->queue,     GST_STATE_NULL);
    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);

    // Drop the extra references taken in startRecording().
    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = nullptr;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording stopped";
}
#endif

1135
//-----------------------------------------------------------------------------
1136 1137 1138 1139 1140
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
1141
void
VideoReceiver::_unlinkRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)
    // Runs (exactly once) from the idle-pad probe set in stopRecording():
    // detach the recording branch from the tee and push EOS into it so the
    // muxer can finalize the file.
    // Send EOS at the beginning of the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_unlink(_sink->teepad, sinkpad);
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording EOS was sent";
    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);
    // NOTE: the forwarded EOS is later observed in _onBusMessage, which is
    // expected to trigger _shutdownRecordingBranch() to finish the teardown.
}
#endif

1157
//-----------------------------------------------------------------------------
1158
#if defined(QGC_GST_STREAMING)
1159 1160
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_REMOVE;
    }

    VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);

    // Atomically flip `removing` so the branch is unlinked exactly once,
    // even if the probe were to fire more than once.
    if(g_atomic_int_compare_and_exchange(&pThis->_sink->removing, FALSE, TRUE)) {
        pThis->_unlinkRecordingBranch(info);
    }

    return GST_PAD_PROBE_REMOVE;
}
#endif
1173

1174 1175 1176 1177 1178 1179 1180 1181 1182 1183 1184 1185 1186 1187 1188 1189 1190 1191 1192 1193
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    // Every buffer reaching the video sink counts as a delivered frame;
    // record its arrival for the stall watchdog in _updateTimer().
    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_OK;
    }

    static_cast<VideoReceiver*>(user_data)->_noteVideoSinkFrame();

    return GST_PAD_PROBE_OK;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_noteVideoSinkFrame()
{
1194
    _lastFrameTime = QDateTime::currentSecsSinceEpoch();
1195 1196 1197
}
#endif

1198 1199 1200 1201 1202 1203
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    // Buffer probe on the recording queue's src pad (installed in
    // startRecording): drops buffers until the first keyframe arrives, then
    // rebases timestamps and attaches the filesink so the recorded file
    // starts with a decodable frame at t=0.
    Q_UNUSED(pad);
    if(info != nullptr && user_data != nullptr) {
        GstBuffer* buf = gst_pad_probe_info_get_buffer(info);
        if(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) { // wait for a keyframe
            return GST_PAD_PROBE_DROP;
        } else {
            VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);

            // set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except these which are downstream!
            gst_pad_set_offset(pad, -buf->pts);

            // Add the filesink once we have a valid I-frame
            gst_bin_add(GST_BIN(pThis->_pipeline), pThis->_sink->filesink);
            if (!gst_element_link(pThis->_sink->queue, pThis->_sink->filesink)) {
                qCCritical(VideoReceiverLog) << "Failed to link queue and file sink";
            }
            gst_element_sync_state_with_parent(pThis->_sink->filesink);

            qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
            pThis->gotFirstRecordingKeyFrame();
        }
    }

    // Keyframe seen (or bad args): the probe's job is done, remove it.
    return GST_PAD_PROBE_REMOVE;
}
#endif

1230 1231 1232 1233 1234
//-----------------------------------------------------------------------------
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
    // Watchdog tick: skip entirely while a start/stop transition is in flight.
    if(_starting || _stopping) {
        return;
    }

    // Mirror the pipeline's streaming state into the public videoRunning flag.
    if(_videoRunning != _streaming) {
        _videoRunning = _streaming;
        emit videoRunningChanged();
    }

    if(_videoRunning) {
        // Stream is up: stop it if no frame arrived within the timeout.
        uint32_t timeout = 1;
        if(qgcApp()->toolbox() && qgcApp()->toolbox()->settingsManager()) {
            timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
        }

        const qint64 now = QDateTime::currentSecsSinceEpoch();

        if(now - _lastFrameTime > timeout) {
            stop();
            // Clear _stop so a subsequent tick of this timer restarts the stream.
            _stop = false;
        }
    } else {
        // FIXME: AV: if pipeline is _running but not _streaming for some time then we need to restart
        if(!_stop && !_running && !_uri.isEmpty() && _videoSettings->streamEnabled()->rawValue().toBool()) {
            start();
        }
    }
#endif
}