/****************************************************************************
 *
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/


/**
 * @file
 *   @brief QGC Video Receiver
 *   @author Gus Grubba <gus@auterion.com>
 */

#include "VideoReceiver.h"
18 19
#include "SettingsManager.h"
#include "QGCApplication.h"
20
#include "VideoManager.h"
21 22 23
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif
Gus Grubba's avatar
Gus Grubba committed
24
#include <QDebug>
25
#include <QUrl>
26 27
#include <QDir>
#include <QDateTime>
28
#include <QSysInfo>
29

30 31
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

32 33
#if defined(QGC_GST_STREAMING)

34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
// Parallel tables: kVideoExtensions[i] is the file extension produced by the
// muxer named in kVideoMuxes[i]. Code elsewhere (e.g. _cleanupOldVideos())
// indexes kVideoExtensions with NUM_MUXES, so the tables must stay in
// lock-step.
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

// Guard the cross-array indexing above: a mismatch would read past the end of
// kVideoExtensions.
static_assert(sizeof(kVideoExtensions) / sizeof(kVideoExtensions[0]) ==
              sizeof(kVideoMuxes) / sizeof(kVideoMuxes[0]),
              "kVideoExtensions and kVideoMuxes must have the same number of entries");

50 51 52
#endif


Gus Grubba's avatar
Gus Grubba committed
53 54
//-----------------------------------------------------------------------------
// Construct the receiver: initialize all state flags/handles, fetch the video
// settings from the toolbox, and wire up the timers and internal signal/slot
// routing used to marshal GStreamer bus events onto this object's thread.
VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _stop(true)
    , _sink(nullptr)
    , _tee(nullptr)
    , _pipeline(nullptr)
    , _videoSink(nullptr)
    , _lastFrameId(G_MAXUINT64)
    , _lastFrameTime(0)
    , _restart_time_ms(1389)
    , _socket(nullptr)
    , _serverPresent(false)
    , _tcpTestInterval_ms(5000)
    , _udpReconnect_us(5000000)
#endif
    , _videoRunning(false)
    , _showFullScreen(false)
    , _videoSettings(nullptr)
{
    _videoSettings = qgcApp()->toolbox()->settingsManager()->videoSettings();
#if defined(QGC_GST_STREAMING)
    // Single-shot: re-armed explicitly after each error (see _handleError()).
    _restart_timer.setSingleShot(true);
    connect(&_restart_timer, &QTimer::timeout, this, &VideoReceiver::_restart_timeout);
    // Single-shot probe timer for the RTSP/TCP "is the server there?" check.
    _tcp_timer.setSingleShot(true);
    connect(&_tcp_timer, &QTimer::timeout, this, &VideoReceiver::_tcp_timeout);
    // Bus messages are emitted from a GStreamer thread (_onBusMessage); these
    // connections bring the handling back onto this object's thread.
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
    // 1 Hz periodic check driven by _updateTimer (implementation not in view).
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

// Destructor: tear down any live pipeline before the object goes away.
VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    stop();                  // shuts the pipeline down (blocks on EOS if streaming)
    setVideoSink(nullptr);   // clear the sink reference
#endif
}

100 101 102 103 104 105 106 107 108
//-----------------------------------------------------------------------------
/**
 * Request a still-image grab: store the destination file name and emit
 * imageFileChanged() so listeners can react and perform the actual capture.
 */
void
VideoReceiver::grabImage(QString imageFile)
{
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
109
#if defined(QGC_GST_STREAMING)
110 111
// "pad-added" handler: link the element's newly created pad to the sink pad
// of the element passed via 'data'.
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    GstCaps* p_caps = gst_pad_get_pad_template_caps(pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    // gst_pad_get_pad_template_caps() is (transfer full) - the original code
    // leaked this reference on every pad hookup.
    gst_caps_unref(p_caps);
    p_caps = nullptr;
    GstElement* sink = GST_ELEMENT(data);
    if (gst_element_link_pads(element, name, sink, "sink") == false) {
        qCCritical(VideoReceiverLog) << "Failed to link elements\n";
    }
    g_free(name);
}

125 126 127 128 129 130 131 132
// decodebin "autoplug-query" helper for GST_QUERY_CAPS: answer the caps query
// with whatever the downstream GL upload element's sink pad accepts, so that
// decodebin selects a decoder compatible with our video sink.
static gboolean
autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glupload = (GstElement*)data;

    GstPad* uploadSinkPad = gst_element_get_static_pad(glupload, "sink");

    if (uploadSinkPad == nullptr) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    GstCaps* filter;
    gst_query_parse_caps(query, &filter);

    GstCaps* acceptedCaps = gst_pad_query_caps(uploadSinkPad, filter);
    gst_query_set_caps_result(query, acceptedCaps);

    // Report success only if the intersection is non-empty.
    const gboolean haveCaps = !gst_caps_is_empty(acceptedCaps);

    gst_caps_unref(acceptedCaps);
    gst_object_unref(uploadSinkPad);

    return haveCaps;
}

// decodebin "autoplug-query" helper for GST_QUERY_CONTEXT: forward the query
// to the sink pad of the downstream GL sink element and return its answer.
static gboolean
autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glsink = (GstElement*)data;

    GstPad* glSinkPad = gst_element_get_static_pad(glsink, "sink");

    if (glSinkPad == nullptr) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    const gboolean handled = gst_pad_query(glSinkPad, query);

    gst_object_unref(glSinkPad);

    return handled;
}

// Dispatcher for decodebin's "autoplug-query" signal: caps and context
// queries are answered on behalf of the downstream element; anything else is
// left unhandled.
static gboolean
autoplugQueryCB(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    const GstQueryType queryType = GST_QUERY_TYPE(query);

    if (queryType == GST_QUERY_CAPS) {
        return autoplugQueryCaps(bin, pad, element, query, data);
    }

    if (queryType == GST_QUERY_CONTEXT) {
        return autoplugQueryContext(bin, pad, element, query, data);
    }

    return FALSE;
}

196 197 198 199 200 201 202 203 204 205 206 207 208
//-----------------------------------------------------------------------------
// parsebin "pad-added" handler: expose the new pad on the enclosing bin by
// wrapping it in a ghost pad, so elements outside the bin can link to it.
static void
_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);

    GstPad* ghostpad = gst_ghost_pad_new(name, pad);

    g_free(name);

    gst_pad_set_active(ghostpad, TRUE);

    // NOTE(review): if gst_element_add_pad() refuses the pad, ghostpad's
    // floating reference may be leaked here — confirm against the GStreamer
    // ownership rules for gst_element_add_pad().
    if (!gst_element_add_pad(GST_ELEMENT_PARENT(element), ghostpad)) {
        qCCritical(VideoReceiverLog) << "Failed to add ghost pad to source";
    }
}

// "pad-added" handler used when the source's pads are not known up front.
// If the new pad produces RTP, an rtpjitterbuffer is inserted between the pad
// and the downstream element ('data'); otherwise the pad is linked directly
// via newPadCB(). All failures here are best-effort: we fall back to a direct
// link rather than aborting.
static void
_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
{
    gboolean isRtpPad = FALSE;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, nullptr);

        if (caps != nullptr) {
            // ANY caps tell us nothing - only count a definite RTP match.
            if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
                isRtpPad = TRUE;
            }
            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    if (isRtpPad) {
        GstElement* buffer;

        if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) != nullptr) {
            // The jitterbuffer lives in the same bin as the signalling element.
            gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), buffer);

            gst_element_sync_state_with_parent(buffer);

            GstPad* sinkpad = gst_element_get_static_pad(buffer, "sink");

            if (sinkpad != nullptr) {
                const GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);

                gst_object_unref(sinkpad);
                sinkpad = nullptr;

                if (ret == GST_PAD_LINK_OK) {
                    // Re-target the final link below at the jitterbuffer's
                    // src pad instead of the original pad.
                    pad = gst_element_get_static_pad(buffer, "src");
                    element = buffer;
                } else {
                    qCDebug(VideoReceiverLog) << "Partially failed - gst_pad_link()";
                }
            } else {
                qCDebug(VideoReceiverLog) << "Partially failed - gst_element_get_static_pad()";
            }
        } else {
            qCDebug(VideoReceiverLog) << "Partially failed - gst_element_factory_make('rtpjitterbuffer')";
        }
    }

    // Final link (either the original pad or the jitterbuffer's src pad).
    newPadCB(element, pad, data);
}

// Pad iterator run over a source's src pads before linking. Sets bit 0 of the
// result flags when at least one pad exists and bit 1 when a pad can produce
// application/x-rtp data. Always returns TRUE so every pad is visited.
static gboolean
_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
    int* flags = (int*)user_data;

    *flags |= 1;   // at least one pad seen

    GstCaps* rtpFilter = gst_caps_from_string("application/x-rtp");

    if (rtpFilter == nullptr) {
        return TRUE;
    }

    GstCaps* padCaps = gst_pad_query_caps(pad, nullptr);

    if (padCaps != nullptr) {
        // ANY caps prove nothing - require a definite RTP intersection.
        if (!gst_caps_is_any(padCaps) && gst_caps_can_intersect(padCaps, rtpFilter)) {
            *flags |= 2;
        }
        gst_caps_unref(padCaps);
    }

    gst_caps_unref(rtpFilter);

    return TRUE;
}

/**
 * Build the data-source part of the pipeline as a single bin:
 *
 *   (tcpclientsrc | rtspsrc | udpsrc) [-> tsdemux] [-> rtpjitterbuffer] -> parsebin
 *
 * The protocol is chosen from the URI scheme; MPEG-TS streams get an explicit
 * tsdemux, RTP streams get an optional jitterbuffer (skipped in low-latency
 * mode). All pads produced by parsebin are ghosted out of the bin.
 *
 * Returns the bin, or nullptr on failure. Uses the do/while(0)+break pattern:
 * any element not yet owned by the bin at the bottom is unreffed there.
 */
GstElement*
VideoReceiver::_makeSource(const QString& uri)
{
    if (uri.isEmpty()) {
        qCCritical(VideoReceiverLog) << "Failed because URI is not specified";
        return nullptr;
    }

    // Scheme sniffing. Note "udp265://" does NOT match "udp://" (different
    // substring), so the two are distinct cases.
    bool isTaisync  = uri.contains("tsusb://");
    bool isUdp264   = uri.contains("udp://");
    bool isRtsp     = uri.contains("rtsp://");
    bool isUdp265   = uri.contains("udp265://");
    bool isTcpMPEGTS= uri.contains("tcp://");
    bool isUdpMPEGTS= uri.contains("mpegts://");

    GstElement* source  = nullptr;
    GstElement* buffer  = nullptr;
    GstElement* tsdemux = nullptr;
    GstElement* parser  = nullptr;
    GstElement* bin     = nullptr;
    GstElement* srcbin  = nullptr;

    do {
        QUrl url(uri);

        if(isTcpMPEGTS) {
            // MPEG-TS over TCP: plain TCP client socket.
            if ((source = gst_element_factory_make("tcpclientsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "host", qPrintable(url.host()), "port", url.port(), nullptr);
            }
        } else if (isRtsp) {
            if ((source = gst_element_factory_make("rtspsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "location", qPrintable(uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
            }
        } else if(isUdp264 || isUdp265 || isUdpMPEGTS || isTaisync) {
            if ((source = gst_element_factory_make("udpsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "uri", QString("udp://%1:%2").arg(qPrintable(url.host()), QString::number(url.port())).toUtf8().data(), nullptr);

                // RTP payloads need explicit caps on udpsrc so downstream
                // elements know what they are receiving.
                GstCaps* caps = nullptr;

                if(isUdp264) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                } else if (isUdp265) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                }

                if (caps != nullptr) {
                    g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
                    gst_caps_unref(caps);
                    caps = nullptr;
                }
            }
        } else {
            qCDebug(VideoReceiverLog) << "URI is not recognized";
        }

        if (!source) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make() for data source failed";
            break;
        }

        if ((bin = gst_bin_new("sourcebin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sourcebin') failed";
            break;
        }

        if ((parser = gst_element_factory_make("parsebin", "parser")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('parsebin') failed";
            break;
        }

        // From here on, source and parser are owned by the bin.
        gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);

        // FIXME: AV: Android does not determine MPEG2-TS via parsebin - have to explicitly state which demux to use
        // FIXME: AV: tsdemux handling is a bit ugly - let's try to find elegant solution for that later
        if (isTcpMPEGTS || isUdpMPEGTS) {
            if ((tsdemux = gst_element_factory_make("tsdemux", nullptr)) == nullptr) {
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('tsdemux') failed";
                break;
            }

            gst_bin_add(GST_BIN(bin), tsdemux);

            if (!gst_element_link(source, tsdemux)) {
                qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                break;
            }

            // Treat the demux as the new head of the chain for linking below.
            source = tsdemux;
            tsdemux = nullptr;
        }

        // probeRes bit 0: source already has src pads; bit 1: they carry RTP.
        int probeRes = 0;

        gst_element_foreach_src_pad(source, _padProbe, &probeRes);

        if (probeRes & 1) {
            // Pads exist now - link statically (with jitterbuffer for RTP
            // unless low-latency mode is on).
            if (probeRes & 2 && !_videoSettings->lowLatencyMode()->rawValue().toBool()) {
                if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
                    qCCritical(VideoReceiverLog) << "gst_element_factory_make('rtpjitterbuffer') failed";
                    break;
                }

                gst_bin_add(GST_BIN(bin), buffer);

                if (!gst_element_link_many(source, buffer, parser, nullptr)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            } else {
                if (!gst_element_link(source, parser)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            }
        } else {
            // Pads appear later (e.g. rtspsrc) - link dynamically on pad-added.
            if (_videoSettings->lowLatencyMode()->rawValue().toBool()) {
                g_signal_connect(source, "pad-added", G_CALLBACK(newPadCB), parser);
            } else {
                g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
            }
        }

        // Expose parsebin's output pads on the bin via ghost pads.
        g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);

        // Ownership transferred to the bin - prevent the cleanup below from
        // double-unreffing.
        source = buffer = parser = nullptr;

        srcbin = bin;
        bin = nullptr;
    } while(0);

    // Failure cleanup: unref anything not handed over to the bin.
    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    if (parser != nullptr) {
        gst_object_unref(parser);
        parser = nullptr;
    }

    if (tsdemux != nullptr) {
        gst_object_unref(tsdemux);
        tsdemux = nullptr;
    }

    if (buffer != nullptr) {
        gst_object_unref(buffer);
        buffer = nullptr;
    }

    if (source != nullptr) {
        gst_object_unref(source);
        source = nullptr;
    }

    return srcbin;
}

460 461
//-----------------------------------------------------------------------------
// Slot for _restart_timer: delegate the restart to the VideoManager, which
// rebuilds the whole receiver (armed from _handleError()).
void
VideoReceiver::_restart_timeout()
{
    qgcApp()->toolbox()->videoManager()->restartVideo();
}
466
#endif
467

468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 528 529
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Periodic TCP probe used before starting RTSP/TCP streams: try to open a
// plain TCP connection to the server; _connected()/_socketError() pick up the
// result. Re-armed on every attempt until the server answers.
void
VideoReceiver::_tcp_timeout()
{
    //-- If socket is live, we got no connection nor a socket error
    delete _socket;
    _socket = nullptr;

    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        //-- RTSP will try to connect to the server. If it cannot connect,
        //   it will simply give up and never try again. Instead, we keep
        //   attempting a connection on this timer. Once a connection is
        //   found to be working, only then we actually start the stream.
        QUrl url(_uri);
        //-- If RTSP and no port is defined, set default RTSP port (554)
        if(_uri.contains("rtsp://") && url.port() <= 0) {
            url.setPort(554);
        }
        _socket = new QTcpSocket;
        // Use the application's default proxy settings for the probe.
        QNetworkProxy tempProxy;
        tempProxy.setType(QNetworkProxy::DefaultProxy);
        _socket->setProxy(tempProxy);
        connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
        connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
        _socket->connectToHost(url.host(), static_cast<uint16_t>(url.port()));
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// The TCP probe reached the server: dispose of the probe socket and, if the
// stream is still enabled, mark the server present and start the pipeline.
void
VideoReceiver::_connected()
{
    _tcp_timer.stop();
    _socket->deleteLater();
    _socket = nullptr;

    const bool streamEnabled = _videoSettings->streamEnabled()->rawValue().toBool();
    if (streamEnabled) {
        _serverPresent = true;
        start();
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// The TCP probe failed: throw the socket away and, while streaming is still
// enabled, re-arm the probe timer so we try again later.
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);

    _socket->deleteLater();
    _socket = nullptr;

    if (_videoSettings->streamEnabled()->rawValue().toBool()) {
        _tcp_timer.start(_tcpTestInterval_ms);   // try again in a while
    }
}
#endif

530
//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
/**
 * Build and start the receive pipeline for the configured URI. Bails out
 * early when: running unit tests, the stream is disabled/unconfigured, no
 * URI, no video sink, or already running. For RTSP/TCP the server is probed
 * first (_tcp_timer/_tcp_timeout) and start() is re-entered from _connected().
 */
void
VideoReceiver::start()
{
    qCDebug(VideoReceiverLog) << "Starting " << _uri;
    if(qgcApp()->runningUnitTests()) {
        return;
    }
    if(!_videoSettings->streamEnabled()->rawValue().toBool() ||
       !_videoSettings->streamConfigured()) {
        qCDebug(VideoReceiverLog) << "Stream not enabled/configured";
        return;
    }

#if defined(QGC_GST_STREAMING)
    _stop = false;

    QString uri = _uri;

#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
    //-- Taisync on iOS or Android sends a raw h.264 stream
    if (qgcApp()->toolbox()->videoManager()->isTaisync()) {
        uri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
    }
#endif

    if (uri.isEmpty()) {
        qCDebug(VideoReceiverLog) << "Failed because URI is not specified";
        return;
    }

    bool useTcpConnection = uri.contains("rtsp://") || uri.contains("tcp://");

    if (_videoSink == nullptr) {
        qCWarning(VideoReceiverLog) << "Failed because video sink is not set";
        return;
    }

    // In low-latency mode, render frames as they arrive (sync=false).
    g_object_set(_videoSink, "sync", !_videoSettings->lowLatencyMode()->rawValue().toBool(), NULL);

    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }

    _starting = true;

    //-- For RTSP and TCP, check to see if server is there first
    if(!_serverPresent && useTcpConnection) {
        _tcp_timer.start(100);
        return;
    }

    // Reset stuck-stream detection state.
    _lastFrameId = G_MAXUINT64;
    _lastFrameTime = 0;

    bool running    = false;
    bool pipelineUp = false;   // once true, the pipeline owns all elements

    GstElement* source  = nullptr;
    GstElement* queue   = nullptr;
    GstElement* decoder = nullptr;

    do {
        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_pipeline_new() failed";
            break;
        }

        // Forward messages (e.g. EOS) from inner bins as GstBinForwarded
        // element messages - unwrapped in _onBusMessage().
        g_object_set(_pipeline, "message-forward", TRUE, nullptr);

        if ((source = _makeSource(uri)) == nullptr) {
            qCCritical(VideoReceiverLog) << "_makeSource() failed";
            break;
        }

        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('tee') failed";
            break;
        }

        if((queue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('queue') failed";
            break;
        }

        if ((decoder = gst_element_factory_make("decodebin", "decoder")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('decodebin') failed";
            break;
        }

        gst_bin_add_many(GST_BIN(_pipeline), source, _tee, queue, decoder, _videoSink, nullptr);

        pipelineUp = true;

        // Source pads appear dynamically (ghost pads from the source bin).
        g_signal_connect(source, "pad-added", G_CALLBACK(newPadCB), _tee);

        if(!gst_element_link_many(_tee, queue, decoder, nullptr)) {
            qCCritical(VideoReceiverLog) << "Unable to receiver pipeline.";
            break;
        }

        // decodebin output is linked to the sink as pads appear; caps/context
        // queries are answered on the sink's behalf during autoplugging.
        g_signal_connect(decoder, "pad-added", G_CALLBACK(newPadCB), _videoSink);
        g_signal_connect(decoder, "autoplug-query", G_CALLBACK(autoplugQueryCB), _videoSink);

        source = queue = decoder = nullptr;

        GstBus* bus = nullptr;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = nullptr;
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-paused");
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (!running) {
        qCCritical(VideoReceiverLog) << "Failed";

        // In newer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != nullptr) {
            // Pull the sink back out so its reference survives pipeline destruction.
            gst_bin_remove(GST_BIN(_pipeline), _videoSink);
            gst_object_unref(_pipeline);
            _pipeline = nullptr;
        }

        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
            if (decoder != nullptr) {
                gst_object_unref(decoder);
                decoder = nullptr;
            }

            if (queue != nullptr) {
                gst_object_unref(queue);
                queue = nullptr;
            }

            if (source != nullptr) {
                gst_object_unref(source);
                source = nullptr;
            }

            if (_tee != nullptr) {
                gst_object_unref(_tee);
                _tee = nullptr;
            }

        }

        _running = false;
    } else {
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-playing");
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
    }
    _starting = false;
#endif
}

705 706 707
//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
Gus Grubba's avatar
Gus Grubba committed
708
{
709
    if(qgcApp() && qgcApp()->runningUnitTests()) {
710 711
        return;
    }
712
#if defined(QGC_GST_STREAMING)
713
    _stop = true;
714
    qCDebug(VideoReceiverLog) << "Stopping";
715 716
    if(!_streaming) {
        _shutdownPipeline();
Gus Grubba's avatar
Gus Grubba committed
717
    } else if (_pipeline != nullptr && !_stopping) {
718
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
719 720
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        gst_bus_disable_sync_message_emission(bus);
721 722
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
723
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
724
        gst_object_unref(bus);
725 726
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
727
            qCCritical(VideoReceiverLog) << "Error stopping pipeline!";
728 729 730
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
731
        gst_message_unref(message);
Gus Grubba's avatar
Gus Grubba committed
732
    }
733
#endif
Gus Grubba's avatar
Gus Grubba committed
734 735
}

736 737 738
//-----------------------------------------------------------------------------
void
VideoReceiver::setUri(const QString & uri)
Gus Grubba's avatar
Gus Grubba committed
739 740 741 742
{
    _uri = uri;
}

743
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
/**
 * Tear down the GStreamer pipeline and reset all streaming/recording state.
 * The video sink is removed from the pipeline before the pipeline is
 * unreffed so the sink element survives and can be reused by start().
 */
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    GstBus* bus = nullptr;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
        // Stop bus callbacks before the pipeline goes away.
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = nullptr;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    gst_bin_remove(GST_BIN(_pipeline), _videoSink);
    _tee = nullptr;   // owned by the pipeline; destroyed with it
    gst_object_unref(_pipeline);
    _pipeline = nullptr;
    delete _sink;
    _sink = nullptr;
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
#endif
772

773
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// A GStreamer error was reported on the bus: stop the stream and arm the
// restart timer so the receiver is rebuilt shortly (see _restart_timeout()).
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    stop();
    _restart_timer.start(_restart_time_ms);
}
#endif

783
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
/**
 * End-of-stream handling. Expected either while stopping (EOS returned after
 * our own EOS event) or while detaching the recording branch; any other EOS
 * is unexpected and treated as an error.
 */
void
VideoReceiver::_handleEOS() {
    if(_stopping) {
        // Close the recording branch first so the file is finalized cleanly.
        if(_recording && _sink->removing) {
            _shutdownRecordingBranch();
        }
        _shutdownPipeline();
        qCDebug(VideoReceiverLog) << "Stopped";
    } else if(_recording && _sink->removing) {
        // EOS generated only for the recording branch being removed.
        _shutdownRecordingBranch();
    } else {
        qCWarning(VideoReceiverLog) << "Unexpected EOS!";
        _handleError();
    }
}
#endif
Gus Grubba's avatar
Gus Grubba committed
801

802
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Track whether the pipeline is actually PLAYING; _streaming gates how stop()
// tears the pipeline down.
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        //qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif

813
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
/**
 * GStreamer bus sync handler. NOTE(review): sync-message handlers run on the
 * posting (streaming) thread, so this only translates bus messages into the
 * msg*Received() signals; the real work happens in the slots connected in the
 * constructor, on the receiver's own thread.
 */
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    VideoReceiver* pThis = (VideoReceiver*)data;

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCCritical(VideoReceiverLog) << error->message;
        g_error_free(error);
        pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        pThis->msgStateChangedReceived();
        break;
    case(GST_MESSAGE_ELEMENT): {
        // "message-forward" is set on the pipeline (see start()), so EOS from
        // inner bins arrives wrapped in a GstBinForwarded message - unwrap it.
        const GstStructure *s = gst_message_get_structure (msg);

        if (gst_structure_has_name (s, "GstBinForwarded")) {
            GstMessage *forward_msg = NULL;
            gst_structure_get (s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);
            if (forward_msg != nullptr) {
                if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
                    pThis->msgEOSReceived();
                }
                gst_message_unref(forward_msg);
                forward_msg = nullptr;
            }
        }
    }
        break;
    default:
        break;
    }

    return TRUE;
}
#endif
862

863
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Enforce the user-configured storage cap by deleting the oldest recordings.
void
VideoReceiver::_cleanupOldVideos()
{
    //-- Only perform cleanup if storage limit is enabled
    if(!_videoSettings->enableStorageLimit()->rawValue().toBool()) {
        return;
    }
    QDir dir(qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath());
    dir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
    dir.setSorting(QDir::Time);
    //-- Match every movie extension we can record
    QStringList filters;
    for(uint32_t i = 0; i < NUM_MUXES; i++) {
        filters << QString("*.") + QString(kVideoExtensions[i]);
    }
    dir.setNameFilters(filters);
    //-- Snapshot of the stored recordings, newest first
    QFileInfoList files = dir.entryInfoList();
    if(files.isEmpty()) {
        return;
    }
    //-- Compute total used storage
    uint64_t used = 0;
    for(int i = 0; i < files.size(); i++) {
        used += files[i].size();
    }
    //-- Settings are stored using MB
    uint64_t limit = (_videoSettings->maxVideoSize()->rawValue().toUInt() * 1024 * 1024);
    //-- Remove the oldest movies until we fit under the limit
    while(used >= limit && !files.isEmpty()) {
        used -= files.last().size();
        qCDebug(VideoReceiverLog) << "Removing old video file:" << files.last().filePath();
        QFile file (files.last().filePath());
        file.remove();
        files.removeLast();
    }
}
#endif
902

903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Install (or clear, when nullptr) the GStreamer element frames are rendered
// to. Only allowed while the pipeline is down.
void
VideoReceiver::setVideoSink(GstElement* videoSink)
{
    if(_pipeline != nullptr) {
        qCDebug(VideoReceiverLog) << "Video receiver pipeline is active, video sink change is not possible";
        return;
    }

    // Drop our reference to the previous sink, if any
    if (_videoSink != nullptr) {
        gst_object_unref(_videoSink);
        _videoSink = nullptr;
    }

    if (videoSink == nullptr) {
        return;
    }

    _videoSink = videoSink;
    gst_object_ref(_videoSink);

    // Watch buffers arriving at the sink so we can tell the stream is alive
    GstPad* sinkPad = gst_element_get_static_pad(_videoSink, "sink");

    if (sinkPad == nullptr) {
        qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink";
        return;
    }

    gst_pad_add_probe(sinkPad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER), _videoSinkProbe, this, nullptr);
    gst_object_unref(sinkPad);
    sinkPad = nullptr;
}
#endif

935
//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//                         source-->tee
//                                   |
//                                   |    +---------_sink----------+
//                                   |    |                        |
//   we are adding these elements->  +->teepad-->queue-->_filesink |
//                                        |                        |
//                                        +------------------------+
#if defined(QGC_GST_STREAMING)
// Build a self-contained file sink: a GstBin wrapping muxer + filesink with a
// single ghost "sink" pad, writing to 'videoFile'. 'format' indexes
// kVideoMuxes (and must match the extension chosen from kVideoExtensions by
// the caller). Returns the bin on success, nullptr on any failure; on failure
// every partially created element is released here.
GstElement*
VideoReceiver::_makeFileSink(const QString& videoFile, unsigned format)
{
    GstElement* fileSink = nullptr;
    GstElement* mux = nullptr;
    GstElement* sink = nullptr;
    GstElement* bin = nullptr;
    // Until gst_bin_add_many() transfers ownership to the bin we must unref
    // mux/sink ourselves on the error path.
    bool releaseElements = true;

    do{
        if ((mux = gst_element_factory_make(kVideoMuxes[format], nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('" << kVideoMuxes[format] << "') failed";
            break;
        }

        if ((sink = gst_element_factory_make("filesink", nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('filesink') failed";
            break;
        }

        g_object_set(static_cast<gpointer>(sink), "location", qPrintable(videoFile), nullptr);

        if ((bin = gst_bin_new("sinkbin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sinkbin') failed";
            break;
        }

        GstPadTemplate* padTemplate;

        // Muxers expose request pads named "video_%u" for video input
        if ((padTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mux), "video_%u")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_class_get_pad_template(mux) failed";
            break;
        }

        // FIXME: AV: pad handling is potentially leaking (and other similar places too!)
        GstPad* pad;

        if ((pad = gst_element_request_pad(mux, padTemplate, nullptr, nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_request_pad(mux) failed";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), mux, sink, nullptr);

        // From here on the bin owns mux/sink, so the cleanup below must not
        // unref them individually.
        releaseElements = false;

        // Expose the muxer's request pad as the bin's own "sink" pad
        GstPad* ghostpad = gst_ghost_pad_new("sink", pad);

        gst_element_add_pad(bin, ghostpad);

        gst_object_unref(pad);
        pad = nullptr;

        if (!gst_element_link(mux, sink)) {
            qCCritical(VideoReceiverLog) << "gst_element_link() failed";
            break;
        }

        fileSink = bin;
        bin = nullptr;   // ownership transferred to the return value
    } while(0);

    if (releaseElements) {
        if (sink != nullptr) {
            gst_object_unref(sink);
            sink = nullptr;
        }

        if (mux != nullptr) {
            gst_object_unref(mux);
            mux = nullptr;
        }
    }

    // Failure after bin creation: dropping the bin also frees mux/sink it owns
    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    return fileSink;
}
#endif
1030

1031
void
1032
VideoReceiver::startRecording(const QString &videoFile)
1033
{
1034
#if defined(QGC_GST_STREAMING)
1035

1036
    qCDebug(VideoReceiverLog) << "Starting recording";
1037
    // exit immediately if we are already recording
Gus Grubba's avatar
Gus Grubba committed
1038
    if(_pipeline == nullptr || _recording) {
1039 1040 1041 1042
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

1043
    uint32_t muxIdx = _videoSettings->recordingFormat()->rawValue().toUInt();
1044 1045 1046 1047 1048 1049 1050 1051
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

Tomaz Canabrava's avatar
Tomaz Canabrava committed
1052 1053 1054 1055
    QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
    if(savePath.isEmpty()) {
        qgcApp()->showMessage(tr("Unabled to record video. Video save path must be specified in Settings."));
        return;
1056
    }
1057

Tomaz Canabrava's avatar
Tomaz Canabrava committed
1058 1059 1060
    _videoFile = savePath + "/"
                + (videoFile.isEmpty() ? QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") : videoFile)
                + "." + kVideoExtensions[muxIdx];
1061 1062

    qCDebug(VideoReceiverLog) << "New video file:" << _videoFile;
1063

1064
    emit videoFileChanged();
1065

1066 1067 1068 1069 1070 1071 1072
    _sink           = new Sink();
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", nullptr);
    _sink->filesink = _makeFileSink(_videoFile, muxIdx);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->filesink) {
1073
        qCCritical(VideoReceiverLog) << "Failed to make _sink elements";
1074 1075
        return;
    }
1076 1077 1078 1079

    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->filesink);

1080
    gst_bin_add(GST_BIN(_pipeline), _sink->queue);
1081 1082 1083

    gst_element_sync_state_with_parent(_sink->queue);

1084 1085 1086
    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
1087 1088
    // Once we have this valid frame, we attach the filesink.
    // Attaching it here would cause the filesink to fail to preroll and to stall the pipeline for a few seconds.
1089
    GstPad* probepad = gst_element_get_static_pad(_sink->queue, "src");
1090
    gst_pad_add_probe(probepad, GST_PAD_PROBE_TYPE_BUFFER, _keyframeWatch, this, nullptr); // to drop the buffer
1091 1092 1093
    gst_object_unref(probepad);

    // Link the recording branch to the pipeline
1094 1095 1096 1097
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

1098 1099 1100 1101 1102 1103 1104
//    // Add the filesink once we have a valid I-frame
//    gst_bin_add(GST_BIN(_pipeline), _sink->filesink);
//    if (!gst_element_link(_sink->queue, _sink->filesink)) {
//        qCritical() << "Failed to link queue and file sink";
//    }
//    gst_element_sync_state_with_parent(_sink->filesink);

1105 1106
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording");

1107 1108 1109
    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
DonLakeFlyer's avatar
DonLakeFlyer committed
1110 1111
#else
    Q_UNUSED(videoFile)
1112 1113 1114
#endif
}

1115 1116 1117
//-----------------------------------------------------------------------------
// Detach the recording branch and finalize the video file. The unlinking
// itself happens asynchronously from an idle pad probe (_unlinkCallBack).
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "Stopping recording";
    // exit immediately if we are not recording
    if(_pipeline == nullptr || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for data block before unlinking: an IDLE probe fires only when the
    // pad is not busy pushing a buffer, so we never detach mid-frame.
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, nullptr);
#endif
}

1131
//-----------------------------------------------------------------------------
// Final stage of stopping a recording:
// -EOS has propagated through the recording branch, so the muxer has flushed
//  and the video file on disk is finalized
// -Remove the branch elements from the pipeline and drop our references
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownRecordingBranch()
{
    gst_bin_remove(GST_BIN(_pipeline), _sink->queue);
    gst_bin_remove(GST_BIN(_pipeline), _sink->filesink);

    gst_element_set_state(_sink->queue,     GST_STATE_NULL);
    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);

    // Drop the extra refs taken in startRecording()
    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = nullptr;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording stopped";
}
#endif

1159
//-----------------------------------------------------------------------------
// -Unlink the recording branch from the tee in the main _pipeline
// -Send an EOS event into the detached branch so the muxer can finalize the
//  file (completion is observed via the forwarded EOS on the pipeline bus)
// -Give the request pad back to the tee
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_unlinkRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)
    // Send EOS at the beginning of the (now detached) recording branch
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_unlink(_sink->teepad, sinkpad);
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording EOS was sent";
    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);
}
#endif

1181
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Idle pad probe installed by stopRecording(); tears the recording branch
// down exactly once, between buffers.
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_REMOVE;
    }
    VideoReceiver* receiver = static_cast<VideoReceiver*>(user_data);
    // The probe may fire more than once; the atomic swap guarantees we only
    // act on the first invocation.
    if(g_atomic_int_compare_and_exchange(&receiver->_sink->removing, FALSE, TRUE)) {
        receiver->_unlinkRecordingBranch(info);
    }
    return GST_PAD_PROBE_REMOVE;
}
#endif
1197

1198 1199 1200 1201 1202 1203 1204 1205 1206 1207 1208 1209 1210 1211 1212 1213 1214 1215 1216 1217
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Buffer probe on the video sink's pad: a buffer arrived, so record the time
// of this frame for the stall watchdog.
GstPadProbeReturn
VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_OK;
    }
    static_cast<VideoReceiver*>(user_data)->_noteVideoSinkFrame();
    return GST_PAD_PROBE_OK;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_noteVideoSinkFrame()
{
1218
    _lastFrameTime = QDateTime::currentSecsSinceEpoch();
1219 1220 1221
}
#endif

1222 1223 1224 1225 1226 1227
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Buffer probe on the recording queue's src pad. Drops buffers until the
// first keyframe arrives, then rebases the branch timeline so that keyframe
// lands at t=0, attaches the file sink and removes itself.
GstPadProbeReturn
VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != nullptr && user_data != nullptr) {
        GstBuffer* buf = gst_pad_probe_info_get_buffer(info);
        if(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) { // wait for a keyframe
            return GST_PAD_PROBE_DROP;
        } else {
            VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);

            // set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except these which are downstream!
            // buf->pts is an unsigned GstClockTime (guint64); cast explicitly
            // before negating rather than negating the unsigned value and
            // relying on implicit conversion to the signed gint64 offset.
            gst_pad_set_offset(pad, -static_cast<gint64>(buf->pts));

            // Add the filesink once we have a valid I-frame
            gst_bin_add(GST_BIN(pThis->_pipeline), pThis->_sink->filesink);
            if (!gst_element_link(pThis->_sink->queue, pThis->_sink->filesink)) {
                qCCritical(VideoReceiverLog) << "Failed to link queue and file sink";
            }
            gst_element_sync_state_with_parent(pThis->_sink->filesink);

            qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
            pThis->gotFirstRecordingKeyFrame();
        }
    }

    return GST_PAD_PROBE_REMOVE;
}
#endif

1254 1255 1256 1257 1258
//-----------------------------------------------------------------------------
// Periodic watchdog: mirrors the pipeline's streaming state into
// _videoRunning and restarts the stream when frames stop arriving.
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
    // Don't interfere while a start/stop transition is already in flight
    if(_stopping || _starting) {
        return;
    }

    // Keep _videoRunning in sync with _streaming, emitting on every change
    if(_streaming) {
        if(!_videoRunning) {
            _videoRunning = true;
            emit videoRunningChanged();
        }
    } else {
        if(_videoRunning) {
            _videoRunning = false;
            emit videoRunningChanged();
        }
    }

    if(_videoRunning) {
        // Stall detection: if no frame reached the sink within the timeout
        // (seconds, from the RTSP timeout setting), stop the pipeline and
        // clear _stop so the branch below restarts it on a later tick.
        uint32_t timeout = 1;
        if(qgcApp()->toolbox() && qgcApp()->toolbox()->settingsManager()) {
            timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
        }

        const qint64 now = QDateTime::currentSecsSinceEpoch();

        if(now - _lastFrameTime > timeout) {
            stop();
            // We want to start it back again with _updateTimer
            _stop = false;
        }
    } else {
        // FIXME: AV: if pipeline is _running but not _streaming for some time then we need to restart
        if(!_stop && !_running && !_uri.isEmpty() && _videoSettings->streamEnabled()->rawValue().toBool()) {
            start();
        }
    }
#endif
}