/****************************************************************************
 *
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/


/**
 * @file
 *   @brief QGC Video Receiver
 *   @author Gus Grubba <gus@auterion.com>
 */

#include "VideoReceiver.h"
#include "SettingsManager.h"
#include "QGCApplication.h"
#include "VideoManager.h"
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif

#include <QDebug>
#include <QUrl>
#include <QDir>
#include <QDateTime>
#include <QSysInfo>

QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

#if defined(QGC_GST_STREAMING)

34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
// Supported recording file extensions and the GStreamer muxer element used
// for each one. The two tables are indexed together: kVideoMuxes[i] produces
// a file with extension kVideoExtensions[i].
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

// Guard the index alignment at compile time: a size mismatch between the two
// tables would silently select the wrong muxer for a given extension.
static_assert(sizeof(kVideoExtensions) / sizeof(kVideoExtensions[0]) == NUM_MUXES,
              "kVideoExtensions and kVideoMuxes must have the same number of entries");

50 51 52
#endif


Gus Grubba's avatar
Gus Grubba committed
53 54
// Constructs the receiver in an idle state and wires up the timers and
// signal/slot plumbing used to drive the GStreamer pipeline.
VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _stop(true)
    , _sink(nullptr)
    , _tee(nullptr)
    , _pipeline(nullptr)
    , _videoSink(nullptr)
    , _lastFrameId(G_MAXUINT64)
    , _lastFrameTime(0)
    , _restart_time_ms(1389)     // delay before auto-restart after an error (see _handleError)
    , _socket(nullptr)
    , _serverPresent(false)
    , _tcpTestInterval_ms(5000)  // interval between TCP server probes (see _tcp_timeout)
    , _udpReconnect_us(5000000)  // rtspsrc "timeout" property, in microseconds (see _makeSource)
#endif
    , _videoRunning(false)
    , _showFullScreen(false)
    , _videoSettings(nullptr)
{
    _videoSettings = qgcApp()->toolbox()->settingsManager()->videoSettings();
#if defined(QGC_GST_STREAMING)
    // One-shot timers: restart-after-error and TCP server probing.
    _restart_timer.setSingleShot(true);
    connect(&_restart_timer, &QTimer::timeout, this, &VideoReceiver::_restart_timeout);
    _tcp_timer.setSingleShot(true);
    connect(&_tcp_timer, &QTimer::timeout, this, &VideoReceiver::_tcp_timeout);
    // Bus messages are re-emitted as Qt signals (see _onBusMessage) so the
    // actual handling runs through these slots on this object's thread.
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
    // 1 Hz tick driving _updateTimer (defined elsewhere in this file).
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    // Tear down any running pipeline and detach the external video sink.
    stop();
    setVideoSink(nullptr);
#endif
}

100 101 102 103 104 105 106 107 108
//-----------------------------------------------------------------------------
// Records the target path for a frame capture and notifies listeners.
// NOTE(review): only the path is stored here — the actual frame grab appears
// to be driven by the imageFileChanged signal; confirm with its consumers.
void
VideoReceiver::grabImage(QString imageFile)
{
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// "pad-added" callback: links the newly created pad of `element` to the
// "sink" pad of the element passed in `data`.
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    //g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    // gst_pad_get_pad_template_caps() returns a new reference which must be
    // released — previously this leaked on every pad-added signal.
    gst_caps_unref(p_caps);
    GstElement* sink = GST_ELEMENT(data);
    if (gst_element_link_pads(element, name, sink, "sink") == false) {
        qCritical() << "newPadCB : failed to link elements\n";
    }
    g_free(name);
}

125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195
// "autoplug-query" helper for GST_QUERY_CAPS: answers the query with the caps
// accepted by the sink pad of the element passed in `data` (e.g. glupload).
// Returns TRUE only when the resulting caps set is non-empty.
static gboolean
autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glupload = (GstElement* )data;

    GstPad* sinkpad = gst_element_get_static_pad(glupload, "sink");

    if (sinkpad == nullptr) {
        qCritical() << "autoplugQueryCaps(): No sink pad found";
        return FALSE;
    }

    // Restrict the answer by any filter caps carried in the query.
    GstCaps* filter = nullptr;
    gst_query_parse_caps(query, &filter);

    GstCaps* sinkcaps = gst_pad_query_caps(sinkpad, filter);
    gst_query_set_caps_result(query, sinkcaps);

    const gboolean haveCaps = gst_caps_is_empty(sinkcaps) ? FALSE : TRUE;

    gst_caps_unref(sinkcaps);
    gst_object_unref(sinkpad);

    return haveCaps;
}

// "autoplug-query" helper for GST_QUERY_CONTEXT: forwards the query to the
// sink pad of the element passed in `data` (e.g. the GL video sink).
static gboolean
autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glsink = (GstElement* )data;

    GstPad* sinkpad = gst_element_get_static_pad(glsink, "sink");

    if (sinkpad == nullptr) {
        qCritical() << "autoplugQueryContext(): No sink pad found";
        return FALSE;
    }

    const gboolean handled = gst_pad_query(sinkpad, query);

    gst_object_unref(sinkpad);

    return handled;
}

// decodebin "autoplug-query" dispatcher: delegates CAPS and CONTEXT queries
// to the helpers above; every other query type is left unhandled.
static gboolean
autoplugQueryCB(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    const GstQueryType queryType = GST_QUERY_TYPE(query);

    if (queryType == GST_QUERY_CAPS) {
        return autoplugQueryCaps(bin, pad, element, query, data);
    }

    if (queryType == GST_QUERY_CONTEXT) {
        return autoplugQueryContext(bin, pad, element, query, data);
    }

    return FALSE;
}

196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237
//-----------------------------------------------------------------------------
// "pad-added" callback: exposes the new pad of `element` on its parent bin
// via a ghost pad, so the bin can be linked as a single source element.
static void
_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);

    GstPad* ghostpad = gst_ghost_pad_new(name, pad);

    g_free(name);

    gst_pad_set_active(ghostpad, TRUE);

    if (!gst_element_add_pad(GST_ELEMENT_PARENT(element), ghostpad)) {
        qCritical() << "Failed to add ghost pad to source";
        // gst_element_add_pad() only takes ownership on success; release the
        // pad here, otherwise it leaks on failure.
        gst_object_unref(ghostpad);
    }
}

// "pad-added" callback: links the new pad to `data`'s sink pad (via newPadCB),
// first inserting an rtpjitterbuffer when the pad carries application/x-rtp.
static void
_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
{
    gboolean isRtpPad = FALSE;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, filter);

        if (caps != nullptr) {
            if (!gst_caps_is_empty(caps)) {
                // gst_caps_to_string() returns an allocated string — it was
                // previously passed to qDebug() and leaked.
                gchar* description = gst_caps_to_string(caps);
                qDebug() << description;
                g_free(description);
                isRtpPad = TRUE;
            }
            // Unref unconditionally: the old code leaked caps when the query
            // returned a non-null but empty set.
            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    if (isRtpPad) {
        GstElement* buffer;

        if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) != nullptr) {
            gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), buffer);

            gst_element_sync_state_with_parent(buffer);

            GstPad* sinkpad = gst_element_get_static_pad(buffer, "sink");

            if (sinkpad != nullptr) {
                const GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);

                gst_object_unref(sinkpad);
                sinkpad = nullptr;

                if (ret == GST_PAD_LINK_OK) {
                    // Continue the link from the jitter buffer's src pad.
                    // NOTE(review): this static pad reference is never
                    // released — pre-existing leak, left as-is to avoid
                    // restructuring the control flow here.
                    pad = gst_element_get_static_pad(buffer, "src");
                    element = buffer;
                } else {
                    // Error messages previously named _wrapWithGhostPad (copy-paste).
                    qCritical() << "_linkPadWithOptionalBuffer partially failed. Error with gst_pad_link()";
                }
            } else {
                qCritical() << "_linkPadWithOptionalBuffer partially failed. Error with gst_element_get_static_pad()";
            }
        } else {
            qCritical() << "_linkPadWithOptionalBuffer partially failed. Error with gst_element_factory_make('rtpjitterbuffer')";
        }
    }

    newPadCB(element, pad, data);
}

// gst_element_foreach_src_pad() callback: sets bit 0 of *user_data when the
// element has at least one src pad, and bit 1 when that pad can carry
// application/x-rtp. Always returns TRUE to keep iterating.
static gboolean
_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
    int* probeRes = (int*)user_data;

    *probeRes |= 1;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, filter);

        if (caps != nullptr) {
            if (!gst_caps_is_empty(caps)) {
                *probeRes |= 2;
            }
            // Unref unconditionally: the old code leaked caps when the query
            // returned a non-null but empty set.
            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    return TRUE;
}

// Builds the source section of the pipeline for the given URI: a network
// source element (tcpclientsrc / rtspsrc / udpsrc), an optional
// rtpjitterbuffer, and a parser/demuxer, all wrapped in a single bin whose
// pads are exposed as ghost pads. Returns the bin, or nullptr on failure.
GstElement*
VideoReceiver::_makeSource(const QString& uri)
{
    if (uri.isEmpty()) {
        qCritical() << "VideoReceiver::_makeSource() failed because URI is not specified";
        return nullptr;
    }

    bool isTaisync  = uri.contains("tsusb://");
    bool isUdp264   = uri.contains("udp://");
    bool isRtsp     = uri.contains("rtsp://");
    bool isUdp265   = uri.contains("udp265://");
    bool isTcpMPEGTS= uri.contains("tcp://");
    bool isUdpMPEGTS= uri.contains("mpegts://");

    GstElement* source  = nullptr;
    GstElement* buffer  = nullptr;
    GstElement* parser  = nullptr;
    GstElement* bin     = nullptr;
    GstElement* srcbin  = nullptr;

    do {
        QUrl url(uri);

        if(isTcpMPEGTS) {
            if ((source = gst_element_factory_make("tcpclientsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "host", qPrintable(url.host()), "port", url.port(), nullptr);
            }
        } else if (isRtsp) {
            if ((source = gst_element_factory_make("rtspsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "location", qPrintable(uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
            }
        } else if(isUdp264 || isUdp265 || isUdpMPEGTS || isTaisync) {
            if ((source = gst_element_factory_make("udpsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "uri", QString("udp://%1:%2").arg(qPrintable(url.host()), QString::number(url.port())).toUtf8().data(), nullptr);

                GstCaps* caps = nullptr;

                if(isUdp264) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                        qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_caps_from_string()";
                        break;
                    }
                } else if (isUdp265) {
                    // FIX: this branch previously re-tested isUdp264, making the
                    // H265 caps unreachable for udp265:// streams.
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                        qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_caps_from_string()";
                        break;
                    }
                }

                if (caps != nullptr) {
                    g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
                    gst_caps_unref(caps);
                    caps = nullptr;
                }
            }
        } else {
            qWarning() << "VideoReceiver::_makeSource(): URI is not recognized";
        }

        if (!source) {
            qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_factory_make() for data source";
            break;
        }

        // FIXME: AV: Android does not determine MPEG2-TS via parsebin - have to explicitly state which demux to use
        if (isTcpMPEGTS || isUdpMPEGTS) {
            if ((parser = gst_element_factory_make("tsdemux", "parser")) == nullptr) {
                // Use the category-aware macro, consistent with qCDebug(VideoReceiverLog) elsewhere.
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('tsdemux') failed";
                break;
            }
        } else {
            if ((parser = gst_element_factory_make("parsebin", "parser")) == nullptr) {
                qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_factory_make('parsebin')";
                break;
            }
        }

        if ((bin = gst_bin_new("sourcebin")) == nullptr) {
            qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_bin_new('sourcebin')";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);

        // Probe the source's static src pads: bit 0 = has a pad now,
        // bit 1 = pad carries RTP (needs a jitter buffer).
        int probeRes = 0;

        gst_element_foreach_src_pad(source, _padProbe, &probeRes);

        if (probeRes & 1) {
            if (probeRes & 2) {
                if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
                    qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_factory_make('rtpjitterbuffer')";
                    break;
                }

                gst_bin_add(GST_BIN(bin), buffer);

                if (!gst_element_link_many(source, buffer, parser, nullptr)) {
                    qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_link()";
                    break;
                }
            } else {
                if (!gst_element_link(source, parser)) {
                    qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_link()";
                    break;
                }
            }
        } else {
            // No static src pads yet (e.g. rtspsrc): link dynamically on pad-added.
            g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
        }

        // Expose the parser's dynamic output pads on the bin as ghost pads.
        g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);

        // Ownership of the elements has passed to the bin; don't unref below.
        source = buffer = parser = nullptr;

        srcbin = bin;
        bin = nullptr;
    } while(0);

    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    if (parser != nullptr) {
        gst_object_unref(parser);
        parser = nullptr;
    }

    if (buffer != nullptr) {
        gst_object_unref(buffer);
        buffer = nullptr;
    }

    if (source != nullptr) {
        gst_object_unref(source);
        source = nullptr;
    }

    return srcbin;
}

437 438
//-----------------------------------------------------------------------------
// Fired by _restart_timer (armed in _handleError): asks the VideoManager to
// tear down and restart the video stream after a failure.
void
VideoReceiver::_restart_timeout()
{
    qgcApp()->toolbox()->videoManager()->restartVideo();
}
#endif
444

445 446 447 448 449 450
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_tcp_timeout()
{
    //-- If socket is live, we got no connection nor a socket error
451 452 453
    delete _socket;
    _socket = nullptr;

454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        //-- RTSP will try to connect to the server. If it cannot connect,
        //   it will simply give up and never try again. Instead, we keep
        //   attempting a connection on this timer. Once a connection is
        //   found to be working, only then we actually start the stream.
        QUrl url(_uri);
        //-- If RTSP and no port is defined, set default RTSP port (554)
        if(_uri.contains("rtsp://") && url.port() <= 0) {
            url.setPort(554);
        }
        _socket = new QTcpSocket;
        QNetworkProxy tempProxy;
        tempProxy.setType(QNetworkProxy::DefaultProxy);
        _socket->setProxy(tempProxy);
        connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
        connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
        _socket->connectToHost(url.host(), static_cast<uint16_t>(url.port()));
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// TCP probe succeeded: the server is reachable, so dispose of the probe
// socket and start the actual stream.
void
VideoReceiver::_connected()
{
    //-- Server showed up. Now we start the stream.
    _tcp_timer.stop();
    _socket->deleteLater();
    _socket = nullptr;
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        // Marks the server as reachable so start() skips the probe this time.
        _serverPresent = true;
        start();
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// TCP probe failed: drop the probe socket and, while streaming is still
// enabled, re-arm the probe timer to try again later.
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    _socket->deleteLater();
    _socket = nullptr;
    //-- Try again in a while
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

507
//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
//
// Builds and starts the playback pipeline for the configured URI. No-op when
// running unit tests, when streaming is disabled/unconfigured, when no video
// sink is set, or when already running. For RTSP/TCP URIs the pipeline is
// only built once a TCP probe has confirmed the server is reachable.
void
VideoReceiver::start()
{
    qCDebug(VideoReceiverLog) << "start():" << _uri;
    if(qgcApp()->runningUnitTests()) {
        return;
    }
    if(!_videoSettings->streamEnabled()->rawValue().toBool() ||
       !_videoSettings->streamConfigured()) {
        qCDebug(VideoReceiverLog) << "start() but not enabled/configured";
        return;
    }

#if defined(QGC_GST_STREAMING)
    _stop = false;

    QString uri = _uri;

#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
    //-- Taisync on iOS or Android sends a raw h.264 stream
    if (qgcApp()->toolbox()->videoManager()->isTaisync()) {
        uri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
    }
#endif

    if (uri.isEmpty()) {
        qCritical() << "VideoReceiver::start() failed because URI is not specified";
        return;
    }

    bool useTcpConnection = uri.contains("rtsp://") || uri.contains("tcp://");

    if (_videoSink == nullptr) {
        qCritical() << "VideoReceiver::start() failed because video sink is not set";
        return;
    }
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }

    _starting = true;

    //-- For RTSP and TCP, check to see if server is there first
    // The probe calls start() again via _connected(); note that _starting
    // remains true while the probe is pending.
    if(!_serverPresent && useTcpConnection) {
        _tcp_timer.start(100);
        return;
    }

    // Reset frame bookkeeping for the new session.
    _lastFrameId = G_MAXUINT64;
    _lastFrameTime = 0;

    bool running    = false;
    bool pipelineUp = false;

    GstElement* source  = nullptr;
    GstElement* queue   = nullptr;
    GstElement* decoder = nullptr;

    do {
        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
            break;
        }

        // Forward messages from nested bins so _onBusMessage can see EOS
        // wrapped in GstBinForwarded element messages.
        g_object_set(_pipeline, "message-forward", TRUE, nullptr);

        if ((source = _makeSource(uri)) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with _makeSource()";
            break;
        }

        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
            break;
        }

        if((queue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
            break;
        }

        if ((decoder = gst_element_factory_make("decodebin", "decoder")) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('decodebin')";
            break;
        }

        gst_bin_add_many(GST_BIN(_pipeline), source, _tee, queue, decoder, _videoSink, nullptr);

        // From here on the pipeline owns the elements; failure cleanup below
        // must not unref them individually.
        pipelineUp = true;

        // The source bin's pads appear dynamically; link them to the tee.
        g_signal_connect(source, "pad-added", G_CALLBACK(newPadCB), _tee);

        if(!gst_element_link_many(_tee, queue, decoder, nullptr)) {
            qCritical() << "Unable to link UDP elements.";
            break;
        }

        // decodebin's output pads are dynamic too; link them to the sink and
        // let the sink answer caps/context autoplug queries.
        g_signal_connect(decoder, "pad-added", G_CALLBACK(newPadCB), _videoSink);
        g_signal_connect(decoder, "autoplug-query", G_CALLBACK(autoplugQueryCB), _videoSink);

        source = queue = decoder = nullptr;

        GstBus* bus = nullptr;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            // Receive bus messages synchronously via _onBusMessage.
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = nullptr;
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-paused");
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (!running) {
        qCritical() << "VideoReceiver::start() failed";

        // In newer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != nullptr) {
            gst_object_unref(_pipeline);
            _pipeline = nullptr;
        }

        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
            if (decoder != nullptr) {
                gst_object_unref(decoder);
                decoder = nullptr;
            }

            if (queue != nullptr) {
                gst_object_unref(queue);
                queue = nullptr;
            }

            if (source != nullptr) {
                gst_object_unref(source);
                source = nullptr;
            }

            if (_tee != nullptr) {
                gst_object_unref(_tee);
                _tee = nullptr;
            }

        }

        _running = false;
    } else {
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-playing");
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
    }
    _starting = false;
#endif
}

678 679 680
//-----------------------------------------------------------------------------
// Stops the stream. If the pipeline is actively streaming, an EOS event is
// sent downstream and this call BLOCKS until EOS (or an error) reaches the
// bus, so that in-flight data (e.g. a recording) is flushed cleanly.
void
VideoReceiver::stop()
{
    if(qgcApp() && qgcApp()->runningUnitTests()) {
        return;
    }
#if defined(QGC_GST_STREAMING)
    _stop = true;
    qCDebug(VideoReceiverLog) << "stop()";
    if(!_streaming) {
        // Not streaming: nothing to flush, tear down immediately.
        _shutdownPipeline();
    } else if (_pipeline != nullptr && !_stopping) {
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
        // Switch from sync emission to a blocking pop so we can wait for the
        // EOS to propagate through the pipeline right here.
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        gst_bus_disable_sync_message_emission(bus);
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
        // Blocking wait: GST_CLOCK_TIME_NONE waits indefinitely for EOS/ERROR.
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
        gst_object_unref(bus);
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
            qCritical() << "Error stopping pipeline!";
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
        gst_message_unref(message);
    }
#endif
}

709 710 711
//-----------------------------------------------------------------------------
// Stores the stream URI used by the next start(); assigning it here does not
// by itself (re)start the pipeline.
void
VideoReceiver::setUri(const QString & uri)
{
    _uri = uri;
}

716
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Unconditionally tears down the pipeline and resets all streaming state.
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    // Silence bus callbacks before destroying the pipeline.
    GstBus* bus = nullptr;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = nullptr;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    gst_object_unref(_pipeline);
    _pipeline = nullptr;
    // Drop the recording-branch bookkeeping as well.
    delete _sink;
    _sink = nullptr;
    // Force a fresh TCP probe before the next RTSP/TCP start().
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
#endif
743

744
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Common error path: stop the stream, then schedule a restart attempt via
// _restart_timer (fires _restart_timeout after _restart_time_ms).
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    stop();
    _restart_timer.start(_restart_time_ms);
}
#endif

754
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Reacts to end-of-stream: during an orderly stop() it completes the
// shutdown; while a recording branch is being removed it finalizes that
// removal; any other EOS is unexpected and treated as an error.
void
VideoReceiver::_handleEOS() {
    if(_stopping) {
        // A recording branch may still be mid-removal; finish it first so
        // the file is finalized before the pipeline goes away.
        if(_recording && _sink->removing) {
            _shutdownRecordingBranch();
        }
        _shutdownPipeline();
        qCDebug(VideoReceiverLog) << "Stopped";
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
        qWarning() << "VideoReceiver: Unexpected EOS!";
        _handleError();
    }
}
#endif
Gus Grubba's avatar
Gus Grubba committed
772

773
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Tracks pipeline state transitions: _streaming mirrors whether the pipeline
// is currently in the PLAYING state.
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        //qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif

784
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Bus "sync-message" handler (sync emission is enabled in start(), so this
// likely runs on a GStreamer thread — handling is deferred by re-emitting
// the events as Qt signals on the VideoReceiver instance passed in `data`).
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    VideoReceiver* pThis = (VideoReceiver*)data;

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCritical() << error->message;
        g_error_free(error);
        pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        pThis->msgStateChangedReceived();
        break;
    case(GST_MESSAGE_ELEMENT): {
        // "message-forward" is set on the pipeline in start(), so EOS from a
        // nested bin arrives wrapped in a GstBinForwarded element message.
        const GstStructure *s = gst_message_get_structure (msg);

        if (gst_structure_has_name (s, "GstBinForwarded")) {
            GstMessage *forward_msg = NULL;
            gst_structure_get (s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);
            if (forward_msg != nullptr) {
                if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
                    pThis->msgEOSReceived();
                }
                gst_message_unref(forward_msg);
                forward_msg = nullptr;
            }
        }
    }
        break;
    default:
        break;
    }

    return TRUE;
}
#endif
833

834
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Enforces the recording storage limit: deletes the oldest video files in the
// save directory until total usage drops below the configured maximum.
void
VideoReceiver::_cleanupOldVideos()
{
    //-- Only perform cleanup if storage limit is enabled
    if(_videoSettings->enableStorageLimit()->rawValue().toBool()) {
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        QDir videoDir = QDir(savePath);
        videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
        videoDir.setSorting(QDir::Time);
        //-- All the movie extensions we support
        QStringList nameFilters;
        // Iterate the extension table by its own size (previously bounded by
        // NUM_MUXES, the muxer-table count, which could silently drift).
        for(size_t i = 0; i < sizeof(kVideoExtensions) / sizeof(kVideoExtensions[0]); i++) {
            nameFilters << QString("*.") + QString(kVideoExtensions[i]);
        }
        videoDir.setNameFilters(nameFilters);
        //-- get the list of videos stored
        QFileInfoList vidList = videoDir.entryInfoList();
        if(!vidList.isEmpty()) {
            uint64_t total   = 0;
            //-- Settings are stored using MB. Widen BEFORE multiplying: the
            //   old 32-bit multiply overflowed for limits >= 4096 MB.
            uint64_t maxSize = static_cast<uint64_t>(_videoSettings->maxVideoSize()->rawValue().toUInt()) * 1024 * 1024;
            //-- Compute total used storage
            for(int i = 0; i < vidList.size(); i++) {
                total += vidList[i].size();
            }
            //-- Remove old movies until max size is satisfied.
            // QDir::Time sorts newest first, so .last() is the oldest file.
            while(total >= maxSize && !vidList.isEmpty()) {
                total -= vidList.last().size();
                qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
                QFile file (vidList.last().filePath());
                file.remove();
                vidList.removeLast();
            }
        }
    }
}
#endif
873

874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902 903 904 905
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Install (or clear) the GStreamer video sink used for display.
// May only be called while the pipeline is not built; takes its own
// reference on the sink and attaches a buffer probe so incoming frames
// feed the stream-stall watchdog.
void
VideoReceiver::setVideoSink(GstElement* videoSink)
{
    if(_pipeline != nullptr) {
        qCDebug(VideoReceiverLog) << "Video receiver pipeline is active, video sink change is not possible";
        return;
    }

    // Drop our reference to any previously installed sink
    if (_videoSink != nullptr) {
        gst_object_unref(_videoSink);
        _videoSink = nullptr;
    }

    if (videoSink == nullptr) {
        return;
    }

    gst_object_ref(videoSink);
    _videoSink = videoSink;

    GstPad* sinkPad = gst_element_get_static_pad(_videoSink, "sink");

    if (sinkPad == nullptr) {
        qCDebug(VideoReceiverLog) << "Unable to find sink pad of video sink";
        return;
    }

    // Observe every buffer entering the sink so _noteVideoSinkFrame()
    // can timestamp the most recent frame
    gst_pad_add_probe(sinkPad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER), _videoSinkProbe, this, nullptr);
    gst_object_unref(sinkPad);
    sinkPad = nullptr;
}
#endif

906
//-----------------------------------------------------------------------------
907 908 909 910
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
911
//                         source-->tee
912
//                                   |
Andrew Voznytsa's avatar
Andrew Voznytsa committed
913 914 915 916 917
//                                   |    +---------_sink----------+
//                                   |    |                        |
//   we are adding these elements->  +->teepad-->queue-->_filesink |
//                                        |                        |
//                                        +------------------------+
918
#if defined(QGC_GST_STREAMING)
919 920 921 922 923 924 925 926 927 928
// Build the recording file sink: a bin containing muxer -> filesink, exposed
// through a single ghost "sink" pad so it can be linked like one element.
//
//   videoFile - full path of the output file (passed to filesink "location")
//   format    - index into kVideoMuxes/kVideoExtensions selecting the muxer
//
// Returns the bin on success, or nullptr on failure. On any failure before
// the elements are added to the bin, the partially created elements are
// unref'd individually; after that point the bin owns them and a single
// unref of the bin releases everything.
GstElement*
VideoReceiver::_makeFileSink(const QString& videoFile, unsigned format)
{
    GstElement* fileSink = nullptr;
    GstElement* mux = nullptr;
    GstElement* sink = nullptr;
    GstElement* bin = nullptr;
    // true while mux/sink are still individually owned (not yet in the bin)
    bool releaseElements = true;

    // do/while(0) gives a single cleanup path via 'break' on any failure
    do{
        if ((mux = gst_element_factory_make(kVideoMuxes[format], nullptr)) == nullptr) {
            qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_element_factory_make('" << kVideoMuxes[format] << "')";
            break;
        }

        if ((sink = gst_element_factory_make("filesink", nullptr)) == nullptr) {
            qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_element_factory_make('filesink')";
            break;
        }

        g_object_set(static_cast<gpointer>(sink), "location", qPrintable(videoFile), nullptr);

        if ((bin = gst_bin_new("sinkbin")) == nullptr) {
            qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_bin_new('sinkbin')";
            break;
        }

        // Pad template is owned by the element class — it must not be unref'd
        GstPadTemplate* padTemplate;

        if ((padTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mux), "video_%u")) == nullptr) {
            qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_element_class_get_pad_template(mux)";
            break;
        }

        // FIXME: AV: pad handling is potentially leaking (and other similar places too!)
        // NOTE(review): a pad obtained via gst_element_request_pad() should be
        // returned with gst_element_release_request_pad() on the failure paths
        // below, not only unref'd — confirm against the muxer's pad semantics
        GstPad* pad;

        if ((pad = gst_element_request_pad(mux, padTemplate, nullptr, nullptr)) == nullptr) {
            qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_element_request_pad(mux)";
            break;
        }

        // From here on the bin owns mux and sink
        gst_bin_add_many(GST_BIN(bin), mux, sink, nullptr);

        releaseElements = false;

        // Expose the muxer's request pad as the bin's own "sink" pad
        GstPad* ghostpad = gst_ghost_pad_new("sink", pad);

        gst_element_add_pad(bin, ghostpad);

        gst_object_unref(pad);
        pad = nullptr;

        if (!gst_element_link(mux, sink)) {
            qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_element_link()";
            break;
        }

        // Success: transfer ownership of the bin to the caller
        fileSink = bin;
        bin = nullptr;
    } while(0);

    if (releaseElements) {
        if (sink != nullptr) {
            gst_object_unref(sink);
            sink = nullptr;
        }

        if (mux != nullptr) {
            gst_object_unref(mux);
            mux = nullptr;
        }
    }

    // Non-null bin here means a failure after the elements were added:
    // releasing the bin releases its children too
    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    return fileSink;
}
1000
#endif
1001

1002
void
1003
VideoReceiver::startRecording(const QString &videoFile)
1004
{
1005
#if defined(QGC_GST_STREAMING)
1006

1007 1008
    qCDebug(VideoReceiverLog) << "startRecording()";
    // exit immediately if we are already recording
Gus Grubba's avatar
Gus Grubba committed
1009
    if(_pipeline == nullptr || _recording) {
1010 1011 1012 1013
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

1014
    uint32_t muxIdx = _videoSettings->recordingFormat()->rawValue().toUInt();
1015 1016 1017 1018 1019 1020 1021 1022
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

Tomaz Canabrava's avatar
Tomaz Canabrava committed
1023 1024 1025 1026
    QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
    if(savePath.isEmpty()) {
        qgcApp()->showMessage(tr("Unabled to record video. Video save path must be specified in Settings."));
        return;
1027
    }
Tomaz Canabrava's avatar
Tomaz Canabrava committed
1028 1029 1030
    _videoFile = savePath + "/"
                + (videoFile.isEmpty() ? QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") : videoFile)
                + "." + kVideoExtensions[muxIdx];
1031 1032
    qDebug() << "New video file:" << _videoFile;

1033
    emit videoFileChanged();
1034

1035 1036 1037 1038 1039 1040 1041 1042 1043 1044
    _sink           = new Sink();
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", nullptr);
    _sink->filesink = _makeFileSink(_videoFile, muxIdx);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->filesink) {
        qCritical() << "VideoReceiver::startRecording() failed to make _sink elements";
        return;
    }
1045 1046 1047 1048

    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->filesink);

1049
    gst_bin_add(GST_BIN(_pipeline), _sink->queue);
1050 1051 1052

    gst_element_sync_state_with_parent(_sink->queue);

1053 1054 1055
    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
1056 1057
    // Once we have this valid frame, we attach the filesink.
    // Attaching it here would cause the filesink to fail to preroll and to stall the pipeline for a few seconds.
1058
    GstPad* probepad = gst_element_get_static_pad(_sink->queue, "src");
1059
    gst_pad_add_probe(probepad, GST_PAD_PROBE_TYPE_BUFFER, _keyframeWatch, this, nullptr); // to drop the buffer
1060 1061 1062
    gst_object_unref(probepad);

    // Link the recording branch to the pipeline
1063 1064 1065 1066
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

1067 1068 1069 1070 1071 1072 1073
//    // Add the filesink once we have a valid I-frame
//    gst_bin_add(GST_BIN(_pipeline), _sink->filesink);
//    if (!gst_element_link(_sink->queue, _sink->filesink)) {
//        qCritical() << "Failed to link queue and file sink";
//    }
//    gst_element_sync_state_with_parent(_sink->filesink);

1074 1075
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording");

1076 1077 1078
    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
DonLakeFlyer's avatar
DonLakeFlyer committed
1079 1080
#else
    Q_UNUSED(videoFile)
1081 1082 1083
#endif
}

1084 1085 1086
//-----------------------------------------------------------------------------
// Request that the current recording be stopped.
// The branch is not torn down here: an IDLE probe is armed on the tee pad so
// that _unlinkCallBack() performs the unlink only when no buffer is in
// flight, avoiding data loss in the finalized file.
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stopRecording()";
    // Nothing to do unless a recording branch is currently attached
    if(!_recording || _pipeline == nullptr) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for data block before unlinking
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, nullptr);
#endif
}

1100
//-----------------------------------------------------------------------------
1101 1102 1103 1104 1105 1106
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recoring elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
1107 1108
// Final teardown of the recording branch after its EOS has been observed:
// pull queue and filesink out of the pipeline, drive them to NULL state,
// drop the extra references taken in startRecording(), and delete the Sink
// bookkeeping object. Emits recordingChanged() so the UI updates.
void
VideoReceiver::_shutdownRecordingBranch()
{
    gst_bin_remove(GST_BIN(_pipeline), _sink->queue);
    gst_bin_remove(GST_BIN(_pipeline), _sink->filesink);

    // Drive both elements to NULL so their resources (incl. the open file)
    // are released before we drop our references
    gst_element_set_state(_sink->queue,     GST_STATE_NULL);
    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);

    // These release the extra refs taken in startRecording()
    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = nullptr;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording Stopped";
}
#endif

1128
//-----------------------------------------------------------------------------
1129 1130 1131 1132 1133
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
1134
// Detach the recording branch from the tee (called from _unlinkCallBack
// while the tee pad is idle, so no buffer is mid-flight).
// After unlinking, an EOS is pushed into the branch's queue so the muxer
// downstream can finalize the video file; the tee's request pad is then
// released. Actual element teardown happens later, in
// _shutdownRecordingBranch(), once that EOS is seen on the bus.
void
VideoReceiver::_unlinkRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)
    // Send EOS at the beginning of the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_unlink(_sink->teepad, sinkpad);
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording EOS was sent";
    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);
}
#endif

1150
//-----------------------------------------------------------------------------
1151
#if defined(QGC_GST_STREAMING)
1152 1153
// IDLE pad-probe callback armed by stopRecording().
// Fires when the tee pad has no buffer in flight; hands off to
// _unlinkRecordingBranch() exactly once, guarded by an atomic flag in case
// the probe is invoked more than once. Always removes itself.
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_REMOVE;
    }

    VideoReceiver* receiver = static_cast<VideoReceiver*>(user_data);

    // We will only act once
    if(g_atomic_int_compare_and_exchange(&receiver->_sink->removing, FALSE, TRUE)) {
        receiver->_unlinkRecordingBranch(info);
    }

    return GST_PAD_PROBE_REMOVE;
}
#endif
1166

1167 1168 1169 1170 1171 1172 1173 1174 1175 1176 1177 1178 1179 1180 1181 1182 1183 1184 1185 1186
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Buffer probe on the display sink's pad (installed in setVideoSink()).
// Records each frame arrival via _noteVideoSinkFrame() and always lets the
// buffer pass through untouched.
GstPadProbeReturn
VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_OK;
    }

    static_cast<VideoReceiver*>(user_data)->_noteVideoSinkFrame();

    return GST_PAD_PROBE_OK;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_noteVideoSinkFrame()
{
1187
    _lastFrameTime = QDateTime::currentSecsSinceEpoch();
1188 1189 1190
}
#endif

1191 1192 1193 1194 1195 1196
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Buffer probe on the recording queue's src pad (installed in
// startRecording()). Drops every delta frame until the first keyframe
// arrives; on that keyframe it rebases the pad's timestamp offset so the
// recording starts at t=0, attaches and links the filesink, and removes
// itself. This guarantees the file begins with a decodable keyframe.
GstPadProbeReturn
VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != nullptr && user_data != nullptr) {
        GstBuffer* buf = gst_pad_probe_info_get_buffer(info);
        if(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) { // wait for a keyframe
            return GST_PAD_PROBE_DROP;
        } else {
            VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);

            // set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except these which are downstream!
            gst_pad_set_offset(pad, -buf->pts);

            // Add the filesink once we have a valid I-frame
            // (deferred from startRecording() to avoid stalling the pipeline
            // while the filesink prerolls)
            gst_bin_add(GST_BIN(pThis->_pipeline), pThis->_sink->filesink);
            if (!gst_element_link(pThis->_sink->queue, pThis->_sink->filesink)) {
                qCritical() << "Failed to link queue and file sink";
            }
            gst_element_sync_state_with_parent(pThis->_sink->filesink);

            qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
            pThis->gotFirstRecordingKeyFrame();
        }
    }

    // Keyframe handled (or args invalid): detach the probe
    return GST_PAD_PROBE_REMOVE;
}
#endif

1223 1224 1225 1226 1227
//-----------------------------------------------------------------------------
// Periodic watchdog tick. Mirrors the pipeline's streaming state into
// _videoRunning (emitting videoRunningChanged() on transitions), restarts a
// stopped-but-wanted stream, and tears down a stream whose frames have
// stalled longer than the configured timeout.
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
    // Do nothing while a start/stop transition is in flight
    if(_starting || _stopping) {
        return;
    }

    // Keep _videoRunning in sync with _streaming, emitting only on change
    if(_videoRunning != _streaming) {
        _videoRunning = _streaming;
        emit videoRunningChanged();
    }

    if(!_videoRunning) {
        // FIXME: AV: if pipeline is _running but not _streaming for some time then we need to restart
        if(!_stop && !_running && !_uri.isEmpty() && _videoSettings->streamEnabled()->rawValue().toBool()) {
            start();
        }
        return;
    }

    uint32_t timeout = 1;
    if(qgcApp()->toolbox() && qgcApp()->toolbox()->settingsManager()) {
        timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
    }

    const qint64 now = QDateTime::currentSecsSinceEpoch();

    if(now - _lastFrameTime > timeout) {
        // Frames have stalled: tear the pipeline down...
        stop();
        // ...but clear _stop so a later tick of this timer restarts it
        _stop = false;
    }
#endif
}