/****************************************************************************
 *
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/


/**
 * @file
 *   @brief QGC Video Receiver
 *   @author Gus Grubba <gus@auterion.com>
 */

#include "VideoReceiver.h"
18 19
#include "SettingsManager.h"
#include "QGCApplication.h"
20
#include "VideoManager.h"
21 22 23
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif
Gus Grubba's avatar
Gus Grubba committed
24
#include <QDebug>
25
#include <QUrl>
26 27
#include <QDir>
#include <QDateTime>
28
#include <QSysInfo>
29

30 31
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

32 33
#if defined(QGC_GST_STREAMING)

34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
// Parallel tables: kVideoExtensions[i] is the file extension produced by the
// muxer kVideoMuxes[i]. Keep both arrays the same length and in the same order.
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

// Number of entries in kVideoMuxes (and, by convention, kVideoExtensions).
#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

50 51 52
#endif


Gus Grubba's avatar
Gus Grubba committed
53 54
VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _stop(true)
    , _sink(nullptr)
    , _tee(nullptr)
    , _pipeline(nullptr)
    , _pipelineStopRec(nullptr)
    , _videoSink(nullptr)
    , _lastFrameId(G_MAXUINT64)
    , _lastFrameTime(0)
    , _restart_time_ms(1389)        // delay before restarting the stream after an error (ms)
    , _socket(nullptr)
    , _serverPresent(false)
    , _tcpTestInterval_ms(5000)     // interval between TCP/RTSP server probes (ms)
    , _udpReconnect_us(5000000)     // passed to rtspsrc "timeout" property (microseconds)
#endif
    , _videoRunning(false)
    , _showFullScreen(false)
    , _videoSettings(nullptr)
{
    _videoSettings = qgcApp()->toolbox()->settingsManager()->videoSettings();
#if defined(QGC_GST_STREAMING)
    // Single-shot timers: restart after a pipeline error, and re-probe the
    // TCP/RTSP server until it answers.
    _restart_timer.setSingleShot(true);
    connect(&_restart_timer, &QTimer::timeout, this, &VideoReceiver::_restart_timeout);
    _tcp_timer.setSingleShot(true);
    connect(&_tcp_timer, &QTimer::timeout, this, &VideoReceiver::_tcp_timeout);
    // Route GStreamer bus notifications (emitted as Qt signals from
    // _onBusMessage) to their handlers.
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
    // Periodic 1 s tick used to monitor frame freshness.
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    // Tear down any active pipeline, then release our reference to the
    // video sink (setVideoSink(nullptr) unrefs it).
    stop();
    setVideoSink(nullptr);
#endif
}

101 102 103 104 105 106 107 108 109
//-----------------------------------------------------------------------------
// Record the target path for a still-image capture and emit imageFileChanged
// to notify observers of the new path.
void
VideoReceiver::grabImage(QString imageFile)
{
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
110
#if defined(QGC_GST_STREAMING)
111 112
// "pad-added" callback: link a dynamically created pad of |element| to the
// "sink" pad of the element passed through |data|.
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    //g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    // gst_pad_get_pad_template_caps() returns a new reference; it was
    // previously leaked here.
    gst_caps_unref(p_caps);
    GstElement* sink = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, sink, "sink") == false)
        qCritical() << "newPadCB : failed to link elements\n";
    g_free(name);
}

126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196
// Answer a CAPS query during decodebin autoplugging by forwarding the caps
// accepted by the sink pad of the downstream element passed via |data|
// (named "glupload" here; in this file the video sink is passed).
static gboolean
autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glupload = (GstElement* )data;

    GstPad* sinkpad = gst_element_get_static_pad(glupload, "sink");

    if (!sinkpad) {
        qCritical() << "autoplugQueryCaps(): No sink pad found";
        return FALSE;
    }

    GstCaps* filter;

    gst_query_parse_caps(query, &filter);

    GstCaps* sinkcaps = gst_pad_query_caps(sinkpad, filter);

    gst_query_set_caps_result(query, sinkcaps);

    // Succeed only when the downstream element can accept something.
    const gboolean ret = !gst_caps_is_empty(sinkcaps);

    gst_caps_unref(sinkcaps);
    sinkcaps = nullptr;

    gst_object_unref(sinkpad);
    sinkpad = nullptr;

    return ret;
}

// Answer a CONTEXT query during decodebin autoplugging by delegating it to the
// sink pad of the downstream element passed via |data|.
static gboolean
autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glsink = (GstElement* )data;

    GstPad* sinkpad = gst_element_get_static_pad(glsink, "sink");

    if (!sinkpad){
        qCritical() << "autoplugQueryContext(): No sink pad found";
        return FALSE;
    }

    const gboolean ret = gst_pad_query(sinkpad, query);

    gst_object_unref(sinkpad);
    sinkpad = nullptr;

    return ret;
}

// decodebin "autoplug-query" dispatcher: CAPS and CONTEXT queries are routed
// to their helpers; every other query type is left unanswered (FALSE).
static gboolean
autoplugQueryCB(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    const GstQueryType queryType = GST_QUERY_TYPE(query);

    if (queryType == GST_QUERY_CAPS) {
        return autoplugQueryCaps(bin, pad, element, query, data);
    }

    if (queryType == GST_QUERY_CONTEXT) {
        return autoplugQueryContext(bin, pad, element, query, data);
    }

    return FALSE;
}

197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238
//-----------------------------------------------------------------------------
// Expose |pad| of an element living inside a bin on the bin itself by adding
// a ghost pad, so the bin can be linked externally.
static void
_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);

    GstPad* ghostpad = gst_ghost_pad_new(name, pad);

    g_free(name);

    gst_pad_set_active(ghostpad, TRUE);

    if (!gst_element_add_pad(GST_ELEMENT_PARENT(element), ghostpad)) {
        qCritical() << "Failed to add ghost pad to source";
        // On failure the bin did not take ownership of the pad, so release
        // our reference to avoid leaking it (previously leaked).
        gst_object_unref(ghostpad);
    }
}

// "pad-added" callback used when the source's pads were not known up front:
// if the new pad produces RTP, insert an rtpjitterbuffer between it and the
// downstream element (|data|) before linking; otherwise link directly.
static void
_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
{
    gboolean isRtpPad = FALSE;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps;

        if ((caps = gst_pad_query_caps(pad, filter)) && !gst_caps_is_empty(caps)) {
            // gst_caps_to_string() returns an allocated string; it was
            // previously passed straight to qDebug() and leaked.
            gchar* description = gst_caps_to_string(caps);
            qDebug() << description;
            g_free(description);
            isRtpPad = TRUE;

            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    if (isRtpPad) {
        GstElement* buffer;

        if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) != nullptr) {
            gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), buffer);

            gst_element_sync_state_with_parent(buffer);

            GstPad* sinkpad = gst_element_get_static_pad(buffer, "sink");

            if (sinkpad != nullptr) {
                const GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);

                gst_object_unref(sinkpad);
                sinkpad = nullptr;

                if (ret == GST_PAD_LINK_OK) {
                    // Continue the link from the jitter buffer's src pad.
                    // NOTE(review): this pad reference is never unreffed
                    // (pre-existing); confirm whether a leak matters here.
                    pad = gst_element_get_static_pad(buffer, "src");
                    element = buffer;
                } else {
                    // Error messages previously blamed _wrapWithGhostPad.
                    qCritical() << "_linkPadWithOptionalBuffer partially failed. Error with gst_pad_link()";
                }
            } else {
                qCritical() << "_linkPadWithOptionalBuffer partially failed. Error with gst_element_get_static_pad()";
            }
        } else {
            qCritical() << "_linkPadWithOptionalBuffer partially failed. Error with gst_element_factory_make('rtpjitterbuffer')";
        }
    }

    newPadCB(element, pad, data);
}

// Pad iteration callback for gst_element_foreach_src_pad(): records in the
// int pointed to by |user_data| that a src pad exists (bit 0) and whether it
// can produce RTP (bit 1). Always returns TRUE to keep iterating.
static gboolean
_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
    int* probeRes = static_cast<int*>(user_data);

    *probeRes |= 1;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, filter);

        if (caps != nullptr) {
            if (!gst_caps_is_empty(caps)) {
                *probeRes |= 2;
            }
            // Previously caps were only unreffed when non-empty, leaking
            // the empty-caps case.
            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    return TRUE;
}

// Build the "sourcebin" for |uri|: a data source (tcpclientsrc / rtspsrc /
// udpsrc), an optional rtpjitterbuffer for RTP payloads, and a parsebin,
// wrapped in a bin whose pads are ghosted so the bin links like one element.
// Returns nullptr on any failure; all partially built elements are released.
GstElement*
VideoReceiver::_makeSource(const QString& uri)
{
    if (uri.isEmpty()) {
        qCritical() << "VideoReceiver::_makeSource() failed because URI is not specified";
        return nullptr;
    }

    bool isTaisync  = uri.contains("tsusb://");
    bool isUdp264   = uri.contains("udp://");
    bool isRtsp     = uri.contains("rtsp://");
    bool isUdp265   = uri.contains("udp265://");
    bool isTcpMPEGTS= uri.contains("tcp://");
    bool isUdpMPEGTS= uri.contains("mpegts://");

    GstElement* source  = nullptr;
    GstElement* buffer  = nullptr;
    GstElement* parser  = nullptr;
    GstElement* bin     = nullptr;
    GstElement* srcbin  = nullptr;

    do {
        QUrl url(uri);

        if(isTcpMPEGTS) {
            if ((source = gst_element_factory_make("tcpclientsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "host", qPrintable(url.host()), "port", url.port(), nullptr);
            }
        } else if (isRtsp) {
            if ((source = gst_element_factory_make("rtspsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "location", qPrintable(uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, nullptr);
            }
        } else if(isUdp264 || isUdp265 || isUdpMPEGTS || isTaisync) {
            if ((source = gst_element_factory_make("udpsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "uri", QString("udp://%1:%2").arg(qPrintable(url.host()), QString::number(url.port())).toUtf8().data(), nullptr);

                GstCaps* caps = nullptr;

                if(isUdp264) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                        qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_caps_from_string()";
                        break;
                    }
                } else if (isUdp265) {
                    // Fixed: this branch previously tested isUdp264 again, so
                    // the H265 caps below were unreachable for udp265:// URIs.
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                        qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_caps_from_string()";
                        break;
                    }
                }

                if (caps != nullptr) {
                    g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
                    gst_caps_unref(caps);
                    caps = nullptr;
                }
            }
        } else {
            qWarning() << "VideoReceiver::_makeSource(): URI is not recognized";
        }

        if (!source) {
            qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_factory_make() for data source";
            break;
        }

        if ((parser = gst_element_factory_make("parsebin", "parser")) == nullptr) {
            qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_factory_make('parsebin')";
            break;
        }

        if ((bin = gst_bin_new("sourcebin")) == nullptr) {
            qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_bin_new('sourcebin')";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);

        // Probe the source's static src pads (if any) to learn whether it
        // exposes pads now (bit 0) and whether they carry RTP (bit 1).
        int probeRes = 0;

        gst_element_foreach_src_pad(source, _padProbe, &probeRes);

        if (probeRes & 1) {
            if (probeRes & 2) {
                // RTP stream: add a jitter buffer between source and parser.
                if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
                    qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_factory_make('rtpjitterbuffer')";
                    break;
                }

                gst_bin_add(GST_BIN(bin), buffer);

                if (!gst_element_link_many(source, buffer, parser, nullptr)) {
                    qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_link()";
                    break;
                }
            } else {
                if (!gst_element_link(source, parser)) {
                    qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_link()";
                    break;
                }
            }
        } else {
            // No static pads yet (e.g. rtspsrc): link when pads appear.
            g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
        }

        g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);

        // Ownership of the elements has passed to the bin.
        source = buffer = parser = nullptr;

        srcbin = bin;
        bin = nullptr;
    } while(0);

    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    if (parser != nullptr) {
        gst_object_unref(parser);
        parser = nullptr;
    }

    if (buffer != nullptr) {
        gst_object_unref(buffer);
        buffer = nullptr;
    }

    if (source != nullptr) {
        gst_object_unref(source);
        source = nullptr;
    }

    return srcbin;
}

430 431
//-----------------------------------------------------------------------------
// Fired by _restart_timer after a stream error: ask the VideoManager to
// restart video.
void
VideoReceiver::_restart_timeout()
{
    qgcApp()->toolbox()->videoManager()->restartVideo();
}
436
#endif
437

438 439 440 441 442 443
//-----------------------------------------------------------------------------
// Probe timer for TCP/RTSP streams: try to open a plain TCP connection to the
// server; on success _connected() starts the stream, on failure/timeouts we
// simply try again on the next tick.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_tcp_timeout()
{
    //-- If socket is live, we got no connection nor a socket error
    delete _socket;
    _socket = nullptr;

    if(!_videoSettings->streamEnabled()->rawValue().toBool()) {
        return;
    }

    //-- RTSP will try to connect to the server. If it cannot connect,
    //   it will simply give up and never try again. Instead, we keep
    //   attempting a connection on this timer. Once a connection is
    //   found to be working, only then we actually start the stream.
    QUrl url(_uri);

    //-- If RTSP and no port is defined, set default RTSP port (554)
    if(_uri.contains("rtsp://") && url.port() <= 0) {
        url.setPort(554);
    }

    _socket = new QTcpSocket;

    QNetworkProxy tempProxy;
    tempProxy.setType(QNetworkProxy::DefaultProxy);
    _socket->setProxy(tempProxy);

    connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
    connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);

    _socket->connectToHost(url.host(), static_cast<uint16_t>(url.port()));
    _tcp_timer.start(_tcpTestInterval_ms);
}
#endif

//-----------------------------------------------------------------------------
// TCP probe succeeded: the server is reachable, so discard the probe socket
// and start the actual stream (if streaming is still enabled).
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_connected()
{
    //-- Server showed up. Now we start the stream.
    _tcp_timer.stop();
    _socket->deleteLater();
    _socket = nullptr;
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        _serverPresent = true;
        start();
    }
}
#endif

//-----------------------------------------------------------------------------
// TCP probe failed: dispose of the socket and re-arm the probe timer so we
// keep retrying while streaming remains enabled.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    _socket->deleteLater();
    _socket = nullptr;
    //-- Try again in a while
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

500
//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
void
VideoReceiver::start()
{
    qCDebug(VideoReceiverLog) << "start():" << _uri;
    if(qgcApp()->runningUnitTests()) {
        return;
    }
    if(!_videoSettings->streamEnabled()->rawValue().toBool() ||
       !_videoSettings->streamConfigured()) {
        qCDebug(VideoReceiverLog) << "start() but not enabled/configured";
        return;
    }

#if defined(QGC_GST_STREAMING)
    _stop = false;

    QString uri = _uri;

#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
    //-- Taisync on iOS or Android sends a raw h.264 stream
    if (qgcApp()->toolbox()->videoManager()->isTaisync()) {
        uri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
    }
#endif

    if (uri.isEmpty()) {
        qCritical() << "VideoReceiver::start() failed because URI is not specified";
        return;
    }

    bool useTcpConnection = uri.contains("rtsp://") || uri.contains("tcp://");

    if (_videoSink == nullptr) {
        qCritical() << "VideoReceiver::start() failed because video sink is not set";
        return;
    }
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }

    _starting = true;

    //-- For RTSP and TCP, check to see if server is there first
    // (the probe calls start() again via _connected() once it succeeds).
    if(!_serverPresent && useTcpConnection) {
        _tcp_timer.start(100);
        return;
    }

    // Reset frame-staleness tracking for the new session.
    _lastFrameId = G_MAXUINT64;
    _lastFrameTime = 0;

    bool running    = false;
    bool pipelineUp = false;

    GstElement* source  = nullptr;
    GstElement* queue   = nullptr;
    GstElement* decoder = nullptr;

    // do/while(0) so any construction failure can bail out with `break`
    // and fall through to the shared cleanup below.
    do {
        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
            break;
        }

        if ((source = _makeSource(uri)) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with _makeSource()";
            break;
        }

        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
            break;
        }

        if((queue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
            break;
        }

        if ((decoder = gst_element_factory_make("decodebin", "decoder")) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('decodebin')";
            break;
        }

        gst_bin_add_many(GST_BIN(_pipeline), source, _tee, queue, decoder, _videoSink, nullptr);

        // From here on, the pipeline owns the elements added above.
        pipelineUp = true;

        g_signal_connect(source, "pad-added", G_CALLBACK(newPadCB), _tee);

        if(!gst_element_link_many(_tee, queue, decoder, nullptr)) {
            qCritical() << "Unable to link UDP elements.";
            break;
        }

        // decodebin's output pads appear dynamically; link to the sink then.
        g_signal_connect(decoder, "pad-added", G_CALLBACK(newPadCB), _videoSink);
        g_signal_connect(decoder, "autoplug-query", G_CALLBACK(autoplugQueryCB), _videoSink);

        source = queue = decoder = nullptr;

        GstBus* bus = nullptr;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = nullptr;
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-paused");
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (!running) {
        qCritical() << "VideoReceiver::start() failed";

        // In newer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != nullptr) {
            gst_object_unref(_pipeline);
            _pipeline = nullptr;
        }

        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
            if (decoder != nullptr) {
                gst_object_unref(decoder);
                decoder = nullptr;
            }

            if (queue != nullptr) {
                gst_object_unref(queue);
                queue = nullptr;
            }

            if (source != nullptr) {
                gst_object_unref(source);
                source = nullptr;
            }

            if (_tee != nullptr) {
                gst_object_unref(_tee);
                _tee = nullptr;
            }

        }

        _running = false;
    } else {
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-playing");
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
    }
    _starting = false;
#endif
}

669 670 671
//-----------------------------------------------------------------------------
// Stop the stream. If the pipeline is actively streaming, send EOS and block
// until the pipeline confirms it (or errors), then tear everything down.
void
VideoReceiver::stop()
{
    if(qgcApp() && qgcApp()->runningUnitTests()) {
        return;
    }
#if defined(QGC_GST_STREAMING)
    _stop = true;
    qCDebug(VideoReceiverLog) << "stop()";
    if(!_streaming) {
        _shutdownPipeline();
    } else if (_pipeline != nullptr && !_stopping) {
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        // Stop async bus delivery; we poll for the EOS/ERROR result below.
        gst_bus_disable_sync_message_emission(bus);
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
        // Blocking wait (GST_CLOCK_TIME_NONE) until EOS or ERROR arrives.
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
        gst_object_unref(bus);
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
            qCritical() << "Error stopping pipeline!";
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
        gst_message_unref(message);
    }
#endif
}

700 701 702
//-----------------------------------------------------------------------------
// Set the stream URI used by the next start(); does not affect a pipeline
// that is already running.
void
VideoReceiver::setUri(const QString & uri)
{
    _uri = uri;
}

707
//-----------------------------------------------------------------------------
// Unconditionally tear down the pipeline: detach the bus, drop to NULL state,
// release the pipeline and recording sink, and reset all state flags.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    GstBus* bus = nullptr;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = nullptr;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    gst_object_unref(_pipeline);
    _pipeline = nullptr;
    delete _sink;
    _sink = nullptr;
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
#endif
734

735
//-----------------------------------------------------------------------------
// A pipeline error was reported on the bus: stop the stream and schedule an
// automatic restart via _restart_timer.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    stop();
    _restart_timer.start(_restart_time_ms);
}
#endif

745
//-----------------------------------------------------------------------------
// End-of-stream received: expected when we are stopping the pipeline or
// detaching the recording branch; anything else is treated as an error.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleEOS() {
    if(_stopping) {
        _shutdownPipeline();
        qCDebug(VideoReceiverLog) << "Stopped";
    } else if(_recording && _sink != nullptr && _sink->removing) {
        // Guard against a null _sink before dereferencing (previously this
        // relied on _recording implying a live _sink).
        _shutdownRecordingBranch();
    } else {
        qWarning() << "VideoReceiver: Unexpected EOS!";
        _handleError();
    }
}
#endif
Gus Grubba's avatar
Gus Grubba committed
760

761
//-----------------------------------------------------------------------------
// Pipeline state change reported on the bus: we consider ourselves
// "streaming" whenever the pipeline is in the PLAYING state.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        //qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif

772
//-----------------------------------------------------------------------------
// GStreamer bus callback (may be invoked from a streaming thread — confirm):
// translate bus messages into Qt signals so the connected slots handle them.
#if defined(QGC_GST_STREAMING)
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    // static_cast instead of a C-style cast; data is always `this`.
    VideoReceiver* pThis = static_cast<VideoReceiver*>(data);

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug = nullptr;
        GError* error = nullptr;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCritical() << error->message;
        g_error_free(error);
        emit pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        emit pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        emit pThis->msgStateChangedReceived();
        break;
    default:
        break;
    }

    return TRUE;
}
#endif
805

806
//-----------------------------------------------------------------------------
// Delete the oldest recorded videos until total disk usage fits under the
// user-configured storage limit. No-op when the limit is disabled.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_cleanupOldVideos()
{
    //-- Only perform cleanup if storage limit is enabled
    if(_videoSettings->enableStorageLimit()->rawValue().toBool()) {
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        QDir videoDir = QDir(savePath);
        videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
        videoDir.setSorting(QDir::Time);
        //-- All the movie extensions we support
        QStringList nameFilters;
        // Iterate by the extension table's own size (was NUM_MUXES, which only
        // coincidentally matches and would break if the tables diverge).
        for(size_t i = 0; i < sizeof(kVideoExtensions) / sizeof(kVideoExtensions[0]); i++) {
            nameFilters << QString("*.") + QString(kVideoExtensions[i]);
        }
        videoDir.setNameFilters(nameFilters);
        //-- get the list of videos stored
        QFileInfoList vidList = videoDir.entryInfoList();
        if(!vidList.isEmpty()) {
            uint64_t total   = 0;
            //-- Settings are stored using MB; widen before multiplying so
            //   limits >= 4096 MB don't overflow 32-bit arithmetic.
            uint64_t maxSize = static_cast<uint64_t>(_videoSettings->maxVideoSize()->rawValue().toUInt()) * 1024 * 1024;
            //-- Compute total used storage
            for(int i = 0; i < vidList.size(); i++) {
                total += vidList[i].size();
            }
            //-- Remove old movies until max size is satisfied.
            while(total >= maxSize && !vidList.isEmpty()) {
                total -= vidList.last().size();
                qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
                QFile file (vidList.last().filePath());
                file.remove();
                vidList.removeLast();
            }
        }
    }
}
#endif
845

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::setVideoSink(GstElement* videoSink)
{
    // Swapping the sink underneath a live pipeline is not supported.
    if(_pipeline != nullptr) {
        qCDebug(VideoReceiverLog) << "Video receiver pipeline is active, video sink change is not possible";
        return;
    }

    // Drop our reference on any previously configured sink.
    if (_videoSink != nullptr) {
        gst_object_unref(_videoSink);
        _videoSink = nullptr;
    }

    if (videoSink == nullptr) {
        return;
    }

    _videoSink = videoSink;
    gst_object_ref(_videoSink);

    // Probe buffers flowing into the sink so frame arrival can be tracked.
    GstPad* sinkPad = gst_element_get_static_pad(_videoSink, "sink");

    if (sinkPad == nullptr) {
        qCDebug(VideoReceiverLog) << "Unable to find sink pad of video sink";
        return;
    }

    gst_pad_add_probe(sinkPad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER), _videoSinkProbe, this, nullptr);
    gst_object_unref(sinkPad);
}
#endif

878
//-----------------------------------------------------------------------------
879 880 881 882
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
883
//                         source-->tee
884 885 886 887 888 889
//                                   |
//                                   |    +--------------_sink-------------------+
//                                   |    |                                      |
//   we are adding these elements->  +->teepad-->queue-->matroskamux-->_filesink |
//                                        |                                      |
//                                        +--------------------------------------+
890
void
891
VideoReceiver::startRecording(const QString &videoFile)
892
{
893
#if defined(QGC_GST_STREAMING)
894

895 896
    qCDebug(VideoReceiverLog) << "startRecording()";
    // exit immediately if we are already recording
Gus Grubba's avatar
Gus Grubba committed
897
    if(_pipeline == nullptr || _recording) {
898 899 900 901
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

902
    uint32_t muxIdx = _videoSettings->recordingFormat()->rawValue().toUInt();
903 904 905 906 907 908 909 910
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

911
    _sink           = new Sink();
912
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
Gus Grubba's avatar
Gus Grubba committed
913 914 915
    _sink->queue    = gst_element_factory_make("queue", nullptr);
    _sink->mux      = gst_element_factory_make(kVideoMuxes[muxIdx], nullptr);
    _sink->filesink = gst_element_factory_make("filesink", nullptr);
916 917
    _sink->removing = false;

918
    if(!_sink->teepad || !_sink->queue || !_sink->mux || !_sink->filesink) {
919 920 921 922
        qCritical() << "VideoReceiver::startRecording() failed to make _sink elements";
        return;
    }

923 924 925 926 927 928 929 930 931 932 933
    if(videoFile.isEmpty()) {
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        if(savePath.isEmpty()) {
            qgcApp()->showMessage(tr("Unabled to record video. Video save path must be specified in Settings."));
            return;
        }
        _videoFile = savePath + "/" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") + "." + kVideoExtensions[muxIdx];
    } else {
        _videoFile = videoFile;
    }
    emit videoFileChanged();
934

935
    g_object_set(static_cast<gpointer>(_sink->filesink), "location", qPrintable(_videoFile), nullptr);
936
    qCDebug(VideoReceiverLog) << "New video file:" << _videoFile;
937 938 939 940 941

    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->mux);
    gst_object_ref(_sink->filesink);

942 943
    gst_bin_add_many(GST_BIN(_pipeline), _sink->queue, _sink->mux, nullptr);
    gst_element_link_many(_sink->queue, _sink->mux, nullptr);
944 945 946 947

    gst_element_sync_state_with_parent(_sink->queue);
    gst_element_sync_state_with_parent(_sink->mux);

948 949 950
    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
951 952
    // Once we have this valid frame, we attach the filesink.
    // Attaching it here would cause the filesink to fail to preroll and to stall the pipeline for a few seconds.
953
    GstPad* probepad = gst_element_get_static_pad(_sink->queue, "src");
Gus Grubba's avatar
Gus Grubba committed
954
    gst_pad_add_probe(probepad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER /* | GST_PAD_PROBE_TYPE_BLOCK */), _keyframeWatch, this, nullptr); // to drop the buffer or to block the buffer?
955 956 957
    gst_object_unref(probepad);

    // Link the recording branch to the pipeline
958 959 960 961
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

962 963
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording");

964 965 966
    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
DonLakeFlyer's avatar
DonLakeFlyer committed
967 968
#else
    Q_UNUSED(videoFile)
969 970 971
#endif
}

972 973 974
//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
975
{
976
#if defined(QGC_GST_STREAMING)
977 978
    qCDebug(VideoReceiverLog) << "stopRecording()";
    // exit immediately if we are not recording
Gus Grubba's avatar
Gus Grubba committed
979
    if(_pipeline == nullptr || !_recording) {
980 981 982 983
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for data block before unlinking
Gus Grubba's avatar
Gus Grubba committed
984
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, nullptr);
985 986 987
#endif
}

//-----------------------------------------------------------------------------
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recording elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownRecordingBranch()
{
    // Pull the recording elements back out of the temporary pipeline...
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->queue);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->mux);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->filesink);

    // ...then tear the temporary pipeline itself down.
    gst_element_set_state(_pipelineStopRec, GST_STATE_NULL);
    gst_object_unref(_pipelineStopRec);
    _pipelineStopRec = nullptr;

    // Shut each element down, downstream first.
    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);
    gst_element_set_state(_sink->mux,       GST_STATE_NULL);
    gst_element_set_state(_sink->queue,     GST_STATE_NULL);

    // Drop the extra references taken when recording started.
    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->mux);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = nullptr;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording Stopped";
}
#endif

1023
//-----------------------------------------------------------------------------
1024 1025 1026 1027 1028
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
1029 1030
void
VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
1031 1032 1033 1034
{
    Q_UNUSED(info)

    // Also unlinks and unrefs
1035
    gst_bin_remove_many(GST_BIN(_pipeline), _sink->queue, _sink->mux, _sink->filesink, nullptr);
1036 1037 1038 1039 1040 1041 1042 1043 1044

    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);

    // Create temporary pipeline
    _pipelineStopRec = gst_pipeline_new("pipeStopRec");

    // Put our elements from the recording branch into the temporary pipeline
1045 1046
    gst_bin_add_many(GST_BIN(_pipelineStopRec), _sink->queue, _sink->mux, _sink->filesink, nullptr);
    gst_element_link_many(_sink->queue, _sink->mux, _sink->filesink, nullptr);
1047

1048 1049 1050
    // Add handler for EOS event
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipelineStopRec));
    gst_bus_enable_sync_message_emission(bus);
1051
    g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
1052
    gst_object_unref(bus);
1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063 1064 1065

    if(gst_element_set_state(_pipelineStopRec, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        qCDebug(VideoReceiverLog) << "problem starting _pipelineStopRec";
    }

    // Send EOS at the beginning of the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording branch unlinked";
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_REMOVE;
    }
    VideoReceiver* self = static_cast<VideoReceiver*>(user_data);
    // The idle probe may fire more than once; the atomic flip guarantees
    // the recording branch is detached exactly once.
    if(g_atomic_int_compare_and_exchange(&self->_sink->removing, FALSE, TRUE)) {
        self->_detachRecordingBranch(info);
    }
    return GST_PAD_PROBE_REMOVE;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    // Note each buffer reaching the video sink; _updateTimer() uses the
    // recorded timestamp to detect a stalled stream.
    if(info != nullptr && user_data != nullptr) {
        static_cast<VideoReceiver*>(user_data)->_noteVideoSinkFrame();
    }

    return GST_PAD_PROBE_OK;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_noteVideoSinkFrame()
{
    // Timestamp (seconds since epoch) of the most recent frame seen by the sink.
    _lastFrameTime = QDateTime::currentSecsSinceEpoch();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_REMOVE;
    }

    GstBuffer* buf = gst_pad_probe_info_get_buffer(info);

    // Keep dropping delta frames until the first keyframe shows up.
    if(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
        return GST_PAD_PROBE_DROP;
    }

    VideoReceiver* self = static_cast<VideoReceiver*>(user_data);

    // set media file '0' offset to current timeline position - we don't want to
    // touch other elements in the graph, except these which are downstream!
    gint64 position;

    if (gst_element_query_position(self->_pipeline, GST_FORMAT_TIME, &position) != TRUE) {
        qCDebug(VideoReceiverLog) << "Unable to get timeline position, let's hope that downstream elements will survive";

        // Fall back to the buffer timestamp, then the pad's current offset.
        if (buf->pts != GST_CLOCK_TIME_NONE) {
            position = buf->pts;
        } else {
            position = gst_pad_get_offset(pad);
        }
    }

    gst_pad_set_offset(pad, position);

    // Add the filesink once we have a valid I-frame
    gst_bin_add_many(GST_BIN(self->_pipeline), self->_sink->filesink, nullptr);
    gst_element_link_many(self->_sink->mux, self->_sink->filesink, nullptr);
    gst_element_sync_state_with_parent(self->_sink->filesink);

    qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
    self->gotFirstRecordingKeyFrame();

    return GST_PAD_PROBE_REMOVE;
}
#endif

1150 1151 1152 1153 1154
//-----------------------------------------------------------------------------
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
1155 1156 1157 1158 1159 1160 1161 1162
    if(_stopping || _starting) {
        return;
    }

    if(_streaming) {
        if(!_videoRunning) {
            _videoRunning = true;
            emit videoRunningChanged();
1163
        }
1164
    } else {
1165
        if(_videoRunning) {
1166 1167 1168 1169 1170 1171 1172 1173 1174 1175 1176
            _videoRunning = false;
            emit videoRunningChanged();
        }
    }

    if(_videoRunning) {
        uint32_t timeout = 1;
        if(qgcApp()->toolbox() && qgcApp()->toolbox()->settingsManager()) {
            timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
        }

1177
        const qint64 now = QDateTime::currentSecsSinceEpoch();
1178 1179 1180 1181 1182 1183 1184 1185 1186 1187

        if(now - _lastFrameTime > timeout) {
            stop();
            // We want to start it back again with _updateTimer
            _stop = false;
        }
    } else {
		// FIXME: AV: if pipeline is _running but not _streaming for some time then we need to restart
        if(!_stop && !_running && !_uri.isEmpty() && _videoSettings->streamEnabled()->rawValue().toBool()) {
            start();
1188 1189 1190 1191 1192
        }
    }
#endif
}