VideoReceiver.cc 37.6 KB
Newer Older
1 2
/****************************************************************************
 *
Gus Grubba's avatar
Gus Grubba committed
3
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
4 5 6 7 8
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/
Gus Grubba's avatar
Gus Grubba committed
9 10 11 12 13


/**
 * @file
 *   @brief QGC Video Receiver
Gus Grubba's avatar
Gus Grubba committed
14
 *   @author Gus Grubba <gus@auterion.com>
Gus Grubba's avatar
Gus Grubba committed
15 16 17
 */

#include "VideoReceiver.h"
18 19
#include "SettingsManager.h"
#include "QGCApplication.h"
20
#include "VideoManager.h"
21 22 23
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif
Gus Grubba's avatar
Gus Grubba committed
24
#include <QDebug>
25
#include <QUrl>
26 27
#include <QDir>
#include <QDateTime>
28
#include <QSysInfo>
29

30 31
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

32 33
#if defined(QGC_GST_STREAMING)

34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
// Recorded-video file extensions. Kept index-aligned with kVideoMuxes below:
// extension [i] is the container produced by muxer [i].
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

// GStreamer muxer element names, index-aligned with kVideoExtensions above.
static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

// Number of entries in kVideoMuxes (and, by the alignment invariant above,
// in kVideoExtensions as well).
#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

50 51 52
#endif


Gus Grubba's avatar
Gus Grubba committed
53 54
VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _stop(true)
    , _sink(nullptr)
    , _tee(nullptr)
    , _pipeline(nullptr)
    , _videoSink(nullptr)
    , _lastFrameId(G_MAXUINT64)   // sentinel: no frame seen yet
    , _lastFrameTime(0)
    , _restart_time_ms(1389)      // delay before restartVideo() after an error
    , _udpReconnect_us(5000000)   // rtspsrc timeout, microseconds
#endif
    , _videoRunning(false)
    , _showFullScreen(false)
    , _videoSettings(nullptr)
{
    // FIXME: AV: temporal workaround to allow for Qt::QueuedConnection for gstreamer signals. Need to evaluate proper solution - perhaps QtGst will be helpful
    moveToThread(qgcApp()->thread());
    _videoSettings = qgcApp()->toolbox()->settingsManager()->videoSettings();
#if defined(QGC_GST_STREAMING)
    // One-shot timer used by _handleError() to schedule a pipeline restart.
    _restart_timer.setSingleShot(true);
    connect(&_restart_timer, &QTimer::timeout, this, &VideoReceiver::_restart_timeout);
    // Bus messages arrive from GStreamer threads; queue them so the handlers
    // run on this object's (the app's) thread.
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError, Qt::QueuedConnection);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS, Qt::QueuedConnection);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged, Qt::QueuedConnection);
    // Periodic (1 s) check of stream liveness.
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    // Tear down the pipeline first, then release our reference to the sink.
    stop();
    setVideoSink(nullptr);
#endif
}

97 98 99 100 101 102 103 104 105
//-----------------------------------------------------------------------------
void
VideoReceiver::grabImage(QString imageFile)
{
    // Record the requested capture path and notify listeners; the actual
    // frame capture is triggered by whoever observes imageFileChanged().
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
106
#if defined(QGC_GST_STREAMING)
107 108
// pad-added handler: links a newly created source pad of `element` to the
// "sink" pad of the element passed through `data`.
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    //g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    // FIX: gst_pad_get_pad_template_caps() returns a reference we own;
    // it was previously leaked on every new pad.
    gst_caps_unref(p_caps);
    GstElement* sink = GST_ELEMENT(data);
    if (gst_element_link_pads(element, name, sink, "sink") == false) {
        qCCritical(VideoReceiverLog) << "Failed to link elements\n";
    }
    g_free(name);
}

122 123 124 125 126 127 128 129
// Answers a CAPS query during decodebin autoplugging by forwarding it to the
// sink pad of the downstream element supplied via `data`.
static gboolean
autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* downstream = (GstElement* )data;

    GstPad* downstreamSink = gst_element_get_static_pad(downstream, "sink");

    if (downstreamSink == nullptr) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    // Extract the (optional) filter caps carried by the query.
    GstCaps* filter;
    gst_query_parse_caps(query, &filter);

    // Ask the downstream sink pad what it can accept, honoring the filter,
    // and publish that as the query result.
    GstCaps* acceptedCaps = gst_pad_query_caps(downstreamSink, filter);
    gst_query_set_caps_result(query, acceptedCaps);

    const gboolean handled = !gst_caps_is_empty(acceptedCaps);

    gst_caps_unref(acceptedCaps);
    gst_object_unref(downstreamSink);

    return handled;
}

// Answers a CONTEXT query during decodebin autoplugging by delegating it to
// the sink pad of the downstream element supplied via `data`.
static gboolean
autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* downstream = (GstElement* )data;

    GstPad* downstreamSink = gst_element_get_static_pad(downstream, "sink");

    if (downstreamSink == nullptr) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    // Let the downstream pad answer the context query directly.
    const gboolean handled = gst_pad_query(downstreamSink, query);

    gst_object_unref(downstreamSink);

    return handled;
}

// autoplug-query dispatcher: routes CAPS and CONTEXT queries to their
// dedicated handlers; every other query type is left unhandled.
static gboolean
autoplugQueryCB(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    const GstQueryType queryType = GST_QUERY_TYPE(query);

    if (queryType == GST_QUERY_CAPS) {
        return autoplugQueryCaps(bin, pad, element, query, data);
    }

    if (queryType == GST_QUERY_CONTEXT) {
        return autoplugQueryContext(bin, pad, element, query, data);
    }

    return FALSE;
}

193 194 195 196 197 198 199 200 201 202 203 204 205
//-----------------------------------------------------------------------------
// pad-added handler: exposes a dynamically created pad of an element inside a
// bin as a ghost pad on the bin itself, so outside elements can link to it.
static void
_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);

    GstPad* ghostpad = gst_ghost_pad_new(name, pad);

    g_free(name);

    gst_pad_set_active(ghostpad, TRUE);

    if (!gst_element_add_pad(GST_ELEMENT_PARENT(element), ghostpad)) {
        qCCritical(VideoReceiverLog) << "Failed to add ghost pad to source";
        // FIX: gst_element_add_pad() only takes ownership on success; on
        // failure the floating reference was leaked.
        gst_object_unref(ghostpad);
    }
}

// pad-added handler: if the new pad carries RTP, inserts an rtpjitterbuffer
// between it and the downstream element (`data`) before linking; otherwise
// links directly via newPadCB(). Insertion failures are non-fatal - the pad
// is then linked directly.
static void
_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
{
    gboolean isRtpPad = FALSE;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, nullptr);

        if (caps != nullptr) {
            // Only treat the pad as RTP if its caps are concrete (not ANY)
            // and actually intersect application/x-rtp.
            if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
                isRtpPad = TRUE;
            }
            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    if (isRtpPad) {
        GstElement* buffer;

        if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) != nullptr) {
            // Add the jitter buffer to the same bin as `element` and bring it
            // to the bin's current state before linking.
            gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), buffer);

            gst_element_sync_state_with_parent(buffer);

            GstPad* sinkpad = gst_element_get_static_pad(buffer, "sink");

            if (sinkpad != nullptr) {
                const GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);

                gst_object_unref(sinkpad);
                sinkpad = nullptr;

                if (ret == GST_PAD_LINK_OK) {
                    // Success: the buffer's src pad becomes the pad to link
                    // downstream. NOTE(review): this pad reference appears to
                    // be intentionally kept (not unreffed) for the lifetime
                    // of the link - confirm against GStreamer ownership rules.
                    pad = gst_element_get_static_pad(buffer, "src");
                    element = buffer;
                } else {
                    qCDebug(VideoReceiverLog) << "Partially failed - gst_pad_link()";
                }
            } else {
                qCDebug(VideoReceiverLog) << "Partially failed - gst_element_get_static_pad()";
            }
        } else {
            qCDebug(VideoReceiverLog) << "Partially failed - gst_element_factory_make('rtpjitterbuffer')";
        }
    }

    // Link whichever (element, pad) pair we ended up with to `data`'s sink.
    newPadCB(element, pad, data);
}

// gst_element_foreach_src_pad() callback. Sets bit 0 of *user_data to record
// that at least one src pad exists, and bit 1 if that pad can carry RTP.
// Always returns TRUE so iteration continues over all pads.
static gboolean
_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
    int* probeRes = (int*)user_data;

    *probeRes |= 1;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter == nullptr) {
        return TRUE;
    }

    GstCaps* caps = gst_pad_query_caps(pad, nullptr);

    if (caps != nullptr) {
        if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
            *probeRes |= 2;
        }
        gst_caps_unref(caps);
    }

    gst_caps_unref(filter);

    return TRUE;
}

// Builds the "sourcebin" part of the pipeline for the given stream URI:
//   data source (udpsrc / tcpclientsrc / rtspsrc) [-> rtpjitterbuffer] -> parser/demux
// wrapped in a GstBin whose dynamic pads are exposed as ghost pads.
// Returns the bin on success, nullptr on failure (all partially created
// elements are released).
GstElement*
VideoReceiver::_makeSource(const QString& uri)
{
    if (uri.isEmpty()) {
        qCCritical(VideoReceiverLog) << "Failed because URI is not specified";
        return nullptr;
    }

    bool isTaisync  = uri.contains("tsusb://");
    bool isUdp264   = uri.contains("udp://");
    bool isRtsp     = uri.contains("rtsp://");
    bool isUdp265   = uri.contains("udp265://");
    bool isTcpMPEGTS= uri.contains("tcp://");
    bool isUdpMPEGTS= uri.contains("mpegts://");

    GstElement* source  = nullptr;
    GstElement* buffer  = nullptr;
    GstElement* parser  = nullptr;
    GstElement* bin     = nullptr;
    GstElement* srcbin  = nullptr;

    do {
        QUrl url(uri);

        if(isTcpMPEGTS) {
            if ((source = gst_element_factory_make("tcpclientsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "host", qPrintable(url.host()), "port", url.port(), nullptr);
            }
        } else if (isRtsp) {
            if ((source = gst_element_factory_make("rtspsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "location", qPrintable(uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, nullptr);
            }
        } else if(isUdp264 || isUdp265 || isUdpMPEGTS || isTaisync) {
            if ((source = gst_element_factory_make("udpsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "uri", QString("udp://%1:%2").arg(qPrintable(url.host()), QString::number(url.port())).toUtf8().data(), nullptr);

                // For plain RTP streams udpsrc cannot discover caps by
                // itself, so state them explicitly per codec.
                GstCaps* caps = nullptr;

                if(isUdp264) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                } else if (isUdp265) {
                    // FIX: this branch previously re-tested isUdp264, so H.265
                    // UDP streams never had their caps applied.
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                }

                if (caps != nullptr) {
                    g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
                    gst_caps_unref(caps);
                    caps = nullptr;
                }
            }
        } else {
            qCDebug(VideoReceiverLog) << "URI is not recognized";
        }

        if (!source) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make() for data source failed";
            break;
        }

        // FIXME: AV: Android does not determine MPEG2-TS via parsebin - have to explicitly state which demux to use
        if (isTcpMPEGTS || isUdpMPEGTS) {
            if ((parser = gst_element_factory_make("tsdemux", "parser")) == nullptr) {
                // FIX: was qCritical(VideoReceiverLog) - wrong macro for a
                // categorized message.
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('tsdemux') failed";
                break;
            }
        } else {
            if ((parser = gst_element_factory_make("parsebin", "parser")) == nullptr) {
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('parsebin') failed";
                break;
            }
        }

        if ((bin = gst_bin_new("sourcebin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sourcebin') failed";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);

        // Probe the source's static src pads (bit 0: pad exists,
        // bit 1: pad carries RTP).
        int probeRes = 0;

        gst_element_foreach_src_pad(source, _padProbe, &probeRes);

        if (probeRes & 1) {
            if (probeRes & 2) {
                // Static RTP pad: insert a jitter buffer before the parser.
                if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
                    qCCritical(VideoReceiverLog) << "gst_element_factory_make('rtpjitterbuffer') failed";
                    break;
                }

                gst_bin_add(GST_BIN(bin), buffer);

                if (!gst_element_link_many(source, buffer, parser, nullptr)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            } else {
                if (!gst_element_link(source, parser)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            }
        } else {
            // No static pads yet (e.g. rtspsrc): link lazily when pads appear.
            g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
        }

        // Expose the parser's dynamic output pads on the bin.
        g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);

        // Ownership transferred to the bin; clear locals so the cleanup
        // below does not double-free.
        source = buffer = parser = nullptr;

        srcbin = bin;
        bin = nullptr;
    } while(0);

    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    if (parser != nullptr) {
        gst_object_unref(parser);
        parser = nullptr;
    }

    if (buffer != nullptr) {
        gst_object_unref(buffer);
        buffer = nullptr;
    }

    if (source != nullptr) {
        gst_object_unref(source);
        source = nullptr;
    }

    return srcbin;
}

436 437
//-----------------------------------------------------------------------------
void
VideoReceiver::_restart_timeout()
{
    // Fired by the single-shot _restart_timer after an error; asks the
    // VideoManager to rebuild the video stream.
    qgcApp()->toolbox()->videoManager()->restartVideo();
}
442
#endif
443

444
//-----------------------------------------------------------------------------
445 446 447 448 449 450 451 452
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
453 454
void
VideoReceiver::start()
Gus Grubba's avatar
Gus Grubba committed
455
{
456
    qCDebug(VideoReceiverLog) << "Starting " << _uri;
Gus Grubba's avatar
Gus Grubba committed
457
    if(qgcApp()->runningUnitTests()) {
458 459
        return;
    }
460 461
    if(!_videoSettings->streamEnabled()->rawValue().toBool() ||
       !_videoSettings->streamConfigured()) {
462
        qCDebug(VideoReceiverLog) << "Stream not enabled/configured";
463 464
        return;
    }
465

466
#if defined(QGC_GST_STREAMING)
467
    _stop = false;
468

469 470
    QString uri = _uri;

471
#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
472
    //-- Taisync on iOS or Android sends a raw h.264 stream
473 474 475
    if (qgcApp()->toolbox()->videoManager()->isTaisync()) {
        uri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
    }
476 477
#endif

478
    if (uri.isEmpty()) {
479
        qCDebug(VideoReceiverLog) << "Failed because URI is not specified";
Gus Grubba's avatar
Gus Grubba committed
480 481
        return;
    }
482

Gus Grubba's avatar
Gus Grubba committed
483
    if (_videoSink == nullptr) {
484
        qCWarning(VideoReceiverLog) << "Failed because video sink is not set";
Gus Grubba's avatar
Gus Grubba committed
485 486
        return;
    }
487 488 489 490
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }
Gus Grubba's avatar
Gus Grubba committed
491

492
    _starting = true;
493

494 495 496
    _lastFrameId = G_MAXUINT64;
    _lastFrameTime = 0;

497
    bool running    = false;
498
    bool pipelineUp = false;
Gus Grubba's avatar
Gus Grubba committed
499

500 501 502
    GstElement* source  = nullptr;
    GstElement* queue   = nullptr;
    GstElement* decoder = nullptr;
503

Gus Grubba's avatar
Gus Grubba committed
504
    do {
Gus Grubba's avatar
Gus Grubba committed
505
        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
506
            qCCritical(VideoReceiverLog) << "gst_pipeline_new() failed";
Gus Grubba's avatar
Gus Grubba committed
507 508 509
            break;
        }

510 511
        g_object_set(_pipeline, "message-forward", TRUE, nullptr);

512
        if ((source = _makeSource(uri)) == nullptr) {
513
            qCCritical(VideoReceiverLog) << "_makeSource() failed";
Gus Grubba's avatar
Gus Grubba committed
514 515 516
            break;
        }

Gus Grubba's avatar
Gus Grubba committed
517
        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
518
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('tee') failed";
519 520
            break;
        }
Gus Grubba's avatar
Gus Grubba committed
521

Gus Grubba's avatar
Gus Grubba committed
522
        if((queue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
523 524
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
525
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('queue') failed";
526 527
            break;
        }
528

529
        if ((decoder = gst_element_factory_make("decodebin", "decoder")) == nullptr) {
530
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('decodebin') failed";
531 532 533
            break;
        }

534 535
        gst_bin_add_many(GST_BIN(_pipeline), source, _tee, queue, decoder, _videoSink, nullptr);

536
        pipelineUp = true;
537

538 539 540
        g_signal_connect(source, "pad-added", G_CALLBACK(newPadCB), _tee);

        if(!gst_element_link_many(_tee, queue, decoder, nullptr)) {
541
            qCCritical(VideoReceiverLog) << "Unable to receiver pipeline.";
542
            break;
543 544
        }

545 546 547
        g_signal_connect(decoder, "pad-added", G_CALLBACK(newPadCB), _videoSink);
        g_signal_connect(decoder, "autoplug-query", G_CALLBACK(autoplugQueryCB), _videoSink);

548
        source = queue = decoder = nullptr;
Gus Grubba's avatar
Gus Grubba committed
549

Gus Grubba's avatar
Gus Grubba committed
550
        GstBus* bus = nullptr;
Gus Grubba's avatar
Gus Grubba committed
551

Gus Grubba's avatar
Gus Grubba committed
552
        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
553 554 555
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
Gus Grubba's avatar
Gus Grubba committed
556
            bus = nullptr;
557
        }
Gus Grubba's avatar
Gus Grubba committed
558

559
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-paused");
Gus Grubba's avatar
Gus Grubba committed
560 561 562 563 564
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (!running) {
565
        qCCritical(VideoReceiverLog) << "Failed";
Gus Grubba's avatar
Gus Grubba committed
566

567
        // In newer versions, the pipeline will clean up all references that are added to it
Gus Grubba's avatar
Gus Grubba committed
568
        if (_pipeline != nullptr) {
569
            gst_bin_remove(GST_BIN(_pipeline), _videoSink);
570
            gst_object_unref(_pipeline);
Gus Grubba's avatar
Gus Grubba committed
571
            _pipeline = nullptr;
Gus Grubba's avatar
Gus Grubba committed
572 573
        }

574 575
        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
Gus Grubba's avatar
Gus Grubba committed
576
            if (decoder != nullptr) {
577
                gst_object_unref(decoder);
Gus Grubba's avatar
Gus Grubba committed
578
                decoder = nullptr;
579
            }
Gus Grubba's avatar
Gus Grubba committed
580

581 582 583 584 585
            if (queue != nullptr) {
                gst_object_unref(queue);
                queue = nullptr;
            }

586 587 588
            if (source != nullptr) {
                gst_object_unref(source);
                source = nullptr;
589
            }
590

Gus Grubba's avatar
Gus Grubba committed
591
            if (_tee != nullptr) {
592
                gst_object_unref(_tee);
593
                _tee = nullptr;
594
            }
595

Gus Grubba's avatar
Gus Grubba committed
596
        }
597 598 599

        _running = false;
    } else {
600
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-playing");
601 602
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
Gus Grubba's avatar
Gus Grubba committed
603
    }
604
    _starting = false;
605
#endif
Gus Grubba's avatar
Gus Grubba committed
606 607
}

608 609 610
//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
Gus Grubba's avatar
Gus Grubba committed
611
{
612
    if(qgcApp() && qgcApp()->runningUnitTests()) {
613 614
        return;
    }
615
#if defined(QGC_GST_STREAMING)
616
    _stop = true;
617
    qCDebug(VideoReceiverLog) << "Stopping";
618 619
    if(!_streaming) {
        _shutdownPipeline();
Gus Grubba's avatar
Gus Grubba committed
620
    } else if (_pipeline != nullptr && !_stopping) {
621
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
622 623
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        gst_bus_disable_sync_message_emission(bus);
624 625
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
626
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
627
        gst_object_unref(bus);
628 629
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
630
            qCCritical(VideoReceiverLog) << "Error stopping pipeline!";
631 632 633
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
634
        gst_message_unref(message);
Gus Grubba's avatar
Gus Grubba committed
635
    }
636
#endif
Gus Grubba's avatar
Gus Grubba committed
637 638
}

639 640 641
//-----------------------------------------------------------------------------
void
VideoReceiver::setUri(const QString & uri)
{
    // Stores the stream URI; takes effect on the next start().
    _uri = uri;
}

646
//-----------------------------------------------------------------------------
647
#if defined(QGC_GST_STREAMING)
648 649
// Tears down the whole pipeline and resets all state flags. Safe to call
// when no pipeline exists. Order matters: silence the bus first, then move
// the pipeline to NULL, then detach the (externally owned) video sink before
// dropping the pipeline's last reference.
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    GstBus* bus = nullptr;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = nullptr;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    // Remove _videoSink so the pipeline's unref below does not destroy it;
    // we keep our own reference (see setVideoSink()).
    gst_bin_remove(GST_BIN(_pipeline), _videoSink);
    _tee = nullptr;
    gst_object_unref(_pipeline);
    _pipeline = nullptr;
    // Recording branch bookkeeping (if any).
    delete _sink;
    _sink = nullptr;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
673
#endif
674

675
//-----------------------------------------------------------------------------
676
#if defined(QGC_GST_STREAMING)
677 678
// Queued handler for msgErrorReceived: stops the pipeline and schedules a
// restart via the single-shot _restart_timer (_restart_time_ms delay).
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    stop();
    _restart_timer.start(_restart_time_ms);
}
#endif

685
//-----------------------------------------------------------------------------
686
#if defined(QGC_GST_STREAMING)
687 688
void
VideoReceiver::_handleEOS() {
689
    if(_stopping) {
690 691 692
        if(_recording && _sink->removing) {
            _shutdownRecordingBranch();
        }
693
        _shutdownPipeline();
694
        qCDebug(VideoReceiverLog) << "Stopped";
695 696 697
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
698
        qCWarning(VideoReceiverLog) << "Unexpected EOS!";
699
        _handleError();
Gus Grubba's avatar
Gus Grubba committed
700 701
    }
}
702
#endif
Gus Grubba's avatar
Gus Grubba committed
703

704
//-----------------------------------------------------------------------------
705
#if defined(QGC_GST_STREAMING)
706 707 708 709
// Queued handler for msgStateChangedReceived: mirrors the pipeline's PLAYING
// state into the _streaming flag.
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline == nullptr) {
        return;
    }
    _streaming = (GST_STATE(_pipeline) == GST_STATE_PLAYING);
}
#endif

715
//-----------------------------------------------------------------------------
716
#if defined(QGC_GST_STREAMING)
717 718
// Synchronous GStreamer bus callback - runs on a GStreamer streaming thread.
// It must not touch receiver state directly; instead it emits the msg*
// signals, which are connected with Qt::QueuedConnection elsewhere in this
// file so the real handling happens on the receiver's thread.
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    VideoReceiver* pThis = (VideoReceiver*)data;

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCCritical(VideoReceiverLog) << error->message;
        g_error_free(error);
        pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        pThis->msgStateChangedReceived();
        break;
    case(GST_MESSAGE_ELEMENT): {
        // "message-forward" is enabled on the pipeline, so EOS from child
        // bins (e.g. the recording branch) arrives wrapped in a
        // GstBinForwarded element message - unwrap and forward it.
        const GstStructure *s = gst_message_get_structure (msg);

        if (gst_structure_has_name (s, "GstBinForwarded")) {
            GstMessage *forward_msg = NULL;
            gst_structure_get (s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);
            if (forward_msg != nullptr) {
                if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
                    pThis->msgEOSReceived();
                }
                gst_message_unref(forward_msg);
                forward_msg = nullptr;
            }
        }
    }
        break;
    default:
        break;
    }

    return TRUE;
}
763
#endif
764

765
//-----------------------------------------------------------------------------
766
#if defined(QGC_GST_STREAMING)
767 768
void
VideoReceiver::_cleanupOldVideos()
769
{
770
    //-- Only perform cleanup if storage limit is enabled
771
    if(_videoSettings->enableStorageLimit()->rawValue().toBool()) {
772 773 774 775 776 777 778 779
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        QDir videoDir = QDir(savePath);
        videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
        videoDir.setSorting(QDir::Time);
        //-- All the movie extensions we support
        QStringList nameFilters;
        for(uint32_t i = 0; i < NUM_MUXES; i++) {
            nameFilters << QString("*.") + QString(kVideoExtensions[i]);
780
        }
781 782 783 784 785 786
        videoDir.setNameFilters(nameFilters);
        //-- get the list of videos stored
        QFileInfoList vidList = videoDir.entryInfoList();
        if(!vidList.isEmpty()) {
            uint64_t total   = 0;
            //-- Settings are stored using MB
787
            uint64_t maxSize = (_videoSettings->maxVideoSize()->rawValue().toUInt() * 1024 * 1024);
788 789 790 791 792 793 794 795 796 797 798 799
            //-- Compute total used storage
            for(int i = 0; i < vidList.size(); i++) {
                total += vidList[i].size();
            }
            //-- Remove old movies until max size is satisfied.
            while(total >= maxSize && !vidList.isEmpty()) {
                total -= vidList.last().size();
                qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
                QFile file (vidList.last().filePath());
                file.remove();
                vidList.removeLast();
            }
800 801 802
        }
    }
}
803
#endif
804

805 806 807 808 809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Replaces the video sink element this receiver renders to. Must be called
// while no pipeline is active. Takes its own reference on the new sink and
// installs a buffer probe (_videoSinkProbe) on its sink pad, which is how
// frame arrival is detected. Passing nullptr releases the current sink.
void
VideoReceiver::setVideoSink(GstElement* videoSink)
{
    if(_pipeline != nullptr) {
        qCDebug(VideoReceiverLog) << "Video receiver pipeline is active, video sink change is not possible";
        return;
    }

    if (_videoSink != nullptr) {
        gst_object_unref(_videoSink);
        _videoSink = nullptr;
    }

    if (videoSink != nullptr) {
        _videoSink = videoSink;
        gst_object_ref(_videoSink);

        GstPad* pad = gst_element_get_static_pad(_videoSink, "sink");

        if (pad != nullptr) {
            // Fires _videoSinkProbe for every buffer reaching the sink.
            gst_pad_add_probe(pad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER), _videoSinkProbe, this, nullptr);
            gst_object_unref(pad);
            pad = nullptr;
        } else {
            qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink";
        }
    }
}
#endif

837
//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//                         source-->tee
//                                   |
//                                   |    +---------_sink----------+
//                                   |    |                        |
//   we are adding these elements->  +->teepad-->queue-->_filesink |
//                                        |                        |
//                                        +------------------------+
#if defined(QGC_GST_STREAMING)
// Build the tail of the recording branch: a bin wrapping muxer-->filesink
// with a single ghost "sink" pad, ready to be linked downstream of the
// recording queue. 'format' indexes kVideoMuxes/kVideoExtensions.
// Returns the bin on success, nullptr on failure; on failure every partially
// created element is released.
GstElement*
VideoReceiver::_makeFileSink(const QString& videoFile, unsigned format)
{
    GstElement* fileSink = nullptr;
    GstElement* mux = nullptr;
    GstElement* sink = nullptr;
    GstElement* bin = nullptr;
    bool releaseElements = true;

    do{
        if ((mux = gst_element_factory_make(kVideoMuxes[format], nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('" << kVideoMuxes[format] << "') failed";
            break;
        }

        if ((sink = gst_element_factory_make("filesink", nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('filesink') failed";
            break;
        }

        g_object_set(static_cast<gpointer>(sink), "location", qPrintable(videoFile), nullptr);

        if ((bin = gst_bin_new("sinkbin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sinkbin') failed";
            break;
        }

        GstPadTemplate* padTemplate;

        if ((padTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mux), "video_%u")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_class_get_pad_template(mux) failed";
            break;
        }

        // FIXME: AV: pad handling is potentially leaking (and other similar places too!)
        GstPad* pad;

        if ((pad = gst_element_request_pad(mux, padTemplate, nullptr, nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_request_pad(mux) failed";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), mux, sink, nullptr);

        // From here on the bin owns mux and sink; unref'ing the bin releases them.
        releaseElements = false;

        GstPad* ghostpad = gst_ghost_pad_new("sink", pad);

        // Fix: gst_ghost_pad_new() can fail; the original passed a potential
        // nullptr straight into gst_element_add_pad().
        if (ghostpad == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_ghost_pad_new() failed";
            gst_object_unref(pad);
            break;
        }

        gst_element_add_pad(bin, ghostpad);

        gst_object_unref(pad);
        pad = nullptr;

        if (!gst_element_link(mux, sink)) {
            qCCritical(VideoReceiverLog) << "gst_element_link() failed";
            break;
        }

        fileSink = bin;
        bin = nullptr;
    } while(0);

    if (releaseElements) {
        if (sink != nullptr) {
            gst_object_unref(sink);
            sink = nullptr;
        }

        if (mux != nullptr) {
            gst_object_unref(mux);
            mux = nullptr;
        }
    }

    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    return fileSink;
}
#endif
932

933
void
934
VideoReceiver::startRecording(const QString &videoFile)
935
{
936
#if defined(QGC_GST_STREAMING)
937

938
    qCDebug(VideoReceiverLog) << "Starting recording";
939
    // exit immediately if we are already recording
Gus Grubba's avatar
Gus Grubba committed
940
    if(_pipeline == nullptr || _recording) {
941 942 943 944
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

945
    uint32_t muxIdx = _videoSettings->recordingFormat()->rawValue().toUInt();
946 947 948 949 950 951 952 953
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

Tomaz Canabrava's avatar
Tomaz Canabrava committed
954 955 956 957
    QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
    if(savePath.isEmpty()) {
        qgcApp()->showMessage(tr("Unabled to record video. Video save path must be specified in Settings."));
        return;
958
    }
959

Tomaz Canabrava's avatar
Tomaz Canabrava committed
960 961 962
    _videoFile = savePath + "/"
                + (videoFile.isEmpty() ? QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") : videoFile)
                + "." + kVideoExtensions[muxIdx];
963 964

    qCDebug(VideoReceiverLog) << "New video file:" << _videoFile;
965

966
    emit videoFileChanged();
967

968 969 970 971 972 973 974
    _sink           = new Sink();
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", nullptr);
    _sink->filesink = _makeFileSink(_videoFile, muxIdx);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->filesink) {
975
        qCCritical(VideoReceiverLog) << "Failed to make _sink elements";
976 977
        return;
    }
978 979 980 981

    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->filesink);

982
    gst_bin_add(GST_BIN(_pipeline), _sink->queue);
983 984 985

    gst_element_sync_state_with_parent(_sink->queue);

986 987 988
    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
989 990
    // Once we have this valid frame, we attach the filesink.
    // Attaching it here would cause the filesink to fail to preroll and to stall the pipeline for a few seconds.
991
    GstPad* probepad = gst_element_get_static_pad(_sink->queue, "src");
992
    gst_pad_add_probe(probepad, GST_PAD_PROBE_TYPE_BUFFER, _keyframeWatch, this, nullptr); // to drop the buffer
993 994 995
    gst_object_unref(probepad);

    // Link the recording branch to the pipeline
996 997 998 999
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

1000 1001 1002 1003 1004 1005 1006
//    // Add the filesink once we have a valid I-frame
//    gst_bin_add(GST_BIN(_pipeline), _sink->filesink);
//    if (!gst_element_link(_sink->queue, _sink->filesink)) {
//        qCritical() << "Failed to link queue and file sink";
//    }
//    gst_element_sync_state_with_parent(_sink->filesink);

1007 1008
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording");

1009 1010 1011
    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
DonLakeFlyer's avatar
DonLakeFlyer committed
1012 1013
#else
    Q_UNUSED(videoFile)
1014 1015 1016
#endif
}

1017 1018 1019
//-----------------------------------------------------------------------------
// Request that the active recording be finalized. The unlink is deferred: an
// IDLE probe on the tee's request pad invokes _unlinkCallBack when no buffer
// is in flight, so the recording branch is detached between buffers.
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "Stopping recording";
    // exit immediately if we are not recording
    if(_pipeline == nullptr || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for data block before unlinking
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, nullptr);
#endif
}

1033
//-----------------------------------------------------------------------------
// Final stage of stopping a recording: by the time this runs, EOS has flushed
// the recording elements and the video file has been finalized, so the branch
// elements can be removed from the pipeline and released (dropping the extra
// refs taken in startRecording()).
// NOTE(review): an earlier design used a transient _pipelineStopRec pipeline;
// the code now operates directly on the main _pipeline.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownRecordingBranch()
{
    // Removing from the bin drops the bin's reference; our extra refs keep
    // the elements alive until the explicit unrefs below.
    gst_bin_remove(GST_BIN(_pipeline), _sink->queue);
    gst_bin_remove(GST_BIN(_pipeline), _sink->filesink);

    gst_element_set_state(_sink->queue,     GST_STATE_NULL);
    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);

    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = nullptr;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording stopped";
}
#endif

1061
//-----------------------------------------------------------------------------
// Detach the recording branch from the tee and push EOS into it so the muxer
// can write its final headers/index. Runs from the IDLE pad probe installed
// by stopRecording(), i.e. while no buffer is flowing through the tee pad.
// File finalization completes asynchronously; _shutdownRecordingBranch() does
// the teardown once EOS has propagated.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_unlinkRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)
    // Send EOS at the beginning of the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_unlink(_sink->teepad, sinkpad);
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording EOS was sent";
    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);
}
#endif

1083
//-----------------------------------------------------------------------------
// Pad-idle probe installed by stopRecording(): fires when the tee's request
// pad has no buffer in flight, the safe moment to detach the recording branch.
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    if (info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_REMOVE;
    }

    VideoReceiver* receiver = static_cast<VideoReceiver*>(user_data);

    // The probe can fire more than once; the atomic swap of the 'removing'
    // flag guarantees the branch is detached exactly once.
    if (g_atomic_int_compare_and_exchange(&receiver->_sink->removing, FALSE, TRUE)) {
        receiver->_unlinkRecordingBranch(info);
    }

    return GST_PAD_PROBE_REMOVE;
}
#endif
1099

1100 1101 1102 1103 1104 1105 1106 1107 1108 1109 1110 1111 1112 1113 1114 1115 1116 1117 1118 1119
//-----------------------------------------------------------------------------
// Buffer probe on the video sink's pad: every frame that reaches the sink
// updates the receiver's last-frame bookkeeping.
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    if (info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_OK;
    }

    static_cast<VideoReceiver*>(user_data)->_noteVideoSinkFrame();

    return GST_PAD_PROBE_OK;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_noteVideoSinkFrame()
{
1120
    _lastFrameTime = QDateTime::currentSecsSinceEpoch();
1121 1122 1123
}
#endif

1124 1125 1126 1127 1128 1129
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
Gus Grubba's avatar
Gus Grubba committed
1130
    if(info != nullptr && user_data != nullptr) {
1131 1132 1133 1134
        GstBuffer* buf = gst_pad_probe_info_get_buffer(info);
        if(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) { // wait for a keyframe
            return GST_PAD_PROBE_DROP;
        } else {
Gus Grubba's avatar
Gus Grubba committed
1135
            VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);
1136 1137

            // set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except these which are downstream!
1138
            gst_pad_set_offset(pad, -buf->pts);
1139 1140

            // Add the filesink once we have a valid I-frame
1141 1142
            gst_bin_add(GST_BIN(pThis->_pipeline), pThis->_sink->filesink);
            if (!gst_element_link(pThis->_sink->queue, pThis->_sink->filesink)) {
1143
                qCCritical(VideoReceiverLog) << "Failed to link queue and file sink";
1144
            }
1145 1146
            gst_element_sync_state_with_parent(pThis->_sink->filesink);

1147
            qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
1148
            pThis->gotFirstRecordingKeyFrame();
1149 1150 1151 1152 1153 1154 1155
        }
    }

    return GST_PAD_PROBE_REMOVE;
}
#endif

1156 1157 1158 1159 1160
//-----------------------------------------------------------------------------
// Periodic watchdog: mirrors _streaming into the externally visible
// _videoRunning flag, stops a stalled stream so it can be restarted, and
// (re)starts the pipeline when it should be running but is not.
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
    if(_stopping || _starting) {
        return;
    }

    // Sync the running flag with the stream state, signalling on transitions.
    if(_streaming != _videoRunning) {
        _videoRunning = _streaming;
        emit videoRunningChanged();
    }

    if(_videoRunning) {
        // Stall detection: no frame within the timeout means the stream died.
        // NOTE(review): the guard checks toolbox()/settingsManager() but then
        // reads _videoSettings — confirm _videoSettings lifetime matches.
        uint32_t timeout = 1;
        if(qgcApp()->toolbox() && qgcApp()->toolbox()->settingsManager()) {
            timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
        }

        const qint64 now = QDateTime::currentSecsSinceEpoch();

        if(now - _lastFrameTime > timeout) {
            stop();
            // We want to start it back again with _updateTimer
            _stop = false;
        }
    } else {
        // FIXME: AV: if pipeline is _running but not _streaming for some time then we need to restart
        if(!_stop && !_running && !_uri.isEmpty() && _videoSettings->streamEnabled()->rawValue().toBool()) {
            start();
        }
    }
#endif
}