/****************************************************************************
 *
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/


/**
 * @file
 *   @brief QGC Video Receiver
 *   @author Gus Grubba <gus@auterion.com>
 */

#include "VideoReceiver.h"
18
#include "VideoManager.h"
19 20 21
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif
Gus Grubba's avatar
Gus Grubba committed
22
#include <QDebug>
23
#include <QUrl>
24 25
#include <QDir>
#include <QDateTime>
26
#include <QSysInfo>
27

28 29
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

30 31
#if defined(QGC_GST_STREAMING)

32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47
// Recording file extensions; indexed in lockstep with kVideoMuxes below
// (recordingFormatId selects the same entry in both tables).
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

// GStreamer muxer element names used for recording; must stay in the same
// order as kVideoExtensions.
static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

// Number of entries in kVideoMuxes (upper bound for recordingFormatId).
#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

48 49 50
#endif


Gus Grubba's avatar
Gus Grubba committed
51 52
// Construct the receiver: initialize all state flags, then wire the
// (queued) signal plumbing that routes GStreamer bus callbacks back onto
// the Qt thread, plus the 1 Hz frame watchdog timer.
VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _stop(true)
    , _sink(nullptr)
    , _tee(nullptr)
    , _pipeline(nullptr)
    , _videoSink(nullptr)
    , _lastFrameId(G_MAXUINT64)
    , _lastFrameTime(0)
    , _restart_time_ms(1389)
    , _udpReconnect_us(5000000)
#endif
    , _videoRunning(false)
    , _showFullScreen(false)
    , _streamEnabled(false)
    , _streamConfigured(false)
    , _unittTestMode(false)
    , _isTaisync(false)
{
    // FIXME: AV: temporal workaround to allow for Qt::QueuedConnection for gstreamer signals. Need to evaluate proper solution - perhaps QtGst will be helpful
#if defined(QGC_GST_STREAMING)
    _restart_timer.setSingleShot(true);
    connect(&_restart_timer, &QTimer::timeout, this, &VideoReceiver::restartTimeout);
    // Bus messages arrive on GStreamer threads; queue them to the Qt thread.
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError, Qt::QueuedConnection);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS, Qt::QueuedConnection);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged, Qt::QueuedConnection);
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

// Destructor: make sure the pipeline is torn down and release our
// reference to the externally provided video sink.
VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    stop();
    setVideoSink(nullptr);
#endif
}

96 97 98 99 100 101 102 103 104
//-----------------------------------------------------------------------------
void
VideoReceiver::grabImage(QString imageFile)
{
    // Record the destination path for the next captured frame and notify
    // listeners (via imageFileChanged) that a still-image grab is requested.
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
105
#if defined(QGC_GST_STREAMING)
106 107
// "pad-added" handler: link a dynamically created pad on 'element' to the
// "sink" pad of the element passed via 'data'.
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    //g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    // gst_pad_get_pad_template_caps() returns a new reference (transfer full);
    // it was previously leaked here.
    gst_caps_unref(p_caps);
    GstElement* sink = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, sink, "sink") == false)
        qCCritical(VideoReceiverLog) << "Failed to link elements\n";
    g_free(name);
}

121 122 123 124 125 126 127 128
// decodebin "autoplug-query" CAPS handler: answer the query with the caps
// accepted by the sink pad of the element passed via 'data' (the video sink /
// glupload), so decodebin negotiates a format the sink can take.
static gboolean
autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glupload = (GstElement* )data;

    GstPad* sinkpad = gst_element_get_static_pad(glupload, "sink");

    if (!sinkpad) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    GstCaps* filter;

    gst_query_parse_caps(query, &filter);

    GstCaps* sinkcaps = gst_pad_query_caps(sinkpad, filter);

    gst_query_set_caps_result(query, sinkcaps);

    // Handled successfully only if the resulting caps are non-empty.
    const gboolean ret = !gst_caps_is_empty(sinkcaps);

    gst_caps_unref(sinkcaps);
    sinkcaps = nullptr;

    gst_object_unref(sinkpad);
    sinkpad = nullptr;

    return ret;
}

// decodebin "autoplug-query" CONTEXT handler: forward the query to the sink
// pad of the element passed via 'data' so GL context sharing works.
static gboolean
autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glsink = (GstElement* )data;

    GstPad* sinkpad = gst_element_get_static_pad(glsink, "sink");

    if (!sinkpad){
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    const gboolean ret = gst_pad_query(sinkpad, query);

    gst_object_unref(sinkpad);
    sinkpad = nullptr;

    return ret;
}

// decodebin "autoplug-query" dispatcher: route CAPS and CONTEXT queries to
// their dedicated handlers; report any other query type as unhandled.
static gboolean
autoplugQueryCB(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    const GstQueryType queryType = GST_QUERY_TYPE(query);

    if (queryType == GST_QUERY_CAPS) {
        return autoplugQueryCaps(bin, pad, element, query, data);
    }

    if (queryType == GST_QUERY_CONTEXT) {
        return autoplugQueryContext(bin, pad, element, query, data);
    }

    return FALSE;
}

192 193 194 195 196 197 198 199 200 201 202 203 204
//-----------------------------------------------------------------------------
// "pad-added" handler: expose a newly created pad of a child element on the
// enclosing bin through a ghost pad, so the bin can be linked from outside.
static void
_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);

    GstPad* ghostpad = gst_ghost_pad_new(name, pad);

    g_free(name);

    gst_pad_set_active(ghostpad, TRUE);

    if (!gst_element_add_pad(GST_ELEMENT_PARENT(element), ghostpad)) {
        qCCritical(VideoReceiverLog) << "Failed to add ghost pad to source";
    }
}

// "pad-added" handler: if the new pad can carry RTP, splice an
// rtpjitterbuffer between it and the downstream element before linking;
// otherwise link the pad directly (via newPadCB). Failures to insert the
// buffer are non-fatal - we fall back to a direct link.
static void
_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
{
    gboolean isRtpPad = FALSE;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, nullptr);

        if (caps != nullptr) {
            if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
                isRtpPad = TRUE;
            }
            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    if (isRtpPad) {
        GstElement* buffer;

        if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) != nullptr) {
            gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), buffer);

            gst_element_sync_state_with_parent(buffer);

            GstPad* sinkpad = gst_element_get_static_pad(buffer, "sink");

            if (sinkpad != nullptr) {
                const GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);

                gst_object_unref(sinkpad);
                sinkpad = nullptr;

                if (ret == GST_PAD_LINK_OK) {
                    // Continue the final link from the jitter buffer's src pad.
                    // FIXME(review): this src-pad reference is never unreffed.
                    pad = gst_element_get_static_pad(buffer, "src");
                    element = buffer;
                } else {
                    qCDebug(VideoReceiverLog) << "Partially failed - gst_pad_link()";
                }
            } else {
                qCDebug(VideoReceiverLog) << "Partially failed - gst_element_get_static_pad()";
            }
        } else {
            qCDebug(VideoReceiverLog) << "Partially failed - gst_element_factory_make('rtpjitterbuffer')";
        }
    }

    newPadCB(element, pad, data);
}

// Per-src-pad probe used by _makeSource(): sets bit 0 of *user_data when the
// element has at least one static src pad, and bit 1 when that pad can
// produce RTP caps (so an rtpjitterbuffer is needed).
static gboolean
_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
    int* probeRes = (int*)user_data;

    *probeRes |= 1;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, nullptr);

        if (caps != nullptr) {
            if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
                *probeRes |= 2;
            }

            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    return TRUE;
}

// Build the "source bin" for the given URI: a network source element
// (tcpclientsrc / rtspsrc / udpsrc), an optional RTP jitter buffer, and a
// demuxer/parser, all wrapped in a bin whose dynamic pads are exposed via
// ghost pads (_wrapWithGhostPad). Returns nullptr if the URI is empty or
// unrecognized, or if any element creation/link fails; partially built
// elements are released on the failure path.
GstElement*
VideoReceiver::_makeSource(const QString& uri)
{
    if (uri.isEmpty()) {
        qCCritical(VideoReceiverLog) << "Failed because URI is not specified";
        return nullptr;
    }

    bool isTaisync  = uri.contains("tsusb://");
    bool isUdp264   = uri.contains("udp://");
    bool isRtsp     = uri.contains("rtsp://");
    bool isUdp265   = uri.contains("udp265://");
    bool isTcpMPEGTS= uri.contains("tcp://");
    bool isUdpMPEGTS= uri.contains("mpegts://");

    GstElement* source  = nullptr;
    GstElement* buffer  = nullptr;
    GstElement* parser  = nullptr;
    GstElement* bin     = nullptr;
    GstElement* srcbin  = nullptr;

    do {
        QUrl url(uri);

        if(isTcpMPEGTS) {
            if ((source = gst_element_factory_make("tcpclientsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "host", qPrintable(url.host()), "port", url.port(), nullptr);
            }
        } else if (isRtsp) {
            if ((source = gst_element_factory_make("rtspsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "location", qPrintable(uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
            }
        } else if(isUdp264 || isUdp265 || isUdpMPEGTS || isTaisync) {
            if ((source = gst_element_factory_make("udpsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "uri", QString("udp://%1:%2").arg(qPrintable(url.host()), QString::number(url.port())).toUtf8().data(), nullptr);

                // For plain RTP payloads we must pin the caps on udpsrc.
                GstCaps* caps = nullptr;

                if(isUdp264) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                } else if (isUdp265) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                }

                if (caps != nullptr) {
                    g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
                    gst_caps_unref(caps);
                    caps = nullptr;
                }
            }
        } else {
            qCDebug(VideoReceiverLog) << "URI is not recognized";
        }

        if (!source) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make() for data source failed";
            break;
        }

        // FIXME: AV: Android does not determine MPEG2-TS via parsebin - have to explicitly state which demux to use
        if (isTcpMPEGTS || isUdpMPEGTS) {
            if ((parser = gst_element_factory_make("tsdemux", "parser")) == nullptr) {
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('tsdemux') failed";
                break;
            }
        } else {
            if ((parser = gst_element_factory_make("parsebin", "parser")) == nullptr) {
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('parsebin') failed";
                break;
            }
        }

        if ((bin = gst_bin_new("sourcebin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sourcebin') failed";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);

        // Probe the source's static src pads: bit 0 = a pad exists,
        // bit 1 = the pad can carry RTP (see _padProbe).
        int probeRes = 0;

        gst_element_foreach_src_pad(source, _padProbe, &probeRes);

        if (probeRes & 1) {
            if (probeRes & 2) {
                if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
                    qCCritical(VideoReceiverLog) << "gst_element_factory_make('rtpjitterbuffer') failed";
                    break;
                }

                gst_bin_add(GST_BIN(bin), buffer);

                if (!gst_element_link_many(source, buffer, parser, nullptr)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            } else {
                if (!gst_element_link(source, parser)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            }
        } else {
            // No static pads yet (e.g. rtspsrc) - link when pads appear.
            g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
        }

        g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);

        // Ownership of the elements has passed to the bin.
        source = buffer = parser = nullptr;

        srcbin = bin;
        bin = nullptr;
    } while(0);

    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    if (parser != nullptr) {
        gst_object_unref(parser);
        parser = nullptr;
    }

    if (buffer != nullptr) {
        gst_object_unref(buffer);
        buffer = nullptr;
    }

    if (source != nullptr) {
        gst_object_unref(source);
        source = nullptr;
    }

    return srcbin;
}

435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500
// Whether video streaming is enabled (checked by start()).
bool VideoReceiver::streamEnabled() const
{
    return _streamEnabled;
}

// Enable/disable streaming; notifies only on an actual state transition.
void VideoReceiver::setStreamEnabled(bool enabled)
{
    if (enabled == _streamEnabled) {
        return;
    }
    _streamEnabled = enabled;
    emit streamEnabledChanged();
}

// Whether a stream source has been configured (checked by start()).
bool VideoReceiver::streamConfigured() const
{
    return _streamConfigured;
}

// Mark the stream as configured/unconfigured; notifies only on a change.
// NOTE(review): the original emitted streamEnabledChanged() here, notifying
// the wrong property; emit the configured-state signal instead.
void VideoReceiver::setStreamConfigured(bool enabled)
{
    if (_streamConfigured != enabled) {
        _streamConfigured = enabled;
        emit streamConfiguredChanged();
    }
}

// Whether the source is a Taisync link (see start() for the URI override).
bool VideoReceiver::isTaisync() const
{
    return _isTaisync;
}

// Set the Taisync flag; notifies only on a change.
// (The method name's "Taysinc" typo is part of the public interface and
// kept for caller compatibility.)
void VideoReceiver::setIsTaysinc(bool enabled)
{
    if (_isTaisync != enabled) {
        _isTaisync = enabled;
        emit isTaisyncChanged();
    }
}

// Currently configured video recording path.
QString VideoReceiver::videoPath() const
{
    return _videoPath;
}

// Update the video recording path; notifies only on an actual change.
void VideoReceiver::setVideoPath(const QString& value)
{
    if (value == _videoPath) {
        return;
    }
    _videoPath = value;
    emit videoPathChanged();
}

// Currently configured image capture path.
QString VideoReceiver::imagePath() const
{
    return _imagePath;
}

// Update the image capture path; notifies only on an actual change.
void VideoReceiver::setImagePath(const QString& value)
{
    if (value == _imagePath) {
        return;
    }
    _imagePath = value;
    emit imagePathChanged();
}

int VideoReceiver::recordingFormatId() const
501
{
502
    return _recordingFormatId;
503
}
504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 528 529 530

// Select the recording container format; notifies only on a change.
// Only indexes that address an entry of kVideoMuxes/kVideoExtensions are
// accepted: the original also let negative values through, which would
// index out of bounds when the muxer table is consulted later.
void VideoReceiver::setRecordingFormatId(int value)
{
    if (_recordingFormatId != value && value >= 0 && value < (int) NUM_MUXES) {
        _recordingFormatId = value;
        emit recordingFormatIdChanged();
    }
}

// Configured RTSP timeout value (units defined by the caller/settings).
int VideoReceiver::rtspTimeout() const
{
    return _rtspTimeout;
}

// Update the RTSP timeout; notifies only on an actual change.
void VideoReceiver::setRtspTimeout(int value)
{
    if (value == _rtspTimeout) {
        return;
    }
    _rtspTimeout = value;
    emit rtspTimeoutChanged();
}

// Enable unit-test mode: start()/stop() become no-ops so tests can run
// without a GStreamer pipeline.
void VideoReceiver::setUnittestMode(bool runUnitTests)
{
    _unittTestMode = runUnitTests;
}

531
#endif
532

533
//-----------------------------------------------------------------------------
534 535 536 537 538 539 540 541
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
542 543
void
VideoReceiver::start()
Gus Grubba's avatar
Gus Grubba committed
544
{
545
    qCDebug(VideoReceiverLog) << "Starting " << _uri;
546
    if(_unittTestMode) {
547 548
        return;
    }
549
    if(!_streamEnabled || !_streamConfigured) {
550
        qCDebug(VideoReceiverLog) << "Stream not enabled/configured";
551 552
        return;
    }
553

554
#if defined(QGC_GST_STREAMING)
555
    _stop = false;
556

557 558
    QString uri = _uri;

559
#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
560
    //-- Taisync on iOS or Android sends a raw h.264 stream
561
    if (_isTaisync) {
562 563
        uri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
    }
564 565
#endif

566
    if (uri.isEmpty()) {
567
        qCDebug(VideoReceiverLog) << "Failed because URI is not specified";
Gus Grubba's avatar
Gus Grubba committed
568 569
        return;
    }
570

Gus Grubba's avatar
Gus Grubba committed
571
    if (_videoSink == nullptr) {
572
        qCWarning(VideoReceiverLog) << "Failed because video sink is not set";
Gus Grubba's avatar
Gus Grubba committed
573 574
        return;
    }
575 576 577 578
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }
Gus Grubba's avatar
Gus Grubba committed
579

580
    _starting = true;
581

582 583 584
    _lastFrameId = G_MAXUINT64;
    _lastFrameTime = 0;

585
    bool running    = false;
586
    bool pipelineUp = false;
Gus Grubba's avatar
Gus Grubba committed
587

588 589 590
    GstElement* source  = nullptr;
    GstElement* queue   = nullptr;
    GstElement* decoder = nullptr;
591

Gus Grubba's avatar
Gus Grubba committed
592
    do {
Gus Grubba's avatar
Gus Grubba committed
593
        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
594
            qCCritical(VideoReceiverLog) << "gst_pipeline_new() failed";
Gus Grubba's avatar
Gus Grubba committed
595 596 597
            break;
        }

598 599
        g_object_set(_pipeline, "message-forward", TRUE, nullptr);

600
        if ((source = _makeSource(uri)) == nullptr) {
601
            qCCritical(VideoReceiverLog) << "_makeSource() failed";
Gus Grubba's avatar
Gus Grubba committed
602 603 604
            break;
        }

Gus Grubba's avatar
Gus Grubba committed
605
        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
606
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('tee') failed";
607 608
            break;
        }
Gus Grubba's avatar
Gus Grubba committed
609

Gus Grubba's avatar
Gus Grubba committed
610
        if((queue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
611 612
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
613
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('queue') failed";
614 615
            break;
        }
616

617
        if ((decoder = gst_element_factory_make("decodebin", "decoder")) == nullptr) {
618
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('decodebin') failed";
619 620 621
            break;
        }

622 623
        gst_bin_add_many(GST_BIN(_pipeline), source, _tee, queue, decoder, _videoSink, nullptr);

624
        pipelineUp = true;
625

626 627 628
        g_signal_connect(source, "pad-added", G_CALLBACK(newPadCB), _tee);

        if(!gst_element_link_many(_tee, queue, decoder, nullptr)) {
629
            qCCritical(VideoReceiverLog) << "Unable to receiver pipeline.";
630
            break;
631 632
        }

633 634 635
        g_signal_connect(decoder, "pad-added", G_CALLBACK(newPadCB), _videoSink);
        g_signal_connect(decoder, "autoplug-query", G_CALLBACK(autoplugQueryCB), _videoSink);

636
        source = queue = decoder = nullptr;
Gus Grubba's avatar
Gus Grubba committed
637

Gus Grubba's avatar
Gus Grubba committed
638
        GstBus* bus = nullptr;
Gus Grubba's avatar
Gus Grubba committed
639

Gus Grubba's avatar
Gus Grubba committed
640
        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
641 642 643
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
Gus Grubba's avatar
Gus Grubba committed
644
            bus = nullptr;
645
        }
Gus Grubba's avatar
Gus Grubba committed
646

647
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-paused");
Gus Grubba's avatar
Gus Grubba committed
648 649 650 651 652
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (!running) {
653
        qCCritical(VideoReceiverLog) << "Failed";
Gus Grubba's avatar
Gus Grubba committed
654

655
        // In newer versions, the pipeline will clean up all references that are added to it
Gus Grubba's avatar
Gus Grubba committed
656
        if (_pipeline != nullptr) {
657
            gst_bin_remove(GST_BIN(_pipeline), _videoSink);
658
            gst_object_unref(_pipeline);
Gus Grubba's avatar
Gus Grubba committed
659
            _pipeline = nullptr;
Gus Grubba's avatar
Gus Grubba committed
660 661
        }

662 663
        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
Gus Grubba's avatar
Gus Grubba committed
664
            if (decoder != nullptr) {
665
                gst_object_unref(decoder);
Gus Grubba's avatar
Gus Grubba committed
666
                decoder = nullptr;
667
            }
Gus Grubba's avatar
Gus Grubba committed
668

669 670 671 672 673
            if (queue != nullptr) {
                gst_object_unref(queue);
                queue = nullptr;
            }

674 675 676
            if (source != nullptr) {
                gst_object_unref(source);
                source = nullptr;
677
            }
678

Gus Grubba's avatar
Gus Grubba committed
679
            if (_tee != nullptr) {
680
                gst_object_unref(_tee);
681
                _tee = nullptr;
682
            }
683

Gus Grubba's avatar
Gus Grubba committed
684
        }
685 686 687

        _running = false;
    } else {
688
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-playing");
689 690
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
Gus Grubba's avatar
Gus Grubba committed
691
    }
692
    _starting = false;
693
#endif
Gus Grubba's avatar
Gus Grubba committed
694 695
}

696 697 698
//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
Gus Grubba's avatar
Gus Grubba committed
699
{
700
    if(_unittTestMode) {
701 702
        return;
    }
703
#if defined(QGC_GST_STREAMING)
704
    _stop = true;
705
    qCDebug(VideoReceiverLog) << "Stopping";
706 707
    if(!_streaming) {
        _shutdownPipeline();
Gus Grubba's avatar
Gus Grubba committed
708
    } else if (_pipeline != nullptr && !_stopping) {
709
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
710 711
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        gst_bus_disable_sync_message_emission(bus);
712 713
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
714
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
715
        gst_object_unref(bus);
716 717
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
718
            qCCritical(VideoReceiverLog) << "Error stopping pipeline!";
719 720 721
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
722
        gst_message_unref(message);
Gus Grubba's avatar
Gus Grubba committed
723
    }
724
#endif
Gus Grubba's avatar
Gus Grubba committed
725 726
}

727 728 729
//-----------------------------------------------------------------------------
void
VideoReceiver::setUri(const QString & uri)
Gus Grubba's avatar
Gus Grubba committed
730 731 732 733
{
    _uri = uri;
}

734
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Tear the pipeline down completely: detach the bus, drop to NULL state,
// pull the (externally owned) video sink back out before unreffing the
// pipeline, release the recording sink bookkeeping, and reset all flags.
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    GstBus* bus = nullptr;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = nullptr;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    // Remove _videoSink so it survives the pipeline's unref (we keep our own ref).
    gst_bin_remove(GST_BIN(_pipeline), _videoSink);
    _tee = nullptr;
    gst_object_unref(_pipeline);
    _pipeline = nullptr;
    delete _sink;
    _sink = nullptr;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
#endif
762

763
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Bus error handler (queued to the Qt thread): stop everything and schedule
// an automatic restart after _restart_time_ms.
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    stop();
    _restart_timer.start(_restart_time_ms);
}
#endif

773
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// End-of-stream handler (queued to the Qt thread). During an orderly stop it
// finalizes any in-progress recording branch and shuts the pipeline down;
// while recording it only finalizes the recording branch; any other EOS is
// unexpected and treated as an error (which triggers a restart).
void
VideoReceiver::_handleEOS() {
    if(_stopping) {
        if(_recording && _sink->removing) {
            _shutdownRecordingBranch();
        }
        _shutdownPipeline();
        qCDebug(VideoReceiverLog) << "Stopped";
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
        qCWarning(VideoReceiverLog) << "Unexpected EOS!";
        _handleError();
    }
}
#endif
Gus Grubba's avatar
Gus Grubba committed
791

792
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Pipeline state-change handler (queued to the Qt thread): track whether the
// pipeline is currently PLAYING.
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        //qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif

803
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// GStreamer bus "sync-message" callback. Runs on a GStreamer thread: it only
// emits signals (msg*Received) that are queued back to the Qt thread by the
// connections made in the constructor.
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    VideoReceiver* pThis = (VideoReceiver*)data;

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCCritical(VideoReceiverLog) << error->message;
        g_error_free(error);
        pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        pThis->msgStateChangedReceived();
        break;
    case(GST_MESSAGE_ELEMENT): {
        // "message-forward" wraps child-bin messages in GstBinForwarded
        // structures; unwrap them to catch EOS from the recording branch.
        const GstStructure *s = gst_message_get_structure (msg);

        if (gst_structure_has_name (s, "GstBinForwarded")) {
            GstMessage *forward_msg = NULL;
            gst_structure_get (s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);
            if (forward_msg != nullptr) {
                if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
                    pThis->msgEOSReceived();
                }
                gst_message_unref(forward_msg);
                forward_msg = nullptr;
            }
        }
    }
        break;
    default:
        break;
    }

    return TRUE;
}
#endif
852

853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Install (or clear, with nullptr) the video sink element. Takes an extra
// reference to the new sink and releases the previous one; also attaches a
// buffer probe to the sink pad for frame accounting. Refused while a
// pipeline is active.
void
VideoReceiver::setVideoSink(GstElement* videoSink)
{
    if(_pipeline != nullptr) {
        qCDebug(VideoReceiverLog) << "Video receiver pipeline is active, video sink change is not possible";
        return;
    }

    if (_videoSink != nullptr) {
        gst_object_unref(_videoSink);
        _videoSink = nullptr;
    }

    if (videoSink != nullptr) {
        _videoSink = videoSink;
        gst_object_ref(_videoSink);

        GstPad* pad = gst_element_get_static_pad(_videoSink, "sink");

        if (pad != nullptr) {
            gst_pad_add_probe(pad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER), _videoSinkProbe, this, nullptr);
            gst_object_unref(pad);
            pad = nullptr;
        } else {
            qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink";
        }
    }
}
#endif

885
//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//                         source-->tee
//                                   |
//                                   |    +---------_sink----------+
//                                   |    |                        |
//   we are adding these elements->  +->teepad-->queue-->_filesink |
//                                        |                        |
//                                        +------------------------+
#if defined(QGC_GST_STREAMING)
// Build a "sinkbin" consisting of mux (kVideoMuxes[format]) -> filesink
// writing to videoFile, exposing the muxer's "video_%u" request pad as a
// ghost "sink" pad on the bin. Returns the bin, or nullptr on failure
// (releasing everything that was created along the way).
GstElement*
VideoReceiver::_makeFileSink(const QString& videoFile, unsigned format)
{
    GstElement* fileSink = nullptr;
    GstElement* mux = nullptr;
    GstElement* sink = nullptr;
    GstElement* bin = nullptr;
    bool releaseElements = true;

    do{
        if ((mux = gst_element_factory_make(kVideoMuxes[format], nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('" << kVideoMuxes[format] << "') failed";
            break;
        }

        if ((sink = gst_element_factory_make("filesink", nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('filesink') failed";
            break;
        }

        g_object_set(static_cast<gpointer>(sink), "location", qPrintable(videoFile), nullptr);

        if ((bin = gst_bin_new("sinkbin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sinkbin') failed";
            break;
        }

        GstPadTemplate* padTemplate;

        if ((padTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mux), "video_%u")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_class_get_pad_template(mux) failed";
            break;
        }

        GstPad* pad;

        if ((pad = gst_element_request_pad(mux, padTemplate, nullptr, nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_request_pad(mux) failed";
            break;
        }

        // From here on the bin owns mux and sink: unreffing the bin on a
        // later failure disposes of them (and the requested pad) as well
        gst_bin_add_many(GST_BIN(bin), mux, sink, nullptr);

        releaseElements = false;

        GstPad* ghostpad = gst_ghost_pad_new("sink", pad);

        // The ghost pad keeps its own reference to the target pad, so our
        // request-pad reference can be dropped right away (no leak on the
        // failure paths below)
        gst_object_unref(pad);
        pad = nullptr;

        if (ghostpad == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_ghost_pad_new() failed";
            break;
        }

        gst_element_add_pad(bin, ghostpad);

        if (!gst_element_link(mux, sink)) {
            qCCritical(VideoReceiverLog) << "gst_element_link() failed";
            break;
        }

        fileSink = bin;
        bin = nullptr;
    } while(0);

    if (releaseElements) {
        if (sink != nullptr) {
            gst_object_unref(sink);
            sink = nullptr;
        }

        if (mux != nullptr) {
            gst_object_unref(mux);
            mux = nullptr;
        }
    }

    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    return fileSink;
}
#endif


981
void
982
VideoReceiver::startRecording(const QString &videoFile)
983
{
984
#if defined(QGC_GST_STREAMING)
985
    emit beforeRecording();
986

987
    qCDebug(VideoReceiverLog) << "Starting recording";
988
    // exit immediately if we are already recording
Gus Grubba's avatar
Gus Grubba committed
989
    if(_pipeline == nullptr || _recording) {
990 991 992 993
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

994
    uint32_t muxIdx = _recordingFormatId;
995
    if(muxIdx >= NUM_MUXES) {
996
        emit sendMessage(tr("Invalid video format defined."));
997 998 999
        return;
    }

1000
    QString savePath = _videoPath;
Tomaz Canabrava's avatar
Tomaz Canabrava committed
1001
    if(savePath.isEmpty()) {
1002
        emit sendMessage(tr("Unabled to record video. Video save path must be specified in Settings."));
Tomaz Canabrava's avatar
Tomaz Canabrava committed
1003
        return;
1004
    }
1005

Tomaz Canabrava's avatar
Tomaz Canabrava committed
1006 1007 1008
    _videoFile = savePath + "/"
                + (videoFile.isEmpty() ? QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") : videoFile)
                + "." + kVideoExtensions[muxIdx];
1009 1010

    qCDebug(VideoReceiverLog) << "New video file:" << _videoFile;
1011

1012
    emit videoFileChanged();
1013

1014 1015 1016 1017 1018 1019 1020
    _sink           = new Sink();
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", nullptr);
    _sink->filesink = _makeFileSink(_videoFile, muxIdx);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->filesink) {
1021
        qCCritical(VideoReceiverLog) << "Failed to make _sink elements";
1022 1023
        return;
    }
1024 1025 1026 1027

    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->filesink);

1028
    gst_bin_add(GST_BIN(_pipeline), _sink->queue);
1029 1030 1031

    gst_element_sync_state_with_parent(_sink->queue);

1032 1033 1034
    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
1035 1036
    // Once we have this valid frame, we attach the filesink.
    // Attaching it here would cause the filesink to fail to preroll and to stall the pipeline for a few seconds.
1037
    GstPad* probepad = gst_element_get_static_pad(_sink->queue, "src");
1038
    gst_pad_add_probe(probepad, GST_PAD_PROBE_TYPE_BUFFER, _keyframeWatch, this, nullptr); // to drop the buffer
1039 1040 1041
    gst_object_unref(probepad);

    // Link the recording branch to the pipeline
1042 1043 1044 1045
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

1046 1047 1048 1049 1050 1051 1052
//    // Add the filesink once we have a valid I-frame
//    gst_bin_add(GST_BIN(_pipeline), _sink->filesink);
//    if (!gst_element_link(_sink->queue, _sink->filesink)) {
//        qCritical() << "Failed to link queue and file sink";
//    }
//    gst_element_sync_state_with_parent(_sink->filesink);

1053 1054
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording");

1055 1056 1057
    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
DonLakeFlyer's avatar
DonLakeFlyer committed
1058 1059
#else
    Q_UNUSED(videoFile)
1060 1061 1062
#endif
}

1063 1064 1065
//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
1066
{
1067
#if defined(QGC_GST_STREAMING)
1068
    qCDebug(VideoReceiverLog) << "Stopping recording";
1069
    // exit immediately if we are not recording
Gus Grubba's avatar
Gus Grubba committed
1070
    if(_pipeline == nullptr || !_recording) {
1071 1072 1073 1074
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for data block before unlinking
Gus Grubba's avatar
Gus Grubba committed
1075
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, nullptr);
1076 1077 1078
#endif
}

//-----------------------------------------------------------------------------
// Final stage of stopping a recording: the branch has received EOS and the
// video file has been finalized, so the recording elements can be pulled out
// of the pipeline and our references to them released.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownRecordingBranch()
{
    // Remove both elements from the pipeline first (drops the bin's refs;
    // the elements stay alive via the extra refs taken in startRecording())
    gst_bin_remove(GST_BIN(_pipeline), _sink->queue);
    gst_bin_remove(GST_BIN(_pipeline), _sink->filesink);

    // Shut the elements down, then drop our own references
    gst_element_set_state(_sink->queue,     GST_STATE_NULL);
    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);

    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = nullptr;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording stopped";
}
#endif

//-----------------------------------------------------------------------------
// Detach the recording branch from the tee of the main _pipeline and push an
// EOS event into the head of the branch so the muxer can finalize the file.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_unlinkRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)

    // Unlink from the tee and inject EOS at the start of the branch
    GstPad* queueSinkPad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_unlink(_sink->teepad, queueSinkPad);
    gst_pad_send_event(queueSinkPad, gst_event_new_eos());
    gst_object_unref(queueSinkPad);
    qCDebug(VideoReceiverLog) << "Recording EOS was sent";

    // Give tee its request pad back and drop our reference to it
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);
}
#endif

//-----------------------------------------------------------------------------
// IDLE pad-probe callback installed by stopRecording(): runs when the tee
// pad has no buffer in flight, so the branch can be unlinked safely.
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_REMOVE;
    }

    VideoReceiver* self = static_cast<VideoReceiver*>(user_data);

    // Atomically flip 'removing' so the branch is unlinked exactly once,
    // even if the probe fires more than once
    if(g_atomic_int_compare_and_exchange(&self->_sink->removing, FALSE, TRUE)) {
        self->_unlinkRecordingBranch(info);
    }

    return GST_PAD_PROBE_REMOVE;
}
#endif


//-----------------------------------------------------------------------------
// Buffer probe on the video sink's pad: records frame arrival for the
// stall-detection logic in _updateTimer().
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    if(info != nullptr && user_data != nullptr) {
        static_cast<VideoReceiver*>(user_data)->_noteVideoSinkFrame();
    }

    return GST_PAD_PROBE_OK;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_noteVideoSinkFrame()
{
    // Timestamp of the most recent frame seen by the video sink;
    // _updateTimer() compares it against the timeout to detect a stall
    _lastFrameTime = QDateTime::currentSecsSinceEpoch();
}
#endif

//-----------------------------------------------------------------------------
// Buffer probe on the recording queue's src pad: drops buffers until the
// first keyframe, then rebases timestamps and attaches the filesink so the
// recorded file starts with a decodable frame at t=0.
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_REMOVE;
    }

    GstBuffer* buf = gst_pad_probe_info_get_buffer(info);

    // Delta units are discarded until a keyframe shows up
    if(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
        return GST_PAD_PROBE_DROP;
    }

    VideoReceiver* self = static_cast<VideoReceiver*>(user_data);

    // Set media file '0' offset to current timeline position - we don't want
    // to touch other elements in the graph, except these which are downstream!
    gst_pad_set_offset(pad, -buf->pts);

    // Add the filesink now that we have a valid I-frame
    gst_bin_add(GST_BIN(self->_pipeline), self->_sink->filesink);
    if (!gst_element_link(self->_sink->queue, self->_sink->filesink)) {
        qCCritical(VideoReceiverLog) << "Failed to link queue and file sink";
    }
    gst_element_sync_state_with_parent(self->_sink->filesink);

    qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
    self->gotFirstRecordingKeyFrame();

    return GST_PAD_PROBE_REMOVE;
}
#endif


1202 1203 1204 1205 1206
//-----------------------------------------------------------------------------
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
1207 1208 1209 1210 1211 1212 1213 1214
    if(_stopping || _starting) {
        return;
    }

    if(_streaming) {
        if(!_videoRunning) {
            _videoRunning = true;
            emit videoRunningChanged();
1215
        }
1216
    } else {
1217
        if(_videoRunning) {
1218 1219 1220 1221 1222 1223
            _videoRunning = false;
            emit videoRunningChanged();
        }
    }

    if(_videoRunning) {
1224
        uint32_t timeout = _rtspTimeout;
1225
        const qint64 now = QDateTime::currentSecsSinceEpoch();
1226 1227 1228 1229 1230 1231 1232 1233

        if(now - _lastFrameTime > timeout) {
            stop();
            // We want to start it back again with _updateTimer
            _stop = false;
        }
    } else {
		// FIXME: AV: if pipeline is _running but not _streaming for some time then we need to restart
1234
        if(!_stop && !_running && !_uri.isEmpty() && _streamEnabled) {
1235
            start();
1236 1237 1238 1239 1240
        }
    }
#endif
}