GstVideoReceiver.cc 41.9 KB
Newer Older
1 2
/****************************************************************************
 *
Gus Grubba's avatar
Gus Grubba committed
3
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
4 5 6 7 8
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/
Gus Grubba's avatar
Gus Grubba committed
9 10 11 12 13


/**
 * @file
 *   @brief QGC Video Receiver
Gus Grubba's avatar
Gus Grubba committed
14
 *   @author Gus Grubba <gus@auterion.com>
Gus Grubba's avatar
Gus Grubba committed
15 16
 */

17
#include "GstVideoReceiver.h"
18

Gus Grubba's avatar
Gus Grubba committed
19
#include <QDebug>
20
#include <QUrl>
21
#include <QDateTime>
22
#include <QSysInfo>
23

24 25
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

26 27 28 29 30 31 32 33 34
//-----------------------------------------------------------------------------
// Our pipeline look like this:
//
//              +-->queue-->_decoderValve[-->_decoder-->_videoSink]
//              |
// _source-->_tee
//              |
//              +-->queue-->_recorderValve[-->_fileSink]
//
35

36 37
// Constructor: initializes all pipeline element pointers to null, spins up the
// two worker threads (API + notification), and arms the 1 Hz watchdog timer
// that monitors source/decoder frame flow.
GstVideoReceiver::GstVideoReceiver(QObject* parent)
    : VideoReceiver(parent)
    , _removingDecoder(false)
    , _removingRecorder(false)
    , _source(nullptr)
    , _tee(nullptr)
    , _decoderValve(nullptr)
    , _recorderValve(nullptr)
    , _decoder(nullptr)
    , _videoSink(nullptr)
    , _fileSink(nullptr)
    , _pipeline(nullptr)
    , _lastSourceFrameTime(0)
    , _lastVideoFrameTime(0)
    , _resetVideoSink(true)
    , _videoSinkProbeId(0)
    , _udpReconnect_us(5000000)  // RTSP source 'timeout' property, in microseconds (5 s)
    , _endOfStream(false)
{
    // All public API calls are serialized onto _apiHandler's thread; UI-facing
    // signals are emitted from _notificationHandler's thread.
    _apiHandler.start();
    _notificationHandler.start();
    connect(&_watchdogTimer, &QTimer::timeout, this, &GstVideoReceiver::_watchdog);
    // Fire the watchdog once per second.
    _watchdogTimer.start(1000);
}

61
// Destructor: tears down the pipeline first (stop() blocks until EOS/error is
// observed on the bus), then shuts the worker threads down. Order matters:
// stop() is dispatched through _apiHandler, so the handlers must still be alive.
GstVideoReceiver::~GstVideoReceiver(void)
{
    stop();
    _notificationHandler.shutdown();
    _apiHandler.shutdown();
}

68
// Builds and starts the receiving pipeline for the given stream URI:
//   _source --> _tee --> queue --> _decoderValve   (decoding branch, initially dropping)
//                   \--> queue --> _recorderValve  (recording branch, initially dropping)
// `timeout` (seconds) feeds the watchdog's stale-stream detection.
// No-op if a pipeline is already running or the URI is empty.
void
GstVideoReceiver::start(const QString& uri, unsigned timeout)
{
    // Re-dispatch onto the API worker thread if called from elsewhere.
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this, uri, timeout]() {
            start(uri, timeout);
        });
        return;
    }

    if(_pipeline) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }

    if (uri.isEmpty()) {
        qCDebug(VideoReceiverLog) << "Failed because URI is not specified";
        return;
    }

    qCDebug(VideoReceiverLog) << "Starting";

    _endOfStream = false;

    _timeout = timeout;

    bool running    = false;
    bool pipelineUp = false;  // true once elements are owned by the pipeline (see cleanup below)

    GstElement* decoderQueue = nullptr;
    GstElement* recorderQueue = nullptr;

    // do/while(0) so any construction failure can 'break' to shared cleanup.
    do {
        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('tee') failed";
            break;
        }

        GstPad* pad;

        if ((pad = gst_element_get_static_pad(_tee, "sink")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
            break;
        }

        _lastSourceFrameTime = 0;

        // Buffer probe on the tee's sink pad timestamps every incoming frame
        // (_teeProbe -> _noteTeeFrame) for the watchdog.
        gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, _teeProbe, this, nullptr);
        gst_object_unref(pad);
        pad = nullptr;

        if((decoderQueue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('queue') failed";
            break;
        }

        if((_decoderValve = gst_element_factory_make("valve", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('valve') failed";
            break;
        }

        // Both branch valves start closed; they open when decoding/recording begins.
        g_object_set(_decoderValve, "drop", TRUE, nullptr);

        if((recorderQueue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('queue') failed";
            break;
        }

        if((_recorderValve = gst_element_factory_make("valve", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('valve') failed";
            break;
        }

        g_object_set(_recorderValve, "drop", TRUE, nullptr);

        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_pipeline_new() failed";
            break;
        }

        g_object_set(_pipeline, "message-forward", TRUE, nullptr);

        if ((_source = _makeSource(uri)) == nullptr) {
            qCCritical(VideoReceiverLog) << "_makeSource() failed";
            break;
        }

        // Source pads appear dynamically; link source->tee when one shows up.
        g_signal_connect(_source, "pad-added", G_CALLBACK(_onNewPad), this);

        gst_bin_add_many(GST_BIN(_pipeline), _source, _tee, decoderQueue, _decoderValve, recorderQueue, _recorderValve, nullptr);

        // From this point on, the pipeline owns the elements' references.
        pipelineUp = true;

        if(!gst_element_link_many(_tee, decoderQueue, _decoderValve, nullptr)) {
            qCCritical(VideoReceiverLog) << "Unable to link decoder queue";
            break;
        }

        if(!gst_element_link_many(_tee, recorderQueue, _recorderValve, nullptr)) {
            qCCritical(VideoReceiverLog) << "Unable to link recorder queue";
            break;
        }

        GstBus* bus = nullptr;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = nullptr;
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-initial");
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;
    } while(0);

    if (!running) {
        qCCritical(VideoReceiverLog) << "Failed";

        // In newer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != nullptr) {
            gst_element_set_state(_pipeline, GST_STATE_NULL);
            gst_object_unref(_pipeline);
            _pipeline = nullptr;
        }

        // If we failed before adding items to the pipeline, then clean up
        // (each element still carries its own floating/initial reference).
        if (!pipelineUp) {
            if (_recorderValve != nullptr) {
                gst_object_unref(_recorderValve);
                _recorderValve = nullptr;
            }

            if (recorderQueue != nullptr) {
                gst_object_unref(recorderQueue);
                recorderQueue = nullptr;
            }

            if (_decoderValve != nullptr) {
                gst_object_unref(_decoderValve);
                _decoderValve = nullptr;
            }

            if (decoderQueue != nullptr) {
                gst_object_unref(decoderQueue);
                decoderQueue = nullptr;
            }

            if (_tee != nullptr) {
                gst_object_unref(_tee);
                _tee = nullptr;
            }

            if (_source != nullptr) {
                gst_object_unref(_source);
                _source = nullptr;
            }
        }
    } else {
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-started");
        qCDebug(VideoReceiverLog) << "Started";
    }
}
231

232
// Stops the pipeline: sends EOS, blocks until EOS or an error is seen on the
// bus, drops to NULL state, tears down any attached recording/decoding
// branches and releases the pipeline. Safe to call when not running.
void
GstVideoReceiver::stop(void)
{
    // Re-dispatch onto the API worker thread if called from elsewhere.
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this]() {
            stop();
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Stopping";

    if (_pipeline != nullptr) {
        GstBus* bus;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            // Stop async bus callbacks before we start draining synchronously.
            gst_bus_disable_sync_message_emission(bus);

            gst_element_send_event(_pipeline, gst_event_new_eos());

            GstMessage* msg;

            // Block (GST_CLOCK_TIME_NONE = forever) until the EOS propagates
            // through the pipeline or an error aborts it.
            if((msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR))) != nullptr) {
                if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
                    qCCritical(VideoReceiverLog) << "Error stopping pipeline!";
                } else if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_EOS) {
                    qCDebug(VideoReceiverLog) << "End of stream received!";
                }

                gst_message_unref(msg);
                msg = nullptr;
            } else {
                qCCritical(VideoReceiverLog) << "gst_bus_timed_pop_filtered() failed";
            }

            gst_object_unref(bus);
            bus = nullptr;
        } else {
            qCCritical(VideoReceiverLog) << "gst_pipeline_get_bus() failed";
        }

        gst_element_set_state(_pipeline, GST_STATE_NULL);

        // FIXME: check if branch is connected and remove all elements from branch
        if (_fileSink != nullptr) {
           _shutdownRecordingBranch();
        }

        if (_videoSink != nullptr) {
            _shutdownDecodingBranch();
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-stopped");

        // Unreffing the pipeline releases the elements it owns; just null out
        // our non-owning pointers to them.
        gst_object_unref(_pipeline);
        _pipeline = nullptr;

        _recorderValve = nullptr;
        _decoderValve = nullptr;
        _tee = nullptr;
        _source = nullptr;

        _lastSourceFrameTime = 0;

        if (_streaming) {
            _streaming = false;
            qCDebug(VideoReceiverLog) << "Streaming stopped";
            _notificationHandler.dispatch([this](){
                emit streamingChanged();
            });
        }
    }

    qCDebug(VideoReceiverLog) << "Stopped";
}

308
// Attaches a video sink (an opaque GstElement*, e.g. the Qt/QML sink) and, if
// the stream is already flowing, builds the decoder branch and opens the
// decoder valve. If the stream is not up yet, the sink is stored and the
// branch is completed later from _onNewSourcePad().
void
GstVideoReceiver::startDecoding(void* sink)
{
    if (sink == nullptr) {
        qCCritical(VideoReceiverLog) << "VideoSink is NULL";
        return;
    }

    if (_apiHandler.needDispatch()) {
        GstElement* videoSink = GST_ELEMENT(sink);
        // Extra ref keeps the sink alive while the lambda is queued; released
        // after the re-dispatched call has taken its own reference.
        gst_object_ref(videoSink);
        _apiHandler.dispatch([this, videoSink]() mutable {
            startDecoding(videoSink);
            gst_object_unref(videoSink);
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Starting decoding";

    // If there is no pipeline, drop any sink left over from a previous run so
    // the "already decoding" check below doesn't trip on stale state.
    if (_pipeline == nullptr) {
        if (_videoSink != nullptr) {
            gst_object_unref(_videoSink);
            _videoSink = nullptr;
        }
    }

    GstElement* videoSink = GST_ELEMENT(sink);

    if(_videoSink != nullptr || _decoding) {
        qCDebug(VideoReceiverLog) << "Already decoding!";
        return;
    }

    GstPad* pad;

    if ((pad = gst_element_get_static_pad(videoSink, "sink")) == nullptr) {
        qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink";
        return;
    }

    _lastVideoFrameTime = 0;
    _resetVideoSink = true;

    // Buffer probe timestamps every decoded frame for the watchdog.
    _videoSinkProbeId = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, _videoSinkProbe, this, nullptr);
    gst_object_unref(pad);
    pad = nullptr;

    _videoSink = videoSink;
    gst_object_ref(_videoSink);  // hold our own reference to the sink

    _removingDecoder = false;

    // Not streaming yet: the decoder branch is added once the source pad appears.
    if (!_streaming) {
        return;
    }

    if (!_addDecoder(_decoderValve)) {
        qCCritical(VideoReceiverLog) << "_addDecoder() failed";
        return;
    }

    // Open the valve so buffers start flowing into the decoder branch.
    g_object_set(_decoderValve, "drop", FALSE, nullptr);

    qCDebug(VideoReceiverLog) << "Decoding started";
}

void
376
GstVideoReceiver::stopDecoding(void)
377
{
378 379
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this]() {
380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400
            stopDecoding();
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Stopping decoding";

    // exit immediately if we are not decoding
    if (_pipeline == nullptr || !_decoding) {
        qCDebug(VideoReceiverLog) << "Not decoding!";
        return;
    }

    g_object_set(_decoderValve, "drop", TRUE, nullptr);

    _removingDecoder = true;

    _unlinkBranch(_decoderValve);
}

// Attaches a file sink (mux + filesink wrapped in a bin) to the recording
// branch and opens the recorder valve. A pad probe drops buffers until the
// first keyframe so the recorded file starts cleanly.
// `format` selects the muxer via _kFileMux.
void
GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
{
    // Re-dispatch onto the API worker thread if called from elsewhere.
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this, videoFile, format]() {
            startRecording(videoFile, format);
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Starting recording";

    if (_pipeline == nullptr) {
        qCDebug(VideoReceiverLog) << "Streaming is not active!";
        return;
    }

    if (_recording) {
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

    qCDebug(VideoReceiverLog) << "New video file:" << videoFile;

    if ((_fileSink = _makeFileSink(videoFile, format)) == nullptr) {
        qCCritical(VideoReceiverLog) << "_makeFileSink() failed";
        return;
    }

    _removingRecorder = false;

    // Keep our own reference; gst_bin_add() below takes ownership of the other.
    gst_object_ref(_fileSink);

    gst_bin_add(GST_BIN(_pipeline), _fileSink);

    // NOTE(review): on link failure _fileSink stays in the pipeline and keeps
    // our extra ref — presumably cleaned up by the next stop(); verify.
    if (!gst_element_link(_recorderValve, _fileSink)) {
        qCCritical(VideoReceiverLog) << "Failed to link valve and file sink";
        return;
    }

    gst_element_sync_state_with_parent(_fileSink);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-filesink");

    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
    GstPad* probepad;

    if ((probepad  = gst_element_get_static_pad(_recorderValve, "src")) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
        return;
    }

    gst_pad_add_probe(probepad, GST_PAD_PROBE_TYPE_BUFFER, _keyframeWatch, this, nullptr); // to drop the buffers until key frame is received
    gst_object_unref(probepad);
    probepad = nullptr;

    // Open the valve: buffers now flow into the mux/filesink.
    g_object_set(_recorderValve, "drop", FALSE, nullptr);

    _recording = true;
    qCDebug(VideoReceiverLog) << "Recording started";
    _notificationHandler.dispatch([this](){
        emit recordingChanged();
    });
}

//-----------------------------------------------------------------------------
void
469
GstVideoReceiver::stopRecording(void)
470
{
471 472
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this]() {
473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493
            stopRecording();
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Stopping recording";

    // exit immediately if we are not recording
    if (_pipeline == nullptr || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }

    g_object_set(_recorderValve, "drop", TRUE, nullptr);

    _removingRecorder = true;

    _unlinkBranch(_recorderValve);
}

void
494
GstVideoReceiver::takeScreenshot(const QString& imageFile)
495
{
496 497
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this, imageFile]() {
498 499 500 501 502 503
            takeScreenshot(imageFile);
        });
        return;
    }

    // FIXME: AV: record screenshot here
504 505 506
    _notificationHandler.dispatch([this](){
        emit screenshotComplete();
    });
507 508
}

509
// GStreamer muxer factory names indexed by (FILE_FORMAT - FILE_FORMAT_MIN);
// used by _makeFileSink(). Order must match the FILE_FORMAT enum declared in
// the header (presumably Matroska/MKV, QuickTime/MOV, MP4 — verify there).
const char* GstVideoReceiver::_kFileMux[FILE_FORMAT_MAX - FILE_FORMAT_MIN] = {
    "matroskamux",
    "qtmux",
    "mp4mux"
};

// Watchdog tick (runs every second via _watchdogTimer): emits timeout() when
// no source frames have arrived within _timeout seconds, and separately when
// the decoder has produced no frames within 2 * _timeout seconds.
void
GstVideoReceiver::_watchdog(void)
{
    // All state is owned by the API thread, so evaluate there.
    _apiHandler.dispatch([this](){
        if(_pipeline == nullptr) {
            return;
        }

        const qint64 now = QDateTime::currentSecsSinceEpoch();

        // First tick after (re)start: seed the timestamp so we measure from now.
        if (_lastSourceFrameTime == 0) {
            _lastSourceFrameTime = now;
        }

        if (now - _lastSourceFrameTime > _timeout) {
            qCDebug(VideoReceiverLog) << "Stream timeout, no frames for " << now - _lastSourceFrameTime;
            _notificationHandler.dispatch([this](){
                emit timeout();
            });
        }

        // Only monitor the decoder while it is active and not being torn down.
        if (_decoding && !_removingDecoder) {
            if (_lastVideoFrameTime == 0) {
                _lastVideoFrameTime = now;
            }

            // Decoder gets extra slack (2x) since it lags the source.
            if (now - _lastVideoFrameTime > _timeout * 2) {
                qCDebug(VideoReceiverLog) << "Video decoder timeout, no frames for " << now - _lastVideoFrameTime;
                _notificationHandler.dispatch([this](){
                    emit timeout();
                });
            }
        }
    });
}

void
552
GstVideoReceiver::_handleEOS(void)
553 554 555 556 557 558
{
    if(_pipeline == nullptr) {
        qCWarning(VideoReceiverLog) << "We should not be here";
        return;
    }

559
    if (_endOfStream) {
560 561 562 563 564 565
        stop();
    } else {
        if(_decoding && _removingDecoder) {
            _shutdownDecodingBranch();
        } else if(_recording && _removingRecorder) {
            _shutdownRecordingBranch();
566
        } /*else {
567 568
            qCWarning(VideoReceiverLog) << "Unexpected EOS!";
            stop();
569
        }*/
570 571 572 573
    }
}

// Builds the source bin for `uri`:
//   scheme-specific source [-> tsdemux] [-> rtpjitterbuffer] -> parsebin
// wrapped in a "sourcebin" GstBin whose output pads are ghost-wrapped as they
// appear. Returns the bin (caller owns it) or nullptr on failure.
// Recognized schemes: tsusb:// (Taisync), udp:// (RTP/H.264), udp265://
// (RTP/H.265), rtsp://, tcp:// (MPEG-TS), mpegts:// (MPEG-TS over UDP).
GstElement*
GstVideoReceiver::_makeSource(const QString& uri)
{
    if (uri.isEmpty()) {
        qCCritical(VideoReceiverLog) << "Failed because URI is not specified";
        return nullptr;
    }

    // NOTE: contains() is used, not startsWith(); "udp265://" also matches the
    // isUdp264 test, so order of the checks below matters.
    bool isTaisync  = uri.contains("tsusb://");
    bool isUdp264   = uri.contains("udp://");
    bool isRtsp     = uri.contains("rtsp://");
    bool isUdp265   = uri.contains("udp265://");
    bool isTcpMPEGTS= uri.contains("tcp://");
    bool isUdpMPEGTS= uri.contains("mpegts://");

    GstElement* source  = nullptr;
    GstElement* buffer  = nullptr;
    GstElement* tsdemux = nullptr;
    GstElement* parser  = nullptr;
    GstElement* bin     = nullptr;
    GstElement* srcbin  = nullptr;

    // do/while(0): any failure 'break's to the shared unref cleanup below.
    do {
        QUrl url(uri);

        if(isTcpMPEGTS) {
            if ((source = gst_element_factory_make("tcpclientsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "host", qPrintable(url.host()), "port", url.port(), nullptr);
            }
        } else if (isRtsp) {
            if ((source = gst_element_factory_make("rtspsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "location", qPrintable(uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
            }
        } else if(isUdp264 || isUdp265 || isUdpMPEGTS || isTaisync) {
            if ((source = gst_element_factory_make("udpsrc", "source")) != nullptr) {
                // Rebuild a plain udp:// URI regardless of the original scheme.
                g_object_set(static_cast<gpointer>(source), "uri", QString("udp://%1:%2").arg(qPrintable(url.host()), QString::number(url.port())).toUtf8().data(), nullptr);

                // RTP payloads need explicit caps on udpsrc; MPEG-TS/Taisync do not.
                GstCaps* caps = nullptr;

                if(isUdp264) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                } else if (isUdp265) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                }

                if (caps != nullptr) {
                    g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
                    gst_caps_unref(caps);
                    caps = nullptr;
                }
            }
        } else {
            qCDebug(VideoReceiverLog) << "URI is not recognized";
        }

        if (!source) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make() for data source failed";
            break;
        }

        if ((bin = gst_bin_new("sourcebin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sourcebin') failed";
            break;
        }

        if ((parser = gst_element_factory_make("parsebin", "parser")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('parsebin') failed";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);

        // FIXME: AV: Android does not determine MPEG2-TS via parsebin - have to explicitly state which demux to use
        // FIXME: AV: tsdemux handling is a bit ugly - let's try to find elegant solution for that later
        if (isTcpMPEGTS || isUdpMPEGTS) {
            if ((tsdemux = gst_element_factory_make("tsdemux", nullptr)) == nullptr) {
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('tsdemux') failed";
                break;
            }

            gst_bin_add(GST_BIN(bin), tsdemux);

            if (!gst_element_link(source, tsdemux)) {
                qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                break;
            }

            // From here on, treat the demuxer as the effective source.
            source = tsdemux;
            tsdemux = nullptr;
        }

        // Probe existing src pads: bit 0 = a usable pad exists now,
        // bit 1 = it is RTP (needs a jitter buffer). No pad yet -> link lazily
        // via pad-added.
        int probeRes = 0;

        gst_element_foreach_src_pad(source, _padProbe, &probeRes);

        if (probeRes & 1) {
            if (probeRes & 2) {
                if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
                    qCCritical(VideoReceiverLog) << "gst_element_factory_make('rtpjitterbuffer') failed";
                    break;
                }

                gst_bin_add(GST_BIN(bin), buffer);

                if (!gst_element_link_many(source, buffer, parser, nullptr)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            } else {
                if (!gst_element_link(source, parser)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            }
        } else {
            g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
        }

        // Expose parser output pads on the bin as ghost pads when they appear.
        g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);

        // Everything is now owned by the bin; drop locals so cleanup skips them.
        source = tsdemux = buffer = parser = nullptr;

        srcbin = bin;
        bin = nullptr;
    } while(0);

    // Failure cleanup: unref whatever was created but never handed to the bin.
    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    if (parser != nullptr) {
        gst_object_unref(parser);
        parser = nullptr;
    }

    if (tsdemux != nullptr) {
        gst_object_unref(tsdemux);
        tsdemux = nullptr;
    }

    if (buffer != nullptr) {
        gst_object_unref(buffer);
        buffer = nullptr;
    }

    if (source != nullptr) {
        gst_object_unref(source);
        source = nullptr;
    }

    return srcbin;
}

733
// Creates a decodebin whose autoplug-query signal is routed to _autoplugQuery
// with `videoSink` as user data (so hardware-capable decoders can be chosen to
// match the sink). `caps` is currently unused. Returns nullptr on failure.
GstElement*
GstVideoReceiver::_makeDecoder(GstCaps* caps, GstElement* videoSink)
{
    Q_UNUSED(caps);

    GstElement* decoder = gst_element_factory_make("decodebin", nullptr);

    if (decoder == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_factory_make('decodebin') failed";
        return nullptr;
    }

    g_signal_connect(decoder, "autoplug-query", G_CALLBACK(_autoplugQuery), videoSink);

    return decoder;
}

752
// Builds the recording sink: a "sinkbin" GstBin containing muxer (selected by
// `format` via _kFileMux) -> filesink writing to `videoFile`, with the muxer's
// requested video pad ghost-exposed as the bin's "sink" pad.
// Returns the bin (caller owns it) or nullptr on failure.
GstElement*
GstVideoReceiver::_makeFileSink(const QString& videoFile, FILE_FORMAT format)
{
    GstElement* fileSink = nullptr;
    GstElement* mux = nullptr;
    GstElement* sink = nullptr;
    GstElement* bin = nullptr;
    // Until mux/sink are added to the bin we own their refs and must release
    // them ourselves on failure.
    bool releaseElements = true;

    // do/while(0): any failure 'break's to the shared cleanup below.
    do{
        if (format < FILE_FORMAT_MIN || format >= FILE_FORMAT_MAX) {
            qCCritical(VideoReceiverLog) << "Unsupported file format";
            break;
        }

        if ((mux = gst_element_factory_make(_kFileMux[format - FILE_FORMAT_MIN], nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('" << _kFileMux[format - FILE_FORMAT_MIN] << "') failed";
            break;
        }

        if ((sink = gst_element_factory_make("filesink", nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('filesink') failed";
            break;
        }

        g_object_set(static_cast<gpointer>(sink), "location", qPrintable(videoFile), nullptr);

        if ((bin = gst_bin_new("sinkbin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sinkbin') failed";
            break;
        }

        GstPadTemplate* padTemplate;

        // Muxers expose request pads from the "video_%u" template.
        if ((padTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mux), "video_%u")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_class_get_pad_template(mux) failed";
            break;
        }

        // FIXME: AV: pad handling is potentially leaking (and other similar places too!)
        GstPad* pad;

        if ((pad = gst_element_request_pad(mux, padTemplate, nullptr, nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_request_pad(mux) failed";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), mux, sink, nullptr);

        // The bin now owns mux and sink.
        releaseElements = false;

        // Expose the muxer's request pad as the bin's external "sink" pad.
        GstPad* ghostpad = gst_ghost_pad_new("sink", pad);

        gst_element_add_pad(bin, ghostpad);

        gst_object_unref(pad);
        pad = nullptr;

        if (!gst_element_link(mux, sink)) {
            qCCritical(VideoReceiverLog) << "gst_element_link() failed";
            break;
        }

        fileSink = bin;
        bin = nullptr;
    } while(0);

    if (releaseElements) {
        if (sink != nullptr) {
            gst_object_unref(sink);
            sink = nullptr;
        }

        if (mux != nullptr) {
            gst_object_unref(mux);
            mux = nullptr;
        }
    }

    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    return fileSink;
}
838

839
// Called when the source bin exposes a new pad: links source->tee, marks the
// stream as live, installs the EOS probe, and — if a video sink was already
// registered via startDecoding() — completes the decoder branch now.
void
GstVideoReceiver::_onNewSourcePad(GstPad* pad)
{
    // FIXME: check for caps - if this is not video stream (and preferably - one of these which we have to support) then simply skip it
    if(!gst_element_link(_source, _tee)) {
        qCCritical(VideoReceiverLog) << "Unable to link source";
        return;
    }

    if (!_streaming) {
        _streaming = true;
        qCDebug(VideoReceiverLog) << "Streaming started";
        _notificationHandler.dispatch([this](){
            emit streamingChanged();
        });
    }

    // Watch downstream events on this pad so EOS can be routed to _handleEOS.
    gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, _eosProbe, this, nullptr);

    // No sink registered yet: decoder branch will be built by startDecoding().
    if (_videoSink == nullptr) {
        return;
    }

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-new-source-pad");

    if (!_addDecoder(_decoderValve)) {
        qCCritical(VideoReceiverLog) << "_addDecoder() failed";
        return;
    }

    // Open the decoder valve so buffers start flowing.
    g_object_set(_decoderValve, "drop", FALSE, nullptr);

    qCDebug(VideoReceiverLog) << "Decoding started";
}
873

874
void
875
GstVideoReceiver::_onNewDecoderPad(GstPad* pad)
876 877
{
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-new-decoder-pad");
878

879 880
    if (!_addVideoSink(pad)) {
        qCCritical(VideoReceiverLog) << "_addVideoSink() failed";
881
    }
882
}
883

884
// Creates the decoder, adds it to the pipeline, and links `src` (the decoder
// valve) to it. The video sink is attached later, from _onNewDecoderPad(),
// once the decoder exposes its output pad. Returns false on any failure.
bool
GstVideoReceiver::_addDecoder(GstElement* src)
{
    GstPad* srcpad;

    if ((srcpad = gst_element_get_static_pad(src, "src")) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
        return false;
    }

    GstCaps* caps;

    // Query the stream caps (currently unused by _makeDecoder, but passed in).
    if ((caps = gst_pad_query_caps(srcpad, nullptr)) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_pad_query_caps() failed";
        gst_object_unref(srcpad);
        srcpad = nullptr;
        return false;
    }

    gst_object_unref(srcpad);
    srcpad = nullptr;

    if ((_decoder = _makeDecoder(caps, _videoSink)) == nullptr) {
        qCCritical(VideoReceiverLog) << "_makeDecoder() failed";
        gst_caps_unref(caps);
        caps = nullptr;
        return false;
    }

    // Keep our own reference; gst_bin_add() below takes ownership of the other.
    gst_object_ref(_decoder);

    gst_caps_unref(caps);
    caps = nullptr;

    // FIXME: AV: check if srcpad exists - if it does then no need to wait for new pad
    //    int probeRes = 0;
    //    gst_element_foreach_src_pad(source, _padProbe, &probeRes);
    g_signal_connect(_decoder, "pad-added", G_CALLBACK(_onNewPad), this);

    gst_bin_add(GST_BIN(_pipeline), _decoder);

    gst_element_sync_state_with_parent(_decoder);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-decoder");

    if (!gst_element_link(src, _decoder)) {
        qCCritical(VideoReceiverLog) << "Unable to link decoder";
        return false;
    }

    return true;
}
Gus Grubba's avatar
Gus Grubba committed
936

937
bool
938
GstVideoReceiver::_addVideoSink(GstPad* pad)
939 940
{
    GstCaps* caps = gst_pad_query_caps(pad, nullptr);
941

942 943 944 945 946 947 948 949 950 951
    gst_object_ref(_videoSink); // gst_bin_add() will steal one reference

    gst_bin_add(GST_BIN(_pipeline), _videoSink);

    if(!gst_element_link(_decoder, _videoSink)) {
        gst_bin_remove(GST_BIN(_pipeline), _videoSink);
        qCCritical(VideoReceiverLog) << "Unable to link video sink";
        if (caps != nullptr) {
            gst_caps_unref(caps);
            caps = nullptr;
952
        }
953 954
        return false;
    }
955

956
    gst_element_sync_state_with_parent(_videoSink);
957

958
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-videosink");
959

960 961
    if (caps != nullptr) {
        GstStructure* s = gst_caps_get_structure(caps, 0);
962

963 964 965 966 967
        if (s != nullptr) {
            gint width, height;
            gst_structure_get_int(s, "width", &width);
            gst_structure_get_int(s, "height", &height);
            _setVideoSize(QSize(width, height));
968 969
        }

970 971 972 973 974
        gst_caps_unref(caps);
        caps = nullptr;
    } else {
        _setVideoSize(QSize(0, 0));
    }
975

976
    _decoding = true;
977 978 979 980
    qCDebug(VideoReceiverLog) << "Decoding started";
    _notificationHandler.dispatch([this](){
        emit decodingChanged();
    });
Gus Grubba's avatar
Gus Grubba committed
981

982 983
    return true;
}
Gus Grubba's avatar
Gus Grubba committed
984

985
// Pad-probe callback hook: records the arrival time of a frame at the tee so
// the watchdog can detect a stalled source.
void
GstVideoReceiver::_noteTeeFrame(void)
{
    _lastSourceFrameTime = QDateTime::currentSecsSinceEpoch();
}
Gus Grubba's avatar
Gus Grubba committed
990

991
// Pad-probe callback hook: records the arrival time of a decoded frame at the
// video sink so the watchdog can detect a stalled decoder.
void
GstVideoReceiver::_noteVideoSinkFrame(void)
{
    _lastVideoFrameTime = QDateTime::currentSecsSinceEpoch();
}
Gus Grubba's avatar
Gus Grubba committed
996

997
// Marks that a genuine end-of-stream was seen, so the next _handleEOS() call
// performs a full stop() instead of a branch teardown.
void
GstVideoReceiver::_noteEndOfStream(void)
{
    _endOfStream = true;
}
Gus Grubba's avatar
Gus Grubba committed
1002

1003 1004 1005
// -Unlink the branch from the src pad
// -Send an EOS event at the beginning of that branch
void
1006
GstVideoReceiver::_unlinkBranch(GstElement* from)
1007 1008
{
    GstPad* src;
Gus Grubba's avatar
Gus Grubba committed
1009

1010 1011 1012 1013
    if ((src = gst_element_get_static_pad(from, "src")) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
        return;
    }
Gus Grubba's avatar
Gus Grubba committed
1014

1015
    GstPad* sink;
Gus Grubba's avatar
Gus Grubba committed
1016

1017 1018 1019 1020 1021 1022
    if ((sink = gst_pad_get_peer(src)) == nullptr) {
        gst_object_unref(src);
        src = nullptr;
        qCCritical(VideoReceiverLog) << "gst_pad_get_peer() failed";
        return;
    }
1023

1024 1025 1026 1027 1028 1029 1030 1031
    if (!gst_pad_unlink(src, sink)) {
        gst_object_unref(src);
        src = nullptr;
        gst_object_unref(sink);
        sink = nullptr;
        qCCritical(VideoReceiverLog) << "gst_pad_unlink() failed";
        return;
    }
1032

1033 1034
    gst_object_unref(src);
    src = nullptr;
1035

1036 1037
    // Send EOS at the beginning of the branch
    const gboolean ret = gst_pad_send_event(sink, gst_event_new_eos());
1038

1039 1040 1041 1042 1043
    gst_object_unref(sink);
    sink = nullptr;

    if (ret) {
        qCDebug(VideoReceiverLog) << "Branch EOS was sent";
1044
    } else {
1045
        qCCritical(VideoReceiverLog) << "Branch EOS was NOT sent";
Gus Grubba's avatar
Gus Grubba committed
1046 1047 1048
    }
}

1049
void
1050
GstVideoReceiver::_shutdownDecodingBranch(void)
Gus Grubba's avatar
Gus Grubba committed
1051
{
1052 1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063
    if (_decoder != nullptr) {
        GstObject* parent;

        if ((parent = gst_element_get_parent(_decoder)) != nullptr) {
            gst_bin_remove(GST_BIN(_pipeline), _decoder);
            gst_element_set_state(_decoder, GST_STATE_NULL);
            gst_object_unref(parent);
            parent = nullptr;
        }

        gst_object_unref(_decoder);
        _decoder = nullptr;
1064
    }
1065 1066 1067 1068 1069 1070 1071

    if (_videoSinkProbeId != 0) {
        GstPad* sinkpad;
        if ((sinkpad = gst_element_get_static_pad(_videoSink, "sink")) != nullptr) {
            gst_pad_remove_probe(sinkpad, _videoSinkProbeId);
            gst_object_unref(sinkpad);
            sinkpad = nullptr;
1072
        }
1073
        _videoSinkProbeId = 0;
Gus Grubba's avatar
Gus Grubba committed
1074
    }
1075 1076 1077 1078 1079 1080 1081 1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094

    _lastVideoFrameTime = 0;

    GstObject* parent;

    if ((parent = gst_element_get_parent(_videoSink)) != nullptr) {
        gst_bin_remove(GST_BIN(_pipeline), _videoSink);
        gst_element_set_state(_videoSink, GST_STATE_NULL);
        gst_object_unref(parent);
        parent = nullptr;
    }

    gst_object_unref(_videoSink);
    _videoSink = nullptr;

    _removingDecoder = false;

    if (_decoding) {
        _decoding = false;
        qCDebug(VideoReceiverLog) << "Decoding stopped";
1095 1096 1097
        _notificationHandler.dispatch([this](){
            emit decodingChanged();
        });
1098 1099 1100
    }

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-decoding-stopped");
Gus Grubba's avatar
Gus Grubba committed
1101 1102
}

1103
// Tear down the recording branch: remove the file sink from the pipeline,
// stop it, release our reference, then notify listeners that recording
// has stopped.
void
GstVideoReceiver::_shutdownRecordingBranch(void)
{
    // Pull the file sink out of the pipeline and drop the extra reference
    // taken when the recording branch was created
    gst_bin_remove(GST_BIN(_pipeline), _fileSink);
    gst_element_set_state(_fileSink, GST_STATE_NULL);
    gst_object_unref(_fileSink);
    _fileSink = nullptr;

    _removingRecorder = false;

    if (_recording) {
        _recording = false;
        qCDebug(VideoReceiverLog) << "Recording stopped";
        _notificationHandler.dispatch([this](){
            emit recordingChanged();
        });
    }

    // Dump the resulting pipeline graph for debugging (no-op unless
    // GST_DEBUG_DUMP_DOT_DIR is set)
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording-stopped");
}

1124
// GStreamer bus watch callback (invoked on the GLib main context thread).
// Handles fatal pipeline errors, end-of-stream and bin-forwarded EOS element
// messages by dispatching the actual work onto the API handler thread.
// Always returns TRUE so the bus watch stays installed.
gboolean
GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(data);

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR:
        do {
            gchar* debug;
            GError* error;

            gst_message_parse_error(msg, &error, &debug);

            if (debug != nullptr) {
                g_free(debug);
                debug = nullptr;
            }

            if (error != nullptr) {
                qCCritical(VideoReceiverLog) << error->message;
                g_error_free(error);
                error = nullptr;
            }

            // A pipeline error is fatal - stop the receiver on its own thread
            pThis->_apiHandler.dispatch([pThis](){
                pThis->stop();
            });
        } while(0);
        break;
    case GST_MESSAGE_EOS:
        pThis->_apiHandler.dispatch([pThis](){
            pThis->_handleEOS();
        });
        break;
    case GST_MESSAGE_ELEMENT:
        do {
            const GstStructure* s = gst_message_get_structure(msg);

            // Element messages are not required to carry a structure - guard
            // against nullptr before inspecting it
            if (s == nullptr || !gst_structure_has_name(s, "GstBinForwarded")) {
                break;
            }

            GstMessage* forward_msg = nullptr;

            gst_structure_get(s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);

            if (forward_msg == nullptr) {
                break;
            }

            // EOS messages forwarded by the pipeline bin (message-forward mode)
            if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
                pThis->_apiHandler.dispatch([pThis](){
                    pThis->_handleEOS();
                });
            }

            gst_message_unref(forward_msg);
            forward_msg = nullptr;
        } while(0);
        break;
    default:
        break;
    }

    return TRUE;
}
1192

1193
void
1194
GstVideoReceiver::_onNewPad(GstElement* element, GstPad* pad, gpointer data)
1195
{
1196
    GstVideoReceiver* self = static_cast<GstVideoReceiver*>(data);
1197 1198 1199 1200 1201 1202 1203 1204 1205 1206 1207

    if (element == self->_source) {
        self->_onNewSourcePad(pad);
    } else if (element == self->_decoder) {
        self->_onNewDecoderPad(pad);
    } else {
        qCDebug(VideoReceiverLog) << "Unexpected call!";
    }
}

void
1208
GstVideoReceiver::_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
1209
{
1210 1211
    Q_UNUSED(data)

1212 1213 1214 1215
    gchar* name;

    if ((name = gst_pad_get_name(pad)) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_pad_get_name() failed";
1216 1217 1218
        return;
    }

1219 1220 1221 1222 1223 1224 1225
    GstPad* ghostpad;

    if ((ghostpad = gst_ghost_pad_new(name, pad)) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_ghost_pad_new() failed";
        g_free(name);
        name = nullptr;
        return;
1226 1227
    }

1228 1229
    g_free(name);
    name = nullptr;
1230

1231
    gst_pad_set_active(ghostpad, TRUE);
1232

1233 1234
    if (!gst_element_add_pad(GST_ELEMENT_PARENT(element), ghostpad)) {
        qCCritical(VideoReceiverLog) << "gst_element_add_pad() failed";
1235 1236 1237
    }
}

1238
void
1239
GstVideoReceiver::_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
1240
{
1241
    bool isRtpPad = false;
1242

1243
    GstCaps* filter;
1244

1245 1246
    if ((filter = gst_caps_from_string("application/x-rtp")) != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, nullptr);
1247

1248 1249 1250 1251 1252 1253
        if (caps != nullptr) {
            if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
                isRtpPad = true;
            }
            gst_caps_unref(caps);
            caps = nullptr;
1254 1255
        }

1256 1257 1258
        gst_caps_unref(filter);
        filter = nullptr;
    }
1259

1260 1261
    if (isRtpPad) {
        GstElement* buffer;
1262

1263 1264
        if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) != nullptr) {
            gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), buffer);
1265

1266
            gst_element_sync_state_with_parent(buffer);
1267

1268
            GstPad* sinkpad = gst_element_get_static_pad(buffer, "sink");
1269

1270 1271
            if (sinkpad != nullptr) {
                const GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);
1272

1273 1274
                gst_object_unref(sinkpad);
                sinkpad = nullptr;
1275

1276 1277 1278 1279 1280 1281 1282 1283 1284 1285 1286
                if (ret == GST_PAD_LINK_OK) {
                    pad = gst_element_get_static_pad(buffer, "src");
                    element = buffer;
                } else {
                    qCDebug(VideoReceiverLog) << "Partially failed - gst_pad_link()";
                }
            } else {
                qCDebug(VideoReceiverLog) << "Partially failed - gst_element_get_static_pad()";
            }
        } else {
            qCDebug(VideoReceiverLog) << "Partially failed - gst_element_factory_make('rtpjitterbuffer')";
1287
        }
1288
    }
1289

1290
    gchar* name;
1291

1292 1293 1294
    if ((name = gst_pad_get_name(pad)) != nullptr) {
        if(gst_element_link_pads(element, name, GST_ELEMENT(data), "sink") == false) {
            qCCritical(VideoReceiverLog) << "gst_element_link_pads() failed";
1295 1296
        }

1297 1298 1299 1300
        g_free(name);
        name = nullptr;
    } else {
        qCCritical(VideoReceiverLog) << "gst_pad_get_name() failed";
1301 1302 1303
    }
}

1304
// Pad iterator callback used while probing a source element's pads:
// bit 0 of *user_data records that at least one pad exists,
// bit 1 records that the pad produces RTP packets.
gboolean
GstVideoReceiver::_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
    Q_UNUSED(element)

    int* probeRes = static_cast<int*>(user_data);

    *probeRes |= 1;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, nullptr);

        if (caps != nullptr) {
            const bool padIsRtp = !gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter);

            if (padIsRtp) {
                *probeRes |= 2;
            }

            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    return TRUE;
}
1333

1334
// Answer a CAPS query on behalf of decodebin's autoplugged elements with the
// caps accepted by the glupload sink pad, so negotiation matches what the
// video sink can actually take.
gboolean
GstVideoReceiver::_autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    Q_UNUSED(bin)
    Q_UNUSED(pad)
    Q_UNUSED(element)

    GstElement* glupload = static_cast<GstElement*>(data);

    GstPad* sinkpad = gst_element_get_static_pad(glupload, "sink");

    if (sinkpad == nullptr) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    // Intersect the query's filter (if any) with what the sink pad accepts
    GstCaps* filter;

    gst_query_parse_caps(query, &filter);

    GstCaps* sinkcaps = gst_pad_query_caps(sinkpad, filter);

    gst_query_set_caps_result(query, sinkcaps);

    const gboolean answered = !gst_caps_is_empty(sinkcaps);

    gst_caps_unref(sinkcaps);
    sinkcaps = nullptr;

    gst_object_unref(sinkpad);
    sinkpad = nullptr;

    return answered;
}

1369
// Forward a CONTEXT query from decodebin's autoplugged elements to the GL
// sink's sink pad, so GL context sharing is negotiated correctly.
gboolean
GstVideoReceiver::_autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    Q_UNUSED(bin)
    Q_UNUSED(pad)
    Q_UNUSED(element)

    GstElement* glsink = static_cast<GstElement*>(data);

    GstPad* sinkpad = gst_element_get_static_pad(glsink, "sink");

    if (sinkpad == nullptr) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    // Let the sink pad answer the query directly
    const gboolean answered = gst_pad_query(sinkpad, query);

    gst_object_unref(sinkpad);
    sinkpad = nullptr;

    return answered;
}

1393
// decodebin "autoplug-query" dispatcher: CAPS and CONTEXT queries are handed
// to the dedicated helpers; all other query types are left unanswered.
gboolean
GstVideoReceiver::_autoplugQuery(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    switch (GST_QUERY_TYPE(query)) {
    case GST_QUERY_CAPS:
        return _autoplugQueryCaps(bin, pad, element, query, data);
    case GST_QUERY_CONTEXT:
        return _autoplugQueryContext(bin, pad, element, query, data);
    default:
        return FALSE;
    }
}

1413
// Pad probe on the tee: timestamps every buffer coming from the source so a
// stalled stream can be detected. Never interferes with dataflow.
GstPadProbeReturn
GstVideoReceiver::_teeProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad)
    Q_UNUSED(info)

    GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);

    if (pThis != nullptr) {
        pThis->_noteTeeFrame();
    }

    return GST_PAD_PROBE_OK;
}
1426

1427
// Pad probe on the video sink: performs a one-shot (currently disabled) sink
// reset on the first buffer after (re)start and timestamps every decoded
// frame so a stalled decoder can be detected.
GstPadProbeReturn
GstVideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad)
    Q_UNUSED(info)

    GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);

    if (pThis == nullptr) {
        return GST_PAD_PROBE_OK;
    }

    if (pThis->_resetVideoSink) {
        pThis->_resetVideoSink = false;

// FIXME: AV: this makes MPEG2-TS playing smooth but breaks RTSP
//            gst_pad_send_event(pad, gst_event_new_flush_start());
//            gst_pad_send_event(pad, gst_event_new_flush_stop(TRUE));

//            GstBuffer* buf;

//            if ((buf = gst_pad_probe_info_get_buffer(info)) != nullptr) {
//                GstSegment* seg;

//                if ((seg = gst_segment_new()) != nullptr) {
//                    gst_segment_init(seg, GST_FORMAT_TIME);

//                    seg->start = buf->pts;

//                    gst_pad_send_event(pad, gst_event_new_segment(seg));

//                    gst_segment_free(seg);
//                    seg = nullptr;
//                }

//                gst_pad_set_offset(pad, -static_cast<gint64>(buf->pts));
//            }
    }

    pThis->_noteVideoSinkFrame();

    return GST_PAD_PROBE_OK;
}

1469
// Pad probe watching for EOS events flowing down a branch that is being shut
// down; records the EOS via _noteEndOfStream(). Never blocks dataflow.
GstPadProbeReturn
GstVideoReceiver::_eosProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    Q_ASSERT(user_data != nullptr);

    if (info != nullptr) {
        GstEvent* event = gst_pad_probe_info_get_event(info);

        // The probe info may not carry an event (returns nullptr in that
        // case) - guard before dereferencing via GST_EVENT_TYPE()
        if (event != nullptr && GST_EVENT_TYPE(event) == GST_EVENT_EOS) {
            GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);
            pThis->_noteEndOfStream();
        }
    }

    return GST_PAD_PROBE_OK;
}

1487
// Pad probe installed at the start of the recording branch: drops every
// buffer until the first keyframe arrives, rebases the branch timeline to
// that keyframe's PTS, signals that recording has started and removes itself.
GstPadProbeReturn
GstVideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    if (info == nullptr || user_data == nullptr) {
        qCCritical(VideoReceiverLog) << "Invalid arguments";
        return GST_PAD_PROBE_DROP;
    }

    GstBuffer* buf = gst_pad_probe_info_get_buffer(info);

    // The probe info may not carry a buffer (returns nullptr in that case) -
    // keep dropping until we actually see one
    if (buf == nullptr) {
        return GST_PAD_PROBE_DROP;
    }

    if (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) { // wait for a keyframe
        return GST_PAD_PROBE_DROP;
    }

    // set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except these which are downstream!
    gst_pad_set_offset(pad, -static_cast<gint64>(buf->pts));

    GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);

    qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";

    pThis->recordingStarted();

    return GST_PAD_PROBE_REMOVE;
}