GstVideoReceiver.cc 46.5 KB
Newer Older
1 2
/****************************************************************************
 *
Gus Grubba's avatar
Gus Grubba committed
3
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
4 5 6 7 8
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/
Gus Grubba's avatar
Gus Grubba committed
9 10 11 12 13


/**
 * @file
 *   @brief QGC Video Receiver
Gus Grubba's avatar
Gus Grubba committed
14
 *   @author Gus Grubba <gus@auterion.com>
Gus Grubba's avatar
Gus Grubba committed
15 16
 */

17
#include "GstVideoReceiver.h"
18

Gus Grubba's avatar
Gus Grubba committed
19
#include <QDebug>
20
#include <QUrl>
21
#include <QDateTime>
22
#include <QSysInfo>
23

24 25
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

26 27 28 29 30 31 32 33 34
//-----------------------------------------------------------------------------
// Our pipeline look like this:
//
//              +-->queue-->_decoderValve[-->_decoder-->_videoSink]
//              |
// _source-->_tee
//              |
//              +-->queue-->_recorderValve[-->_fileSink]
//
35

36 37
// Construct the receiver in its idle state: no pipeline, all element
// pointers null, both branches (decoder/recorder) flagged as not being
// removed.  Starts the internal slot-handler worker thread and a 1 Hz
// watchdog timer used to detect stream/decoder stalls.
GstVideoReceiver::GstVideoReceiver(QObject* parent)
    : VideoReceiver(parent)
    , _removingDecoder(false)
    , _removingRecorder(false)
    , _source(nullptr)
    , _tee(nullptr)
    , _decoderValve(nullptr)
    , _recorderValve(nullptr)
    , _decoder(nullptr)
    , _videoSink(nullptr)
    , _fileSink(nullptr)
    , _pipeline(nullptr)
    , _lastSourceFrameTime(0)
    , _lastVideoFrameTime(0)
    , _resetVideoSink(true)
    , _videoSinkProbeId(0)
    , _udpReconnect_us(5000000)   // rtspsrc 'timeout' property, microseconds
    , _signalDepth(0)
    , _endOfStream(false)
{
    // All pipeline work is serialized on this worker thread.
    _slotHandler.start();
    connect(&_watchdogTimer, &QTimer::timeout, this, &GstVideoReceiver::_watchdog);
    _watchdogTimer.start(1000);
}

61
// Tear down: stop the pipeline (no-op if already stopped) and shut down
// the worker thread so no queued lambdas run against a destroyed object.
GstVideoReceiver::~GstVideoReceiver(void)
{
    stop();
    _slotHandler.shutdown();
}

67
// Build and start the receiving pipeline for the given URI:
//
//              +-->queue-->_decoderValve[-->_decoder-->_videoSink]
//              |
// _source-->_tee
//              |
//              +-->queue-->_recorderValve[-->_fileSink]
//
// Both valves start closed ("drop" == TRUE); they are opened when decoding
// or recording is requested.  Emits onStartComplete() with the outcome.
// `timeout` (seconds) is used by the watchdog to detect a stalled stream.
void
GstVideoReceiver::start(const QString& uri, unsigned timeout)
{
    // All pipeline manipulation happens on the worker thread; re-dispatch
    // if we were called from any other thread.
    if (_needDispatch()) {
        QString cachedUri = uri;
        _slotHandler.dispatch([this, cachedUri, timeout]() {
            start(cachedUri, timeout);
        });
        return;
    }

    if(_pipeline) {
        qCDebug(VideoReceiverLog) << "Already running!";
        _dispatchSignal([this](){
            emit onStartComplete(STATUS_INVALID_STATE);
        });
        return;
    }

    if (uri.isEmpty()) {
        qCDebug(VideoReceiverLog) << "Failed because URI is not specified";
        _dispatchSignal([this](){
            emit onStartComplete(STATUS_INVALID_URL);
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Starting";

    _endOfStream = false;

    _timeout = timeout;

    bool running    = false;
    bool pipelineUp = false;

    GstElement* decoderQueue = nullptr;
    GstElement* recorderQueue = nullptr;

    do {
        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('tee') failed";
            break;
        }

        GstPad* pad;

        if ((pad = gst_element_get_static_pad(_tee, "sink")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
            break;
        }

        _lastSourceFrameTime = 0;

        // Buffer probe on the tee input feeds the stream watchdog
        // (updates _lastSourceFrameTime on every buffer).
        gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, _teeProbe, this, nullptr);
        gst_object_unref(pad);
        pad = nullptr;

        if((decoderQueue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('queue') failed";
            break;
        }

        if((_decoderValve = gst_element_factory_make("valve", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('valve') failed";
            break;
        }

        // Decoder branch starts closed until startDecoding() opens it.
        g_object_set(_decoderValve, "drop", TRUE, nullptr);

        if((recorderQueue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('queue') failed";
            break;
        }

        if((_recorderValve = gst_element_factory_make("valve", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('valve') failed";
            break;
        }

        // Recorder branch starts closed until startRecording() opens it.
        g_object_set(_recorderValve, "drop", TRUE, nullptr);

        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_pipeline_new() failed";
            break;
        }

        g_object_set(_pipeline, "message-forward", TRUE, nullptr);

        if ((_source = _makeSource(uri)) == nullptr) {
            qCCritical(VideoReceiverLog) << "_makeSource() failed";
            break;
        }

        // Source pads may appear dynamically; link source->tee when they do.
        g_signal_connect(_source, "pad-added", G_CALLBACK(_onNewPad), this);

        gst_bin_add_many(GST_BIN(_pipeline), _source, _tee, decoderQueue, _decoderValve, recorderQueue, _recorderValve, nullptr);

        // From this point the pipeline owns the elements; failure cleanup
        // below must not unref them individually.
        pipelineUp = true;

        if(!gst_element_link_many(_tee, decoderQueue, _decoderValve, nullptr)) {
            qCCritical(VideoReceiverLog) << "Unable to link decoder queue";
            break;
        }

        if(!gst_element_link_many(_tee, recorderQueue, _recorderValve, nullptr)) {
            qCCritical(VideoReceiverLog) << "Unable to link recorder queue";
            break;
        }

        GstBus* bus = nullptr;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = nullptr;
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-initial");
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;
    } while(0);

    if (!running) {
        qCCritical(VideoReceiverLog) << "Failed";

        // In newer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != nullptr) {
            gst_element_set_state(_pipeline, GST_STATE_NULL);
            gst_object_unref(_pipeline);
            _pipeline = nullptr;
        }

        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
            if (_recorderValve != nullptr) {
                gst_object_unref(_recorderValve);
                _recorderValve = nullptr;
            }

            if (recorderQueue != nullptr) {
                gst_object_unref(recorderQueue);
                recorderQueue = nullptr;
            }

            if (_decoderValve != nullptr) {
                gst_object_unref(_decoderValve);
                _decoderValve = nullptr;
            }

            if (decoderQueue != nullptr) {
                gst_object_unref(decoderQueue);
                decoderQueue = nullptr;
            }

            if (_tee != nullptr) {
                gst_object_unref(_tee);
                _tee = nullptr;
            }

            if (_source != nullptr) {
                gst_object_unref(_source);
                _source = nullptr;
            }
        }

        _dispatchSignal([this](){
            emit onStartComplete(STATUS_FAIL);
        });
    } else {
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-started");
        qCDebug(VideoReceiverLog) << "Started";

        _dispatchSignal([this](){
            emit onStartComplete(STATUS_OK);
        });
    }
}
245

246
// Stop the pipeline and release all elements.  If the recorder valve is
// open (a recording is in progress) an EOS event is pushed through the
// pipeline and this method BLOCKS on the bus until EOS or an error is
// seen, so the file muxer can finalize the output file.  Always emits
// onStopComplete(STATUS_OK); also emits streamingChanged() or timeout()
// depending on whether streaming had actually begun.
void
GstVideoReceiver::stop(void)
{
    // Serialize on the worker thread.
    if (_needDispatch()) {
        _slotHandler.dispatch([this]() {
            stop();
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Stopping";

    if (_pipeline != nullptr) {
        GstBus* bus;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            // Stop async bus callbacks before draining synchronously.
            gst_bus_disable_sync_message_emission(bus);

            g_signal_handlers_disconnect_by_data(bus, this);

            gboolean recordingValveClosed = TRUE;

            g_object_get(_recorderValve, "drop", &recordingValveClosed, nullptr);

            if (recordingValveClosed == FALSE) {
                // Recording active: push EOS and wait for it so the muxer
                // can write its trailer.  NOTE: GST_CLOCK_TIME_NONE waits
                // indefinitely.
                gst_element_send_event(_pipeline, gst_event_new_eos());

                GstMessage* msg;

                if((msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR))) != nullptr) {
                    if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
                        qCCritical(VideoReceiverLog) << "Error stopping pipeline!";
                    } else if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_EOS) {
                        qCDebug(VideoReceiverLog) << "End of stream received!";
                    }

                    gst_message_unref(msg);
                    msg = nullptr;
                } else {
                    qCCritical(VideoReceiverLog) << "gst_bus_timed_pop_filtered() failed";
                }
            }

            gst_object_unref(bus);
            bus = nullptr;
        } else {
            qCCritical(VideoReceiverLog) << "gst_pipeline_get_bus() failed";
        }

        gst_element_set_state(_pipeline, GST_STATE_NULL);

        // FIXME: check if branch is connected and remove all elements from branch
        if (_fileSink != nullptr) {
           _shutdownRecordingBranch();
        }

        if (_videoSink != nullptr) {
            _shutdownDecodingBranch();
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-stopped");

        gst_object_unref(_pipeline);
        _pipeline = nullptr;

        // These were owned by the pipeline; just drop the raw pointers.
        _recorderValve = nullptr;
        _decoderValve = nullptr;
        _tee = nullptr;
        _source = nullptr;

        _lastSourceFrameTime = 0;

        if (_streaming) {
            _streaming = false;
            qCDebug(VideoReceiverLog) << "Streaming stopped";
            _dispatchSignal([this](){
                emit streamingChanged();
            });
        } else {
            qCDebug(VideoReceiverLog) << "Streaming did not start";
           _dispatchSignal([this](){
               emit timeout();
           });
        }
    }

    qCDebug(VideoReceiverLog) << "Stopped";

    _dispatchSignal([this](){
        emit onStopComplete(STATUS_OK);
    });
}

339
// Attach a video sink (opaque GstElement*, e.g. a qml sink) and begin
// decoding.  If the stream is not flowing yet, the sink is stored and the
// decoder branch is wired up later in _onNewSourcePad().  Emits
// onStartDecodingComplete() with the outcome.
void
GstVideoReceiver::startDecoding(void* sink)
{
    if (sink == nullptr) {
        qCCritical(VideoReceiverLog) << "VideoSink is NULL";
        return;
    }

    // Serialize on the worker thread; keep the sink alive across the hop.
    if (_needDispatch()) {
        GstElement* videoSink = GST_ELEMENT(sink);
        gst_object_ref(videoSink);
        _slotHandler.dispatch([this, videoSink]() mutable {
            startDecoding(videoSink);
            gst_object_unref(videoSink);
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Starting decoding";

    // No pipeline: drop any stale sink left over from a previous run so the
    // "already decoding" check below doesn't trip on it.
    if (_pipeline == nullptr) {
        if (_videoSink != nullptr) {
            gst_object_unref(_videoSink);
            _videoSink = nullptr;
        }
    }

    GstElement* videoSink = GST_ELEMENT(sink);

    if(_videoSink != nullptr || _decoding) {
        qCDebug(VideoReceiverLog) << "Already decoding!";
        _dispatchSignal([this](){
            emit onStartDecodingComplete(STATUS_INVALID_STATE);
        });
        return;
    }

    GstPad* pad;

    if ((pad = gst_element_get_static_pad(videoSink, "sink")) == nullptr) {
        qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink";
        _dispatchSignal([this](){
            emit onStartDecodingComplete(STATUS_FAIL);
        });
        return;
    }

    _lastVideoFrameTime = 0;
    _resetVideoSink = true;

    // Buffer probe on the sink pad feeds the decoder watchdog.
    _videoSinkProbeId = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, _videoSinkProbe, this, nullptr);
    gst_object_unref(pad);
    pad = nullptr;

    _videoSink = videoSink;
    gst_object_ref(_videoSink);

    _removingDecoder = false;

    // Stream not flowing yet: decoder branch will be added when the first
    // source pad appears (_onNewSourcePad).
    if (!_streaming) {
        _dispatchSignal([this](){
            emit onStartDecodingComplete(STATUS_OK);
        });
        return;
    }

    if (!_addDecoder(_decoderValve)) {
        qCCritical(VideoReceiverLog) << "_addDecoder() failed";
        _dispatchSignal([this](){
            emit onStartDecodingComplete(STATUS_FAIL);
        });
        return;
    }

    // Open the valve so buffers reach the decoder.
    g_object_set(_decoderValve, "drop", FALSE, nullptr);

    qCDebug(VideoReceiverLog) << "Decoding started";

    _dispatchSignal([this](){
        emit onStartDecodingComplete(STATUS_OK);
    });
}

void
423
GstVideoReceiver::stopDecoding(void)
424
{
425 426
    if (_needDispatch()) {
        _slotHandler.dispatch([this]() {
427 428 429 430 431 432 433 434 435 436
            stopDecoding();
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Stopping decoding";

    // exit immediately if we are not decoding
    if (_pipeline == nullptr || !_decoding) {
        qCDebug(VideoReceiverLog) << "Not decoding!";
437
        _dispatchSignal([this](){
438 439
            emit onStopDecodingComplete(STATUS_INVALID_STATE);
        });
440 441 442 443 444 445 446
        return;
    }

    g_object_set(_decoderValve, "drop", TRUE, nullptr);

    _removingDecoder = true;

447 448 449 450
    bool ret = _unlinkBranch(_decoderValve);

    // FIXME: AV: it is much better to emit onStopDecodingComplete() after decoding is really stopped
    // (which happens later due to async design) but as for now it is also not so bad...
451
    _dispatchSignal([this, ret](){
452 453
        emit onStopDecodingComplete(ret ? STATUS_OK : STATUS_FAIL);
    });
454 455 456
}

// Begin recording the incoming stream to `videoFile` using the muxer
// selected by `format` (see _kFileMux).  Builds a file-sink bin, links it
// behind the recorder valve, installs a keyframe probe so the recording
// starts on a keyframe at t=0, then opens the valve.  Emits
// onStartRecordingComplete() with the outcome and recordingChanged() on
// success.
void
GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
{
    // Serialize on the worker thread.
    if (_needDispatch()) {
        QString cachedVideoFile = videoFile;
        _slotHandler.dispatch([this, cachedVideoFile, format]() {
            startRecording(cachedVideoFile, format);
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Starting recording";

    if (_pipeline == nullptr) {
        qCDebug(VideoReceiverLog) << "Streaming is not active!";
        _dispatchSignal([this](){
            emit onStartRecordingComplete(STATUS_INVALID_STATE);
        });
        return;
    }

    if (_recording) {
        qCDebug(VideoReceiverLog) << "Already recording!";
        _dispatchSignal([this](){
            emit onStartRecordingComplete(STATUS_INVALID_STATE);
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "New video file:" << videoFile;

    if ((_fileSink = _makeFileSink(videoFile, format)) == nullptr) {
        qCCritical(VideoReceiverLog) << "_makeFileSink() failed";
        _dispatchSignal([this](){
            emit onStartRecordingComplete(STATUS_FAIL);
        });
        return;
    }

    _removingRecorder = false;

    // Extra ref kept so the sink survives removal from the pipeline during
    // _shutdownRecordingBranch().
    gst_object_ref(_fileSink);

    gst_bin_add(GST_BIN(_pipeline), _fileSink);

    if (!gst_element_link(_recorderValve, _fileSink)) {
        qCCritical(VideoReceiverLog) << "Failed to link valve and file sink";
        _dispatchSignal([this](){
            emit onStartRecordingComplete(STATUS_FAIL);
        });
        return;
    }

    gst_element_sync_state_with_parent(_fileSink);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-filesink");

    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
    GstPad* probepad;

    if ((probepad  = gst_element_get_static_pad(_recorderValve, "src")) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
        _dispatchSignal([this](){
            emit onStartRecordingComplete(STATUS_FAIL);
        });
        return;
    }

    gst_pad_add_probe(probepad, GST_PAD_PROBE_TYPE_BUFFER, _keyframeWatch, this, nullptr); // to drop the buffers until key frame is received
    gst_object_unref(probepad);
    probepad = nullptr;

    // Open the valve so buffers start flowing into the file sink.
    g_object_set(_recorderValve, "drop", FALSE, nullptr);

    _recording = true;
    qCDebug(VideoReceiverLog) << "Recording started";
    _dispatchSignal([this](){
        emit onStartRecordingComplete(STATUS_OK);
        emit recordingChanged();
    });
}

//-----------------------------------------------------------------------------
void
542
GstVideoReceiver::stopRecording(void)
543
{
544 545
    if (_needDispatch()) {
        _slotHandler.dispatch([this]() {
546 547 548 549 550 551 552 553 554 555
            stopRecording();
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Stopping recording";

    // exit immediately if we are not recording
    if (_pipeline == nullptr || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
556
        _dispatchSignal([this](){
557 558
            emit onStopRecordingComplete(STATUS_INVALID_STATE);
        });
559 560 561 562 563 564 565
        return;
    }

    g_object_set(_recorderValve, "drop", TRUE, nullptr);

    _removingRecorder = true;

566 567 568 569
    bool ret = _unlinkBranch(_recorderValve);

    // FIXME: AV: it is much better to emit onStopRecordingComplete() after recording is really stopped
    // (which happens later due to async design) but as for now it is also not so bad...
570
    _dispatchSignal([this, ret](){
571 572
        emit onStopRecordingComplete(ret ? STATUS_OK : STATUS_FAIL);
    });
573 574 575
}

void
576
GstVideoReceiver::takeScreenshot(const QString& imageFile)
577
{
578
    if (_needDispatch()) {
579
        QString cachedImageFile = imageFile;
580
        _slotHandler.dispatch([this, cachedImageFile]() {
581
            takeScreenshot(cachedImageFile);
582 583 584 585 586
        });
        return;
    }

    // FIXME: AV: record screenshot here
587
    _dispatchSignal([this](){
588
        emit onTakeScreenshotComplete(STATUS_NOT_IMPLEMENTED);
589
    });
590 591
}

592
// GStreamer muxer factory names for recording, indexed by
// (FILE_FORMAT value - FILE_FORMAT_MIN); used by _makeFileSink().
const char* GstVideoReceiver::_kFileMux[FILE_FORMAT_MAX - FILE_FORMAT_MIN] = {
    "matroskamux",
    "qtmux",
    "mp4mux"
};

// Periodic (1 Hz) health check, run on the worker thread.  Emits timeout()
// when no source buffers have arrived for more than _timeout seconds, and
// when decoding is active but no decoded frames have arrived for more than
// 2 * _timeout seconds.  The frame timestamps are updated by the pad
// probes installed in start()/startDecoding().
void
GstVideoReceiver::_watchdog(void)
{
    _slotHandler.dispatch([this](){
        // No pipeline -> nothing to watch.
        if(_pipeline == nullptr) {
            return;
        }

        const qint64 now = QDateTime::currentSecsSinceEpoch();

        // First tick after start: prime the timestamp so the very first
        // interval is measured from now rather than from epoch 0.
        if (_lastSourceFrameTime == 0) {
            _lastSourceFrameTime = now;
        }

        if (now - _lastSourceFrameTime > _timeout) {
            qCDebug(VideoReceiverLog) << "Stream timeout, no frames for " << now - _lastSourceFrameTime;
            _dispatchSignal([this](){
                emit timeout();
            });
        }

        // Skip the decoder check while the branch is being torn down.
        if (_decoding && !_removingDecoder) {
            if (_lastVideoFrameTime == 0) {
                _lastVideoFrameTime = now;
            }

            if (now - _lastVideoFrameTime > _timeout * 2) {
                qCDebug(VideoReceiverLog) << "Video decoder timeout, no frames for " << now - _lastVideoFrameTime;
                _dispatchSignal([this](){
                    emit timeout();
                });
            }
        }
    });
}

// React to an end-of-stream condition.  A "real" EOS (_endOfStream set)
// stops the whole receiver; otherwise EOS is the expected result of a
// branch being removed (stopDecoding/stopRecording) and only that branch
// is shut down.
void
GstVideoReceiver::_handleEOS(void)
{
    if(_pipeline == nullptr) {
        qCWarning(VideoReceiverLog) << "We should not be here";
        return;
    }

    if (_endOfStream) {
        stop();
    } else {
        if(_decoding && _removingDecoder) {
            _shutdownDecodingBranch();
        } else if(_recording && _removingRecorder) {
            _shutdownRecordingBranch();
        } /*else {
            qCWarning(VideoReceiverLog) << "Unexpected EOS!";
            stop();
        }*/
    }
}

// Build the source bin for the given URI.  The bin contains the protocol
// source (tcpclientsrc / rtspsrc / udpsrc), an optional tsdemux (for
// MPEG-TS), an optional rtpjitterbuffer (when the source exposes RTP
// pads), and a parsebin.  Returns the ready-to-use bin, or nullptr on
// failure (all partially created elements are released).
// NOTE: scheme detection uses contains(), not startsWith() — the URI is
// assumed to begin with one of the recognized schemes.
GstElement*
GstVideoReceiver::_makeSource(const QString& uri)
{
    if (uri.isEmpty()) {
        qCCritical(VideoReceiverLog) << "Failed because URI is not specified";
        return nullptr;
    }

    bool isTaisync  = uri.contains("tsusb://");
    bool isUdp264   = uri.contains("udp://");
    bool isRtsp     = uri.contains("rtsp://");
    bool isUdp265   = uri.contains("udp265://");
    bool isTcpMPEGTS= uri.contains("tcp://");
    bool isUdpMPEGTS= uri.contains("mpegts://");

    GstElement* source  = nullptr;
    GstElement* buffer  = nullptr;
    GstElement* tsdemux = nullptr;
    GstElement* parser  = nullptr;
    GstElement* bin     = nullptr;
    GstElement* srcbin  = nullptr;

    do {
        QUrl url(uri);

        if(isTcpMPEGTS) {
            if ((source = gst_element_factory_make("tcpclientsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "host", qPrintable(url.host()), "port", url.port(), nullptr);
            }
        } else if (isRtsp) {
            if ((source = gst_element_factory_make("rtspsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "location", qPrintable(uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
            }
        } else if(isUdp264 || isUdp265 || isUdpMPEGTS || isTaisync) {
            if ((source = gst_element_factory_make("udpsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "uri", QString("udp://%1:%2").arg(qPrintable(url.host()), QString::number(url.port())).toUtf8().data(), nullptr);

                // RTP payload caps must be set explicitly on udpsrc —
                // they cannot be discovered from the raw datagrams.
                GstCaps* caps = nullptr;

                if(isUdp264) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                } else if (isUdp265) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                }

                if (caps != nullptr) {
                    g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
                    gst_caps_unref(caps);
                    caps = nullptr;
                }
            }
        } else {
            qCDebug(VideoReceiverLog) << "URI is not recognized";
        }

        if (!source) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make() for data source failed";
            break;
        }

        if ((bin = gst_bin_new("sourcebin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sourcebin') failed";
            break;
        }

        if ((parser = gst_element_factory_make("parsebin", "parser")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('parsebin') failed";
            break;
        }

        g_signal_connect(parser, "autoplug-query", G_CALLBACK(_filterParserCaps), nullptr);

        gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);

        // FIXME: AV: Android does not determine MPEG2-TS via parsebin - have to explicitly state which demux to use
        // FIXME: AV: tsdemux handling is a bit ugly - let's try to find elegant solution for that later
        if (isTcpMPEGTS || isUdpMPEGTS) {
            if ((tsdemux = gst_element_factory_make("tsdemux", nullptr)) == nullptr) {
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('tsdemux') failed";
                break;
            }

            gst_bin_add(GST_BIN(bin), tsdemux);

            if (!gst_element_link(source, tsdemux)) {
                qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                break;
            }

            // Downstream linking continues from the demuxer.
            source = tsdemux;
            tsdemux = nullptr;
        }

        // Probe existing src pads: bit 0 = a pad exists, bit 1 = pad carries
        // RTP (needs a jitter buffer).  No pads yet -> link dynamically.
        int probeRes = 0;

        gst_element_foreach_src_pad(source, _padProbe, &probeRes);

        if (probeRes & 1) {
            if (probeRes & 2) {
                if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
                    qCCritical(VideoReceiverLog) << "gst_element_factory_make('rtpjitterbuffer') failed";
                    break;
                }

                gst_bin_add(GST_BIN(bin), buffer);

                if (!gst_element_link_many(source, buffer, parser, nullptr)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            } else {
                if (!gst_element_link(source, parser)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            }
        } else {
            g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
        }

        g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);

        // Everything is now owned by the bin; drop the locals so the
        // cleanup section below does not double-unref.
        source = tsdemux = buffer = parser = nullptr;

        srcbin = bin;
        bin = nullptr;
    } while(0);

    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    if (parser != nullptr) {
        gst_object_unref(parser);
        parser = nullptr;
    }

    if (tsdemux != nullptr) {
        gst_object_unref(tsdemux);
        tsdemux = nullptr;
    }

    if (buffer != nullptr) {
        gst_object_unref(buffer);
        buffer = nullptr;
    }

    if (source != nullptr) {
        gst_object_unref(source);
        source = nullptr;
    }

    return srcbin;
}

818
// Create the decoder element (a decodebin).  `caps` is currently unused;
// the autoplug-query signal is routed to _autoplugQuery with the video
// sink as user data so decoder selection can match the sink's
// capabilities.  Returns nullptr on failure.
GstElement*
GstVideoReceiver::_makeDecoder(GstCaps* caps, GstElement* videoSink)
{
    Q_UNUSED(caps);

    GstElement* decoder = gst_element_factory_make("decodebin", nullptr);

    if (decoder == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_factory_make('decodebin') failed";
        return nullptr;
    }

    g_signal_connect(decoder, "autoplug-query", G_CALLBACK(_autoplugQuery), videoSink);

    return decoder;
}

837
// Build the recording sink bin: [ghost "sink" pad] -> muxer -> filesink.
// The muxer is chosen from _kFileMux by `format`; the filesink writes to
// `videoFile`.  Returns the bin, or nullptr on failure (partially created
// elements are released).
GstElement*
GstVideoReceiver::_makeFileSink(const QString& videoFile, FILE_FORMAT format)
{
    GstElement* fileSink = nullptr;
    GstElement* mux = nullptr;
    GstElement* sink = nullptr;
    GstElement* bin = nullptr;
    // Until mux/sink are added to the bin we own them and must unref on failure.
    bool releaseElements = true;

    do{
        if (format < FILE_FORMAT_MIN || format >= FILE_FORMAT_MAX) {
            qCCritical(VideoReceiverLog) << "Unsupported file format";
            break;
        }

        if ((mux = gst_element_factory_make(_kFileMux[format - FILE_FORMAT_MIN], nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('" << _kFileMux[format - FILE_FORMAT_MIN] << "') failed";
            break;
        }

        if ((sink = gst_element_factory_make("filesink", nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('filesink') failed";
            break;
        }

        g_object_set(static_cast<gpointer>(sink), "location", qPrintable(videoFile), nullptr);

        if ((bin = gst_bin_new("sinkbin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sinkbin') failed";
            break;
        }

        GstPadTemplate* padTemplate;

        if ((padTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mux), "video_%u")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_class_get_pad_template(mux) failed";
            break;
        }

        // FIXME: AV: pad handling is potentially leaking (and other similar places too!)
        GstPad* pad;

        if ((pad = gst_element_request_pad(mux, padTemplate, nullptr, nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_request_pad(mux) failed";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), mux, sink, nullptr);

        // mux/sink are now owned by the bin; unref of `bin` below releases them.
        releaseElements = false;

        GstPad* ghostpad;

        // Bug fix: gst_ghost_pad_new() can return nullptr; the original code
        // passed the unchecked result straight to gst_element_add_pad() and
        // leaked the requested mux pad on that path.
        if ((ghostpad = gst_ghost_pad_new("sink", pad)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_ghost_pad_new() failed";
            gst_object_unref(pad);
            pad = nullptr;
            break;
        }

        gst_element_add_pad(bin, ghostpad);

        gst_object_unref(pad);
        pad = nullptr;

        if (!gst_element_link(mux, sink)) {
            qCCritical(VideoReceiverLog) << "gst_element_link() failed";
            break;
        }

        fileSink = bin;
        bin = nullptr;
    } while(0);

    if (releaseElements) {
        if (sink != nullptr) {
            gst_object_unref(sink);
            sink = nullptr;
        }

        if (mux != nullptr) {
            gst_object_unref(mux);
            mux = nullptr;
        }
    }

    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    return fileSink;
}
923

924
// Called (on the streaming thread) when the source element exposes a new pad.
// Links the source into the tee, flags the stream as active, installs the EOS
// probe and — if a video sink was requested — brings up the decoding branch.
void
GstVideoReceiver::_onNewSourcePad(GstPad* pad)
{
    // FIXME: check for caps - if this is not video stream (and preferably - one of these which we have to support) then simply skip it
    if(!gst_element_link(_source, _tee)) {
        qCCritical(VideoReceiverLog) << "Unable to link source";
        return;
    }

    if (!_streaming) {
        _streaming = true;
        qCDebug(VideoReceiverLog) << "Streaming started";
        // Signal emission is routed through _dispatchSignal so listeners run
        // in the expected thread context.
        _dispatchSignal([this](){
            emit streamingChanged();
        });
    }

    // Watch downstream events on this pad so an EOS can be detected.
    gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, _eosProbe, this, nullptr);

    // No decoding requested yet - only streaming/recording branches are live.
    if (_videoSink == nullptr) {
        return;
    }

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-new-source-pad");

    if (!_addDecoder(_decoderValve)) {
        qCCritical(VideoReceiverLog) << "_addDecoder() failed";
        return;
    }

    // Open the valve so buffers start flowing into the decoder branch.
    g_object_set(_decoderValve, "drop", FALSE, nullptr);

    qCDebug(VideoReceiverLog) << "Decoding started";
}
958

959
void
960
GstVideoReceiver::_onNewDecoderPad(GstPad* pad)
961 962
{
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-new-decoder-pad");
963

964 965
    if (!_addVideoSink(pad)) {
        qCCritical(VideoReceiverLog) << "_addVideoSink() failed";
966
    }
967
}
968

969
// Creates a decoder matched to the caps coming out of 'src', adds it to the
// pipeline and links it. The decoder's decoded pad appears later via the
// "pad-added" signal (handled by _onNewPad -> _onNewDecoderPad).
// Returns false on any failure; all temporary refs are released on each path.
bool
GstVideoReceiver::_addDecoder(GstElement* src)
{
    GstPad* srcpad;

    if ((srcpad = gst_element_get_static_pad(src, "src")) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
        return false;
    }

    GstCaps* caps;

    // Query the stream format so _makeDecoder can pick a matching decoder.
    if ((caps = gst_pad_query_caps(srcpad, nullptr)) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_pad_query_caps() failed";
        gst_object_unref(srcpad);
        srcpad = nullptr;
        return false;
    }

    gst_object_unref(srcpad);
    srcpad = nullptr;

    if ((_decoder = _makeDecoder(caps, _videoSink)) == nullptr) {
        qCCritical(VideoReceiverLog) << "_makeDecoder() failed";
        gst_caps_unref(caps);
        caps = nullptr;
        return false;
    }

    // Hold our own reference: gst_bin_add() below takes ownership of the
    // floating ref, but we keep _decoder alive for later teardown.
    gst_object_ref(_decoder);

    gst_caps_unref(caps);
    caps = nullptr;

    // FIXME: AV: check if srcpad exists - if it does then no need to wait for new pad
    //    int probeRes = 0;
    //    gst_element_foreach_src_pad(source, _padProbe, &probeRes);
    g_signal_connect(_decoder, "pad-added", G_CALLBACK(_onNewPad), this);

    gst_bin_add(GST_BIN(_pipeline), _decoder);

    gst_element_sync_state_with_parent(_decoder);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-decoder");

    if (!gst_element_link(src, _decoder)) {
        qCCritical(VideoReceiverLog) << "Unable to link decoder";
        return false;
    }

    return true;
}
Gus Grubba's avatar
Gus Grubba committed
1021

1022
// Adds the previously-created _videoSink to the pipeline, links it after the
// decoder, publishes the decoded video size (taken from 'pad' caps) and flags
// decoding as active. Returns false if the sink could not be linked.
bool
GstVideoReceiver::_addVideoSink(GstPad* pad)
{
    GstCaps* caps = gst_pad_query_caps(pad, nullptr);

    gst_object_ref(_videoSink); // gst_bin_add() will steal one reference

    gst_bin_add(GST_BIN(_pipeline), _videoSink);

    if(!gst_element_link(_decoder, _videoSink)) {
        // Removing from the bin drops the ref the bin held; our explicit ref
        // above keeps _videoSink alive for the caller.
        gst_bin_remove(GST_BIN(_pipeline), _videoSink);
        qCCritical(VideoReceiverLog) << "Unable to link video sink";
        if (caps != nullptr) {
            gst_caps_unref(caps);
            caps = nullptr;
        }
        return false;
    }

    gst_element_sync_state_with_parent(_videoSink);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-videosink");

    if (caps != nullptr) {
        // First structure of the caps carries the negotiated width/height.
        GstStructure* s = gst_caps_get_structure(caps, 0);

        if (s != nullptr) {
            gint width, height;
            gst_structure_get_int(s, "width", &width);
            gst_structure_get_int(s, "height", &height);
            _setVideoSize(QSize(width, height));
        }

        gst_caps_unref(caps);
        caps = nullptr;
    } else {
        // Unknown size until caps are negotiated.
        _setVideoSize(QSize(0, 0));
    }

    _decoding = true;
    qCDebug(VideoReceiverLog) << "Decoding started";
    _dispatchSignal([this](){
        emit decodingChanged();
    });

    return true;
}
Gus Grubba's avatar
Gus Grubba committed
1069

1070
// Stamps the last time a buffer passed the tee; used by the watchdog to
// detect a stalled source.
void
GstVideoReceiver::_noteTeeFrame(void)
{
    _lastSourceFrameTime = QDateTime::currentSecsSinceEpoch();
}
Gus Grubba's avatar
Gus Grubba committed
1075

1076
// Stamps the last time a buffer reached the video sink; used by the watchdog
// to detect a stalled decoding branch.
void
GstVideoReceiver::_noteVideoSinkFrame(void)
{
    _lastVideoFrameTime = QDateTime::currentSecsSinceEpoch();
}
Gus Grubba's avatar
Gus Grubba committed
1081

1082
// Latches the end-of-stream flag (set from the EOS pad probe).
void
GstVideoReceiver::_noteEndOfStream(void)
{
    _endOfStream = true;
}
Gus Grubba's avatar
Gus Grubba committed
1087

1088 1089
// -Unlink the branch from the src pad
// -Send an EOS event at the beginning of that branch
//
// Detaches the branch downstream of 'from' and pushes EOS into it so the
// branch can drain and finalize (e.g. let the muxer write its footer).
// Returns false if the pads could not be resolved/unlinked or EOS was refused.
bool
GstVideoReceiver::_unlinkBranch(GstElement* from)
{
    GstPad* src;

    if ((src = gst_element_get_static_pad(from, "src")) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
        return false;
    }

    GstPad* sink;

    // The peer pad is the entry point of the branch being detached.
    if ((sink = gst_pad_get_peer(src)) == nullptr) {
        gst_object_unref(src);
        src = nullptr;
        qCCritical(VideoReceiverLog) << "gst_pad_get_peer() failed";
        return false;
    }

    if (!gst_pad_unlink(src, sink)) {
        gst_object_unref(src);
        src = nullptr;
        gst_object_unref(sink);
        sink = nullptr;
        qCCritical(VideoReceiverLog) << "gst_pad_unlink() failed";
        return false;
    }

    gst_object_unref(src);
    src = nullptr;

    // Send EOS at the beginning of the branch
    const gboolean ret = gst_pad_send_event(sink, gst_event_new_eos());

    gst_object_unref(sink);
    sink = nullptr;

    if (!ret) {
        qCCritical(VideoReceiverLog) << "Branch EOS was NOT sent";
        return false;
    }

    qCDebug(VideoReceiverLog) << "Branch EOS was sent";

    return true;
}

1137
// Tears down the decoding branch: removes decoder and video sink from the
// pipeline, drops our references, removes the sink-pad probe and emits
// decodingChanged() if decoding was active.
void
GstVideoReceiver::_shutdownDecodingBranch(void)
{
    if (_decoder != nullptr) {
        GstObject* parent;

        // Only remove the decoder if it was actually added to the pipeline.
        if ((parent = gst_element_get_parent(_decoder)) != nullptr) {
            gst_bin_remove(GST_BIN(_pipeline), _decoder);
            gst_element_set_state(_decoder, GST_STATE_NULL);
            gst_object_unref(parent);
            parent = nullptr;
        }

        // Drop the extra ref taken in _addDecoder().
        gst_object_unref(_decoder);
        _decoder = nullptr;
    }

    if (_videoSinkProbeId != 0) {
        GstPad* sinkpad;
        if ((sinkpad = gst_element_get_static_pad(_videoSink, "sink")) != nullptr) {
            gst_pad_remove_probe(sinkpad, _videoSinkProbeId);
            gst_object_unref(sinkpad);
            sinkpad = nullptr;
        }
        _videoSinkProbeId = 0;
    }

    _lastVideoFrameTime = 0;

    GstObject* parent;

    // The sink is only parented if _addVideoSink() succeeded.
    if ((parent = gst_element_get_parent(_videoSink)) != nullptr) {
        gst_bin_remove(GST_BIN(_pipeline), _videoSink);
        gst_element_set_state(_videoSink, GST_STATE_NULL);
        gst_object_unref(parent);
        parent = nullptr;
    }

    gst_object_unref(_videoSink);
    _videoSink = nullptr;

    _removingDecoder = false;

    if (_decoding) {
        _decoding = false;
        qCDebug(VideoReceiverLog) << "Decoding stopped";
        _dispatchSignal([this](){
            emit decodingChanged();
        });
    }

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-decoding-stopped");
}

1191
// Tears down the recording branch: removes the file sink from the pipeline,
// releases it, and emits recordingChanged() if a recording was in progress.
void
GstVideoReceiver::_shutdownRecordingBranch(void)
{
    gst_bin_remove(GST_BIN(_pipeline), _fileSink);
    gst_element_set_state(_fileSink, GST_STATE_NULL);
    // Drop our own reference; the bin's reference was released by
    // gst_bin_remove() above.
    gst_object_unref(_fileSink);
    _fileSink = nullptr;

    _removingRecorder = false;

    if (_recording) {
        _recording = false;
        qCDebug(VideoReceiverLog) << "Recording stopped";
        _dispatchSignal([this](){
            emit recordingChanged();
        });
    }

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording-stopped");
}

1212 1213 1214 1215 1216 1217
// Returns whether signal emission must be queued to the worker thread.
// NOTE(review): exact semantics live in SlotHandler::needDispatch(), which is
// not visible in this file — presumably true when called off-thread; confirm.
bool
GstVideoReceiver::_needDispatch(void)
{
    return _slotHandler.needDispatch();
}

1218
void
1219
GstVideoReceiver::_dispatchSignal(std::function<void()> emitter)
1220
{
1221 1222 1223
    _signalDepth += 1;
    emitter();
    _signalDepth -= 1;
1224 1225
}

1226
// GStreamer bus watch callback. Handles pipeline errors (stop the receiver),
// EOS, and forwarded branch EOS messages (GstBinForwarded from bins with
// message-forwarding enabled). All reactions are dispatched to the slot
// handler thread. Always returns TRUE to keep the watch installed.
gboolean
GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    GstVideoReceiver* pThis = (GstVideoReceiver*)data;

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR:
        do {
            gchar* debug;
            GError* error;

            // Both out-params are owned by us after parsing and must be freed.
            gst_message_parse_error(msg, &error, &debug);

            if (debug != nullptr) {
                g_free(debug);
                debug = nullptr;
            }

            if (error != nullptr) {
                qCCritical(VideoReceiverLog) << "GStreamer error:" << error->message;
                g_error_free(error);
                error = nullptr;
            }

            pThis->_slotHandler.dispatch([pThis](){
                qCDebug(VideoReceiverLog) << "Stoppping because of error";
                pThis->stop();
            });
        } while(0);
        break;
    case GST_MESSAGE_EOS:
        pThis->_slotHandler.dispatch([pThis](){
            qCDebug(VideoReceiverLog) << "Received EOS";
            pThis->_handleEOS();
        });
        break;
    case GST_MESSAGE_ELEMENT:
        do {
            const GstStructure* s = gst_message_get_structure (msg);

            // Only interested in messages re-posted by a forwarding bin.
            if (!gst_structure_has_name (s, "GstBinForwarded")) {
                break;
            }

            GstMessage* forward_msg = nullptr;

            gst_structure_get(s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);

            if (forward_msg == nullptr) {
                break;
            }

            if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
                pThis->_slotHandler.dispatch([pThis](){
                    qCDebug(VideoReceiverLog) << "Received branch EOS";
                    pThis->_handleEOS();
                });
            }

            gst_message_unref(forward_msg);
            forward_msg = nullptr;
        } while(0);
        break;
    default:
        break;
    }

    return TRUE;
}
1297

1298
void
1299
GstVideoReceiver::_onNewPad(GstElement* element, GstPad* pad, gpointer data)
1300
{
1301
    GstVideoReceiver* self = static_cast<GstVideoReceiver*>(data);
1302 1303 1304 1305 1306 1307 1308 1309 1310 1311 1312

    if (element == self->_source) {
        self->_onNewSourcePad(pad);
    } else if (element == self->_decoder) {
        self->_onNewDecoderPad(pad);
    } else {
        qCDebug(VideoReceiverLog) << "Unexpected call!";
    }
}

void
1313
GstVideoReceiver::_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
1314
{
1315 1316
    Q_UNUSED(data)

1317 1318 1319 1320
    gchar* name;

    if ((name = gst_pad_get_name(pad)) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_pad_get_name() failed";
1321 1322 1323
        return;
    }

1324 1325 1326 1327 1328 1329 1330
    GstPad* ghostpad;

    if ((ghostpad = gst_ghost_pad_new(name, pad)) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_ghost_pad_new() failed";
        g_free(name);
        name = nullptr;
        return;
1331 1332
    }

1333 1334
    g_free(name);
    name = nullptr;
1335

1336
    gst_pad_set_active(ghostpad, TRUE);
1337

1338 1339
    if (!gst_element_add_pad(GST_ELEMENT_PARENT(element), ghostpad)) {
        qCCritical(VideoReceiverLog) << "gst_element_add_pad() failed";
1340 1341 1342
    }
}

1343
// "pad-added" callback that links a new pad into the downstream element given
// via 'data', inserting an rtpjitterbuffer in between when the pad carries
// RTP. Failures to insert the buffer are non-fatal: the pad is then linked
// directly.
void
GstVideoReceiver::_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
{
    bool isRtpPad = false;

    GstCaps* filter;

    // Detect RTP by intersecting the pad's caps with application/x-rtp.
    if ((filter = gst_caps_from_string("application/x-rtp")) != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, nullptr);

        if (caps != nullptr) {
            if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
                isRtpPad = true;
            }
            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    if (isRtpPad) {
        GstElement* buffer;

        if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) != nullptr) {
            gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), buffer);

            gst_element_sync_state_with_parent(buffer);

            GstPad* sinkpad = gst_element_get_static_pad(buffer, "sink");

            if (sinkpad != nullptr) {
                const GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);

                gst_object_unref(sinkpad);
                sinkpad = nullptr;

                if (ret == GST_PAD_LINK_OK) {
                    // Continue linking from the jitter buffer's src pad.
                    // NOTE(review): this pad ref is never unreffed - leaks one
                    // ref per RTP pad (see the FIXME about pad handling).
                    pad = gst_element_get_static_pad(buffer, "src");
                    element = buffer;
                } else {
                    qCDebug(VideoReceiverLog) << "Partially failed - gst_pad_link()";
                }
            } else {
                qCDebug(VideoReceiverLog) << "Partially failed - gst_element_get_static_pad()";
            }
        } else {
            qCDebug(VideoReceiverLog) << "Partially failed - gst_element_factory_make('rtpjitterbuffer')";
        }
    }

    gchar* name;

    // Link (element:name) -> (data:sink), where 'data' is the target element.
    if ((name = gst_pad_get_name(pad)) != nullptr) {
        if(gst_element_link_pads(element, name, GST_ELEMENT(data), "sink") == false) {
            qCCritical(VideoReceiverLog) << "gst_element_link_pads() failed";
        }

        g_free(name);
        name = nullptr;
    } else {
        qCCritical(VideoReceiverLog) << "gst_pad_get_name() failed";
    }
}

1409
// gst_element_foreach_src_pad() helper: sets bit 0 of *user_data when the
// element has at least one src pad, and bit 1 when that pad carries RTP.
gboolean
GstVideoReceiver::_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
    Q_UNUSED(element)

    int* result = static_cast<int*>(user_data);

    *result |= 1;

    GstCaps* rtpFilter = gst_caps_from_string("application/x-rtp");

    if (rtpFilter != nullptr) {
        GstCaps* padCaps = gst_pad_query_caps(pad, nullptr);

        if (padCaps != nullptr) {
            const bool isRtp = !gst_caps_is_any(padCaps) && gst_caps_can_intersect(padCaps, rtpFilter);

            if (isRtp) {
                *result |= 2;
            }

            gst_caps_unref(padCaps);
            padCaps = nullptr;
        }

        gst_caps_unref(rtpFilter);
        rtpFilter = nullptr;
    }

    return TRUE;
}
1438

1439 1440 1441 1442 1443 1444 1445 1446 1447 1448 1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464 1465 1466 1467 1468 1469 1470 1471 1472 1473 1474 1475 1476 1477 1478 1479 1480 1481 1482 1483 1484 1485 1486 1487 1488 1489 1490
// Caps-query interceptor installed on the parser: forces an explicit
// stream-format (avc for H.264, hvc1 for H.265) so downstream
// decoders/muxers negotiate a packetized stream. Returns TRUE when the query
// was answered here, FALSE to fall back to default handling.
gboolean
GstVideoReceiver::_filterParserCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    Q_UNUSED(bin)
    Q_UNUSED(pad)
    Q_UNUSED(element)
    Q_UNUSED(data)

    if (GST_QUERY_TYPE(query) != GST_QUERY_CAPS) {
        return FALSE;
    }

    GstCaps* srcCaps;

    gst_query_parse_caps(query, &srcCaps);

    if (srcCaps == nullptr || gst_caps_is_any(srcCaps)) {
        return FALSE;
    }

    GstCaps* sinkCaps = nullptr;

    GstCaps* filter;

    // Check H.264 first...
    if ((filter = gst_caps_from_string("video/x-h264")) != nullptr) {
        if (gst_caps_can_intersect(srcCaps, filter)) {
            sinkCaps = gst_caps_from_string("video/x-h264,stream-format=avc");
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    // ...then H.265. This must be an independent 'if': in the original
    // 'else if' chain the H.264 branch was virtually always entered (its
    // gst_caps_from_string() succeeds regardless of the stream), so the
    // H.265 case was unreachable and hvc1 was never requested.
    if (sinkCaps == nullptr && (filter = gst_caps_from_string("video/x-h265")) != nullptr) {
        if (gst_caps_can_intersect(srcCaps, filter)) {
            sinkCaps = gst_caps_from_string("video/x-h265,stream-format=hvc1");
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    if (sinkCaps == nullptr) {
        return FALSE;
    }

    gst_query_set_caps_result(query, sinkCaps);

    gst_caps_unref(sinkCaps);
    sinkCaps = nullptr;

    return TRUE;
}

1491
// Answers a decodebin CAPS query with whatever the glupload sink pad (passed
// via 'data') can accept. Returns TRUE when the resulting caps are non-empty.
gboolean
GstVideoReceiver::_autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    Q_UNUSED(bin)
    Q_UNUSED(pad)
    Q_UNUSED(element)

    GstElement* glupload = static_cast<GstElement*>(data);

    GstPad* uploadSinkPad = gst_element_get_static_pad(glupload, "sink");

    if (uploadSinkPad == nullptr) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    GstCaps* filter;

    gst_query_parse_caps(query, &filter);

    GstCaps* acceptedCaps = gst_pad_query_caps(uploadSinkPad, filter);

    gst_query_set_caps_result(query, acceptedCaps);

    const gboolean handled = !gst_caps_is_empty(acceptedCaps);

    gst_caps_unref(acceptedCaps);
    acceptedCaps = nullptr;

    gst_object_unref(uploadSinkPad);
    uploadSinkPad = nullptr;

    return handled;
}

1526
// Forwards a decodebin CONTEXT query to the GL video sink (passed via 'data')
// so GL contexts can be shared. Returns the sink pad's query result.
gboolean
GstVideoReceiver::_autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    Q_UNUSED(bin)
    Q_UNUSED(pad)
    Q_UNUSED(element)

    GstElement* glsink = static_cast<GstElement*>(data);

    GstPad* sinkPad = gst_element_get_static_pad(glsink, "sink");

    if (sinkPad == nullptr) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    const gboolean handled = gst_pad_query(sinkPad, query);

    gst_object_unref(sinkPad);
    sinkPad = nullptr;

    return handled;
}

1550
// decodebin "autoplug-query" handler: routes CAPS and CONTEXT queries to the
// dedicated helpers; everything else is left to default handling (FALSE).
gboolean
GstVideoReceiver::_autoplugQuery(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    const GstQueryType queryType = GST_QUERY_TYPE(query);

    if (queryType == GST_QUERY_CAPS) {
        return _autoplugQueryCaps(bin, pad, element, query, data);
    }

    if (queryType == GST_QUERY_CONTEXT) {
        return _autoplugQueryContext(bin, pad, element, query, data);
    }

    return FALSE;
}

1570
// Buffer probe on the tee: timestamps source activity for stall detection.
GstPadProbeReturn
GstVideoReceiver::_teeProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad)
    Q_UNUSED(info)

    GstVideoReceiver* receiver = static_cast<GstVideoReceiver*>(user_data);

    if (receiver != nullptr) {
        receiver->_noteTeeFrame();
    }

    return GST_PAD_PROBE_OK;
}
1583

1584
// Buffer probe on the video sink pad: consumes the one-shot _resetVideoSink
// flag (the experimental reset logic is disabled, see FIXME below) and
// timestamps decoded-frame activity for stall detection.
GstPadProbeReturn
GstVideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad)
    Q_UNUSED(info)

    if(user_data != nullptr) {
        GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);

        if (pThis->_resetVideoSink) {
            pThis->_resetVideoSink = false;

// FIXME: AV: this makes MPEG2-TS playing smooth but breaks RTSP
//            gst_pad_send_event(pad, gst_event_new_flush_start());
//            gst_pad_send_event(pad, gst_event_new_flush_stop(TRUE));

//            GstBuffer* buf;

//            if ((buf = gst_pad_probe_info_get_buffer(info)) != nullptr) {
//                GstSegment* seg;

//                if ((seg = gst_segment_new()) != nullptr) {
//                    gst_segment_init(seg, GST_FORMAT_TIME);

//                    seg->start = buf->pts;

//                    gst_pad_send_event(pad, gst_event_new_segment(seg));

//                    gst_segment_free(seg);
//                    seg = nullptr;
//                }

//                gst_pad_set_offset(pad, -static_cast<gint64>(buf->pts));
//            }
        }

        pThis->_noteVideoSinkFrame();
    }

    return GST_PAD_PROBE_OK;
}

1626
// Downstream event probe: latches the end-of-stream flag when an EOS event
// passes the probed pad.
GstPadProbeReturn
GstVideoReceiver::_eosProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    Q_ASSERT(user_data != nullptr);

    if(info != nullptr) {
        GstEvent* event = gst_pad_probe_info_get_event(info);

        const bool isEos = GST_EVENT_TYPE(event) == GST_EVENT_EOS;

        if (isEos) {
            static_cast<GstVideoReceiver*>(user_data)->_noteEndOfStream();
        }
    }

    return GST_PAD_PROBE_OK;
}

1644
// Buffer probe installed on the recording branch: drops everything until the
// first keyframe so the recorded file starts on a decodable frame, rebases
// the branch timeline to that frame, then removes itself.
GstPadProbeReturn
GstVideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    if (info == nullptr || user_data == nullptr) {
        qCCritical(VideoReceiverLog) << "Invalid arguments";
        return GST_PAD_PROBE_DROP;
    }

    GstBuffer* buf = gst_pad_probe_info_get_buffer(info);

    if (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) { // wait for a keyframe
        return GST_PAD_PROBE_DROP;
    }

    // set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except these which are downstream!
    gst_pad_set_offset(pad, -static_cast<gint64>(buf->pts));

    GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);

    qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";

    // NOTE(review): signal is emitted directly from the streaming thread here
    // (not via _dispatchSignal) - confirm receivers use queued connections.
    pThis->recordingStarted();

    // PROBE_REMOVE uninstalls this probe; subsequent buffers flow untouched.
    return GST_PAD_PROBE_REMOVE;
}