GstVideoReceiver.cc 43.1 KB
Newer Older
1 2
/****************************************************************************
 *
Gus Grubba's avatar
Gus Grubba committed
3
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
4 5 6 7 8
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/
Gus Grubba's avatar
Gus Grubba committed
9 10 11 12 13


/**
 * @file
 *   @brief QGC Video Receiver
Gus Grubba's avatar
Gus Grubba committed
14
 *   @author Gus Grubba <gus@auterion.com>
Gus Grubba's avatar
Gus Grubba committed
15 16
 */

17
#include "GstVideoReceiver.h"
18

Gus Grubba's avatar
Gus Grubba committed
19
#include <QDebug>
20
#include <QUrl>
21
#include <QDateTime>
22
#include <QSysInfo>
23

24 25
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

26 27 28 29 30 31 32 33 34
//-----------------------------------------------------------------------------
// Our pipeline look like this:
//
//              +-->queue-->_decoderValve[-->_decoder-->_videoSink]
//              |
// _source-->_tee
//              |
//              +-->queue-->_recorderValve[-->_fileSink]
//
35

36 37
// Construct the receiver: zero-initialize all GStreamer element pointers and
// state flags, start the API and notification worker threads, and arm the
// watchdog timer that periodically checks stream/decoder frame activity.
GstVideoReceiver::GstVideoReceiver(QObject* parent)
    : VideoReceiver(parent)
    , _removingDecoder(false)
    , _removingRecorder(false)
    , _source(nullptr)
    , _tee(nullptr)
    , _decoderValve(nullptr)
    , _recorderValve(nullptr)
    , _decoder(nullptr)
    , _videoSink(nullptr)
    , _fileSink(nullptr)
    , _pipeline(nullptr)
    , _lastSourceFrameTime(0)
    , _lastVideoFrameTime(0)
    , _resetVideoSink(true)
    , _videoSinkProbeId(0)
    , _udpReconnect_us(5000000)  // passed to rtspsrc "timeout" property (microseconds)
    , _endOfStream(false)
{
    _apiHandler.start();
    _notificationHandler.start();
    connect(&_watchdogTimer, &QTimer::timeout, this, &GstVideoReceiver::_watchdog);
    _watchdogTimer.start(1000);  // run the watchdog once per second
}

61
// Destructor: stop the pipeline first, then shut down the worker threads.
// Order matters — stop() dispatches work onto both handlers, so they must
// still be running while it executes.
GstVideoReceiver::~GstVideoReceiver(void)
{
    stop();
    _notificationHandler.shutdown();
    _apiHandler.shutdown();
}

68
// Start receiving the stream described by 'uri'.
//
// Always executes on the API handler thread (re-dispatches itself if called
// from elsewhere). Builds the static part of the pipeline:
//   _source --> _tee --> queue --> _decoderValve  (decoding branch, closed)
//                    \-> queue --> _recorderValve (recording branch, closed)
// and sets it to PLAYING. Emits onStartComplete(true/false) via the
// notification handler. 'timeout' (seconds) feeds the watchdog checks.
void
GstVideoReceiver::start(const QString& uri, unsigned timeout)
{
    if (_apiHandler.needDispatch()) {
        QString cachedUri = uri;
        _apiHandler.dispatch([this, cachedUri, timeout]() {
            start(cachedUri, timeout);
        });
        return;
    }

    if(_pipeline) {
        qCDebug(VideoReceiverLog) << "Already running!";
        _notificationHandler.dispatch([this](){
            emit onStartComplete(false);
        });
        return;
    }

    if (uri.isEmpty()) {
        qCDebug(VideoReceiverLog) << "Failed because URI is not specified";
        _notificationHandler.dispatch([this](){
            emit onStartComplete(false);
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Starting";

    _endOfStream = false;

    _timeout = timeout;

    bool running    = false;
    bool pipelineUp = false;

    GstElement* decoderQueue = nullptr;
    GstElement* recorderQueue = nullptr;

    // do/while(0) + break is used as structured goto for error handling.
    do {
        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('tee') failed";
            break;
        }

        GstPad* pad;

        if ((pad = gst_element_get_static_pad(_tee, "sink")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
            break;
        }

        _lastSourceFrameTime = 0;

        // The tee probe timestamps incoming buffers so the watchdog can
        // detect a stalled source.
        gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, _teeProbe, this, nullptr);
        gst_object_unref(pad);
        pad = nullptr;

        if((decoderQueue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('queue') failed";
            break;
        }

        if((_decoderValve = gst_element_factory_make("valve", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('valve') failed";
            break;
        }

        // Both valves start closed; they open when decoding/recording begins.
        g_object_set(_decoderValve, "drop", TRUE, nullptr);

        if((recorderQueue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('queue') failed";
            break;
        }

        if((_recorderValve = gst_element_factory_make("valve", nullptr)) == nullptr)  {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('valve') failed";
            break;
        }

        g_object_set(_recorderValve, "drop", TRUE, nullptr);

        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_pipeline_new() failed";
            break;
        }

        g_object_set(_pipeline, "message-forward", TRUE, nullptr);

        if ((_source = _makeSource(uri)) == nullptr) {
            qCCritical(VideoReceiverLog) << "_makeSource() failed";
            break;
        }

        g_signal_connect(_source, "pad-added", G_CALLBACK(_onNewPad), this);

        gst_bin_add_many(GST_BIN(_pipeline), _source, _tee, decoderQueue, _decoderValve, recorderQueue, _recorderValve, nullptr);

        // From here on the pipeline owns the elements, so the failure path
        // below must not unref them individually.
        pipelineUp = true;

        if(!gst_element_link_many(_tee, decoderQueue, _decoderValve, nullptr)) {
            qCCritical(VideoReceiverLog) << "Unable to link decoder queue";
            break;
        }

        if(!gst_element_link_many(_tee, recorderQueue, _recorderValve, nullptr)) {
            qCCritical(VideoReceiverLog) << "Unable to link recorder queue";
            break;
        }

        GstBus* bus = nullptr;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = nullptr;
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-initial");
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;
    } while(0);

    if (!running) {
        qCCritical(VideoReceiverLog) << "Failed";

        // In newer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != nullptr) {
            gst_element_set_state(_pipeline, GST_STATE_NULL);
            gst_object_unref(_pipeline);
            _pipeline = nullptr;
        }

        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
            if (_recorderValve != nullptr) {
                gst_object_unref(_recorderValve);
                _recorderValve = nullptr;
            }

            if (recorderQueue != nullptr) {
                gst_object_unref(recorderQueue);
                recorderQueue = nullptr;
            }

            if (_decoderValve != nullptr) {
                gst_object_unref(_decoderValve);
                _decoderValve = nullptr;
            }

            if (decoderQueue != nullptr) {
                gst_object_unref(decoderQueue);
                decoderQueue = nullptr;
            }

            if (_tee != nullptr) {
                gst_object_unref(_tee);
                _tee = nullptr;
            }

            if (_source != nullptr) {
                gst_object_unref(_source);
                _source = nullptr;
            }
        }

        _notificationHandler.dispatch([this](){
            emit onStartComplete(false);
        });
    } else {
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-started");
        qCDebug(VideoReceiverLog) << "Started";

        _notificationHandler.dispatch([this](){
            emit onStartComplete(true);
        });
    }
}
246

247
// Stop the pipeline and release all pipeline-owned state.
//
// Always executes on the API handler thread. If the recorder valve is open
// (a recording is in progress), an EOS is pushed through the pipeline and the
// bus is drained synchronously so the muxer can finalize the file before the
// pipeline is destroyed. Emits streamingChanged() if streaming had started,
// otherwise emits timeout().
void
GstVideoReceiver::stop(void)
{
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this]() {
            stop();
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Stopping";

    if (_pipeline != nullptr) {
        GstBus* bus;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            // Stop async bus delivery before tearing down so our callback
            // does not fire mid-destruction.
            gst_bus_disable_sync_message_emission(bus);

            g_signal_handlers_disconnect_by_data(bus, this);

            gboolean recordingValveClosed = TRUE;

            g_object_get(_recorderValve, "drop", &recordingValveClosed, nullptr);

            if (recordingValveClosed == FALSE) {
                // Recording is active: send EOS and block until the muxer
                // has flushed (EOS reaches the bus) or an error occurs.
                gst_element_send_event(_pipeline, gst_event_new_eos());

                GstMessage* msg;

                if((msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR))) != nullptr) {
                    if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
                        qCCritical(VideoReceiverLog) << "Error stopping pipeline!";
                    } else if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_EOS) {
                        qCDebug(VideoReceiverLog) << "End of stream received!";
                    }

                    gst_message_unref(msg);
                    msg = nullptr;
                } else {
                    qCCritical(VideoReceiverLog) << "gst_bus_timed_pop_filtered() failed";
                }
            }

            gst_object_unref(bus);
            bus = nullptr;
        } else {
            qCCritical(VideoReceiverLog) << "gst_pipeline_get_bus() failed";
        }

        gst_element_set_state(_pipeline, GST_STATE_NULL);

        // FIXME: check if branch is connected and remove all elements from branch
        if (_fileSink != nullptr) {
           _shutdownRecordingBranch();
        }

        if (_videoSink != nullptr) {
            _shutdownDecodingBranch();
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-stopped");

        gst_object_unref(_pipeline);
        _pipeline = nullptr;

        // These were owned by the pipeline; just forget the pointers.
        _recorderValve = nullptr;
        _decoderValve = nullptr;
        _tee = nullptr;
        _source = nullptr;

        _lastSourceFrameTime = 0;

        if (_streaming) {
            _streaming = false;
            qCDebug(VideoReceiverLog) << "Streaming stopped";
            _notificationHandler.dispatch([this](){
                emit streamingChanged();
            });
        } else {
            qCDebug(VideoReceiverLog) << "Streaming did not start";
           _notificationHandler.dispatch([this](){
               emit timeout();
           });
        }
    }

    qCDebug(VideoReceiverLog) << "Stopped";
}

336
// Begin decoding into the given video sink.
//
// 'sink' is a GstElement* passed as void* to keep the public interface free
// of GStreamer types. Always executes on the API handler thread; the dispatch
// path takes an extra ref on the sink so it outlives the queued lambda.
// Installs a buffer probe on the sink pad (for the watchdog), remembers the
// sink, and — if the stream is already up — attaches the decoder branch and
// opens the decoder valve. If not streaming yet, _onNewSourcePad() finishes
// the job when the source pad appears.
void
GstVideoReceiver::startDecoding(void* sink)
{
    if (sink == nullptr) {
        qCCritical(VideoReceiverLog) << "VideoSink is NULL";
        return;
    }

    if (_apiHandler.needDispatch()) {
        GstElement* videoSink = GST_ELEMENT(sink);
        gst_object_ref(videoSink);
        _apiHandler.dispatch([this, videoSink]() mutable {
            startDecoding(videoSink);
            gst_object_unref(videoSink);
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Starting decoding";

    // If the pipeline is gone, drop any stale sink left from a previous run.
    if (_pipeline == nullptr) {
        if (_videoSink != nullptr) {
            gst_object_unref(_videoSink);
            _videoSink = nullptr;
        }
    }

    GstElement* videoSink = GST_ELEMENT(sink);

    if(_videoSink != nullptr || _decoding) {
        qCDebug(VideoReceiverLog) << "Already decoding!";
        return;
    }

    GstPad* pad;

    if ((pad = gst_element_get_static_pad(videoSink, "sink")) == nullptr) {
        qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink";
        return;
    }

    _lastVideoFrameTime = 0;
    _resetVideoSink = true;

    // Probe timestamps decoded buffers so the watchdog can detect a stalled decoder.
    _videoSinkProbeId = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, _videoSinkProbe, this, nullptr);
    gst_object_unref(pad);
    pad = nullptr;

    _videoSink = videoSink;
    gst_object_ref(_videoSink);  // hold our own reference to the sink

    _removingDecoder = false;

    // Not streaming yet: decoder branch will be attached on the new source pad.
    if (!_streaming) {
        return;
    }

    if (!_addDecoder(_decoderValve)) {
        qCCritical(VideoReceiverLog) << "_addDecoder() failed";
        return;
    }

    g_object_set(_decoderValve, "drop", FALSE, nullptr);

    qCDebug(VideoReceiverLog) << "Decoding started";
}

// Stop decoding: close the decoder valve, mark the branch as being removed,
// and send EOS into the branch via _unlinkBranch(). Actual teardown happens
// in _shutdownDecodingBranch() once the EOS arrives (_handleEOS).
void
GstVideoReceiver::stopDecoding(void)
{
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this]() {
            stopDecoding();
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Stopping decoding";

    // exit immediately if we are not decoding
    if (_pipeline == nullptr || !_decoding) {
        qCDebug(VideoReceiverLog) << "Not decoding!";
        return;
    }

    g_object_set(_decoderValve, "drop", TRUE, nullptr);

    _removingDecoder = true;

    _unlinkBranch(_decoderValve);
}

void
429
GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
430
{
431 432
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this, videoFile, format]() {
433 434 435 436 437 438 439
            startRecording(videoFile, format);
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Starting recording";

440 441 442 443 444 445
    if (_pipeline == nullptr) {
        qCDebug(VideoReceiverLog) << "Streaming is not active!";
        return;
    }

    if (_recording) {
446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

    qCDebug(VideoReceiverLog) << "New video file:" << videoFile;

    if ((_fileSink = _makeFileSink(videoFile, format)) == nullptr) {
        qCCritical(VideoReceiverLog) << "_makeFileSink() failed";
        return;
    }

    _removingRecorder = false;

    gst_object_ref(_fileSink);

    gst_bin_add(GST_BIN(_pipeline), _fileSink);

    if (!gst_element_link(_recorderValve, _fileSink)) {
        qCCritical(VideoReceiverLog) << "Failed to link valve and file sink";
        return;
    }

    gst_element_sync_state_with_parent(_fileSink);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-filesink");

    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
    GstPad* probepad;

    if ((probepad  = gst_element_get_static_pad(_recorderValve, "src")) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
        return;
    }

    gst_pad_add_probe(probepad, GST_PAD_PROBE_TYPE_BUFFER, _keyframeWatch, this, nullptr); // to drop the buffers until key frame is received
    gst_object_unref(probepad);
    probepad = nullptr;

    g_object_set(_recorderValve, "drop", FALSE, nullptr);

    _recording = true;
    qCDebug(VideoReceiverLog) << "Recording started";
490 491 492
    _notificationHandler.dispatch([this](){
        emit recordingChanged();
    });
493 494 495 496
}

//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Stop recording: close the recorder valve, mark the branch as being removed,
// and send EOS into the branch via _unlinkBranch(). The file is finalized and
// the branch torn down in _shutdownRecordingBranch() once EOS arrives.
void
GstVideoReceiver::stopRecording(void)
{
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this]() {
            stopRecording();
        });
        return;
    }

    qCDebug(VideoReceiverLog) << "Stopping recording";

    // exit immediately if we are not recording
    if (_pipeline == nullptr || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }

    g_object_set(_recorderValve, "drop", TRUE, nullptr);

    _removingRecorder = true;

    _unlinkBranch(_recorderValve);
}

// Capture a still image to 'imageFile'.
// NOTE: the capture itself is not implemented yet (see FIXME below) — this
// currently only emits screenshotComplete() so callers are not left waiting.
void
GstVideoReceiver::takeScreenshot(const QString& imageFile)
{
    if (_apiHandler.needDispatch()) {
        _apiHandler.dispatch([this, imageFile]() {
            takeScreenshot(imageFile);
        });
        return;
    }

    // FIXME: AV: record screenshot here
    _notificationHandler.dispatch([this](){
        emit screenshotComplete();
    });
}

537
// GStreamer muxer element names, indexed by (FILE_FORMAT - FILE_FORMAT_MIN).
// Order must match the FILE_FORMAT enum: Matroska, QuickTime, MP4.
const char* GstVideoReceiver::_kFileMux[FILE_FORMAT_MAX - FILE_FORMAT_MIN] = {
    "matroskamux",
    "qtmux",
    "mp4mux"
};

// Periodic health check (driven by _watchdogTimer, see constructor).
// Runs on the API handler thread. Emits timeout() when no source buffer has
// arrived within _timeout seconds, and when decoding is active but no decoded
// frame has arrived within 2 * _timeout seconds.
void
GstVideoReceiver::_watchdog(void)
{
    _apiHandler.dispatch([this](){
        if(_pipeline == nullptr) {
            return;
        }

        const qint64 now = QDateTime::currentSecsSinceEpoch();

        // First tick after (re)start: seed the timestamp instead of timing out.
        if (_lastSourceFrameTime == 0) {
            _lastSourceFrameTime = now;
        }

        if (now - _lastSourceFrameTime > _timeout) {
            qCDebug(VideoReceiverLog) << "Stream timeout, no frames for " << now - _lastSourceFrameTime;
            _notificationHandler.dispatch([this](){
                emit timeout();
            });
        }

        if (_decoding && !_removingDecoder) {
            if (_lastVideoFrameTime == 0) {
                _lastVideoFrameTime = now;
            }

            // Decoder gets a more lenient deadline than the raw source.
            if (now - _lastVideoFrameTime > _timeout * 2) {
                qCDebug(VideoReceiverLog) << "Video decoder timeout, no frames for " << now - _lastVideoFrameTime;
                _notificationHandler.dispatch([this](){
                    emit timeout();
                });
            }
        }
    });
}

void
580
GstVideoReceiver::_handleEOS(void)
581 582 583 584 585 586
{
    if(_pipeline == nullptr) {
        qCWarning(VideoReceiverLog) << "We should not be here";
        return;
    }

587
    if (_endOfStream) {
588 589 590 591 592 593
        stop();
    } else {
        if(_decoding && _removingDecoder) {
            _shutdownDecodingBranch();
        } else if(_recording && _removingRecorder) {
            _shutdownRecordingBranch();
594
        } /*else {
595 596
            qCWarning(VideoReceiverLog) << "Unexpected EOS!";
            stop();
597
        }*/
598 599 600 601
    }
}

// Build the source bin for the given URI and return it (caller owns the
// returned reference), or nullptr on failure.
//
// The bin contains: protocol source (tcpclientsrc / rtspsrc / udpsrc),
// optionally tsdemux (MPEG-TS) and/or rtpjitterbuffer (RTP), feeding a
// parsebin whose pads are exposed via ghost pads (_wrapWithGhostPad).
GstElement*
GstVideoReceiver::_makeSource(const QString& uri)
{
    if (uri.isEmpty()) {
        qCCritical(VideoReceiverLog) << "Failed because URI is not specified";
        return nullptr;
    }

    // Scheme detection by substring match; exactly one is expected to hit.
    bool isTaisync  = uri.contains("tsusb://");
    bool isUdp264   = uri.contains("udp://");
    bool isRtsp     = uri.contains("rtsp://");
    bool isUdp265   = uri.contains("udp265://");
    bool isTcpMPEGTS= uri.contains("tcp://");
    bool isUdpMPEGTS= uri.contains("mpegts://");

    GstElement* source  = nullptr;
    GstElement* buffer  = nullptr;
    GstElement* tsdemux = nullptr;
    GstElement* parser  = nullptr;
    GstElement* bin     = nullptr;
    GstElement* srcbin  = nullptr;

    do {
        QUrl url(uri);

        if(isTcpMPEGTS) {
            if ((source = gst_element_factory_make("tcpclientsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "host", qPrintable(url.host()), "port", url.port(), nullptr);
            }
        } else if (isRtsp) {
            if ((source = gst_element_factory_make("rtspsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "location", qPrintable(uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
            }
        } else if(isUdp264 || isUdp265 || isUdpMPEGTS || isTaisync) {
            if ((source = gst_element_factory_make("udpsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "uri", QString("udp://%1:%2").arg(qPrintable(url.host()), QString::number(url.port())).toUtf8().data(), nullptr);

                GstCaps* caps = nullptr;

                // Raw UDP carries no caps; RTP/H.264 and RTP/H.265 streams
                // need them set explicitly on udpsrc.
                if(isUdp264) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                } else if (isUdp265) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                        qCCritical(VideoReceiverLog) << "gst_caps_from_string() failed";
                        break;
                    }
                }

                if (caps != nullptr) {
                    g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
                    gst_caps_unref(caps);
                    caps = nullptr;
                }
            }
        } else {
            qCDebug(VideoReceiverLog) << "URI is not recognized";
        }

        if (!source) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make() for data source failed";
            break;
        }

        if ((bin = gst_bin_new("sourcebin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sourcebin') failed";
            break;
        }

        if ((parser = gst_element_factory_make("parsebin", "parser")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('parsebin') failed";
            break;
        }

        // NOTE(review): from this point 'bin' owns 'source' and 'parser'; the
        // unconditional unrefs in the cleanup section below would touch
        // bin-owned objects on the later break paths — verify ref ownership.
        gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);

        // FIXME: AV: Android does not determine MPEG2-TS via parsebin - have to explicitly state which demux to use
        // FIXME: AV: tsdemux handling is a bit ugly - let's try to find elegant solution for that later
        if (isTcpMPEGTS || isUdpMPEGTS) {
            if ((tsdemux = gst_element_factory_make("tsdemux", nullptr)) == nullptr) {
                qCCritical(VideoReceiverLog) << "gst_element_factory_make('tsdemux') failed";
                break;
            }

            gst_bin_add(GST_BIN(bin), tsdemux);

            if (!gst_element_link(source, tsdemux)) {
                qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                break;
            }

            // Downstream of here, the demuxer acts as the effective source.
            source = tsdemux;
            tsdemux = nullptr;
        }

        // Probe existing src pads: bit 0 = a pad exists, bit 1 = it is RTP
        // (needs a jitter buffer). No pads yet => link later on "pad-added".
        int probeRes = 0;

        gst_element_foreach_src_pad(source, _padProbe, &probeRes);

        if (probeRes & 1) {
            if (probeRes & 2) {
                if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
                    qCCritical(VideoReceiverLog) << "gst_element_factory_make('rtpjitterbuffer') failed";
                    break;
                }

                gst_bin_add(GST_BIN(bin), buffer);

                if (!gst_element_link_many(source, buffer, parser, nullptr)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            } else {
                if (!gst_element_link(source, parser)) {
                    qCCritical(VideoReceiverLog) << "gst_element_link() failed";
                    break;
                }
            }
        } else {
            g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
        }

        g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);

        // Success: everything is owned by 'bin' now; forget the locals so the
        // cleanup section below does not release them.
        source = tsdemux = buffer = parser = nullptr;

        srcbin = bin;
        bin = nullptr;
    } while(0);

    // Failure cleanup: release whatever was not handed over.
    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    if (parser != nullptr) {
        gst_object_unref(parser);
        parser = nullptr;
    }

    if (tsdemux != nullptr) {
        gst_object_unref(tsdemux);
        tsdemux = nullptr;
    }

    if (buffer != nullptr) {
        gst_object_unref(buffer);
        buffer = nullptr;
    }

    if (source != nullptr) {
        gst_object_unref(source);
        source = nullptr;
    }

    return srcbin;
}

761
// Create the decoder element (a decodebin) and wire its "autoplug-query"
// signal to _autoplugQuery with the video sink as user data, so sink
// capabilities can steer decoder selection. 'caps' is currently unused.
// Returns a new decoder (caller owns the reference) or nullptr on failure.
GstElement*
GstVideoReceiver::_makeDecoder(GstCaps* caps, GstElement* videoSink)
{
    Q_UNUSED(caps);

    GstElement* decoder = nullptr;

    do {
        if ((decoder = gst_element_factory_make("decodebin", nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('decodebin') failed";
            break;
        }

        g_signal_connect(decoder, "autoplug-query", G_CALLBACK(_autoplugQuery), videoSink);
    } while(0);

    return decoder;
}

780
// Build the recording sink bin: [ghost "sink" pad] -> muxer -> filesink.
// The muxer is selected via 'format' (see _kFileMux); 'videoFile' becomes the
// filesink location. Returns the bin (caller owns the reference) or nullptr.
GstElement*
GstVideoReceiver::_makeFileSink(const QString& videoFile, FILE_FORMAT format)
{
    GstElement* fileSink = nullptr;
    GstElement* mux = nullptr;
    GstElement* sink = nullptr;
    GstElement* bin = nullptr;
    // Until mux/sink are added to the bin we must release them ourselves.
    bool releaseElements = true;

    do{
        if (format < FILE_FORMAT_MIN || format >= FILE_FORMAT_MAX) {
            qCCritical(VideoReceiverLog) << "Unsupported file format";
            break;
        }

        if ((mux = gst_element_factory_make(_kFileMux[format - FILE_FORMAT_MIN], nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('" << _kFileMux[format - FILE_FORMAT_MIN] << "') failed";
            break;
        }

        if ((sink = gst_element_factory_make("filesink", nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_factory_make('filesink') failed";
            break;
        }

        g_object_set(static_cast<gpointer>(sink), "location", qPrintable(videoFile), nullptr);

        if ((bin = gst_bin_new("sinkbin")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_bin_new('sinkbin') failed";
            break;
        }

        GstPadTemplate* padTemplate;

        // Muxers expose request pads named "video_%u" for video streams.
        if ((padTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mux), "video_%u")) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_class_get_pad_template(mux) failed";
            break;
        }

        // FIXME: AV: pad handling is potentially leaking (and other similar places too!)
        GstPad* pad;

        if ((pad = gst_element_request_pad(mux, padTemplate, nullptr, nullptr)) == nullptr) {
            qCCritical(VideoReceiverLog) << "gst_element_request_pad(mux) failed";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), mux, sink, nullptr);

        // Bin owns mux/sink from here on.
        releaseElements = false;

        // Expose the muxer's request pad as the bin's "sink" ghost pad.
        GstPad* ghostpad = gst_ghost_pad_new("sink", pad);

        gst_element_add_pad(bin, ghostpad);

        gst_object_unref(pad);
        pad = nullptr;

        if (!gst_element_link(mux, sink)) {
            qCCritical(VideoReceiverLog) << "gst_element_link() failed";
            break;
        }

        fileSink = bin;
        bin = nullptr;
    } while(0);

    if (releaseElements) {
        if (sink != nullptr) {
            gst_object_unref(sink);
            sink = nullptr;
        }

        if (mux != nullptr) {
            gst_object_unref(mux);
            mux = nullptr;
        }
    }

    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }

    return fileSink;
}
866

867
// Called when the source bin exposes a new pad: link the source into the tee,
// flag streaming as started, install the EOS probe, and — if a video sink was
// registered earlier via startDecoding() — attach the decoder branch and open
// the decoder valve.
void
GstVideoReceiver::_onNewSourcePad(GstPad* pad)
{
    // FIXME: check for caps - if this is not video stream (and preferably - one of these which we have to support) then simply skip it
    if(!gst_element_link(_source, _tee)) {
        qCCritical(VideoReceiverLog) << "Unable to link source";
        return;
    }

    if (!_streaming) {
        _streaming = true;
        qCDebug(VideoReceiverLog) << "Streaming started";
        _notificationHandler.dispatch([this](){
            emit streamingChanged();
        });
    }

    // Watch for EOS events flowing downstream from the source.
    gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, _eosProbe, this, nullptr);

    // No sink registered yet: decoding starts later via startDecoding().
    if (_videoSink == nullptr) {
        return;
    }

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-new-source-pad");

    if (!_addDecoder(_decoderValve)) {
        qCCritical(VideoReceiverLog) << "_addDecoder() failed";
        return;
    }

    g_object_set(_decoderValve, "drop", FALSE, nullptr);

    qCDebug(VideoReceiverLog) << "Decoding started";
}
901

902
void
903
GstVideoReceiver::_onNewDecoderPad(GstPad* pad)
904 905
{
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-new-decoder-pad");
906

907 908
    if (!_addVideoSink(pad)) {
        qCCritical(VideoReceiverLog) << "_addVideoSink() failed";
909
    }
910
}
911

912
// Create the decoder, add it to the pipeline and link it downstream of 'src'
// (the decoder valve). The decoder's "pad-added" signal later triggers
// _onNewPad -> _onNewDecoderPad, which attaches the video sink.
// Returns true on success.
bool
GstVideoReceiver::_addDecoder(GstElement* src)
{
    GstPad* srcpad;

    if ((srcpad = gst_element_get_static_pad(src, "src")) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
        return false;
    }

    GstCaps* caps;

    // Query caps of the upstream pad (passed to _makeDecoder, currently unused there).
    if ((caps = gst_pad_query_caps(srcpad, nullptr)) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_pad_query_caps() failed";
        gst_object_unref(srcpad);
        srcpad = nullptr;
        return false;
    }

    gst_object_unref(srcpad);
    srcpad = nullptr;

    if ((_decoder = _makeDecoder(caps, _videoSink)) == nullptr) {
        qCCritical(VideoReceiverLog) << "_makeDecoder() failed";
        gst_caps_unref(caps);
        caps = nullptr;
        return false;
    }

    // Hold our own reference; the pipeline takes the one from _makeDecoder().
    gst_object_ref(_decoder);

    gst_caps_unref(caps);
    caps = nullptr;

    // FIXME: AV: check if srcpad exists - if it does then no need to wait for new pad
    //    int probeRes = 0;
    //    gst_element_foreach_src_pad(source, _padProbe, &probeRes);
    g_signal_connect(_decoder, "pad-added", G_CALLBACK(_onNewPad), this);

    gst_bin_add(GST_BIN(_pipeline), _decoder);

    gst_element_sync_state_with_parent(_decoder);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-decoder");

    if (!gst_element_link(src, _decoder)) {
        qCCritical(VideoReceiverLog) << "Unable to link decoder";
        return false;
    }

    return true;
}
Gus Grubba's avatar
Gus Grubba committed
964

965
// Attach the registered video sink to the decoder's new src pad: add the sink
// to the pipeline, link decoder -> sink, publish the video size derived from
// the pad caps, mark decoding active and emit decodingChanged().
// Returns true on success.
bool
GstVideoReceiver::_addVideoSink(GstPad* pad)
{
    GstCaps* caps = gst_pad_query_caps(pad, nullptr);

    gst_object_ref(_videoSink); // gst_bin_add() will steal one reference

    gst_bin_add(GST_BIN(_pipeline), _videoSink);

    if(!gst_element_link(_decoder, _videoSink)) {
        gst_bin_remove(GST_BIN(_pipeline), _videoSink);
        qCCritical(VideoReceiverLog) << "Unable to link video sink";
        if (caps != nullptr) {
            gst_caps_unref(caps);
            caps = nullptr;
        }
        return false;
    }

    gst_element_sync_state_with_parent(_videoSink);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-videosink");

    if (caps != nullptr) {
        GstStructure* s = gst_caps_get_structure(caps, 0);

        if (s != nullptr) {
            // NOTE(review): width/height stay uninitialized if the caps lack
            // these fields (gst_structure_get_int return values are ignored)
            // — verify the caps always carry them at this point.
            gint width, height;
            gst_structure_get_int(s, "width", &width);
            gst_structure_get_int(s, "height", &height);
            _setVideoSize(QSize(width, height));
        }

        gst_caps_unref(caps);
        caps = nullptr;
    } else {
        // No caps available: report an unknown (0x0) size.
        _setVideoSize(QSize(0, 0));
    }

    _decoding = true;
    qCDebug(VideoReceiverLog) << "Decoding started";
    _notificationHandler.dispatch([this](){
        emit decodingChanged();
    });

    return true;
}
Gus Grubba's avatar
Gus Grubba committed
1012

1013
void
1014
GstVideoReceiver::_noteTeeFrame(void)
1015 1016 1017
{
    _lastSourceFrameTime = QDateTime::currentSecsSinceEpoch();
}
Gus Grubba's avatar
Gus Grubba committed
1018

1019
void
1020
GstVideoReceiver::_noteVideoSinkFrame(void)
1021 1022 1023
{
    _lastVideoFrameTime = QDateTime::currentSecsSinceEpoch();
}
Gus Grubba's avatar
Gus Grubba committed
1024

1025
void
1026
GstVideoReceiver::_noteEndOfStream(void)
1027
{
1028
    _endOfStream = true;
1029
}
Gus Grubba's avatar
Gus Grubba committed
1030

1031 1032 1033
// -Unlink the branch from the src pad
// -Send an EOS event at the beginning of that branch
void
1034
GstVideoReceiver::_unlinkBranch(GstElement* from)
1035 1036
{
    GstPad* src;
Gus Grubba's avatar
Gus Grubba committed
1037

1038 1039 1040 1041
    if ((src = gst_element_get_static_pad(from, "src")) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
        return;
    }
Gus Grubba's avatar
Gus Grubba committed
1042

1043
    GstPad* sink;
Gus Grubba's avatar
Gus Grubba committed
1044

1045 1046 1047 1048 1049 1050
    if ((sink = gst_pad_get_peer(src)) == nullptr) {
        gst_object_unref(src);
        src = nullptr;
        qCCritical(VideoReceiverLog) << "gst_pad_get_peer() failed";
        return;
    }
1051

1052 1053 1054 1055 1056 1057 1058 1059
    if (!gst_pad_unlink(src, sink)) {
        gst_object_unref(src);
        src = nullptr;
        gst_object_unref(sink);
        sink = nullptr;
        qCCritical(VideoReceiverLog) << "gst_pad_unlink() failed";
        return;
    }
1060

1061 1062
    gst_object_unref(src);
    src = nullptr;
1063

1064 1065
    // Send EOS at the beginning of the branch
    const gboolean ret = gst_pad_send_event(sink, gst_event_new_eos());
1066

1067 1068 1069 1070 1071
    gst_object_unref(sink);
    sink = nullptr;

    if (ret) {
        qCDebug(VideoReceiverLog) << "Branch EOS was sent";
1072
    } else {
1073
        qCCritical(VideoReceiverLog) << "Branch EOS was NOT sent";
Gus Grubba's avatar
Gus Grubba committed
1074 1075 1076
    }
}

1077
// Tears down the decoding branch (decoder + video sink) after it has drained.
// Expected to run on the API/handler thread once the branch EOS has been
// observed. Releases the extra references taken when the elements were added,
// clears the frame-watchdog state, and notifies listeners that decoding stopped.
void
GstVideoReceiver::_shutdownDecodingBranch(void)
{
    if (_decoder != nullptr) {
        GstObject* parent;

        // Only remove from the pipeline if the decoder is actually parented;
        // gst_element_get_parent() returns a new ref that must be dropped.
        if ((parent = gst_element_get_parent(_decoder)) != nullptr) {
            gst_bin_remove(GST_BIN(_pipeline), _decoder);
            gst_element_set_state(_decoder, GST_STATE_NULL);
            gst_object_unref(parent);
            parent = nullptr;
        }

        // Drop our own reference (held in addition to the bin's).
        gst_object_unref(_decoder);
        _decoder = nullptr;
    }

    // Remove the frame-counting probe installed on the video sink's sink pad.
    if (_videoSinkProbeId != 0) {
        GstPad* sinkpad;
        if ((sinkpad = gst_element_get_static_pad(_videoSink, "sink")) != nullptr) {
            gst_pad_remove_probe(sinkpad, _videoSinkProbeId);
            gst_object_unref(sinkpad);
            sinkpad = nullptr;
        }
        _videoSinkProbeId = 0;
    }

    // Reset the stall watchdog timestamp for the (now gone) sink.
    _lastVideoFrameTime = 0;

    GstObject* parent;

    // Same remove/NULL/unparent dance for the video sink itself.
    if ((parent = gst_element_get_parent(_videoSink)) != nullptr) {
        gst_bin_remove(GST_BIN(_pipeline), _videoSink);
        gst_element_set_state(_videoSink, GST_STATE_NULL);
        gst_object_unref(parent);
        parent = nullptr;
    }

    gst_object_unref(_videoSink);
    _videoSink = nullptr;

    _removingDecoder = false;

    if (_decoding) {
        _decoding = false;
        qCDebug(VideoReceiverLog) << "Decoding stopped";
        // Emit from the notification thread, not the caller's thread.
        _notificationHandler.dispatch([this](){
            emit decodingChanged();
        });
    }

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-decoding-stopped");
}

1131
// Tears down the recording branch (file sink) after it has drained its EOS.
// Removes the sink from the pipeline, drops the extra reference taken when it
// was added, and notifies listeners that recording stopped.
void
GstVideoReceiver::_shutdownRecordingBranch(void)
{
    gst_bin_remove(GST_BIN(_pipeline), _fileSink);
    gst_element_set_state(_fileSink, GST_STATE_NULL);
    gst_object_unref(_fileSink);
    _fileSink = nullptr;

    _removingRecorder = false;

    if (_recording) {
        _recording = false;
        qCDebug(VideoReceiverLog) << "Recording stopped";
        // Emit from the notification thread, not the caller's thread.
        _notificationHandler.dispatch([this](){
            emit recordingChanged();
        });
    }

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording-stopped");
}

1152
// GStreamer bus watch callback. Handles pipeline errors, top-level EOS, and
// per-branch EOS forwarded by the bin (GstBinForwarded element messages).
// All real work is dispatched onto the API handler thread.
//
// @param bus   the pipeline bus (unused)
// @param msg   the bus message being delivered
// @param data  the owning GstVideoReceiver instance
// @return TRUE to keep the watch installed
gboolean
GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    GstVideoReceiver* pThis = (GstVideoReceiver*)data;

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR:
        do {
            gchar* debug;
            GError* error;

            gst_message_parse_error(msg, &error, &debug);

            if (debug != nullptr) {
                g_free(debug);
                debug = nullptr;
            }

            if (error != nullptr) {
                qCCritical(VideoReceiverLog) << "GStreamer error:" << error->message;
                g_error_free(error);
                error = nullptr;
            }

        //    pThis->_apiHandler.dispatch([pThis](){
        //        qCDebug(VideoReceiverLog) << "Stoppping because of error";
        //        pThis->stop();
        //    });
        } while(0);
        break;
    case GST_MESSAGE_EOS:
        pThis->_apiHandler.dispatch([pThis](){
            qCDebug(VideoReceiverLog) << "Received EOS";
            pThis->_handleEOS();
        });
        break;
    case GST_MESSAGE_ELEMENT:
        do {
            const GstStructure* s = gst_message_get_structure (msg);

            // gst_message_get_structure() may return NULL for element messages;
            // guard before gst_structure_has_name() to avoid a crash.
            if (s == nullptr || !gst_structure_has_name (s, "GstBinForwarded")) {
                break;
            }

            GstMessage* forward_msg = nullptr;

            gst_structure_get(s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);

            if (forward_msg == nullptr) {
                break;
            }

            if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
                pThis->_apiHandler.dispatch([pThis](){
                    qCDebug(VideoReceiverLog) << "Received branch EOS";
                    pThis->_handleEOS();
                });
            }

            gst_message_unref(forward_msg);
            forward_msg = nullptr;
        } while(0);
        break;
    default:
        break;
    }

    return TRUE;
}
1223

1224
void
1225
GstVideoReceiver::_onNewPad(GstElement* element, GstPad* pad, gpointer data)
1226
{
1227
    GstVideoReceiver* self = static_cast<GstVideoReceiver*>(data);
1228 1229 1230 1231 1232 1233 1234 1235 1236 1237 1238

    if (element == self->_source) {
        self->_onNewSourcePad(pad);
    } else if (element == self->_decoder) {
        self->_onNewDecoderPad(pad);
    } else {
        qCDebug(VideoReceiverLog) << "Unexpected call!";
    }
}

void
1239
GstVideoReceiver::_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
1240
{
1241 1242
    Q_UNUSED(data)

1243 1244 1245 1246
    gchar* name;

    if ((name = gst_pad_get_name(pad)) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_pad_get_name() failed";
1247 1248 1249
        return;
    }

1250 1251 1252 1253 1254 1255 1256
    GstPad* ghostpad;

    if ((ghostpad = gst_ghost_pad_new(name, pad)) == nullptr) {
        qCCritical(VideoReceiverLog) << "gst_ghost_pad_new() failed";
        g_free(name);
        name = nullptr;
        return;
1257 1258
    }

1259 1260
    g_free(name);
    name = nullptr;
1261

1262
    gst_pad_set_active(ghostpad, TRUE);
1263

1264 1265
    if (!gst_element_add_pad(GST_ELEMENT_PARENT(element), ghostpad)) {
        qCCritical(VideoReceiverLog) << "gst_element_add_pad() failed";
1266 1267 1268
    }
}

1269
void
1270
GstVideoReceiver::_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
1271
{
1272
    bool isRtpPad = false;
1273

1274
    GstCaps* filter;
1275

1276 1277
    if ((filter = gst_caps_from_string("application/x-rtp")) != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, nullptr);
1278

1279 1280 1281 1282 1283 1284
        if (caps != nullptr) {
            if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
                isRtpPad = true;
            }
            gst_caps_unref(caps);
            caps = nullptr;
1285 1286
        }

1287 1288 1289
        gst_caps_unref(filter);
        filter = nullptr;
    }
1290

1291 1292
    if (isRtpPad) {
        GstElement* buffer;
1293

1294 1295
        if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) != nullptr) {
            gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), buffer);
1296

1297
            gst_element_sync_state_with_parent(buffer);
1298

1299
            GstPad* sinkpad = gst_element_get_static_pad(buffer, "sink");
1300

1301 1302
            if (sinkpad != nullptr) {
                const GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);
1303

1304 1305
                gst_object_unref(sinkpad);
                sinkpad = nullptr;
1306

1307 1308 1309 1310 1311 1312 1313 1314 1315 1316 1317
                if (ret == GST_PAD_LINK_OK) {
                    pad = gst_element_get_static_pad(buffer, "src");
                    element = buffer;
                } else {
                    qCDebug(VideoReceiverLog) << "Partially failed - gst_pad_link()";
                }
            } else {
                qCDebug(VideoReceiverLog) << "Partially failed - gst_element_get_static_pad()";
            }
        } else {
            qCDebug(VideoReceiverLog) << "Partially failed - gst_element_factory_make('rtpjitterbuffer')";
1318
        }
1319
    }
1320

1321
    gchar* name;
1322

1323 1324 1325
    if ((name = gst_pad_get_name(pad)) != nullptr) {
        if(gst_element_link_pads(element, name, GST_ELEMENT(data), "sink") == false) {
            qCCritical(VideoReceiverLog) << "gst_element_link_pads() failed";
1326 1327
        }

1328 1329 1330 1331
        g_free(name);
        name = nullptr;
    } else {
        qCCritical(VideoReceiverLog) << "gst_pad_get_name() failed";
1332 1333 1334
    }
}

1335
// Pad iteration callback used while probing a source element's pads.
// Sets bit 0 of *user_data to mark "a pad exists" and bit 1 if that pad
// can produce RTP (application/x-rtp) caps.
gboolean
GstVideoReceiver::_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
    Q_UNUSED(element)

    int* probeRes = (int*)user_data;

    *probeRes |= 1;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter == nullptr) {
        return TRUE;
    }

    GstCaps* caps = gst_pad_query_caps(pad, nullptr);

    if (caps != nullptr) {
        if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
            *probeRes |= 2;
        }

        gst_caps_unref(caps);
        caps = nullptr;
    }

    gst_caps_unref(filter);
    filter = nullptr;

    return TRUE;
}
1364

1365
// autoplug-query handler for GST_QUERY_CAPS: answers the query with the caps
// accepted by the glupload element's sink pad (restricted by the query filter).
// Returns TRUE only when the resulting caps are non-empty.
gboolean
GstVideoReceiver::_autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    Q_UNUSED(bin)
    Q_UNUSED(pad)
    Q_UNUSED(element)

    GstElement* glupload = (GstElement* )data;

    GstPad* uploadSink = gst_element_get_static_pad(glupload, "sink");

    if (uploadSink == nullptr) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    GstCaps* filter;

    gst_query_parse_caps(query, &filter);

    GstCaps* acceptedCaps = gst_pad_query_caps(uploadSink, filter);

    gst_query_set_caps_result(query, acceptedCaps);

    const gboolean haveCaps = !gst_caps_is_empty(acceptedCaps);

    gst_caps_unref(acceptedCaps);
    acceptedCaps = nullptr;

    gst_object_unref(uploadSink);
    uploadSink = nullptr;

    return haveCaps;
}

1400
// autoplug-query handler for GST_QUERY_CONTEXT: forwards the query to the
// GL sink element's sink pad and returns its answer.
gboolean
GstVideoReceiver::_autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    Q_UNUSED(bin)
    Q_UNUSED(pad)
    Q_UNUSED(element)

    GstElement* glsink = (GstElement* )data;

    GstPad* glsinkPad = gst_element_get_static_pad(glsink, "sink");

    if (glsinkPad == nullptr) {
        qCCritical(VideoReceiverLog) << "No sink pad found";
        return FALSE;
    }

    const gboolean answered = gst_pad_query(glsinkPad, query);

    gst_object_unref(glsinkPad);
    glsinkPad = nullptr;

    return answered;
}

1424
// autoplug-query dispatcher: routes CAPS and CONTEXT queries to their
// dedicated handlers; any other query type is not handled (FALSE).
gboolean
GstVideoReceiver::_autoplugQuery(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    switch (GST_QUERY_TYPE(query)) {
    case GST_QUERY_CAPS:
        return _autoplugQueryCaps(bin, pad, element, query, data);
    case GST_QUERY_CONTEXT:
        return _autoplugQueryContext(bin, pad, element, query, data);
    default:
        return FALSE;
    }
}

1444
// Buffer probe on the tee: timestamps every frame that reaches the tee so the
// receiver can detect a stalled source. Always lets the data through.
GstPadProbeReturn
GstVideoReceiver::_teeProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad)
    Q_UNUSED(info)

    if (user_data == nullptr) {
        return GST_PAD_PROBE_OK;
    }

    static_cast<GstVideoReceiver*>(user_data)->_noteTeeFrame();

    return GST_PAD_PROBE_OK;
}
1457

1458
// Buffer probe on the video sink's sink pad: timestamps every decoded frame
// for the stall watchdog and (once, after a sink reset) consumes the
// _resetVideoSink flag. Always lets the data through.
GstPadProbeReturn
GstVideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad)
    Q_UNUSED(info)

    if(user_data != nullptr) {
        GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);

        // One-shot: clear the reset flag on the first frame after a sink reset.
        // The commented-out experiment below tried to re-baseline the sink's
        // timeline at that point, but was disabled (see FIXME).
        if (pThis->_resetVideoSink) {
            pThis->_resetVideoSink = false;

// FIXME: AV: this makes MPEG2-TS playing smooth but breaks RTSP
//            gst_pad_send_event(pad, gst_event_new_flush_start());
//            gst_pad_send_event(pad, gst_event_new_flush_stop(TRUE));

//            GstBuffer* buf;

//            if ((buf = gst_pad_probe_info_get_buffer(info)) != nullptr) {
//                GstSegment* seg;

//                if ((seg = gst_segment_new()) != nullptr) {
//                    gst_segment_init(seg, GST_FORMAT_TIME);

//                    seg->start = buf->pts;

//                    gst_pad_send_event(pad, gst_event_new_segment(seg));

//                    gst_segment_free(seg);
//                    seg = nullptr;
//                }

//                gst_pad_set_offset(pad, -static_cast<gint64>(buf->pts));
//            }
        }

        // Feed the decode-stall watchdog.
        pThis->_noteVideoSinkFrame();
    }

    return GST_PAD_PROBE_OK;
}

1500
// Event probe: watches for an EOS event on the probed pad and latches the
// receiver's end-of-stream flag when one is seen. Never blocks the event.
GstPadProbeReturn
GstVideoReceiver::_eosProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    Q_ASSERT(user_data != nullptr);

    if (info != nullptr) {
        GstEvent* event = gst_pad_probe_info_get_event(info);

        if (GST_EVENT_TYPE(event) == GST_EVENT_EOS) {
            static_cast<GstVideoReceiver*>(user_data)->_noteEndOfStream();
        }
    }

    return GST_PAD_PROBE_OK;
}

1518
// Buffer probe installed at the start of the recording branch: drops every
// buffer until the first keyframe arrives, then re-bases the branch timeline
// to that keyframe's PTS, signals that recording has started, and removes
// itself.
//
// Fix vs. previous revision: gst_pad_probe_info_get_buffer() can return NULL
// when the probe info does not carry a buffer; dereferencing it via
// GST_BUFFER_FLAG_IS_SET would crash.
GstPadProbeReturn
GstVideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    if (info == nullptr || user_data == nullptr) {
        qCCritical(VideoReceiverLog) << "Invalid arguments";
        return GST_PAD_PROBE_DROP;
    }

    GstBuffer* buf = gst_pad_probe_info_get_buffer(info);

    if (buf == nullptr) { // probe fired without a buffer - keep waiting for a keyframe
        return GST_PAD_PROBE_DROP;
    }

    if (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) { // wait for a keyframe
        return GST_PAD_PROBE_DROP;
    }

    // set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except these which are downstream!
    gst_pad_set_offset(pad, -static_cast<gint64>(buf->pts));

    GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);

    qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";

    // NOTE(review): recordingStarted() is emitted directly from the streaming
    // thread here, while other notifications go through _notificationHandler -
    // confirm this is intentional.
    pThis->recordingStarted();

    return GST_PAD_PROBE_REMOVE;
}