/****************************************************************************
 *
 *   (c) 2009-2016 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/


/**
 * @file
 *   @brief QGC Video Receiver
 *   @author Gus Grubba <mavlink@grubba.com>
 */

#include "VideoReceiver.h"
#include "SettingsManager.h"
#include "QGCApplication.h"
#include "VideoManager.h"

#include <QDebug>
#include <QUrl>
#include <QDir>
#include <QDateTime>
#include <QSysInfo>

QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

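// Note: kVideoExtensions and kVideoMuxes are index-aligned; the recordingFormat
// setting selects the muxer and its matching file extension by the same index.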
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _sink(NULL)
    , _tee(NULL)
    , _pipeline(NULL)
    , _pipelineStopRec(NULL)
    , _videoSink(NULL)
    , _socket(NULL)
    , _serverPresent(false)
#endif
{
#if defined(QGC_GST_STREAMING)
    _timer.setSingleShot(true);
    connect(&_timer, &QTimer::timeout, this, &VideoReceiver::_timeout);
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
#endif
}

VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    stop();
    if(_socket) {
        delete _socket;
    }
#endif
}

#if defined(QGC_GST_STREAMING)
void VideoReceiver::setVideoSink(GstElement* sink)
{
    if (_videoSink) {
        gst_object_unref(_videoSink);
        _videoSink = NULL;
    }
    if (sink) {
        _videoSink = sink;
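        // gst_object_ref_sink() takes ownership of the sink's (possibly floating)
        // reference so the video sink survives pipeline teardown in _shutdownPipeline().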
        gst_object_ref_sink(_videoSink);
    }
}
#endif

#if defined(QGC_GST_STREAMING)
static void newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name;
    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    GstElement* p_rtph264depay = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, p_rtph264depay, "sink") == false)
        qCritical() << "newPadCB : failed to link elements\n";
    g_free(name);
}
#endif

#if defined(QGC_GST_STREAMING)
void VideoReceiver::_connected()
{
    //-- Server showed up. Now we start the stream.
    _timer.stop();
    _socket->deleteLater();
    _socket = NULL;
    _serverPresent = true;
    start();
}
#endif

#if defined(QGC_GST_STREAMING)
void VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    _socket->deleteLater();
    _socket = NULL;
    //-- Try again in 5 seconds
    _timer.start(5000);
}
#endif

#if defined(QGC_GST_STREAMING)
void VideoReceiver::_timeout()
{
    //-- If socket is live, we got no connection nor a socket error
    if(_socket) {
        delete _socket;
        _socket = NULL;
    }
    //-- RTSP will try to connect to the server. If it cannot connect,
    //   it will simply give up and never try again. Instead, we keep
    //   attempting a connection on this timer. Once a connection is
    //   found to be working, only then we actually start the stream.
    QUrl url(_uri);
    _socket = new QTcpSocket;
    connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
    connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
    //qCDebug(VideoReceiverLog) << "Trying to connect to:" << url.host() << url.port();
    _socket->connectToHost(url.host(), url.port());
    _timer.start(5000);
}
#endif

// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
void VideoReceiver::start()
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "start()";

    if (_uri.isEmpty()) {
        qCritical() << "VideoReceiver::start() failed because URI is not specified";
        return;
    }
    if (_videoSink == NULL) {
        qCritical() << "VideoReceiver::start() failed because video sink is not set";
        return;
    }
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }

    _starting = true;

    bool isUdp = _uri.contains("udp://");

    //-- For RTSP, check to see if server is there first
    if(!_serverPresent && !isUdp) {
        _timer.start(100);
        return;
    }

    bool running = false;

    GstElement*     dataSource  = NULL;
    GstCaps*        caps        = NULL;
    GstElement*     demux       = NULL;
    GstElement*     parser      = NULL;
    GstElement*     queue       = NULL;
    GstElement*     decoder     = NULL;
    GstElement*     queue1      = NULL;

    do {
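        // Single-pass do/while(0): any setup failure below breaks out to the shared
        // error handling and cleanup after the loop.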
        if ((_pipeline = gst_pipeline_new("receiver")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
            break;
        }

        if(isUdp) {
            dataSource = gst_element_factory_make("udpsrc", "udp-source");
        } else {
            dataSource = gst_element_factory_make("rtspsrc", "rtsp-source");
        }

        if (!dataSource) {
            qCritical() << "VideoReceiver::start() failed. Error with data source for gst_element_factory_make()";
            break;
        }

        if(isUdp) {
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
            g_object_set(G_OBJECT(dataSource), "uri", qPrintable(_uri), "caps", caps, NULL);
        } else {
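            //-- rtspsrc tuning: "latency" is the jitterbuffer in milliseconds, "timeout"
            //   is in microseconds (5 s here) and "udp-reconnect" asks rtspsrc to retry
            //   if the UDP transport stalls (see the rtspsrc documentation for details).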
            g_object_set(G_OBJECT(dataSource), "location", qPrintable(_uri), "latency", 17, "udp-reconnect", 1, "timeout", static_cast<guint64>(5000000), NULL);
        }

        if ((demux = gst_element_factory_make("rtph264depay", "rtp-h264-depacketizer")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('rtph264depay')";
            break;
        }

        if(!isUdp) {
            g_signal_connect(dataSource, "pad-added", G_CALLBACK(newPadCB), demux);
        }

        if ((parser = gst_element_factory_make("h264parse", "h264-parser")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('h264parse')";
            break;
        }

        if((_tee = gst_element_factory_make("tee", NULL)) == NULL)  {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
            break;
        }

        if((queue = gst_element_factory_make("queue", NULL)) == NULL)  {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
            break;
        }

        if ((decoder = gst_element_factory_make("avdec_h264", "h264-decoder")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('avdec_h264')";
            break;
        }

        if ((queue1 = gst_element_factory_make("queue", NULL)) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue') [1]";
            break;
        }

        gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, NULL);

        if(isUdp) {
            // Link the pipeline in front of the tee
            if(!gst_element_link_many(dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, NULL)) {
                qCritical() << "Unable to link elements.";
                break;
            }
        } else {
            if(!gst_element_link_many(demux, parser, _tee, queue, decoder, _videoSink, NULL)) {
                qCritical() << "Unable to link elements.";
                break;
            }
        }

        dataSource = demux = parser = queue = decoder = NULL;

        GstBus* bus = NULL;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = NULL;
        }

        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (caps != NULL) {
        gst_caps_unref(caps);
        caps = NULL;
    }

    if (!running) {
        qCritical() << "VideoReceiver::start() failed";

        if (decoder != NULL) {
            gst_object_unref(decoder);
            decoder = NULL;
        }

        if (parser != NULL) {
            gst_object_unref(parser);
            parser = NULL;
        }

        if (demux != NULL) {
            gst_object_unref(demux);
            demux = NULL;
        }

        if (dataSource != NULL) {
            gst_object_unref(dataSource);
            dataSource = NULL;
        }

        if (_tee != NULL) {
            gst_object_unref(_tee);
            _tee = NULL;
        }

        if (queue != NULL) {
            gst_object_unref(queue);
            queue = NULL;
        }

        if (_pipeline != NULL) {
            gst_object_unref(_pipeline);
            _pipeline = NULL;
        }

        _running = false;
    } else {
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
    }
    _starting = false;
#endif
}

void VideoReceiver::stop()
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stop()";
    if(!_streaming) {
        _shutdownPipeline();
    } else if (_pipeline != NULL && !_stopping) {
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
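        // Blocks until the pipeline posts EOS or an error (GST_CLOCK_TIME_NONE = wait forever)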
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
        gst_object_unref(bus);
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
            qCritical() << "Error stopping pipeline!";
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
        gst_message_unref(message);
    }
#endif
}

void VideoReceiver::setUri(const QString & uri)
{
    _uri = uri;
}

#if defined(QGC_GST_STREAMING)
void VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    GstBus* bus = NULL;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = NULL;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    gst_bin_remove(GST_BIN(_pipeline), _videoSink);
    gst_object_unref(_pipeline);
    _pipeline = NULL;
    delete _sink;
    _sink = NULL;
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
#endif

#if defined(QGC_GST_STREAMING)
void VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    _shutdownPipeline();
}
#endif

#if defined(QGC_GST_STREAMING)
void VideoReceiver::_handleEOS() {
    if(_stopping) {
        _shutdownPipeline();
        qCDebug(VideoReceiverLog) << "Stopped";
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
        qCritical() << "VideoReceiver: Unexpected EOS!";
        _shutdownPipeline();
    }
}
#endif

#if defined(QGC_GST_STREAMING)
void VideoReceiver::_handleStateChanged() {
    _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
    qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
}
#endif

#if defined(QGC_GST_STREAMING)
gboolean VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != NULL && data != NULL);
    VideoReceiver* pThis = (VideoReceiver*)data;
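    // This callback runs on a GStreamer streaming thread; the signals emitted below are
    // queued by Qt, so the actual handlers run on the receiver's (Qt) thread.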

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCritical() << error->message;
        g_error_free(error);
        pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        pThis->msgStateChangedReceived();
        break;
    default:
        break;
    }

    return TRUE;
}
#endif

void VideoReceiver::_cleanupOldVideos()
{
    QString savePath = qgcApp()->toolbox()->settingsManager()->videoSettings()->videoSavePath()->rawValue().toString();
    QDir videoDir = QDir(savePath);
    videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
    videoDir.setSorting(QDir::Time);
    //-- All the movie extensions we support
    QStringList nameFilters;
    for(uint32_t i = 0; i < NUM_MUXES; i++) {
        nameFilters << QString("*.") + QString(kVideoExtensions[i]);
    }
    videoDir.setNameFilters(nameFilters);
    //-- get the list of videos stored
    QFileInfoList vidList = videoDir.entryInfoList();
    if(!vidList.isEmpty()) {
        uint64_t total   = 0;
        //-- Settings are stored using MB
        uint64_t maxSize = (qgcApp()->toolbox()->settingsManager()->videoSettings()->maxVideoSize()->rawValue().toUInt() * 1024 * 1024);
        //-- Compute total used storage
        for(int i = 0; i < vidList.size(); i++) {
            total += vidList[i].size();
        }
        //-- Remove old movies until max size is satisfied.
        while(total >= maxSize && !vidList.isEmpty()) {
            total -= vidList.last().size();
            qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
            QFile file (vidList.last().filePath());
            file.remove();
            vidList.removeLast();
        }
    }
}

// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   |
//                                   |    +--------------_sink-------------------+
//                                   |    |                                      |
//   we are adding these elements->  +->teepad-->queue-->matroskamux-->_filesink |
//                                        |                                      |
//                                        +--------------------------------------+
void VideoReceiver::startRecording(void)
{
#if defined(QGC_GST_STREAMING)

    qCDebug(VideoReceiverLog) << "startRecording()";
    // exit immediately if we are already recording
    if(_pipeline == NULL || _recording) {
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

    QString savePath = qgcApp()->toolbox()->settingsManager()->videoSettings()->videoSavePath()->rawValue().toString();
    if(savePath.isEmpty()) {
        qgcApp()->showMessage(tr("Unable to record video. Video save path must be specified in Settings."));
        return;
    }

    uint32_t muxIdx = qgcApp()->toolbox()->settingsManager()->videoSettings()->recordingFormat()->rawValue().toUInt();
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

    _sink           = new Sink();
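    // The tee hands out request pads named "src_%u"; keep the returned pad so it can be
    // released back to the tee in _detachRecordingBranch().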
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", NULL);
    _sink->parse    = gst_element_factory_make("h264parse", NULL);
    _sink->mux      = gst_element_factory_make(kVideoMuxes[muxIdx], NULL);
    _sink->filesink = gst_element_factory_make("filesink", NULL);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->mux || !_sink->filesink || !_sink->parse) {
        qCritical() << "VideoReceiver::startRecording() failed to make _sink elements";
        return;
    }

    QString videoFile;
    videoFile = savePath + "/" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") + "." + kVideoExtensions[muxIdx];

    g_object_set(G_OBJECT(_sink->filesink), "location", qPrintable(videoFile), NULL);
    qCDebug(VideoReceiverLog) << "New video file:" << videoFile;

    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->parse);
    gst_object_ref(_sink->mux);
    gst_object_ref(_sink->filesink);

    gst_bin_add_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    gst_element_sync_state_with_parent(_sink->queue);
    gst_element_sync_state_with_parent(_sink->parse);
    gst_element_sync_state_with_parent(_sink->mux);
    gst_element_sync_state_with_parent(_sink->filesink);

    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
#endif
}

void VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stopRecording()";
    // exit immediately if we are not recording
    if(_pipeline == NULL || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for the pad to go idle (no buffer in flight) before unlinking the recording branch
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, NULL);
#endif
}

// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recording elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_shutdownRecordingBranch()
{
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->queue);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->parse);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->mux);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->filesink);

    gst_element_set_state(_pipelineStopRec, GST_STATE_NULL);
    gst_object_unref(_pipelineStopRec);
    _pipelineStopRec = NULL;

    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);
    gst_element_set_state(_sink->parse,     GST_STATE_NULL);
    gst_element_set_state(_sink->mux,       GST_STATE_NULL);
    gst_element_set_state(_sink->queue,     GST_STATE_NULL);

    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->parse);
    gst_object_unref(_sink->mux);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = NULL;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording Stopped";
}
#endif

// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)

    // Also unlinks and unrefs
    gst_bin_remove_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);

    // Create temporary pipeline
    _pipelineStopRec = gst_pipeline_new("pipeStopRec");

    // Put our elements from the recording branch into the temporary pipeline
    gst_bin_add_many(GST_BIN(_pipelineStopRec), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    // Add handler for EOS event
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipelineStopRec));
    gst_bus_enable_sync_message_emission(bus);
    g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
    gst_object_unref(bus);

    if(gst_element_set_state(_pipelineStopRec, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        qCDebug(VideoReceiverLog) << "problem starting _pipelineStopRec";
    }

    // Send EOS at the beginning of the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording branch unlinked";
}
#endif

#if defined(QGC_GST_STREAMING)
GstPadProbeReturn VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    Q_ASSERT(info != NULL && user_data != NULL);
    VideoReceiver* pThis = (VideoReceiver*)user_data;
    // We will only act once
    if(g_atomic_int_compare_and_exchange(&pThis->_sink->removing, FALSE, TRUE))
        pThis->_detachRecordingBranch(info);

    return GST_PAD_PROBE_REMOVE;
}
#endif