VideoReceiver.cc 27.3 KB
Newer Older
1 2 3 4 5 6 7 8
/****************************************************************************
 *
 *   (c) 2009-2016 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/
Gus Grubba's avatar
Gus Grubba committed
9 10 11 12 13 14 15 16 17


/**
 * @file
 *   @brief QGC Video Receiver
 *   @author Gus Grubba <mavlink@grubba.com>
 */

#include "VideoReceiver.h"
18 19
#include "SettingsManager.h"
#include "QGCApplication.h"
20
#include "VideoManager.h"
21

Gus Grubba's avatar
Gus Grubba committed
22
#include <QDebug>
23
#include <QUrl>
24 25
#include <QDir>
#include <QDateTime>
26
#include <QSysInfo>
27

28 29
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

30 31
#if defined(QGC_GST_STREAMING)

32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47
//-- Supported video file extensions. Index-paired with kVideoMuxes: entry i
//   is the file extension produced by muxer i (see startRecording()).
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

//-- GStreamer muxer element names, selected by the recordingFormat() setting.
static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

//-- Both tables are indexed by the same muxIdx, so they must stay in sync.
static_assert(sizeof(kVideoExtensions) / sizeof(char*) == NUM_MUXES,
              "kVideoExtensions and kVideoMuxes must have the same number of entries");

48 49 50
#endif


Gus Grubba's avatar
Gus Grubba committed
51 52
//-----------------------------------------------------------------------------
// Construct a receiver: creates the video surface, wires the GStreamer bus
// message signals to their (queued-safe) handlers, and starts the 1 Hz frame
// watchdog timer used by _updateTimer().
VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _sink(NULL)
    , _tee(NULL)
    , _pipeline(NULL)
    , _pipelineStopRec(NULL)
    , _videoSink(NULL)
    , _socket(NULL)
    , _serverPresent(false)
#endif
    , _videoSurface(NULL)
    , _videoRunning(false)
    , _showFullScreen(false)
    , _enabled(true)
{
    _videoSurface  = new VideoSurface;
#if defined(QGC_GST_STREAMING)
    //-- Hold a reference to the surface's sink element (see _setVideoSink)
    _setVideoSink(_videoSurface->videoSink());
    //-- _timer drives the RTSP/TCP server-presence probe (see _timeout())
    _timer.setSingleShot(true);
    connect(&_timer, &QTimer::timeout, this, &VideoReceiver::_timeout);
    //-- Bus messages arrive via signals so handling happens on this thread
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
    //-- Once-a-second check for stream health / restart (see _updateTimer())
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

//-----------------------------------------------------------------------------
// Tear down the pipeline and release everything we own: the probe socket,
// our reference to the GStreamer video sink, and the video surface.
VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    stop();
    delete _socket;                 // delete on NULL is a no-op
    if (_videoSink != NULL) {
        gst_object_unref(_videoSink);
    }
#endif
    delete _videoSurface;           // delete on NULL is a no-op
}

100
#if defined(QGC_GST_STREAMING)
101 102
void
VideoReceiver::_setVideoSink(GstElement* sink)
{
    //-- Replace the current video sink, managing GStreamer reference counts:
    //   drop our reference to the old sink first...
    if (_videoSink) {
        gst_object_unref(_videoSink);
        _videoSink = NULL;
    }
    //   ...then take ownership of the new one. ref_sink() converts the
    //   element's floating reference into a real one, so the sink survives
    //   being removed from the pipeline on shutdown (see _shutdownPipeline).
    if (sink) {
        _videoSink = sink;
        gst_object_ref_sink(_videoSink);
    }
}
113
#endif
Gus Grubba's avatar
Gus Grubba committed
114

115 116 117 118 119 120 121 122 123
//-----------------------------------------------------------------------------
void
VideoReceiver::grabImage(QString imageFile)
{
    //-- Record the target file and notify listeners. The actual frame capture
    //   is not done here — presumably the consumer of imageFile/_videoSurface
    //   reacts to imageFileChanged and writes the next frame (verify caller).
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
124
#if defined(QGC_GST_STREAMING)
125 126
//-- "pad-added" callback: links a dynamically created source pad (from rtspsrc
//   or tsdemux) to the "sink" pad of the downstream element passed as `data`.
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name;
    name = gst_pad_get_name(pad);
    //g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    //-- Fix: gst_pad_get_pad_template_caps() returns a new reference which
    //   was previously leaked on every pad-added event.
    gst_caps_unref(p_caps);
    GstElement* sink = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, sink, "sink") == false)
        qCritical() << "newPadCB : failed to link elements\n";
    g_free(name);
}
140
#endif
141

142
//-----------------------------------------------------------------------------
143
#if defined(QGC_GST_STREAMING)
144 145
void
VideoReceiver::_connected()
146 147 148
{
    //-- Server showed up. Now we start the stream.
    _timer.stop();
149
    _socket->deleteLater();
150
    _socket = NULL;
Gus Grubba's avatar
Gus Grubba committed
151 152 153 154
    if(_enabled) {
        _serverPresent = true;
        start();
    }
155 156 157
}
#endif

158
//-----------------------------------------------------------------------------
159
#if defined(QGC_GST_STREAMING)
160 161
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    //-- Probe connection failed; dispose of the socket (deleteLater, since we
    //   are inside one of its signal handlers).
    _socket->deleteLater();
    _socket = NULL;
    if(!_enabled) {
        return;
    }
    //-- Try again in 5 seconds
    _timer.start(5000);
}
#endif

173
//-----------------------------------------------------------------------------
174
#if defined(QGC_GST_STREAMING)
175 176
void
VideoReceiver::_timeout()
177 178 179 180 181 182
{
    //-- If socket is live, we got no connection nor a socket error
    if(_socket) {
        delete _socket;
        _socket = NULL;
    }
Gus Grubba's avatar
Gus Grubba committed
183 184 185 186 187 188 189 190 191 192 193 194 195
    if(_enabled) {
        //-- RTSP will try to connect to the server. If it cannot connect,
        //   it will simply give up and never try again. Instead, we keep
        //   attempting a connection on this timer. Once a connection is
        //   found to be working, only then we actually start the stream.
        QUrl url(_uri);
        _socket = new QTcpSocket;
        _socket->setProxy(QNetworkProxy::NoProxy);
        connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
        connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
        _socket->connectToHost(url.host(), url.port());
        _timer.start(5000);
    }
196 197 198
}
#endif

199
//-----------------------------------------------------------------------------
200 201 202 203 204 205 206 207 208
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
209 210
void
VideoReceiver::start()
Gus Grubba's avatar
Gus Grubba committed
211
{
Gus Grubba's avatar
Gus Grubba committed
212 213
    _enabled = true;
    emit enabledChanged();
214
#if defined(QGC_GST_STREAMING)
215 216
    qCDebug(VideoReceiverLog) << "start()";

Gus Grubba's avatar
Gus Grubba committed
217 218 219 220 221 222 223 224
    if (_uri.isEmpty()) {
        qCritical() << "VideoReceiver::start() failed because URI is not specified";
        return;
    }
    if (_videoSink == NULL) {
        qCritical() << "VideoReceiver::start() failed because video sink is not set";
        return;
    }
225 226 227 228
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }
Gus Grubba's avatar
Gus Grubba committed
229

230
    _starting = true;
231

232 233
    bool isUdp  = _uri.contains("udp://");
    bool isRtsp = _uri.contains("rtsp://");
234
    bool isTCP  = _uri.contains("tcp://");
Gus Grubba's avatar
Gus Grubba committed
235

236 237
    //-- For RTSP and TCP, check to see if server is there first
    if(!_serverPresent && (isRtsp || isTCP)) {
238 239 240 241
        _timer.start(100);
        return;
    }

Gus Grubba's avatar
Gus Grubba committed
242
    bool running = false;
243
    bool pipelineUp = false;
Gus Grubba's avatar
Gus Grubba committed
244 245 246 247 248

    GstElement*     dataSource  = NULL;
    GstCaps*        caps        = NULL;
    GstElement*     demux       = NULL;
    GstElement*     parser      = NULL;
249
    GstElement*     queue       = NULL;
Gus Grubba's avatar
Gus Grubba committed
250
    GstElement*     decoder     = NULL;
251
    GstElement*     queue1      = NULL;
252

Gus Grubba's avatar
Gus Grubba committed
253 254
    do {
        if ((_pipeline = gst_pipeline_new("receiver")) == NULL) {
255
            qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
Gus Grubba's avatar
Gus Grubba committed
256 257 258
            break;
        }

259 260
        if(isUdp) {
            dataSource = gst_element_factory_make("udpsrc", "udp-source");
261 262
        } else if(isTCP) {
            dataSource = gst_element_factory_make("tcpclientsrc", "tcpclient-source");
263 264
        } else {
            dataSource = gst_element_factory_make("rtspsrc", "rtsp-source");
Gus Grubba's avatar
Gus Grubba committed
265 266
        }

267 268
        if (!dataSource) {
            qCritical() << "VideoReceiver::start() failed. Error with data source for gst_element_factory_make()";
Gus Grubba's avatar
Gus Grubba committed
269 270 271
            break;
        }

272 273 274 275 276 277
        if(isUdp) {
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
            g_object_set(G_OBJECT(dataSource), "uri", qPrintable(_uri), "caps", caps, NULL);
278 279 280
        } else if(isTCP) {
            QUrl url(_uri);
            g_object_set(G_OBJECT(dataSource), "host", qPrintable(url.host()), "port", url.port(), NULL );
281
        } else {
282
            g_object_set(G_OBJECT(dataSource), "location", qPrintable(_uri), "latency", 17, "udp-reconnect", 1, "timeout", static_cast<guint64>(5000000), NULL);
283
        }
Gus Grubba's avatar
Gus Grubba committed
284

285 286 287 288 289 290 291 292 293 294 295
        // Currently, we expect H264 when using anything except for TCP.  Long term we may want this to be settable
        if (isTCP) {
            if ((demux = gst_element_factory_make("tsdemux", "mpeg2-ts-demuxer")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tsdemux')";
                break;
            }
        } else {
            if ((demux = gst_element_factory_make("rtph264depay", "rtp-h264-depacketizer")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('rtph264depay')";
                break;
            }
296 297
        }

Gus Grubba's avatar
Gus Grubba committed
298
        if ((parser = gst_element_factory_make("h264parse", "h264-parser")) == NULL) {
299
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('h264parse')";
Gus Grubba's avatar
Gus Grubba committed
300 301 302
            break;
        }

303
        if((_tee = gst_element_factory_make("tee", NULL)) == NULL)  {
304 305 306
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
            break;
        }
Gus Grubba's avatar
Gus Grubba committed
307

308
        if((queue = gst_element_factory_make("queue", NULL)) == NULL)  {
309 310
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
311
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
312 313
            break;
        }
314

315 316 317 318 319 320 321 322 323 324 325
        if ((decoder = gst_element_factory_make("avdec_h264", "h264-decoder")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('avdec_h264')";
            break;
        }

        if ((queue1 = gst_element_factory_make("queue", NULL)) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue') [1]";
            break;
        }

        gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, NULL);
326
        pipelineUp = true;
327

328 329
        if(isUdp) {
            // Link the pipeline in front of the tee
330
            if(!gst_element_link_many(dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, NULL)) {
331 332 333 334 335 336 337 338 339 340
                qCritical() << "Unable to link UDP elements.";
                break;
            }
        } else if (isTCP) {
            if(!gst_element_link(dataSource, demux)) {
                qCritical() << "Unable to link TCP dataSource to Demux.";
                break;
            }
            if(!gst_element_link_many(parser, _tee, queue, decoder, queue1, _videoSink, NULL)) {
                qCritical() << "Unable to link TCP pipline to parser.";
341 342
                break;
            }
343
            g_signal_connect(demux, "pad-added", G_CALLBACK(newPadCB), parser);
344
        } else {
345
            g_signal_connect(dataSource, "pad-added", G_CALLBACK(newPadCB), demux);
346
            if(!gst_element_link_many(demux, parser, _tee, queue, decoder, _videoSink, NULL)) {
347
                qCritical() << "Unable to link RTSP elements.";
348 349
                break;
            }
350 351
        }

352
        dataSource = demux = parser = queue = decoder = queue1 = NULL;
Gus Grubba's avatar
Gus Grubba committed
353

354
        GstBus* bus = NULL;
Gus Grubba's avatar
Gus Grubba committed
355

356 357 358 359 360 361
        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = NULL;
        }
Gus Grubba's avatar
Gus Grubba committed
362 363 364 365 366 367 368 369 370 371 372 373 374

        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (caps != NULL) {
        gst_caps_unref(caps);
        caps = NULL;
    }

    if (!running) {
        qCritical() << "VideoReceiver::start() failed";

375 376 377 378
        // In newer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != NULL) {
            gst_object_unref(_pipeline);
            _pipeline = NULL;
Gus Grubba's avatar
Gus Grubba committed
379 380
        }

381 382 383 384 385 386
        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
            if (decoder != NULL) {
                gst_object_unref(decoder);
                decoder = NULL;
            }
Gus Grubba's avatar
Gus Grubba committed
387

388 389 390 391
            if (parser != NULL) {
                gst_object_unref(parser);
                parser = NULL;
            }
Gus Grubba's avatar
Gus Grubba committed
392

393 394 395 396
            if (demux != NULL) {
                gst_object_unref(demux);
                demux = NULL;
            }
Gus Grubba's avatar
Gus Grubba committed
397

398 399 400 401
            if (dataSource != NULL) {
                gst_object_unref(dataSource);
                dataSource = NULL;
            }
402

403 404 405 406
            if (_tee != NULL) {
                gst_object_unref(_tee);
                dataSource = NULL;
            }
407

408 409 410 411
            if (queue != NULL) {
                gst_object_unref(queue);
                dataSource = NULL;
            }
Gus Grubba's avatar
Gus Grubba committed
412
        }
413 414 415 416 417

        _running = false;
    } else {
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
Gus Grubba's avatar
Gus Grubba committed
418
    }
419
    _starting = false;
420
#endif
Gus Grubba's avatar
Gus Grubba committed
421 422
}

423 424 425
//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
Gus Grubba's avatar
Gus Grubba committed
426
{
Gus Grubba's avatar
Gus Grubba committed
427 428
    _enabled = false;
    emit enabledChanged();
429
#if defined(QGC_GST_STREAMING)
430
    qCDebug(VideoReceiverLog) << "stop()";
431 432 433
    if(!_streaming) {
        _shutdownPipeline();
    } else if (_pipeline != NULL && !_stopping) {
434 435 436 437
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
438
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
439
        gst_object_unref(bus);
440 441 442 443 444 445
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
            qCritical() << "Error stopping pipeline!";
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
446
        gst_message_unref(message);
Gus Grubba's avatar
Gus Grubba committed
447
    }
448
#endif
Gus Grubba's avatar
Gus Grubba committed
449 450
}

451 452 453
//-----------------------------------------------------------------------------
//-- Set the URI of the stream to receive (udp://, tcp:// or rtsp://).
//   Only stored here; it is read by start(), so it takes effect on the next
//   start() and does not restart an already-running pipeline.
void
VideoReceiver::setUri(const QString & uri)
{
    _uri = uri;
}

458
//-----------------------------------------------------------------------------
459
#if defined(QGC_GST_STREAMING)
460 461
//-- Hard-stop and free the main pipeline, resetting all state flags.
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    //-- Stop listening for bus messages before tearing anything down
    GstBus* bus = NULL;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = NULL;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    //-- Pull _videoSink out of the bin so unref'ing the pipeline does not
    //   destroy it; we hold our own reference (see _setVideoSink) for reuse.
    gst_bin_remove(GST_BIN(_pipeline), _videoSink);
    gst_object_unref(_pipeline);
    _pipeline = NULL;
    delete _sink;
    _sink = NULL;
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
485
#endif
486

487
//-----------------------------------------------------------------------------
488
#if defined(QGC_GST_STREAMING)
489 490
//-- Invoked via the msgErrorReceived signal (see _onBusMessage): any
//   GST_MESSAGE_ERROR on the bus tears the whole pipeline down.
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    _shutdownPipeline();
}
#endif

496
//-----------------------------------------------------------------------------
497
#if defined(QGC_GST_STREAMING)
498 499
void
VideoReceiver::_handleEOS() {
500 501
    if(_stopping) {
        _shutdownPipeline();
502
        qCDebug(VideoReceiverLog) << "Stopped";
503 504 505
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
506
        qWarning() << "VideoReceiver: Unexpected EOS!";
507
        _shutdownPipeline();
Gus Grubba's avatar
Gus Grubba committed
508 509
    }
}
510
#endif
Gus Grubba's avatar
Gus Grubba committed
511

512
//-----------------------------------------------------------------------------
513
#if defined(QGC_GST_STREAMING)
514 515 516 517 518 519
//-- Invoked via the msgStateChangedReceived signal. Tracks whether the
//   pipeline has actually reached PLAYING; _updateTimer() uses _streaming to
//   drive the videoRunning state and restarts.
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif

523
//-----------------------------------------------------------------------------
524
#if defined(QGC_GST_STREAMING)
525 526
//-- GStreamer bus "sync-message" callback (runs on a streaming thread).
//   Translates bus messages into Qt signals so the real handling happens on
//   the receiver's own thread (see the connects in the constructor).
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != NULL && data != NULL);
    //-- static_cast instead of C-style cast (data is the `this` passed at connect time)
    VideoReceiver* pThis = static_cast<VideoReceiver*>(data);

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCritical() << error->message;
        g_error_free(error);
        emit pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        emit pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        emit pThis->msgStateChangedReceived();
        break;
    default:
        break;
    }

    return TRUE;
}
555
#endif
556

557
//-----------------------------------------------------------------------------
558
#if defined(QGC_GST_STREAMING)
559 560
void
VideoReceiver::_cleanupOldVideos()
561
{
562 563 564 565 566 567 568 569 570 571
    //-- Only perform cleanup if storage limit is enabled
    if(qgcApp()->toolbox()->settingsManager()->videoSettings()->enableStorageLimit()->rawValue().toBool()) {
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        QDir videoDir = QDir(savePath);
        videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
        videoDir.setSorting(QDir::Time);
        //-- All the movie extensions we support
        QStringList nameFilters;
        for(uint32_t i = 0; i < NUM_MUXES; i++) {
            nameFilters << QString("*.") + QString(kVideoExtensions[i]);
572
        }
573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591
        videoDir.setNameFilters(nameFilters);
        //-- get the list of videos stored
        QFileInfoList vidList = videoDir.entryInfoList();
        if(!vidList.isEmpty()) {
            uint64_t total   = 0;
            //-- Settings are stored using MB
            uint64_t maxSize = (qgcApp()->toolbox()->settingsManager()->videoSettings()->maxVideoSize()->rawValue().toUInt() * 1024 * 1024);
            //-- Compute total used storage
            for(int i = 0; i < vidList.size(); i++) {
                total += vidList[i].size();
            }
            //-- Remove old movies until max size is satisfied.
            while(total >= maxSize && !vidList.isEmpty()) {
                total -= vidList.last().size();
                qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
                QFile file (vidList.last().filePath());
                file.remove();
                vidList.removeLast();
            }
592 593 594
        }
    }
}
595
#endif
596

597
//-----------------------------------------------------------------------------
598 599 600 601 602 603 604 605 606 607 608
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   |
//                                   |    +--------------_sink-------------------+
//                                   |    |                                      |
//   we are adding these elements->  +->teepad-->queue-->matroskamux-->_filesink |
//                                        |                                      |
//                                        +--------------------------------------+
609 610
void
VideoReceiver::startRecording(void)
{
#if defined(QGC_GST_STREAMING)

    qCDebug(VideoReceiverLog) << "startRecording()";
    // exit immediately if we are already recording
    if(_pipeline == NULL || _recording) {
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

    QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
    if(savePath.isEmpty()) {
        qgcApp()->showMessage(tr("Unabled to record video. Video save path must be specified in Settings."));
        return;
    }

    uint32_t muxIdx = qgcApp()->toolbox()->settingsManager()->videoSettings()->recordingFormat()->rawValue().toUInt();
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

    //-- Build the recording branch: tee request pad -> queue -> parse -> mux -> filesink
    _sink           = new Sink();
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", NULL);
    _sink->parse    = gst_element_factory_make("h264parse", NULL);
    _sink->mux      = gst_element_factory_make(kVideoMuxes[muxIdx], NULL);
    _sink->filesink = gst_element_factory_make("filesink", NULL);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->mux || !_sink->filesink || !_sink->parse) {
        qCritical() << "VideoReceiver::startRecording() failed to make _sink elements";
        //-- Fix: previously this returned with a half-initialized _sink left
        //   set (later dereferenced by _handleEOS/stop) and leaked whatever
        //   elements and tee request pad had been created.
        if(_sink->teepad) {
            gst_element_release_request_pad(_tee, _sink->teepad);
            gst_object_unref(_sink->teepad);
        }
        if(_sink->queue)    gst_object_unref(_sink->queue);
        if(_sink->parse)    gst_object_unref(_sink->parse);
        if(_sink->mux)      gst_object_unref(_sink->mux);
        if(_sink->filesink) gst_object_unref(_sink->filesink);
        delete _sink;
        _sink = NULL;
        return;
    }

    QString videoFile;
    videoFile = savePath + "/" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") + "." + kVideoExtensions[muxIdx];

    g_object_set(G_OBJECT(_sink->filesink), "location", qPrintable(videoFile), NULL);
    qCDebug(VideoReceiverLog) << "New video file:" << videoFile;

    //-- Keep our own references: the elements are later pulled back out of the
    //   bin(s) during the stop-recording sequence (see _detachRecordingBranch)
    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->parse);
    gst_object_ref(_sink->mux);
    gst_object_ref(_sink->filesink);

    gst_bin_add_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    gst_element_sync_state_with_parent(_sink->queue);
    gst_element_sync_state_with_parent(_sink->parse);
    gst_element_sync_state_with_parent(_sink->mux);
    gst_element_sync_state_with_parent(_sink->filesink);

    //-- Hook the branch onto the tee
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
#endif
}

678 679 680
//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stopRecording()";
    // exit immediately if we are not recording
    if(_pipeline == NULL || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for data block before unlinking
    //-- The IDLE probe fires when the tee's request pad is not mid-buffer; the
    //   actual detach happens in _unlinkCallBack -> _detachRecordingBranch.
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, NULL);
#endif
}

694
//-----------------------------------------------------------------------------
695 696 697 698 699 700
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recoring elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
701 702
void
VideoReceiver::_shutdownRecordingBranch()
{
    //-- The temporary pipeline has drained (EOS seen on its bus); pull the
    //   recording elements back out of it so they are not destroyed with it.
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->queue);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->parse);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->mux);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->filesink);

    gst_element_set_state(_pipelineStopRec, GST_STATE_NULL);
    gst_object_unref(_pipelineStopRec);
    _pipelineStopRec = NULL;

    //-- Shut the elements down (downstream first) ...
    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);
    gst_element_set_state(_sink->parse,     GST_STATE_NULL);
    gst_element_set_state(_sink->mux,       GST_STATE_NULL);
    gst_element_set_state(_sink->queue,     GST_STATE_NULL);

    //-- ... then drop the references taken in startRecording()
    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->parse);
    gst_object_unref(_sink->mux);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = NULL;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording Stopped";
}
#endif

732
//-----------------------------------------------------------------------------
733 734 735 736 737
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
738 739
void
VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)

    //-- Detach the recording elements from the live pipeline.
    // Also unlinks and unrefs
    gst_bin_remove_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);

    // Create temporary pipeline
    _pipelineStopRec = gst_pipeline_new("pipeStopRec");

    // Put our elements from the recording branch into the temporary pipeline
    gst_bin_add_many(GST_BIN(_pipelineStopRec), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, NULL);

    // Add handler for EOS event
    //-- Same bus callback as the main pipeline; EOS here ends up in
    //   _handleEOS, which calls _shutdownRecordingBranch().
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipelineStopRec));
    gst_bus_enable_sync_message_emission(bus);
    g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
    gst_object_unref(bus);

    if(gst_element_set_state(_pipelineStopRec, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        qCDebug(VideoReceiverLog) << "problem starting _pipelineStopRec";
    }

    // Send EOS at the beginning of the pipeline
    //-- This flushes the muxer so the video file gets properly finalized
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording branch unlinked";
}
#endif

775
//-----------------------------------------------------------------------------
776
#if defined(QGC_GST_STREAMING)
777 778
//-- IDLE pad probe installed by stopRecording(). Runs on a streaming thread
//   when the tee pad is quiescent; hands off to _detachRecordingBranch().
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != NULL && user_data != NULL) {
        //-- static_cast instead of C-style cast; also guard _sink in case the
        //   recording branch was already torn down before the probe fired.
        VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);
        // We will only act once
        if(pThis->_sink != NULL && g_atomic_int_compare_and_exchange(&pThis->_sink->removing, FALSE, TRUE)) {
            pThis->_detachRecordingBranch(info);
        }
    }
    return GST_PAD_PROBE_REMOVE;
}
#endif
791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 813

//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// 1 Hz watchdog (started in the constructor): maintains _videoRunning from the
// streaming state, stops the stream when no frame has arrived within the RTSP
// timeout, and restarts it when it should be running but is not.
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
    if(_videoSurface) {
        //-- Don't interfere while a start/stop transition is in flight
        if(stopping() || starting()) {
            return;
        }
        if(streaming()) {
            if(!_videoRunning) {
                _videoSurface->setLastFrame(0);
                _videoRunning = true;
                emit videoRunningChanged();
            }
        } else {
            if(_videoRunning) {
                _videoRunning = false;
                emit videoRunningChanged();
            }
        }
        if(_videoRunning) {
            uint32_t timeout = 1;
            if(qgcApp()->toolbox() && qgcApp()->toolbox()->settingsManager()) {
                timeout = qgcApp()->toolbox()->settingsManager()->videoSettings()->rtspTimeout()->rawValue().toUInt();
            }
            time_t elapsed = 0;
            time_t lastFrame = _videoSurface->lastFrame();
            if(lastFrame != 0) {
                elapsed = time(0) - _videoSurface->lastFrame();
            }
            //-- Fix: dropped the redundant `&& _videoSurface` — this branch is
            //   already inside the `if(_videoSurface)` guard above.
            if(elapsed > (time_t)timeout) {
                //-- No frames within the timeout window: shut down so the
                //   branch below can attempt a clean restart.
                stop();
            }
        } else {
            if(!running() && !_uri.isEmpty() && _enabled) {
                start();
            }
        }
    }
#endif
}