VideoReceiver.cc 36.6 KB
Newer Older
1 2
/****************************************************************************
 *
Gus Grubba's avatar
Gus Grubba committed
3
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
4 5 6 7 8
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/
Gus Grubba's avatar
Gus Grubba committed
9 10 11 12 13


/**
 * @file
 *   @brief QGC Video Receiver
Gus Grubba's avatar
Gus Grubba committed
14
 *   @author Gus Grubba <gus@auterion.com>
Gus Grubba's avatar
Gus Grubba committed
15 16 17
 */

#include "VideoReceiver.h"
18 19
#include "SettingsManager.h"
#include "QGCApplication.h"
20
#include "VideoManager.h"
21 22 23
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif
Gus Grubba's avatar
Gus Grubba committed
24
#include <QDebug>
25
#include <QUrl>
26 27
#include <QDir>
#include <QDateTime>
28
#include <QSysInfo>
29

30 31
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

32 33
#if defined(QGC_GST_STREAMING)

34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
// Supported recording file extensions. NOTE: this table is index-paired with
// kVideoMuxes below — entry i's extension is produced by entry i's muxer, and
// the recording-format setting stores the shared index. Keep both tables the
// same length and in the same order.
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

// GStreamer muxer element names, index-paired with kVideoExtensions (see above).
static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

// Number of entries in kVideoMuxes (and, by the pairing invariant, kVideoExtensions).
#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

50 51 52
#endif


Gus Grubba's avatar
Gus Grubba committed
53 54
VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    //-- Pipeline state flags
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _stop(true)
    //-- GStreamer objects (created lazily in start()/startRecording())
    , _sink(nullptr)
    , _tee(nullptr)
    , _pipeline(nullptr)
    , _pipelineStopRec(nullptr)
    , _videoSink(nullptr)
    //-- Frame-watchdog bookkeeping
    , _lastFrameId(G_MAXUINT64)
    , _lastFrameTime(0)
    , _restart_time_ms(1389)
    //-- TCP "is the server there?" probe
    , _socket(nullptr)
    , _serverPresent(false)
    , _tcpTestInterval_ms(5000)
    , _udpReconnect_us(5000000)
#endif
    , _videoRunning(false)
    , _showFullScreen(false)
    , _videoSettings(nullptr)
{
    _videoSettings = qgcApp()->toolbox()->settingsManager()->videoSettings();
#if defined(QGC_GST_STREAMING)
    //-- Default to software H.264 until the URI tells us otherwise (see start()).
    setVideoDecoder(H264_SW);
    //-- One-shot timers: stream restart after errors, and the TCP server probe.
    _restart_timer.setSingleShot(true);
    connect(&_restart_timer, &QTimer::timeout, this, &VideoReceiver::_restart_timeout);
    _tcp_timer.setSingleShot(true);
    connect(&_tcp_timer, &QTimer::timeout, this, &VideoReceiver::_tcp_timeout);
    //-- Bus messages are re-emitted as signals so they are handled on this thread.
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
    //-- Poll once a second to detect a stalled video feed.
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    //-- Tear the pipeline down first, then drop our reference to the sink.
    stop();
    setVideoSink(nullptr);
#endif
}

102 103 104 105 106 107 108 109 110
//-----------------------------------------------------------------------------
// Request a still-frame grab: remember the destination path and notify
// listeners. NOTE(review): the actual capture appears to happen elsewhere in
// response to imageFileChanged() (likely at the video sink) — confirm.
void
VideoReceiver::grabImage(QString imageFile)
{
    _imageFile = imageFile;     // where the next grabbed frame should be written
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
111
#if defined(QGC_GST_STREAMING)
112 113
// Dynamic-pad callback: when `element` exposes a new source pad, link it to the
// sink pad of the element passed in `data`.
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    //g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    // FIX: gst_pad_get_pad_template_caps() returns a new reference (transfer
    // full); the original code leaked it on every pad-added event.
    gst_caps_unref(p_caps);
    GstElement* sink = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, sink, "sink") == false)
        qCritical() << "newPadCB : failed to link elements\n";
    g_free(name);
}

127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197
// decodebin autoplug helper: answer a CAPS query with whatever the glupload
// element (passed via `data`) can accept on its sink pad.
static gboolean
autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glupload = (GstElement* )data;

    GstPad* sinkpad = gst_element_get_static_pad(glupload, "sink");
    if (!sinkpad) {
        qCritical() << "autoplugQueryCaps(): No sink pad found";
        return FALSE;
    }

    //-- Intersect the sink's caps with the filter carried by the query.
    GstCaps* filter = nullptr;
    gst_query_parse_caps(query, &filter);
    GstCaps* sinkcaps = gst_pad_query_caps(sinkpad, filter);
    gst_query_set_caps_result(query, sinkcaps);

    //-- Succeed only if the resulting caps are non-empty.
    const gboolean compatible = !gst_caps_is_empty(sinkcaps);

    gst_caps_unref(sinkcaps);
    gst_object_unref(sinkpad);

    return compatible;
}

// decodebin autoplug helper: forward a CONTEXT query to the sink pad of the
// element passed via `data`.
static gboolean
autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* glsink = (GstElement* )data;

    GstPad* sinkpad = gst_element_get_static_pad(glsink, "sink");
    if (!sinkpad){
        qCritical() << "autoplugQueryContext(): No sink pad found";
        return FALSE;
    }

    const gboolean handled = gst_pad_query(sinkpad, query);
    gst_object_unref(sinkpad);

    return handled;
}

// decodebin "autoplug-query" dispatcher: route CAPS and CONTEXT queries to the
// dedicated helpers above; everything else is unhandled.
static gboolean
autoplugQueryCB(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:
        return autoplugQueryCaps(bin, pad, element, query, data);
    case GST_QUERY_CONTEXT:
        return autoplugQueryContext(bin, pad, element, query, data);
    default:
        return FALSE;
    }
}

198 199
//-----------------------------------------------------------------------------
void
200
VideoReceiver::_restart_timeout()
201
{
202
    qgcApp()->toolbox()->videoManager()->restartVideo();
203
}
204
#endif
205

206 207 208 209 210 211
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_tcp_timeout()
{
    //-- If socket is live, we got no connection nor a socket error
212 213 214
    delete _socket;
    _socket = nullptr;

215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        //-- RTSP will try to connect to the server. If it cannot connect,
        //   it will simply give up and never try again. Instead, we keep
        //   attempting a connection on this timer. Once a connection is
        //   found to be working, only then we actually start the stream.
        QUrl url(_uri);
        //-- If RTSP and no port is defined, set default RTSP port (554)
        if(_uri.contains("rtsp://") && url.port() <= 0) {
            url.setPort(554);
        }
        _socket = new QTcpSocket;
        QNetworkProxy tempProxy;
        tempProxy.setType(QNetworkProxy::DefaultProxy);
        _socket->setProxy(tempProxy);
        connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
        connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
        _socket->connectToHost(url.host(), static_cast<uint16_t>(url.port()));
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_connected()
{
    //-- Server showed up: stop probing and discard the probe socket.
    _tcp_timer.stop();
    _socket->deleteLater();
    _socket = nullptr;
    if(!_videoSettings->streamEnabled()->rawValue().toBool()) {
        return;
    }
    //-- Now that we know the server is there, actually start the stream.
    _serverPresent = true;
    start();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    //-- Probe failed: drop the socket and, if streaming is still wanted,
    //   schedule another probe attempt.
    _socket->deleteLater();
    _socket = nullptr;
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

268
//-----------------------------------------------------------------------------
269 270 271 272 273 274 275 276
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
277 278
void
VideoReceiver::start()
Gus Grubba's avatar
Gus Grubba committed
279
{
280 281 282 283
    if (_uri.isEmpty()) {
        return;
    }
    qCDebug(VideoReceiverLog) << "start():" << _uri;
Gus Grubba's avatar
Gus Grubba committed
284
    if(qgcApp()->runningUnitTests()) {
285 286
        return;
    }
287 288
    if(!_videoSettings->streamEnabled()->rawValue().toBool() ||
       !_videoSettings->streamConfigured()) {
289 290 291
        qCDebug(VideoReceiverLog) << "start() but not enabled/configured";
        return;
    }
292

293
#if defined(QGC_GST_STREAMING)
294
    _stop = false;
295

296
#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
297
    //-- Taisync on iOS or Android sends a raw h.264 stream
298 299 300 301
    bool isTaisyncUSB = qgcApp()->toolbox()->videoManager()->isTaisync();
#else
    bool isTaisyncUSB = false;
#endif
Gus Grubba's avatar
Gus Grubba committed
302
    bool isUdp264   = _uri.contains("udp://")  && !isTaisyncUSB;
303
    bool isRtsp     = _uri.contains("rtsp://") && !isTaisyncUSB;
Gus Grubba's avatar
Gus Grubba committed
304
    bool isUdp265   = _uri.contains("udp265://")  && !isTaisyncUSB;
305 306
    bool isTCP      = _uri.contains("tcp://")  && !isTaisyncUSB;
    bool isMPEGTS   = _uri.contains("mpegts://")  && !isTaisyncUSB;
307 308

    if (!isTaisyncUSB && _uri.isEmpty()) {
Gus Grubba's avatar
Gus Grubba committed
309 310 311
        qCritical() << "VideoReceiver::start() failed because URI is not specified";
        return;
    }
Gus Grubba's avatar
Gus Grubba committed
312
    if (_videoSink == nullptr) {
Gus Grubba's avatar
Gus Grubba committed
313 314 315
        qCritical() << "VideoReceiver::start() failed because video sink is not set";
        return;
    }
316 317 318 319
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }
320 321 322 323 324
    if (isUdp264) {
        setVideoDecoder(H264_HW);
    } else if (isUdp265) {
        setVideoDecoder(H265_HW);
    }
Gus Grubba's avatar
Gus Grubba committed
325

326
    _starting = true;
327

328 329 330 331 332 333
    //-- For RTSP and TCP, check to see if server is there first
    if(!_serverPresent && (isRtsp || isTCP)) {
        _tcp_timer.start(100);
        return;
    }

334 335 336
    _lastFrameId = G_MAXUINT64;
    _lastFrameTime = 0;

337
    bool running    = false;
338
    bool pipelineUp = false;
Gus Grubba's avatar
Gus Grubba committed
339

Gus Grubba's avatar
Gus Grubba committed
340 341 342 343 344 345
    GstElement*     dataSource  = nullptr;
    GstCaps*        caps        = nullptr;
    GstElement*     demux       = nullptr;
    GstElement*     parser      = nullptr;
    GstElement*     queue       = nullptr;
    GstElement*     decoder     = nullptr;
346

Gus Grubba's avatar
Gus Grubba committed
347
    do {
Gus Grubba's avatar
Gus Grubba committed
348
        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
349
            qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
Gus Grubba's avatar
Gus Grubba committed
350 351 352
            break;
        }

Gus Grubba's avatar
Gus Grubba committed
353
        if(isUdp264 || isUdp265 || isMPEGTS || isTaisyncUSB) {
354
            dataSource = gst_element_factory_make("udpsrc", "udp-source");
355 356
        } else if(isTCP) {
            dataSource = gst_element_factory_make("tcpclientsrc", "tcpclient-source");
357 358
        } else {
            dataSource = gst_element_factory_make("rtspsrc", "rtsp-source");
Gus Grubba's avatar
Gus Grubba committed
359 360
        }

361 362
        if (!dataSource) {
            qCritical() << "VideoReceiver::start() failed. Error with data source for gst_element_factory_make()";
Gus Grubba's avatar
Gus Grubba committed
363 364 365
            break;
        }

Gus Grubba's avatar
Gus Grubba committed
366
        if(isUdp264) {
Gus Grubba's avatar
Gus Grubba committed
367
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
368 369 370
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
371
            g_object_set(static_cast<gpointer>(dataSource), "uri", qPrintable(_uri), "caps", caps, nullptr);
Gus Grubba's avatar
Gus Grubba committed
372
        } else if(isUdp265) {
373 374 375 376 377
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
            g_object_set(static_cast<gpointer>(dataSource), "uri", qPrintable(_uri.replace("udp265", "udp")), "caps", caps, nullptr);
378
#if  defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
379 380 381 382 383
        } else if(isTaisyncUSB) {
            QString uri = QString("0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
            qCDebug(VideoReceiverLog) << "Taisync URI:" << uri;
            g_object_set(static_cast<gpointer>(dataSource), "port", TAISYNC_VIDEO_UDP_PORT, nullptr);
#endif
384 385
        } else if(isTCP) {
            QUrl url(_uri);
386
            g_object_set(static_cast<gpointer>(dataSource), "host", qPrintable(url.host()), "port", url.port(), nullptr );
387 388 389
        } else if(isMPEGTS) {
            QUrl url(_uri);
            g_object_set(static_cast<gpointer>(dataSource), "port", url.port(), nullptr);
390
        } else {
391
            g_object_set(static_cast<gpointer>(dataSource), "location", qPrintable(_uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
392
        }
Gus Grubba's avatar
Gus Grubba committed
393

394 395
        if (isTCP || isMPEGTS) {
            if ((demux = gst_element_factory_make("tsdemux", "mpeg-ts-demuxer")) == nullptr) {
396 397 398 399
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tsdemux')";
                break;
            }
        } else {
400
            if(!isTaisyncUSB) {
401 402
                if ((demux = gst_element_factory_make(_depayName, "rtp-depacketizer")) == nullptr) {
                   qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('" << _depayName << "')";
403 404
                    break;
                }
405
            }
406 407
        }

408 409
        if ((parser = gst_element_factory_make(_parserName, "parser")) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('" << _parserName << "')";
Gus Grubba's avatar
Gus Grubba committed
410 411 412
            break;
        }

Gus Grubba's avatar
Gus Grubba committed
413
        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
414 415 416
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
            break;
        }
Gus Grubba's avatar
Gus Grubba committed
417

Gus Grubba's avatar
Gus Grubba committed
418
        if((queue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
419 420
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
421
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
422 423
            break;
        }
424

425 426
        if ((decoder = gst_element_factory_make("decodebin", "decoder")) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('decodebin')";
427 428 429
            break;
        }

430
        if(isTaisyncUSB) {
431
            gst_bin_add_many(GST_BIN(_pipeline), dataSource, parser, _tee, queue, decoder, _videoSink, nullptr);
432
        } else {
433
            gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue, decoder, _videoSink, nullptr);
434
        }
435
        pipelineUp = true;
436

Gus Grubba's avatar
Gus Grubba committed
437
        if(isUdp264 || isUdp265) {
438
            // Link the pipeline in front of the tee
439
            if(!gst_element_link_many(dataSource, demux, parser, _tee, queue, decoder, nullptr)) {
440 441 442
                qCritical() << "Unable to link UDP elements.";
                break;
            }
443 444
        } else if(isTaisyncUSB) {
            // Link the pipeline in front of the tee
445
            if(!gst_element_link_many(dataSource, parser, _tee, queue, decoder, nullptr)) {
446 447 448
                qCritical() << "Unable to link Taisync USB elements.";
                break;
            }
449
        } else if (isTCP || isMPEGTS) {
450
            if(!gst_element_link(dataSource, demux)) {
451
                qCritical() << "Unable to link TCP/MPEG-TS dataSource to Demux.";
452 453
                break;
            }
454
            if(!gst_element_link_many(parser, _tee, queue, decoder, nullptr)) {
455
                qCritical() << "Unable to link TCP/MPEG-TS pipline to parser.";
456 457
                break;
            }
458
            g_signal_connect(demux, "pad-added", G_CALLBACK(newPadCB), parser);
459
        } else {
460
            g_signal_connect(dataSource, "pad-added", G_CALLBACK(newPadCB), demux);
461
            if(!gst_element_link_many(demux, parser, _tee, queue, decoder, nullptr)) {
462
                qCritical() << "Unable to link RTSP elements.";
463 464
                break;
            }
465 466
        }

467 468 469 470
        g_signal_connect(decoder, "pad-added", G_CALLBACK(newPadCB), _videoSink);
        g_signal_connect(decoder, "autoplug-query", G_CALLBACK(autoplugQueryCB), _videoSink);

        dataSource = demux = parser = queue = decoder = nullptr;
Gus Grubba's avatar
Gus Grubba committed
471

Gus Grubba's avatar
Gus Grubba committed
472
        GstBus* bus = nullptr;
Gus Grubba's avatar
Gus Grubba committed
473

Gus Grubba's avatar
Gus Grubba committed
474
        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
475 476 477
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
Gus Grubba's avatar
Gus Grubba committed
478
            bus = nullptr;
479
        }
Gus Grubba's avatar
Gus Grubba committed
480

481
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-paused");
Gus Grubba's avatar
Gus Grubba committed
482 483 484 485
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

Gus Grubba's avatar
Gus Grubba committed
486
    if (caps != nullptr) {
Gus Grubba's avatar
Gus Grubba committed
487
        gst_caps_unref(caps);
Gus Grubba's avatar
Gus Grubba committed
488
        caps = nullptr;
Gus Grubba's avatar
Gus Grubba committed
489 490 491 492 493
    }

    if (!running) {
        qCritical() << "VideoReceiver::start() failed";

494
        // In newer versions, the pipeline will clean up all references that are added to it
Gus Grubba's avatar
Gus Grubba committed
495
        if (_pipeline != nullptr) {
496
            gst_object_unref(_pipeline);
Gus Grubba's avatar
Gus Grubba committed
497
            _pipeline = nullptr;
Gus Grubba's avatar
Gus Grubba committed
498 499
        }

500 501
        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
Gus Grubba's avatar
Gus Grubba committed
502
            if (decoder != nullptr) {
503
                gst_object_unref(decoder);
Gus Grubba's avatar
Gus Grubba committed
504
                decoder = nullptr;
505
            }
Gus Grubba's avatar
Gus Grubba committed
506

507 508 509 510 511
            if (queue != nullptr) {
                gst_object_unref(queue);
                queue = nullptr;
            }

Gus Grubba's avatar
Gus Grubba committed
512
            if (parser != nullptr) {
513
                gst_object_unref(parser);
Gus Grubba's avatar
Gus Grubba committed
514
                parser = nullptr;
515
            }
Gus Grubba's avatar
Gus Grubba committed
516

Gus Grubba's avatar
Gus Grubba committed
517
            if (demux != nullptr) {
518
                gst_object_unref(demux);
Gus Grubba's avatar
Gus Grubba committed
519
                demux = nullptr;
520
            }
Gus Grubba's avatar
Gus Grubba committed
521

Gus Grubba's avatar
Gus Grubba committed
522
            if (dataSource != nullptr) {
523
                gst_object_unref(dataSource);
Gus Grubba's avatar
Gus Grubba committed
524
                dataSource = nullptr;
525
            }
526

Gus Grubba's avatar
Gus Grubba committed
527
            if (_tee != nullptr) {
528
                gst_object_unref(_tee);
529
                _tee = nullptr;
530
            }
531

Gus Grubba's avatar
Gus Grubba committed
532
        }
533 534 535

        _running = false;
    } else {
536
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-playing");
537 538
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
Gus Grubba's avatar
Gus Grubba committed
539
    }
540
    _starting = false;
541
#endif
Gus Grubba's avatar
Gus Grubba committed
542 543
}

544 545 546
//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
Gus Grubba's avatar
Gus Grubba committed
547
{
548
    if(qgcApp() && qgcApp()->runningUnitTests()) {
549 550
        return;
    }
551
#if defined(QGC_GST_STREAMING)
552
    _stop = true;
553
    qCDebug(VideoReceiverLog) << "stop()";
554 555
    if(!_streaming) {
        _shutdownPipeline();
Gus Grubba's avatar
Gus Grubba committed
556
    } else if (_pipeline != nullptr && !_stopping) {
557
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
558 559
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        gst_bus_disable_sync_message_emission(bus);
560 561
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
562
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
563
        gst_object_unref(bus);
564 565 566 567 568 569
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
            qCritical() << "Error stopping pipeline!";
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
570
        gst_message_unref(message);
Gus Grubba's avatar
Gus Grubba committed
571
    }
572
#endif
Gus Grubba's avatar
Gus Grubba committed
573 574
}

575 576 577
//-----------------------------------------------------------------------------
void
VideoReceiver::setUri(const QString & uri)
Gus Grubba's avatar
Gus Grubba committed
578 579 580 581
{
    _uri = uri;
}

582
//-----------------------------------------------------------------------------
583
#if defined(QGC_GST_STREAMING)
584 585
void
VideoReceiver::_shutdownPipeline() {
586 587 588 589
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
Gus Grubba's avatar
Gus Grubba committed
590 591
    GstBus* bus = nullptr;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
592 593
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
Gus Grubba's avatar
Gus Grubba committed
594
        bus = nullptr;
595 596 597
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    gst_object_unref(_pipeline);
Gus Grubba's avatar
Gus Grubba committed
598
    _pipeline = nullptr;
599
    delete _sink;
Gus Grubba's avatar
Gus Grubba committed
600
    _sink = nullptr;
601
    _serverPresent = false;
602 603 604 605 606 607
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
608
#endif
609

610
//-----------------------------------------------------------------------------
611
#if defined(QGC_GST_STREAMING)
612 613
void
VideoReceiver::_handleError() {
614
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
615
    stop();
616
    _restart_timer.start(_restart_time_ms);
617 618 619
}
#endif

620
//-----------------------------------------------------------------------------
621
#if defined(QGC_GST_STREAMING)
622 623
void
VideoReceiver::_handleEOS() {
624 625
    if(_stopping) {
        _shutdownPipeline();
626
        qCDebug(VideoReceiverLog) << "Stopped";
627 628 629
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
630
        qWarning() << "VideoReceiver: Unexpected EOS!";
631
        _handleError();
Gus Grubba's avatar
Gus Grubba committed
632 633
    }
}
634
#endif
Gus Grubba's avatar
Gus Grubba committed
635

636
//-----------------------------------------------------------------------------
637
#if defined(QGC_GST_STREAMING)
638 639 640 641
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline == nullptr) {
        return;
    }
    //-- We are "streaming" exactly while the pipeline is in PLAYING.
    _streaming = (GST_STATE(_pipeline) == GST_STATE_PLAYING);
    //qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
}
#endif

647
//-----------------------------------------------------------------------------
648
#if defined(QGC_GST_STREAMING)
649 650
// GStreamer bus callback (sync-message). Runs on a streaming thread: it only
// re-emits Qt signals so the real handling happens on the receiver's thread.
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    // FIX: named cast instead of C-style cast.
    VideoReceiver* pThis = static_cast<VideoReceiver*>(data);

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCritical() << error->message;
        g_error_free(error);
        emit pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        emit pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        emit pThis->msgStateChangedReceived();
        break;
    default:
        break;
    }

    return TRUE;
}
679
#endif
680

681
//-----------------------------------------------------------------------------
682
#if defined(QGC_GST_STREAMING)
683 684
void
VideoReceiver::_cleanupOldVideos()
685
{
686
    //-- Only perform cleanup if storage limit is enabled
687
    if(_videoSettings->enableStorageLimit()->rawValue().toBool()) {
688 689 690 691 692 693 694 695
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        QDir videoDir = QDir(savePath);
        videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
        videoDir.setSorting(QDir::Time);
        //-- All the movie extensions we support
        QStringList nameFilters;
        for(uint32_t i = 0; i < NUM_MUXES; i++) {
            nameFilters << QString("*.") + QString(kVideoExtensions[i]);
696
        }
697 698 699 700 701 702
        videoDir.setNameFilters(nameFilters);
        //-- get the list of videos stored
        QFileInfoList vidList = videoDir.entryInfoList();
        if(!vidList.isEmpty()) {
            uint64_t total   = 0;
            //-- Settings are stored using MB
703
            uint64_t maxSize = (_videoSettings->maxVideoSize()->rawValue().toUInt() * 1024 * 1024);
704 705 706 707 708 709 710 711 712 713 714 715
            //-- Compute total used storage
            for(int i = 0; i < vidList.size(); i++) {
                total += vidList[i].size();
            }
            //-- Remove old movies until max size is satisfied.
            while(total >= maxSize && !vidList.isEmpty()) {
                total -= vidList.last().size();
                qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
                QFile file (vidList.last().filePath());
                file.remove();
                vidList.removeLast();
            }
716 717 718
        }
    }
}
719
#endif
720

721 722 723 724 725
//-----------------------------------------------------------------------------
void
VideoReceiver::setVideoDecoder(VideoEncoding encoding)
{
    //-- Pick the RTP depacketizer / parser pair matching the codec family.
    const bool isH265 = (encoding == H265_HW || encoding == H265_SW);
    _depayName  = isH265 ? "rtph265depay" : "rtph264depay";
    _parserName = isH265 ? "h265parse" : "h264parse";
}
733

734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::setVideoSink(GstElement* videoSink)
{
    //-- The sink cannot be swapped while a pipeline holds it.
    if(_pipeline != nullptr) {
        qCDebug(VideoReceiverLog) << "Video receiver pipeline is active, video sink change is not possible";
        return;
    }

    //-- Release any sink we currently hold.
    if (_videoSink != nullptr) {
        gst_object_unref(_videoSink);
        _videoSink = nullptr;
    }

    if (videoSink == nullptr) {
        return;
    }

    //-- Take a reference on the new sink and attach the frame-counting probe.
    _videoSink = videoSink;
    gst_object_ref(_videoSink);

    GstPad* pad = gst_element_get_static_pad(_videoSink, "sink");
    if (pad == nullptr) {
        qCDebug(VideoReceiverLog) << "Unable to find sink pad of video sink";
        return;
    }
    gst_pad_add_probe(pad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER), _videoSinkProbe, this, nullptr);
    gst_object_unref(pad);
}
#endif

766
//-----------------------------------------------------------------------------
767 768 769 770 771 772 773 774 775 776 777
// When we finish our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   |
//                                   |    +--------------_sink-------------------+
//                                   |    |                                      |
//   we are adding these elements->  +->teepad-->queue-->matroskamux-->_filesink |
//                                        |                                      |
//                                        +--------------------------------------+
778
void
779
VideoReceiver::startRecording(const QString &videoFile)
780
{
781
#if defined(QGC_GST_STREAMING)
782

783 784
    qCDebug(VideoReceiverLog) << "startRecording()";
    // exit immediately if we are already recording
Gus Grubba's avatar
Gus Grubba committed
785
    if(_pipeline == nullptr || _recording) {
786 787 788 789
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

790
    uint32_t muxIdx = _videoSettings->recordingFormat()->rawValue().toUInt();
791 792 793 794 795 796 797 798
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

799
    _sink           = new Sink();
800
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
Gus Grubba's avatar
Gus Grubba committed
801
    _sink->queue    = gst_element_factory_make("queue", nullptr);
802
    _sink->parse    = gst_element_factory_make(_parserName, nullptr);
Gus Grubba's avatar
Gus Grubba committed
803 804
    _sink->mux      = gst_element_factory_make(kVideoMuxes[muxIdx], nullptr);
    _sink->filesink = gst_element_factory_make("filesink", nullptr);
805 806
    _sink->removing = false;

807
    if(!_sink->teepad || !_sink->queue || !_sink->mux || !_sink->filesink || !_sink->parse) {
808 809 810 811
        qCritical() << "VideoReceiver::startRecording() failed to make _sink elements";
        return;
    }

812 813 814 815 816 817 818 819 820 821 822
    if(videoFile.isEmpty()) {
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        if(savePath.isEmpty()) {
            qgcApp()->showMessage(tr("Unabled to record video. Video save path must be specified in Settings."));
            return;
        }
        _videoFile = savePath + "/" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") + "." + kVideoExtensions[muxIdx];
    } else {
        _videoFile = videoFile;
    }
    emit videoFileChanged();
823

824
    g_object_set(static_cast<gpointer>(_sink->filesink), "location", qPrintable(_videoFile), nullptr);
825
    qCDebug(VideoReceiverLog) << "New video file:" << _videoFile;
826 827

    gst_object_ref(_sink->queue);
828
    gst_object_ref(_sink->parse);
829 830 831
    gst_object_ref(_sink->mux);
    gst_object_ref(_sink->filesink);

832 833
    gst_bin_add_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, nullptr);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, nullptr);
834 835

    gst_element_sync_state_with_parent(_sink->queue);
836
    gst_element_sync_state_with_parent(_sink->parse);
837 838
    gst_element_sync_state_with_parent(_sink->mux);

839 840 841
    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
842 843
    // Once we have this valid frame, we attach the filesink.
    // Attaching it here would cause the filesink to fail to preroll and to stall the pipeline for a few seconds.
844
    GstPad* probepad = gst_element_get_static_pad(_sink->queue, "src");
Gus Grubba's avatar
Gus Grubba committed
845
    gst_pad_add_probe(probepad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER /* | GST_PAD_PROBE_TYPE_BLOCK */), _keyframeWatch, this, nullptr); // to drop the buffer or to block the buffer?
846 847 848
    gst_object_unref(probepad);

    // Link the recording branch to the pipeline
849 850 851 852
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

853 854
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording");

855 856 857
    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
DonLakeFlyer's avatar
DonLakeFlyer committed
858 859
#else
    Q_UNUSED(videoFile)
860 861 862
#endif
}

863 864 865
//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stopRecording()";
    // Nothing to do unless a recording branch is currently attached.
    if(!_recording || _pipeline == nullptr) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Defer the unlink until the tee pad is idle (no buffer in flight) so the
    // recording branch is detached at a safe data boundary; _unlinkCallBack
    // then hands off to _detachRecordingBranch.
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, nullptr);
#endif
}

879
//-----------------------------------------------------------------------------
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recording elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownRecordingBranch()
{
    GstElement* branch[] = { _sink->queue, _sink->parse, _sink->mux, _sink->filesink };

    // Pull every recording element out of the temporary pipeline.
    for (GstElement* element : branch) {
        gst_bin_remove(GST_BIN(_pipelineStopRec), element);
    }

    // Tear down the temporary pipeline itself.
    gst_element_set_state(_pipelineStopRec, GST_STATE_NULL);
    gst_object_unref(_pipelineStopRec);
    _pipelineStopRec = nullptr;

    // Shut the elements down, downstream first.
    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);
    gst_element_set_state(_sink->parse,     GST_STATE_NULL);
    gst_element_set_state(_sink->mux,       GST_STATE_NULL);
    gst_element_set_state(_sink->queue,     GST_STATE_NULL);

    // Drop the references taken in startRecording().
    for (GstElement* element : branch) {
        gst_object_unref(element);
    }

    delete _sink;
    _sink = nullptr;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording Stopped";
}
#endif

917
//-----------------------------------------------------------------------------
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
// Runs from the pad-idle probe (_unlinkCallBack), so no buffer is in flight
// while the branch is being moved.
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)

    // Also unlinks and unrefs
    gst_bin_remove_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, nullptr);

    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);

    // Create temporary pipeline
    _pipelineStopRec = gst_pipeline_new("pipeStopRec");

    // Put our elements from the recording branch into the temporary pipeline
    gst_bin_add_many(GST_BIN(_pipelineStopRec), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, nullptr);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, nullptr);

    // Add handler for EOS event. Sync emission delivers the message from the
    // streaming thread; _onBusMessage is expected to route the EOS back to
    // _shutdownRecordingBranch (handler body not visible here - see its definition).
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipelineStopRec));
    gst_bus_enable_sync_message_emission(bus);
    g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
    gst_object_unref(bus);

    if(gst_element_set_state(_pipelineStopRec, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        qCDebug(VideoReceiverLog) << "problem starting _pipelineStopRec";
    }

    // Send EOS at the beginning of the pipeline so the muxer flushes and
    // finalizes the file headers/index before teardown.
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording branch unlinked";
}
#endif

960
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    if(info == nullptr || user_data == nullptr) {
        return GST_PAD_PROBE_REMOVE;
    }

    VideoReceiver* self = static_cast<VideoReceiver*>(user_data);

    // The idle probe may fire more than once; the atomic flag guarantees the
    // recording branch is detached exactly once.
    if(g_atomic_int_compare_and_exchange(&self->_sink->removing, FALSE, TRUE)) {
        self->_detachRecordingBranch(info);
    }

    return GST_PAD_PROBE_REMOVE;
}
#endif
976

977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992 993 994 995 996
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);

    // Note the arrival time of every buffer reaching the video sink; buffers
    // are always passed through untouched.
    if(info != nullptr && user_data != nullptr) {
        static_cast<VideoReceiver*>(user_data)->_noteVideoSinkFrame();
    }

    return GST_PAD_PROBE_OK;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_noteVideoSinkFrame()
{
997
    _lastFrameTime = QDateTime::currentSecsSinceEpoch();
998 999 1000
}
#endif

//-----------------------------------------------------------------------------
// Pad probe installed by startRecording() on the recording queue's src pad.
// Drops buffers until the first keyframe arrives, then rebases the pad's
// timestamp offset (so the file starts at t=0 with a decodable frame),
// attaches the filesink and removes itself.
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != nullptr && user_data != nullptr) {
        GstBuffer* buf = gst_pad_probe_info_get_buffer(info);
        if(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) { // wait for a keyframe
            // Probe stays installed while we keep dropping delta frames.
            return GST_PAD_PROBE_DROP;
        } else {
            VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);

            // set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except these which are downstream!

            gint64 position;

            if (gst_element_query_position(pThis->_pipeline, GST_FORMAT_TIME, &position) != TRUE) {
                qCDebug(VideoReceiverLog) << "Unable to get timeline position, let's hope that downstream elements will survive";

                // Fall back to the buffer's own timestamp, then to the pad's
                // current offset, when the pipeline can't report a position.
                if (buf->pts != GST_CLOCK_TIME_NONE) {
                    position = buf->pts;
                } else {
                    position = gst_pad_get_offset(pad);
                }
            }

            gst_pad_set_offset(pad, position);

            // Add the filesink once we have a valid I-frame
            gst_bin_add_many(GST_BIN(pThis->_pipeline), pThis->_sink->filesink, nullptr);
            gst_element_link_many(pThis->_sink->mux, pThis->_sink->filesink, nullptr);
            gst_element_sync_state_with_parent(pThis->_sink->filesink);

            qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
            // NOTE(review): invoked from the streaming thread; presumably a
            // queued Qt signal - confirm at the connection site.
            pThis->gotFirstRecordingKeyFrame();
        }
    }

    // REMOVE detaches the probe; reached only after the keyframe was handled
    // (the delta-frame path above returns early with DROP).
    return GST_PAD_PROBE_REMOVE;
}
#endif

1044 1045 1046 1047 1048
//-----------------------------------------------------------------------------
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
    // Skip the watchdog entirely while a start/stop transition is in flight.
    if(_stopping || _starting) {
        return;
    }

    // Keep the public _videoRunning flag in sync with the pipeline's
    // _streaming state, notifying listeners only on an actual change.
    if(_videoRunning != _streaming) {
        _videoRunning = _streaming;
        emit videoRunningChanged();
    }

    if(_videoRunning) {
        uint32_t timeout = 1;
        if(qgcApp()->toolbox() && qgcApp()->toolbox()->settingsManager()) {
            timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
        }

        const qint64 now = QDateTime::currentSecsSinceEpoch();

        // Stream stalled: no frame reached the sink within the timeout.
        if(now - _lastFrameTime > timeout) {
            stop();
            // We want to start it back again with _updateTimer
            _stop = false;
        }
    } else {
        // FIXME: AV: if pipeline is _running but not _streaming for some time then we need to restart
        if(!_stop && !_running && !_uri.isEmpty() && _videoSettings->streamEnabled()->rawValue().toBool()) {
            start();
        }
    }
#endif
}