/****************************************************************************
 *
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/


/**
 * @file
 *   @brief QGC Video Receiver
 *   @author Gus Grubba <gus@auterion.com>
 */

#include "VideoReceiver.h"
#include "SettingsManager.h"
#include "QGCApplication.h"
#include "VideoManager.h"
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif
#include <QDebug>
#include <QUrl>
#include <QDir>
#include <QDateTime>
#include <QSysInfo>

QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

#if defined(QGC_GST_STREAMING)

static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))
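// NOTE: kVideoExtensions and kVideoMuxes are index-aligned: the recording format index
// selects both the muxer element and the matching file extension.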

#endif


VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _stop(true)
    , _sink(nullptr)
    , _tee(nullptr)
    , _pipeline(nullptr)
    , _pipelineStopRec(nullptr)
    , _videoSink(nullptr)
    , _lastFrameId(G_MAXUINT64)
    , _lastFrameTime(0)
    , _restart_time_ms(1389)
    , _socket(nullptr)
    , _serverPresent(false)
    , _tcpTestInterval_ms(5000)
    , _udpReconnect_us(5000000)
#endif
    , _videoRunning(false)
    , _showFullScreen(false)
    , _videoSettings(nullptr)
    , _hwDecoderName(nullptr)
    , _swDecoderName("avdec_h264")
{
    _videoSettings = qgcApp()->toolbox()->settingsManager()->videoSettings();
#if defined(QGC_GST_STREAMING)
    setVideoDecoder(H264_SW);
    _restart_timer.setSingleShot(true);
    connect(&_restart_timer, &QTimer::timeout, this, &VideoReceiver::_restart_timeout);
    _tcp_timer.setSingleShot(true);
    connect(&_tcp_timer, &QTimer::timeout, this, &VideoReceiver::_tcp_timeout);
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    stop();
    setVideoSink(nullptr);
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::grabImage(QString imageFile)
{
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
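// Dynamic-pad helper: elements such as rtspsrc and tsdemux only create their source
// pads once the stream has been negotiated, so they cannot be linked statically up
// front. This callback links each newly created pad to the downstream element passed
// in through 'data'.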
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    //g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    GstElement* sink = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, sink, "sink") == false)
        qCritical() << "newPadCB : failed to link elements\n";
    g_free(name);
}

//-----------------------------------------------------------------------------
void
VideoReceiver::_restart_timeout()
{
    qgcApp()->toolbox()->videoManager()->restartVideo();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_tcp_timeout()
{
    //-- If the socket is still live, we got neither a connection nor a socket error (i.e. the attempt timed out)
    delete _socket;
    _socket = nullptr;

    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        //-- RTSP will try to connect to the server. If it cannot connect,
        //   it will simply give up and never try again. Instead, we keep
        //   attempting a connection on this timer. Once a connection is
        //   found to be working, only then we actually start the stream.
        QUrl url(_uri);
        //-- If RTSP and no port is defined, set default RTSP port (554)
        if(_uri.contains("rtsp://") && url.port() <= 0) {
            url.setPort(554);
        }
        _socket = new QTcpSocket;
        QNetworkProxy tempProxy;
        tempProxy.setType(QNetworkProxy::DefaultProxy);
        _socket->setProxy(tempProxy);
        connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
        connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
        _socket->connectToHost(url.host(), static_cast<uint16_t>(url.port()));
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_connected()
{
    //-- Server showed up. Now we start the stream.
    _tcp_timer.stop();
    _socket->deleteLater();
    _socket = nullptr;
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        _serverPresent = true;
        start();
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    _socket->deleteLater();
    _socket = nullptr;
    //-- Try again in a while
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

//-----------------------------------------------------------------------------
// When we finish, our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
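//
// For reference, the plain UDP H.264 path assembled below corresponds roughly to this
// gst-launch-1.0 sketch (port and sink element are illustrative only; the real sink is
// whatever was handed in through setVideoSink()):
//
//   gst-launch-1.0 udpsrc uri=udp://0.0.0.0:5600 \
//       caps="application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264" \
//       ! rtph264depay ! h264parse ! tee name=t \
//       t. ! queue ! avdec_h264 ! queue ! autovideosink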
void
VideoReceiver::start()
{
    if (_uri.isEmpty()) {
        return;
    }
    qCDebug(VideoReceiverLog) << "start():" << _uri;
    if(qgcApp()->runningUnitTests()) {
        return;
    }
    if(!_videoSettings->streamEnabled()->rawValue().toBool() ||
       !_videoSettings->streamConfigured()) {
        qCDebug(VideoReceiverLog) << "start() but not enabled/configured";
        return;
    }

#if defined(QGC_GST_STREAMING)
    _stop = false;

#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
    //-- Taisync on iOS or Android sends a raw h.264 stream
    bool isTaisyncUSB = qgcApp()->toolbox()->videoManager()->isTaisync();
#else
    bool isTaisyncUSB = false;
#endif
    bool isUdp264   = _uri.contains("udp://")  && !isTaisyncUSB;
    bool isRtsp     = _uri.contains("rtsp://") && !isTaisyncUSB;
    bool isUdp265   = _uri.contains("udp265://")  && !isTaisyncUSB;
    bool isTCP      = _uri.contains("tcp://")  && !isTaisyncUSB;
    bool isMPEGTS   = _uri.contains("mpegts://")  && !isTaisyncUSB;

    if (!isTaisyncUSB && _uri.isEmpty()) {
        qCritical() << "VideoReceiver::start() failed because URI is not specified";
        return;
    }
    if (_videoSink == nullptr) {
        qCritical() << "VideoReceiver::start() failed because video sink is not set";
        return;
    }
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }
    if (isUdp264) {
        setVideoDecoder(H264_HW);
    } else if (isUdp265) {
        setVideoDecoder(H265_HW);
    }

    _starting = true;

    //-- For RTSP and TCP, check to see if server is there first
    if(!_serverPresent && (isRtsp || isTCP)) {
        _tcp_timer.start(100);
        return;
    }

    _lastFrameId = G_MAXUINT64;
    _lastFrameTime = 0;

    bool running    = false;
    bool pipelineUp = false;

    GstElement*     dataSource  = nullptr;
    GstCaps*        caps        = nullptr;
    GstElement*     demux       = nullptr;
    GstElement*     parser      = nullptr;
    GstElement*     queue       = nullptr;
    GstElement*     decoder     = nullptr;
    GstElement*     queue1      = nullptr;

    do {
        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
            break;
        }

        if(isUdp264 || isUdp265 || isMPEGTS || isTaisyncUSB) {
            dataSource = gst_element_factory_make("udpsrc", "udp-source");
        } else if(isTCP) {
            dataSource = gst_element_factory_make("tcpclientsrc", "tcpclient-source");
        } else {
            dataSource = gst_element_factory_make("rtspsrc", "rtsp-source");
        }

        if (!dataSource) {
            qCritical() << "VideoReceiver::start() failed. Error with data source for gst_element_factory_make()";
            break;
        }

        if(isUdp264) {
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
            g_object_set(static_cast<gpointer>(dataSource), "uri", qPrintable(_uri), "caps", caps, nullptr);
        } else if(isUdp265) {
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
            g_object_set(static_cast<gpointer>(dataSource), "uri", qPrintable(_uri.replace("udp265", "udp")), "caps", caps, nullptr);
#if  defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
        } else if(isTaisyncUSB) {
            QString uri = QString("0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
            qCDebug(VideoReceiverLog) << "Taisync URI:" << uri;
            g_object_set(static_cast<gpointer>(dataSource), "port", TAISYNC_VIDEO_UDP_PORT, nullptr);
#endif
        } else if(isTCP) {
            QUrl url(_uri);
            g_object_set(static_cast<gpointer>(dataSource), "host", qPrintable(url.host()), "port", url.port(), nullptr );
        } else if(isMPEGTS) {
            QUrl url(_uri);
            g_object_set(static_cast<gpointer>(dataSource), "port", url.port(), nullptr);
        } else {
            g_object_set(static_cast<gpointer>(dataSource), "location", qPrintable(_uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, nullptr);
        }

        if (isTCP || isMPEGTS) {
            if ((demux = gst_element_factory_make("tsdemux", "mpeg-ts-demuxer")) == nullptr) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tsdemux')";
                break;
            }
        } else {
            if(!isTaisyncUSB) {
                if ((demux = gst_element_factory_make(_depayName, "rtp-depacketizer")) == nullptr) {
                    qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('" << _depayName << "')";
                    break;
                }
            }
        }

        if ((parser = gst_element_factory_make(_parserName, "parser")) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('" << _parserName << "')";
            break;
        }

        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
            break;
        }

        if((queue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
            break;
        }

        if (!_hwDecoderName || (decoder = gst_element_factory_make(_hwDecoderName, "decoder")) == nullptr) {
            qWarning() << "VideoReceiver::start() hardware decoding not available " << ((_hwDecoderName) ? _hwDecoderName : "");
            if ((decoder = gst_element_factory_make(_swDecoderName, "decoder")) == nullptr) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('" << _swDecoderName << "')";
                break;
            }
        }

        if ((queue1 = gst_element_factory_make("queue", nullptr)) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue') [1]";
            break;
        }

        if(isTaisyncUSB) {
            gst_bin_add_many(GST_BIN(_pipeline), dataSource, parser, _tee, queue, decoder, queue1, _videoSink, nullptr);
        } else {
            gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, nullptr);
        }
        pipelineUp = true;

        if(isUdp264 || isUdp265) {
            // Link the pipeline in front of the tee
            if(!gst_element_link_many(dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, nullptr)) {
                qCritical() << "Unable to link UDP elements.";
                break;
            }
        } else if(isTaisyncUSB) {
            // Link the pipeline in front of the tee
            if(!gst_element_link_many(dataSource, parser, _tee, queue, decoder, queue1, _videoSink, nullptr)) {
                qCritical() << "Unable to link Taisync USB elements.";
                break;
            }
        } else if (isTCP || isMPEGTS) {
            if(!gst_element_link(dataSource, demux)) {
                qCritical() << "Unable to link TCP/MPEG-TS dataSource to Demux.";
                break;
            }
            if(!gst_element_link_many(parser, _tee, queue, decoder, queue1, _videoSink, nullptr)) {
                qCritical() << "Unable to link TCP/MPEG-TS pipeline to parser.";
                break;
            }
            g_signal_connect(demux, "pad-added", G_CALLBACK(newPadCB), parser);
        } else {
            g_signal_connect(dataSource, "pad-added", G_CALLBACK(newPadCB), demux);
            if(!gst_element_link_many(demux, parser, _tee, queue, decoder, _videoSink, nullptr)) {
                qCritical() << "Unable to link RTSP elements.";
                break;
            }
        }

        dataSource = demux = parser = queue = decoder = queue1 = nullptr;

        GstBus* bus = nullptr;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = nullptr;
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-paused");
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (caps != nullptr) {
        gst_caps_unref(caps);
        caps = nullptr;
    }

    if (!running) {
        qCritical() << "VideoReceiver::start() failed";

        // In newer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != nullptr) {
            gst_object_unref(_pipeline);
            _pipeline = nullptr;
        }

        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
            if (queue1 != nullptr) {
                gst_object_unref(queue1);
                queue1 = nullptr;
            }

            if (decoder != nullptr) {
                gst_object_unref(decoder);
                decoder = nullptr;
            }

            if (queue != nullptr) {
                gst_object_unref(queue);
                queue = nullptr;
            }

            if (parser != nullptr) {
                gst_object_unref(parser);
                parser = nullptr;
            }

            if (demux != nullptr) {
                gst_object_unref(demux);
                demux = nullptr;
            }

            if (dataSource != nullptr) {
                gst_object_unref(dataSource);
                dataSource = nullptr;
            }

            if (_tee != nullptr) {
                gst_object_unref(_tee);
                _tee = nullptr;
            }

        }

        _running = false;
    } else {
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-playing");
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
    }
    _starting = false;
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
{
    if(qgcApp() && qgcApp()->runningUnitTests()) {
        return;
    }
#if defined(QGC_GST_STREAMING)
    _stop = true;
    qCDebug(VideoReceiverLog) << "stop()";
    if(!_streaming) {
        _shutdownPipeline();
    } else if (_pipeline != nullptr && !_stopping) {
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        gst_bus_disable_sync_message_emission(bus);
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
        gst_object_unref(bus);
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
            qCritical() << "Error stopping pipeline!";
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
        gst_message_unref(message);
    }
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::setUri(const QString & uri)
{
    _uri = uri;
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    GstBus* bus = nullptr;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = nullptr;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    gst_object_unref(_pipeline);
    _pipeline = nullptr;
    delete _sink;
    _sink = nullptr;
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "Gstreamer error!";
    // If there was an error we switch to software decoding only
    _tryWithHardwareDecoding = false;
    stop();
    _restart_timer.start(_restart_time_ms);
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleEOS() {
    if(_stopping) {
        _shutdownPipeline();
        qCDebug(VideoReceiverLog) << "Stopped";
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
        qWarning() << "VideoReceiver: Unexpected EOS!";
        _handleError();
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        //qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
    VideoReceiver* pThis = static_cast<VideoReceiver*>(data);

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCritical() << error->message;
        g_error_free(error);
        pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        pThis->msgStateChangedReceived();
        break;
    default:
        break;
    }

    return TRUE;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_cleanupOldVideos()
{
    //-- Only perform cleanup if storage limit is enabled
    if(_videoSettings->enableStorageLimit()->rawValue().toBool()) {
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        QDir videoDir = QDir(savePath);
        videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
        videoDir.setSorting(QDir::Time);
        //-- All the movie extensions we support
        QStringList nameFilters;
        for(uint32_t i = 0; i < NUM_MUXES; i++) {
            nameFilters << QString("*.") + QString(kVideoExtensions[i]);
        }
        videoDir.setNameFilters(nameFilters);
        //-- get the list of videos stored
        QFileInfoList vidList = videoDir.entryInfoList();
        if(!vidList.isEmpty()) {
            uint64_t total   = 0;
            //-- Settings are stored using MB
            uint64_t maxSize = (_videoSettings->maxVideoSize()->rawValue().toUInt() * 1024 * 1024);
            //-- Compute total used storage
            for(int i = 0; i < vidList.size(); i++) {
                total += vidList[i].size();
            }
            //-- Remove old movies until max size is satisfied.
            while(total >= maxSize && !vidList.isEmpty()) {
                total -= vidList.last().size();
                qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
                QFile file (vidList.last().filePath());
                file.remove();
                vidList.removeLast();
            }
        }
    }
}
#endif

//-----------------------------------------------------------------------------
void
VideoReceiver::setVideoDecoder(VideoEncoding encoding)
{
    /*
    #if defined(Q_OS_MAC)
        _hwDecoderName = "vtdec";
    #else
        _hwDecoderName = "vaapidecode";
    #endif
    */

    if (encoding == H265_HW || encoding == H265_SW) {
        _depayName  = "rtph265depay";
        _parserName = "h265parse";
#if defined(__android__)
        _hwDecoderName = "amcviddec-omxgooglehevcdecoder";
#endif
        _swDecoderName = "avdec_h265";
    } else {
        _depayName  = "rtph264depay";
        _parserName = "h264parse";
#if defined(__android__)
        _hwDecoderName = "amcviddec-omxgoogleh264decoder";
#endif
        _swDecoderName = "avdec_h264";
    }

    if (!_tryWithHardwareDecoding) {
        _hwDecoderName = nullptr;
    }
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::setVideoSink(GstElement* videoSink)
{
    if(_pipeline != nullptr) {
        qCDebug(VideoReceiverLog) << "Video receiver pipeline is active, video sink change is not possible";
        return;
    }

    if (_videoSink != nullptr) {
        gst_object_unref(_videoSink);
        _videoSink = nullptr;
    }

    if (videoSink != nullptr) {
        _videoSink = videoSink;
        gst_object_ref(_videoSink);

        GstPad* pad = gst_element_get_static_pad(_videoSink, "sink");

        if (pad != nullptr) {
            gst_pad_add_probe(pad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER), _videoSinkProbe, this, nullptr);
            gst_object_unref(pad);
            pad = nullptr;
        } else {
            qCDebug(VideoReceiverLog) << "Unable to find sink pad of video sink";
        }
    }
}
#endif

//-----------------------------------------------------------------------------
// When we finish, our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   |
//                                   |    +--------------_sink-------------------+
//                                   |    |                                      |
//   we are adding these elements->  +->teepad-->queue-->matroskamux-->_filesink |
//                                        |                                      |
//                                        +--------------------------------------+
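//
// For reference, the recording branch corresponds roughly to appending the following
// to the tee of the display pipeline sketched above (Matroska case; the file name is
// illustrative only - the real one is built from the video save path plus a timestamp):
//
//   t. ! queue ! h264parse ! matroskamux ! filesink location=2020-01-01_12.00.00.mkv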
void
VideoReceiver::startRecording(const QString &videoFile)
{
#if defined(QGC_GST_STREAMING)

    qCDebug(VideoReceiverLog) << "startRecording()";
    // exit immediately if we are already recording
    if(_pipeline == nullptr || _recording) {
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

    uint32_t muxIdx = _videoSettings->recordingFormat()->rawValue().toUInt();
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

    _sink           = new Sink();
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", nullptr);
    _sink->parse    = gst_element_factory_make(_parserName, nullptr);
    _sink->mux      = gst_element_factory_make(kVideoMuxes[muxIdx], nullptr);
    _sink->filesink = gst_element_factory_make("filesink", nullptr);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->mux || !_sink->filesink || !_sink->parse) {
        qCritical() << "VideoReceiver::startRecording() failed to make _sink elements";
        return;
    }

    if(videoFile.isEmpty()) {
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        if(savePath.isEmpty()) {
            qgcApp()->showMessage(tr("Unable to record video. Video save path must be specified in Settings."));
            return;
        }
        _videoFile = savePath + "/" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") + "." + kVideoExtensions[muxIdx];
    } else {
        _videoFile = videoFile;
    }
    emit videoFileChanged();

    g_object_set(static_cast<gpointer>(_sink->filesink), "location", qPrintable(_videoFile), nullptr);
    qCDebug(VideoReceiverLog) << "New video file:" << _videoFile;

    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->parse);
    gst_object_ref(_sink->mux);
    gst_object_ref(_sink->filesink);

    gst_bin_add_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, nullptr);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, nullptr);

    gst_element_sync_state_with_parent(_sink->queue);
    gst_element_sync_state_with_parent(_sink->parse);
    gst_element_sync_state_with_parent(_sink->mux);

    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
    // Once we have this valid frame, we attach the filesink.
    // Attaching it here would cause the filesink to fail to preroll and to stall the pipeline for a few seconds.
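    // For example: if the first keyframe arrives at a pipeline position of 7.25 s, the
    // probe records that position and offsets the pad accordingly, so that this frame
    // becomes t=0 in the recorded file (values illustrative).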
    GstPad* probepad = gst_element_get_static_pad(_sink->queue, "src");
    gst_pad_add_probe(probepad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER /* | GST_PAD_PROBE_TYPE_BLOCK */), _keyframeWatch, this, nullptr); // to drop the buffer or to block the buffer?
    gst_object_unref(probepad);

    // Link the recording branch to the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording");

    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
#else
    Q_UNUSED(videoFile)
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stopRecording()";
    // exit immediately if we are not recording
    if(_pipeline == nullptr || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for data block before unlinking
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, nullptr);
#endif
}

//-----------------------------------------------------------------------------
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recording elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownRecordingBranch()
{
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->queue);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->parse);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->mux);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->filesink);

    gst_element_set_state(_pipelineStopRec, GST_STATE_NULL);
    gst_object_unref(_pipelineStopRec);
    _pipelineStopRec = nullptr;

    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);
    gst_element_set_state(_sink->parse,     GST_STATE_NULL);
    gst_element_set_state(_sink->mux,       GST_STATE_NULL);
    gst_element_set_state(_sink->queue,     GST_STATE_NULL);

    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->parse);
    gst_object_unref(_sink->mux);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = nullptr;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording Stopped";
}
#endif

//-----------------------------------------------------------------------------
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)

    // Also unlinks and unrefs
    gst_bin_remove_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, nullptr);

    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);

    // Create temporary pipeline
    _pipelineStopRec = gst_pipeline_new("pipeStopRec");

    // Put our elements from the recording branch into the temporary pipeline
    gst_bin_add_many(GST_BIN(_pipelineStopRec), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, nullptr);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, nullptr);

    // Add handler for EOS event
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipelineStopRec));
    gst_bus_enable_sync_message_emission(bus);
    g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
    gst_object_unref(bus);

    if(gst_element_set_state(_pipelineStopRec, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        qCDebug(VideoReceiverLog) << "problem starting _pipelineStopRec";
    }

    // Send EOS at the beginning of the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording branch unlinked";
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != nullptr && user_data != nullptr) {
        VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);
        // We will only act once
        if(g_atomic_int_compare_and_exchange(&pThis->_sink->removing, FALSE, TRUE)) {
            pThis->_detachRecordingBranch(info);
        }
    }
    return GST_PAD_PROBE_REMOVE;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
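// Buffer probe installed on the video sink's sink pad (see setVideoSink()): every buffer
// that reaches the sink updates _lastFrameTime via _noteVideoSinkFrame(), which
// _updateTimer() then uses to detect a stalled stream.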
GstPadProbeReturn
VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != nullptr && user_data != nullptr) {
        VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);
        pThis->_noteVideoSinkFrame();
    }

    return GST_PAD_PROBE_OK;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_noteVideoSinkFrame()
{
    _lastFrameTime = QDateTime::currentSecsSinceEpoch();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != nullptr && user_data != nullptr) {
        GstBuffer* buf = gst_pad_probe_info_get_buffer(info);
        if(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) { // wait for a keyframe
            return GST_PAD_PROBE_DROP;
        } else {
            VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);

            // set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except those that are downstream!

            gint64 position;

            if (gst_element_query_position(pThis->_pipeline, GST_FORMAT_TIME, &position) != TRUE) {
                qCDebug(VideoReceiverLog) << "Unable to get timeline position, let's hope that downstream elements will survive";

                if (buf->pts != GST_CLOCK_TIME_NONE) {
                    position = buf->pts;
                } else {
                    position = gst_pad_get_offset(pad);
                }
            }

            gst_pad_set_offset(pad, position);

            // Add the filesink once we have a valid I-frame
            gst_bin_add_many(GST_BIN(pThis->_pipeline), pThis->_sink->filesink, nullptr);
            gst_element_link_many(pThis->_sink->mux, pThis->_sink->filesink, nullptr);
            gst_element_sync_state_with_parent(pThis->_sink->filesink);

            qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
            pThis->gotFirstRecordingKeyFrame();
        }
    }

    return GST_PAD_PROBE_REMOVE;
}
#endif

//-----------------------------------------------------------------------------
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
    if(_stopping || _starting) {
        return;
    }

    if(_streaming) {
        if(!_videoRunning) {
            _videoRunning = true;
            emit videoRunningChanged();
        }
    } else {
        if(_videoRunning) {
            _videoRunning = false;
            emit videoRunningChanged();
        }
    }

    if(_videoRunning) {
        uint32_t timeout = 1;
        if(qgcApp()->toolbox() && qgcApp()->toolbox()->settingsManager()) {
            timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
        }

        const qint64 now = QDateTime::currentSecsSinceEpoch();

        if(now - _lastFrameTime > timeout) {
            stop();
            // We want to start it back again with _updateTimer
            _stop = false;
        }
    } else {
        // FIXME: AV: if pipeline is _running but not _streaming for some time then we need to restart
        if(!_stop && !_running && !_uri.isEmpty() && _videoSettings->streamEnabled()->rawValue().toBool()) {
            start();
        }
    }
#endif
}