/****************************************************************************
 *
 * (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
 *
 * QGroundControl is licensed according to the terms in the file
 * COPYING.md in the root of the source code directory.
 *
 ****************************************************************************/


/**
 * @file
 *   @brief QGC Video Receiver
 *   @author Gus Grubba <gus@auterion.com>
 */

#include "VideoReceiver.h"
#include "SettingsManager.h"
#include "QGCApplication.h"
#include "VideoManager.h"
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif
#include <QDebug>
#include <QUrl>
#include <QDir>
#include <QDateTime>
#include <QSysInfo>

QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")

#if defined(QGC_GST_STREAMING)

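// Supported recording container formats. kVideoExtensions and kVideoMuxes are
// parallel arrays indexed by the VideoSettings recordingFormat value.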
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};

static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};

#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))

#endif


VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)
    , _starting(false)
    , _stopping(false)
    , _stop(true)
    , _sink(nullptr)
    , _tee(nullptr)
    , _pipeline(nullptr)
    , _pipelineStopRec(nullptr)
    , _videoSink(nullptr)
    , _lastFrameId(G_MAXUINT64)
    , _lastFrameTime(0)
    , _restart_time_ms(1389)
    , _socket(nullptr)
    , _serverPresent(false)
    , _tcpTestInterval_ms(5000)
    , _udpReconnect_us(5000000)
#endif
    , _videoRunning(false)
    , _showFullScreen(false)
    , _videoSettings(nullptr)
    , _hwDecoderName(nullptr)
    , _swDecoderName("avdec_h264")
{
    _videoSettings = qgcApp()->toolbox()->settingsManager()->videoSettings();
#if defined(QGC_GST_STREAMING)
    setVideoDecoder(H264_SW);
    _restart_timer.setSingleShot(true);
    connect(&_restart_timer, &QTimer::timeout, this, &VideoReceiver::_restart_timeout);
    _tcp_timer.setSingleShot(true);
    connect(&_tcp_timer, &QTimer::timeout, this, &VideoReceiver::_tcp_timeout);
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
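    // The frame timer ticks once per second and drives the stream watchdog in _updateTimer().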
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}

VideoReceiver::~VideoReceiver()
{
#if defined(QGC_GST_STREAMING)
    stop();
    setVideoSink(nullptr);
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::grabImage(QString imageFile)
{
    _imageFile = imageFile;
    emit imageFileChanged();
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
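// "pad-added" callback: dynamically created source pads (e.g. from rtspsrc or tsdemux)
// are linked here to the sink pad of the downstream element passed in as 'data'.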
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    //g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    g_free(description);
    GstElement* sink = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, sink, "sink") == false)
        qCritical() << "newPadCB : failed to link elements\n";
    g_free(name);
}

//-----------------------------------------------------------------------------
void
VideoReceiver::_restart_timeout()
{
    qgcApp()->toolbox()->videoManager()->restartVideo();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_tcp_timeout()
{
    //-- If the socket is still live, we got neither a connection nor a socket error
    if(_socket) {
        delete _socket;
        _socket = nullptr;
    }
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        //-- RTSP will try to connect to the server. If it cannot connect,
        //   it simply gives up and never tries again. Instead, we keep
        //   attempting a connection on this timer. Only once a connection
        //   is found to be working do we actually start the stream.
        QUrl url(_uri);
        //-- If RTSP and no port is defined, set default RTSP port (554)
        if(_uri.contains("rtsp://") && url.port() <= 0) {
            url.setPort(554);
        }
        _socket = new QTcpSocket;
        QNetworkProxy tempProxy;
        tempProxy.setType(QNetworkProxy::DefaultProxy);
        _socket->setProxy(tempProxy);
        connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
        connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
        _socket->connectToHost(url.host(), static_cast<uint16_t>(url.port()));
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_connected()
{
    //-- Server showed up. Now we start the stream.
    _tcp_timer.stop();
    _socket->deleteLater();
    _socket = nullptr;
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        _serverPresent = true;
        start();
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    _socket->deleteLater();
    _socket = nullptr;
    //-- Try again in a while
    if(_videoSettings->streamEnabled()->rawValue().toBool()) {
        _tcp_timer.start(_tcpTestInterval_ms);
    }
}
#endif

//-----------------------------------------------------------------------------
// When we are finished, our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   ^
//                                   |
//                                   +-Here we will later link elements for recording
void
VideoReceiver::start()
{
    if (_uri.isEmpty()) {
        return;
    }
    qCDebug(VideoReceiverLog) << "start():" << _uri;
    if(qgcApp()->runningUnitTests()) {
        return;
    }
    if(!_videoSettings->streamEnabled()->rawValue().toBool() ||
       !_videoSettings->streamConfigured()) {
        qCDebug(VideoReceiverLog) << "start() but not enabled/configured";
        return;
    }

#if defined(QGC_GST_STREAMING)
    _stop = false;

#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
    //-- Taisync on iOS or Android sends a raw h.264 stream
    bool isTaisyncUSB = qgcApp()->toolbox()->videoManager()->isTaisync();
#else
    bool isTaisyncUSB = false;
#endif
    bool isUdp264   = _uri.contains("udp://")  && !isTaisyncUSB;
    bool isRtsp     = _uri.contains("rtsp://") && !isTaisyncUSB;
    bool isUdp265   = _uri.contains("udp265://")  && !isTaisyncUSB;
    bool isTCP      = _uri.contains("tcp://")  && !isTaisyncUSB;
    bool isMPEGTS   = _uri.contains("mpegts://")  && !isTaisyncUSB;

    if (!isTaisyncUSB && _uri.isEmpty()) {
        qCritical() << "VideoReceiver::start() failed because URI is not specified";
        return;
    }
    if (_videoSink == nullptr) {
        qCritical() << "VideoReceiver::start() failed because video sink is not set";
        return;
    }
    if(_running) {
        qCDebug(VideoReceiverLog) << "Already running!";
        return;
    }
    if (isUdp264) {
        setVideoDecoder(H264_HW);
    } else if (isUdp265) {
        setVideoDecoder(H265_HW);
    }

    _starting = true;

    //-- For RTSP and TCP, check to see if server is there first
    if(!_serverPresent && (isRtsp || isTCP)) {
        _tcp_timer.start(100);
        return;
    }

    _lastFrameId = G_MAXUINT64;
    _lastFrameTime = 0;

    bool running    = false;
    bool pipelineUp = false;

    GstElement*     dataSource  = nullptr;
    GstCaps*        caps        = nullptr;
    GstElement*     demux       = nullptr;
    GstElement*     parser      = nullptr;
    GstElement*     queue       = nullptr;
    GstElement*     decoder     = nullptr;
    GstElement*     queue1      = nullptr;

    do {
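        // Single-pass loop used for structured error handling: any failure below
        // breaks out to the cleanup code that follows the loop.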
        if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
            break;
        }

        if(isUdp264 || isUdp265 || isMPEGTS || isTaisyncUSB) {
            dataSource = gst_element_factory_make("udpsrc", "udp-source");
        } else if(isTCP) {
            dataSource = gst_element_factory_make("tcpclientsrc", "tcpclient-source");
        } else {
            dataSource = gst_element_factory_make("rtspsrc", "rtsp-source");
        }

        if (!dataSource) {
            qCritical() << "VideoReceiver::start() failed. Error with data source for gst_element_factory_make()";
            break;
        }

        if(isUdp264) {
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
            g_object_set(static_cast<gpointer>(dataSource), "uri", qPrintable(_uri), "caps", caps, nullptr);
        } else if(isUdp265) {
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
            g_object_set(static_cast<gpointer>(dataSource), "uri", qPrintable(_uri.replace("udp265", "udp")), "caps", caps, nullptr);
#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
        } else if(isTaisyncUSB) {
            QString uri = QString("0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
            qCDebug(VideoReceiverLog) << "Taisync URI:" << uri;
            g_object_set(static_cast<gpointer>(dataSource), "port", TAISYNC_VIDEO_UDP_PORT, nullptr);
#endif
        } else if(isTCP) {
            QUrl url(_uri);
            g_object_set(static_cast<gpointer>(dataSource), "host", qPrintable(url.host()), "port", url.port(), nullptr );
        } else if(isMPEGTS) {
            QUrl url(_uri);
            g_object_set(static_cast<gpointer>(dataSource), "port", url.port(), nullptr);
        } else {
            g_object_set(static_cast<gpointer>(dataSource), "location", qPrintable(_uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, nullptr);
        }

        if (isTCP || isMPEGTS) {
            if ((demux = gst_element_factory_make("tsdemux", "mpeg-ts-demuxer")) == nullptr) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tsdemux')";
                break;
            }
        } else {
            if(!isTaisyncUSB) {
                if ((demux = gst_element_factory_make(_depayName, "rtp-depacketizer")) == nullptr) {
                    qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('" << _depayName << "')";
                    break;
                }
            }
        }

        if ((parser = gst_element_factory_make(_parserName, "parser")) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('" << _parserName << "')";
            break;
        }

        if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr)  {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
            break;
        }

        if((queue = gst_element_factory_make("queue", nullptr)) == nullptr)  {
            // TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
            //       We should compare gstreamer scripts to QGroundControl to determine the need
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
            break;
        }

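        // Prefer the hardware decoder when one is configured; fall back to the
        // software decoder if the hardware element cannot be created.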
        if (!_hwDecoderName || (decoder = gst_element_factory_make(_hwDecoderName, "decoder")) == nullptr) {
            qWarning() << "VideoReceiver::start() hardware decoding not available " << ((_hwDecoderName) ? _hwDecoderName : "");
            if ((decoder = gst_element_factory_make(_swDecoderName, "decoder")) == nullptr) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('" << _swDecoderName << "')";
                break;
            }
        }

        if ((queue1 = gst_element_factory_make("queue", nullptr)) == nullptr) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue') [1]";
            break;
        }

        if(isTaisyncUSB) {
            gst_bin_add_many(GST_BIN(_pipeline), dataSource, parser, _tee, queue, decoder, queue1, _videoSink, nullptr);
        } else {
            gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, nullptr);
        }
        pipelineUp = true;

        if(isUdp264 || isUdp265) {
            // Link the pipeline in front of the tee
            if(!gst_element_link_many(dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, nullptr)) {
                qCritical() << "Unable to link UDP elements.";
                break;
            }
        } else if(isTaisyncUSB) {
            // Link the pipeline in front of the tee
            if(!gst_element_link_many(dataSource, parser, _tee, queue, decoder, queue1, _videoSink, nullptr)) {
                qCritical() << "Unable to link Taisync USB elements.";
                break;
            }
        } else if (isTCP || isMPEGTS) {
            if(!gst_element_link(dataSource, demux)) {
                qCritical() << "Unable to link TCP/MPEG-TS dataSource to Demux.";
                break;
            }
            if(!gst_element_link_many(parser, _tee, queue, decoder, queue1, _videoSink, nullptr)) {
                qCritical() << "Unable to link TCP/MPEG-TS pipeline to parser.";
                break;
            }
            g_signal_connect(demux, "pad-added", G_CALLBACK(newPadCB), parser);
        } else {
            g_signal_connect(dataSource, "pad-added", G_CALLBACK(newPadCB), demux);
            if(!gst_element_link_many(demux, parser, _tee, queue, decoder, _videoSink, nullptr)) {
                qCritical() << "Unable to link RTSP elements.";
                break;
            }
        }

        dataSource = demux = parser = queue = decoder = queue1 = nullptr;

        GstBus* bus = nullptr;

        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
            gst_bus_enable_sync_message_emission(bus);
            g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
            gst_object_unref(bus);
            bus = nullptr;
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-paused");
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;

    } while(0);

    if (caps != nullptr) {
        gst_caps_unref(caps);
        caps = nullptr;
    }

    if (!running) {
        qCritical() << "VideoReceiver::start() failed";

        // In newer versions, the pipeline will clean up all references that are added to it
        if (_pipeline != nullptr) {
            gst_object_unref(_pipeline);
            _pipeline = nullptr;
        }

        // If we failed before adding items to the pipeline, then clean up
        if (!pipelineUp) {
            if (queue1 != nullptr) {
                gst_object_unref(queue1);
                queue1 = nullptr;
            }

            if (decoder != nullptr) {
                gst_object_unref(decoder);
                decoder = nullptr;
            }

            if (queue != nullptr) {
                gst_object_unref(queue);
                queue = nullptr;
            }

            if (parser != nullptr) {
                gst_object_unref(parser);
                parser = nullptr;
            }

            if (demux != nullptr) {
                gst_object_unref(demux);
                demux = nullptr;
            }

            if (dataSource != nullptr) {
                gst_object_unref(dataSource);
                dataSource = nullptr;
            }

            if (_tee != nullptr) {
                gst_object_unref(_tee);
                _tee = nullptr;
            }

        }

        _running = false;
    } else {
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-playing");
        _running = true;
        qCDebug(VideoReceiverLog) << "Running";
    }
    _starting = false;
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
{
    if(qgcApp() && qgcApp()->runningUnitTests()) {
        return;
    }
#if defined(QGC_GST_STREAMING)
    _stop = true;
    qCDebug(VideoReceiverLog) << "stop()";
    if(!_streaming) {
        _shutdownPipeline();
    } else if (_pipeline != nullptr && !_stopping) {
        qCDebug(VideoReceiverLog) << "Stopping _pipeline";
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
        gst_bus_disable_sync_message_emission(bus);
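        // Send EOS and wait synchronously on the bus so the pipeline (and any
        // active recording) is finalized before it is torn down.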
        gst_element_send_event(_pipeline, gst_event_new_eos());
        _stopping = true;
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
        gst_object_unref(bus);
        if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
            _shutdownPipeline();
            qCritical() << "Error stopping pipeline!";
        } else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
            _handleEOS();
        }
        gst_message_unref(message);
    }
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::setUri(const QString & uri)
{
    _uri = uri;
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    GstBus* bus = nullptr;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = nullptr;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    gst_object_unref(_pipeline);
    _pipeline = nullptr;
    delete _sink;
    _sink = nullptr;
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleError() {
    qCDebug(VideoReceiverLog) << "GStreamer error!";
    // If there was an error we switch to software decoding only
    _tryWithHardwareDecoding = false;
    stop();
    _restart_timer.start(_restart_time_ms);
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleEOS() {
    if(_stopping) {
        _shutdownPipeline();
        qCDebug(VideoReceiverLog) << "Stopped";
    } else if(_recording && _sink->removing) {
        _shutdownRecordingBranch();
    } else {
        qWarning() << "VideoReceiver: Unexpected EOS!";
        _handleError();
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        //qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != nullptr && data != nullptr);
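    // This handler may run on a GStreamer streaming thread; messages are forwarded
    // as Qt signals so they are handled on the VideoReceiver's own thread.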
    VideoReceiver* pThis = static_cast<VideoReceiver*>(data);

    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCritical() << error->message;
        g_error_free(error);
        pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        pThis->msgStateChangedReceived();
        break;
    default:
        break;
    }

    return TRUE;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_cleanupOldVideos()
{
    //-- Only perform cleanup if storage limit is enabled
    if(_videoSettings->enableStorageLimit()->rawValue().toBool()) {
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        QDir videoDir = QDir(savePath);
        videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
        videoDir.setSorting(QDir::Time);
        //-- All the movie extensions we support
        QStringList nameFilters;
        for(uint32_t i = 0; i < NUM_MUXES; i++) {
            nameFilters << QString("*.") + QString(kVideoExtensions[i]);
        }
        videoDir.setNameFilters(nameFilters);
        //-- get the list of videos stored
        QFileInfoList vidList = videoDir.entryInfoList();
        if(!vidList.isEmpty()) {
            uint64_t total   = 0;
            //-- Settings are stored using MB
            uint64_t maxSize = (_videoSettings->maxVideoSize()->rawValue().toUInt() * 1024 * 1024);
            //-- Compute total used storage
            for(int i = 0; i < vidList.size(); i++) {
                total += vidList[i].size();
            }
            //-- Remove old movies until max size is satisfied.
            while(total >= maxSize && !vidList.isEmpty()) {
                total -= vidList.last().size();
                qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
                QFile file (vidList.last().filePath());
                file.remove();
                vidList.removeLast();
            }
        }
    }
}
#endif

//-----------------------------------------------------------------------------
void
VideoReceiver::setVideoDecoder(VideoEncoding encoding)
{
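    // Select the RTP depayloader, parser and decoder element names matching the
    // requested encoding (H.264 or H.265).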
    /*
    #if defined(Q_OS_MAC)
        _hwDecoderName = "vtdec";
    #else
        _hwDecoderName = "vaapidecode";
    #endif
    */

    if (encoding == H265_HW || encoding == H265_SW) {
        _depayName  = "rtph265depay";
        _parserName = "h265parse";
#if defined(__android__)
        _hwDecoderName = "amcviddec-omxgooglehevcdecoder";
#endif
        _swDecoderName = "avdec_h265";
    } else {
        _depayName  = "rtph264depay";
        _parserName = "h264parse";
#if defined(__android__)
        _hwDecoderName = "amcviddec-omxgoogleh264decoder";
#endif
        _swDecoderName = "avdec_h264";
    }

    if (!_tryWithHardwareDecoding) {
        _hwDecoderName = nullptr;
    }
}

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::setVideoSink(GstElement* videoSink)
{
    if(_pipeline != nullptr) {
        qCDebug(VideoReceiverLog) << "Video receiver pipeline is active, video sink change is not possible";
        return;
    }

    if (_videoSink != nullptr) {
        gst_object_unref(_videoSink);
        _videoSink = nullptr;
    }

    if (videoSink != nullptr) {
        _videoSink = videoSink;
        gst_object_ref(_videoSink);

        GstPad* pad = gst_element_get_static_pad(_videoSink, "sink");

        if (pad != nullptr) {
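            // A buffer probe on the sink pad timestamps every rendered frame
            // (_noteVideoSinkFrame), which feeds the stream watchdog.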
            gst_pad_add_probe(pad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER), _videoSinkProbe, this, nullptr);
            gst_object_unref(pad);
            pad = nullptr;
        } else {
            qCDebug(VideoReceiverLog) << "Unable to find sink pad of video sink";
        }
    }
}
#endif

//-----------------------------------------------------------------------------
// When we are finished, our pipeline will look like this:
//
//                                   +-->queue-->decoder-->_videosink
//                                   |
//    datasource-->demux-->parser-->tee
//                                   |
//                                   |    +--------------_sink-------------------+
//                                   |    |                                      |
//   we are adding these elements->  +->teepad-->queue-->matroskamux-->_filesink |
//                                        |                                      |
//                                        +--------------------------------------+
void
VideoReceiver::startRecording(const QString &videoFile)
{
#if defined(QGC_GST_STREAMING)

    qCDebug(VideoReceiverLog) << "startRecording()";
    // exit immediately if we are already recording
    if(_pipeline == nullptr || _recording) {
        qCDebug(VideoReceiverLog) << "Already recording!";
        return;
    }

    uint32_t muxIdx = _videoSettings->recordingFormat()->rawValue().toUInt();
    if(muxIdx >= NUM_MUXES) {
        qgcApp()->showMessage(tr("Invalid video format defined."));
        return;
    }

    //-- Disk usage maintenance
    _cleanupOldVideos();

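    // Build the recording branch: request a new src pad from the tee so the branch
    // taps the same parsed stream that feeds the display branch.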
    _sink           = new Sink();
    _sink->teepad   = gst_element_get_request_pad(_tee, "src_%u");
    _sink->queue    = gst_element_factory_make("queue", nullptr);
    _sink->parse    = gst_element_factory_make(_parserName, nullptr);
    _sink->mux      = gst_element_factory_make(kVideoMuxes[muxIdx], nullptr);
    _sink->filesink = gst_element_factory_make("filesink", nullptr);
    _sink->removing = false;

    if(!_sink->teepad || !_sink->queue || !_sink->mux || !_sink->filesink || !_sink->parse) {
        qCritical() << "VideoReceiver::startRecording() failed to make _sink elements";
        return;
    }

    if(videoFile.isEmpty()) {
        QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
        if(savePath.isEmpty()) {
            qgcApp()->showMessage(tr("Unable to record video. Video save path must be specified in Settings."));
            return;
        }
        _videoFile = savePath + "/" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") + "." + kVideoExtensions[muxIdx];
    } else {
        _videoFile = videoFile;
    }
    emit videoFileChanged();

    g_object_set(static_cast<gpointer>(_sink->filesink), "location", qPrintable(_videoFile), nullptr);
    qCDebug(VideoReceiverLog) << "New video file:" << _videoFile;

    gst_object_ref(_sink->queue);
    gst_object_ref(_sink->parse);
    gst_object_ref(_sink->mux);
    gst_object_ref(_sink->filesink);

    gst_bin_add_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, nullptr);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, nullptr);

    gst_element_sync_state_with_parent(_sink->queue);
    gst_element_sync_state_with_parent(_sink->parse);
    gst_element_sync_state_with_parent(_sink->mux);

    // Install a probe on the recording branch to drop buffers until we hit our first keyframe
    // When we hit our first keyframe, we can offset the timestamps appropriately according to the first keyframe time
    // This will ensure the first frame is a keyframe at t=0, and decoding can begin immediately on playback
    // Once we have this valid frame, we attach the filesink.
    // Attaching it here would cause the filesink to fail to preroll and to stall the pipeline for a few seconds.
    GstPad* probepad = gst_element_get_static_pad(_sink->queue, "src");
    gst_pad_add_probe(probepad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER /* | GST_PAD_PROBE_TYPE_BLOCK */), _keyframeWatch, this, nullptr); // to drop the buffer or to block the buffer?
    gst_object_unref(probepad);

    // Link the recording branch to the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_link(_sink->teepad, sinkpad);
    gst_object_unref(sinkpad);

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording");

    _recording = true;
    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording started";
#else
    Q_UNUSED(videoFile)
#endif
}

//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
    qCDebug(VideoReceiverLog) << "stopRecording()";
    // exit immediately if we are not recording
    if(_pipeline == nullptr || !_recording) {
        qCDebug(VideoReceiverLog) << "Not recording!";
        return;
    }
    // Wait for data block before unlinking
    gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, nullptr);
#endif
}

//-----------------------------------------------------------------------------
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recording elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_shutdownRecordingBranch()
{
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->queue);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->parse);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->mux);
    gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->filesink);

    gst_element_set_state(_pipelineStopRec, GST_STATE_NULL);
    gst_object_unref(_pipelineStopRec);
    _pipelineStopRec = nullptr;

    gst_element_set_state(_sink->filesink,  GST_STATE_NULL);
    gst_element_set_state(_sink->parse,     GST_STATE_NULL);
    gst_element_set_state(_sink->mux,       GST_STATE_NULL);
    gst_element_set_state(_sink->queue,     GST_STATE_NULL);

    gst_object_unref(_sink->queue);
    gst_object_unref(_sink->parse);
    gst_object_unref(_sink->mux);
    gst_object_unref(_sink->filesink);

    delete _sink;
    _sink = nullptr;
    _recording = false;

    emit recordingChanged();
    qCDebug(VideoReceiverLog) << "Recording Stopped";
}
#endif

//-----------------------------------------------------------------------------
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
{
    Q_UNUSED(info)

    // Also unlinks and unrefs
    gst_bin_remove_many(GST_BIN(_pipeline), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, nullptr);

    // Give tee its pad back
    gst_element_release_request_pad(_tee, _sink->teepad);
    gst_object_unref(_sink->teepad);

    // Create temporary pipeline
    _pipelineStopRec = gst_pipeline_new("pipeStopRec");

    // Put our elements from the recording branch into the temporary pipeline
    gst_bin_add_many(GST_BIN(_pipelineStopRec), _sink->queue, _sink->parse, _sink->mux, _sink->filesink, nullptr);
    gst_element_link_many(_sink->queue, _sink->parse, _sink->mux, _sink->filesink, nullptr);

    // Add handler for EOS event
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipelineStopRec));
    gst_bus_enable_sync_message_emission(bus);
    g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
    gst_object_unref(bus);

    if(gst_element_set_state(_pipelineStopRec, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        qCDebug(VideoReceiverLog) << "problem starting _pipelineStopRec";
    }

    // Send EOS at the beginning of the pipeline
    GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    qCDebug(VideoReceiverLog) << "Recording branch unlinked";
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != nullptr && user_data != nullptr) {
        VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);
        // We will only act once
        if(g_atomic_int_compare_and_exchange(&pThis->_sink->removing, FALSE, TRUE)) {
            pThis->_detachRecordingBranch(info);
        }
    }
    return GST_PAD_PROBE_REMOVE;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
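// Buffer probe installed on the video sink pad in setVideoSink(); fires for every
// frame that reaches the sink.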
GstPadProbeReturn
VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != nullptr && user_data != nullptr) {
        VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);
        pThis->_noteVideoSinkFrame();
    }

    return GST_PAD_PROBE_OK;
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_noteVideoSinkFrame()
{
    _lastFrameTime = QDateTime::currentSecsSinceEpoch();
}
#endif

//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn
VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    Q_UNUSED(pad);
    if(info != nullptr && user_data != nullptr) {
        GstBuffer* buf = gst_pad_probe_info_get_buffer(info);
        if(GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) { // wait for a keyframe
            return GST_PAD_PROBE_DROP;
        } else {
            VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);

            // set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except those which are downstream!

            gint64 position;

            if (gst_element_query_position(pThis->_pipeline, GST_FORMAT_TIME, &position) != TRUE) {
                qCDebug(VideoReceiverLog) << "Unable to get timeline position, let's hope that downstream elements will survive";

                if (buf->pts != GST_CLOCK_TIME_NONE) {
                    position = buf->pts;
                } else {
                    position = gst_pad_get_offset(pad);
                }
            }

            gst_pad_set_offset(pad, position);

            // Add the filesink once we have a valid I-frame
            gst_bin_add_many(GST_BIN(pThis->_pipeline), pThis->_sink->filesink, nullptr);
            gst_element_link_many(pThis->_sink->mux, pThis->_sink->filesink, nullptr);
            gst_element_sync_state_with_parent(pThis->_sink->filesink);

            qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
            pThis->gotFirstRecordingKeyFrame();
        }
    }

    return GST_PAD_PROBE_REMOVE;
}
#endif

//-----------------------------------------------------------------------------
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
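    // Called once per second: tracks the running state and, if no frame has been
    // seen within the configured timeout, stops the stream so it can be restarted.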
    if(_stopping || _starting) {
        return;
    }

    if(_streaming) {
        if(!_videoRunning) {
            _videoRunning = true;
            emit videoRunningChanged();
1022
        }
1023
    } else {
1024
        if(_videoRunning) {
1025 1026 1027 1028 1029 1030 1031 1032 1033 1034 1035
            _videoRunning = false;
            emit videoRunningChanged();
        }
    }

    if(_videoRunning) {
        uint32_t timeout = 1;
        if(qgcApp()->toolbox() && qgcApp()->toolbox()->settingsManager()) {
            timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
        }

        const qint64 now = QDateTime::currentSecsSinceEpoch();

        if(now - _lastFrameTime > timeout) {
            stop();
            // We want to start it back again with _updateTimer
            _stop = false;
        }
    } else {
        // FIXME: AV: if pipeline is _running but not _streaming for some time then we need to restart
        if(!_stop && !_running && !_uri.isEmpty() && _videoSettings->streamEnabled()->rawValue().toBool()) {
            start();
1047 1048 1049 1050 1051
        }
    }
#endif
}