Newer
Older
/****************************************************************************
*
* (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
#include "SettingsManager.h"
#include "QGCApplication.h"
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif
#include <QDir>
#include <QDateTime>
#include <QSysInfo>
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")
#if defined(QGC_GST_STREAMING)
//-- Supported recording containers. kVideoExtensions[i] is the file extension
//   written by the GStreamer muxer named in kVideoMuxes[i]; the two tables are
//   indexed with the same loop bound elsewhere in this file, so they MUST stay
//   aligned entry-for-entry.
static const char* kVideoExtensions[] =
{
    "mkv",
    "mov",
    "mp4"
};
static const char* kVideoMuxes[] =
{
    "matroskamux",
    "qtmux",
    "mp4mux"
};
#define NUM_MUXES (sizeof(kVideoMuxes) / sizeof(char*))
// Catch any future size mismatch between the parallel tables at compile time.
static_assert(sizeof(kVideoExtensions) / sizeof(kVideoExtensions[0]) == NUM_MUXES,
              "kVideoExtensions and kVideoMuxes must have the same number of entries");
// Constructor: initialize all state flags/handles and wire the timers and
// internal signals used to drive the GStreamer pipeline.
// NOTE(review): this block was mangled by a blame export (missing #endif,
// braces and likely the _streaming initializer) — reconstructed; confirm the
// initializer list against the member declarations in the header.
VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _running(false)
    , _recording(false)
    , _streaming(false)      // used by _handleStateChanged()/_shutdownPipeline()
    , _starting(false)
    , _stopping(false)
    , _stop(true)
    , _sink(nullptr)
    , _tee(nullptr)
    , _pipeline(nullptr)
    , _videoSink(nullptr)
    , _lastFrameId(G_MAXUINT64)
    , _lastFrameTime(0)
    , _restart_time_ms(1389)
    , _socket(nullptr)
    , _serverPresent(false)
    , _tcpTestInterval_ms(5000)
    , _udpReconnect_us(5000000)
#endif
    , _videoRunning(false)
    , _showFullScreen(false)
{
    _videoSettings = qgcApp()->toolbox()->settingsManager()->videoSettings();
#if defined(QGC_GST_STREAMING)
    // One-shot timer that asks the video manager to rebuild the stream.
    _restart_timer.setSingleShot(true);
    connect(&_restart_timer, &QTimer::timeout, this, &VideoReceiver::_restart_timeout);
    // One-shot timer used to probe RTSP/TCP server availability.
    _tcp_timer.setSingleShot(true);
    connect(&_tcp_timer, &QTimer::timeout, this, &VideoReceiver::_tcp_timeout);
    // Bus messages arrive on the GStreamer streaming thread; these queued
    // signal hops bring the handling back onto this object's thread.
    connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
    connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
    connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
    // Periodic watchdog that tracks frame arrival.
    connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
    _frameTimer.start(1000);
#endif
}
//-----------------------------------------------------------------------------
void
VideoReceiver::grabImage(QString imageFile)
{
    // Record the path the next captured frame should be written to and
    // notify listeners; the actual capture is performed elsewhere in
    // response to imageFileChanged() — TODO confirm against callers.
    _imageFile = imageFile;
    emit imageFileChanged();
}
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// "pad-added" callback: link the freshly created pad of 'element' to the
// "sink" pad of the element passed through 'data'.
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    //g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
    gchar* description = gst_caps_to_string(p_caps);
    qCDebug(VideoReceiverLog) << p_caps << ", " << description;
    GstElement* sink = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, sink, "sink") == false)
        qCritical() << "newPadCB : failed to link elements\n";
    // FIX: gst_caps_to_string() and gst_pad_get_pad_template_caps() both
    // transfer ownership to the caller — these were leaked on every new pad.
    g_free(description);
    description = nullptr;
    gst_caps_unref(p_caps);
    p_caps = nullptr;
    g_free(name);
}
Andrew Voznytsa
committed
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
// Answer a decodebin CAPS autoplug query with whatever the glupload element's
// sink pad can accept. Returns TRUE only if the resulting caps are non-empty.
static gboolean
autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* upload = (GstElement*)data;

    GstPad* uploadSink = gst_element_get_static_pad(upload, "sink");
    if (uploadSink == nullptr) {
        qCritical() << "autoplugQueryCaps(): No sink pad found";
        return FALSE;
    }

    GstCaps* filter;
    gst_query_parse_caps(query, &filter);

    // Intersect the requested filter with what the sink pad supports and
    // publish that as the query result.
    GstCaps* accepted = gst_pad_query_caps(uploadSink, filter);
    gst_query_set_caps_result(query, accepted);

    const gboolean handled = !gst_caps_is_empty(accepted);

    gst_caps_unref(accepted);
    accepted = nullptr;
    gst_object_unref(uploadSink);
    uploadSink = nullptr;

    return handled;
}
// Forward a decodebin CONTEXT autoplug query to the video sink's sink pad.
static gboolean
autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    GstElement* videoSink = (GstElement*)data;

    GstPad* sinkPad = gst_element_get_static_pad(videoSink, "sink");
    if (sinkPad == nullptr) {
        qCritical() << "autoplugQueryContext(): No sink pad found";
        return FALSE;
    }

    // Let the sink pad answer the query directly.
    const gboolean handled = gst_pad_query(sinkPad, query);

    gst_object_unref(sinkPad);
    sinkPad = nullptr;

    return handled;
}
// decodebin "autoplug-query" dispatcher: route CAPS and CONTEXT queries to
// their dedicated handlers; everything else is unhandled.
static gboolean
autoplugQueryCB(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
    const GstQueryType queryType = GST_QUERY_TYPE(query);

    if (queryType == GST_QUERY_CAPS) {
        return autoplugQueryCaps(bin, pad, element, query, data);
    }

    if (queryType == GST_QUERY_CONTEXT) {
        return autoplugQueryContext(bin, pad, element, query, data);
    }

    return FALSE;
}
Andrew Voznytsa
committed
//-----------------------------------------------------------------------------
// "pad-added" callback: expose 'pad' on the element's enclosing bin through a
// ghost pad carrying the same name, so the bin can be linked from outside.
static void
_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* padName = gst_pad_get_name(pad);
    GstPad* proxy = gst_ghost_pad_new(padName, pad);
    g_free(padName);
    padName = nullptr;

    gst_pad_set_active(proxy, TRUE);

    if (!gst_element_add_pad(GST_ELEMENT_PARENT(element), proxy)) {
        qCritical() << "Failed to add ghost pad to source";
    }
}
// "pad-added" callback: if the new pad carries RTP, splice an rtpjitterbuffer
// between it and the downstream element (passed via 'data') before linking;
// otherwise link the pad directly via newPadCB().
static void
_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
{
    gboolean isRtpPad = FALSE;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, nullptr);

        if (caps != nullptr) {
            if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
                isRtpPad = TRUE;
            }
            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    // Ref returned by gst_element_get_static_pad(buffer, "src"); released
    // after linking (FIX: previously leaked).
    GstPad* jitterSrcPad = nullptr;

    if (isRtpPad) {
        GstElement* buffer;

        if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) != nullptr) {
            gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), buffer);
            gst_element_sync_state_with_parent(buffer);

            GstPad* sinkpad = gst_element_get_static_pad(buffer, "sink");

            if (sinkpad != nullptr) {
                const GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);
                gst_object_unref(sinkpad);
                sinkpad = nullptr;

                if (ret == GST_PAD_LINK_OK) {
                    // Continue the link from the jitter buffer's src pad.
                    jitterSrcPad = gst_element_get_static_pad(buffer, "src");
                    pad = jitterSrcPad;
                    element = buffer;
                } else {
                    // FIX: error messages named the wrong function
                    // ("_wrapWithGhostPad") — corrected to this function.
                    qCritical() << "_linkPadWithOptionalBuffer partially failed. Error with gst_pad_link()";
                }
            } else {
                qCritical() << "_linkPadWithOptionalBuffer partially failed. Error with gst_element_get_static_pad()";
            }
        } else {
            qCritical() << "_linkPadWithOptionalBuffer partially failed. Error with gst_element_factory_make('rtpjitterbuffer')";
        }
    }

    newPadCB(element, pad, data);

    if (jitterSrcPad != nullptr) {
        gst_object_unref(jitterSrcPad);
        jitterSrcPad = nullptr;
    }
}
// gst_element_foreach_src_pad() callback. Sets bit 0 of *user_data when the
// element has at least one src pad, and bit 1 when that pad can carry RTP.
// Always returns TRUE so iteration continues over all pads.
static gboolean
_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
    int* probeRes = (int*)user_data;

    *probeRes |= 1;

    GstCaps* filter = gst_caps_from_string("application/x-rtp");

    if (filter != nullptr) {
        GstCaps* caps = gst_pad_query_caps(pad, nullptr);

        if (caps != nullptr) {
            if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
                *probeRes |= 2;
            }
            gst_caps_unref(caps);
            caps = nullptr;
        }

        gst_caps_unref(filter);
        filter = nullptr;
    }

    return TRUE;
}
// Build the "sourcebin" for the given URI: a network source (tcpclientsrc,
// rtspsrc or udpsrc) feeding a demuxer/parser (tsdemux for MPEG-TS, parsebin
// otherwise), optionally with an rtpjitterbuffer in between for RTP streams.
// Returns the assembled bin, or nullptr on any failure (all partially built
// elements are released).
GstElement*
VideoReceiver::_makeSource(const QString& uri)
{
    if (uri.isEmpty()) {
        qCritical() << "VideoReceiver::_makeSource() failed because URI is not specified";
        return nullptr;
    }

    bool isTaisync  = uri.contains("tsusb://");
    bool isUdp264   = uri.contains("udp://");
    bool isRtsp     = uri.contains("rtsp://");
    bool isUdp265   = uri.contains("udp265://");
    bool isTcpMPEGTS= uri.contains("tcp://");
    bool isUdpMPEGTS= uri.contains("mpegts://");

    GstElement* source = nullptr;
    GstElement* buffer = nullptr;
    GstElement* parser = nullptr;
    GstElement* bin    = nullptr;
    GstElement* srcbin = nullptr;

    do {
        QUrl url(uri);

        if(isTcpMPEGTS) {
            if ((source = gst_element_factory_make("tcpclientsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "host", qPrintable(url.host()), "port", url.port(), nullptr);
            }
        } else if (isRtsp) {
            if ((source = gst_element_factory_make("rtspsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "location", qPrintable(uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
            }
        } else if(isUdp264 || isUdp265 || isUdpMPEGTS || isTaisync) {
            if ((source = gst_element_factory_make("udpsrc", "source")) != nullptr) {
                g_object_set(static_cast<gpointer>(source), "uri", QString("udp://%1:%2").arg(qPrintable(url.host()), QString::number(url.port())).toUtf8().data(), nullptr);

                GstCaps* caps = nullptr;

                if(isUdp264) {
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == nullptr) {
                        qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_caps_from_string()";
                        break;
                    }
                } else if (isUdp265) {
                    // FIX: this branch previously re-tested isUdp264, so the
                    // H265 caps were never applied for udp265:// URIs.
                    if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265")) == nullptr) {
                        qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_caps_from_string()";
                        break;
                    }
                }

                if (caps != nullptr) {
                    g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
                    gst_caps_unref(caps);
                    caps = nullptr;
                }
            }
        } else {
            qWarning() << "VideoReceiver::_makeSource(): URI is not recognized";
        }

        if (!source) {
            qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_factory_make() for data source";
            break;
        }

        // FIXME: AV: Android does not determine MPEG2-TS via parsebin - have to explicitly state which demux to use
        if (isTcpMPEGTS || isUdpMPEGTS) {
            if ((parser = gst_element_factory_make("tsdemux", "parser")) == nullptr) {
                qCritical(VideoReceiverLog) << "gst_element_factory_make('tsdemux') failed";
                break;
            }
        } else {
            if ((parser = gst_element_factory_make("parsebin", "parser")) == nullptr) {
                qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_factory_make('parsebin')";
                break;
            }
        }

        if ((bin = gst_bin_new("sourcebin")) == nullptr) {
            qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_bin_new('sourcebin')";
            break;
        }

        gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);

        // Probe the source's existing src pads: bit 0 = pad present,
        // bit 1 = pad speaks RTP (needs a jitter buffer).
        int probeRes = 0;
        gst_element_foreach_src_pad(source, _padProbe, &probeRes);

        if (probeRes & 1) {
            if (probeRes & 2) {
                if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
                    qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_factory_make('rtpjitterbuffer')";
                    break;
                }
                gst_bin_add(GST_BIN(bin), buffer);
                if (!gst_element_link_many(source, buffer, parser, nullptr)) {
                    qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_link()";
                    break;
                }
            } else {
                if (!gst_element_link(source, parser)) {
                    qCritical() << "VideoReceiver::_makeSource() failed. Error with gst_element_link()";
                    break;
                }
            }
        } else {
            // No static src pads yet (e.g. rtspsrc) — link dynamically.
            g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
        }

        g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);

        // Ownership transferred to the bin; stop tracking for cleanup.
        source = buffer = parser = nullptr;

        srcbin = bin;
        bin = nullptr;
    } while(0);

    // Release anything still owned after a failure path.
    if (bin != nullptr) {
        gst_object_unref(bin);
        bin = nullptr;
    }
    if (parser != nullptr) {
        gst_object_unref(parser);
        parser = nullptr;
    }
    if (buffer != nullptr) {
        gst_object_unref(buffer);
        buffer = nullptr;
    }
    if (source != nullptr) {
        gst_object_unref(source);
        source = nullptr;
    }

    return srcbin;
}
//-----------------------------------------------------------------------------
void
Matej Frančeškin
committed
VideoReceiver::_restart_timeout()
Matej Frančeškin
committed
qgcApp()->toolbox()->videoManager()->restartVideo();
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_tcp_timeout()
{
//-- If socket is live, we got no connection nor a socket error
delete _socket;
_socket = nullptr;
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
if(_videoSettings->streamEnabled()->rawValue().toBool()) {
//-- RTSP will try to connect to the server. If it cannot connect,
// it will simply give up and never try again. Instead, we keep
// attempting a connection on this timer. Once a connection is
// found to be working, only then we actually start the stream.
QUrl url(_uri);
//-- If RTSP and no port is defined, set default RTSP port (554)
if(_uri.contains("rtsp://") && url.port() <= 0) {
url.setPort(554);
}
_socket = new QTcpSocket;
QNetworkProxy tempProxy;
tempProxy.setType(QNetworkProxy::DefaultProxy);
_socket->setProxy(tempProxy);
connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
_socket->connectToHost(url.host(), static_cast<uint16_t>(url.port()));
_tcp_timer.start(_tcpTestInterval_ms);
}
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Probe socket connected: the server is reachable, so dispose of the probe
// socket and, if streaming is still enabled, start the real stream.
void
VideoReceiver::_connected()
{
    //-- Server showed up. Now we start the stream.
    _tcp_timer.stop();
    _socket->deleteLater();
    _socket = nullptr;

    if (!_videoSettings->streamEnabled()->rawValue().toBool()) {
        return;
    }

    _serverPresent = true;
    start();
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Probe socket failed: drop the socket and re-arm the retry timer as long as
// streaming is still enabled.
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);

    _socket->deleteLater();
    _socket = nullptr;

    //-- Try again in a while
    if (!_videoSettings->streamEnabled()->rawValue().toBool()) {
        return;
    }

    _tcp_timer.start(_tcpTestInterval_ms);
}
#endif
//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
// +-->queue-->decoder-->_videosink
// |
// datasource-->demux-->parser-->tee
// ^
// |
// +-Here we will later link elements for recording
qCDebug(VideoReceiverLog) << "start():" << _uri;
if(!_videoSettings->streamEnabled()->rawValue().toBool() ||
!_videoSettings->streamConfigured()) {
qCDebug(VideoReceiverLog) << "start() but not enabled/configured";
return;
}
Matej Frančeškin
committed
#if defined(QGC_GST_STREAMING)
Jacob Walser
committed
Andrew Voznytsa
committed
QString uri = _uri;
Matej Frančeškin
committed
#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
Gus Grubba
committed
//-- Taisync on iOS or Android sends a raw h.264 stream
Andrew Voznytsa
committed
if (qgcApp()->toolbox()->videoManager()->isTaisync()) {
uri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
}
Andrew Voznytsa
committed
if (uri.isEmpty()) {
qCritical() << "VideoReceiver::start() failed because URI is not specified";
return;
}
Andrew Voznytsa
committed
bool useTcpConnection = uri.contains("rtsp://") || uri.contains("tcp://");
qCritical() << "VideoReceiver::start() failed because video sink is not set";
return;
}
Jacob Walser
committed
if(_running) {
qCDebug(VideoReceiverLog) << "Already running!";
return;
}
Jacob Walser
committed
_starting = true;
//-- For RTSP and TCP, check to see if server is there first
Andrew Voznytsa
committed
if(!_serverPresent && useTcpConnection) {
_tcp_timer.start(100);
return;
}
_lastFrameId = G_MAXUINT64;
_lastFrameTime = 0;
bool pipelineUp = false;
Andrew Voznytsa
committed
GstElement* source = nullptr;
GstElement* queue = nullptr;
GstElement* decoder = nullptr;
if ((_pipeline = gst_pipeline_new("receiver")) == nullptr) {
qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
g_object_set(_pipeline, "message-forward", TRUE, nullptr);
Andrew Voznytsa
committed
if ((source = _makeSource(uri)) == nullptr) {
qCritical() << "VideoReceiver::start() failed. Error with _makeSource()";
if((_tee = gst_element_factory_make("tee", nullptr)) == nullptr) {
qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
break;
}
if((queue = gst_element_factory_make("queue", nullptr)) == nullptr) {
// TODO: We may want to add queue2 max-size-buffers=1 to get lower latency
// We should compare gstreamer scripts to QGroundControl to determine the need
Jacob Walser
committed
qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
break;
}
Andrew Voznytsa
committed
if ((decoder = gst_element_factory_make("decodebin", "decoder")) == nullptr) {
qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('decodebin')";
Andrew Voznytsa
committed
gst_bin_add_many(GST_BIN(_pipeline), source, _tee, queue, decoder, _videoSink, nullptr);
Andrew Voznytsa
committed
g_signal_connect(source, "pad-added", G_CALLBACK(newPadCB), _tee);
if(!gst_element_link_many(_tee, queue, decoder, nullptr)) {
qCritical() << "Unable to link UDP elements.";
break;
Andrew Voznytsa
committed
g_signal_connect(decoder, "pad-added", G_CALLBACK(newPadCB), _videoSink);
g_signal_connect(decoder, "autoplug-query", G_CALLBACK(autoplugQueryCB), _videoSink);
Andrew Voznytsa
committed
source = queue = decoder = nullptr;
if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
gst_bus_enable_sync_message_emission(bus);
g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
gst_object_unref(bus);
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-paused");
running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;
} while(0);
if (!running) {
qCritical() << "VideoReceiver::start() failed";
// In newer versions, the pipeline will clean up all references that are added to it
gst_object_unref(_pipeline);
// If we failed before adding items to the pipeline, then clean up
if (!pipelineUp) {
gst_object_unref(decoder);
if (queue != nullptr) {
gst_object_unref(queue);
queue = nullptr;
}
Andrew Voznytsa
committed
if (source != nullptr) {
gst_object_unref(source);
source = nullptr;
gst_object_unref(_tee);
_running = false;
} else {
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-playing");
_running = true;
qCDebug(VideoReceiverLog) << "Running";
Jacob Walser
committed
_starting = false;
//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
if(qgcApp() && qgcApp()->runningUnitTests()) {
#if defined(QGC_GST_STREAMING)
Jacob Walser
committed
qCDebug(VideoReceiverLog) << "stop()";
if(!_streaming) {
_shutdownPipeline();
Jacob Walser
committed
qCDebug(VideoReceiverLog) << "Stopping _pipeline";
GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
gst_bus_disable_sync_message_emission(bus);
Jacob Walser
committed
gst_element_send_event(_pipeline, gst_event_new_eos());
_stopping = true;
GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
Jacob Walser
committed
gst_object_unref(bus);
if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
_shutdownPipeline();
qCritical() << "Error stopping pipeline!";
} else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
_handleEOS();
}
//-----------------------------------------------------------------------------
void
VideoReceiver::setUri(const QString & uri)
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Tear down the GStreamer pipeline (if any) and reset all streaming state.
void
VideoReceiver::_shutdownPipeline() {
    if(!_pipeline) {
        qCDebug(VideoReceiverLog) << "No pipeline";
        return;
    }
    GstBus* bus = nullptr;
    if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);
        bus = nullptr;
    }
    gst_element_set_state(_pipeline, GST_STATE_NULL);
    gst_object_unref(_pipeline);
    // FIX: _pipeline was left dangling after the unref; other code paths
    // (e.g. _handleStateChanged(), setVideoSink()) test 'if (_pipeline)'.
    _pipeline = nullptr;
    _serverPresent = false;
    _streaming = false;
    _recording = false;
    _stopping = false;
    _running = false;
    emit recordingChanged();
}
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleError() {
qCDebug(VideoReceiverLog) << "Gstreamer error!";
stop();
Matej Frančeškin
committed
_restart_timer.start(_restart_time_ms);
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_handleEOS() {
if(_recording && _sink->removing) {
_shutdownRecordingBranch();
}
Jacob Walser
committed
qCDebug(VideoReceiverLog) << "Stopped";
} else if(_recording && _sink->removing) {
_shutdownRecordingBranch();
} else {
qWarning() << "VideoReceiver: Unexpected EOS!";
Matej Frančeškin
committed
_handleError();
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Track the pipeline's PLAYING state in _streaming whenever a state-change
// message arrives. (Closing braces restored; lost to blame-export artifacts.)
void
VideoReceiver::_handleStateChanged() {
    if(_pipeline) {
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        //qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// GStreamer bus sync handler (runs on the streaming thread). Translates bus
// messages into Qt signals so handling happens on the receiver's own thread.
// 'data' is the VideoReceiver instance registered with g_signal_connect().
// (Opening brace, pThis declaration and 'return TRUE;' restored; they were
// destroyed by blame-export artifacts — pThis is clearly referenced below.)
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    VideoReceiver* pThis = (VideoReceiver*)data;
    switch(GST_MESSAGE_TYPE(msg)) {
    case(GST_MESSAGE_ERROR): {
        gchar* debug;
        GError* error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        qCritical() << error->message;
        g_error_free(error);
        pThis->msgErrorReceived();
    }
        break;
    case(GST_MESSAGE_EOS):
        pThis->msgEOSReceived();
        break;
    case(GST_MESSAGE_STATE_CHANGED):
        pThis->msgStateChangedReceived();
        break;
    case(GST_MESSAGE_ELEMENT): {
        // "message-forward" is enabled on the pipeline, so EOS from child
        // bins arrives wrapped in a GstBinForwarded element message.
        const GstStructure *s = gst_message_get_structure (msg);
        if (gst_structure_has_name (s, "GstBinForwarded")) {
            GstMessage *forward_msg = NULL;
            gst_structure_get (s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);
            if (forward_msg != nullptr) {
                if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
                    pThis->msgEOSReceived();
                }
                gst_message_unref(forward_msg);
                forward_msg = nullptr;
            }
        }
    }
        break;
    default:
        break;
    }
    return TRUE;
}
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void
VideoReceiver::_cleanupOldVideos()
//-- Only perform cleanup if storage limit is enabled
if(_videoSettings->enableStorageLimit()->rawValue().toBool()) {
QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
QDir videoDir = QDir(savePath);
videoDir.setFilter(QDir::Files | QDir::Readable | QDir::NoSymLinks | QDir::Writable);
videoDir.setSorting(QDir::Time);
//-- All the movie extensions we support
QStringList nameFilters;
for(uint32_t i = 0; i < NUM_MUXES; i++) {
nameFilters << QString("*.") + QString(kVideoExtensions[i]);
videoDir.setNameFilters(nameFilters);
//-- get the list of videos stored
QFileInfoList vidList = videoDir.entryInfoList();
if(!vidList.isEmpty()) {
uint64_t total = 0;
//-- Settings are stored using MB
uint64_t maxSize = (_videoSettings->maxVideoSize()->rawValue().toUInt() * 1024 * 1024);
//-- Compute total used storage
for(int i = 0; i < vidList.size(); i++) {
total += vidList[i].size();
}
//-- Remove old movies until max size is satisfied.
while(total >= maxSize && !vidList.isEmpty()) {
total -= vidList.last().size();
qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
QFile file (vidList.last().filePath());
file.remove();
vidList.removeLast();
}
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
// Install (or clear) the GStreamer video sink. Refused while a pipeline is
// active. Takes its own reference on the sink and attaches a buffer probe so
// frame arrival can be observed.
void
VideoReceiver::setVideoSink(GstElement* videoSink)
{
    if(_pipeline != nullptr) {
        qCDebug(VideoReceiverLog) << "Video receiver pipeline is active, video sink change is not possible";
        return;
    }

    // Release any previously installed sink.
    if (_videoSink != nullptr) {
        gst_object_unref(_videoSink);
        _videoSink = nullptr;
    }

    if (videoSink == nullptr) {
        return;
    }

    _videoSink = videoSink;
    gst_object_ref(_videoSink);

    GstPad* sinkPad = gst_element_get_static_pad(_videoSink, "sink");
    if (sinkPad == nullptr) {
        qCDebug(VideoReceiverLog) << "Unable to find sink pad of video sink";
        return;
    }

    // Observe every buffer reaching the sink.
    gst_pad_add_probe(sinkPad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER), _videoSinkProbe, this, nullptr);
    gst_object_unref(sinkPad);
    sinkPad = nullptr;
}
#endif
//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
// +-->queue-->decoder-->_videosink
// |
Andrew Voznytsa
committed
// source-->tee
// | +---------_sink----------+
// | | |
// we are adding these elements-> +->teepad-->queue-->_filesink |
// | |
// +------------------------+
GstElement*
VideoReceiver::_makeFileSink(const QString& videoFile, unsigned format)
{
GstElement* fileSink = nullptr;
GstElement* mux = nullptr;
GstElement* sink = nullptr;
GstElement* bin = nullptr;
bool releaseElements = true;
do{
if ((mux = gst_element_factory_make(kVideoMuxes[format], nullptr)) == nullptr) {
qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_element_factory_make('" << kVideoMuxes[format] << "')";
break;
}
if ((sink = gst_element_factory_make("filesink", nullptr)) == nullptr) {
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_element_factory_make('filesink')";
break;
}
g_object_set(static_cast<gpointer>(sink), "location", qPrintable(videoFile), nullptr);
if ((bin = gst_bin_new("sinkbin")) == nullptr) {
qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_bin_new('sinkbin')";
break;
}
GstPadTemplate* padTemplate;
if ((padTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mux), "video_%u")) == nullptr) {
qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_element_class_get_pad_template(mux)";
break;
}
// FIXME: AV: pad handling is potentially leaking (and other similar places too!)
GstPad* pad;
if ((pad = gst_element_request_pad(mux, padTemplate, nullptr, nullptr)) == nullptr) {
qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_element_request_pad(mux)";
break;
}
gst_bin_add_many(GST_BIN(bin), mux, sink, nullptr);
releaseElements = false;
GstPad* ghostpad = gst_ghost_pad_new("sink", pad);
gst_element_add_pad(bin, ghostpad);
gst_object_unref(pad);
pad = nullptr;
if (!gst_element_link(mux, sink)) {
qCritical() << "VideoReceiver::_makeFileSink() failed. Error with gst_element_link()";
break;
}
fileSink = bin;
bin = nullptr;
} while(0);
if (releaseElements) {
if (sink != nullptr) {
gst_object_unref(sink);
sink = nullptr;
}
if (mux != nullptr) {
gst_object_unref(mux);
mux = nullptr;
}
}
if (bin != nullptr) {
gst_object_unref(bin);
bin = nullptr;
}