/****************************************************************************
*
* (c) 2009-2016 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
/**
* @file
* @brief QGC Video Receiver
* @author Gus Grubba <mavlink@grubba.com>
*/
#include "VideoReceiver.h"
#include <QDebug>
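// All GStreamer-specific members and logic below are compiled only when
// QGC_GST_STREAMING is defined.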
VideoReceiver::VideoReceiver(QObject* parent)
    : QObject(parent)
#if defined(QGC_GST_STREAMING)
    , _pipeline(NULL)
    , _videoSink(NULL)
    , _socket(NULL)
    , _serverPresent(false)
#endif
{
#if defined(QGC_GST_STREAMING)
    _timer.setSingleShot(true);
    connect(&_timer, &QTimer::timeout, this, &VideoReceiver::_timeout);
#endif
}

#if defined(QGC_GST_STREAMING)
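// Replace the current video sink: drop our reference to the old sink (if any)
// and take a reference to the new one (sinking its floating reference).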
void VideoReceiver::setVideoSink(GstElement* sink)
{
    if (_videoSink) {
        gst_object_unref(_videoSink);
        _videoSink = NULL;
    }
    if (sink) {
        _videoSink = sink;
        gst_object_ref_sink(_videoSink);
    }
}
#endif
#if defined(QGC_GST_STREAMING)
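// "pad-added" callback: rtspsrc creates its source pads dynamically, so each new
// pad is linked here to the sink pad of the rtph264depay element passed via 'data'.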
static void newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);
    GstCaps* p_caps = gst_pad_get_pad_template_caps(pad);
    gchar* description = gst_caps_to_string(p_caps);
    qDebug() << p_caps << ", " << description;
    g_free(description);
    gst_caps_unref(p_caps); // gst_pad_get_pad_template_caps() returns a reference we must release
    GstElement* p_rtph264depay = GST_ELEMENT(data);
    if (gst_element_link_pads(element, name, p_rtph264depay, "sink") == false) {
        qCritical() << "newPadCB : failed to link elements";
    }
    g_free(name);
}
#endif
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_connected()
{
    //-- Server showed up. Now we start the stream.
    _timer.stop();
    delete _socket;
    _socket = NULL;
    _serverPresent = true;
    start();
}
#endif
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
    Q_UNUSED(socketError);
    delete _socket;
    _socket = NULL;
    //-- Try again in 5 seconds
    _timer.start(5000);
}
#endif
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_timeout()
{
    //-- If the socket is still alive, we got neither a connection nor a socket error
    if (_socket) {
        delete _socket;
        _socket = NULL;
    }
    //-- RTSP will try to connect to the server. If it cannot connect,
    //   it simply gives up and never tries again. Instead, we keep
    //   attempting a connection on this timer. Only once a connection
    //   is found to be working do we actually start the stream.
    QUrl url(_uri);
    _socket = new QTcpSocket;
    connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
    connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
    //qDebug() << "Trying to connect to:" << url.host() << url.port();
    _socket->connectToHost(url.host(), url.port());
    _timer.start(5000);
}
#endif
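
// Build and start the GStreamer pipeline:
//   (udpsrc | rtspsrc) -> rtph264depay -> h264parse -> avdec_h264 -> _videoSink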
void VideoReceiver::start()
{
#if defined(QGC_GST_STREAMING)
    if (_uri.isEmpty()) {
        qCritical() << "VideoReceiver::start() failed because URI is not specified";
        return;
    }
    if (_videoSink == NULL) {
        qCritical() << "VideoReceiver::start() failed because video sink is not set";
        return;
    }
    bool isUdp = _uri.contains("udp://");
    //-- For RTSP, check that the server is there first
    if (!_serverPresent && !isUdp) {
        _timer.start(100);
        return;
    }
    bool running = false;
    GstElement* dataSource = NULL;
    GstCaps*    caps       = NULL;
    GstElement* demux      = NULL;
    GstElement* parser     = NULL;
    GstElement* decoder    = NULL;
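    // Assemble the pipeline inside a do/while(0) so any failure can break out
    // to the common cleanup code below.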
    do {
        if ((_pipeline = gst_pipeline_new("receiver")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_pipeline_new()";
            break;
        }
        if (isUdp) {
            dataSource = gst_element_factory_make("udpsrc", "udp-source");
        } else {
            dataSource = gst_element_factory_make("rtspsrc", "rtsp-source");
        }
        if (!dataSource) {
            qCritical() << "VideoReceiver::start() failed. Error with data source for gst_element_factory_make()";
            break;
        }
        if (isUdp) {
            if ((caps = gst_caps_from_string("application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264")) == NULL) {
                qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
                break;
            }
            g_object_set(G_OBJECT(dataSource), "uri", qPrintable(_uri), "caps", caps, NULL);
        } else {
            // rtspsrc's "timeout" property is a guint64 (microseconds)
            g_object_set(G_OBJECT(dataSource), "location", qPrintable(_uri), "latency", 0, "udp-reconnect", 1, "timeout", static_cast<guint64>(5000000), NULL);
        }
        if ((demux = gst_element_factory_make("rtph264depay", "rtp-h264-depacketizer")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('rtph264depay')";
            break;
        }
        if (!isUdp) {
            g_signal_connect(dataSource, "pad-added", G_CALLBACK(newPadCB), demux);
        }
        if ((parser = gst_element_factory_make("h264parse", "h264-parser")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('h264parse')";
            break;
        }
        if ((decoder = gst_element_factory_make("avdec_h264", "h264-decoder")) == NULL) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('avdec_h264')";
            break;
        }
        gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, decoder, _videoSink, NULL);
        gboolean res = FALSE;
        if (isUdp) {
            res = gst_element_link_many(dataSource, demux, parser, decoder, _videoSink, NULL);
        } else {
            res = gst_element_link_many(demux, parser, decoder, _videoSink, NULL);
        }
        if (!res) {
            qCritical() << "VideoReceiver::start() failed. Error with gst_element_link_many()";
            break;
        }
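        // The pipeline now owns the elements; clear the local pointers so the
        // error path below does not unref them a second time.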
        dataSource = demux = parser = decoder = NULL;
        GstBus* bus = NULL;
        if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
            gst_bus_add_watch(bus, _onBusMessage, this);
            gst_object_unref(bus);
            bus = NULL;
        }
        running = gst_element_set_state(_pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;
    } while(0);
    if (caps != NULL) {
        gst_caps_unref(caps);
        caps = NULL;
    }
    if (!running) {
        qCritical() << "VideoReceiver::start() failed";
        if (decoder != NULL) {
            gst_object_unref(decoder);
            decoder = NULL;
        }
        if (parser != NULL) {
            gst_object_unref(parser);
            parser = NULL;
        }
        if (demux != NULL) {
            gst_object_unref(demux);
            demux = NULL;
        }
        if (dataSource != NULL) {
            gst_object_unref(dataSource);
            dataSource = NULL;
        }
        if (_pipeline != NULL) {
            gst_object_unref(_pipeline);
            _pipeline = NULL;
        }
    }
#endif
}

void VideoReceiver::stop()
{
#if defined(QGC_GST_STREAMING)
    if (_pipeline != NULL) {
        gst_element_set_state(_pipeline, GST_STATE_NULL);
        gst_object_unref(_pipeline);
        _pipeline = NULL;
    }
#endif
}

void VideoReceiver::setUri(const QString & uri)
{
    stop();
    _uri = uri;
}
#if defined(QGC_GST_STREAMING)
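// Handle messages from the pipeline bus: stop on end-of-stream; log the error
// and stop on GST_MESSAGE_ERROR.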
void VideoReceiver::_onBusMessage(GstMessage* msg)
{
    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        stop();
        break;
    case GST_MESSAGE_ERROR:
        do {
            gchar* debug;
            GError* error;
            gst_message_parse_error(msg, &error, &debug);
            g_free(debug);
            qCritical() << error->message;
            g_error_free(error);
        } while(0);
        stop();
        break;
    default:
        break;
    }
}
#endif
#if defined(QGC_GST_STREAMING)
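// Static GstBusFunc trampoline: forwards each bus message to the instance handler above.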
gboolean VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus)
    Q_ASSERT(msg != NULL && data != NULL);
    VideoReceiver* pThis = (VideoReceiver*)data;
    pThis->_onBusMessage(msg);
    return TRUE;
}
#endif