Commit 6b405306 authored by Gus Grubba

iOS and Android need a different, raw h.264 pipeline

parent d0113827
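For context on the commit message: on most platforms QGC receives RTP-wrapped H.264 over UDP and runs it through rtph264depay before parsing and decoding, while the Taisync accessory on iOS and Android delivers a raw H.264 elementary stream, so the depacketizer has to be dropped. Below is a minimal sketch (not part of this change) contrasting the two pipeline shapes. It uses gst_parse_launch() descriptions instead of the element-by-element construction in VideoReceiver::start(), omits the tee/recording branch, and assumes avdec_h264 (gst-libav) and autovideosink as stand-ins for the platform decoder and sink; 5600 is the video UDP port that TaisyncLink configures further down.

// Hedged sketch only; the caps strings, decoder, and sink are assumptions, not QGC code.
#include <gst/gst.h>

int main(int argc, char* argv[])
{
    gst_init(&argc, &argv);

    // Default UDP path: H.264 arrives RTP-wrapped, so rtph264depay sits between
    // the UDP source and the H.264 parser.
    const gchar* rtpDesc =
        "udpsrc port=5600 caps=\"application/x-rtp,media=(string)video,"
        "clock-rate=(int)90000,encoding-name=(string)H264\" "
        "! rtph264depay ! h264parse ! avdec_h264 ! autovideosink";

    // Taisync on iOS/Android: a raw H.264 elementary stream over UDP, so the
    // depacketizer is dropped and h264parse reads the byte stream directly.
    // (Caps are set here so the standalone sketch negotiates on its own; the
    // commit itself only sets the UDP port on the source.)
    const gchar* rawDesc =
        "udpsrc port=5600 caps=\"video/x-h264,stream-format=(string)byte-stream\" "
        "! h264parse ! avdec_h264 ! autovideosink";

    bool taisync = (argc > 1 && g_strcmp0(argv[1], "--taisync") == 0);

    GError* error = nullptr;
    GstElement* pipeline = gst_parse_launch(taisync ? rawDesc : rtpDesc, &error);
    if (pipeline == nullptr) {
        g_printerr("Failed to build pipeline: %s\n", error ? error->message : "unknown");
        return 1;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_main_loop_run(g_main_loop_new(nullptr, FALSE));
    return 0;
}

Build with g++ sketch.cc $(pkg-config --cflags --libs gstreamer-1.0), feed it an H.264 stream on UDP port 5600, and pass --taisync to select the raw variant.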

VideoManager.cc
@@ -34,10 +34,6 @@ QGC_LOGGING_CATEGORY(VideoManagerLog, "VideoManagerLog")
//-----------------------------------------------------------------------------
VideoManager::VideoManager(QGCApplication* app, QGCToolbox* toolbox)
: QGCTool(app, toolbox)
- , _videoReceiver(nullptr)
- , _videoSettings(nullptr)
- , _fullScreen(false)
- , _activeVehicle(nullptr)
{
}

VideoManager.h
@@ -36,6 +36,7 @@ public:
Q_PROPERTY(bool hasVideo READ hasVideo NOTIFY hasVideoChanged)
Q_PROPERTY(bool isGStreamer READ isGStreamer NOTIFY isGStreamerChanged)
Q_PROPERTY(bool isAutoStream READ isAutoStream NOTIFY isAutoStreamChanged)
+ Q_PROPERTY(bool isTaisync READ isTaisync WRITE setIsTaisync NOTIFY isTaisyncChanged)
Q_PROPERTY(QString videoSourceID READ videoSourceID NOTIFY videoSourceIDChanged)
Q_PROPERTY(bool uvcEnabled READ uvcEnabled CONSTANT)
Q_PROPERTY(bool fullScreen READ fullScreen WRITE setfullScreen NOTIFY fullScreenChanged)
@@ -45,6 +46,7 @@ public:
bool hasVideo ();
bool isGStreamer ();
bool isAutoStream ();
+ bool isTaisync () { return _isTaisync; }
bool fullScreen () { return _fullScreen; }
QString videoSourceID () { return _videoSourceID; }
QString autoURL () { return QString(_streamInfo.uri); }
@@ -59,6 +61,7 @@ public:
#endif
void setfullScreen (bool f) { _fullScreen = f; emit fullScreenChanged(); }
+ void setIsTaisync (bool t) { _isTaisync = t; emit isTaisyncChanged(); }
// Override from QGCTool
void setToolbox (QGCToolbox *toolbox);
@@ -73,6 +76,7 @@ signals:
void fullScreenChanged ();
void isAutoStreamChanged ();
void aspectRatioChanged ();
+ void isTaisyncChanged ();
private slots:
void _videoSourceChanged ();
@@ -88,11 +92,13 @@ private:
void _updateSettings ();
void _restartVideo ();
- VideoReceiver* _videoReceiver;
- VideoSettings* _videoSettings;
+ private:
+ bool _isTaisync = false;
+ VideoReceiver* _videoReceiver = nullptr;
+ VideoSettings* _videoSettings = nullptr;
QString _videoSourceID;
- bool _fullScreen;
- Vehicle* _activeVehicle;
+ bool _fullScreen = false;
+ Vehicle* _activeVehicle = nullptr;
mavlink_video_stream_information_t _streamInfo;
};

VideoReceiver.cc
@@ -18,7 +18,9 @@
#include "SettingsManager.h"
#include "QGCApplication.h"
#include "VideoManager.h"
+ #ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
+ #endif
#include <QDebug>
#include <QUrl>
#include <QDir>
@@ -227,7 +229,16 @@ VideoReceiver::start()
_stop = false;
qCDebug(VideoReceiverLog) << "start()";
- if (_uri.isEmpty()) {
+ #ifdef QGC_GST_TAISYNC_ENABLED
+ bool isTaisyncUSB = qgcApp()->toolbox()->videoManager()->isTaisync();
+ #else
+ bool isTaisyncUSB = false;
+ #endif
+ bool isUdp = _uri.contains("udp://") && !isTaisyncUSB;
+ bool isRtsp = _uri.contains("rtsp://") && !isTaisyncUSB;
+ bool isTCP = _uri.contains("tcp://") && !isTaisyncUSB;
+ if (!isTaisyncUSB && _uri.isEmpty()) {
qCritical() << "VideoReceiver::start() failed because URI is not specified";
return;
}
@@ -242,17 +253,13 @@ VideoReceiver::start()
_starting = true;
- bool isUdp = _uri.contains("udp://");
- bool isRtsp = _uri.contains("rtsp://");
- bool isTCP = _uri.contains("tcp://");
//-- For RTSP and TCP, check to see if server is there first
if(!_serverPresent && (isRtsp || isTCP)) {
_timer.start(100);
return;
}
- bool running = false;
+ bool running    = false;
bool pipelineUp = false;
GstElement* dataSource = nullptr;
@@ -269,7 +276,7 @@ VideoReceiver::start()
break;
}
- if(isUdp) {
+ if(isUdp || isTaisyncUSB) {
dataSource = gst_element_factory_make("udpsrc", "udp-source");
} else if(isTCP) {
dataSource = gst_element_factory_make("tcpclientsrc", "tcpclient-source");
@@ -287,12 +294,18 @@ VideoReceiver::start()
qCritical() << "VideoReceiver::start() failed. Error with gst_caps_from_string()";
break;
}
g_object_set(G_OBJECT(dataSource), "uri", qPrintable(_uri), "caps", caps, nullptr);
g_object_set(static_cast<gpointer>(dataSource), "uri", qPrintable(_uri), "caps", caps, nullptr);
#ifdef QGC_GST_TAISYNC_ENABLED
} else if(isTaisyncUSB) {
QString uri = QString("0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
qCDebug(VideoReceiverLog) << "Taisync URI:" << uri;
g_object_set(static_cast<gpointer>(dataSource), "port", TAISYNC_VIDEO_UDP_PORT, nullptr);
#endif
} else if(isTCP) {
QUrl url(_uri);
g_object_set(G_OBJECT(dataSource), "host", qPrintable(url.host()), "port", url.port(), nullptr );
g_object_set(static_cast<gpointer>(dataSource), "host", qPrintable(url.host()), "port", url.port(), nullptr );
} else {
g_object_set(G_OBJECT(dataSource), "location", qPrintable(_uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
g_object_set(static_cast<gpointer>(dataSource), "location", qPrintable(_uri), "latency", 17, "udp-reconnect", 1, "timeout", _udpReconnect_us, NULL);
}
// Currently, we expect H264 when using anything except for TCP. Long term we may want this to be settable
@@ -302,11 +315,13 @@ VideoReceiver::start()
break;
}
} else {
- if ((demux = gst_element_factory_make("rtph264depay", "rtp-h264-depacketizer")) == nullptr) {
+ if(!isTaisyncUSB) {
+ if ((demux = gst_element_factory_make("rtph264depay", "rtp-h264-depacketizer")) == nullptr) {
qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('rtph264depay')";
break;
}
+ }
}
if ((parser = gst_element_factory_make("h264parse", "h264-parser")) == nullptr) {
qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('h264parse')";
@@ -335,7 +350,11 @@ VideoReceiver::start()
break;
}
- gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, nullptr);
+ if(isTaisyncUSB) {
+ gst_bin_add_many(GST_BIN(_pipeline), dataSource, parser, _tee, queue, decoder, queue1, _videoSink, nullptr);
+ } else {
+ gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue, decoder, queue1, _videoSink, nullptr);
+ }
pipelineUp = true;
if(isUdp) {
@@ -344,6 +363,12 @@ VideoReceiver::start()
qCritical() << "Unable to link UDP elements.";
break;
}
+ } else if(isTaisyncUSB) {
+ // Link the pipeline in front of the tee
+ if(!gst_element_link_many(dataSource, parser, _tee, queue, decoder, queue1, _videoSink, nullptr)) {
+ qCritical() << "Unable to link Taisync USB elements.";
+ break;
+ }
} else if (isTCP) {
if(!gst_element_link(dataSource, demux)) {
qCritical() << "Unable to link TCP dataSource to Demux.";
@@ -666,7 +691,7 @@ VideoReceiver::startRecording(const QString &videoFile)
}
emit videoFileChanged();
- g_object_set(G_OBJECT(_sink->filesink), "location", qPrintable(_videoFile), nullptr);
+ g_object_set(static_cast<gpointer>(_sink->filesink), "location", qPrintable(_videoFile), nullptr);
qCDebug(VideoReceiverLog) << "New video file:" << _videoFile;
gst_object_ref(_sink->queue);
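The VideoReceiver::start() hunks above gate the RTP depacketizer on isTaisyncUSB at three separate points: element creation, gst_bin_add_many(), and gst_element_link_many(). A hypothetical helper (not QGC code) that collapses just that conditional assembly, leaving out the tee/recording branch and the RTSP/TCP paths and substituting avdec_h264/autovideosink for QGC's platform decoder and sink:

// Sketch of the conditional assembly: skip rtph264depay when the UDP payload is
// already a raw H.264 elementary stream (the Taisync iOS/Android case).
#include <gst/gst.h>

static GstElement* buildReceivePipeline(bool rawH264, int udpPort)
{
    GstElement* pipeline = gst_pipeline_new("receive-pipeline");
    GstElement* source   = gst_element_factory_make("udpsrc", "udp-source");
    GstElement* depay    = rawH264 ? nullptr
                         : gst_element_factory_make("rtph264depay", "rtp-h264-depacketizer");
    GstElement* parser   = gst_element_factory_make("h264parse", "h264-parser");
    GstElement* decoder  = gst_element_factory_make("avdec_h264", "h264-decoder");
    GstElement* sink     = gst_element_factory_make("autovideosink", "video-sink");

    // Element creation failed (missing plugin, etc.). A real implementation would
    // also unref whatever was created; this sketch simply bails out.
    if (!pipeline || !source || !parser || !decoder || !sink || (!rawH264 && !depay)) {
        return nullptr;
    }

    g_object_set(static_cast<gpointer>(source), "port", udpPort, nullptr);

    if (rawH264) {
        // Raw H.264: no depacketizer between source and parser. Caps are set here
        // so the standalone sketch negotiates; the commit only sets the UDP port.
        GstCaps* caps = gst_caps_from_string("video/x-h264,stream-format=(string)byte-stream");
        g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
        gst_caps_unref(caps);
        gst_bin_add_many(GST_BIN(pipeline), source, parser, decoder, sink, nullptr);
        if (!gst_element_link_many(source, parser, decoder, sink, nullptr)) {
            gst_object_unref(pipeline);
            return nullptr;
        }
    } else {
        // RTP-wrapped H.264: depacketize before parsing, as in the default UDP path.
        GstCaps* caps = gst_caps_from_string(
            "application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264");
        g_object_set(static_cast<gpointer>(source), "caps", caps, nullptr);
        gst_caps_unref(caps);
        gst_bin_add_many(GST_BIN(pipeline), source, depay, parser, decoder, sink, nullptr);
        if (!gst_element_link_many(source, depay, parser, decoder, sink, nullptr)) {
            gst_object_unref(pipeline);
            return nullptr;
        }
    }
    return pipeline;
}

The name buildReceivePipeline(), the caps strings, and the decoder/sink choices are illustrative assumptions; the real code keeps its single start() flow and simply branches on isTaisyncUSB at each of the three points noted above.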

TaisyncLink.cc
@@ -15,6 +15,7 @@
#include "QGCApplication.h"
#include "SettingsManager.h"
#include "VideoSettings.h"
#include "VideoManager.h"
#include "TaisyncTelemetry.h"
#include "TaisyncSettings.h"
@@ -102,6 +103,9 @@ TaisyncLink::_disconnect()
_hardwareDisconnect();
emit disconnected();
}
+ #if defined(__ios__) || defined(__android__)
+ qgcApp()->toolbox()->videoManager()->setIsTaisync(false);
+ #endif
//-- Restore video settings
if(!_savedVideoSource.isNull()) {
VideoSettings* pVSettings = qgcApp()->toolbox()->settingsManager()->videoSettings();
@@ -129,6 +133,10 @@ TaisyncLink::_connect(void)
_savedVideoUDP = pVSettings->udpPort()->rawValue();
_savedAR = pVSettings->aspectRatio()->rawValue();
_savedVideoState = pVSettings->visible();
+ #if defined(__ios__) || defined(__android__)
+ //-- iOS and Android receive raw h.264 and need a different pipeline
+ qgcApp()->toolbox()->videoManager()->setIsTaisync(true);
+ #endif
pVSettings->setVisible(false);
pVSettings->udpPort()->setRawValue(5600);
pVSettings->aspectRatio()->setRawValue(1024.0 / 768.0);