Commit 246f2d4b authored by Gus Grubba's avatar Gus Grubba Committed by GitHub

Merge pull request #5318 from dogmaphobic/videoReceiver

Decouple VideoReceiver from VideoManager.
parents bbc7bfae 5fb20787
......@@ -370,7 +370,7 @@ QGCView {
anchors.right: _flightVideo.right
height: ScreenTools.defaultFontPixelHeight * 2
width: height
visible: QGroundControl.videoManager.videoRunning && QGroundControl.settingsManager.videoSettings.showRecControl.rawValue
visible: QGroundControl.videoManager.videoReceiver.videoRunning && QGroundControl.settingsManager.videoSettings.showRecControl.rawValue
opacity: 0.75
Rectangle {
......
......@@ -29,7 +29,7 @@ Item {
id: noVideo
anchors.fill: parent
color: Qt.rgba(0,0,0,0.75)
visible: !QGroundControl.videoManager.videoRunning
visible: !QGroundControl.videoManager.videoReceiver.videoRunning
QGCLabel {
text: qsTr("WAITING FOR VIDEO")
font.family: ScreenTools.demiboldFontFamily
......@@ -41,20 +41,20 @@ Item {
Rectangle {
anchors.fill: parent
color: "black"
visible: QGroundControl.videoManager.videoRunning
visible: QGroundControl.videoManager.videoReceiver.videoRunning
QGCVideoBackground {
id: videoContent
height: parent.height
width: _ar != 0.0 ? height * _ar : parent.width
anchors.centerIn: parent
display: QGroundControl.videoManager.videoSurface
receiver: QGroundControl.videoManager.videoReceiver
visible: QGroundControl.videoManager.videoRunning
display: QGroundControl.videoManager.videoReceiver.videoSurface
visible: QGroundControl.videoManager.videoReceiver.videoRunning
Connections {
target: QGroundControl.videoManager
target: QGroundControl.videoManager.videoReceiver
onImageFileChanged: {
videoContent.grabToImage(function(result) {
if (!result.saveToFile(QGroundControl.videoManager.imageFile)) {
if (!result.saveToFile(QGroundControl.videoManager.videoReceiver.imageFile)) {
console.error('Error capturing video frame');
}
});
......
......@@ -32,19 +32,17 @@ QGC_LOGGING_CATEGORY(VideoManagerLog, "VideoManagerLog")
//-----------------------------------------------------------------------------
VideoManager::VideoManager(QGCApplication* app, QGCToolbox* toolbox)
: QGCTool(app, toolbox)
, _videoSurface(NULL)
, _videoReceiver(NULL)
, _videoRunning(false)
, _init(false)
, _videoSettings(NULL)
, _showFullScreen(false)
{
}
//-----------------------------------------------------------------------------
VideoManager::~VideoManager()
{
    // _videoReceiver is allocated with new elsewhere in this class
    // (see setToolbox); release it here. The guard is belt-and-braces:
    // delete on a null pointer would be a no-op anyway.
    if(_videoReceiver) {
        delete _videoReceiver;
    }
}
//-----------------------------------------------------------------------------
......@@ -53,14 +51,14 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
{
QGCTool::setToolbox(toolbox);
QQmlEngine::setObjectOwnership(this, QQmlEngine::CppOwnership);
qmlRegisterUncreatableType<VideoManager>("QGroundControl.VideoManager", 1, 0, "VideoManager", "Reference only");
qmlRegisterUncreatableType<VideoManager> ("QGroundControl.VideoManager", 1, 0, "VideoManager", "Reference only");
qmlRegisterUncreatableType<VideoReceiver>("QGroundControl", 1, 0, "VideoReceiver","Reference only");
qmlRegisterUncreatableType<VideoSurface> ("QGroundControl", 1, 0, "VideoSurface", "Reference only");
_videoSettings = toolbox->settingsManager()->videoSettings();
QString videoSource = _videoSettings->videoSource()->rawValue().toString();
connect(_videoSettings->videoSource(), &Fact::rawValueChanged, this, &VideoManager::_videoSourceChanged);
connect(_videoSettings->udpPort(), &Fact::rawValueChanged, this, &VideoManager::_udpPortChanged);
connect(_videoSettings->rtspUrl(), &Fact::rawValueChanged, this, &VideoManager::_rtspUrlChanged);
connect(_videoSettings->videoSource(), &Fact::rawValueChanged, this, &VideoManager::_videoSourceChanged);
connect(_videoSettings->udpPort(), &Fact::rawValueChanged, this, &VideoManager::_udpPortChanged);
connect(_videoSettings->rtspUrl(), &Fact::rawValueChanged, this, &VideoManager::_rtspUrlChanged);
#if defined(QGC_GST_STREAMING)
#ifndef QGC_DISABLE_UVC
......@@ -78,47 +76,38 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
emit isGStreamerChanged();
qCDebug(VideoManagerLog) << "New Video Source:" << videoSource;
if(_videoReceiver) {
if(isGStreamer()) {
_videoReceiver->start();
} else {
_videoReceiver->stop();
}
_videoReceiver = new VideoReceiver(this);
_updateSettings();
if(isGStreamer()) {
_videoReceiver->start();
} else {
_videoReceiver->stop();
}
#endif
_init = true;
#if defined(QGC_GST_STREAMING)
_updateVideo();
connect(&_frameTimer, &QTimer::timeout, this, &VideoManager::_updateTimer);
_frameTimer.start(1000);
#endif
}
void VideoManager::_videoSourceChanged(void)
//-----------------------------------------------------------------------------
void
VideoManager::_videoSourceChanged()
{
    // Changing the video source affects both availability (hasVideo) and
    // which backend is in use (isGStreamer); notify QML bindings, then
    // tear down and restart the stream with the new source.
    emit hasVideoChanged();
    emit isGStreamerChanged();
    _restartVideo();
}
void VideoManager::_udpPortChanged(void)
{
    // A new UDP port requires rebuilding the pipeline URI, so restart the stream.
    _restartVideo();
}
void VideoManager::_rtspUrlChanged(void)
//-----------------------------------------------------------------------------
void
VideoManager::_rtspUrlChanged()
{
    // A new RTSP URL requires rebuilding the pipeline URI, so restart the stream.
    _restartVideo();
}
//-----------------------------------------------------------------------------
void
VideoManager::grabImage(QString imageFile)
VideoManager::_rtspUrlChanged()
{
_imageFile = imageFile;
emit imageFileChanged();
_restartVideo();
}
//-----------------------------------------------------------------------------
......@@ -154,51 +143,11 @@ VideoManager::uvcEnabled()
#endif
//-----------------------------------------------------------------------------
void VideoManager::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
if(_videoReceiver && _videoSurface) {
if(_videoReceiver->stopping() || _videoReceiver->starting()) {
return;
}
if(_videoReceiver->streaming()) {
if(!_videoRunning) {
_videoSurface->setLastFrame(0);
_videoRunning = true;
emit videoRunningChanged();
}
} else {
if(_videoRunning) {
_videoRunning = false;
emit videoRunningChanged();
}
}
if(_videoRunning) {
time_t elapsed = 0;
time_t lastFrame = _videoSurface->lastFrame();
if(lastFrame != 0) {
elapsed = time(0) - _videoSurface->lastFrame();
}
if(elapsed > 2 && _videoSurface) {
_videoReceiver->stop();
}
} else {
if(!_videoReceiver->running()) {
_videoReceiver->start();
}
}
}
#endif
}
//-----------------------------------------------------------------------------
void VideoManager::_updateSettings()
void
VideoManager::_updateSettings()
{
if(!_videoSettings || !_videoReceiver)
return;
if (_videoSettings->videoSource()->rawValue().toString() == VideoSettings::videoSourceUDP)
_videoReceiver->setUri(QStringLiteral("udp://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
else
......@@ -206,29 +155,12 @@ void VideoManager::_updateSettings()
}
//-----------------------------------------------------------------------------
void VideoManager::_updateVideo()
void
VideoManager::_restartVideo()
{
if(_init) {
if(_videoReceiver)
delete _videoReceiver;
if(_videoSurface)
delete _videoSurface;
_videoSurface = new VideoSurface;
_videoReceiver = new VideoReceiver(this);
#if defined(QGC_GST_STREAMING)
_videoReceiver->setVideoSink(_videoSurface->videoSink());
_updateSettings();
#endif
_videoReceiver->start();
}
}
//-----------------------------------------------------------------------------
void VideoManager::_restartVideo()
{
if(!_videoReceiver)
return;
#if defined(QGC_GST_STREAMING)
_videoReceiver->stop();
_updateSettings();
_videoReceiver->start();
......
......@@ -16,7 +16,6 @@
#include <QUrl>
#include "QGCLoggingCategory.h"
#include "VideoSurface.h"
#include "VideoReceiver.h"
#include "QGCToolbox.h"
......@@ -35,21 +34,13 @@ public:
Q_PROPERTY(bool hasVideo READ hasVideo NOTIFY hasVideoChanged)
Q_PROPERTY(bool isGStreamer READ isGStreamer NOTIFY isGStreamerChanged)
Q_PROPERTY(QString videoSourceID READ videoSourceID NOTIFY videoSourceIDChanged)
Q_PROPERTY(bool videoRunning READ videoRunning NOTIFY videoRunningChanged)
Q_PROPERTY(bool uvcEnabled READ uvcEnabled CONSTANT)
Q_PROPERTY(VideoSurface* videoSurface READ videoSurface CONSTANT)
Q_PROPERTY(VideoReceiver* videoReceiver READ videoReceiver CONSTANT)
Q_PROPERTY(QString imageFile READ imageFile NOTIFY imageFileChanged)
Q_PROPERTY(bool showFullScreen READ showFullScreen WRITE setShowFullScreen NOTIFY showFullScreenChanged)
bool hasVideo ();
bool isGStreamer ();
bool videoRunning () { return _videoRunning; }
QString videoSourceID () { return _videoSourceID; }
QString imageFile () { return _imageFile; }
bool showFullScreen () { return _showFullScreen; }
VideoSurface* videoSurface () { return _videoSurface; }
VideoReceiver* videoReceiver () { return _videoReceiver; }
#if defined(QGC_DISABLE_UVC)
......@@ -58,43 +49,26 @@ public:
bool uvcEnabled ();
#endif
void grabImage (QString imageFile);
void setShowFullScreen (bool show) { _showFullScreen = show; emit showFullScreenChanged(); }
// Override from QGCTool
void setToolbox (QGCToolbox *toolbox);
signals:
void hasVideoChanged ();
void videoRunningChanged ();
void isGStreamerChanged ();
void videoSourceIDChanged ();
void imageFileChanged ();
void showFullScreenChanged ();
void hasVideoChanged ();
void isGStreamerChanged ();
void videoSourceIDChanged ();
private slots:
void _videoSourceChanged(void);
void _udpPortChanged(void);
void _rtspUrlChanged(void);
void _videoSourceChanged ();
void _udpPortChanged ();
void _rtspUrlChanged ();
private:
void _updateTimer ();
void _updateSettings ();
void _updateVideo ();
void _restartVideo ();
void _updateSettings ();
void _restartVideo ();
VideoSurface* _videoSurface;
VideoReceiver* _videoReceiver;
bool _videoRunning;
#if defined(QGC_GST_STREAMING)
QTimer _frameTimer;
#endif
QString _videoSourceID;
bool _init;
VideoSettings* _videoSettings;
QString _imageFile;
bool _showFullScreen;
QString _videoSourceID;
};
#endif
......@@ -345,8 +345,6 @@ void QGCApplication::_initCommon(void)
qmlRegisterUncreatableType<CoordinateVector> ("QGroundControl", 1, 0, "CoordinateVector", "Reference only");
qmlRegisterUncreatableType<QmlObjectListModel> ("QGroundControl", 1, 0, "QmlObjectListModel", "Reference only");
qmlRegisterUncreatableType<VideoReceiver> ("QGroundControl", 1, 0, "VideoReceiver", "Reference only");
qmlRegisterUncreatableType<VideoSurface> ("QGroundControl", 1, 0, "VideoSurface", "Reference only");
qmlRegisterUncreatableType<MissionCommandTree> ("QGroundControl", 1, 0, "MissionCommandTree", "Reference only");
qmlRegisterUncreatableType<AutoPilotPlugin> ("QGroundControl.AutoPilotPlugin", 1, 0, "AutoPilotPlugin", "Reference only");
......
......@@ -64,13 +64,20 @@ VideoReceiver::VideoReceiver(QObject* parent)
, _socket(NULL)
, _serverPresent(false)
#endif
, _videoSurface(NULL)
, _videoRunning(false)
, _showFullScreen(false)
{
_videoSurface = new VideoSurface;
#if defined(QGC_GST_STREAMING)
_setVideoSink(_videoSurface->videoSink());
_timer.setSingleShot(true);
connect(&_timer, &QTimer::timeout, this, &VideoReceiver::_timeout);
connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
connect(&_frameTimer, &QTimer::timeout, this, &VideoReceiver::_updateTimer);
_frameTimer.start(1000);
#endif
}
......@@ -81,11 +88,17 @@ VideoReceiver::~VideoReceiver()
if(_socket) {
delete _socket;
}
if (_videoSink) {
gst_object_unref(_videoSink);
}
#endif
if(_videoSurface)
delete _videoSurface;
}
#if defined(QGC_GST_STREAMING)
void VideoReceiver::setVideoSink(GstElement* sink)
void
VideoReceiver::_setVideoSink(GstElement* sink)
{
if (_videoSink) {
gst_object_unref(_videoSink);
......@@ -98,8 +111,18 @@ void VideoReceiver::setVideoSink(GstElement* sink)
}
#endif
//-----------------------------------------------------------------------------
void
VideoReceiver::grabImage(QString imageFile)
{
    // Record the destination path and notify QML: the QML side connects to
    // imageFileChanged and performs the actual frame capture there
    // (videoContent.grabToImage -> result.saveToFile(imageFile)).
    _imageFile = imageFile;
    emit imageFileChanged();
}
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
static void newPadCB(GstElement* element, GstPad* pad, gpointer data)
static void
newPadCB(GstElement* element, GstPad* pad, gpointer data)
{
gchar* name;
name = gst_pad_get_name(pad);
......@@ -115,8 +138,10 @@ static void newPadCB(GstElement* element, GstPad* pad, gpointer data)
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_connected()
void
VideoReceiver::_connected()
{
//-- Server showed up. Now we start the stream.
_timer.stop();
......@@ -127,8 +152,10 @@ void VideoReceiver::_connected()
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
void
VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
{
Q_UNUSED(socketError);
_socket->deleteLater();
......@@ -138,8 +165,10 @@ void VideoReceiver::_socketError(QAbstractSocket::SocketError socketError)
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_timeout()
void
VideoReceiver::_timeout()
{
//-- If socket is live, we got no connection nor a socket error
if(_socket) {
......@@ -160,6 +189,7 @@ void VideoReceiver::_timeout()
}
#endif
//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
// +-->queue-->decoder-->_videosink
......@@ -169,7 +199,8 @@ void VideoReceiver::_timeout()
// ^
// |
// +-Here we will later link elements for recording
void VideoReceiver::start()
void
VideoReceiver::start()
{
#if defined(QGC_GST_STREAMING)
qCDebug(VideoReceiverLog) << "start()";
......@@ -205,7 +236,7 @@ void VideoReceiver::start()
GstElement* parser = NULL;
GstElement* queue = NULL;
GstElement* decoder = NULL;
GstElement* queue1 = NULL;
GstElement* queue1 = NULL;
do {
if ((_pipeline = gst_pipeline_new("receiver")) == NULL) {
......@@ -350,7 +381,9 @@ void VideoReceiver::start()
#endif
}
void VideoReceiver::stop()
//-----------------------------------------------------------------------------
void
VideoReceiver::stop()
{
#if defined(QGC_GST_STREAMING)
qCDebug(VideoReceiverLog) << "stop()";
......@@ -374,13 +407,17 @@ void VideoReceiver::stop()
#endif
}
void VideoReceiver::setUri(const QString & uri)
//-----------------------------------------------------------------------------
void
VideoReceiver::setUri(const QString & uri)
{
_uri = uri;
}
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_shutdownPipeline() {
void
VideoReceiver::_shutdownPipeline() {
if(!_pipeline) {
qCDebug(VideoReceiverLog) << "No pipeline";
return;
......@@ -406,15 +443,19 @@ void VideoReceiver::_shutdownPipeline() {
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_handleError() {
void
VideoReceiver::_handleError() {
qCDebug(VideoReceiverLog) << "Gstreamer error!";
_shutdownPipeline();
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_handleEOS() {
void
VideoReceiver::_handleEOS() {
if(_stopping) {
_shutdownPipeline();
qCDebug(VideoReceiverLog) << "Stopped";
......@@ -427,15 +468,21 @@ void VideoReceiver::_handleEOS() {
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_handleStateChanged() {
_streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
void
VideoReceiver::_handleStateChanged() {
    // NOTE(review): the null check presumably guards against a state-changed
    // bus message being handled after the pipeline was shut down — confirm.
    if(_pipeline) {
        // Consider the stream live only when the pipeline is actually PLAYING.
        _streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
        qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
    }
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
gboolean VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
Q_UNUSED(bus)
Q_ASSERT(msg != NULL && data != NULL);
......@@ -466,8 +513,10 @@ gboolean VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer dat
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_cleanupOldVideos()
void
VideoReceiver::_cleanupOldVideos()
{
QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
QDir videoDir = QDir(savePath);
......@@ -501,6 +550,7 @@ void VideoReceiver::_cleanupOldVideos()
}
#endif
//-----------------------------------------------------------------------------
// When we finish our pipeline will look like this:
//
// +-->queue-->decoder-->_videosink
......@@ -512,7 +562,8 @@ void VideoReceiver::_cleanupOldVideos()
// we are adding these elements-> +->teepad-->queue-->matroskamux-->_filesink |
// | |
// +--------------------------------------+
void VideoReceiver::startRecording(void)
void
VideoReceiver::startRecording(void)
{
#if defined(QGC_GST_STREAMING)
......@@ -580,7 +631,9 @@ void VideoReceiver::startRecording(void)
#endif
}
void VideoReceiver::stopRecording(void)
//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
qCDebug(VideoReceiverLog) << "stopRecording()";
......@@ -594,13 +647,15 @@ void VideoReceiver::stopRecording(void)
#endif
}
//-----------------------------------------------------------------------------
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recoring elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_shutdownRecordingBranch()
void
VideoReceiver::_shutdownRecordingBranch()
{
gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->queue);
gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->parse);
......@@ -630,12 +685,14 @@ void VideoReceiver::_shutdownRecordingBranch()
}
#endif
//-----------------------------------------------------------------------------
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
void VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
void
VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
{
Q_UNUSED(info)
......@@ -671,16 +728,59 @@ void VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
}
#endif
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
GstPadProbeReturn
VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
Q_UNUSED(pad);
Q_ASSERT(info != NULL && user_data != NULL);
VideoReceiver* pThis = (VideoReceiver*)user_data;
// We will only act once
if(g_atomic_int_compare_and_exchange(&pThis->_sink->removing, FALSE, TRUE))
pThis->_detachRecordingBranch(info);
if(info != NULL && user_data != NULL) {
VideoReceiver* pThis = (VideoReceiver*)user_data;
// We will only act once
if(g_atomic_int_compare_and_exchange(&pThis->_sink->removing, FALSE, TRUE)) {
pThis->_detachRecordingBranch(info);
}
}
return GST_PAD_PROBE_REMOVE;
}
#endif
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Periodic watchdog (driven by _frameTimer at 1Hz): tracks whether frames are
// actually arriving on _videoSurface, keeps the videoRunning property in sync,
// stops a stalled pipeline, and keeps trying to (re)start a stopped one.
void
VideoReceiver::_updateTimer()
{
#if defined(QGC_GST_STREAMING)
    if(_videoSurface) {
        //-- Leave state untouched while the pipeline is transitioning.
        if(stopping() || starting()) {
            return;
        }
        if(streaming()) {
            if(!_videoRunning) {
                //-- Reset the last-frame timestamp so a stale value from a
                //   previous session doesn't immediately trip the watchdog.
                _videoSurface->setLastFrame(0);
                _videoRunning = true;
                emit videoRunningChanged();
            }
        } else {
            if(_videoRunning) {
                _videoRunning = false;
                emit videoRunningChanged();
            }
        }
        if(_videoRunning) {
            //-- Stop the stream if no frame has arrived for more than 2s.
            //   (Fix: reuse the cached lastFrame instead of calling
            //   _videoSurface->lastFrame() twice, and drop the redundant
            //   _videoSurface null re-check — it is guaranteed non-null here.)
            time_t elapsed = 0;
            time_t lastFrame = _videoSurface->lastFrame();
            if(lastFrame != 0) {
                elapsed = time(0) - lastFrame;
            }
            if(elapsed > 2) {
                stop();
            }
        } else {
            //-- Not running: keep attempting to start the receiver.
            if(!running()) {
                start();
            }
        }
    }
#endif
}
......@@ -22,6 +22,8 @@
#include <QTimer>
#include <QTcpSocket>
#include "VideoSurface.h"
#if defined(QGC_GST_STREAMING)
#include <gst/gst.h>
#endif
......@@ -33,51 +35,64 @@ class VideoReceiver : public QObject
Q_OBJECT
public:
#if defined(QGC_GST_STREAMING)
Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
#endif
Q_PROPERTY(VideoSurface* videoSurface READ videoSurface CONSTANT)
Q_PROPERTY(bool videoRunning READ videoRunning NOTIFY videoRunningChanged)
Q_PROPERTY(QString imageFile READ imageFile NOTIFY imageFileChanged)
Q_PROPERTY(bool showFullScreen READ showFullScreen WRITE setShowFullScreen NOTIFY showFullScreenChanged)
explicit VideoReceiver(QObject* parent = 0);
~VideoReceiver();
#if defined(QGC_GST_STREAMING)
void setVideoSink(GstElement* sink);
bool running() { return _running; }
bool recording() { return _recording; }
bool streaming() { return _streaming; }
bool starting() { return _starting; }
bool stopping() { return _stopping; }
bool running () { return _running; }
bool recording () { return _recording; }
bool streaming () { return _streaming; }
bool starting () { return _starting; }
bool stopping () { return _stopping; }
#endif
VideoSurface* videoSurface () { return _videoSurface; }
bool videoRunning () { return _videoRunning; }
QString imageFile () { return _imageFile; }
bool showFullScreen () { return _showFullScreen; }
void grabImage (QString imageFile);
void setShowFullScreen (bool show) { _showFullScreen = show; emit showFullScreenChanged(); }
signals:
void videoRunningChanged ();
void imageFileChanged ();
void showFullScreenChanged ();
#if defined(QGC_GST_STREAMING)
void recordingChanged();
void msgErrorReceived();
void msgEOSReceived();
void msgStateChangedReceived();
void recordingChanged ();
void msgErrorReceived ();
void msgEOSReceived ();
void msgStateChangedReceived ();
#endif
public slots:
void start ();
void stop ();
void setUri (const QString& uri);
void stopRecording ();
void startRecording ();
void start ();
void stop ();
void setUri (const QString& uri);
void stopRecording ();
void startRecording ();
private slots:
void _updateTimer ();
#if defined(QGC_GST_STREAMING)
void _timeout ();
void _connected ();
void _socketError (QAbstractSocket::SocketError socketError);
void _handleError();
void _handleEOS();
void _handleStateChanged();
void _timeout ();
void _connected ();
void _socketError (QAbstractSocket::SocketError socketError);
void _handleError ();
void _handleEOS ();
void _handleStateChanged ();
#endif
private:
#if defined(QGC_GST_STREAMING)
typedef struct
{
GstPad* teepad;
......@@ -96,29 +111,32 @@ private:
Sink* _sink;
GstElement* _tee;
static gboolean _onBusMessage(GstBus* bus, GstMessage* message, gpointer user_data);
static GstPadProbeReturn _unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
void _detachRecordingBranch(GstPadProbeInfo* info);
static gboolean _onBusMessage (GstBus* bus, GstMessage* message, gpointer user_data);
static GstPadProbeReturn _unlinkCallBack (GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
void _detachRecordingBranch (GstPadProbeInfo* info);
void _shutdownRecordingBranch();
void _shutdownPipeline();
void _cleanupOldVideos();
void _shutdownPipeline ();
void _cleanupOldVideos ();
void _setVideoSink (GstElement* sink);
#endif
GstElement* _pipeline;
GstElement* _pipelineStopRec;
GstElement* _videoSink;
QString _uri;
//-- Wait for Video Server to show up before starting
QTimer _frameTimer;
QTimer _timer;
QTcpSocket* _socket;
bool _serverPresent;
#if defined(QGC_GST_STREAMING)
GstElement* _pipeline;
GstElement* _pipelineStopRec;
GstElement* _videoSink;
#endif
//-- Wait for Video Server to show up before starting
#if defined(QGC_GST_STREAMING)
QTimer _timer;
QTcpSocket* _socket;
bool _serverPresent;
#endif
QString _uri;
QString _imageFile;
VideoSurface* _videoSurface;
bool _videoRunning;
bool _showFullScreen;
};
#endif // VIDEORECEIVER_H
......@@ -345,6 +345,13 @@ Item {
id: flightView
anchors.fill: parent
visible: true
//-------------------------------------------------------------------------
//-- Loader helper for any child, no matter how deep can display an element
// on top of the video window.
Loader {
id: rootVideoLoader
anchors.centerIn: parent
}
}
Loader {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment