Commit 6895a5f6 authored by Don Gagne's avatar Don Gagne Committed by GitHub

Merge pull request #4438 from bluerobotics/video-recording

Video recording
parents ef3d5401 c333553d
...@@ -156,6 +156,7 @@ ...@@ -156,6 +156,7 @@
<file alias="Signal100.svg">src/ui/toolbar/Images/Signal100.svg</file> <file alias="Signal100.svg">src/ui/toolbar/Images/Signal100.svg</file>
<file alias="TelemRSSI.svg">src/ui/toolbar/Images/TelemRSSI.svg</file> <file alias="TelemRSSI.svg">src/ui/toolbar/Images/TelemRSSI.svg</file>
<file alias="Yield.svg">src/ui/toolbar/Images/Yield.svg</file> <file alias="Yield.svg">src/ui/toolbar/Images/Yield.svg</file>
<file alias="CameraIcon.svg">src/ui/toolbar/Images/CameraIcon.svg</file>
<file alias="CogWheel.svg">src/MissionManager/CogWheel.svg</file> <file alias="CogWheel.svg">src/MissionManager/CogWheel.svg</file>
<file alias="StationMode.svg">src/AutoPilotPlugins/Common/Images/StationMode.svg</file> <file alias="StationMode.svg">src/AutoPilotPlugins/Common/Images/StationMode.svg</file>
<file alias="APMode.svg">src/AutoPilotPlugins/Common/Images/APMode.svg</file> <file alias="APMode.svg">src/AutoPilotPlugins/Common/Images/APMode.svg</file>
......
...@@ -260,6 +260,42 @@ QGCView { ...@@ -260,6 +260,42 @@ QGCView {
visible: singleVehicleView.checked visible: singleVehicleView.checked
} }
// Button to start/stop video recording.
// Shown only while video is running; the red indicator is a circle while
// idle and a square while a recording is in progress.
Item {
    z:                  _flightVideoPipControl.z + 1
    anchors.margins:    ScreenTools.defaultFontPixelHeight / 2
    anchors.bottom:     _flightVideo.bottom
    anchors.right:      _flightVideo.right
    height:             ScreenTools.defaultFontPixelHeight * 2
    width:              height
    visible:            QGroundControl.videoManager.videoRunning
    opacity:            0.75
    Rectangle {
        anchors.top:    parent.top
        anchors.bottom: parent.bottom
        width:          height
        // Square (radius 0) while recording, circle otherwise
        radius:         QGroundControl.videoManager.videoReceiver && QGroundControl.videoManager.videoReceiver.recording ? 0 : height
        color:          "red"
    }
    QGCColoredImage {
        anchors.top:                parent.top
        anchors.bottom:             parent.bottom
        anchors.horizontalCenter:   parent.horizontalCenter
        width:                      height * 0.625
        sourceSize.width:           width
        source:                     "/qmlimages/CameraIcon.svg"
        fillMode:                   Image.PreserveAspectFit
        color:                      "white"
    }
    MouseArea {
        anchors.fill: parent
        onClicked: {
            // Explicit null guard: the previous ternary
            //   receiver && receiver.recording ? stop : start
            // fell through to startRecording() on a null receiver
            // whenever the guard failed because receiver was null.
            var receiver = QGroundControl.videoManager.videoReceiver
            if (receiver) {
                if (receiver.recording) {
                    receiver.stopRecording()
                } else {
                    receiver.startRecording()
                }
            }
        }
    }
}
MultiVehicleList { MultiVehicleList {
anchors.margins: _margins anchors.margins: _margins
anchors.top: singleMultiSelector.bottom anchors.top: singleMultiSelector.bottom
......
...@@ -11,6 +11,8 @@ ...@@ -11,6 +11,8 @@
#include <QQmlContext> #include <QQmlContext>
#include <QQmlEngine> #include <QQmlEngine>
#include <QSettings> #include <QSettings>
#include <QUrl>
#include <QDir>
#ifndef QGC_DISABLE_UVC #ifndef QGC_DISABLE_UVC
#include <QCameraInfo> #include <QCameraInfo>
...@@ -27,6 +29,7 @@ ...@@ -27,6 +29,7 @@
static const char* kVideoSourceKey = "VideoSource"; static const char* kVideoSourceKey = "VideoSource";
static const char* kVideoUDPPortKey = "VideoUDPPort"; static const char* kVideoUDPPortKey = "VideoUDPPort";
static const char* kVideoRTSPUrlKey = "VideoRTSPUrl"; static const char* kVideoRTSPUrlKey = "VideoRTSPUrl";
static const char* kVideoSavePathKey = "VideoSavePath";
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
static const char* kUDPStream = "UDP Video Stream"; static const char* kUDPStream = "UDP Video Stream";
static const char* kRTSPStream = "RTSP Video Stream"; static const char* kRTSPStream = "RTSP Video Stream";
...@@ -80,6 +83,7 @@ VideoManager::setToolbox(QGCToolbox *toolbox) ...@@ -80,6 +83,7 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
setUdpPort(settings.value(kVideoUDPPortKey, 5600).toUInt()); setUdpPort(settings.value(kVideoUDPPortKey, 5600).toUInt());
setRtspURL(settings.value(kVideoRTSPUrlKey, "rtsp://192.168.42.1:554/live").toString()); //-- Example RTSP URL setRtspURL(settings.value(kVideoRTSPUrlKey, "rtsp://192.168.42.1:554/live").toString()); //-- Example RTSP URL
} }
setVideoSavePath(settings.value(kVideoSavePathKey, QDir::homePath()).toString());
#endif #endif
_init = true; _init = true;
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
...@@ -186,6 +190,22 @@ VideoManager::setRtspURL(QString url) ...@@ -186,6 +190,22 @@ VideoManager::setRtspURL(QString url)
*/ */
} }
//-----------------------------------------------------------------------------
// QML-facing setter: the QML FileDialog hands us a file:// QUrl, which we
// convert to a plain local filesystem path before storing it.
void
VideoManager::setVideoSavePathByUrl(QUrl url) {
    const QString localPath = url.toLocalFile();
    setVideoSavePath(localPath);
}
//-----------------------------------------------------------------------------
// Store the directory recorded videos are written to, forward it to the
// active receiver (if any), persist it across sessions, and notify QML.
void
VideoManager::setVideoSavePath(QString path)
{
    _videoSavePath = path;
    //-- Hand the path to the receiver that performs the actual recording.
    if(_videoReceiver) {
        _videoReceiver->setVideoSavePath(_videoSavePath);
    }
    //-- Persist the choice so it survives application restarts.
    QSettings settings;
    settings.setValue(kVideoSavePathKey, path);
    emit videoSavePathChanged();
}
//----------------------------------------------------------------------------- //-----------------------------------------------------------------------------
QStringList QStringList
VideoManager::videoSourceList() VideoManager::videoSourceList()
...@@ -211,36 +231,36 @@ VideoManager::videoSourceList() ...@@ -211,36 +231,36 @@ VideoManager::videoSourceList()
void VideoManager::_updateTimer() void VideoManager::_updateTimer()
{ {
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
if(_videoRunning) if(_videoReceiver && _videoSurface) {
{ if(_videoReceiver->stopping() || _videoReceiver->starting()) {
time_t elapsed = 0; return;
if(_videoSurface)
{
elapsed = time(0) - _videoSurface->lastFrame();
} }
if(elapsed > 2 && _videoSurface)
{ if(_videoReceiver->streaming()) {
_videoRunning = false; if(!_videoRunning) {
_videoSurface->setLastFrame(0); _videoSurface->setLastFrame(0);
_videoRunning = true;
emit videoRunningChanged();
}
} else {
if(_videoRunning) {
_videoRunning = false;
emit videoRunningChanged(); emit videoRunningChanged();
if(_videoReceiver) {
if(isGStreamer()) {
//-- Stop it
_videoReceiver->stop();
QThread::msleep(100);
//-- And start over
_videoReceiver->start();
} }
} }
if(_videoRunning) {
time_t elapsed = 0;
time_t lastFrame = _videoSurface->lastFrame();
if(lastFrame != 0) {
elapsed = time(0) - _videoSurface->lastFrame();
} }
if(elapsed > 2 && _videoSurface) {
_videoReceiver->stop();
} }
else } else {
{ if(!_videoReceiver->running()) {
if(_videoSurface && _videoSurface->lastFrame()) { _videoReceiver->start();
if(!_videoRunning)
{
_videoRunning = true;
emit videoRunningChanged();
} }
} }
} }
...@@ -263,6 +283,7 @@ void VideoManager::_updateVideo() ...@@ -263,6 +283,7 @@ void VideoManager::_updateVideo()
_videoReceiver->setUri(QStringLiteral("udp://0.0.0.0:%1").arg(_udpPort)); _videoReceiver->setUri(QStringLiteral("udp://0.0.0.0:%1").arg(_udpPort));
else else
_videoReceiver->setUri(_rtspURL); _videoReceiver->setUri(_rtspURL);
_videoReceiver->setVideoSavePath(_videoSavePath);
#endif #endif
_videoReceiver->start(); _videoReceiver->start();
} }
......
...@@ -13,6 +13,7 @@ ...@@ -13,6 +13,7 @@
#include <QObject> #include <QObject>
#include <QTimer> #include <QTimer>
#include <QUrl>
#include "QGCLoggingCategory.h" #include "QGCLoggingCategory.h"
#include "VideoSurface.h" #include "VideoSurface.h"
...@@ -37,10 +38,13 @@ public: ...@@ -37,10 +38,13 @@ public:
Q_PROPERTY(bool videoRunning READ videoRunning NOTIFY videoRunningChanged) Q_PROPERTY(bool videoRunning READ videoRunning NOTIFY videoRunningChanged)
Q_PROPERTY(quint16 udpPort READ udpPort WRITE setUdpPort NOTIFY udpPortChanged) Q_PROPERTY(quint16 udpPort READ udpPort WRITE setUdpPort NOTIFY udpPortChanged)
Q_PROPERTY(QString rtspURL READ rtspURL WRITE setRtspURL NOTIFY rtspURLChanged) Q_PROPERTY(QString rtspURL READ rtspURL WRITE setRtspURL NOTIFY rtspURLChanged)
Q_PROPERTY(QString videoSavePath READ videoSavePath NOTIFY videoSavePathChanged)
Q_PROPERTY(bool uvcEnabled READ uvcEnabled CONSTANT) Q_PROPERTY(bool uvcEnabled READ uvcEnabled CONSTANT)
Q_PROPERTY(VideoSurface* videoSurface MEMBER _videoSurface CONSTANT) Q_PROPERTY(VideoSurface* videoSurface MEMBER _videoSurface CONSTANT)
Q_PROPERTY(VideoReceiver* videoReceiver MEMBER _videoReceiver CONSTANT) Q_PROPERTY(VideoReceiver* videoReceiver MEMBER _videoReceiver CONSTANT)
Q_INVOKABLE void setVideoSavePathByUrl (QUrl url);
bool hasVideo (); bool hasVideo ();
bool isGStreamer (); bool isGStreamer ();
bool videoRunning () { return _videoRunning; } bool videoRunning () { return _videoRunning; }
...@@ -49,6 +53,7 @@ public: ...@@ -49,6 +53,7 @@ public:
QStringList videoSourceList (); QStringList videoSourceList ();
quint16 udpPort () { return _udpPort; } quint16 udpPort () { return _udpPort; }
QString rtspURL () { return _rtspURL; } QString rtspURL () { return _rtspURL; }
QString videoSavePath () { return _videoSavePath; }
#if defined(QGC_DISABLE_UVC) #if defined(QGC_DISABLE_UVC)
bool uvcEnabled () { return false; } bool uvcEnabled () { return false; }
...@@ -59,6 +64,7 @@ public: ...@@ -59,6 +64,7 @@ public:
void setVideoSource (QString vSource); void setVideoSource (QString vSource);
void setUdpPort (quint16 port); void setUdpPort (quint16 port);
void setRtspURL (QString url); void setRtspURL (QString url);
void setVideoSavePath (QString path);
// Override from QGCTool // Override from QGCTool
void setToolbox (QGCToolbox *toolbox); void setToolbox (QGCToolbox *toolbox);
...@@ -72,6 +78,7 @@ signals: ...@@ -72,6 +78,7 @@ signals:
void videoSourceIDChanged (); void videoSourceIDChanged ();
void udpPortChanged (); void udpPortChanged ();
void rtspURLChanged (); void rtspURLChanged ();
void videoSavePathChanged ();
private: private:
void _updateTimer (); void _updateTimer ();
...@@ -89,6 +96,7 @@ private: ...@@ -89,6 +96,7 @@ private:
QStringList _videoSourceList; QStringList _videoSourceList;
quint16 _udpPort; quint16 _udpPort;
QString _rtspURL; QString _rtspURL;
QString _videoSavePath;
bool _init; bool _init;
}; };
......
...@@ -17,11 +17,24 @@ ...@@ -17,11 +17,24 @@
#include "VideoReceiver.h" #include "VideoReceiver.h"
#include <QDebug> #include <QDebug>
#include <QUrl> #include <QUrl>
#include <QDir>
#include <QDateTime>
#include <QSysInfo>
QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")
VideoReceiver::VideoReceiver(QObject* parent) VideoReceiver::VideoReceiver(QObject* parent)
: QObject(parent) : QObject(parent)
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
, _running(false)
, _recording(false)
, _streaming(false)
, _starting(false)
, _stopping(false)
, _sink(NULL)
, _tee(NULL)
, _pipeline(NULL) , _pipeline(NULL)
, _pipelineStopRec(NULL)
, _videoSink(NULL) , _videoSink(NULL)
, _socket(NULL) , _socket(NULL)
, _serverPresent(false) , _serverPresent(false)
...@@ -30,6 +43,9 @@ VideoReceiver::VideoReceiver(QObject* parent) ...@@ -30,6 +43,9 @@ VideoReceiver::VideoReceiver(QObject* parent)
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
_timer.setSingleShot(true); _timer.setSingleShot(true);
connect(&_timer, &QTimer::timeout, this, &VideoReceiver::_timeout); connect(&_timer, &QTimer::timeout, this, &VideoReceiver::_timeout);
connect(this, &VideoReceiver::msgErrorReceived, this, &VideoReceiver::_handleError);
connect(this, &VideoReceiver::msgEOSReceived, this, &VideoReceiver::_handleEOS);
connect(this, &VideoReceiver::msgStateChangedReceived, this, &VideoReceiver::_handleStateChanged);
#endif #endif
} }
...@@ -37,7 +53,6 @@ VideoReceiver::~VideoReceiver() ...@@ -37,7 +53,6 @@ VideoReceiver::~VideoReceiver()
{ {
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
stop(); stop();
setVideoSink(NULL);
if(_socket) { if(_socket) {
delete _socket; delete _socket;
} }
...@@ -59,16 +74,16 @@ void VideoReceiver::setVideoSink(GstElement* sink) ...@@ -59,16 +74,16 @@ void VideoReceiver::setVideoSink(GstElement* sink)
#endif #endif
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
static void newPadCB(GstElement * element, GstPad* pad, gpointer data) static void newPadCB(GstElement* element, GstPad* pad, gpointer data)
{ {
gchar *name; gchar* name;
name = gst_pad_get_name(pad); name = gst_pad_get_name(pad);
g_print("A new pad %s was created\n", name); g_print("A new pad %s was created\n", name);
GstCaps * p_caps = gst_pad_get_pad_template_caps (pad); GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
gchar * description = gst_caps_to_string(p_caps); gchar* description = gst_caps_to_string(p_caps);
qDebug() << p_caps << ", " << description; qCDebug(VideoReceiverLog) << p_caps << ", " << description;
g_free(description); g_free(description);
GstElement * p_rtph264depay = GST_ELEMENT(data); GstElement* p_rtph264depay = GST_ELEMENT(data);
if(gst_element_link_pads(element, name, p_rtph264depay, "sink") == false) if(gst_element_link_pads(element, name, p_rtph264depay, "sink") == false)
qCritical() << "newPadCB : failed to link elements\n"; qCritical() << "newPadCB : failed to link elements\n";
g_free(name); g_free(name);
...@@ -114,15 +129,26 @@ void VideoReceiver::_timeout() ...@@ -114,15 +129,26 @@ void VideoReceiver::_timeout()
_socket = new QTcpSocket; _socket = new QTcpSocket;
connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError); connect(_socket, static_cast<void (QTcpSocket::*)(QAbstractSocket::SocketError)>(&QTcpSocket::error), this, &VideoReceiver::_socketError);
connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected); connect(_socket, &QTcpSocket::connected, this, &VideoReceiver::_connected);
//qDebug() << "Trying to connect to:" << url.host() << url.port(); //qCDebug(VideoReceiverLog) << "Trying to connect to:" << url.host() << url.port();
_socket->connectToHost(url.host(), url.port()); _socket->connectToHost(url.host(), url.port());
_timer.start(5000); _timer.start(5000);
} }
#endif #endif
// When we finish our pipeline will look like this:
//
// +-->queue-->decoder-->_videosink
// |
// datasource-->demux-->parser-->tee
//
// ^
// |
// +-Here we will later link elements for recording
void VideoReceiver::start() void VideoReceiver::start()
{ {
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
qCDebug(VideoReceiverLog) << "start()";
if (_uri.isEmpty()) { if (_uri.isEmpty()) {
qCritical() << "VideoReceiver::start() failed because URI is not specified"; qCritical() << "VideoReceiver::start() failed because URI is not specified";
return; return;
...@@ -131,10 +157,14 @@ void VideoReceiver::start() ...@@ -131,10 +157,14 @@ void VideoReceiver::start()
qCritical() << "VideoReceiver::start() failed because video sink is not set"; qCritical() << "VideoReceiver::start() failed because video sink is not set";
return; return;
} }
if(_running) {
qCDebug(VideoReceiverLog) << "Already running!";
return;
}
bool isUdp = _uri.contains("udp://"); _starting = true;
stop(); bool isUdp = _uri.contains("udp://");
//-- For RTSP, check to see if server is there first //-- For RTSP, check to see if server is there first
if(!_serverPresent && !isUdp) { if(!_serverPresent && !isUdp) {
...@@ -148,6 +178,7 @@ void VideoReceiver::start() ...@@ -148,6 +178,7 @@ void VideoReceiver::start()
GstCaps* caps = NULL; GstCaps* caps = NULL;
GstElement* demux = NULL; GstElement* demux = NULL;
GstElement* parser = NULL; GstElement* parser = NULL;
GstElement* queue = NULL;
GstElement* decoder = NULL; GstElement* decoder = NULL;
do { do {
...@@ -196,27 +227,38 @@ void VideoReceiver::start() ...@@ -196,27 +227,38 @@ void VideoReceiver::start()
break; break;
} }
gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, decoder, _videoSink, NULL); if((_tee = gst_element_factory_make("tee", NULL)) == NULL) {
qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('tee')";
break;
}
if((queue = gst_element_factory_make("queue", NULL)) == NULL) {
qCritical() << "VideoReceiver::start() failed. Error with gst_element_factory_make('queue')";
break;
}
gboolean res = FALSE; gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue, decoder, _videoSink, NULL);
if(isUdp) { if(isUdp) {
res = gst_element_link_many(dataSource, demux, parser, decoder, _videoSink, NULL); // Link the pipeline in front of the tee
} else { if(!gst_element_link_many(dataSource, demux, parser, _tee, queue, decoder, _videoSink, NULL)) {
res = gst_element_link_many(demux, parser, decoder, _videoSink, NULL); qCritical() << "Unable to link elements.";
break;
} }
} else {
if (!res) { if(!gst_element_link_many(demux, parser, _tee, queue, decoder, _videoSink, NULL)) {
qCritical() << "VideoReceiver::start() failed. Error with gst_element_link_many()"; qCritical() << "Unable to link elements.";
break; break;
} }
}
dataSource = demux = parser = decoder = NULL; dataSource = demux = parser = queue = decoder = NULL;
GstBus* bus = NULL; GstBus* bus = NULL;
if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) { if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
gst_bus_add_watch(bus, _onBusMessage, this); gst_bus_enable_sync_message_emission(bus);
g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
gst_object_unref(bus); gst_object_unref(bus);
bus = NULL; bus = NULL;
} }
...@@ -253,63 +295,313 @@ void VideoReceiver::start() ...@@ -253,63 +295,313 @@ void VideoReceiver::start()
dataSource = NULL; dataSource = NULL;
} }
if (_tee != NULL) {
gst_object_unref(_tee);
dataSource = NULL;
}
if (queue != NULL) {
gst_object_unref(queue);
dataSource = NULL;
}
if (_pipeline != NULL) { if (_pipeline != NULL) {
gst_object_unref(_pipeline); gst_object_unref(_pipeline);
_pipeline = NULL; _pipeline = NULL;
} }
_running = false;
} else {
_running = true;
qCDebug(VideoReceiverLog) << "Running";
} }
_starting = false;
#endif #endif
} }
void VideoReceiver::stop() void VideoReceiver::stop()
{ {
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
if (_pipeline != NULL) { qCDebug(VideoReceiverLog) << "stop()";
gst_element_set_state(_pipeline, GST_STATE_NULL); if (_pipeline != NULL && !_stopping) {
gst_object_unref(_pipeline); qCDebug(VideoReceiverLog) << "Stopping _pipeline";
_pipeline = NULL; gst_element_send_event(_pipeline, gst_event_new_eos());
_serverPresent = false; _stopping = true;
GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR));
gst_object_unref(bus);
if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
_shutdownPipeline();
qCritical() << "Error stopping pipeline!";
} else if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_EOS) {
_handleEOS();
}
gst_message_unref(message);
} }
#endif #endif
} }
void VideoReceiver::setUri(const QString & uri) void VideoReceiver::setUri(const QString & uri)
{ {
stop();
_uri = uri; _uri = uri;
} }
// Set the directory new video files will be created in.
// Only read when startRecording() builds the output filename, so changing it
// mid-recording does not affect the file currently being written.
void VideoReceiver::setVideoSavePath(const QString & path)
{
_path = path;
qCDebug(VideoReceiverLog) << "New Path:" << _path;
}
#if defined(QGC_GST_STREAMING)
// Tear the whole streaming pipeline down and reset every state flag.
// Safe to call when no pipeline exists (logs and returns).
void VideoReceiver::_shutdownPipeline() {
if(!_pipeline) {
qCDebug(VideoReceiverLog) << "No pipeline";
return;
}
// Stop bus message emission first so no handler fires against
// half-destroyed elements while we dismantle the pipeline.
GstBus* bus = NULL;
if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != NULL) {
gst_bus_disable_sync_message_emission(bus);
gst_object_unref(bus);
bus = NULL;
}
gst_element_set_state(_pipeline, GST_STATE_NULL);
// Pull the video sink back out before unref'ing the pipeline -- it is
// reused across start()/stop() cycles rather than owned by the pipeline.
gst_bin_remove(GST_BIN(_pipeline), _videoSink);
gst_object_unref(_pipeline);
_pipeline = NULL;
// delete on NULL is a no-op, so this is safe when not recording.
delete _sink;
_sink = NULL;
_serverPresent = false;
_streaming = false;
_recording = false;
_stopping = false;
_running = false;
emit recordingChanged();
}
#endif
#if defined(QGC_GST_STREAMING)
// Slot connected to msgErrorReceived (emitted from the bus callback on
// GST_MESSAGE_ERROR): any pipeline error is treated as fatal and the
// entire pipeline is shut down.
void VideoReceiver::_handleError() {
qCDebug(VideoReceiverLog) << "Gstreamer error!";
_shutdownPipeline();
}
#endif
#if defined(QGC_GST_STREAMING)
// Slot for msgEOSReceived. EOS means different things depending on which
// operation triggered it, so dispatch on the current state flags.
void VideoReceiver::_handleEOS() {
if(_stopping) {
// stop() pushed an EOS through the pipeline: finish shutting down.
_shutdownPipeline();
qCDebug(VideoReceiverLog) << "Stopped";
} else if(_recording && _sink->removing) {
// EOS flushed through the detached recording branch: the video file
// is finalized, tear down the temporary recording pipeline.
_shutdownRecordingBranch();
} else {
// Unsolicited EOS (e.g. stream ended upstream): treat as fatal.
qCritical() << "VideoReceiver: Unexpected EOS!";
_shutdownPipeline();
}
}
#endif
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
void VideoReceiver::_onBusMessage(GstMessage* msg) void VideoReceiver::_handleStateChanged() {
_streaming = GST_STATE(_pipeline) == GST_STATE_PLAYING;
qCDebug(VideoReceiverLog) << "State changed, _streaming:" << _streaming;
}
#endif
#if defined(QGC_GST_STREAMING)
gboolean VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{ {
switch (GST_MESSAGE_TYPE(msg)) { Q_UNUSED(bus)
case GST_MESSAGE_EOS: Q_ASSERT(msg != NULL && data != NULL);
stop(); VideoReceiver* pThis = (VideoReceiver*)data;
break;
case GST_MESSAGE_ERROR: switch(GST_MESSAGE_TYPE(msg)) {
do { case(GST_MESSAGE_ERROR): {
gchar* debug; gchar* debug;
GError* error; GError* error;
gst_message_parse_error(msg, &error, &debug); gst_message_parse_error(msg, &error, &debug);
g_free(debug); g_free(debug);
qCritical() << error->message; qCritical() << error->message;
g_error_free(error); g_error_free(error);
} while(0); pThis->msgErrorReceived();
stop(); }
break;
case(GST_MESSAGE_EOS):
pThis->msgEOSReceived();
break;
case(GST_MESSAGE_STATE_CHANGED):
pThis->msgStateChangedReceived();
break; break;
default: default:
break; break;
} }
return TRUE;
} }
#endif #endif
// When we finish our pipeline will look like this:
//
// +-->queue-->decoder-->_videosink
// |
// datasource-->demux-->parser-->tee
// |
// | +--------------_sink-------------------+
// | | |
// we are adding these elements-> +->teepad-->queue-->matroskamux-->_filesink |
// | |
// +--------------------------------------+
void VideoReceiver::startRecording(void)
{
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
gboolean VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data) qCDebug(VideoReceiverLog) << "startRecording()";
// exit immediately if we are already recording
if(_pipeline == NULL || _recording) {
qCDebug(VideoReceiverLog) << "Already recording!";
return;
}
if(_path.isEmpty()) {
qWarning() << "VideoReceiver::startRecording Empty Path!";
return;
}
_sink = new Sink();
_sink->teepad = gst_element_get_request_pad(_tee, "src_%u");
_sink->queue = gst_element_factory_make("queue", NULL);
_sink->mux = gst_element_factory_make("matroskamux", NULL);
_sink->filesink = gst_element_factory_make("filesink", NULL);
_sink->removing = false;
if(!_sink->teepad || !_sink->queue || !_sink->mux || !_sink->filesink) {
qCritical() << "VideoReceiver::startRecording() failed to make _sink elements";
return;
}
QString videoFile;
videoFile = _path + "/QGC-" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") + ".mkv";
g_object_set(G_OBJECT(_sink->filesink), "location", qPrintable(videoFile), NULL);
qCDebug(VideoReceiverLog) << "New video file:" << videoFile;
gst_object_ref(_sink->queue);
gst_object_ref(_sink->mux);
gst_object_ref(_sink->filesink);
gst_bin_add_many(GST_BIN(_pipeline), _sink->queue, _sink->mux, _sink->filesink, NULL);
gst_element_link_many(_sink->queue, _sink->mux, _sink->filesink, NULL);
gst_element_sync_state_with_parent(_sink->queue);
gst_element_sync_state_with_parent(_sink->mux);
gst_element_sync_state_with_parent(_sink->filesink);
GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
gst_pad_link(_sink->teepad, sinkpad);
gst_object_unref(sinkpad);
_recording = true;
emit recordingChanged();
qCDebug(VideoReceiverLog) << "Recording started";
#endif
}
void VideoReceiver::stopRecording(void)
{ {
Q_UNUSED(bus) #if defined(QGC_GST_STREAMING)
Q_ASSERT(msg != NULL && data != NULL); qCDebug(VideoReceiverLog) << "stopRecording()";
VideoReceiver* pThis = (VideoReceiver*)data; // exit immediately if we are not recording
pThis->_onBusMessage(msg); if(_pipeline == NULL || !_recording) {
return TRUE; qCDebug(VideoReceiverLog) << "Not recording!";
return;
}
// Wait for data block before unlinking
gst_pad_add_probe(_sink->teepad, GST_PAD_PROBE_TYPE_IDLE, _unlinkCallBack, this, NULL);
#endif
}
// This is only installed on the transient _pipelineStopRec in order
// to finalize a video file. It is not used for the main _pipeline.
// -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recording elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
// Final stage of stopRecording(): the transient _pipelineStopRec has seen
// EOS, so the recording elements are flushed and the file is finalized.
// Remove and release everything, then report recording as stopped.
void VideoReceiver::_shutdownRecordingBranch()
{
// Detach the elements from the temporary pipeline before destroying it
// (they hold the extra refs taken in startRecording()).
gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->queue);
gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->mux);
gst_bin_remove(GST_BIN(_pipelineStopRec), _sink->filesink);
gst_element_set_state(_pipelineStopRec, GST_STATE_NULL);
gst_object_unref(_pipelineStopRec);
_pipelineStopRec = NULL;
// Bring each recording element down to NULL state, then drop our refs.
gst_element_set_state(_sink->filesink, GST_STATE_NULL);
gst_element_set_state(_sink->mux, GST_STATE_NULL);
gst_element_set_state(_sink->queue, GST_STATE_NULL);
gst_object_unref(_sink->queue);
gst_object_unref(_sink->mux);
gst_object_unref(_sink->filesink);
delete _sink;
_sink = NULL;
_recording = false;
emit recordingChanged();
qCDebug(VideoReceiverLog) << "Recording Stopped";
}
#endif
// -Unlink the recording branch from the tee in the main _pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline
#if defined(QGC_GST_STREAMING)
// Runs from the idle pad probe (_unlinkCallBack) once the tee pad carries no
// data: move the recording branch into a temporary pipeline and push an EOS
// through it so the muxer can finalize the file.
void VideoReceiver::_detachRecordingBranch(GstPadProbeInfo* info)
{
Q_UNUSED(info)
// Also unlinks and unrefs (the elements survive because startRecording()
// took an extra ref on each of them)
gst_bin_remove_many(GST_BIN(_pipeline), _sink->queue, _sink->mux, _sink->filesink, NULL);
// Give tee its pad back
gst_element_release_request_pad(_tee, _sink->teepad);
gst_object_unref(_sink->teepad);
// Create temporary pipeline
_pipelineStopRec = gst_pipeline_new("pipeStopRec");
// Put our elements from the recording branch into the temporary pipeline
gst_bin_add_many(GST_BIN(_pipelineStopRec), _sink->queue, _sink->mux, _sink->filesink, NULL);
gst_element_link_many(_sink->queue, _sink->mux, _sink->filesink, NULL);
// Add handler for EOS event
GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipelineStopRec));
gst_bus_enable_sync_message_emission(bus);
g_signal_connect(bus, "sync-message", G_CALLBACK(_onBusMessage), this);
gst_object_unref(bus);
if(gst_element_set_state(_pipelineStopRec, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
qCDebug(VideoReceiverLog) << "problem starting _pipelineStopRec";
}
// Send EOS at the beginning of the pipeline
GstPad* sinkpad = gst_element_get_static_pad(_sink->queue, "sink");
gst_pad_send_event(sinkpad, gst_event_new_eos());
gst_object_unref(sinkpad);
qCDebug(VideoReceiverLog) << "Recording branch unlinked";
}
#endif
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn VideoReceiver::_unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
Q_UNUSED(pad);
Q_ASSERT(info != NULL && user_data != NULL);
VideoReceiver* pThis = (VideoReceiver*)user_data;
// We will only act once
if(g_atomic_int_compare_and_exchange(&pThis->_sink->removing, FALSE, TRUE))
pThis->_detachRecordingBranch(info);
return GST_PAD_PROBE_REMOVE;
} }
#endif #endif
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
#ifndef VIDEORECEIVER_H #ifndef VIDEORECEIVER_H
#define VIDEORECEIVER_H #define VIDEORECEIVER_H
#include "QGCLoggingCategory.h"
#include <QObject> #include <QObject>
#include <QTimer> #include <QTimer>
#include <QTcpSocket> #include <QTcpSocket>
...@@ -25,40 +26,90 @@ ...@@ -25,40 +26,90 @@
#include <gst/gst.h> #include <gst/gst.h>
#endif #endif
Q_DECLARE_LOGGING_CATEGORY(VideoReceiverLog)
class VideoReceiver : public QObject class VideoReceiver : public QObject
{ {
Q_OBJECT Q_OBJECT
public: public:
#if defined(QGC_GST_STREAMING)
Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
#endif
explicit VideoReceiver(QObject* parent = 0); explicit VideoReceiver(QObject* parent = 0);
~VideoReceiver(); ~VideoReceiver();
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
void setVideoSink(GstElement* sink); void setVideoSink(GstElement* sink);
bool running() { return _running; }
bool recording() { return _recording; }
bool streaming() { return _streaming; }
bool starting() { return _starting; }
bool stopping() { return _stopping; }
#endif
signals:
#if defined(QGC_GST_STREAMING)
void recordingChanged();
void msgErrorReceived();
void msgEOSReceived();
void msgStateChangedReceived();
#endif #endif
public slots: public slots:
void start (); void start ();
void stop (); void stop ();
void setUri (const QString& uri); void setUri (const QString& uri);
void setVideoSavePath (const QString& path);
void stopRecording ();
void startRecording ();
private slots: private slots:
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
void _timeout (); void _timeout ();
void _connected (); void _connected ();
void _socketError (QAbstractSocket::SocketError socketError); void _socketError (QAbstractSocket::SocketError socketError);
void _handleError();
void _handleEOS();
void _handleStateChanged();
#endif #endif
private: private:
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
void _onBusMessage(GstMessage* message); typedef struct
static gboolean _onBusMessage(GstBus* bus, GstMessage* msg, gpointer data); {
GstPad* teepad;
GstElement* queue;
GstElement* mux;
GstElement* filesink;
gboolean removing;
} Sink;
bool _running;
bool _recording;
bool _streaming;
bool _starting;
bool _stopping;
Sink* _sink;
GstElement* _tee;
static gboolean _onBusMessage(GstBus* bus, GstMessage* message, gpointer user_data);
static GstPadProbeReturn _unlinkCallBack(GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
void _detachRecordingBranch(GstPadProbeInfo* info);
void _shutdownRecordingBranch();
void _shutdownPipeline();
#endif #endif
QString _uri; QString _uri;
QString _path;
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
GstElement* _pipeline; GstElement* _pipeline;
GstElement* _pipelineStopRec;
GstElement* _videoSink; GstElement* _videoSink;
#endif #endif
......
...@@ -39,6 +39,14 @@ QGCView { ...@@ -39,6 +39,14 @@ QGCView {
QGCPalette { id: qgcPal } QGCPalette { id: qgcPal }
// Folder picker backing the video recording "Save Path" setting.
FileDialog {
id: fileDialog
title: "Choose a location to save video files."
folder: shortcuts.home
selectFolder: true
// fileUrl is a file:// URL; VideoManager converts it to a local path.
onAccepted: QGroundControl.videoManager.setVideoSavePathByUrl(fileDialog.fileUrl)
}
QGCViewPanel { QGCViewPanel {
id: panel id: panel
anchors.fill: parent anchors.fill: parent
...@@ -525,6 +533,25 @@ QGCView { ...@@ -525,6 +533,25 @@ QGCView {
} }
} }
} }
// Video recording save-path setting: read-only path display plus a
// Browse button opening fileDialog. Only relevant for GStreamer builds.
Row {
spacing: ScreenTools.defaultFontPixelWidth
visible: QGroundControl.videoManager.isGStreamer
QGCLabel {
anchors.baseline: pathField.baseline
text: qsTr("Save Path:")
width: _labelWidth
}
QGCTextField {
id: pathField
width: _editFieldWidth
readOnly: true
text: QGroundControl.videoManager.videoSavePath
}
Button {
text: "Browse"
onClicked: fileDialog.visible = true
}
}
} }
} }
......
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
version="1.1"
width="250"
height="160"
id="svg4009">
<defs
id="defs4011" />
<metadata
id="metadata4014">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<g
transform="translate(0,-892.36218)"
id="layer1">
<rect
width="190"
height="160"
ry="36.792519"
x="0"
y="892.36218"
id="rect4017"
style="fill:#000000;fill-opacity:0.49803922;fill-rule:nonzero" />
<path
d="m 190,998.43327 0,-52.1422 60,-46.92889 0,146.00002 z"
id="path4028"
style="fill:#000000;fill-opacity:0.49803922;stroke:#000000;stroke-width:0;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:0.49803922;stroke-dasharray:none" />
</g>
</svg>
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment