Commit c3c6662a authored by Andrew Voznytsa

Update VideoReceiver API

parent d403f313
...@@ -113,7 +113,7 @@ VideoManager::setToolbox(QGCToolbox *toolbox) ...@@ -113,7 +113,7 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
connect(_videoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged); connect(_videoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged);
connect(_videoReceiver, &VideoReceiver::recordingStarted, this, &VideoManager::_recordingStarted); connect(_videoReceiver, &VideoReceiver::recordingStarted, this, &VideoManager::_recordingStarted);
connect(_videoReceiver, &VideoReceiver::recordingChanged, this, &VideoManager::_recordingChanged); connect(_videoReceiver, &VideoReceiver::recordingChanged, this, &VideoManager::_recordingChanged);
connect(_videoReceiver, &VideoReceiver::screenshotComplete, this, &VideoManager::_screenshotComplete); connect(_videoReceiver, &VideoReceiver::onTakeScreenshotComplete, this, &VideoManager::_onTakeScreenshotComplete);
// FIXME: AV: I believe _thermalVideoReceiver should be handled just like _videoReceiver in terms of events // FIXME: AV: I believe _thermalVideoReceiver should be handled just like _videoReceiver in terms of events
// and I expect that it will be changed during multiple video stream activity // and I expect that it will be changed during multiple video stream activity
...@@ -661,11 +661,11 @@ VideoManager::_streamChanged() ...@@ -661,11 +661,11 @@ VideoManager::_streamChanged()
//----------------------------------------------------------------------------- //-----------------------------------------------------------------------------
void void
VideoManager::_onStartComplete(bool status) VideoManager::_onStartComplete(VideoReceiver::STATUS status)
{ {
disconnect(_videoReceiver, &VideoReceiver::onStartComplete, this, &VideoManager::_onStartComplete); disconnect(_videoReceiver, &VideoReceiver::onStartComplete, this, &VideoManager::_onStartComplete);
if (status) { if (status == VideoReceiver::STATUS_OK) {
connect(_videoReceiver, &VideoReceiver::timeout, this, &VideoManager::_restartVideo); connect(_videoReceiver, &VideoReceiver::timeout, this, &VideoManager::_restartVideo);
connect(_videoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged); connect(_videoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged);
} else { } else {
...@@ -708,7 +708,7 @@ VideoManager::_recordingChanged() ...@@ -708,7 +708,7 @@ VideoManager::_recordingChanged()
//---------------------------------------------------------------------------------------- //----------------------------------------------------------------------------------------
void void
VideoManager::_screenshotComplete() VideoManager::_onTakeScreenshotComplete(VideoReceiver::STATUS status)
{ {
} }
......
...@@ -120,12 +120,12 @@ protected: ...@@ -120,12 +120,12 @@ protected:
void _updateThermalVideoUri (const QString& uri); void _updateThermalVideoUri (const QString& uri);
void _cleanupOldVideos (); void _cleanupOldVideos ();
void _streamChanged (); void _streamChanged ();
void _onStartComplete (bool status); void _onStartComplete (VideoReceiver::STATUS status);
void _restartVideo (); void _restartVideo ();
void _streamingChanged (); void _streamingChanged ();
void _recordingStarted (); void _recordingStarted ();
void _recordingChanged (); void _recordingChanged ();
void _screenshotComplete (); void _onTakeScreenshotComplete (VideoReceiver::STATUS status);
protected: protected:
QString _videoFile; QString _videoFile;
......
...@@ -108,6 +108,8 @@ static void qgcputenv(const QString& key, const QString& root, const QString& pa ...@@ -108,6 +108,8 @@ static void qgcputenv(const QString& key, const QString& root, const QString& pa
void void
GStreamer::initialize(int argc, char* argv[], int debuglevel) GStreamer::initialize(int argc, char* argv[], int debuglevel)
{ {
qRegisterMetaType<VideoReceiver::STATUS>("STATUS");
#ifdef Q_OS_MAC #ifdef Q_OS_MAC
#ifdef QGC_INSTALL_RELEASE #ifdef QGC_INSTALL_RELEASE
QString currentDir = QCoreApplication::applicationDirPath(); QString currentDir = QCoreApplication::applicationDirPath();
......
...@@ -79,7 +79,7 @@ GstVideoReceiver::start(const QString& uri, unsigned timeout) ...@@ -79,7 +79,7 @@ GstVideoReceiver::start(const QString& uri, unsigned timeout)
if(_pipeline) { if(_pipeline) {
qCDebug(VideoReceiverLog) << "Already running!"; qCDebug(VideoReceiverLog) << "Already running!";
_notificationHandler.dispatch([this](){ _notificationHandler.dispatch([this](){
emit onStartComplete(false); emit onStartComplete(STATUS_INVALID_STATE);
}); });
return; return;
} }
...@@ -87,7 +87,7 @@ GstVideoReceiver::start(const QString& uri, unsigned timeout) ...@@ -87,7 +87,7 @@ GstVideoReceiver::start(const QString& uri, unsigned timeout)
if (uri.isEmpty()) { if (uri.isEmpty()) {
qCDebug(VideoReceiverLog) << "Failed because URI is not specified"; qCDebug(VideoReceiverLog) << "Failed because URI is not specified";
_notificationHandler.dispatch([this](){ _notificationHandler.dispatch([this](){
emit onStartComplete(false); emit onStartComplete(STATUS_INVALID_URL);
}); });
return; return;
} }
...@@ -232,14 +232,14 @@ GstVideoReceiver::start(const QString& uri, unsigned timeout) ...@@ -232,14 +232,14 @@ GstVideoReceiver::start(const QString& uri, unsigned timeout)
} }
_notificationHandler.dispatch([this](){ _notificationHandler.dispatch([this](){
emit onStartComplete(false); emit onStartComplete(STATUS_FAIL);
}); });
} else { } else {
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-started"); GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-started");
qCDebug(VideoReceiverLog) << "Started"; qCDebug(VideoReceiverLog) << "Started";
_notificationHandler.dispatch([this](){ _notificationHandler.dispatch([this](){
emit onStartComplete(true); emit onStartComplete(STATUS_OK);
}); });
} }
} }
...@@ -331,6 +331,10 @@ GstVideoReceiver::stop(void) ...@@ -331,6 +331,10 @@ GstVideoReceiver::stop(void)
} }
qCDebug(VideoReceiverLog) << "Stopped"; qCDebug(VideoReceiverLog) << "Stopped";
_notificationHandler.dispatch([this](){
emit onStopComplete(STATUS_OK);
});
} }
void void
...@@ -364,6 +368,9 @@ GstVideoReceiver::startDecoding(void* sink) ...@@ -364,6 +368,9 @@ GstVideoReceiver::startDecoding(void* sink)
if(_videoSink != nullptr || _decoding) { if(_videoSink != nullptr || _decoding) {
qCDebug(VideoReceiverLog) << "Already decoding!"; qCDebug(VideoReceiverLog) << "Already decoding!";
_notificationHandler.dispatch([this](){
emit onStartDecodingComplete(STATUS_INVALID_STATE);
});
return; return;
} }
...@@ -371,6 +378,9 @@ GstVideoReceiver::startDecoding(void* sink) ...@@ -371,6 +378,9 @@ GstVideoReceiver::startDecoding(void* sink)
if ((pad = gst_element_get_static_pad(videoSink, "sink")) == nullptr) { if ((pad = gst_element_get_static_pad(videoSink, "sink")) == nullptr) {
qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink"; qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink";
_notificationHandler.dispatch([this](){
emit onStartDecodingComplete(STATUS_FAIL);
});
return; return;
} }
...@@ -387,17 +397,27 @@ GstVideoReceiver::startDecoding(void* sink) ...@@ -387,17 +397,27 @@ GstVideoReceiver::startDecoding(void* sink)
_removingDecoder = false; _removingDecoder = false;
if (!_streaming) { if (!_streaming) {
_notificationHandler.dispatch([this](){
emit onStartDecodingComplete(STATUS_OK);
});
return; return;
} }
if (!_addDecoder(_decoderValve)) { if (!_addDecoder(_decoderValve)) {
qCCritical(VideoReceiverLog) << "_addDecoder() failed"; qCCritical(VideoReceiverLog) << "_addDecoder() failed";
_notificationHandler.dispatch([this](){
emit onStartDecodingComplete(STATUS_FAIL);
});
return; return;
} }
g_object_set(_decoderValve, "drop", FALSE, nullptr); g_object_set(_decoderValve, "drop", FALSE, nullptr);
qCDebug(VideoReceiverLog) << "Decoding started"; qCDebug(VideoReceiverLog) << "Decoding started";
_notificationHandler.dispatch([this](){
emit onStartDecodingComplete(STATUS_OK);
});
} }
void void
...@@ -415,6 +435,9 @@ GstVideoReceiver::stopDecoding(void) ...@@ -415,6 +435,9 @@ GstVideoReceiver::stopDecoding(void)
// exit immediately if we are not decoding // exit immediately if we are not decoding
if (_pipeline == nullptr || !_decoding) { if (_pipeline == nullptr || !_decoding) {
qCDebug(VideoReceiverLog) << "Not decoding!"; qCDebug(VideoReceiverLog) << "Not decoding!";
_notificationHandler.dispatch([this](){
emit onStopDecodingComplete(STATUS_INVALID_STATE);
});
return; return;
} }
...@@ -422,15 +445,22 @@ GstVideoReceiver::stopDecoding(void) ...@@ -422,15 +445,22 @@ GstVideoReceiver::stopDecoding(void)
_removingDecoder = true; _removingDecoder = true;
_unlinkBranch(_decoderValve); bool ret = _unlinkBranch(_decoderValve);
// FIXME: AV: it is much better to emit onStopDecodingComplete() after decoding is really stopped
// (which happens later due to async design) but as for now it is also not so bad...
_notificationHandler.dispatch([this, ret](){
emit onStopDecodingComplete(ret ? STATUS_OK : STATUS_FAIL);
});
} }
void void
GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format) GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
{ {
if (_apiHandler.needDispatch()) { if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this, videoFile, format]() { QString cachedVideoFile = videoFile;
startRecording(videoFile, format); _apiHandler.dispatch([this, cachedVideoFile, format]() {
startRecording(cachedVideoFile, format);
}); });
return; return;
} }
...@@ -439,11 +469,17 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format) ...@@ -439,11 +469,17 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
if (_pipeline == nullptr) { if (_pipeline == nullptr) {
qCDebug(VideoReceiverLog) << "Streaming is not active!"; qCDebug(VideoReceiverLog) << "Streaming is not active!";
_notificationHandler.dispatch([this](){
emit onStartRecordingComplete(STATUS_INVALID_STATE);
});
return; return;
} }
if (_recording) { if (_recording) {
qCDebug(VideoReceiverLog) << "Already recording!"; qCDebug(VideoReceiverLog) << "Already recording!";
_notificationHandler.dispatch([this](){
emit onStartRecordingComplete(STATUS_INVALID_STATE);
});
return; return;
} }
...@@ -451,6 +487,9 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format) ...@@ -451,6 +487,9 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
if ((_fileSink = _makeFileSink(videoFile, format)) == nullptr) { if ((_fileSink = _makeFileSink(videoFile, format)) == nullptr) {
qCCritical(VideoReceiverLog) << "_makeFileSink() failed"; qCCritical(VideoReceiverLog) << "_makeFileSink() failed";
_notificationHandler.dispatch([this](){
emit onStartRecordingComplete(STATUS_FAIL);
});
return; return;
} }
...@@ -462,6 +501,9 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format) ...@@ -462,6 +501,9 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
if (!gst_element_link(_recorderValve, _fileSink)) { if (!gst_element_link(_recorderValve, _fileSink)) {
qCCritical(VideoReceiverLog) << "Failed to link valve and file sink"; qCCritical(VideoReceiverLog) << "Failed to link valve and file sink";
_notificationHandler.dispatch([this](){
emit onStartRecordingComplete(STATUS_FAIL);
});
return; return;
} }
...@@ -476,6 +518,9 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format) ...@@ -476,6 +518,9 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
if ((probepad = gst_element_get_static_pad(_recorderValve, "src")) == nullptr) { if ((probepad = gst_element_get_static_pad(_recorderValve, "src")) == nullptr) {
qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed"; qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
_notificationHandler.dispatch([this](){
emit onStartRecordingComplete(STATUS_FAIL);
});
return; return;
} }
...@@ -488,6 +533,7 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format) ...@@ -488,6 +533,7 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
_recording = true; _recording = true;
qCDebug(VideoReceiverLog) << "Recording started"; qCDebug(VideoReceiverLog) << "Recording started";
_notificationHandler.dispatch([this](){ _notificationHandler.dispatch([this](){
emit onStartRecordingComplete(STATUS_OK);
emit recordingChanged(); emit recordingChanged();
}); });
} }
...@@ -508,6 +554,9 @@ GstVideoReceiver::stopRecording(void) ...@@ -508,6 +554,9 @@ GstVideoReceiver::stopRecording(void)
// exit immediately if we are not recording // exit immediately if we are not recording
if (_pipeline == nullptr || !_recording) { if (_pipeline == nullptr || !_recording) {
qCDebug(VideoReceiverLog) << "Not recording!"; qCDebug(VideoReceiverLog) << "Not recording!";
_notificationHandler.dispatch([this](){
emit onStopRecordingComplete(STATUS_INVALID_STATE);
});
return; return;
} }
...@@ -515,22 +564,29 @@ GstVideoReceiver::stopRecording(void) ...@@ -515,22 +564,29 @@ GstVideoReceiver::stopRecording(void)
_removingRecorder = true; _removingRecorder = true;
_unlinkBranch(_recorderValve); bool ret = _unlinkBranch(_recorderValve);
// FIXME: AV: it is much better to emit onStopRecordingComplete() after recording is really stopped
// (which happens later due to async design) but as for now it is also not so bad...
_notificationHandler.dispatch([this, ret](){
emit onStopRecordingComplete(ret ? STATUS_OK : STATUS_FAIL);
});
} }
void void
GstVideoReceiver::takeScreenshot(const QString& imageFile) GstVideoReceiver::takeScreenshot(const QString& imageFile)
{ {
if (_apiHandler.needDispatch()) { if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this, imageFile]() { QString cachedImageFile = imageFile;
takeScreenshot(imageFile); _apiHandler.dispatch([this, cachedImageFile]() {
takeScreenshot(cachedImageFile);
}); });
return; return;
} }
// FIXME: AV: record screenshot here // FIXME: AV: record screenshot here
_notificationHandler.dispatch([this](){ _notificationHandler.dispatch([this](){
emit screenshotComplete(); emit onTakeScreenshotComplete(STATUS_NOT_IMPLEMENTED);
}); });
} }
...@@ -1030,14 +1086,14 @@ GstVideoReceiver::_noteEndOfStream(void) ...@@ -1030,14 +1086,14 @@ GstVideoReceiver::_noteEndOfStream(void)
// -Unlink the branch from the src pad // -Unlink the branch from the src pad
// -Send an EOS event at the beginning of that branch // -Send an EOS event at the beginning of that branch
void bool
GstVideoReceiver::_unlinkBranch(GstElement* from) GstVideoReceiver::_unlinkBranch(GstElement* from)
{ {
GstPad* src; GstPad* src;
if ((src = gst_element_get_static_pad(from, "src")) == nullptr) { if ((src = gst_element_get_static_pad(from, "src")) == nullptr) {
qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed"; qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
return; return false;
} }
GstPad* sink; GstPad* sink;
...@@ -1046,7 +1102,7 @@ GstVideoReceiver::_unlinkBranch(GstElement* from) ...@@ -1046,7 +1102,7 @@ GstVideoReceiver::_unlinkBranch(GstElement* from)
gst_object_unref(src); gst_object_unref(src);
src = nullptr; src = nullptr;
qCCritical(VideoReceiverLog) << "gst_pad_get_peer() failed"; qCCritical(VideoReceiverLog) << "gst_pad_get_peer() failed";
return; return false;
} }
if (!gst_pad_unlink(src, sink)) { if (!gst_pad_unlink(src, sink)) {
...@@ -1055,7 +1111,7 @@ GstVideoReceiver::_unlinkBranch(GstElement* from) ...@@ -1055,7 +1111,7 @@ GstVideoReceiver::_unlinkBranch(GstElement* from)
gst_object_unref(sink); gst_object_unref(sink);
sink = nullptr; sink = nullptr;
qCCritical(VideoReceiverLog) << "gst_pad_unlink() failed"; qCCritical(VideoReceiverLog) << "gst_pad_unlink() failed";
return; return false;
} }
gst_object_unref(src); gst_object_unref(src);
...@@ -1067,11 +1123,14 @@ GstVideoReceiver::_unlinkBranch(GstElement* from) ...@@ -1067,11 +1123,14 @@ GstVideoReceiver::_unlinkBranch(GstElement* from)
gst_object_unref(sink); gst_object_unref(sink);
sink = nullptr; sink = nullptr;
if (ret) { if (!ret) {
qCDebug(VideoReceiverLog) << "Branch EOS was sent";
} else {
qCCritical(VideoReceiverLog) << "Branch EOS was NOT sent"; qCCritical(VideoReceiverLog) << "Branch EOS was NOT sent";
return false;
} }
qCDebug(VideoReceiverLog) << "Branch EOS was sent";
return true;
} }
void void
...@@ -1175,10 +1234,10 @@ GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data) ...@@ -1175,10 +1234,10 @@ GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
error = nullptr; error = nullptr;
} }
// pThis->_apiHandler.dispatch([pThis](){ pThis->_apiHandler.dispatch([pThis](){
// qCDebug(VideoReceiverLog) << "Stopping because of error"; qCDebug(VideoReceiverLog) << "Stopping because of error";
// pThis->stop(); pThis->stop();
// }); });
} while(0); } while(0);
break; break;
case GST_MESSAGE_EOS: case GST_MESSAGE_EOS:
......
...@@ -118,7 +118,7 @@ protected: ...@@ -118,7 +118,7 @@ protected:
virtual void _noteTeeFrame(void); virtual void _noteTeeFrame(void);
virtual void _noteVideoSinkFrame(void); virtual void _noteVideoSinkFrame(void);
virtual void _noteEndOfStream(void); virtual void _noteEndOfStream(void);
virtual void _unlinkBranch(GstElement* from); virtual bool _unlinkBranch(GstElement* from);
virtual void _shutdownDecodingBranch (void); virtual void _shutdownDecodingBranch (void);
virtual void _shutdownRecordingBranch(void); virtual void _shutdownRecordingBranch(void);
......
...@@ -44,6 +44,16 @@ public: ...@@ -44,6 +44,16 @@ public:
FILE_FORMAT_MAX FILE_FORMAT_MAX
} FILE_FORMAT; } FILE_FORMAT;
typedef enum {
STATUS_OK = 0,
STATUS_FAIL,
STATUS_INVALID_STATE,
STATUS_INVALID_URL,
STATUS_NOT_IMPLEMENTED
} STATUS;
Q_ENUM(STATUS)
Q_PROPERTY(bool streaming READ streaming NOTIFY streamingChanged) Q_PROPERTY(bool streaming READ streaming NOTIFY streamingChanged)
Q_PROPERTY(bool decoding READ decoding NOTIFY decodingChanged) Q_PROPERTY(bool decoding READ decoding NOTIFY decodingChanged)
Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged) Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
...@@ -73,17 +83,14 @@ signals: ...@@ -73,17 +83,14 @@ signals:
void recordingChanged(void); void recordingChanged(void);
void recordingStarted(void); void recordingStarted(void);
void videoSizeChanged(void); void videoSizeChanged(void);
void screenshotComplete(void);
void onStartComplete(STATUS status);
// FIXME: AV: I see very big sense to convert 'bool status' into 'enum status' and clearly state what happened during operation void onStopComplete(STATUS status);
void onStartComplete(bool status); void onStartDecodingComplete(STATUS status);
// FIXME: AV: add these signals after completing onStartComplete() void onStopDecodingComplete(STATUS status);
// void onStopComplete(bool status); void onStartRecordingComplete(STATUS status);
// void onStartDecodingComplete(bool status); void onStopRecordingComplete(STATUS status);
// void onStopDecodingComplete(bool status); void onTakeScreenshotComplete(STATUS status);
// void onStartRecordingComplete(bool status);
// void onStopRecordingComplete(bool status);
// void onTakeScreenshotComplete(bool status);
public slots: public slots:
virtual void start(const QString& uri, unsigned timeout) = 0; virtual void start(const QString& uri, unsigned timeout) = 0;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment