Commit d403f313 authored by Andrew Voznytsa

Fix race conditions with camera manager

parent cf2628fe
@@ -190,6 +190,7 @@ VideoManager::startVideo()
     const unsigned timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
     if(_videoReceiver != nullptr) {
+        connect(_videoReceiver, &VideoReceiver::onStartComplete, this, &VideoManager::_onStartComplete);
         _videoReceiver->start(_videoUri, timeout);
         if (_videoSink != nullptr) {
             _videoReceiver->startDecoding(_videoSink);
@@ -212,6 +213,10 @@ VideoManager::stopVideo()
     if (qgcApp()->runningUnitTests()) {
         return;
     }
+    disconnect(_videoReceiver, &VideoReceiver::timeout, this, &VideoManager::_restartVideo);
+    disconnect(_videoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged);
 #if defined(QGC_GST_STREAMING)
     if(_videoReceiver) _videoReceiver->stop();
     if(_thermalVideoReceiver) _thermalVideoReceiver->stop();
@@ -526,35 +531,40 @@ VideoManager::_initVideo()
 }

 //-----------------------------------------------------------------------------
-void
+bool
 VideoManager::_updateSettings()
 {
     if(!_videoSettings)
-        return;
+        return false;
     //-- Auto discovery
     if(_activeVehicle && _activeVehicle->dynamicCameras()) {
         QGCVideoStreamInfo* pInfo = _activeVehicle->dynamicCameras()->currentStreamInstance();
         if(pInfo) {
+            bool status = false;
             qCDebug(VideoManagerLog) << "Configure primary stream: " << pInfo->uri();
             switch(pInfo->type()) {
             case VIDEO_STREAM_TYPE_RTSP:
-                _setVideoUri(pInfo->uri());
-                _toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceRTSP);
+                if (status = _updateVideoUri(pInfo->uri())) {
+                    _toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceRTSP);
+                }
                 break;
             case VIDEO_STREAM_TYPE_TCP_MPEG:
-                _setVideoUri(pInfo->uri());
-                _toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceTCP);
+                if (status = _updateVideoUri(pInfo->uri())) {
+                    _toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceTCP);
+                }
                 break;
             case VIDEO_STREAM_TYPE_RTPUDP:
-                _setVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(pInfo->uri()));
-                _toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceUDPH264);
+                if (status = _updateVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(pInfo->uri()))) {
+                    _toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceUDPH264);
+                }
                 break;
             case VIDEO_STREAM_TYPE_MPEG_TS_H264:
-                _setVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(pInfo->uri()));
-                _toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceMPEGTS);
+                if (status = _updateVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(pInfo->uri()))) {
+                    _toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceMPEGTS);
+                }
                 break;
             default:
-                _setVideoUri(pInfo->uri());
+                status = _updateVideoUri(pInfo->uri());
                 break;
             }
             //-- Thermal stream (if any)
@@ -564,50 +574,55 @@ VideoManager::_updateSettings()
                 switch(pTinfo->type()) {
                 case VIDEO_STREAM_TYPE_RTSP:
                 case VIDEO_STREAM_TYPE_TCP_MPEG:
-                    _setThermalVideoUri(pTinfo->uri());
+                    _updateThermalVideoUri(pTinfo->uri());
                     break;
                 case VIDEO_STREAM_TYPE_RTPUDP:
-                    _setThermalVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(pTinfo->uri()));
+                    _updateThermalVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(pTinfo->uri()));
                     break;
                 case VIDEO_STREAM_TYPE_MPEG_TS_H264:
-                    _setThermalVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(pTinfo->uri()));
+                    _updateThermalVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(pTinfo->uri()));
                     break;
                 default:
-                    _setThermalVideoUri(pTinfo->uri());
+                    _updateThermalVideoUri(pTinfo->uri());
                     break;
                 }
             }
-            return;
+            return status;
         }
     }
     QString source = _videoSettings->videoSource()->rawValue().toString();
     if (source == VideoSettings::videoSourceUDPH264)
-        _setVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
+        return _updateVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
     else if (source == VideoSettings::videoSourceUDPH265)
-        _setVideoUri(QStringLiteral("udp265://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
+        return _updateVideoUri(QStringLiteral("udp265://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
    else if (source == VideoSettings::videoSourceMPEGTS)
-        _setVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
+        return _updateVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
     else if (source == VideoSettings::videoSourceRTSP)
-        _setVideoUri(_videoSettings->rtspUrl()->rawValue().toString());
+        return _updateVideoUri(_videoSettings->rtspUrl()->rawValue().toString());
     else if (source == VideoSettings::videoSourceTCP)
-        _setVideoUri(QStringLiteral("tcp://%1").arg(_videoSettings->tcpUrl()->rawValue().toString()));
+        return _updateVideoUri(QStringLiteral("tcp://%1").arg(_videoSettings->tcpUrl()->rawValue().toString()));
 }

-void
-VideoManager::_setVideoUri(const QString& uri)
+bool
+VideoManager::_updateVideoUri(const QString& uri)
 {
 #if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
     //-- Taisync on iOS or Android sends a raw h.264 stream
     if (isTaisync()) {
-        _videoUri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
-        return;
+        uri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
     }
 #endif
+    if (uri == _videoUri) {
+        return false;
+    }
     _videoUri = uri;
+    return true;
 }

 void
-VideoManager::_setThermalVideoUri(const QString& uri)
+VideoManager::_updateThermalVideoUri(const QString& uri)
 {
 #if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
     //-- Taisync on iOS or Android sends a raw h.264 stream
@@ -635,6 +650,31 @@ VideoManager::_streamingChanged()
 #endif
 }

+//-----------------------------------------------------------------------------
+void
+VideoManager::_streamChanged()
+{
+    if (_updateSettings()) {
+        _restartVideo();
+    }
+}
+
+//-----------------------------------------------------------------------------
+void
+VideoManager::_onStartComplete(bool status)
+{
+    disconnect(_videoReceiver, &VideoReceiver::onStartComplete, this, &VideoManager::_onStartComplete);
+    if (status) {
+        connect(_videoReceiver, &VideoReceiver::timeout, this, &VideoManager::_restartVideo);
+        connect(_videoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged);
+    } else {
+        QTimer::singleShot(1000, [this](){
+            _restartVideo();
+        });
+    }
+}
+
 //-----------------------------------------------------------------------------
 void
 VideoManager::_restartVideo()
@@ -644,7 +684,7 @@ VideoManager::_restartVideo()
     stopVideo();
     _updateSettings();
     startVideo();
-    emit aspectRatioChanged();
+    // emit aspectRatioChanged();
 #endif
 }
@@ -690,7 +730,7 @@ VideoManager::_setActiveVehicle(Vehicle* vehicle)
     if(_activeVehicle) {
         connect(_activeVehicle, &Vehicle::connectionLostChanged, this, &VideoManager::_connectionLostChanged);
         if(_activeVehicle->dynamicCameras()) {
-            connect(_activeVehicle->dynamicCameras(), &QGCCameraManager::streamChanged, this, &VideoManager::_restartVideo);
+            connect(_activeVehicle->dynamicCameras(), &QGCCameraManager::streamChanged, this, &VideoManager::_streamChanged);
             QGCCameraControl* pCamera = _activeVehicle->dynamicCameras()->currentCameraInstance();
             if(pCamera) {
                 pCamera->resumeStream();

@@ -115,10 +115,12 @@ protected:
     friend class FinishVideoInitialization;

     void _initVideo             ();
-    void _updateSettings        ();
-    void _setVideoUri           (const QString& uri);
-    void _setThermalVideoUri    (const QString& uri);
+    bool _updateSettings        ();
+    bool _updateVideoUri        (const QString& uri);
+    void _updateThermalVideoUri (const QString& uri);
     void _cleanupOldVideos      ();
+    void _streamChanged         ();
+    void _onStartComplete       (bool status);
     void _restartVideo          ();
     void _streamingChanged      ();
     void _recordingStarted      ();

@@ -69,19 +69,26 @@ void
 GstVideoReceiver::start(const QString& uri, unsigned timeout)
 {
     if (_apiHandler.needDispatch()) {
-        _apiHandler.dispatch([this, uri, timeout]() {
-            start(uri, timeout);
+        QString cachedUri = uri;
+        _apiHandler.dispatch([this, cachedUri, timeout]() {
+            start(cachedUri, timeout);
         });
         return;
     }

     if(_pipeline) {
         qCDebug(VideoReceiverLog) << "Already running!";
+        _notificationHandler.dispatch([this](){
+            emit onStartComplete(false);
+        });
         return;
     }

     if (uri.isEmpty()) {
         qCDebug(VideoReceiverLog) << "Failed because URI is not specified";
+        _notificationHandler.dispatch([this](){
+            emit onStartComplete(false);
+        });
         return;
     }
@@ -223,9 +230,17 @@ GstVideoReceiver::start(const QString& uri, unsigned timeout)
                 _source = nullptr;
             }
         }
+
+        _notificationHandler.dispatch([this](){
+            emit onStartComplete(false);
+        });
     } else {
         GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-started");
         qCDebug(VideoReceiverLog) << "Started";
+
+        _notificationHandler.dispatch([this](){
+            emit onStartComplete(true);
+        });
     }
 }
@@ -247,6 +262,8 @@ GstVideoReceiver::stop(void)
         if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
             gst_bus_disable_sync_message_emission(bus);
+            g_signal_handlers_disconnect_by_data(bus, this);
+
             gboolean recordingValveClosed = TRUE;
             g_object_get(_recorderValve, "drop", &recordingValveClosed, nullptr);
@@ -305,6 +322,11 @@ GstVideoReceiver::stop(void)
             _notificationHandler.dispatch([this](){
                 emit streamingChanged();
             });
+        } else {
+            qCDebug(VideoReceiverLog) << "Streaming did not start";
+            _notificationHandler.dispatch([this](){
+                emit timeout();
+            });
         }
     }
@@ -1148,18 +1170,20 @@ GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
             }

             if (error != nullptr) {
-                qCCritical(VideoReceiverLog) << error->message;
+                qCCritical(VideoReceiverLog) << "GStreamer error:" << error->message;
                 g_error_free(error);
                 error = nullptr;
             }

-            pThis->_apiHandler.dispatch([pThis](){
-                pThis->stop();
-            });
+            // pThis->_apiHandler.dispatch([pThis](){
+            //     qCDebug(VideoReceiverLog) << "Stoppping because of error";
+            //     pThis->stop();
+            // });
         } while(0);
         break;
     case GST_MESSAGE_EOS:
         pThis->_apiHandler.dispatch([pThis](){
+            qCDebug(VideoReceiverLog) << "Received EOS";
             pThis->_handleEOS();
         });
         break;
@@ -1181,6 +1205,7 @@ GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
             if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
                 pThis->_apiHandler.dispatch([pThis](){
+                    qCDebug(VideoReceiverLog) << "Received branch EOS";
                     pThis->_handleEOS();
                 });
             }

@@ -75,6 +75,16 @@ signals:
     void videoSizeChanged(void);
     void screenshotComplete(void);

+    // FIXME: AV: I see very big sense to convert 'bool status' into 'enum status' and clearly state what happend during operation
+    void onStartComplete(bool status);
+
+    // FIXME: AV: add these signals after completing onStartComplete()
+    // void onStopComplete(bool status);
+    // void onStartDecodingComplete(bool status);
+    // void onStopDecodingComplete(bool status);
+    // void onStartRecordingComplete(bool status);
+    // void onStopRecordingComplete(bool status);
+    // void onTakeScreenshotComplete(bool status);
+
 public slots:
     virtual void start(const QString& uri, unsigned timeout) = 0;
     virtual void stop(void) = 0;
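
The FIXME in the signals block above proposes replacing the bare bool with an enum that states what actually happened during start. A minimal sketch of one possible shape, with hypothetical value names that are not part of this commit:

// Sketch only -- hypothetical names, not introduced by this commit.
enum class StartStatus {
    Success,          // pipeline built and set to PLAYING
    AlreadyRunning,   // start() called while a pipeline already exists
    InvalidUri,       // empty or otherwise unusable URI
    PipelineError     // element creation or state change failed
};

// The completion signal would then carry the reason instead of a bool:
// void onStartComplete(StartStatus status);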