Commit 98510ab0 authored by Andrew Voznytsa

Update VideoReceiver and VideoManager APIs

parent 9d973d0f
@@ -155,6 +155,9 @@ private:
     unsigned int _fileFormat = VideoReceiver::FILE_FORMAT_MIN;
     unsigned _stopRecordingAfter = 15;
     bool _useFakeSink = false;
+    bool _streaming = false;
+    bool _decoding = false;
+    bool _recording = false;
 };

 void
@@ -303,7 +306,7 @@ VideoReceiverApp::exec()
         qCDebug(AppLog) << "Streaming timeout";
         _dispatch([this](){
-            if (_receiver->streaming()) {
+            if (_streaming) {
                 _receiver->stop();
             } else {
                 if (--_connect > 0) {
@@ -320,8 +323,9 @@ VideoReceiverApp::exec()
        });
    });

-    QObject::connect(_receiver, &VideoReceiver::streamingChanged, [this](){
-        if (_receiver->streaming()) {
+    QObject::connect(_receiver, &VideoReceiver::streamingChanged, [this](bool active){
+        _streaming = active;
+        if (_streaming) {
            qCDebug(AppLog) << "Streaming started";
        } else {
            qCDebug(AppLog) << "Streaming stopped";
@@ -338,13 +342,14 @@ VideoReceiverApp::exec()
        }
    });

-    QObject::connect(_receiver, &VideoReceiver::decodingChanged, [this](){
-        if (_receiver->decoding()) {
+    QObject::connect(_receiver, &VideoReceiver::decodingChanged, [this](bool active){
+        _decoding = active;
+        if (_decoding) {
            qCDebug(AppLog) << "Decoding started";
        } else {
            qCDebug(AppLog) << "Decoding stopped";
-            if (_receiver->streaming()) {
-                if (!_receiver->recording()) {
+            if (_streaming) {
+                if (!_recording) {
                    _dispatch([this](){
                        _receiver->stop();
                    });
@@ -353,13 +358,14 @@ VideoReceiverApp::exec()
        }
    });

-    QObject::connect(_receiver, &VideoReceiver::recordingChanged, [this](){
-        if (_receiver->recording()) {
+    QObject::connect(_receiver, &VideoReceiver::recordingChanged, [this](bool active){
+        _recording = active;
+        if (_recording) {
            qCDebug(AppLog) << "Recording started";
        } else {
            qCDebug(AppLog) << "Recording stopped";
-            if (_receiver->streaming()) {
-                if (!_receiver->decoding()) {
+            if (_streaming) {
+                if (!_decoding) {
                    _dispatch([this](){
                        _receiver->stop();
                    });
...
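The VideoReceiverApp hunks above show the new listener contract: the state-change signals now carry the new value, and the client caches it instead of calling `streaming()`/`decoding()`/`recording()` back on the receiver. A minimal sketch of that pattern follows; the `Client` class and the exact include path are illustrative, not part of this commit.

```cpp
// Hypothetical Client illustrating the caching pattern used by VideoReceiverApp:
// take the value delivered with the signal and keep a local copy, instead of
// querying the receiver again from the slot.
#include <QObject>
#include "VideoReceiver.h"   // abstract interface from this commit; path may differ

class Client : public QObject
{
    Q_OBJECT
public:
    explicit Client(VideoReceiver* receiver, QObject* parent = nullptr)
        : QObject(parent)
    {
        connect(receiver, &VideoReceiver::streamingChanged, this, [this](bool active) {
            _streaming = active;    // cached copy; no call back into the receiver
        });
    }

    bool streaming() const { return _streaming; }

private:
    bool _streaming = false;
};
```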
@@ -44,8 +44,8 @@ Item {
     property real _labelFieldWidth: ScreenTools.defaultFontPixelWidth * 28
     property real _editFieldWidth: ScreenTools.defaultFontPixelWidth * 30
     property real _editFieldHeight: ScreenTools.defaultFontPixelHeight * 2
-    property var _videoReceiver: QGroundControl.videoManager.videoReceiver
-    property bool _recordingLocalVideo: _videoReceiver && _videoReceiver.recording
+    property var _videoManager: QGroundControl.videoManager
+    property bool _recordingLocalVideo: QGroundControl.videoManager.recording
     property var _dynamicCameras: activeVehicle ? activeVehicle.dynamicCameras : null
     property bool _isCamera: _dynamicCameras ? _dynamicCameras.cameras.count > 0 : false
@@ -305,15 +305,15 @@ Item {
                 _camera.stopVideo()
                 //-- Local video as well
                 if (_recordingVideo) {
-                    _videoReceiver.stopRecording()
+                    _videoManager.stopRecording()
                 }
             } else {
                 if(!_fullSD) {
                     _camera.startVideo()
                 }
                 //-- Local video as well
-                if(_videoReceiver) {
-                    _videoReceiver.startRecording()
+                if(_videoManager) {
+                    _videoManager.startRecording()
                 }
             }
         } else {
...
@@ -25,7 +25,6 @@ Item {
     clip: true
     property double _ar: QGroundControl.videoManager.aspectRatio
     property bool _showGrid: QGroundControl.settingsManager.videoSettings.gridLines.rawValue > 0
-    property var _videoReceiver: QGroundControl.videoManager.videoReceiver
     property var _dynamicCameras: activeVehicle ? activeVehicle.dynamicCameras : null
     property bool _connected: activeVehicle ? !activeVehicle.connectionLost : false
     property int _curCameraIndex: _dynamicCameras ? _dynamicCameras.currentCamera : 0
@@ -40,7 +39,7 @@ Item {
         id: noVideo
         anchors.fill: parent
         color: Qt.rgba(0,0,0,0.75)
-        visible: !(_videoReceiver && _videoReceiver.decoding)
+        visible: !(QGroundControl.videoManager.decoding)
         QGCLabel {
             text: QGroundControl.settingsManager.videoSettings.streamEnabled.rawValue ? qsTr("WAITING FOR VIDEO") : qsTr("VIDEO DISABLED")
             font.family: ScreenTools.demiboldFontFamily
@@ -58,7 +57,7 @@ Item {
     Rectangle {
         anchors.fill: parent
         color: "black"
-        visible: _videoReceiver && _videoReceiver.decoding
+        visible: QGroundControl.videoManager.decoding
         function getWidth() {
             //-- Fit Width or Stretch
             if(_fitMode === 0 || _fitMode === 2) {
@@ -129,7 +128,7 @@ Item {
         height: parent.getHeight()
         width: parent.getWidth()
         anchors.centerIn: parent
-        visible: _videoReceiver && _videoReceiver.decoding
+        visible: QGroundControl.videoManager.decoding
         sourceComponent: videoBackgroundComponent
         property bool videoDisabled: QGroundControl.settingsManager.videoSettings.videoSource.rawValue === QGroundControl.settingsManager.videoSettings.disabledVideoSource
...
@@ -31,9 +31,8 @@ Item {
     anchors.centerIn: parent
     property bool _communicationLost: activeVehicle ? activeVehicle.connectionLost : false
-    property var _videoReceiver: QGroundControl.videoManager.videoReceiver
-    property bool _recordingVideo: _videoReceiver && _videoReceiver.recording
-    property bool _decodingVideo: _videoReceiver && _videoReceiver.decoding
+    property bool _recordingVideo: QGroundControl.videoManager.recording
+    property bool _decodingVideo: QGroundControl.videoManager.decoding
     property bool _streamingEnabled: QGroundControl.settingsManager.videoSettings.streamConfigured
     property var _dynamicCameras: activeVehicle ? activeVehicle.dynamicCameras : null
     property int _curCameraIndex: _dynamicCameras ? _dynamicCameras.currentCamera : 0
@@ -70,10 +69,10 @@ Item {
     onClicked: {
         if(checked) {
             QGroundControl.settingsManager.videoSettings.streamEnabled.rawValue = 1
-            _videoReceiver.start()
+            QGroundControl.videoManager.startVideo()
         } else {
             QGroundControl.settingsManager.videoSettings.streamEnabled.rawValue = 0
-            _videoReceiver.stop()
+            QGroundControl.videoManager.stopVideo()
         }
     }
 }
...
@@ -114,14 +114,39 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
            _startReceiver(0);
        });
-        connect(_videoReceiver, &VideoReceiver::streamingChanged, this, [this](){
-            if (!_enableVideoRestart || _videoReceiver->streaming()) return;
+        connect(_videoReceiver, &VideoReceiver::streamingChanged, this, [this](bool active){
+            _streaming = active;
+            emit streamingChanged();
+            if (!_enableVideoRestart || active) return;
            _startReceiver(0);
        });
-        connect(_videoReceiver, &VideoReceiver::recordingStarted, this, &VideoManager::_recordingStarted);
-        connect(_videoReceiver, &VideoReceiver::recordingChanged, this, &VideoManager::_recordingChanged);
-        connect(_videoReceiver, &VideoReceiver::onTakeScreenshotComplete, this, &VideoManager::_onTakeScreenshotComplete);
+        connect(_videoReceiver, &VideoReceiver::decodingChanged, this, [this](bool active){
+            _decoding = active;
+            emit decodingChanged();
+        });
+        connect(_videoReceiver, &VideoReceiver::recordingChanged, this, [this](bool active){
+            _recording = active;
+            if (!active) {
+                _subtitleWriter.stopCapturingTelemetry();
+            }
+            emit recordingChanged();
+        });
+        connect(_videoReceiver, &VideoReceiver::recordingStarted, this, [this](){
+            _subtitleWriter.startCapturingTelemetry(_videoFile);
+        });
+        connect(_videoReceiver, &VideoReceiver::videoSizeChanged, this, [this](QSize size){
+            _videoSize = ((quint32)size.width() << 16) | (quint32)size.height();
+            emit videoSizeChanged();
+        });
+        connect(_videoReceiver, &VideoReceiver::onTakeScreenshotComplete, this, [this](VideoReceiver::STATUS status){
+            if (status == VideoReceiver::STATUS_OK) {
+            }
+        });

        // FIXME: AV: I believe _thermalVideoReceiver should be handled just like _videoReceiver in terms of event
        // and I expect that it will be changed during multiple video stream activity
@@ -131,8 +156,8 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
            _startReceiver(1);
        });
-        connect(_thermalVideoReceiver, &VideoReceiver::streamingChanged, this, [this](){
-            if (!_enableVideoRestart) return;
+        connect(_thermalVideoReceiver, &VideoReceiver::streamingChanged, this, [this](bool active){
+            if (!_enableVideoRestart || active) return;
            _startReceiver(1);
        });
    }
@@ -668,30 +693,6 @@ VideoManager::_restartVideo()
 #endif
 }

-//-----------------------------------------------------------------------------
-void
-VideoManager::_recordingStarted()
-{
-    _subtitleWriter.startCapturingTelemetry(_videoFile);
-}
-
-//-----------------------------------------------------------------------------
-void
-VideoManager::_recordingChanged()
-{
-#if defined(QGC_GST_STREAMING)
-    if (_videoReceiver && !_videoReceiver->recording()) {
-        _subtitleWriter.stopCapturingTelemetry();
-    }
-#endif
-}
-
-//----------------------------------------------------------------------------------------
-void
-VideoManager::_onTakeScreenshotComplete(VideoReceiver::STATUS status)
-{
-}
-
 //----------------------------------------------------------------------------------------
 void
 VideoManager::_startReceiver(unsigned id)
...
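In the new `videoSizeChanged` handler above, VideoManager folds the reported QSize into a single `quint32` (width in the upper 16 bits, height in the lower 16) so it can live in a `QAtomicInteger<quint32>` and be read back from the property getter without locking. A standalone sketch of the same pack/unpack round trip, with illustrative helper names:

```cpp
// Pack/unpack helpers mirroring the encoding used for _videoSize above:
// width goes into the upper 16 bits, height into the lower 16 bits.
#include <QSize>
#include <QtGlobal>

static quint32 packVideoSize(const QSize& size)
{
    return ((quint32)size.width() << 16) | ((quint32)size.height() & 0xFFFF);
}

static QSize unpackVideoSize(quint32 packed)
{
    return QSize((packed >> 16) & 0xFFFF, packed & 0xFFFF);
}

// Example: packVideoSize(QSize(1920, 1080)) == 0x07800438,
// and unpackVideoSize(0x07800438) == QSize(1920, 1080).
```

The obvious trade-off is that a dimension above 65535 would be truncated, which is a safe assumption for video resolutions.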
@@ -51,6 +51,10 @@ public:
     Q_PROPERTY(bool autoStreamConfigured READ autoStreamConfigured NOTIFY autoStreamConfiguredChanged)
     Q_PROPERTY(bool hasThermal READ hasThermal NOTIFY aspectRatioChanged)
     Q_PROPERTY(QString imageFile READ imageFile NOTIFY imageFileChanged)
+    Q_PROPERTY(bool streaming READ streaming NOTIFY streamingChanged)
+    Q_PROPERTY(bool decoding READ decoding NOTIFY decodingChanged)
+    Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
+    Q_PROPERTY(QSize videoSize READ videoSize NOTIFY videoSizeChanged)

     virtual bool hasVideo ();
     virtual bool isGStreamer ();
@@ -65,7 +69,25 @@ public:
     virtual bool hasThermal ();
     virtual QString imageFile ();

+    bool streaming(void) {
+        return _streaming;
+    }
+    bool decoding(void) {
+        return _decoding;
+    }
+    bool recording(void) {
+        return _recording;
+    }
+    QSize videoSize(void) {
+        const quint32 size = _videoSize;
+        return QSize((size >> 16) & 0xFFFF, size & 0xFFFF);
+    }
+
+    // FIXME: AV: they should be removed after finishing multiple video stream support
+    // new arcitecture does not assume direct access to video receiver from QML side, even if it works for now
     virtual VideoReceiver* videoReceiver () { return _videoReceiver; }
     virtual VideoReceiver* thermalVideoReceiver () { return _thermalVideoReceiver; }
@@ -99,6 +121,11 @@ signals:
     void aspectRatioChanged ();
     void autoStreamConfiguredChanged();
     void imageFileChanged ();
+    void streamingChanged ();
+    void decodingChanged ();
+    void recordingChanged ();
+    void recordingStarted ();
+    void videoSizeChanged ();

 protected slots:
     void _videoSourceChanged ();
@@ -120,28 +147,29 @@ protected:
     bool _updateThermalVideoUri (const QString& uri);
     void _cleanupOldVideos ();
     void _restartVideo ();
-    void _recordingStarted ();
-    void _recordingChanged ();
-    void _onTakeScreenshotComplete (VideoReceiver::STATUS status);
     void _startReceiver (unsigned id);
     void _stopReceiver (unsigned id);

 protected:
     QString _videoFile;
     QString _imageFile;
     SubtitleWriter _subtitleWriter;
     bool _isTaisync = false;
     VideoReceiver* _videoReceiver = nullptr;
-    VideoReceiver* _thermalVideoReceiver = nullptr;
-    bool _enableVideoRestart = false;
-    void* _videoSink = nullptr;
-    void* _thermalVideoSink = nullptr;
-    VideoSettings* _videoSettings = nullptr;
-    QString _videoUri;
-    QString _thermalVideoUri;
-    QString _videoSourceID;
-    bool _fullScreen = false;
-    Vehicle* _activeVehicle = nullptr;
+    QAtomicInteger<bool> _streaming = false;
+    QAtomicInteger<bool> _decoding = false;
+    QAtomicInteger<bool> _recording = false;
+    QAtomicInteger<quint32> _videoSize = 0;
+    VideoReceiver* _thermalVideoReceiver = nullptr;
+    bool _enableVideoRestart = false;
+    void* _videoSink = nullptr;
+    void* _thermalVideoSink = nullptr;
+    VideoSettings* _videoSettings = nullptr;
+    QString _videoUri;
+    QString _thermalVideoUri;
+    QString _videoSourceID;
+    bool _fullScreen = false;
+    Vehicle* _activeVehicle = nullptr;
 };

 #endif
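The header now exposes the cached state to QML through ordinary Q_PROPERTYs, so bindings such as `QGroundControl.videoManager.decoding` read a value owned by VideoManager rather than reaching into the receiver. A generic sketch of that property-backed-by-an-atomic pattern, mirroring the `QAtomicInteger<bool>` members the commit adds; the `StreamStatus` class below is illustrative only, not part of the commit:

```cpp
// Illustrative (not from the commit): a QML-visible bool property backed by an
// atomic flag that is updated from a receiver notification carrying the value.
#include <QObject>
#include <QAtomicInteger>

class StreamStatus : public QObject
{
    Q_OBJECT
    Q_PROPERTY(bool decoding READ decoding NOTIFY decodingChanged)
public:
    bool decoding(void) { return _decoding; }

public slots:
    // Connected to something like VideoReceiver::decodingChanged(bool).
    void onDecodingChanged(bool active) {
        _decoding = active;
        emit decodingChanged();
    }

signals:
    void decodingChanged();

private:
    QAtomicInteger<bool> _decoding = false;
};
```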
@@ -35,6 +35,9 @@ QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")
 GstVideoReceiver::GstVideoReceiver(QObject* parent)
     : VideoReceiver(parent)
+    , _streaming(false)
+    , _decoding(false)
+    , _recording(false)
     , _removingDecoder(false)
     , _removingRecorder(false)
     , _source(nullptr)
@@ -319,7 +322,7 @@ GstVideoReceiver::stop(void)
            _streaming = false;
            qCDebug(VideoReceiverLog) << "Streaming stopped";
            _dispatchSignal([this](){
-                emit streamingChanged();
+                emit streamingChanged(_streaming);
            });
        } else {
            qCDebug(VideoReceiverLog) << "Streaming did not start";
@@ -533,7 +536,7 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
    qCDebug(VideoReceiverLog) << "Recording started";
    _dispatchSignal([this](){
        emit onStartRecordingComplete(STATUS_OK);
-        emit recordingChanged();
+        emit recordingChanged(_recording);
    });
 }
@@ -934,7 +937,7 @@ GstVideoReceiver::_onNewSourcePad(GstPad* pad)
        _streaming = true;
        qCDebug(VideoReceiverLog) << "Streaming started";
        _dispatchSignal([this](){
-            emit streamingChanged();
+            emit streamingChanged(_streaming);
        });
    }
@@ -1049,19 +1052,23 @@ GstVideoReceiver::_addVideoSink(GstPad* pad)
            gint width, height;
            gst_structure_get_int(s, "width", &width);
            gst_structure_get_int(s, "height", &height);
-            _setVideoSize(QSize(width, height));
+            _dispatchSignal([this, width, height](){
+                emit videoSizeChanged(QSize(width, height));
+            });
        }
        gst_caps_unref(caps);
        caps = nullptr;
    } else {
-        _setVideoSize(QSize(0, 0));
+        _dispatchSignal([this](){
+            emit videoSizeChanged(QSize(0, 0));
+        });
    }

    _decoding = true;
    qCDebug(VideoReceiverLog) << "Decoding started";
    _dispatchSignal([this](){
-        emit decodingChanged();
+        emit decodingChanged(_decoding);
    });

    return true;
@@ -1181,7 +1188,7 @@ GstVideoReceiver::_shutdownDecodingBranch(void)
    _decoding = false;
    qCDebug(VideoReceiverLog) << "Decoding stopped";
    _dispatchSignal([this](){
-        emit decodingChanged();
+        emit decodingChanged(_decoding);
    });
 }
@@ -1202,7 +1209,7 @@ GstVideoReceiver::_shutdownRecordingBranch(void)
    _recording = false;
    qCDebug(VideoReceiverLog) << "Recording stopped";
    _dispatchSignal([this](){
-        emit recordingChanged();
+        emit recordingChanged(_recording);
    });
 }
...
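In `_addVideoSink()` the receiver now publishes the negotiated frame size directly through `videoSizeChanged(QSize)` instead of the removed `_setVideoSize()` helper. The width and height themselves still come from the first structure of the pad caps; a self-contained sketch of that extraction, using plain GStreamer calls and a hypothetical helper name:

```cpp
// Hypothetical helper illustrating the caps inspection done in _addVideoSink():
// read width/height from the first caps structure, defaulting to 0x0 when the
// caps carry no usable structure (the commit's else branch emits QSize(0, 0)).
#include <gst/gst.h>
#include <QSize>

static QSize sizeFromCaps(GstCaps* caps)
{
    gint width = 0;
    gint height = 0;

    if (caps != nullptr && gst_caps_get_size(caps) > 0) {
        if (GstStructure* s = gst_caps_get_structure(caps, 0)) {
            gst_structure_get_int(s, "width", &width);
            gst_structure_get_int(s, "height", &height);
        }
    }

    return QSize(width, height);
}
```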
@@ -102,11 +102,6 @@ protected slots:
     virtual void _handleEOS(void);

 protected:
-    void _setVideoSize(const QSize& size) {
-        _videoSize = ((quint32)size.width() << 16) | (quint32)size.height();
-        emit videoSizeChanged();
-    }
-
     virtual GstElement* _makeSource(const QString& uri);
     virtual GstElement* _makeDecoder(GstCaps* caps, GstElement* videoSink);
     virtual GstElement* _makeFileSink(const QString& videoFile, FILE_FORMAT format);
@@ -140,6 +135,9 @@ private:
     static GstPadProbeReturn _eosProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
     static GstPadProbeReturn _keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data);

+    bool _streaming;
+    bool _decoding;
+    bool _recording;
     bool _removingDecoder;
     bool _removingRecorder;
     GstElement* _source;
...
@@ -17,9 +17,6 @@
 #include <QObject>
 #include <QSize>

-#include <QQuickItem>
-
-#include <atomic>

 class VideoReceiver : public QObject
 {
@@ -28,10 +25,6 @@ class VideoReceiver : public QObject
 public:
     explicit VideoReceiver(QObject* parent = nullptr)
         : QObject(parent)
-        , _streaming(false)
-        , _decoding(false)
-        , _recording(false)
-        , _videoSize(0)
     {}

     virtual ~VideoReceiver(void) {}
@@ -54,35 +47,13 @@ public:
     Q_ENUM(STATUS)

-    Q_PROPERTY(bool streaming READ streaming NOTIFY streamingChanged)
-    Q_PROPERTY(bool decoding READ decoding NOTIFY decodingChanged)
-    Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
-    Q_PROPERTY(QSize videoSize READ videoSize NOTIFY videoSizeChanged)
-
-    bool streaming(void) {
-        return _streaming;
-    }
-
-    bool decoding(void) {
-        return _decoding;
-    }
-
-    bool recording(void) {
-        return _recording;
-    }
-
-    QSize videoSize(void) {
-        const quint32 size = _videoSize;
-        return QSize((size >> 16) & 0xFFFF, size & 0xFFFF);
-    }
-
 signals:
     void timeout(void);
-    void streamingChanged(void);
-    void decodingChanged(void);
-    void recordingChanged(void);
+    void streamingChanged(bool active);
+    void decodingChanged(bool active);
+    void recordingChanged(bool active);
     void recordingStarted(void);
-    void videoSizeChanged(void);
+    void videoSizeChanged(QSize size);
     void onStartComplete(STATUS status);
     void onStopComplete(STATUS status);
@@ -100,11 +71,4 @@ public slots:
     virtual void startRecording(const QString& videoFile, FILE_FORMAT format) = 0;
     virtual void stopRecording(void) = 0;
     virtual void takeScreenshot(const QString& imageFile) = 0;
-
-protected:
-    std::atomic<bool> _streaming;
-    std::atomic<bool> _decoding;
-    std::atomic<bool> _recording;
-    std::atomic<quint32>_videoSize;
 };