Commit f7be8bff authored by DoinLakeFlyer

Merge remote-tracking branch 'origin/TerrainProfile' into TerrainProfile

parents ecb051f3 aa8a9ac6
......@@ -7,6 +7,6 @@
[submodule "libs/OpenSSL/android_openssl"]
path = libs/OpenSSL/android_openssl
url = https://github.com/Auterion/android_openssl
[submodule "libs/gst-plugins-good"]
[submodule "libs/qmlglsink/gst-plugins-good"]
path = libs/qmlglsink/gst-plugins-good
url = https://github.com/mavlink/gst-plugins-good.git
# VideoReceiverApp
## Application
This is a simple test application developed to make VideoReceiver library development and testing easier. It can also be used as part of CI for system tests.
## Use cases and options
The application's behaviour depends on the executable name. There are two modes - QML and console. QML mode is enabled by renaming the application executable to something that starts with **Q** (for example QVideoReceiverApp). In this case the **video-sink** option is not available and the application always tries to use **qmlglsink** for video rendering. In the regular case (the executable name does not start with **Q**), **autovideosink** or **fakesink** is used, depending on the options.
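A rough sketch of this name-based mode selection (a minimal illustration; the helper name and exact logic are assumptions, not the application's actual code):

```cpp
// Hypothetical sketch: pick QML vs. console mode from the executable name.
#include <QFileInfo>
#include <QString>

static bool useQmlMode(const char* argv0)
{
    // "QVideoReceiverApp" -> QML mode, "VideoReceiverApp" -> console mode
    const QString name = QFileInfo(QString::fromLocal8Bit(argv0)).baseName();
    return name.startsWith(QLatin1Char('Q'));
}
```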
### Available options and required arguments
```VideoReceiverApp [options] url```
for example:
```VideoReceiverApp -d --stop-decoding 30 rtsp://127.0.0.1:8554/test```
#### Options
```-h, --help``` - displays help
```-t, --timeout <seconds>``` - specifies the source timeout
```-c, --connect <attempts>``` - specifies the number of connection attempts
```-d, --decode``` - enables video decoding and rendering
```--no-decode``` - disables video decoding and rendering if it was enabled by default
```--stop-decoding <seconds>``` - specifies the number of seconds after which decoding should be stopped
```-r, --record <file>``` - enables recording video to a file
```-f, --format <format>``` - specifies the recording file format: 0 - MKV, 1 - MOV, 2 - MP4
```--stop-recording <seconds>``` - specifies the number of seconds after which recording should be stopped
```--video-sink <sink>``` - specifies which video sink to use: 0 - autovideosink, 1 - fakesink
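For example, to record an RTSP stream to an MKV file for 30 seconds without decoding it (the file name here is arbitrary):
```VideoReceiverApp --no-decode -r test -f 0 --stop-recording 30 rtsp://127.0.0.1:8554/test```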
#### Arguments
```url``` - required, specifies the video URL.
The following URLs are supported:
```rtsp://<host>:<port>/mount/point``` - regular RTSP URL
```udp://<interface>:<port>``` - H.264 over RTP/UDP
```udp265://<interface>:<port>``` - H.265 over RTP/UDP
```tsusb://<interface>:<port>``` - Taisync's forwarded H.264 byte-aligned NALU stream over UDP
```tcp://<host>:<port>``` - MPEG-2 TS over TCP
```mpegts://<interface>:<port>``` - MPEG-2 TS over UDP
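For quick local testing, a matching H.264-over-RTP/UDP test stream can be generated with a stock GStreamer pipeline (shown as an assumption for illustration - it requires GStreamer with the x264enc element and is not part of this repository):
```gst-launch-1.0 videotestsrc ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5600```
and received with:
```VideoReceiverApp -d udp://0.0.0.0:5600```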
......@@ -155,6 +155,9 @@ private:
unsigned int _fileFormat = VideoReceiver::FILE_FORMAT_MIN;
unsigned _stopRecordingAfter = 15;
bool _useFakeSink = false;
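// Cached receiver state, updated from the streamingChanged/decodingChanged/
// recordingChanged signal arguments rather than by polling the receiver directly.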
bool _streaming = false;
bool _decoding = false;
bool _recording = false;
};
void
......@@ -299,52 +302,27 @@ VideoReceiverApp::exec()
startStreaming();
QObject::connect(_receiver, &VideoReceiver::timeout, [this](){
QObject::connect(_receiver, &VideoReceiver::timeout, [](){
qCDebug(AppLog) << "Streaming timeout";
_dispatch([this](){
if (_receiver->streaming()) {
_receiver->stop();
} else {
if (--_connect > 0) {
qCDebug(AppLog) << "Restarting streaming";
_dispatch([this](){
startStreaming();
});
} else {
qCDebug(AppLog) << "Closing...";
delete _receiver;
_app.exit();
}
}
});
});
QObject::connect(_receiver, &VideoReceiver::streamingChanged, [this](){
if (_receiver->streaming()) {
QObject::connect(_receiver, &VideoReceiver::streamingChanged, [this](bool active){
_streaming = active;
if (_streaming) {
qCDebug(AppLog) << "Streaming started";
} else {
qCDebug(AppLog) << "Streaming stopped";
_dispatch([this](){
if (--_connect > 0) {
qCDebug(AppLog) << "Restarting streaming";
startStreaming();
} else {
qCDebug(AppLog) << "Closing...";
delete _receiver;
_app.exit();
}
});
}
});
QObject::connect(_receiver, &VideoReceiver::decodingChanged, [this](){
if (_receiver->decoding()) {
QObject::connect(_receiver, &VideoReceiver::decodingChanged, [this](bool active){
_decoding = active;
if (_decoding) {
qCDebug(AppLog) << "Decoding started";
} else {
qCDebug(AppLog) << "Decoding stopped";
if (_receiver->streaming()) {
if (!_receiver->recording()) {
if (_streaming) {
if (!_recording) {
_dispatch([this](){
_receiver->stop();
});
......@@ -353,13 +331,14 @@ VideoReceiverApp::exec()
}
});
QObject::connect(_receiver, &VideoReceiver::recordingChanged, [this](){
if (_receiver->recording()) {
QObject::connect(_receiver, &VideoReceiver::recordingChanged, [this](bool active){
_recording = active;
if (_recording) {
qCDebug(AppLog) << "Recording started";
} else {
qCDebug(AppLog) << "Recording stopped";
if (_receiver->streaming()) {
if (!_receiver->decoding()) {
if (_streaming) {
if (!_decoding) {
_dispatch([this](){
_receiver->stop();
});
......@@ -368,6 +347,44 @@ VideoReceiverApp::exec()
}
});
QObject::connect(_receiver, &VideoReceiver::onStartComplete, [this](VideoReceiver::STATUS status){
if (status != VideoReceiver::STATUS_OK) {
qCDebug(AppLog) << "Video receiver start failed";
_dispatch([this](){
if (--_connect > 0) {
qCDebug(AppLog) << "Restarting ...";
_dispatch([this](){
startStreaming();
});
} else {
qCDebug(AppLog) << "Closing...";
delete _receiver;
_app.exit();
}
});
} else {
qCDebug(AppLog) << "Video receiver started";
}
});
QObject::connect(_receiver, &VideoReceiver::onStopComplete, [this](VideoReceiver::STATUS){
qCDebug(AppLog) << "Video receiver stopped";
_dispatch([this](){
if (--_connect > 0) {
qCDebug(AppLog) << "Restarting ...";
_dispatch([this](){
startStreaming();
});
} else {
qCDebug(AppLog) << "Closing...";
delete _receiver;
_app.exit();
}
});
});
return _app.exec();
}
......
......@@ -11,7 +11,7 @@ if(DEFINED ENV{QT_MKSPEC})
set(QT_MKSPEC $ENV{QT_MKSPEC})
endif()
if(UNIX AND NOT APPLE)
if(UNIX AND NOT APPLE AND NOT ANDROID)
set(LINUX TRUE)
endif()
......
......@@ -44,8 +44,8 @@ Item {
property real _labelFieldWidth: ScreenTools.defaultFontPixelWidth * 28
property real _editFieldWidth: ScreenTools.defaultFontPixelWidth * 30
property real _editFieldHeight: ScreenTools.defaultFontPixelHeight * 2
property var _videoReceiver: QGroundControl.videoManager.videoReceiver
property bool _recordingLocalVideo: _videoReceiver && _videoReceiver.recording
property var _videoManager: QGroundControl.videoManager
property bool _recordingLocalVideo: QGroundControl.videoManager.recording
property var _dynamicCameras: activeVehicle ? activeVehicle.dynamicCameras : null
property bool _isCamera: _dynamicCameras ? _dynamicCameras.cameras.count > 0 : false
......@@ -305,15 +305,15 @@ Item {
_camera.stopVideo()
//-- Local video as well
if (_recordingVideo) {
_videoReceiver.stopRecording()
_videoManager.stopRecording()
}
} else {
if(!_fullSD) {
_camera.startVideo()
}
//-- Local video as well
if(_videoReceiver) {
_videoReceiver.startRecording()
if(_videoManager) {
_videoManager.startRecording()
}
}
} else {
......
......@@ -29,12 +29,8 @@
QGC_LOGGING_CATEGORY(CustomLog, "CustomLog")
CustomVideoReceiver::CustomVideoReceiver(QObject* parent)
: VideoReceiver(parent)
: GstVideoReceiver(parent)
{
#if defined(QGC_GST_STREAMING)
//-- Shorter RTSP test interval
_restart_time_ms = 1000;
#endif
}
CustomVideoReceiver::~CustomVideoReceiver()
......
......@@ -14,7 +14,7 @@
#include "QGCCorePlugin.h"
#include "QGCOptions.h"
#include "QGCLoggingCategory.h"
#include "VideoReceiver.h"
#include "GstVideoReceiver.h"
#include "SettingsManager.h"
#include <QTranslator>
......@@ -25,7 +25,7 @@ class CustomSettings;
Q_DECLARE_LOGGING_CATEGORY(CustomLog)
//-- Our own, custom video receiver
class CustomVideoReceiver : public VideoReceiver
class CustomVideoReceiver : public GstVideoReceiver
{
Q_OBJECT
public:
......
......@@ -20,10 +20,10 @@ CustomVideoManager::CustomVideoManager(QGCApplication* app, QGCToolbox* toolbox)
//-----------------------------------------------------------------------------
void
CustomVideoManager::_updateSettings()
CustomVideoManager::_updateSettings(unsigned id)
{
if(!_videoSettings || !_videoReceiver)
return;
VideoManager::_updateSettings();
VideoManager::_updateSettings(id);
}
......@@ -23,6 +23,6 @@ public:
CustomVideoManager (QGCApplication* app, QGCToolbox* toolbox);
protected:
void _updateSettings ();
void _updateSettings (unsigned id);
};
......@@ -8595,7 +8595,8 @@ for current-based compensation [G/kA]</short_desc>
<values>
<value code="0">Disabled</value>
<value code="1">Throttle-based compensation</value>
<value code="2">Current-based compensation</value>
<value code="2">Current-based compensation (battery_status instance 0)</value>
<value code="3">Current-based compensation (battery_status instance 1)</value>
</values>
</parameter>
<parameter category="System" default="0" name="CAL_MAG_PRIME" type="INT32">
......@@ -10348,8 +10349,7 @@ How often the sensor is readout</short_desc>
</parameter>
<parameter default="0" name="TC_A_ENABLE" type="INT32">
<short_desc>Thermal compensation for accelerometer sensors</short_desc>
<min>0</min>
<max>1</max>
<reboot_required>true</reboot_required>
</parameter>
<parameter category="System" default="0" name="TC_B0_ID" type="INT32">
<short_desc>ID of Barometer that the calibration is for</short_desc>
......@@ -10452,8 +10452,7 @@ How often the sensor is readout</short_desc>
</parameter>
<parameter default="0" name="TC_B_ENABLE" type="INT32">
<short_desc>Thermal compensation for barometric pressure sensors</short_desc>
<min>0</min>
<max>1</max>
<reboot_required>true</reboot_required>
</parameter>
<parameter category="System" default="0" name="TC_G0_ID" type="INT32">
<short_desc>ID of Gyro that the calibration is for</short_desc>
......@@ -10628,8 +10627,7 @@ How often the sensor is readout</short_desc>
</parameter>
<parameter default="0" name="TC_G_ENABLE" type="INT32">
<short_desc>Thermal compensation for rate gyro sensors</short_desc>
<min>0</min>
<max>1</max>
<reboot_required>true</reboot_required>
</parameter>
</group>
<group name="UAVCAN">
......@@ -10804,9 +10802,9 @@ tailsitter, tiltrotor: main throttle</short_desc>
<increment>0.01</increment>
</parameter>
<parameter default="5.0" name="VT_DWN_PITCH_MAX" type="FLOAT">
<short_desc>Maximum allowed down-pitch the controller is able to demand. This prevents large, negative
lift values being created when facing strong winds. The vehicle will use the pusher motor
to accelerate forward if necessary</short_desc>
<short_desc>Maximum angle the vehicle is allowed to pitch down to generate forward force
when fixed-wing forward actuation is active (see VT_FWD_THRUST_EN).
If the demanded down pitch exceeds this limit, the fixed-wing forward actuators are used instead</short_desc>
<min>0.0</min>
<max>45.0</max>
</parameter>
......@@ -10814,9 +10812,24 @@ to accelerate forward if necessary</short_desc>
<short_desc>Lock elevons in multicopter mode</short_desc>
<long_desc>If set to 1 the elevons are locked in multicopter mode</long_desc>
</parameter>
<parameter default="0.0" name="VT_FWD_THRUST_SC" type="FLOAT">
<short_desc>Fixed wing thrust scale for hover forward flight</short_desc>
<long_desc>Scale applied to fixed wing thrust being used as source for forward acceleration in multirotor mode. This technique can be used to avoid the plane having to pitch down a lot in order to move forward. Setting this value to 0 (default) will disable this strategy.</long_desc>
<parameter default="0" name="VT_FWD_THRUST_EN" type="INT32">
<short_desc>Enable/disable usage of fixed-wing actuators in hover to generate forward force (instead of pitching down).
This technique can be used to avoid the plane having to pitch down in order to move forward.
This prevents large, negative lift values being created when facing strong winds.
Fixed-wing forward actuators refers to puller/pusher (standard VTOL), or forward-tilt (tiltrotor VTOL).
Only active if the demanded down pitch is above VT_DWN_PITCH_MAX, and uses VT_FWD_THRUST_SC to get from
the demanded down pitch to fixed-wing actuation</short_desc>
<values>
<value code="0">Disable FW forward actuation in hover.</value>
<value code="1">Enable FW forward actuation in hover in altitude, position and auto modes (except LANDING).</value>
<value code="2">Enable FW forward actuation in hover in altitude, position and auto modes if above MPC_LAND_ALT1.</value>
<value code="3">Enable FW forward actuation in hover in altitude, position and auto modes if above MPC_LAND_ALT2.</value>
<value code="4">Enable FW forward actuation in hover in altitude, position and auto modes.</value>
</values>
</parameter>
<parameter default="0.7" name="VT_FWD_THRUST_SC" type="FLOAT">
<short_desc>Fixed-wing actuator thrust scale for hover forward flight</short_desc>
<long_desc>Scale applied to the demanded down-pitch to get the fixed-wing forward actuation in hover mode. Only active if the demanded down pitch is above VT_DWN_PITCH_MAX. Enabled via VT_FWD_THRUST_EN.</long_desc>
<min>0.0</min>
<max>2.0</max>
</parameter>
......
......@@ -25,7 +25,6 @@ Item {
clip: true
property double _ar: QGroundControl.videoManager.aspectRatio
property bool _showGrid: QGroundControl.settingsManager.videoSettings.gridLines.rawValue > 0
property var _videoReceiver: QGroundControl.videoManager.videoReceiver
property var _dynamicCameras: activeVehicle ? activeVehicle.dynamicCameras : null
property bool _connected: activeVehicle ? !activeVehicle.connectionLost : false
property int _curCameraIndex: _dynamicCameras ? _dynamicCameras.currentCamera : 0
......@@ -40,7 +39,7 @@ Item {
id: noVideo
anchors.fill: parent
color: Qt.rgba(0,0,0,0.75)
visible: !(_videoReceiver && _videoReceiver.decoding)
visible: !(QGroundControl.videoManager.decoding)
QGCLabel {
text: QGroundControl.settingsManager.videoSettings.streamEnabled.rawValue ? qsTr("WAITING FOR VIDEO") : qsTr("VIDEO DISABLED")
font.family: ScreenTools.demiboldFontFamily
......@@ -58,7 +57,7 @@ Item {
Rectangle {
anchors.fill: parent
color: "black"
visible: _videoReceiver && _videoReceiver.decoding
visible: QGroundControl.videoManager.decoding
function getWidth() {
//-- Fit Width or Stretch
if(_fitMode === 0 || _fitMode === 2) {
......@@ -80,10 +79,9 @@ Item {
QGCVideoBackground {
id: videoContent
objectName: "videoContent"
receiver: _videoReceiver
Connections {
target: _videoReceiver
target: QGroundControl.videoManager
onImageFileChanged: {
videoContent.grabToImage(function(result) {
if (!result.saveToFile(QGroundControl.videoManager.imageFile)) {
......@@ -130,7 +128,7 @@ Item {
height: parent.getHeight()
width: parent.getWidth()
anchors.centerIn: parent
visible: _videoReceiver && _videoReceiver.decoding
visible: QGroundControl.videoManager.decoding
sourceComponent: videoBackgroundComponent
property bool videoDisabled: QGroundControl.settingsManager.videoSettings.videoSource.rawValue === QGroundControl.settingsManager.videoSettings.disabledVideoSource
......
......@@ -38,6 +38,7 @@ MapQuickItem {
showGimbalYaw: !isNaN(missionItem.missionGimbalYaw)
highlightSelected: true
onClicked: _item.clicked()
opacity: _item.opacity
property bool _isCurrentItem: missionItem ? missionItem.isCurrentItem || missionItem.hasCurrentChildItem : false
}
......
......@@ -71,6 +71,7 @@ Rectangle {
drag.maximumX: itemDragger.parent.width - parent.width
drag.maximumY: itemDragger.parent.height - parent.height
preventStealing: true
enabled: itemDragger.visible
onClicked: {
focus = true
......
......@@ -31,9 +31,8 @@ Item {
anchors.centerIn: parent
property bool _communicationLost: activeVehicle ? activeVehicle.connectionLost : false
property var _videoReceiver: QGroundControl.videoManager.videoReceiver
property bool _recordingVideo: _videoReceiver && _videoReceiver.recording
property bool _decodingVideo: _videoReceiver && _videoReceiver.decoding
property bool _recordingVideo: QGroundControl.videoManager.recording
property bool _decodingVideo: QGroundControl.videoManager.decoding
property bool _streamingEnabled: QGroundControl.settingsManager.videoSettings.streamConfigured
property var _dynamicCameras: activeVehicle ? activeVehicle.dynamicCameras : null
property int _curCameraIndex: _dynamicCameras ? _dynamicCameras.currentCamera : 0
......@@ -70,10 +69,10 @@ Item {
onClicked: {
if(checked) {
QGroundControl.settingsManager.videoSettings.streamEnabled.rawValue = 1
_videoReceiver.start()
QGroundControl.videoManager.startVideo()
} else {
QGroundControl.settingsManager.videoSettings.streamEnabled.rawValue = 0
_videoReceiver.stop()
QGroundControl.videoManager.stopVideo()
}
}
}
......
......@@ -40,6 +40,8 @@ if(BUILD_TESTING)
SurveyComplexItemTest.h
TransectStyleComplexItemTest.cc
TransectStyleComplexItemTest.h
TransectStyleComplexItemTestBase.cc
TransectStyleComplexItemTestBase.h
VisualMissionItemTest.cc
VisualMissionItemTest.h
)
......
......@@ -301,7 +301,7 @@ Item {
sourceItem: SplitIndicator {
z: _zorderSplitHandle
onClicked: mapPolygon.splitPolygonSegment(mapQuickItem.vertexIndex)
onClicked: if(_root.interactive) mapPolygon.splitPolygonSegment(mapQuickItem.vertexIndex)
}
}
}
......@@ -366,7 +366,7 @@ Item {
}
}
onClicked: menu.popupVertex(polygonVertex)
onClicked: if(_root.interactive) menu.popupVertex(polygonVertex)
}
}
......@@ -574,7 +574,7 @@ Item {
z: QGroundControl.zOrderMapItems + 1 // Over item indicators
onClicked: {
if (mouse.button === Qt.LeftButton) {
if (mouse.button === Qt.LeftButton && _root.interactive) {
mapPolygon.appendVertex(mapControl.toCoordinate(Qt.point(mouse.x, mouse.y), false /* clipToViewPort */))
}
}
......@@ -596,7 +596,7 @@ Item {
height: width
radius: width / 2
color: "white"
opacity: .90
opacity: interiorOpacity * .90
}
}
}
......
......@@ -175,6 +175,7 @@ Item {
line.color: lineColor
path: mapPolyline.path
visible: _root.visible
opacity: _root.opacity
}
}
......@@ -186,6 +187,7 @@ Item {
anchorPoint.x: sourceItem.width / 2
anchorPoint.y: sourceItem.height / 2
z: _zorderSplitHandle
opacity: _root.opacity
property int vertexIndex
......@@ -205,6 +207,8 @@ Item {
property var _splitHandle
property var _vertices: mapPolyline.path
opacity: _root.opacity
function _setHandlePosition() {
var nextIndex = index + 1
var distance = _vertices[index].distanceTo(_vertices[nextIndex])
......@@ -238,6 +242,7 @@ Item {
mapControl: _root.mapControl
id: dragArea
z: _zorderDragHandle
opacity: _root.opacity
property int polylineVertex
......@@ -267,6 +272,7 @@ Item {
anchorPoint.x: dragHandle.width / 2
anchorPoint.y: dragHandle.height / 2
z: _zorderDragHandle
opacity: _root.opacity
property int polylineVertex
......@@ -292,6 +298,8 @@ Item {
delegate: Item {
property var _visuals: [ ]
opacity: _root.opacity
Component.onCompleted: {
var dragHandle = dragHandleComponent.createObject(mapControl)
dragHandle.coordinate = Qt.binding(function() { return object.coordinate })
......@@ -366,7 +374,7 @@ Item {
z: QGroundControl.zOrderMapItems + 1 // Over item indicators
onClicked: {
if (mouse.button === Qt.LeftButton) {
if (mouse.button === Qt.LeftButton && _root.interactive) {
mapPolyline.appendVertex(mapControl.toCoordinate(Qt.point(mouse.x, mouse.y), false /* clipToViewPort */))
}
}
......
......@@ -25,9 +25,10 @@ TransectStyleMapVisuals {
id: mapPolylineVisuals
mapControl: map
mapPolyline: object.corridorPolyline
interactive: _currentItem
interactive: _currentItem && parent.interactive
lineWidth: 3
lineColor: "#be781c"
visible: _currentItem
opacity: parent.opacity
}
}
......@@ -24,6 +24,7 @@ Item {
id: _root
property var map ///< Map control to place item in
property bool interactive: true
signal clicked(int sequenceNumber)
......@@ -178,6 +179,7 @@ Item {
MouseArea {
anchors.fill: map
z: QGroundControl.zOrderMapItems + 1 // Over item indicators
visible: _root.interactive
readonly property int _decimalPlaces: 8
......@@ -199,6 +201,7 @@ Item {
mapControl: _root.map
itemIndicator: _loiterPointObject
itemCoordinate: _missionItem.loiterCoordinate
visible: _root.interactive
property bool _preventReentrancy: false
......@@ -224,6 +227,7 @@ Item {
mapControl: _root.map
itemIndicator: _landingPointObject
itemCoordinate: _missionItem.landingCoordinate
visible: _root.interactive
onItemCoordinateChanged: _missionItem.moveLandingPosition(itemCoordinate)
}
......
......@@ -39,8 +39,8 @@ Item {
property int _borderWidthExclusion: 0
property color _interiorColorExclusion: "orange"
property color _interiorColorInclusion: "transparent"
property real _interiorOpacityExclusion: 0.2
property real _interiorOpacityInclusion: 1
property real _interiorOpacityExclusion: 0.2 * opacity
property real _interiorOpacityInclusion: 1 * opacity
function addPolygon(inclusionPolygon) {
// Initial polygon is inset to take 2/3rds space
......@@ -104,6 +104,7 @@ Item {
borderColor: _borderColor
interiorColor: object.inclusion ? _interiorColorInclusion : _interiorColorExclusion
interiorOpacity: object.inclusion ? _interiorOpacityInclusion : _interiorOpacityExclusion
interactive: _root.interactive && mapPolygon && mapPolygon.interactive
}
}
......@@ -118,6 +119,7 @@ Item {
borderColor: _borderColor
interiorColor: object.inclusion ? _interiorColorInclusion : _interiorColorExclusion
interiorOpacity: object.inclusion ? _interiorOpacityInclusion : _interiorOpacityExclusion
interactive: _root.interactive && mapCircle && mapCircle.interactive
}
}
......@@ -146,7 +148,7 @@ Item {
MissionItemIndicatorDrag {
mapControl: map
itemCoordinate: myGeoFenceController.breachReturnPoint
//visible: itemCoordinate.isValid
visible: _root.interactive
onItemCoordinateChanged: myGeoFenceController.breachReturnPoint = itemCoordinate
}
......@@ -162,6 +164,7 @@ Item {
anchorPoint.y: sourceItem.anchorPointY
z: QGroundControl.zOrderMapItems
coordinate: myGeoFenceController.breachReturnPoint
opacity: _root.opacity
sourceItem: MissionItemIndexLabel {
label: qsTr("B", "Breach Return Point item indicator")
......
......@@ -23,6 +23,7 @@ Item {
property var map ///< Map control to place item in
property var vehicle ///< Vehicle associated with this item
property bool interactive: true ///< true: item interactions are enabled
signal clicked(int sequenceNumber)
......@@ -34,7 +35,7 @@ Item {
if (component.status === Component.Error) {
console.log("Error loading Qml: ", object.mapVisualQML, component.errorString())
}
_visualItem = component.createObject(map, { "map": _root.map, vehicle: _root.vehicle })
_visualItem = component.createObject(map, { "map": _root.map, vehicle: _root.vehicle, 'opacity': Qt.binding(function() { return _root.opacity }), 'interactive': Qt.binding(function() { return _root.interactive }) })
_visualItem.clicked.connect(_root.clicked)
}
}
......
......@@ -388,8 +388,9 @@ Item {
// This is the center rectangle of the map which is not obscured by tools
property rect centerViewport: Qt.rect(_leftToolWidth + _margin, _margin, editorMap.width - _leftToolWidth - _rightToolWidth - (_margin * 2), (terrainStatus.visible ? terrainStatus.y : height - _margin) - _margin)
property real _leftToolWidth: toolStrip.x + toolStrip.width
property real _rightToolWidth: rightPanel.width + rightPanel.anchors.rightMargin
property real _leftToolWidth: toolStrip.x + toolStrip.width
property real _rightToolWidth: rightPanel.width + rightPanel.anchors.rightMargin
property real _nonInteractiveOpacity: 0.5
// Initial map position duplicates Fly view position
Component.onCompleted: editorMap.center = QGroundControl.flightMapPosition
......@@ -436,18 +437,20 @@ Item {
// Add the mission item visuals to the map
Repeater {
model: _editingLayer == _layerMission ? _missionController.visualItems : undefined
model: _missionController.visualItems
delegate: MissionItemMapVisual {
map: editorMap
onClicked: _missionController.setCurrentPlanViewSeqNum(sequenceNumber, false)
visible: _editingLayer == _layerMission
opacity: _editingLayer == _layerMission ? 1 : editorMap._nonInteractiveOpacity
interactive: _editingLayer == _layerMission
}
}
// Add lines between waypoints
MissionLineView {
showSpecialVisual: _missionController.isROIBeginCurrentItem
model: _editingLayer == _layerMission ? _missionController.simpleFlightPathSegments : undefined
model: _missionController.simpleFlightPathSegments
opacity: _editingLayer == _layerMission ? 1 : editorMap._nonInteractiveOpacity
}
// Direction arrows in waypoint lines
......@@ -464,13 +467,14 @@ Item {
// Incomplete segment lines
MapItemView {
model: _editingLayer == _layerMission ? _missionController.incompleteComplexItemLines : undefined
model: _missionController.incompleteComplexItemLines
delegate: MapPolyline {
path: [ object.coordinate1, object.coordinate2 ]
line.width: 1
line.color: "red"
z: QGroundControl.zOrderWaypointLines
opacity: _editingLayer == _layerMission ? 1 : editorMap._nonInteractiveOpacity
}
}
......@@ -513,8 +517,7 @@ Item {
// Add the vehicles to the map
MapItemView {
model: QGroundControl.multiVehicleManager.vehicles
delegate:
VehicleMapItem {
delegate: VehicleMapItem {
vehicle: object
coordinate: object.coordinate
map: editorMap
......@@ -529,6 +532,7 @@ Item {
interactive: _editingLayer == _layerGeoFence
homePosition: _missionController.plannedHomePosition
planView: true
opacity: _editingLayer != _layerGeoFence ? editorMap._nonInteractiveOpacity : 1
}
RallyPointMapVisuals {
......@@ -536,6 +540,7 @@ Item {
myRallyPointController: _rallyPointController
interactive: _editingLayer == _layerRallyPoints
planView: true
opacity: _editingLayer != _layerRallyPoints ? editorMap._nonInteractiveOpacity : 1
}
// Airspace overlap support
......@@ -851,6 +856,7 @@ Item {
flightMap: editorMap
visible: _editingLayer == _layerGeoFence
}
// Rally Point Editor
RallyPointEditorHeader {
id: rallyPointHeader
......
......@@ -47,7 +47,7 @@ Item {
MissionItemIndicatorDrag {
mapControl: _root.map
itemCoordinate: rallyPointObject.coordinate
visible: rallyPointObject === myRallyPointController.currentRallyPoint
visible: rallyPointObject === myRallyPointController.currentRallyPoint && _root.interactive
property var rallyPointObject
......@@ -63,6 +63,7 @@ Item {
anchorPoint.x: sourceItem.anchorPointX
anchorPoint.y: sourceItem.anchorPointY
z: QGroundControl.zOrderMapItems
opacity: _root.opacity
property var rallyPointObject
......@@ -84,6 +85,7 @@ Item {
model: _rallyPoints
delegate: Item {
opacity: _root.opacity
property var _visuals: [ ]
Component.onCompleted: {
......
......@@ -24,6 +24,7 @@ Item {
property var map ///< Map control to place item in
property var vehicle ///< Vehicle associated with this item
property bool interactive: true
property var _missionItem: object
property var _itemVisual
......@@ -95,6 +96,7 @@ Item {
mapControl: _root.map
itemIndicator: _itemVisual
itemCoordinate: _missionItem.coordinate
visible: _root.interactive
onItemCoordinateChanged: _missionItem.coordinate = itemCoordinate
}
......@@ -109,7 +111,8 @@ Item {
z: QGroundControl.zOrderMapItems
missionItem: _missionItem
sequenceNumber: _missionItem.sequenceNumber
onClicked: _root.clicked(_missionItem.sequenceNumber)
onClicked: if(_root.interactive) _root.clicked(_missionItem.sequenceNumber)
opacity: _root.opacity
}
}
}
......@@ -27,6 +27,7 @@ Item {
property var _missionItem: object
property var _structurePolygon: object.structurePolygon
property var _flightPolygon: object.flightPolygon
property bool interactive: parent.interactive
signal clicked(int sequenceNumber)
......@@ -43,12 +44,12 @@ Item {
QGCMapPolygonVisuals {
mapControl: map
mapPolygon: _structurePolygon
interactive: _missionItem.isCurrentItem
interactive: _missionItem.isCurrentItem && _root.interactive
borderWidth: 1
borderColor: "black"
interiorColor: "green"
altColor: "red"
interiorOpacity: 0.25
interiorOpacity: 0.5 * _root.opacity
}
QGCMapPolygonVisuals {
......@@ -57,6 +58,7 @@ Item {
interactive: false
borderWidth: 2
borderColor: "white"
interiorOpacity: _root.opacity
}
// Entry point
......@@ -68,7 +70,7 @@ Item {
anchorPoint.y: sourceItem.anchorPointY
z: QGroundControl.zOrderMapItems
coordinate: _missionItem.coordinate
visible: _missionItem.exitCoordinate.isValid
visible: _missionItem.exitCoordinate.isValid && _root.interactive
sourceItem: MissionItemIndexLabel {
index: _missionItem.sequenceNumber
......@@ -88,7 +90,7 @@ Item {
anchorPoint.y: sourceItem.anchorPointY
z: QGroundControl.zOrderMapItems
coordinate: _missionItem.exitCoordinate
visible: _missionItem.exitCoordinate.isValid
visible: _missionItem.exitCoordinate.isValid && _root.interactive
sourceItem: MissionItemIndexLabel {
index: _missionItem.lastSequenceNumber
......
......@@ -24,6 +24,7 @@ Item {
property var map ///< Map control to place item in
property var vehicle ///< Vehicle associated with this item
property bool interactive: true
property var _missionItem: object
property var _takeoffIndicatorItem
......@@ -78,6 +79,7 @@ Item {
mapControl: _root.map
itemIndicator: _takeoffIndicatorItem
itemCoordinate: _missionItem.specifiesCoordinate ? _missionItem.coordinate : _missionItem.launchCoordinate
visible: _root.interactive
onItemCoordinateChanged: {
if (_missionItem.specifiesCoordinate) {
......@@ -96,7 +98,7 @@ Item {
mapControl: _root.map
itemIndicator: _launchIndicatorItem
itemCoordinate: _missionItem.launchCoordinate
visible: !_missionItem.launchTakeoffAtSameLocation
visible: !_missionItem.launchTakeoffAtSameLocation && _root.interactive
onItemCoordinateChanged: _missionItem.launchCoordinate = itemCoordinate
}
......@@ -111,6 +113,7 @@ Item {
missionItem: _missionItem
sequenceNumber: _missionItem.sequenceNumber
onClicked: _root.clicked(_missionItem.sequenceNumber)
opacity: _root.opacity
}
}
......@@ -121,7 +124,7 @@ Item {
coordinate: _missionItem.launchCoordinate
anchorPoint.x: sourceItem.anchorPointX
anchorPoint.y: sourceItem.anchorPointY
visible: !_missionItem.launchTakeoffAtSameLocation
visible: !_missionItem.launchTakeoffAtSameLocation && _root.interactive
sourceItem:
MissionItemIndexLabel {
......@@ -129,6 +132,7 @@ Item {
label: qsTr("Launch")
highlightSelected: true
onClicked: _root.clicked(_missionItem.sequenceNumber)
visible: _root.interactive
}
}
}
......@@ -140,7 +144,7 @@ Item {
MouseArea {
anchors.fill: map
z: QGroundControl.zOrderMapItems + 1 // Over item indicators
visible: !_missionItem.launchCoordinate.isValid
visible: !_missionItem.launchCoordinate.isValid && _root.interactive
readonly property int _decimalPlaces: 8
......
......@@ -24,6 +24,7 @@ Item {
property var map ///< Map control to place item in
property bool polygonInteractive: true
property bool interactive: true
property var _missionItem: object
property var _mapPolygon: object.surveyAreaPolygon
......@@ -70,12 +71,12 @@ Item {
id: mapPolygonVisuals
mapControl: map
mapPolygon: _mapPolygon
interactive: polygonInteractive && _missionItem.isCurrentItem
interactive: polygonInteractive && _missionItem.isCurrentItem && _root.interactive
borderWidth: 1
borderColor: "black"
interiorColor: QGroundControl.globalPalette.surveyPolygonInterior
altColor: QGroundControl.globalPalette.surveyPolygonTerrainCollision
interiorOpacity: 0.5
interiorOpacity: 0.5 * _root.opacity
}
// Full set of transects lines. Shown when item is selected.
......@@ -87,6 +88,7 @@ Item {
line.width: 2
path: _transectPoints
visible: _currentItem
opacity: _root.opacity
}
}
......@@ -99,6 +101,7 @@ Item {
line.width: 2
path: _showPartialEntryExit ? [ _transectPoints[0], _transectPoints[1] ] : []
visible: _showPartialEntryExit
opacity: _root.opacity
}
}
Component {
......@@ -109,6 +112,7 @@ Item {
line.width: 2
path: _showPartialEntryExit ? [ _transectPoints[_lastPointIndex - 1], _transectPoints[_lastPointIndex] ] : []
visible: _showPartialEntryExit
opacity: _root.opacity
}
}
......@@ -122,11 +126,12 @@ Item {
z: QGroundControl.zOrderMapItems
coordinate: _missionItem.coordinate
visible: _missionItem.exitCoordinate.isValid
opacity: _root.opacity
sourceItem: MissionItemIndexLabel {
index: _missionItem.sequenceNumber
checked: _missionItem.isCurrentItem
onClicked: _root.clicked(_missionItem.sequenceNumber)
onClicked: if(_root.interactive) _root.clicked(_missionItem.sequenceNumber)
}
}
}
......@@ -139,6 +144,7 @@ Item {
toCoord: _transectPoints[_firstTrueTransectIndex + 1]
arrowPosition: 1
visible: _currentItem
opacity: _root.opacity
}
}
......@@ -150,6 +156,7 @@ Item {
toCoord: _transectPoints[nextTrueTransectIndex + 1]
arrowPosition: 1
visible: _currentItem && _transectCount > 3
opacity: _root.opacity
property int nextTrueTransectIndex: _firstTrueTransectIndex + (_hasTurnaround ? 4 : 2)
}
......@@ -163,6 +170,7 @@ Item {
toCoord: _transectPoints[_lastTrueTransectIndex]
arrowPosition: 3
visible: _currentItem
opacity: _root.opacity
}
}
......@@ -174,6 +182,7 @@ Item {
toCoord: _transectPoints[prevTrueTransectIndex]
arrowPosition: 13
visible: _currentItem && _transectCount > 3
opacity: _root.opacity
property int prevTrueTransectIndex: _lastTrueTransectIndex - (_hasTurnaround ? 4 : 2)
}
......@@ -189,11 +198,12 @@ Item {
z: QGroundControl.zOrderMapItems
coordinate: _missionItem.exitCoordinate
visible: _missionItem.exitCoordinate.isValid
opacity: _root.opacity
sourceItem: MissionItemIndexLabel {
index: _missionItem.lastSequenceNumber
checked: _missionItem.isCurrentItem
onClicked: _root.clicked(_missionItem.sequenceNumber)
onClicked: if(_root.interactive) _root.clicked(_missionItem.sequenceNumber)
}
}
}
......
......@@ -11,7 +11,6 @@
#include <QApplication>
#include <QTimer>
#include <QQmlApplicationEngine>
#include <QElapsedTimer>
#include <QMap>
#include <QSet>
......@@ -38,6 +37,7 @@
#endif
// Work around circular header includes
class QQmlApplicationEngine;
class QGCSingleton;
class QGCToolbox;
class QGCFileDownload;
......
......@@ -11,6 +11,7 @@ import QtQuick 2.3
import QtQuick.Controls 1.2
import QtQuick.Controls.Styles 1.4
import QtQuick.Dialogs 1.2
import QtQuick.Layouts 1.12
import QGroundControl 1.0
import QGroundControl.Palette 1.0
......@@ -29,35 +30,71 @@ Item {
id: filtersDialogComponent
QGCViewDialog {
QGCFlickable {
anchors.fill: parent
anchors.fill: parent
contentHeight: categoryColumn.height
clip: true
Column {
id: categoryColumn
spacing: ScreenTools.defaultFontPixelHeight / 2
QGCButton {
text: qsTr("Clear All")
onClicked: {
var logCats = QGroundControl.loggingCategories()
for (var i=0; i<logCats.length; i++) {
QGroundControl.setCategoryLoggingOn(logCats[i], false)
}
QGroundControl.updateLoggingFilterRules()
categoryRepeater.model = undefined
categoryRepeater.model = QGroundControl.loggingCategories()
ColumnLayout {
anchors.fill: parent
RowLayout {
spacing: ScreenTools.defaultFontPixelHeight / 2
Layout.alignment: Qt.AlignVCenter
Layout.fillHeight: true
Layout.fillWidth: true
QGCLabel {
text: qsTr("Search:")
}
QGCTextField {
id: searchText
text: ""
Layout.fillWidth: true
enabled: true
}
QGCButton {
text: qsTr("Clear")
onClicked: searchText.text = ""
}
}
Repeater {
id: categoryRepeater
model: QGroundControl.loggingCategories()
QGCCheckBox {
text: modelData
checked: QGroundControl.categoryLoggingOn(modelData)
onClicked: {
QGroundControl.setCategoryLoggingOn(modelData, checked)
Row {
spacing: ScreenTools.defaultFontPixelHeight / 2
QGCButton {
text: qsTr("Clear All")
onClicked: categoryRepeater.setAllLogs(false)
}
}
Column {
id: categoryColumn
spacing: ScreenTools.defaultFontPixelHeight / 2
Repeater {
id: categoryRepeater
model: QGroundControl.loggingCategories()
function setAllLogs(value) {
var logCategories = QGroundControl.loggingCategories()
for (var category of logCategories) {
QGroundControl.setCategoryLoggingOn(category, value)
}
QGroundControl.updateLoggingFilterRules()
// Update model for repeater
categoryRepeater.model = undefined
categoryRepeater.model = QGroundControl.loggingCategories()
}
QGCCheckBox {
text: modelData
visible: searchText.text ? text.match(new RegExp(searchText.text, "i")) !== null : true
checked: QGroundControl.categoryLoggingOn(modelData)
onClicked: {
QGroundControl.setCategoryLoggingOn(modelData, checked)
QGroundControl.updateLoggingFilterRules()
}
}
}
}
......
......@@ -34,6 +34,10 @@
#include "GLVideoItemStub.h"
#endif
#ifdef QGC_GST_TAISYNC_ENABLED
#include "TaisyncHandler.h"
#endif
QGC_LOGGING_CATEGORY(VideoManagerLog, "VideoManagerLog")
#if defined(QGC_GST_STREAMING)
......@@ -60,16 +64,22 @@ VideoManager::VideoManager(QGCApplication* app, QGCToolbox* toolbox)
//-----------------------------------------------------------------------------
VideoManager::~VideoManager()
{
delete _videoReceiver;
_videoReceiver = nullptr;
delete _thermalVideoReceiver;
_thermalVideoReceiver = nullptr;
for (int i = 0; i < 2; i++) {
if (_videoReceiver[i] != nullptr) {
delete _videoReceiver[i];
_videoReceiver[i] = nullptr;
}
#if defined(QGC_GST_STREAMING)
GStreamer::releaseVideoSink(_thermalVideoSink);
_thermalVideoSink = nullptr;
GStreamer::releaseVideoSink(_videoSink);
_videoSink = nullptr;
if (_videoSink[i] != nullptr) {
// FIXME: AV: we need some interface for the video sink with a .release() call
// Currently VideoManager is destroyed after corePlugin() and we are crashing on app exit
// calling qgcApp()->toolbox()->corePlugin()->releaseVideoSink(_videoSink[i]);
// As for now let's call GStreamer::releaseVideoSink() directly
GStreamer::releaseVideoSink(_videoSink[i]);
_videoSink[i] = nullptr;
}
#endif
}
}
//-----------------------------------------------------------------------------
......@@ -102,21 +112,90 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
emit isGStreamerChanged();
qCDebug(VideoManagerLog) << "New Video Source:" << videoSource;
#if defined(QGC_GST_STREAMING)
_videoReceiver = toolbox->corePlugin()->createVideoReceiver(this);
_thermalVideoReceiver = toolbox->corePlugin()->createVideoReceiver(this);
_videoReceiver[0] = toolbox->corePlugin()->createVideoReceiver(this);
_videoReceiver[1] = toolbox->corePlugin()->createVideoReceiver(this);
connect(_videoReceiver[0], &VideoReceiver::streamingChanged, this, [this](bool active){
_streaming = active;
emit streamingChanged();
});
connect(_videoReceiver[0], &VideoReceiver::onStartComplete, this, [this](VideoReceiver::STATUS status) {
if (status == VideoReceiver::STATUS_OK) {
_videoStarted[0] = true;
if (_videoSink[0] != nullptr) {
// It is absolutely ok to have the video receiver active (streaming) while decoding is not active.
// This is handy for cases when you have many streams and want to show only some of them.
// NOTE that even if the decoder did not start it is still possible to record video
_videoReceiver[0]->startDecoding(_videoSink[0]);
}
} else if (status == VideoReceiver::STATUS_INVALID_URL) {
// Invalid URL - don't restart
} else if (status == VideoReceiver::STATUS_INVALID_STATE) {
// Already running
} else {
_restartVideo(0);
}
});
connect(_videoReceiver[0], &VideoReceiver::onStopComplete, this, [this](VideoReceiver::STATUS) {
_videoStarted[0] = false;
_startReceiver(0);
});
connect(_videoReceiver[0], &VideoReceiver::decodingChanged, this, [this](bool active){
_decoding = active;
emit decodingChanged();
});
connect(_videoReceiver[0], &VideoReceiver::recordingChanged, this, [this](bool active){
_recording = active;
if (!active) {
_subtitleWriter.stopCapturingTelemetry();
}
emit recordingChanged();
});
connect(_videoReceiver[0], &VideoReceiver::recordingStarted, this, [this](){
_subtitleWriter.startCapturingTelemetry(_videoFile);
});
connect(_videoReceiver[0], &VideoReceiver::videoSizeChanged, this, [this](QSize size){
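// Pack width into the high 16 bits and height into the low 16 bits so the
// size is stored and read back as a single integral value.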
_videoSize = ((quint32)size.width() << 16) | (quint32)size.height();
emit videoSizeChanged();
});
connect(_videoReceiver, &VideoReceiver::timeout, this, &VideoManager::_restartVideo);
connect(_videoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged);
connect(_videoReceiver, &VideoReceiver::recordingStarted, this, &VideoManager::_recordingStarted);
connect(_videoReceiver, &VideoReceiver::recordingChanged, this, &VideoManager::_recordingChanged);
connect(_videoReceiver, &VideoReceiver::screenshotComplete, this, &VideoManager::_screenshotComplete);
//connect(_videoReceiver, &VideoReceiver::onTakeScreenshotComplete, this, [this](VideoReceiver::STATUS status){
// if (status == VideoReceiver::STATUS_OK) {
// }
//});
// FIXME: AV: I believe _thermalVideoReceiver should be handled just like _videoReceiver in terms of events
// and I expect that this will be changed during the multiple video stream activity
connect(_thermalVideoReceiver, &VideoReceiver::timeout, this, &VideoManager::_restartVideo);
connect(_thermalVideoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged);
if (_videoReceiver[1] != nullptr) {
connect(_videoReceiver[1], &VideoReceiver::onStartComplete, this, [this](VideoReceiver::STATUS status) {
if (status == VideoReceiver::STATUS_OK) {
_videoStarted[1] = true;
if (_videoSink[1] != nullptr) {
_videoReceiver[1]->startDecoding(_videoSink[1]);
}
} else if (status == VideoReceiver::STATUS_INVALID_URL) {
// Invalid URL - don't restart
} else if (status == VideoReceiver::STATUS_INVALID_STATE) {
// Already running
} else {
_restartVideo(1);
}
});
connect(_videoReceiver[1], &VideoReceiver::onStopComplete, this, [this](VideoReceiver::STATUS) {
_videoStarted[1] = false;
_startReceiver(1);
});
}
#endif
_updateSettings();
_updateSettings(0);
_updateSettings(1);
if(isGStreamer()) {
startVideo();
} else {
......@@ -180,23 +259,8 @@ VideoManager::startVideo()
return;
}
#if defined(QGC_GST_STREAMING)
const unsigned timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
if(_videoReceiver != nullptr) {
_videoReceiver->start(_videoUri, timeout);
if (_videoSink != nullptr) {
_videoReceiver->startDecoding(_videoSink);
}
}
if(_thermalVideoReceiver != nullptr) {
_thermalVideoReceiver->start(_thermalVideoUri, timeout);
if (_thermalVideoSink != nullptr) {
_thermalVideoReceiver->startDecoding(_thermalVideoSink);
}
}
#endif
_startReceiver(0);
_startReceiver(1);
}
//-----------------------------------------------------------------------------
......@@ -206,10 +270,9 @@ VideoManager::stopVideo()
if (qgcApp()->runningUnitTests()) {
return;
}
#if defined(QGC_GST_STREAMING)
if(_videoReceiver) _videoReceiver->stop();
if(_thermalVideoReceiver) _thermalVideoReceiver->stop();
#endif
_stopReceiver(1);
_stopReceiver(0);
}
void
......@@ -219,7 +282,7 @@ VideoManager::startRecording(const QString& videoFile)
return;
}
#if defined(QGC_GST_STREAMING)
if (!_videoReceiver) {
if (!_videoReceiver[0]) {
qgcApp()->showAppMessage(tr("Video receiver is not ready."));
return;
}
......@@ -230,22 +293,31 @@ VideoManager::startRecording(const QString& videoFile)
qgcApp()->showAppMessage(tr("Invalid video format defined."));
return;
}
QString ext = kFileExtension[fileFormat - VideoReceiver::FILE_FORMAT_MIN];
//-- Disk usage maintenance
_cleanupOldVideos();
QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
if(savePath.isEmpty()) {
if (savePath.isEmpty()) {
qgcApp()->showAppMessage(tr("Unabled to record video. Video save path must be specified in Settings."));
return;
}
_videoFile = savePath + "/"
+ (videoFile.isEmpty() ? QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") : videoFile)
+ "." + kFileExtension[fileFormat - VideoReceiver::FILE_FORMAT_MIN];
+ ".";
QString videoFile2 = _videoFile + "2." + ext;
_videoFile += ext;
if (_videoReceiver[0] && _videoStarted[0]) {
_videoReceiver[0]->startRecording(_videoFile, fileFormat);
}
if (_videoReceiver[1] && _videoStarted[1]) {
_videoReceiver[1]->startRecording(videoFile2, fileFormat);
}
_videoReceiver->startRecording(_videoFile, fileFormat);
#else
Q_UNUSED(videoFile)
#endif
......@@ -258,11 +330,12 @@ VideoManager::stopRecording()
return;
}
#if defined(QGC_GST_STREAMING)
if (!_videoReceiver) {
return;
}
_videoReceiver->stopRecording();
for (int i = 0; i < 2; i++) {
if (_videoReceiver[i]) {
_videoReceiver[i]->stopRecording();
}
}
#endif
}
......@@ -273,7 +346,7 @@ VideoManager::grabImage(const QString& imageFile)
return;
}
#if defined(QGC_GST_STREAMING)
if (!_videoReceiver) {
if (!_videoReceiver[0]) {
return;
}
......@@ -281,7 +354,7 @@ VideoManager::grabImage(const QString& imageFile)
emit imageFileChanged();
_videoReceiver->takeScreenshot(_imageFile);
_videoReceiver[0]->takeScreenshot(_imageFile);
#else
Q_UNUSED(imageFile)
#endif
......@@ -399,35 +472,35 @@ VideoManager::_videoSourceChanged()
emit hasVideoChanged();
emit isGStreamerChanged();
emit isAutoStreamChanged();
_restartVideo();
_restartVideo(0);
}
//-----------------------------------------------------------------------------
void
VideoManager::_udpPortChanged()
{
_restartVideo();
_restartVideo(0);
}
//-----------------------------------------------------------------------------
void
VideoManager::_rtspUrlChanged()
{
_restartVideo();
_restartVideo(0);
}
//-----------------------------------------------------------------------------
void
VideoManager::_tcpUrlChanged()
{
_restartVideo();
_restartVideo(0);
}
//-----------------------------------------------------------------------------
void
VideoManager::_lowLatencyModeChanged()
{
//restartVideo();
_restartAllVideos();
}
//-----------------------------------------------------------------------------
......@@ -482,6 +555,7 @@ VideoManager::setfullScreen(bool f)
emit fullScreenChanged();
}
//-----------------------------------------------------------------------------
void
VideoManager::_initVideo()
{
......@@ -495,9 +569,12 @@ VideoManager::_initVideo()
QQuickItem* widget = root->findChild<QQuickItem*>("videoContent");
if (widget != nullptr && _videoReceiver != nullptr) {
if ((_videoSink = qgcApp()->toolbox()->corePlugin()->createVideoSink(this, widget)) != nullptr) {
_videoReceiver->startDecoding(_videoSink);
if (widget != nullptr && _videoReceiver[0] != nullptr) {
_videoSink[0] = qgcApp()->toolbox()->corePlugin()->createVideoSink(this, widget);
if (_videoSink[0] != nullptr) {
if (_videoStarted[0]) {
_videoReceiver[0]->startDecoding(_videoSink[0]);
}
} else {
qCDebug(VideoManagerLog) << "createVideoSink() failed";
}
......@@ -507,9 +584,12 @@ VideoManager::_initVideo()
widget = root->findChild<QQuickItem*>("thermalVideo");
if (widget != nullptr && _thermalVideoReceiver != nullptr) {
if ((_thermalVideoSink = qgcApp()->toolbox()->corePlugin()->createVideoSink(this, widget)) != nullptr) {
_thermalVideoReceiver->startDecoding(_thermalVideoSink);
if (widget != nullptr && _videoReceiver[1] != nullptr) {
_videoSink[1] = qgcApp()->toolbox()->corePlugin()->createVideoSink(this, widget);
if (_videoSink[1] != nullptr) {
if (_videoStarted[1]) {
_videoReceiver[1]->startDecoding(_videoSink[1]);
}
} else {
qCDebug(VideoManagerLog) << "createVideoSink() failed";
}
......@@ -520,150 +600,186 @@ VideoManager::_initVideo()
}
//-----------------------------------------------------------------------------
void
VideoManager::_updateSettings()
bool
VideoManager::_updateSettings(unsigned id)
{
if(!_videoSettings)
return;
return false;
const bool lowLatencyStreaming =_videoSettings->lowLatencyMode()->rawValue().toBool();
bool settingsChanged = _lowLatencyStreaming[id] != lowLatencyStreaming;
_lowLatencyStreaming[id] = lowLatencyStreaming;
//-- Auto discovery
if(_activeVehicle && _activeVehicle->dynamicCameras()) {
QGCVideoStreamInfo* pInfo = _activeVehicle->dynamicCameras()->currentStreamInstance();
if(pInfo) {
qCDebug(VideoManagerLog) << "Configure primary stream: " << pInfo->uri();
switch(pInfo->type()) {
case VIDEO_STREAM_TYPE_RTSP:
_setVideoUri(pInfo->uri());
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceRTSP);
break;
case VIDEO_STREAM_TYPE_TCP_MPEG:
_setVideoUri(pInfo->uri());
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceTCP);
break;
case VIDEO_STREAM_TYPE_RTPUDP:
_setVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(pInfo->uri()));
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceUDPH264);
break;
case VIDEO_STREAM_TYPE_MPEG_TS_H264:
_setVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(pInfo->uri()));
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceMPEGTS);
break;
default:
_setVideoUri(pInfo->uri());
break;
}
//-- Thermal stream (if any)
QGCVideoStreamInfo* pTinfo = _activeVehicle->dynamicCameras()->thermalStreamInstance();
if(pTinfo) {
qCDebug(VideoManagerLog) << "Configure secondary stream: " << pTinfo->uri();
switch(pTinfo->type()) {
if (id == 0) {
qCDebug(VideoManagerLog) << "Configure primary stream:" << pInfo->uri();
switch(pInfo->type()) {
case VIDEO_STREAM_TYPE_RTSP:
if ((settingsChanged |= _updateVideoUri(id, pInfo->uri()))) {
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceRTSP);
}
break;
case VIDEO_STREAM_TYPE_TCP_MPEG:
_setThermalVideoUri(pTinfo->uri());
if ((settingsChanged |= _updateVideoUri(id, pInfo->uri()))) {
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceTCP);
}
break;
case VIDEO_STREAM_TYPE_RTPUDP:
_setThermalVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(pTinfo->uri()));
if ((settingsChanged |= _updateVideoUri(id, QStringLiteral("udp://0.0.0.0:%1").arg(pInfo->uri())))) {
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceUDPH264);
}
break;
case VIDEO_STREAM_TYPE_MPEG_TS_H264:
_setThermalVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(pTinfo->uri()));
if ((settingsChanged |= _updateVideoUri(id, QStringLiteral("mpegts://0.0.0.0:%1").arg(pInfo->uri())))) {
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceMPEGTS);
}
break;
default:
_setThermalVideoUri(pTinfo->uri());
settingsChanged |= _updateVideoUri(id, pInfo->uri());
break;
}
}
return;
else if (id == 1) { //-- Thermal stream (if any)
QGCVideoStreamInfo* pTinfo = _activeVehicle->dynamicCameras()->thermalStreamInstance();
if (pTinfo) {
qCDebug(VideoManagerLog) << "Configure secondary stream:" << pTinfo->uri();
switch(pTinfo->type()) {
case VIDEO_STREAM_TYPE_RTSP:
case VIDEO_STREAM_TYPE_TCP_MPEG:
settingsChanged |= _updateVideoUri(id, pTinfo->uri());
break;
case VIDEO_STREAM_TYPE_RTPUDP:
settingsChanged |= _updateVideoUri(id, QStringLiteral("udp://0.0.0.0:%1").arg(pTinfo->uri()));
break;
case VIDEO_STREAM_TYPE_MPEG_TS_H264:
settingsChanged |= _updateVideoUri(id, QStringLiteral("mpegts://0.0.0.0:%1").arg(pTinfo->uri()));
break;
default:
settingsChanged |= _updateVideoUri(id, pTinfo->uri());
break;
}
}
}
return settingsChanged;
}
}
QString source = _videoSettings->videoSource()->rawValue().toString();
if (source == VideoSettings::videoSourceUDPH264)
_setVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
settingsChanged |= _updateVideoUri(0, QStringLiteral("udp://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
else if (source == VideoSettings::videoSourceUDPH265)
_setVideoUri(QStringLiteral("udp265://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
settingsChanged |= _updateVideoUri(0, QStringLiteral("udp265://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
else if (source == VideoSettings::videoSourceMPEGTS)
_setVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
settingsChanged |= _updateVideoUri(0, QStringLiteral("mpegts://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
else if (source == VideoSettings::videoSourceRTSP)
_setVideoUri(_videoSettings->rtspUrl()->rawValue().toString());
settingsChanged |= _updateVideoUri(0, _videoSettings->rtspUrl()->rawValue().toString());
else if (source == VideoSettings::videoSourceTCP)
_setVideoUri(QStringLiteral("tcp://%1").arg(_videoSettings->tcpUrl()->rawValue().toString()));
settingsChanged |= _updateVideoUri(0, QStringLiteral("tcp://%1").arg(_videoSettings->tcpUrl()->rawValue().toString()));
return settingsChanged;
}
void
VideoManager::_setVideoUri(const QString& uri)
//-----------------------------------------------------------------------------
bool
VideoManager::_updateVideoUri(unsigned id, const QString& uri)
{
#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
//-- Taisync on iOS or Android sends a raw h.264 stream
if (isTaisync()) {
_videoUri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
return;
if (id == 0) {
return _updateVideoUri(0, QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT));
} if (id == 1) {
// FIXME: AV: TAISYNC_VIDEO_UDP_PORT is used by video stream, thermal stream should go via its own proxy
if (!_videoUri[1].isEmpty()) {
_videoUri[1].clear();
return true;
} else {
return false;
}
}
}
#endif
_videoUri = uri;
if (uri == _videoUri[id]) {
return false;
}
_videoUri[id] = uri;
return true;
}
//-----------------------------------------------------------------------------
void
VideoManager::_setThermalVideoUri(const QString& uri)
VideoManager::_restartVideo(unsigned id)
{
#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
//-- Taisync on iOS or Android sends a raw h.264 stream
if (isTaisync()) {
// FIXME: AV: TAISYNC_VIDEO_UDP_PORT is used by video stream, thermal stream should go via its own proxy
_thermalVideoUri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
if (qgcApp()->runningUnitTests()) {
return;
}
#endif
_thermalVideoUri = uri;
}
//-----------------------------------------------------------------------------
void
VideoManager::_streamingChanged()
{
#if defined(QGC_GST_STREAMING)
// FIXME: AV: we need VideoReceiver::running() to avoid restarting if one of the streams is not active
// but since VideoManager is going to be replaced by a Video Model during the multiple video streaming development activity
// I'll leave it as is for a week or two
if ((_videoReceiver && !_videoReceiver->streaming())
/*|| (_thermalVideoReceiver && !_thermalVideoReceiver->streaming())*/) {
_restartVideo();
bool oldLowLatencyStreaming = _lowLatencyStreaming[id];
QString oldUri = _videoUri[id];
_updateSettings(id);
bool newLowLatencyStreaming = _lowLatencyStreaming[id];
QString newUri = _videoUri[id];
// FIXME: AV: use _updateSettings() result to check if settings were changed
if (oldUri == newUri && oldLowLatencyStreaming == newLowLatencyStreaming && _videoStarted[id]) {
qCDebug(VideoManagerLog) << "No sense to restart video streaming, skipped" << id;
return;
}
#endif
}
//-----------------------------------------------------------------------------
void
VideoManager::_restartVideo()
{
#if defined(QGC_GST_STREAMING)
qCDebug(VideoManagerLog) << "Restart video streaming";
stopVideo();
_updateSettings();
startVideo();
emit aspectRatioChanged();
qCDebug(VideoManagerLog) << "Restart video streaming" << id;
if (_videoStarted[id]) {
_stopReceiver(id);
} else {
_startReceiver(id);
}
#endif
}
//-----------------------------------------------------------------------------
void
VideoManager::_recordingStarted()
VideoManager::_restartAllVideos()
{
_subtitleWriter.startCapturingTelemetry(_videoFile);
_restartVideo(0);
_restartVideo(1);
}
//-----------------------------------------------------------------------------
//----------------------------------------------------------------------------------------
void
VideoManager::_recordingChanged()
VideoManager::_startReceiver(unsigned id)
{
#if defined(QGC_GST_STREAMING)
if (_videoReceiver && !_videoReceiver->recording()) {
_subtitleWriter.stopCapturingTelemetry();
const unsigned timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
if (id > 1) {
qCDebug(VideoManagerLog) << "Unsupported receiver id" << id;
} else if (_videoReceiver[id] != nullptr/* && _videoSink[id] != nullptr*/) {
if (!_videoUri[id].isEmpty()) {
_videoReceiver[id]->start(_videoUri[id], timeout, _lowLatencyStreaming[id] ? -1 : 0);
}
}
#endif
}
//----------------------------------------------------------------------------------------
void
VideoManager::_screenshotComplete()
VideoManager::_stopReceiver(unsigned id)
{
#if defined(QGC_GST_STREAMING)
if (id > 1) {
qCDebug(VideoManagerLog) << "Unsupported receiver id" << id;
} else if (_videoReceiver[id] != nullptr) {
_videoReceiver[id]->stop();
}
#endif
}
//----------------------------------------------------------------------------------------
......@@ -677,14 +793,14 @@ VideoManager::_setActiveVehicle(Vehicle* vehicle)
if(pCamera) {
pCamera->stopStream();
}
disconnect(_activeVehicle->dynamicCameras(), &QGCCameraManager::streamChanged, this, &VideoManager::_restartVideo);
disconnect(_activeVehicle->dynamicCameras(), &QGCCameraManager::streamChanged, this, &VideoManager::_restartAllVideos);
}
}
_activeVehicle = vehicle;
if(_activeVehicle) {
connect(_activeVehicle, &Vehicle::connectionLostChanged, this, &VideoManager::_connectionLostChanged);
if(_activeVehicle->dynamicCameras()) {
connect(_activeVehicle->dynamicCameras(), &QGCCameraManager::streamChanged, this, &VideoManager::_restartVideo);
connect(_activeVehicle->dynamicCameras(), &QGCCameraManager::streamChanged, this, &VideoManager::_restartAllVideos);
QGCCameraControl* pCamera = _activeVehicle->dynamicCameras()->currentCameraInstance();
if(pCamera) {
pCamera->resumeStream();
......@@ -695,7 +811,7 @@ VideoManager::_setActiveVehicle(Vehicle* vehicle)
setfullScreen(false);
}
emit autoStreamConfiguredChanged();
_restartVideo();
_restartAllVideos();
}
//----------------------------------------------------------------------------------------
......
......@@ -49,8 +49,12 @@ public:
Q_PROPERTY(double hfov READ hfov NOTIFY aspectRatioChanged)
Q_PROPERTY(double thermalHfov READ thermalHfov NOTIFY aspectRatioChanged)
Q_PROPERTY(bool autoStreamConfigured READ autoStreamConfigured NOTIFY autoStreamConfiguredChanged)
Q_PROPERTY(bool hasThermal READ hasThermal NOTIFY aspectRatioChanged)
Q_PROPERTY(bool hasThermal READ hasThermal NOTIFY decodingChanged)
Q_PROPERTY(QString imageFile READ imageFile NOTIFY imageFileChanged)
Q_PROPERTY(bool streaming READ streaming NOTIFY streamingChanged)
Q_PROPERTY(bool decoding READ decoding NOTIFY decodingChanged)
Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
Q_PROPERTY(QSize videoSize READ videoSize NOTIFY videoSizeChanged)
virtual bool hasVideo ();
virtual bool isGStreamer ();
......@@ -65,9 +69,27 @@ public:
virtual bool hasThermal ();
virtual QString imageFile ();
bool streaming(void) {
return _streaming;
}
virtual VideoReceiver* videoReceiver () { return _videoReceiver; }
virtual VideoReceiver* thermalVideoReceiver () { return _thermalVideoReceiver; }
bool decoding(void) {
return _decoding;
}
bool recording(void) {
return _recording;
}
QSize videoSize(void) {
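// _videoSize is an atomic quint32 with the width packed into the upper
// 16 bits and the height into the lower 16 bits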
const quint32 size = _videoSize;
return QSize((size >> 16) & 0xFFFF, size & 0xFFFF);
}
// FIXME: AV: these should be removed after multiple video stream support is finished
// the new architecture does not assume direct access to the video receiver from the QML side, even if it works for now
virtual VideoReceiver* videoReceiver () { return _videoReceiver[0]; }
virtual VideoReceiver* thermalVideoReceiver () { return _videoReceiver[1]; }
#if defined(QGC_DISABLE_UVC)
virtual bool uvcEnabled () { return false; }
......@@ -99,6 +121,11 @@ signals:
void aspectRatioChanged ();
void autoStreamConfiguredChanged();
void imageFileChanged ();
void streamingChanged ();
void decodingChanged ();
void recordingChanged ();
void recordingStarted ();
void videoSizeChanged ();
protected slots:
void _videoSourceChanged ();
......@@ -115,31 +142,37 @@ protected:
friend class FinishVideoInitialization;
void _initVideo ();
void _updateSettings ();
void _setVideoUri (const QString& uri);
void _setThermalVideoUri (const QString& uri);
bool _updateSettings (unsigned id);
bool _updateVideoUri (unsigned id, const QString& uri);
void _cleanupOldVideos ();
void _restartVideo ();
void _streamingChanged ();
void _recordingStarted ();
void _recordingChanged ();
void _screenshotComplete ();
void _restartAllVideos ();
void _restartVideo (unsigned id);
void _startReceiver (unsigned id);
void _stopReceiver (unsigned id);
protected:
QString _videoFile;
QString _imageFile;
SubtitleWriter _subtitleWriter;
bool _isTaisync = false;
VideoReceiver* _videoReceiver = nullptr;
VideoReceiver* _thermalVideoReceiver = nullptr;
void* _videoSink = nullptr;
void* _thermalVideoSink = nullptr;
VideoSettings* _videoSettings = nullptr;
QString _videoUri;
QString _thermalVideoUri;
QString _videoSourceID;
bool _fullScreen = false;
Vehicle* _activeVehicle = nullptr;
QString _videoFile;
QString _imageFile;
SubtitleWriter _subtitleWriter;
bool _isTaisync = false;
VideoReceiver* _videoReceiver[2] = { nullptr, nullptr };
void* _videoSink[2] = { nullptr, nullptr };
QString _videoUri[2];
// FIXME: AV: _videoStarted seems to be accessed from 3 different threads from time to time
// 1) Video Receiver thread
// 2) Video Manager/main app thread
// 3) Qt rendering thread (during the video sink creation process, which should happen in this thread)
// It works for now but...
bool _videoStarted[2] = { false, false };
bool _lowLatencyStreaming[2] = { false, false };
QAtomicInteger<bool> _streaming = false;
QAtomicInteger<bool> _decoding = false;
QAtomicInteger<bool> _recording = false;
QAtomicInteger<quint32> _videoSize = 0;
VideoSettings* _videoSettings = nullptr;
QString _videoSourceID;
bool _fullScreen = false;
Vehicle* _activeVehicle = nullptr;
};
#endif
......@@ -105,9 +105,28 @@ static void qgcputenv(const QString& key, const QString& root, const QString& pa
}
#endif
static void
blacklist()
{
GstRegistry* reg;
if ((reg = gst_registry_get()) == nullptr) {
return;
}
GstPluginFeature* plugin;
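// Setting the rank to GST_RANK_NONE prevents autoplugging elements
// (decodebin/parsebin) from ever selecting this decoder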
if ((plugin = gst_registry_lookup_feature(reg, "bcmdec")) != nullptr) {
qCCritical(GStreamerLog) << "Disable bcmdec";
gst_plugin_feature_set_rank(plugin, GST_RANK_NONE);
}
}
void
GStreamer::initialize(int argc, char* argv[], int debuglevel)
{
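// STATUS crosses thread boundaries via queued signal connections,
// so it must be registered with Qt's meta-type system up front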
qRegisterMetaType<VideoReceiver::STATUS>("STATUS");
#ifdef Q_OS_MAC
#ifdef QGC_INSTALL_RELEASE
QString currentDir = QCoreApplication::applicationDirPath();
......@@ -171,6 +190,8 @@ GStreamer::initialize(int argc, char* argv[], int debuglevel)
gst_ios_post_init();
#endif
blacklist();
/* the plugin must be loaded before loading the qml file to register the
* GstGLVideoItem qml item
* FIXME Add a QQmlExtensionPlugin into qmlglsink to register GstGLVideoItem
......
......@@ -35,6 +35,9 @@ QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")
GstVideoReceiver::GstVideoReceiver(QObject* parent)
: VideoReceiver(parent)
, _streaming(false)
, _decoding(false)
, _recording(false)
, _removingDecoder(false)
, _removingRecorder(false)
, _source(nullptr)
......@@ -50,47 +53,54 @@ GstVideoReceiver::GstVideoReceiver(QObject* parent)
, _resetVideoSink(true)
, _videoSinkProbeId(0)
, _udpReconnect_us(5000000)
, _signalDepth(0)
, _endOfStream(false)
{
_apiHandler.start();
_notificationHandler.start();
_slotHandler.start();
connect(&_watchdogTimer, &QTimer::timeout, this, &GstVideoReceiver::_watchdog);
_watchdogTimer.start(1000);
}
GstVideoReceiver::~GstVideoReceiver(void)
{
stop();
_notificationHandler.shutdown();
_apiHandler.shutdown();
_slotHandler.shutdown();
}
void
GstVideoReceiver::start(const QString& uri, unsigned timeout)
GstVideoReceiver::start(const QString& uri, unsigned timeout, int buffer)
{
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this, uri, timeout]() {
start(uri, timeout);
if (_needDispatch()) {
QString cachedUri = uri;
_slotHandler.dispatch([this, cachedUri, timeout, buffer]() {
start(cachedUri, timeout, buffer);
});
return;
}
if(_pipeline) {
qCDebug(VideoReceiverLog) << "Already running!";
qCDebug(VideoReceiverLog) << "Already running!" << _uri;
_dispatchSignal([this](){
emit onStartComplete(STATUS_INVALID_STATE);
});
return;
}
if (uri.isEmpty()) {
qCDebug(VideoReceiverLog) << "Failed because URI is not specified";
_dispatchSignal([this](){
emit onStartComplete(STATUS_INVALID_URL);
});
return;
}
qCDebug(VideoReceiverLog) << "Starting";
_uri = uri;
_timeout = timeout;
_buffer = buffer;
qCDebug(VideoReceiverLog) << "Starting" << _uri << ", buffer" << _buffer;
_endOfStream = false;
_timeout = timeout;
bool running = false;
bool pipelineUp = false;
......@@ -152,12 +162,35 @@ GstVideoReceiver::start(const QString& uri, unsigned timeout)
break;
}
g_signal_connect(_source, "pad-added", G_CALLBACK(_onNewPad), this);
gst_bin_add_many(GST_BIN(_pipeline), _source, _tee, decoderQueue, _decoderValve, recorderQueue, _recorderValve, nullptr);
pipelineUp = true;
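// Some sources expose a static src pad right away, while others create their
// pads dynamically; try an existing src pad first and only fall back to the
// pad-added signal if none is found yet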
GstPad* srcPad = nullptr;
GstIterator* it;
if ((it = gst_element_iterate_src_pads(_source)) != nullptr) {
GValue vpad = G_VALUE_INIT;
if (gst_iterator_next(it, &vpad) == GST_ITERATOR_OK) {
srcPad = GST_PAD(g_value_get_object(&vpad));
gst_object_ref(srcPad);
g_value_reset(&vpad);
}
gst_iterator_free(it);
it = nullptr;
}
if (srcPad != nullptr) {
_onNewSourcePad(srcPad);
gst_object_unref(srcPad);
srcPad = nullptr;
} else {
g_signal_connect(_source, "pad-added", G_CALLBACK(_onNewPad), this);
}
if(!gst_element_link_many(_tee, decoderQueue, _decoderValve, nullptr)) {
qCCritical(VideoReceiverLog) << "Unable to link decoder queue";
break;
......@@ -223,23 +256,36 @@ GstVideoReceiver::start(const QString& uri, unsigned timeout)
_source = nullptr;
}
}
_dispatchSignal([this](){
emit onStartComplete(STATUS_FAIL);
});
} else {
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-started");
qCDebug(VideoReceiverLog) << "Started";
qCDebug(VideoReceiverLog) << "Started" << _uri;
_dispatchSignal([this](){
emit onStartComplete(STATUS_OK);
});
}
}
void
GstVideoReceiver::stop(void)
{
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this]() {
if (_needDispatch()) {
_slotHandler.dispatch([this]() {
stop();
});
return;
}
qCDebug(VideoReceiverLog) << "Stopping";
if (_uri.isEmpty()) {
qCWarning(VideoReceiverLog) << "Stop called on empty URI";
return;
}
qCDebug(VideoReceiverLog) << "Stopping" << _uri;
if (_pipeline != nullptr) {
GstBus* bus;
......@@ -247,21 +293,29 @@ GstVideoReceiver::stop(void)
if ((bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline))) != nullptr) {
gst_bus_disable_sync_message_emission(bus);
gst_element_send_event(_pipeline, gst_event_new_eos());
g_signal_handlers_disconnect_by_data(bus, this);
GstMessage* msg;
gboolean recordingValveClosed = TRUE;
if((msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR))) != nullptr) {
if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
qCCritical(VideoReceiverLog) << "Error stopping pipeline!";
} else if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_EOS) {
qCDebug(VideoReceiverLog) << "End of stream received!";
}
g_object_get(_recorderValve, "drop", &recordingValveClosed, nullptr);
gst_message_unref(msg);
msg = nullptr;
} else {
qCCritical(VideoReceiverLog) << "gst_bus_timed_pop_filtered() failed";
if (recordingValveClosed == FALSE) {
gst_element_send_event(_pipeline, gst_event_new_eos());
GstMessage* msg;
if((msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_EOS|GST_MESSAGE_ERROR))) != nullptr) {
if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
qCCritical(VideoReceiverLog) << "Error stopping pipeline!";
} else if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_EOS) {
qCDebug(VideoReceiverLog) << "End of stream received!";
}
gst_message_unref(msg);
msg = nullptr;
} else {
qCCritical(VideoReceiverLog) << "gst_bus_timed_pop_filtered() failed";
}
}
gst_object_unref(bus);
......@@ -295,35 +349,41 @@ GstVideoReceiver::stop(void)
if (_streaming) {
_streaming = false;
qCDebug(VideoReceiverLog) << "Streaming stopped";
_notificationHandler.dispatch([this](){
emit streamingChanged();
qCDebug(VideoReceiverLog) << "Streaming stopped" << _uri;
_dispatchSignal([this](){
emit streamingChanged(_streaming);
});
} else {
qCDebug(VideoReceiverLog) << "Streaming did not start" << _uri;
}
}
qCDebug(VideoReceiverLog) << "Stopped";
qCDebug(VideoReceiverLog) << "Stopped" << _uri;
_dispatchSignal([this](){
emit onStopComplete(STATUS_OK);
});
}
void
GstVideoReceiver::startDecoding(void* sink)
{
if (sink == nullptr) {
qCCritical(VideoReceiverLog) << "VideoSink is NULL";
qCCritical(VideoReceiverLog) << "VideoSink is NULL" << _uri;
return;
}
if (_apiHandler.needDispatch()) {
if (_needDispatch()) {
GstElement* videoSink = GST_ELEMENT(sink);
gst_object_ref(videoSink);
_apiHandler.dispatch([this, videoSink]() mutable {
_slotHandler.dispatch([this, videoSink]() mutable {
startDecoding(videoSink);
gst_object_unref(videoSink);
});
return;
}
qCDebug(VideoReceiverLog) << "Starting decoding";
qCDebug(VideoReceiverLog) << "Starting decoding" << _uri;
if (_pipeline == nullptr) {
if (_videoSink != nullptr) {
......@@ -335,14 +395,20 @@ GstVideoReceiver::startDecoding(void* sink)
GstElement* videoSink = GST_ELEMENT(sink);
if(_videoSink != nullptr || _decoding) {
qCDebug(VideoReceiverLog) << "Already decoding!";
qCDebug(VideoReceiverLog) << "Already decoding!" << _uri;
_dispatchSignal([this](){
emit onStartDecodingComplete(STATUS_INVALID_STATE);
});
return;
}
GstPad* pad;
if ((pad = gst_element_get_static_pad(videoSink, "sink")) == nullptr) {
qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink";
qCCritical(VideoReceiverLog) << "Unable to find sink pad of video sink" << _uri;
_dispatchSignal([this](){
emit onStartDecodingComplete(STATUS_FAIL);
});
return;
}
......@@ -359,34 +425,47 @@ GstVideoReceiver::startDecoding(void* sink)
_removingDecoder = false;
if (!_streaming) {
_dispatchSignal([this](){
emit onStartDecodingComplete(STATUS_OK);
});
return;
}
if (!_addDecoder(_decoderValve)) {
qCCritical(VideoReceiverLog) << "_addDecoder() failed";
qCCritical(VideoReceiverLog) << "_addDecoder() failed" << _uri;
_dispatchSignal([this](){
emit onStartDecodingComplete(STATUS_FAIL);
});
return;
}
g_object_set(_decoderValve, "drop", FALSE, nullptr);
qCDebug(VideoReceiverLog) << "Decoding started";
qCDebug(VideoReceiverLog) << "Decoding started" << _uri;
_dispatchSignal([this](){
emit onStartDecodingComplete(STATUS_OK);
});
}
void
GstVideoReceiver::stopDecoding(void)
{
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this]() {
if (_needDispatch()) {
_slotHandler.dispatch([this]() {
stopDecoding();
});
return;
}
qCDebug(VideoReceiverLog) << "Stopping decoding";
qCDebug(VideoReceiverLog) << "Stopping decoding" << _uri;
// exit immediately if we are not decoding
if (_pipeline == nullptr || !_decoding) {
qCDebug(VideoReceiverLog) << "Not decoding!";
qCDebug(VideoReceiverLog) << "Not decoding!" << _uri;
_dispatchSignal([this](){
emit onStopDecodingComplete(STATUS_INVALID_STATE);
});
return;
}
......@@ -394,35 +473,51 @@ GstVideoReceiver::stopDecoding(void)
_removingDecoder = true;
_unlinkBranch(_decoderValve);
bool ret = _unlinkBranch(_decoderValve);
// FIXME: AV: it would be much better to emit onStopDecodingComplete() after decoding has really stopped
// (which happens later due to the async design), but for now this is acceptable...
_dispatchSignal([this, ret](){
emit onStopDecodingComplete(ret ? STATUS_OK : STATUS_FAIL);
});
}
void
GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
{
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this, videoFile, format]() {
startRecording(videoFile, format);
if (_needDispatch()) {
QString cachedVideoFile = videoFile;
_slotHandler.dispatch([this, cachedVideoFile, format]() {
startRecording(cachedVideoFile, format);
});
return;
}
qCDebug(VideoReceiverLog) << "Starting recording";
qCDebug(VideoReceiverLog) << "Starting recording" << _uri;
if (_pipeline == nullptr) {
qCDebug(VideoReceiverLog) << "Streaming is not active!";
qCDebug(VideoReceiverLog) << "Streaming is not active!" << _uri;
_dispatchSignal([this](){
emit onStartRecordingComplete(STATUS_INVALID_STATE);
});
return;
}
if (_recording) {
qCDebug(VideoReceiverLog) << "Already recording!";
qCDebug(VideoReceiverLog) << "Already recording!" << _uri;
_dispatchSignal([this](){
emit onStartRecordingComplete(STATUS_INVALID_STATE);
});
return;
}
qCDebug(VideoReceiverLog) << "New video file:" << videoFile;
qCDebug(VideoReceiverLog) << "New video file:" << videoFile << "" << _uri;
if ((_fileSink = _makeFileSink(videoFile, format)) == nullptr) {
qCCritical(VideoReceiverLog) << "_makeFileSink() failed";
qCCritical(VideoReceiverLog) << "_makeFileSink() failed" << _uri;
_dispatchSignal([this](){
emit onStartRecordingComplete(STATUS_FAIL);
});
return;
}
......@@ -433,7 +528,10 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
gst_bin_add(GST_BIN(_pipeline), _fileSink);
if (!gst_element_link(_recorderValve, _fileSink)) {
qCCritical(VideoReceiverLog) << "Failed to link valve and file sink";
qCCritical(VideoReceiverLog) << "Failed to link valve and file sink" << _uri;
_dispatchSignal([this](){
emit onStartRecordingComplete(STATUS_FAIL);
});
return;
}
......@@ -447,7 +545,10 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
GstPad* probepad;
if ((probepad = gst_element_get_static_pad(_recorderValve, "src")) == nullptr) {
qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed" << _uri;
_dispatchSignal([this](){
emit onStartRecordingComplete(STATUS_FAIL);
});
return;
}
......@@ -458,9 +559,10 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
g_object_set(_recorderValve, "drop", FALSE, nullptr);
_recording = true;
qCDebug(VideoReceiverLog) << "Recording started";
_notificationHandler.dispatch([this](){
emit recordingChanged();
qCDebug(VideoReceiverLog) << "Recording started" << _uri;
_dispatchSignal([this](){
emit onStartRecordingComplete(STATUS_OK);
emit recordingChanged(_recording);
});
}
......@@ -468,18 +570,21 @@ GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
void
GstVideoReceiver::stopRecording(void)
{
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this]() {
if (_needDispatch()) {
_slotHandler.dispatch([this]() {
stopRecording();
});
return;
}
qCDebug(VideoReceiverLog) << "Stopping recording";
qCDebug(VideoReceiverLog) << "Stopping recording" << _uri;
// exit immediately if we are not recording
if (_pipeline == nullptr || !_recording) {
qCDebug(VideoReceiverLog) << "Not recording!";
qCDebug(VideoReceiverLog) << "Not recording!" << _uri;
_dispatchSignal([this](){
emit onStopRecordingComplete(STATUS_INVALID_STATE);
});
return;
}
......@@ -487,22 +592,29 @@ GstVideoReceiver::stopRecording(void)
_removingRecorder = true;
_unlinkBranch(_recorderValve);
bool ret = _unlinkBranch(_recorderValve);
// FIXME: AV: it would be much better to emit onStopRecordingComplete() after recording has really stopped
// (which happens later due to the async design), but for now this is acceptable...
_dispatchSignal([this, ret](){
emit onStopRecordingComplete(ret ? STATUS_OK : STATUS_FAIL);
});
}
void
GstVideoReceiver::takeScreenshot(const QString& imageFile)
{
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this, imageFile]() {
takeScreenshot(imageFile);
if (_needDispatch()) {
QString cachedImageFile = imageFile;
_slotHandler.dispatch([this, cachedImageFile]() {
takeScreenshot(cachedImageFile);
});
return;
}
// FIXME: AV: record screenshot here
_notificationHandler.dispatch([this](){
emit screenshotComplete();
_dispatchSignal([this](){
emit onTakeScreenshotComplete(STATUS_NOT_IMPLEMENTED);
});
}
......@@ -515,7 +627,7 @@ const char* GstVideoReceiver::_kFileMux[FILE_FORMAT_MAX - FILE_FORMAT_MIN] = {
void
GstVideoReceiver::_watchdog(void)
{
_apiHandler.dispatch([this](){
_slotHandler.dispatch([this](){
if(_pipeline == nullptr) {
return;
}
......@@ -527,10 +639,11 @@ GstVideoReceiver::_watchdog(void)
}
if (now - _lastSourceFrameTime > _timeout) {
qCDebug(VideoReceiverLog) << "Stream timeout, no frames for " << now - _lastSourceFrameTime;
_notificationHandler.dispatch([this](){
qCDebug(VideoReceiverLog) << "Stream timeout, no frames for " << now - _lastSourceFrameTime << "" << _uri;
_dispatchSignal([this](){
emit timeout();
});
stop();
}
if (_decoding && !_removingDecoder) {
......@@ -539,10 +652,11 @@ GstVideoReceiver::_watchdog(void)
}
if (now - _lastVideoFrameTime > _timeout * 2) {
qCDebug(VideoReceiverLog) << "Video decoder timeout, no frames for " << now - _lastVideoFrameTime;
_notificationHandler.dispatch([this](){
qCDebug(VideoReceiverLog) << "Video decoder timeout, no frames for " << now - _lastVideoFrameTime << " " << _uri;
_dispatchSignal([this](){
emit timeout();
});
stop();
}
}
});
......@@ -552,7 +666,6 @@ void
GstVideoReceiver::_handleEOS(void)
{
if(_pipeline == nullptr) {
qCWarning(VideoReceiverLog) << "We should not be here";
return;
}
......@@ -646,6 +759,8 @@ GstVideoReceiver::_makeSource(const QString& uri)
break;
}
g_signal_connect(parser, "autoplug-query", G_CALLBACK(_filterParserCaps), nullptr);
gst_bin_add_many(GST_BIN(bin), source, parser, nullptr);
// FIXME: AV: Android does not determine MPEG2-TS via parsebin - have to explicitly state which demux to use
......@@ -672,7 +787,7 @@ GstVideoReceiver::_makeSource(const QString& uri)
gst_element_foreach_src_pad(source, _padProbe, &probeRes);
if (probeRes & 1) {
if (probeRes & 2) {
if (probeRes & 2 && _buffer >= 0) {
if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) == nullptr) {
qCCritical(VideoReceiverLog) << "gst_element_factory_make('rtpjitterbuffer') failed";
break;
......@@ -691,7 +806,7 @@ GstVideoReceiver::_makeSource(const QString& uri)
}
}
} else {
g_signal_connect(source, "pad-added", G_CALLBACK(_linkPadWithOptionalBuffer), parser);
g_signal_connect(source, "pad-added", G_CALLBACK(_linkPad), parser);
}
g_signal_connect(parser, "pad-added", G_CALLBACK(_wrapWithGhostPad), nullptr);
......@@ -847,9 +962,9 @@ GstVideoReceiver::_onNewSourcePad(GstPad* pad)
if (!_streaming) {
_streaming = true;
qCDebug(VideoReceiverLog) << "Streaming started";
_notificationHandler.dispatch([this](){
emit streamingChanged();
qCDebug(VideoReceiverLog) << "Streaming started" << _uri;
_dispatchSignal([this](){
emit streamingChanged(_streaming);
});
}
......@@ -868,7 +983,7 @@ GstVideoReceiver::_onNewSourcePad(GstPad* pad)
g_object_set(_decoderValve, "drop", FALSE, nullptr);
qCDebug(VideoReceiverLog) << "Decoding started";
qCDebug(VideoReceiverLog) << "Decoding started" << _uri;
}
void
......@@ -876,6 +991,8 @@ GstVideoReceiver::_onNewDecoderPad(GstPad* pad)
{
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-new-decoder-pad");
qCDebug(VideoReceiverLog) << "_onNewDecoderPad" << _uri;
if (!_addVideoSink(pad)) {
qCCritical(VideoReceiverLog) << "_addVideoSink() failed";
}
......@@ -915,11 +1032,6 @@ GstVideoReceiver::_addDecoder(GstElement* src)
gst_caps_unref(caps);
caps = nullptr;
// FIXME: AV: check if srcpad exists - if it does then no need to wait for new pad
// int probeRes = 0;
// gst_element_foreach_src_pad(source, _padProbe, &probeRes);
g_signal_connect(_decoder, "pad-added", G_CALLBACK(_onNewPad), this);
gst_bin_add(GST_BIN(_pipeline), _decoder);
gst_element_sync_state_with_parent(_decoder);
......@@ -931,6 +1043,31 @@ GstVideoReceiver::_addDecoder(GstElement* src)
return false;
}
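// Same pattern as in start(): if the decoder already exposes a src pad, link it
// immediately, otherwise wait for the pad-added signal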
GstPad* srcPad = nullptr;
GstIterator* it;
if ((it = gst_element_iterate_src_pads(_decoder)) != nullptr) {
GValue vpad = G_VALUE_INIT;
if (gst_iterator_next(it, &vpad) == GST_ITERATOR_OK) {
srcPad = GST_PAD(g_value_get_object(&vpad));
gst_object_ref(srcPad);
g_value_reset(&vpad);
}
gst_iterator_free(it);
it = nullptr;
}
if (srcPad != nullptr) {
_onNewDecoderPad(srcPad);
gst_object_unref(srcPad);
srcPad = nullptr;
} else {
g_signal_connect(_decoder, "pad-added", G_CALLBACK(_onNewPad), this);
}
return true;
}
......@@ -955,6 +1092,8 @@ GstVideoReceiver::_addVideoSink(GstPad* pad)
gst_element_sync_state_with_parent(_videoSink);
g_object_set(_videoSink, "sync", _buffer >= 0, NULL);
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-videosink");
if (caps != nullptr) {
......@@ -964,21 +1103,19 @@ GstVideoReceiver::_addVideoSink(GstPad* pad)
gint width, height;
gst_structure_get_int(s, "width", &width);
gst_structure_get_int(s, "height", &height);
_setVideoSize(QSize(width, height));
_dispatchSignal([this, width, height](){
emit videoSizeChanged(QSize(width, height));
});
}
gst_caps_unref(caps);
caps = nullptr;
} else {
_setVideoSize(QSize(0, 0));
_dispatchSignal([this](){
emit videoSizeChanged(QSize(0, 0));
});
}
_decoding = true;
qCDebug(VideoReceiverLog) << "Decoding started";
_notificationHandler.dispatch([this](){
emit decodingChanged();
});
return true;
}
......@@ -992,6 +1129,13 @@ void
GstVideoReceiver::_noteVideoSinkFrame(void)
{
_lastVideoFrameTime = QDateTime::currentSecsSinceEpoch();
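// Only declare decoding active once the first frame actually reaches the video sink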
if (!_decoding) {
_decoding = true;
qCDebug(VideoReceiverLog) << "Decoding started";
_dispatchSignal([this](){
emit decodingChanged(_decoding);
});
}
}
void
......@@ -1002,14 +1146,14 @@ GstVideoReceiver::_noteEndOfStream(void)
// -Unlink the branch from the src pad
// -Send an EOS event at the beginning of that branch
void
bool
GstVideoReceiver::_unlinkBranch(GstElement* from)
{
GstPad* src;
if ((src = gst_element_get_static_pad(from, "src")) == nullptr) {
qCCritical(VideoReceiverLog) << "gst_element_get_static_pad() failed";
return;
return false;
}
GstPad* sink;
......@@ -1018,7 +1162,7 @@ GstVideoReceiver::_unlinkBranch(GstElement* from)
gst_object_unref(src);
src = nullptr;
qCCritical(VideoReceiverLog) << "gst_pad_get_peer() failed";
return;
return false;
}
if (!gst_pad_unlink(src, sink)) {
......@@ -1027,7 +1171,7 @@ GstVideoReceiver::_unlinkBranch(GstElement* from)
gst_object_unref(sink);
sink = nullptr;
qCCritical(VideoReceiverLog) << "gst_pad_unlink() failed";
return;
return false;
}
gst_object_unref(src);
......@@ -1039,11 +1183,14 @@ GstVideoReceiver::_unlinkBranch(GstElement* from)
gst_object_unref(sink);
sink = nullptr;
if (ret) {
qCDebug(VideoReceiverLog) << "Branch EOS was sent";
} else {
if (!ret) {
qCCritical(VideoReceiverLog) << "Branch EOS was NOT sent";
return false;
}
qCDebug(VideoReceiverLog) << "Branch EOS was sent";
return true;
}
void
......@@ -1092,8 +1239,8 @@ GstVideoReceiver::_shutdownDecodingBranch(void)
if (_decoding) {
_decoding = false;
qCDebug(VideoReceiverLog) << "Decoding stopped";
_notificationHandler.dispatch([this](){
emit decodingChanged();
_dispatchSignal([this](){
emit decodingChanged(_decoding);
});
}
......@@ -1113,14 +1260,28 @@ GstVideoReceiver::_shutdownRecordingBranch(void)
if (_recording) {
_recording = false;
qCDebug(VideoReceiverLog) << "Recording stopped";
_notificationHandler.dispatch([this](){
emit recordingChanged();
_dispatchSignal([this](){
emit recordingChanged(_recording);
});
}
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording-stopped");
}
bool
GstVideoReceiver::_needDispatch(void)
{
return _slotHandler.needDispatch();
}
void
GstVideoReceiver::_dispatchSignal(std::function<void()> emitter)
{
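// Track the nesting depth of synchronous signal emission while emitter() runs;
// presumably this lets dispatch logic detect re-entrant emission (assumption)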
_signalDepth += 1;
emitter();
_signalDepth -= 1;
}
gboolean
GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
......@@ -1142,18 +1303,20 @@ GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
}
if (error != nullptr) {
qCCritical(VideoReceiverLog) << error->message;
qCCritical(VideoReceiverLog) << "GStreamer error:" << error->message;
g_error_free(error);
error = nullptr;
}
pThis->_apiHandler.dispatch([pThis](){
pThis->_slotHandler.dispatch([pThis](){
qCDebug(VideoReceiverLog) << "Stopping because of error";
pThis->stop();
});
} while(0);
break;
case GST_MESSAGE_EOS:
pThis->_apiHandler.dispatch([pThis](){
pThis->_slotHandler.dispatch([pThis](){
qCDebug(VideoReceiverLog) << "Received EOS";
pThis->_handleEOS();
});
break;
......@@ -1174,7 +1337,8 @@ GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
}
if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
pThis->_apiHandler.dispatch([pThis](){
pThis->_slotHandler.dispatch([pThis](){
qCDebug(VideoReceiverLog) << "Received branch EOS";
pThis->_handleEOS();
});
}
......@@ -1236,57 +1400,8 @@ GstVideoReceiver::_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer d
}
void
GstVideoReceiver::_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
GstVideoReceiver::_linkPad(GstElement* element, GstPad* pad, gpointer data)
{
bool isRtpPad = false;
GstCaps* filter;
if ((filter = gst_caps_from_string("application/x-rtp")) != nullptr) {
GstCaps* caps = gst_pad_query_caps(pad, nullptr);
if (caps != nullptr) {
if (!gst_caps_is_any(caps) && gst_caps_can_intersect(caps, filter)) {
isRtpPad = true;
}
gst_caps_unref(caps);
caps = nullptr;
}
gst_caps_unref(filter);
filter = nullptr;
}
if (isRtpPad) {
GstElement* buffer;
if ((buffer = gst_element_factory_make("rtpjitterbuffer", nullptr)) != nullptr) {
gst_bin_add(GST_BIN(GST_ELEMENT_PARENT(element)), buffer);
gst_element_sync_state_with_parent(buffer);
GstPad* sinkpad = gst_element_get_static_pad(buffer, "sink");
if (sinkpad != nullptr) {
const GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);
gst_object_unref(sinkpad);
sinkpad = nullptr;
if (ret == GST_PAD_LINK_OK) {
pad = gst_element_get_static_pad(buffer, "src");
element = buffer;
} else {
qCDebug(VideoReceiverLog) << "Partially failed - gst_pad_link()";
}
} else {
qCDebug(VideoReceiverLog) << "Partially failed - gst_element_get_static_pad()";
}
} else {
qCDebug(VideoReceiverLog) << "Partially failed - gst_element_factory_make('rtpjitterbuffer')";
}
}
gchar* name;
if ((name = gst_pad_get_name(pad)) != nullptr) {
......@@ -1331,6 +1446,58 @@ GstVideoReceiver::_padProbe(GstElement* element, GstPad* pad, gpointer user_data
return TRUE;
}
gboolean
GstVideoReceiver::_filterParserCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
Q_UNUSED(bin)
Q_UNUSED(pad)
Q_UNUSED(element)
Q_UNUSED(data)
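// For H.264/H.265 input, steer the parser's caps negotiation towards the
// packetized stream-formats (avc/hvc1), which downstream decoders and muxers
// generally handle better than raw byte-stream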
if (GST_QUERY_TYPE(query) != GST_QUERY_CAPS) {
return FALSE;
}
GstCaps* srcCaps;
gst_query_parse_caps(query, &srcCaps);
if (srcCaps == nullptr || gst_caps_is_any(srcCaps)) {
return FALSE;
}
GstCaps* sinkCaps = nullptr;
GstCaps* filter;
if (sinkCaps == nullptr && (filter = gst_caps_from_string("video/x-h264")) != nullptr) {
if (gst_caps_can_intersect(srcCaps, filter)) {
sinkCaps = gst_caps_from_string("video/x-h264,stream-format=avc");
}
gst_caps_unref(filter);
filter = nullptr;
} else if (sinkCaps == nullptr && (filter = gst_caps_from_string("video/x-h265")) != nullptr) {
if (gst_caps_can_intersect(srcCaps, filter)) {
sinkCaps = gst_caps_from_string("video/x-h265,stream-format=hvc1");
}
gst_caps_unref(filter);
filter = nullptr;
}
if (sinkCaps == nullptr) {
return FALSE;
}
gst_query_set_caps_result(query, sinkCaps);
gst_caps_unref(sinkCaps);
sinkCaps = nullptr;
return TRUE;
}
gboolean
GstVideoReceiver::_autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
......@@ -1505,7 +1672,9 @@ GstVideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer us
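// Recording intentionally starts on a keyframe so the written file begins
// with an independently decodable frame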
qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
pThis->recordingStarted();
pThis->_dispatchSignal([pThis]() {
pThis->recordingStarted();
});
return GST_PAD_PROBE_REMOVE;
}
......@@ -89,7 +89,7 @@ public:
~GstVideoReceiver(void);
public slots:
virtual void start(const QString& uri, unsigned timeout);
virtual void start(const QString& uri, unsigned timeout, int buffer = 0);
virtual void stop(void);
virtual void startDecoding(void* sink);
virtual void stopDecoding(void);
......@@ -102,11 +102,6 @@ protected slots:
virtual void _handleEOS(void);
protected:
void _setVideoSize(const QSize& size) {
_videoSize = ((quint32)size.width() << 16) | (quint32)size.height();
emit videoSizeChanged();
}
virtual GstElement* _makeSource(const QString& uri);
virtual GstElement* _makeDecoder(GstCaps* caps, GstElement* videoSink);
virtual GstElement* _makeFileSink(const QString& videoFile, FILE_FORMAT format);
......@@ -118,16 +113,19 @@ protected:
virtual void _noteTeeFrame(void);
virtual void _noteVideoSinkFrame(void);
virtual void _noteEndOfStream(void);
virtual void _unlinkBranch(GstElement* from);
virtual bool _unlinkBranch(GstElement* from);
virtual void _shutdownDecodingBranch (void);
virtual void _shutdownRecordingBranch(void);
private:
bool _needDispatch(void);
void _dispatchSignal(std::function<void()> emitter);
static gboolean _onBusMessage(GstBus* bus, GstMessage* message, gpointer user_data);
static void _onNewPad(GstElement* element, GstPad* pad, gpointer data);
static void _wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data);
static void _linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data);
static void _linkPad(GstElement* element, GstPad* pad, gpointer data);
static gboolean _padProbe(GstElement* element, GstPad* pad, gpointer user_data);
static gboolean _filterParserCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data);
static gboolean _autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data);
static gboolean _autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data);
static gboolean _autoplugQuery(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data);
......@@ -136,6 +134,9 @@ private:
static GstPadProbeReturn _eosProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
static GstPadProbeReturn _keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
bool _streaming;
bool _decoding;
bool _recording;
bool _removingDecoder;
bool _removingRecorder;
GstElement* _source;
......@@ -157,10 +158,12 @@ private:
//-- RTSP UDP reconnect timeout
uint64_t _udpReconnect_us;
QString _uri;
unsigned _timeout;
int _buffer;
Worker _apiHandler;
Worker _notificationHandler;
Worker _slotHandler;
uint32_t _signalDepth;
bool _endOfStream;
......
......@@ -39,7 +39,7 @@ gst-launch-1.0 videotestsrc ! video/x-raw,width=640,height=480 ! videoconvert !
On the receiving end, if you want to test it from the command line, you can use something like:
```
gst-launch-1.0 udpsrc port=5600 caps='application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264' ! rtph264depay ! h264parse ! avdec_h264 ! autovideosink fps-update-interval=1000 sync=false
gst-launch-1.0 udpsrc port=5600 caps='application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264' ! rtpjitterbuffer ! rtph264depay ! h264parse ! avdec_h264 ! autovideosink fps-update-interval=1000 sync=false
```
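The `rtpjitterbuffer` element reorders and de-jitters incoming RTP packets before depayloading; without it, packet reordering on lossy links tends to show up as decoding artifacts. For an H.265 stream the receiving pipeline looks similar - an untested sketch, assuming `avdec_h265` from gst-libav is available:
```
gst-launch-1.0 udpsrc port=5600 caps='application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H265' ! rtpjitterbuffer ! rtph265depay ! h265parse ! avdec_h265 ! autovideosink fps-update-interval=1000 sync=false
```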
### Additional Protocols
......
......@@ -17,9 +17,6 @@
#include <QObject>
#include <QSize>
#include <QQuickItem>
#include <atomic>
class VideoReceiver : public QObject
{
......@@ -28,10 +25,6 @@ class VideoReceiver : public QObject
public:
explicit VideoReceiver(QObject* parent = nullptr)
: QObject(parent)
, _streaming(false)
, _decoding(false)
, _recording(false)
, _videoSize(0)
{}
virtual ~VideoReceiver(void) {}
......@@ -44,50 +37,42 @@ public:
FILE_FORMAT_MAX
} FILE_FORMAT;
Q_PROPERTY(bool streaming READ streaming NOTIFY streamingChanged)
Q_PROPERTY(bool decoding READ decoding NOTIFY decodingChanged)
Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
Q_PROPERTY(QSize videoSize READ videoSize NOTIFY videoSizeChanged)
bool streaming(void) {
return _streaming;
}
bool decoding(void) {
return _decoding;
}
bool recording(void) {
return _recording;
}
typedef enum {
STATUS_OK = 0,
STATUS_FAIL,
STATUS_INVALID_STATE,
STATUS_INVALID_URL,
STATUS_NOT_IMPLEMENTED
} STATUS;
QSize videoSize(void) {
const quint32 size = _videoSize;
return QSize((size >> 16) & 0xFFFF, size & 0xFFFF);
}
Q_ENUM(STATUS)
signals:
void timeout(void);
void streamingChanged(void);
void decodingChanged(void);
void recordingChanged(void);
void streamingChanged(bool active);
void decodingChanged(bool active);
void recordingChanged(bool active);
void recordingStarted(void);
void videoSizeChanged(void);
void screenshotComplete(void);
void videoSizeChanged(QSize size);
void onStartComplete(STATUS status);
void onStopComplete(STATUS status);
void onStartDecodingComplete(STATUS status);
void onStopDecodingComplete(STATUS status);
void onStartRecordingComplete(STATUS status);
void onStopRecordingComplete(STATUS status);
void onTakeScreenshotComplete(STATUS status);
public slots:
virtual void start(const QString& uri, unsigned timeout) = 0;
// buffer:
// -1 - disable buffer and video sync
// 0 - default buffer length
// N - buffer length, ms
virtual void start(const QString& uri, unsigned timeout, int buffer = 0) = 0;
virtual void stop(void) = 0;
virtual void startDecoding(void* sink) = 0;
virtual void stopDecoding(void) = 0;
virtual void startRecording(const QString& videoFile, FILE_FORMAT format) = 0;
virtual void stopRecording(void) = 0;
virtual void takeScreenshot(const QString& imageFile) = 0;
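// Usage sketch (hedged): a typical call sequence against a concrete implementation
// such as GstVideoReceiver; `sink` is assumed to come from QGCCorePlugin::createVideoSink()
// and FILE_FORMAT_MIN (0, i.e. MKV per the app README) is used as the recording format:
//
//   receiver->start(uri, 2 /* timeout, seconds */, -1 /* low latency: no buffer, no sync */);
//   receiver->startDecoding(sink);
//   receiver->startRecording(videoFile, VideoReceiver::FILE_FORMAT_MIN);
//   ...
//   receiver->stopRecording();
//   receiver->stop();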
protected:
std::atomic<bool> _streaming;
std::atomic<bool> _decoding;
std::atomic<bool> _recording;
std::atomic<quint32>_videoSize;
};
......@@ -485,6 +485,15 @@ void* QGCCorePlugin::createVideoSink(QObject* parent, QQuickItem* widget)
#endif
}
void QGCCorePlugin::releaseVideoSink(void* sink)
{
#if defined(QGC_GST_STREAMING)
GStreamer::releaseVideoSink(sink);
#else
Q_UNUSED(sink)
#endif
}
bool QGCCorePlugin::guidedActionsControllerLogging() const
{
return GuidedActionsControllerLog().isDebugEnabled();
......
......@@ -119,6 +119,8 @@ public:
virtual VideoReceiver* createVideoReceiver(QObject* parent);
/// Allows the plugin to override the creation of VideoSink.
virtual void* createVideoSink(QObject* parent, QQuickItem* widget);
/// Allows the plugin to override the release of VideoSink.
virtual void releaseVideoSink(void* sink);
/// Allows the plugin to see all mavlink traffic to a vehicle
/// @return true: Allow vehicle to continue processing, false: Vehicle should not process message
......