Commit bf928283 authored by Andrew Voznytsa

Make VideoReceiver standalone

parent bf304e55
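The VideoReceiver code is split out so it can be built and exercised on its own via the new VideoReceiverApp project below. A minimal usage sketch, based on the options parsed in VideoReceiverApp/main.cpp (URL and file name are illustrative):

./VideoReceiverApp rtsp://192.168.0.10:8554/stream --timeout 5 --connect 3 --record flight --format 0

The QML video window is only created when the executable name starts with 'Q' or 'q' (see isQtApp() in main.cpp); otherwise the app runs on a plain QCoreApplication and the sink can be selected with --video-sink (0 = autovideosink, 1 = fakesink).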
@@ -8,5 +8,5 @@
path = libs/OpenSSL/android_openssl
url = https://github.com/Auterion/android_openssl
[submodule "libs/gst-plugins-good"]
path = libs/gst-plugins-good
path = libs/qmlglsink/gst-plugins-good
url = https://github.com/mavlink/gst-plugins-good.git
@@ -102,8 +102,6 @@ if (GST_FOUND)
-DQGC_GST_TAISYNC_ENABLED
-DQGC_GST_MICROHARD_ENABLED
)
include(qmlglsink.cmake)
endif()
add_definitions(
cmake_minimum_required(VERSION 3.10)
project(VideoReceiverApp LANGUAGES C CXX)
set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug;Release;RelWithDebInfo;MinSizeRel;Coverage")
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_AUTOMOC ON)
set(CMAKE_AUTOUIC ON)
set(CMAKE_AUTORCC ON)
set(CMAKE_INCLUDE_CURRENT_DIR ON)
include(FeatureSummary)
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
add_compile_options(-Wall -Wextra -Wno-address-of-packed-member)
endif()
# CMake build type
# Debug Release RelWithDebInfo MinSizeRel Coverage
if (NOT CMAKE_BUILD_TYPE)
# default to release with debug symbols
set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING "Build type" FORCE)
endif()
set(QGC_ROOT ${CMAKE_SOURCE_DIR}/..)
# Add the folder containing supporting CMake functions
list(APPEND CMAKE_MODULE_PATH ${QGC_ROOT}/cmake)
# Configure Qt5 to get necessary variables
include(Qt5QGCConfiguration)
message(STATUS "Build Type: ${CMAKE_BUILD_TYPE}")
message(STATUS "Qt version: ${QT_VERSION}")
message(STATUS "Qt spec: ${QT_MKSPEC}")
set(COMPANY "Auterion")
set(COPYRIGHT "Copyright (c) 2020 VideoReceiverApp. All rights reserved.")
set(IDENTIFIER "labs.auterion.VideoReceiverApp")
include(Git)
message(STATUS "VideoReceiverApp version: ${GIT_VERSION}")
#=============================================================================
# ccache
#
option(CCACHE "Use ccache if available" ON)
find_program(CCACHE_PROGRAM ccache)
if (CCACHE AND CCACHE_PROGRAM AND NOT DEFINED ENV{CCACHE_DISABLE})
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE "${CCACHE_PROGRAM}")
endif()
#=============================================================================
# Compile QML
#
option(COMPILE_QML "Pre-compile QML files using the Qt Quick compiler." FALSE)
add_feature_info(COMPILE_QML COMPILE_QML "Pre-compile QML files using the Qt Quick compiler.")
if(COMPILE_QML)
find_package(Qt5QuickCompiler)
set_package_properties(Qt5QuickCompiler PROPERTIES
DESCRIPTION "Pre-compile QML files using the Qt Quick compiler."
TYPE OPTIONAL
)
endif()
#=============================================================================
# Debug QML
#
option(DEBUG_QML "Build VideoReceiverApp with QML debugging/profiling support." FALSE)
add_feature_info(DEBUG_QML DEBUG_QML "Build VideoReceiverApp with QML debugging/profiling support.")
if(DEBUG_QML)
message(STATUS "To enable the QML debugger/profiler, run with: '-qmljsdebugger=port:1234'")
add_definitions(-DQMLJSDEBUGGER)
add_definitions(-DQT_DECLARATIVE_DEBUG)
add_definitions(-DQT_QML_DEBUG)
endif()
#=============================================================================
# GStreamer
#
find_package(PkgConfig)
pkg_check_modules(GST
gstreamer-1.0>=1.14
gstreamer-video-1.0>=1.14
gstreamer-gl-1.0>=1.14
egl
)
if (GST_FOUND)
include_directories(
${GST_INCLUDE_DIRS}
)
endif()
#=============================================================================
# Qt5
#
find_package(Qt5 ${QT_VERSION}
COMPONENTS
Bluetooth
Charts
Concurrent
Core
Location
Multimedia
Network
Positioning
Quick
QuickWidgets
OpenGL
Sql
Svg
Test
TextToSpeech
Widgets
Xml
REQUIRED
HINTS
${QT_LIBRARY_HINTS}
)
# Sets the default flags for compilation and linking.
include(CompileOptions)
include_directories(
${QGC_ROOT}/src
${CMAKE_CURRENT_BINARY_DIR}
${Qt5Location_PRIVATE_INCLUDE_DIRS}
VideoReceiver
)
add_subdirectory(${QGC_ROOT}/libs/qmlglsink qmlglsink.build)
add_subdirectory(${QGC_ROOT}/src/VideoReceiver VideoReceiver.build)
set(VIDEORECEIVERAPP_SOURCES main.cpp ${QGC_ROOT}/src/QGCLoggingCategory.cc)
set(VIDEORECEIVERAPP_RESOURCES qml.qrc)
if(ANDROID)
add_library(VideoReceiverApp SHARED ${VIDEORECEIVERAPP_SOURCES} ${VIDEORECEIVERAPP_RESOURCES})
else()
add_executable(VideoReceiverApp ${VIDEORECEIVERAPP_SOURCES} ${VIDEORECEIVERAPP_RESOURCES})
endif()
target_link_libraries(VideoReceiverApp
PRIVATE
VideoReceiver
Qt5::Core
Qt5::Multimedia
Qt5::OpenGL
Qt5::Quick
Qt5::QuickWidgets
)
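For reference, a typical out-of-source build of this stand-alone project (paths, generator and options are illustrative; Qt and GStreamer must be discoverable on the build host):

cd VideoReceiverApp
mkdir build && cd build
cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
cmake --build .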
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDisplayName</key>
<string>VideoReceiverApp</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>NSHumanReadableCopyright</key>
<string>Open Source Flight Systems GmbH - Internal Build</string>
<key>CFBundleIconFile</key>
<string></string>
<key>CFBundleIdentifier</key>
<string>labs.auterion.VideoReceiverApp</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>0.0.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UIFileSharingEnabled</key>
<true/>
</dict>
</plist>
<?xml version="1.0"?>
<manifest package="labs.mavlink.VideoReceiverApp" xmlns:android="http://schemas.android.com/apk/res/android" android:versionName="1" android:versionCode="100000" android:installLocation="auto">
<application android:hardwareAccelerated="true" android:name="org.qtproject.qt5.android.bindings.QtApplication" android:label="-- %%INSERT_APP_NAME%% --" android:icon="@drawable/icon">
<activity android:configChanges="orientation|uiMode|screenLayout|screenSize|smallestScreenSize|locale|fontScale|keyboard|keyboardHidden|navigation" android:name="labs.mavlink.VideoReceiverApp.QGLSinkActivity" android:label="-- %%INSERT_APP_NAME%% --" android:screenOrientation="sensorLandscape" android:launchMode="singleTask" android:keepScreenOn="true">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
<action android:name="android.hardware.usb.action.USB_DEVICE_ATTACHED"/>
<action android:name="android.hardware.usb.action.USB_DEVICE_DETACHED"/>
<action android:name="android.bluetooth.device.action.ACL_CONNECTED"/>
<action android:name="android.bluetooth.device.action.ACL_DISCONNECTED"/>
<action android:name="android.hardware.usb.action.USB_ACCESSORY_ATTACHED"/>
</intent-filter>
<!-- Rest of Standard Manifest -->
<meta-data android:name="android.app.lib_name" android:value="-- %%INSERT_APP_LIB_NAME%% --"/>
<meta-data android:name="android.app.qt_sources_resource_id" android:resource="@array/qt_sources"/>
<meta-data android:name="android.app.repository" android:value="default"/>
<meta-data android:name="android.app.qt_libs_resource_id" android:resource="@array/qt_libs"/>
<meta-data android:name="android.app.bundled_libs_resource_id" android:resource="@array/bundled_libs"/>
<!-- Deploy Qt libs as part of package -->
<meta-data android:name="android.app.bundle_local_qt_libs" android:value="-- %%BUNDLE_LOCAL_QT_LIBS%% --"/>
<meta-data android:name="android.app.bundled_in_lib_resource_id" android:resource="@array/bundled_in_lib"/>
<meta-data android:name="android.app.bundled_in_assets_resource_id" android:resource="@array/bundled_in_assets"/>
<!-- Run with local libs -->
<meta-data android:name="android.app.use_local_qt_libs" android:value="-- %%USE_LOCAL_QT_LIBS%% --"/>
<meta-data android:name="android.app.libs_prefix" android:value="/data/local/tmp/qt/"/>
<meta-data android:name="android.app.load_local_libs" android:value="-- %%INSERT_LOCAL_LIBS%% --"/>
<meta-data android:name="android.app.load_local_jars" android:value="-- %%INSERT_LOCAL_JARS%% --"/>
<meta-data android:name="android.app.static_init_classes" android:value="-- %%INSERT_INIT_CLASSES%% --"/>
<!-- Messages maps -->
<meta-data android:value="@string/ministro_not_found_msg" android:name="android.app.ministro_not_found_msg"/>
<meta-data android:value="@string/ministro_needed_msg" android:name="android.app.ministro_needed_msg"/>
<meta-data android:value="@string/fatal_error_msg" android:name="android.app.fatal_error_msg"/>
<!-- Messages maps -->
<!-- Splash screen -->
<!--
<meta-data android:name="android.app.splash_screen_drawable" android:resource="@drawable/logo"/>
-->
<!-- Splash screen -->
<!-- Background running -->
<!-- Warning: changing this value to true may cause unexpected crashes if the
application still tries to draw after the
"applicationStateChanged(Qt::ApplicationSuspended)"
signal is sent! -->
<meta-data android:name="android.app.background_running" android:value="false"/>
<!-- Background running -->
</activity>
</application>
<uses-sdk android:minSdkVersion="16" android:targetSdkVersion="28"/>
<!-- Needed to keep working while 'asleep' -->
<!-- The following comment will be replaced upon deployment with default permissions based on the dependencies of the application.
Remove the comment if you do not require these default permissions. -->
<!-- %%INSERT_PERMISSIONS -->
<!-- Support devices without USB host mode since there are other connection types -->
<uses-feature android:name="android.hardware.usb.host" android:required="false"/>
<!-- Support devices without Bluetooth since there are other connection types -->
<uses-feature android:name="android.hardware.bluetooth" android:required="false"/>
<!-- Support devices that don't have location services -->
<uses-feature android:name="android.hardware.location.gps" android:required="false"/>
<uses-feature android:name="android.hardware.location.network" android:required="false"/>
<uses-feature android:name="android.hardware.location" android:required="false"/>
<uses-feature android:name="android.hardware.usb.accessory"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.WRITE_INTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<!-- The following comment will be replaced upon deployment with default features based on the dependencies of the application.
Remove the comment if you do not require these default features. -->
<!-- %%INSERT_FEATURES -->
</manifest>
<?xml version="1.0"?>
<manifest android:versionName="@QT_ANDROID_APP_VERSION@" package="@QT_ANDROID_APP_PACKAGE_NAME@" android:installLocation="auto" xmlns:android="http://schemas.android.com/apk/res/android" android:versionCode="@QT_ANDROID_APP_VERSION_CODE@">
<application android:label="@QT_ANDROID_APP_NAME@" android:name="org.qtproject.qt5.android.bindings.QtApplication">
<activity android:label="@QT_ANDROID_APP_NAME@" android:name="org.qtproject.qt5.android.bindings.QtActivity" android:screenOrientation="unspecified" android:configChanges="orientation|uiMode|screenLayout|screenSize|smallestScreenSize|locale|fontScale|keyboard|keyboardHidden|navigation">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
<meta-data android:name="android.app.lib_name" android:value="-- %%INSERT_APP_LIB_NAME%% --"/>
<meta-data android:name="android.app.qt_sources_resource_id" android:resource="@array/qt_sources"/>
<meta-data android:name="android.app.repository" android:value="default"/>
<meta-data android:name="android.app.qt_libs_resource_id" android:resource="@array/qt_libs"/>
<meta-data android:name="android.app.bundled_libs_resource_id" android:resource="@array/bundled_libs"/>
<!-- Deploy Qt libs as part of package -->
<meta-data android:name="android.app.bundle_local_qt_libs" android:value="-- %%BUNDLE_LOCAL_QT_LIBS%% --"/>
<meta-data android:name="android.app.bundled_in_lib_resource_id" android:resource="@array/bundled_in_lib"/>
<meta-data android:name="android.app.bundled_in_assets_resource_id" android:resource="@array/bundled_in_assets"/>
<!-- Run with local libs -->
<meta-data android:name="android.app.use_local_qt_libs" android:value="-- %%USE_LOCAL_QT_LIBS%% --"/>
<meta-data android:name="android.app.libs_prefix" android:value="/data/local/tmp/qt/"/>
<meta-data android:name="android.app.load_local_libs" android:value="-- %%INSERT_LOCAL_LIBS%% --"/>
<meta-data android:name="android.app.load_local_jars" android:value="-- %%INSERT_LOCAL_JARS%% --"/>
<meta-data android:name="android.app.static_init_classes" android:value="-- %%INSERT_INIT_CLASSES%% --"/>
<!-- Messages maps -->
<!--<meta-data android:name="android.app.ministro_not_found_msg" android:value="@string/ministro_not_found_msg"/>
<meta-data android:name="android.app.ministro_needed_msg" android:value="@string/ministro_needed_msg"/>
<meta-data android:name="android.app.fatal_error_msg" android:value="@string/fatal_error_msg"/>-->
</activity>
</application>
<supports-screens android:anyDensity="true" android:normalScreens="true" android:smallScreens="true" android:largeScreens="true"/>
<uses-sdk android:minSdkVersion="18" android:targetSdkVersion="19"/>
<uses-permission android:name="android.permission.INTERNET" />
</manifest>
buildscript {
repositories {
maven {
url "http://repo1.maven.org/maven2"
}
}
dependencies {
classpath 'com.android.tools.build:gradle:1.1.0'
}
}
allprojects {
repositories {
jcenter()
}
}
apply plugin: 'com.android.application'
dependencies {
compile fileTree(dir: 'libs', include: ['*.jar'])
}
android {
/*******************************************************
* The following variables:
* - androidBuildToolsVersion,
* - androidCompileSdkVersion
* - qt5AndroidDir - holds the path to qt android files
* needed to build any Qt application
* on Android.
*
* are defined in gradle.properties file. This file is
* updated by QtCreator and androiddeployqt tools.
* Changing them manually might break the compilation!
*******************************************************/
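/* For illustration only: a gradle.properties written by androiddeployqt / Qt Creator
   typically defines values along these lines (exact values depend on the local SDK and Qt):
   androidCompileSdkVersion=28
   androidBuildToolsVersion=28.0.3
   qt5AndroidDir=<Qt install>/android_armv7/src/android/java
*/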
compileSdkVersion androidCompileSdkVersion.toInteger()
buildToolsVersion androidBuildToolsVersion
sourceSets {
main {
manifest.srcFile 'AndroidManifest.xml'
java.srcDirs = [qt5AndroidDir + '/src', 'src', 'java']
aidl.srcDirs = [qt5AndroidDir + '/src', 'src', 'aidl']
res.srcDirs = [qt5AndroidDir + '/res', 'res']
resources.srcDirs = ['src']
renderscript.srcDirs = ['src']
assets.srcDirs = ['assets']
jniLibs.srcDirs = ['libs']
}
}
aaptOptions {
cruncherEnabled = false
}
lintOptions {
abortOnError false
}
}
#Wed Apr 10 15:27:10 PDT 2013
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-2.2.1-all.zip
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
echo "$*"
}
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# For Cygwin, ensure paths are in UNIX format before anything is touched.
if $cygwin ; then
[ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
fi
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >&-
APP_HOME="`pwd -P`"
cd "$SAVED" >&-
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS and GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
<?xml version='1.0' encoding='utf-8'?>
<resources>
<array name="qt_sources">
<item>https://download.qt-project.org/ministro/android/qt5/qt-5.4</item>
</array>
<!-- The following is handled automatically by the deployment tool. It should
not be edited manually. -->
<array name="bundled_libs">
<!-- %%INSERT_EXTRA_LIBS%% -->
</array>
<array name="qt_libs">
<!-- %%INSERT_QT_LIBS%% -->
</array>
<array name="bundled_in_lib">
<!-- %%INSERT_BUNDLED_IN_LIB%% -->
</array>
<array name="bundled_in_assets">
<!-- %%INSERT_BUNDLED_IN_ASSETS%% -->
</array>
</resources>
package labs.mavlink.VideoReceiverApp;
/* Copyright 2013 Google Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* Project home page: http://code.google.com/p/usb-serial-for-android/
*/
///////////////////////////////////////////////////////////////////////////////////////////
// Written by: Mike Goza April 2014
//
// QGLSinkActivity is the Android entry point for VideoReceiverApp. It extends QtActivity and
// calls nativeInit() so that the native side (main.cpp) can cache the application class loader
// and context needed by GStreamer. Do not change the functions without also changing the
// corresponding calls in the C++ routines or you will break the interface.
//
////////////////////////////////////////////////////////////////////////////////////////////
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.Timer;
import java.util.TimerTask;
import java.io.IOException;
import android.app.Activity;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.hardware.usb.UsbAccessory;
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbManager;
import android.widget.Toast;
import android.util.Log;
import android.os.PowerManager;
import android.os.Bundle;
import android.view.WindowManager;
import android.bluetooth.BluetoothDevice;
import org.qtproject.qt5.android.bindings.QtActivity;
import org.qtproject.qt5.android.bindings.QtApplication;
public class QGLSinkActivity extends QtActivity
{
public native void nativeInit();
// QGLSinkActivity singleton
public QGLSinkActivity() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
nativeInit();
}
@Override
public void onResume() {
super.onResume();
}
@Override
protected void onDestroy() {
super.onDestroy();
}
public void onInit(int status) {
}
public void jniOnLoad() {
nativeInit();
}
}
/*
* Copyright (C) 2012, Collabora Ltd.
* Author: Youness Alaoui
*
* Copyright (C) 2015, Collabora Ltd.
* Author: Justin Kim <justin.kim@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation
* version 2.1 of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
*/
package org.freedesktop.gstreamer.androidmedia;
import android.hardware.Camera;
public class GstAhcCallback implements Camera.PreviewCallback,
Camera.ErrorCallback,
Camera.AutoFocusCallback {
public long mUserData;
public long mCallback;
public static native void gst_ah_camera_on_preview_frame(byte[] data, Camera camera,
long callback, long user_data);
public static native void gst_ah_camera_on_error(int error, Camera camera,
long callback, long user_data);
public static native void gst_ah_camera_on_auto_focus(boolean success, Camera camera,
long callback, long user_data);
public GstAhcCallback(long callback, long user_data) {
mCallback = callback;
mUserData = user_data;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
gst_ah_camera_on_preview_frame(data, camera, mCallback, mUserData);
}
@Override
public void onError(int error, Camera camera) {
gst_ah_camera_on_error(error, camera, mCallback, mUserData);
}
@Override
public void onAutoFocus(boolean success, Camera camera) {
gst_ah_camera_on_auto_focus(success, camera, mCallback, mUserData);
}
}
/*
* Copyright (C) 2016 SurroundIO
* Author: Martin Kelly <martin@surround.io>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
package org.freedesktop.gstreamer.androidmedia;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
public class GstAhsCallback implements SensorEventListener {
public long mUserData;
public long mSensorCallback;
public long mAccuracyCallback;
public static native void gst_ah_sensor_on_sensor_changed(SensorEvent event,
long callback, long user_data);
public static native void gst_ah_sensor_on_accuracy_changed(Sensor sensor, int accuracy,
long callback, long user_data);
public GstAhsCallback(long sensor_callback,
long accuracy_callback, long user_data) {
mSensorCallback = sensor_callback;
mAccuracyCallback = accuracy_callback;
mUserData = user_data;
}
@Override
public void onSensorChanged(SensorEvent event) {
gst_ah_sensor_on_sensor_changed(event, mSensorCallback, mUserData);
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
gst_ah_sensor_on_accuracy_changed(sensor, accuracy,
mAccuracyCallback, mUserData);
}
}
/*
* Copyright (C) 2015, Collabora Ltd.
* Author: Matthieu Bouron <matthieu.bouron@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation
* version 2.1 of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
*/
package org.freedesktop.gstreamer.androidmedia;
import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
public class GstAmcOnFrameAvailableListener implements OnFrameAvailableListener
{
private long context = 0;
public synchronized void onFrameAvailable (SurfaceTexture surfaceTexture) {
native_onFrameAvailable(context, surfaceTexture);
}
public synchronized long getContext () {
return context;
}
public synchronized void setContext (long c) {
context = c;
}
private native void native_onFrameAvailable (long context, SurfaceTexture surfaceTexture);
}
#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QQuickWindow>
#include <QQuickItem>
#include <QRunnable>
#include <QCommandLineParser>
#include <QTimer>
#include <gst/gst.h>
#include "QGCLoggingCategory.h"
QGC_LOGGING_CATEGORY(AppLog, "VideoReceiverApp")
#if defined(__android__)
#include <QtAndroidExtras>
#include <jni.h>
#include <android/log.h>
static jobject _class_loader = nullptr;
static jobject _context = nullptr;
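// The application context and class loader are cached by gst_android_init() (registered below as
// QGLSinkActivity.nativeInit) so that gst_android_get_application_class_loader() can hand the
// loader back to GStreamer's Android JNI glue when it needs to resolve Java classes.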
extern "C" {
void gst_amc_jni_set_java_vm(JavaVM *java_vm);
jobject gst_android_get_application_class_loader(void) {
return _class_loader;
}
}
static void
gst_android_init(JNIEnv* env, jobject context)
{
jobject class_loader = nullptr;
jclass context_cls = env->GetObjectClass(context);
if (!context_cls) {
return;
}
jmethodID get_class_loader_id = env->GetMethodID(context_cls, "getClassLoader", "()Ljava/lang/ClassLoader;");
if (env->ExceptionCheck()) {
env->ExceptionDescribe();
env->ExceptionClear();
return;
}
class_loader = env->CallObjectMethod(context, get_class_loader_id);
if (env->ExceptionCheck()) {
env->ExceptionDescribe();
env->ExceptionClear();
return;
}
_context = env->NewGlobalRef(context);
_class_loader = env->NewGlobalRef(class_loader);
}
static const char kJniClassName[] {"labs/mavlink/VideoReceiverApp/QGLSinkActivity"};
static void setNativeMethods(void)
{
JNINativeMethod javaMethods[] {
{"nativeInit", "()V", reinterpret_cast<void *>(gst_android_init)}
};
QAndroidJniEnvironment jniEnv;
if (jniEnv->ExceptionCheck()) {
jniEnv->ExceptionDescribe();
jniEnv->ExceptionClear();
}
jclass objectClass = jniEnv->FindClass(kJniClassName);
if (!objectClass) {
qWarning() << "Couldn't find class:" << kJniClassName;
return;
}
jint val = jniEnv->RegisterNatives(objectClass, javaMethods, sizeof(javaMethods) / sizeof(javaMethods[0]));
if (val < 0) {
qWarning() << "Error registering methods: " << val;
} else {
qDebug() << "Main Native Functions Registered";
}
if (jniEnv->ExceptionCheck()) {
jniEnv->ExceptionDescribe();
jniEnv->ExceptionClear();
}
}
jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
Q_UNUSED(reserved);
JNIEnv* env;
if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
return -1;
}
setNativeMethods();
gst_amc_jni_set_java_vm(vm);
return JNI_VERSION_1_6;
}
#endif
#include <GStreamer.h>
#include <VideoReceiver.h>
class VideoReceiverApp : public QRunnable
{
public:
VideoReceiverApp(QCoreApplication& app, bool qmlAllowed)
: _app(app)
, _qmlAllowed(qmlAllowed)
{}
void run();
int exec();
void startStreaming();
void startDecoding();
void startRecording();
protected:
void _dispatch(std::function<void()> code);
private:
QCoreApplication& _app;
bool _qmlAllowed;
VideoReceiver* _receiver = nullptr;
QQuickWindow* _window = nullptr;
QQuickItem* _widget = nullptr;
void* _videoSink = nullptr;
QString _url;
unsigned _timeout = 5;
unsigned _connect = 1;
bool _decode = true;
unsigned _stopDecodingAfter = 0;
bool _record = false;
QString _videoFile;
unsigned int _fileFormat = VideoReceiver::FILE_FORMAT_MIN;
unsigned _stopRecordingAfter = 15;
bool _useFakeSink = false;
};
void
VideoReceiverApp::run()
{
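// run() is scheduled from startDecoding() via QQuickWindow::scheduleRenderJob(), so the video
// sink is created on the scene graph (render) thread before being handed to the receiver.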
if((_videoSink = GStreamer::createVideoSink(nullptr, _widget)) == nullptr) {
qCDebug(AppLog) << "createVideoSink failed";
return;
}
_receiver->startDecoding(_videoSink);
}
int
VideoReceiverApp::exec()
{
QCommandLineParser parser;
parser.addHelpOption();
parser.addPositionalArgument("url",
QCoreApplication::translate("main", "Source URL."));
QCommandLineOption timeoutOption(QStringList() << "t" << "timeout",
QCoreApplication::translate("main", "Source timeout."),
QCoreApplication::translate("main", "seconds"));
parser.addOption(timeoutOption);
QCommandLineOption connectOption(QStringList() << "c" << "connect",
QCoreApplication::translate("main", "Number of connection attempts."),
QCoreApplication::translate("main", "attempts"));
parser.addOption(connectOption);
QCommandLineOption decodeOption(QStringList() << "d" << "decode",
QCoreApplication::translate("main", "Decode and render video."));
parser.addOption(decodeOption);
QCommandLineOption noDecodeOption("no-decode",
QCoreApplication::translate("main", "Don't decode and render video."));
parser.addOption(noDecodeOption);
QCommandLineOption stopDecodingOption("stop-decoding",
QCoreApplication::translate("main", "Stop decoding after time."),
QCoreApplication::translate("main", "seconds"));
parser.addOption(stopDecodingOption);
QCommandLineOption recordOption(QStringList() << "r" << "record",
QCoreApplication::translate("main", "Record video."),
QCoreApplication::translate("main", "file"));
parser.addOption(recordOption);
QCommandLineOption formatOption(QStringList() << "f" << "format",
QCoreApplication::translate("main", "File format."),
QCoreApplication::translate("main", "format"));
parser.addOption(formatOption);
QCommandLineOption stopRecordingOption("stop-recording",
QCoreApplication::translate("main", "Stop recording after time."),
QCoreApplication::translate("main", "seconds"));
parser.addOption(stopRecordingOption);
QCommandLineOption videoSinkOption("video-sink",
QCoreApplication::translate("main", "Use video sink: 0 - autovideosink, 1 - fakesink"),
QCoreApplication::translate("main", "sink"));
if (!_qmlAllowed) {
parser.addOption(videoSinkOption);
}
parser.process(_app);
const QStringList args = parser.positionalArguments();
if (args.size() != 1) {
parser.showHelp(0);
}
_url = args.at(0);
if (parser.isSet(timeoutOption)) {
_timeout = parser.value(timeoutOption).toUInt();
}
if (parser.isSet(connectOption)) {
_connect = parser.value(connectOption).toUInt();
}
if (parser.isSet(decodeOption) && parser.isSet(noDecodeOption)) {
parser.showHelp(0);
}
if (parser.isSet(decodeOption)) {
_decode = true;
}
if (parser.isSet(noDecodeOption)) {
_decode = false;
}
if (_decode && parser.isSet(stopDecodingOption)) {
_stopDecodingAfter = parser.value(stopDecodingOption).toUInt();
}
if (parser.isSet(recordOption)) {
_record = true;
_videoFile = parser.value(recordOption);
}
if (parser.isSet(formatOption)) {
_fileFormat += parser.value(formatOption).toUInt();
}
if (_record && parser.isSet(stopRecordingOption)) {
_stopRecordingAfter = parser.value(stopRecordingOption).toUInt();
}
if (parser.isSet(videoSinkOption)) {
_useFakeSink = parser.value(videoSinkOption).toUInt() > 0;
}
_receiver = GStreamer::createVideoReceiver(nullptr);
QQmlApplicationEngine engine;
if (_decode && _qmlAllowed) {
engine.load(QUrl(QStringLiteral("qrc:/main.qml")));
_window = static_cast<QQuickWindow*>(engine.rootObjects().first());
Q_ASSERT(_window != nullptr);
_widget = _window->findChild<QQuickItem*>("videoItem");
Q_ASSERT(_widget != nullptr);
}
startStreaming();
QObject::connect(_receiver, &VideoReceiver::timeout, [this](){
qCDebug(AppLog) << "Streaming timeout";
_dispatch([this](){
if (_receiver->streaming()) {
_receiver->stop();
} else {
if (--_connect > 0) {
qCDebug(AppLog) << "Restarting streaming";
_dispatch([this](){
startStreaming();
});
} else {
qCDebug(AppLog) << "Closing...";
delete _receiver;
_app.exit();
}
}
});
});
QObject::connect(_receiver, &VideoReceiver::streamingChanged, [this](){
if (_receiver->streaming()) {
qCDebug(AppLog) << "Streaming started";
} else {
qCDebug(AppLog) << "Streaming stopped";
_dispatch([this](){
if (--_connect > 0) {
qCDebug(AppLog) << "Restarting streaming";
startStreaming();
} else {
qCDebug(AppLog) << "Closing...";
delete _receiver;
_app.exit();
}
});
}
});
QObject::connect(_receiver, &VideoReceiver::decodingChanged, [this](){
if (_receiver->decoding()) {
qCDebug(AppLog) << "Decoding started";
} else {
qCDebug(AppLog) << "Decoding stopped";
if (_receiver->streaming()) {
if (!_receiver->recording()) {
_dispatch([this](){
_receiver->stop();
});
}
}
}
});
QObject::connect(_receiver, &VideoReceiver::recordingChanged, [this](){
if (_receiver->recording()) {
qCDebug(AppLog) << "Recording started";
} else {
qCDebug(AppLog) << "Recording stopped";
if (_receiver->streaming()) {
if (!_receiver->decoding()) {
_dispatch([this](){
_receiver->stop();
});
}
}
}
});
return _app.exec();
}
void
VideoReceiverApp::startStreaming()
{
_receiver->start(_url, _timeout);
if (_decode) {
startDecoding();
}
if (_record) {
startRecording();
}
}
void
VideoReceiverApp::startDecoding()
{
if (_qmlAllowed) {
_window->scheduleRenderJob(this, QQuickWindow::BeforeSynchronizingStage);
} else {
if (_videoSink == nullptr) {
if ((_videoSink = gst_element_factory_make(_useFakeSink ? "fakesink" : "autovideosink", nullptr)) == nullptr) {
qCDebug(AppLog) << "Failed to create video sink";
return;
}
}
_receiver->startDecoding(_videoSink);
}
if (_stopDecodingAfter > 0) {
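// Capture the current attempt counter: if the stream has been restarted (and _connect
// decremented) by the time the timer fires, the delayed stopDecoding() is skipped.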
unsigned connect = _connect;
QTimer::singleShot(_stopDecodingAfter * 1000, Qt::PreciseTimer, [this, connect](){
if (connect != _connect) {
return;
}
_receiver->stopDecoding();
});
}
}
void
VideoReceiverApp::startRecording()
{
_receiver->startRecording(_videoFile, static_cast<VideoReceiver::FILE_FORMAT>(_fileFormat));
if (_stopRecordingAfter > 0) {
unsigned connect = _connect;
QTimer::singleShot(_stopRecordingAfter * 1000, [this, connect](){
if (connect != _connect) {
return;
}
_receiver->stopRecording();
});
}
}
void
VideoReceiverApp::_dispatch(std::function<void()> code)
{
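// Run 'code' on the Qt application thread: the single-shot timer is moved to the app thread
// and started via a queued invocation, so the timeout lambda (and deleteLater) always execute
// in the main event loop regardless of which thread called _dispatch().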
QTimer* timer = new QTimer();
timer->moveToThread(qApp->thread());
timer->setSingleShot(true);
QObject::connect(timer, &QTimer::timeout, [=](){
code();
timer->deleteLater();
});
QMetaObject::invokeMethod(timer, "start", Qt::QueuedConnection, Q_ARG(int, 0));
}
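// Used by main() below: if the executable name starts with 'Q'/'q' the app runs as a
// QGuiApplication with the QML video window, otherwise as a headless QCoreApplication.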
static bool isQtApp(const char* app)
{
const char* s;
#if defined(Q_OS_WIN)
if ((s = strrchr(app, '\\')) != nullptr) {
#else
if ((s = strrchr(app, '/')) != nullptr) {
#endif
s += 1;
} else {
s = app;
}
return s[0] == 'Q' || s[0] == 'q';
}
int main(int argc, char *argv[])
{
if (argc < 1) {
return 0;
}
GStreamer::initialize(argc, argv, 3);
if (isQtApp(argv[0])) {
QGuiApplication app(argc, argv);
VideoReceiverApp videoApp(app, true);
return videoApp.exec();
} else {
QCoreApplication app(argc, argv);
VideoReceiverApp videoApp(app, false);
return videoApp.exec();
}
}
import QtQuick 2.12
import QtQuick.Window 2.12
import QtQuick.Layouts 1.3
import org.freedesktop.gstreamer.GLVideoItem 1.0
Window {
visible: true
width: 640
height: 480
title: qsTr("VideoReceiverApp")
RowLayout {
anchors.fill: parent
spacing: 0
GstGLVideoItem {
id: video
objectName: "videoItem"
Layout.fillWidth: true
Layout.fillHeight: true
}
}
}
<RCC>
<qresource prefix="/">
<file>main.qml</file>
</qresource>
</RCC>
add_subdirectory(qtandroidserialport)
add_subdirectory(shapelib)
if (GST_FOUND)
add_subdirectory(qmlglsink)
endif()
find_package(Qt5Gui ${QT_VERSION} CONFIG REQUIRED Private)
add_library(qmlglsink
gst-plugins-good/ext/qt/gstplugin.cc
gst-plugins-good/ext/qt/gstqtglutility.cc
gst-plugins-good/ext/qt/gstqsgtexture.cc
gst-plugins-good/ext/qt/gstqtsink.cc
gst-plugins-good/ext/qt/gstqtsrc.cc
gst-plugins-good/ext/qt/qtwindow.cc
gst-plugins-good/ext/qt/qtitem.cc
gst-plugins-good/ext/qt/gstqsgtexture.h
gst-plugins-good/ext/qt/gstqtgl.h
gst-plugins-good/ext/qt/gstqtglutility.h
gst-plugins-good/ext/qt/gstqtsink.h
gst-plugins-good/ext/qt/gstqtsrc.h
gst-plugins-good/ext/qt/qtwindow.h
gst-plugins-good/ext/qt/qtitem.h
)
if(LINUX)
target_compile_definitions(qmlglsink PUBLIC HAVE_QT_X11 HAVE_QT_EGLFS HAVE_QT_WAYLAND)
find_package(Qt5 ${QT_VERSION}
COMPONENTS
X11Extras
REQUIRED
HINTS
${QT_LIBRARY_HINTS}
)
target_link_libraries(qmlglsink
PUBLIC
Qt5::X11Extras
)
elseif(APPLE)
target_compile_definitions(qmlglsink PUBLIC HAVE_QT_MAC)
elseif(IOS)
target_compile_definitions(qmlglsink PUBLIC HAVE_QT_MAC)
elseif(WIN32)
target_compile_definitions(qmlglsink PUBLIC HAVE_QT_WIN32 HAVE_QT_QPA_HEADER)
# TODO: use FindOpenGL?
target_link_libraries(qmlglsink PUBLIC opengl32.lib user32.lib)
# LIBS += opengl32.lib user32.lib
elseif(ANDROID)
target_compile_definitions(qmlglsink PUBLIC HAVE_QT_ANDROID)
endif()
target_link_libraries(qmlglsink
PUBLIC
Qt5::Core
Qt5::OpenGL
Qt5::GuiPrivate
)
target_compile_options(qmlglsink
PRIVATE
-Wno-unused-parameter
-Wno-implicit-fallthrough
)
File moved
@@ -1315,19 +1315,15 @@ contains (DEFINES, QGC_AIRMAP_ENABLED) {
# Video Streaming
INCLUDEPATH += \
src/VideoStreaming
src/VideoManager
HEADERS += \
src/VideoStreaming/VideoReceiver.h \
src/VideoStreaming/VideoStreaming.h \
src/VideoStreaming/SubtitleWriter.h \
src/VideoStreaming/VideoManager.h
src/VideoManager/SubtitleWriter.h \
src/VideoManager/VideoManager.h
SOURCES += \
src/VideoStreaming/VideoReceiver.cc \
src/VideoStreaming/VideoStreaming.cc \
src/VideoStreaming/SubtitleWriter.cc \
src/VideoStreaming/VideoManager.cc
src/VideoManager/SubtitleWriter.cc \
src/VideoManager/VideoManager.cc
contains (CONFIG, DISABLE_VIDEOSTREAMING) {
message("Skipping support for video streaming (manual override from command line)")
@@ -1335,14 +1331,19 @@ contains (CONFIG, DISABLE_VIDEOSTREAMING) {
} else:exists(user_config.pri):infile(user_config.pri, DEFINES, DISABLE_VIDEOSTREAMING) {
message("Skipping support for video streaming (manual override from user_config.pri)")
} else {
include(src/VideoStreaming/VideoStreaming.pri)
include(src/VideoReceiver/VideoReceiver.pri)
}
!VideoEnabled {
INCLUDEPATH += \
src/VideoReceiver
HEADERS += \
src/VideoStreaming/GLVideoItemStub.h
src/VideoManager/GLVideoItemStub.h \
src/VideoReceiver/VideoReceiver.h
SOURCES += \
src/VideoStreaming/GLVideoItemStub.cc
src/VideoManager/GLVideoItemStub.cc
}
#-------------------------------------------------------------------------------------
find_package(Qt5Gui ${QT_VERSION} CONFIG REQUIRED Private)
add_library(gst_plugins_good
libs/gst-plugins-good/ext/qt/gstplugin.cc
libs/gst-plugins-good/ext/qt/gstqtglutility.cc
libs/gst-plugins-good/ext/qt/gstqsgtexture.cc
libs/gst-plugins-good/ext/qt/gstqtsink.cc
libs/gst-plugins-good/ext/qt/gstqtsrc.cc
libs/gst-plugins-good/ext/qt/qtwindow.cc
libs/gst-plugins-good/ext/qt/qtitem.cc
libs/gst-plugins-good/ext/qt/gstqsgtexture.h
libs/gst-plugins-good/ext/qt/gstqtgl.h
libs/gst-plugins-good/ext/qt/gstqtglutility.h
libs/gst-plugins-good/ext/qt/gstqtsink.h
libs/gst-plugins-good/ext/qt/gstqtsrc.h
libs/gst-plugins-good/ext/qt/qtwindow.h
libs/gst-plugins-good/ext/qt/qtitem.h
)
if(LINUX)
target_compile_definitions(gst_plugins_good PUBLIC HAVE_QT_X11 HAVE_QT_EGLFS HAVE_QT_WAYLAND)
find_package(Qt5 ${QT_VERSION}
COMPONENTS
X11Extras
REQUIRED
HINTS
${QT_LIBRARY_HINTS}
)
target_link_libraries(gst_plugins_good
PUBLIC
Qt5::X11Extras
)
elseif(APPLE)
target_compile_definitions(gst_plugins_good PUBLIC HAVE_QT_MAC)
elseif(IOS)
target_compile_definitions(gst_plugins_good PUBLIC HAVE_QT_MAC)
elseif(WIN32)
target_compile_definitions(gst_plugins_good PUBLIC HAVE_QT_WIN32 HAVE_QT_QPA_HEADER)
# TODO: use FindOpenGL?
target_link_libraries(gst_plugins_good PUBLIC opengl32.lib user32.lib)
# LIBS += opengl32.lib user32.lib
elseif(ANDROID)
target_compile_definitions(gst_plugins_good PUBLIC HAVE_QT_ANDROID)
endif()
target_link_libraries(gst_plugins_good
PUBLIC
Qt5::Core
Qt5::OpenGL
Qt5::GuiPrivate
)
target_compile_options(gst_plugins_good
PRIVATE
-Wno-unused-parameter
-Wno-implicit-fallthrough
)
@@ -15,19 +15,19 @@ LinuxBuild {
}
SOURCES += \
libs/gst-plugins-good/ext/qt/gstplugin.cc \
libs/gst-plugins-good/ext/qt/gstqtglutility.cc \
libs/gst-plugins-good/ext/qt/gstqsgtexture.cc \
libs/gst-plugins-good/ext/qt/gstqtsink.cc \
libs/gst-plugins-good/ext/qt/gstqtsrc.cc \
libs/gst-plugins-good/ext/qt/qtwindow.cc \
libs/gst-plugins-good/ext/qt/qtitem.cc
libs/qmlglsink/gst-plugins-good/ext/qt/gstplugin.cc \
libs/qmlglsink/gst-plugins-good/ext/qt/gstqtglutility.cc \
libs/qmlglsink/gst-plugins-good/ext/qt/gstqsgtexture.cc \
libs/qmlglsink/gst-plugins-good/ext/qt/gstqtsink.cc \
libs/qmlglsink/gst-plugins-good/ext/qt/gstqtsrc.cc \
libs/qmlglsink/gst-plugins-good/ext/qt/qtwindow.cc \
libs/qmlglsink/gst-plugins-good/ext/qt/qtitem.cc
HEADERS += \
libs/gst-plugins-good/ext/qt/gstqsgtexture.h \
libs/gst-plugins-good/ext/qt/gstqtgl.h \
libs/gst-plugins-good/ext/qt/gstqtglutility.h \
libs/gst-plugins-good/ext/qt/gstqtsink.h \
libs/gst-plugins-good/ext/qt/gstqtsrc.h \
libs/gst-plugins-good/ext/qt/qtwindow.h \
libs/gst-plugins-good/ext/qt/qtitem.h
libs/qmlglsink/gst-plugins-good/ext/qt/gstqsgtexture.h \
libs/qmlglsink/gst-plugins-good/ext/qt/gstqtgl.h \
libs/qmlglsink/gst-plugins-good/ext/qt/gstqtglutility.h \
libs/qmlglsink/gst-plugins-good/ext/qt/gstqtsink.h \
libs/qmlglsink/gst-plugins-good/ext/qt/gstqtsrc.h \
libs/qmlglsink/gst-plugins-good/ext/qt/qtwindow.h \
libs/qmlglsink/gst-plugins-good/ext/qt/qtitem.h
@@ -3,7 +3,7 @@ include_directories(
.
${CMAKE_CURRENT_BINARY_DIR}
${Qt5Location_PRIVATE_INCLUDE_DIRS}
VideoStreaming
VideoManager
)
set(EXTRA_SRC)
@@ -136,7 +136,8 @@ add_subdirectory(Terrain)
add_subdirectory(uas)
add_subdirectory(Vehicle)
add_subdirectory(VehicleSetup)
add_subdirectory(VideoStreaming)
add_subdirectory(VideoManager)
add_subdirectory(VideoReceiver)
add_subdirectory(ViewWidgets)
target_link_libraries(qgc
@@ -172,7 +173,7 @@ target_link_libraries(qgc
ui
Vehicle
VehicleSetup
VideoStreaming
VideoManager
ViewWidgets
)
@@ -34,7 +34,9 @@
#include <QDebug>
#include "VideoStreaming.h"
#if defined(QGC_GST_STREAMING)
#include "GStreamer.h"
#endif
#include "QGC.h"
#include "QGCApplication.h"
@@ -323,8 +325,13 @@ QGCApplication::QGCApplication(int &argc, char* argv[], bool unitTesting)
if (settings.contains(AppSettings::gstDebugLevelName)) {
gstDebugLevel = settings.value(AppSettings::gstDebugLevelName).toInt();
}
// Initialize Video Streaming
initializeVideoStreaming(argc, argv, gstDebugLevel);
#if defined(QGC_GST_STREAMING)
// Initialize Video Receiver
GStreamer::initialize(argc, argv, gstDebugLevel);
#else
Q_UNUSED(gstDebugLevel)
#endif
_toolbox = new QGCToolbox(this);
_toolbox->setChildToolboxes();
add_library(VideoManager
GLVideoItemStub.cc
GLVideoItemStub.h
SubtitleWriter.cc
SubtitleWriter.h
VideoManager.cc
VideoManager.h
)
target_link_libraries(VideoManager
PUBLIC
qgc
Qt5::Multimedia
Qt5::OpenGL
VideoReceiver
)
target_include_directories(VideoManager INTERFACE ${CMAKE_CURRENT_SOURCE_DIR})
@@ -80,11 +80,9 @@ void SubtitleWriter::startCapturingTelemetry(const QString& videoFile)
void SubtitleWriter::stopCapturingTelemetry()
{
#if defined(QGC_GST_STREAMING)
qCDebug(SubtitleWriterLog) << "Stopping writing";
_timer.stop();
_file.close();
#endif
}
void SubtitleWriter::_captureTelemetry()
@@ -28,18 +28,33 @@
#include "Vehicle.h"
#include "QGCCameraManager.h"
#if defined(QGC_GST_STREAMING)
#include "GStreamer.h"
#else
#include "GLVideoItemStub.h"
#endif
QGC_LOGGING_CATEGORY(VideoManagerLog, "VideoManagerLog")
#if defined(QGC_GST_STREAMING)
static const char* kFileExtension[VideoReceiver::FILE_FORMAT_MAX - VideoReceiver::FILE_FORMAT_MIN] = {
"mkv",
"mov",
"mp4"
};
#endif
//-----------------------------------------------------------------------------
VideoManager::VideoManager(QGCApplication* app, QGCToolbox* toolbox)
: QGCTool(app, toolbox)
{
#if !defined(QGC_GST_STREAMING)
static bool once = false;
if (!once) {
qmlRegisterType<GLVideoItemStub>("org.freedesktop.gstreamer.GLVideoItem", 1, 0, "GstGLVideoItem");
once = true;
}
#endif
}
//-----------------------------------------------------------------------------
@@ -50,15 +65,10 @@ VideoManager::~VideoManager()
delete _thermalVideoReceiver;
_thermalVideoReceiver = nullptr;
#if defined(QGC_GST_STREAMING)
if (_thermalVideoSink != nullptr) {
gst_object_unref(_thermalVideoSink);
GStreamer::releaseVideoSink(_thermalVideoSink);
_thermalVideoSink = nullptr;
}
if (_videoSink != nullptr) {
gst_object_unref(_videoSink);
GStreamer::releaseVideoSink(_videoSink);
_videoSink = nullptr;
}
#endif
}
@@ -90,6 +100,7 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
emit isGStreamerChanged();
qCDebug(VideoManagerLog) << "New Video Source:" << videoSource;
#if defined(QGC_GST_STREAMING)
_videoReceiver = toolbox->corePlugin()->createVideoReceiver(this);
_thermalVideoReceiver = toolbox->corePlugin()->createVideoReceiver(this);
@@ -103,7 +114,7 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
// and I expect that it will be changed during multiple video stream activity
connect(_thermalVideoReceiver, &VideoReceiver::timeout, this, &VideoManager::_restartVideo);
connect(_thermalVideoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged);
#endif
_updateSettings();
if(isGStreamer()) {
startVideo();
@@ -146,7 +157,7 @@ void VideoManager::_cleanupOldVideos()
//-- Remove old movies until max size is satisfied.
while(total >= maxSize && !vidList.isEmpty()) {
total -= vidList.last().size();
qCDebug(VideoReceiverLog) << "Removing old video file:" << vidList.last().filePath();
qCDebug(VideoManagerLog) << "Removing old video file:" << vidList.last().filePath();
QFile file (vidList.last().filePath());
file.remove();
vidList.removeLast();
@@ -164,7 +175,7 @@ VideoManager::startVideo()
}
if(!_videoSettings->streamEnabled()->rawValue().toBool() || !_videoSettings->streamConfigured()) {
qCDebug(VideoReceiverLog) << "Stream not enabled/configured";
qCDebug(VideoManagerLog) << "Stream not enabled/configured";
return;
}
@@ -194,9 +205,10 @@ VideoManager::stopVideo()
if (qgcApp()->runningUnitTests()) {
return;
}
#if defined(QGC_GST_STREAMING)
if(_videoReceiver) _videoReceiver->stop();
if(_thermalVideoReceiver) _thermalVideoReceiver->stop();
#endif
}
void
@@ -205,7 +217,7 @@ VideoManager::startRecording(const QString& videoFile)
if (qgcApp()->runningUnitTests()) {
return;
}
#if defined(QGC_GST_STREAMING)
if (!_videoReceiver) {
qgcApp()->showMessage(tr("Video receiver is not ready."));
return;
@@ -233,6 +245,9 @@ VideoManager::startRecording(const QString& videoFile)
+ "." + kFileExtension[fileFormat - VideoReceiver::FILE_FORMAT_MIN];
_videoReceiver->startRecording(_videoFile, fileFormat);
#else
Q_UNUSED(videoFile)
#endif
}
void
@@ -241,12 +256,13 @@ VideoManager::stopRecording()
if (qgcApp()->runningUnitTests()) {
return;
}
#if defined(QGC_GST_STREAMING)
if (!_videoReceiver) {
return;
}
_videoReceiver->stopRecording();
#endif
}
void
@@ -255,7 +271,7 @@ VideoManager::grabImage(const QString& imageFile)
if (qgcApp()->runningUnitTests()) {
return;
}
#if defined(QGC_GST_STREAMING)
if (!_videoReceiver) {
return;
}
......@@ -265,6 +281,9 @@ VideoManager::grabImage(const QString& imageFile)
emit imageFileChanged();
_videoReceiver->takeScreenshot(_imageFile);
#else
Q_UNUSED(imageFile)
#endif
}
//-----------------------------------------------------------------------------
@@ -455,26 +474,6 @@ VideoManager::setfullScreen(bool f)
emit fullScreenChanged();
}
//-----------------------------------------------------------------------------
#if defined(QGC_GST_STREAMING)
GstElement*
VideoManager::_makeVideoSink(gpointer widget)
{
GstElement* sink;
if ((sink = gst_element_factory_make("qgcvideosinkbin", nullptr)) != nullptr) {
g_object_set(sink, "widget", widget, NULL);
// FIXME: AV: temporally disable sync due to MPEG2-TS sync issues
g_object_set(sink, "sync", FALSE, NULL);
} else {
qCritical() << "gst_element_factory_make('qgcvideosinkbin') failed";
}
return sink;
}
#endif
//-----------------------------------------------------------------------------
void
VideoManager::_initVideo()
{
@@ -489,10 +488,10 @@ VideoManager::_initVideo()
QQuickItem* widget = root->findChild<QQuickItem*>("videoContent");
if (widget != nullptr && _videoReceiver != nullptr) {
if ((_videoSink = _makeVideoSink(widget)) != nullptr) {
if ((_videoSink = qgcApp()->toolbox()->corePlugin()->createVideoSink(this, widget)) != nullptr) {
_videoReceiver->startDecoding(_videoSink);
} else {
qCDebug(VideoManagerLog) << "_makeVideoSink() failed";
qCDebug(VideoManagerLog) << "createVideoSink() failed";
}
} else {
qCDebug(VideoManagerLog) << "video receiver disabled";
@@ -501,10 +500,10 @@ VideoManager::_initVideo()
widget = root->findChild<QQuickItem*>("thermalVideo");
if (widget != nullptr && _thermalVideoReceiver != nullptr) {
if ((_thermalVideoSink = _makeVideoSink(widget)) != nullptr) {
if ((_thermalVideoSink = qgcApp()->toolbox()->corePlugin()->createVideoSink(this, widget)) != nullptr) {
_thermalVideoReceiver->startDecoding(_thermalVideoSink);
} else {
qCDebug(VideoManagerLog) << "_makeVideoSink() failed";
qCDebug(VideoManagerLog) << "createVideoSink() failed";
}
} else {
qCDebug(VideoManagerLog) << "thermal video receiver disabled";
@@ -516,7 +515,7 @@ VideoManager::_updateSettings()
void
VideoManager::_updateSettings()
{
if(!_videoSettings || !_videoReceiver)
if(!_videoSettings)
return;
//-- Auto discovery
if(_activeVehicle && _activeVehicle->dynamicCameras()) {
@@ -646,9 +645,11 @@ VideoManager::_recordingStarted()
void
VideoManager::_recordingChanged()
{
#if defined(QGC_GST_STREAMING)
if (_videoReceiver && !_videoReceiver->recording()) {
_subtitleWriter.stopCapturingTelemetry();
}
#endif
}
//----------------------------------------------------------------------------------------
@@ -112,10 +112,7 @@ protected slots:
protected:
friend class FinishVideoInitialization;
#if defined(QGC_GST_STREAMING)
static gboolean _videoSinkQuery (GstPad* pad, GstObject* parent, GstQuery* query);
GstElement* _makeVideoSink (gpointer widget);
#endif
void _initVideo ();
void _updateSettings ();
void _setVideoUri (const QString& uri);
@@ -134,10 +131,8 @@ protected:
bool _isTaisync = false;
VideoReceiver* _videoReceiver = nullptr;
VideoReceiver* _thermalVideoReceiver = nullptr;
#if defined(QGC_GST_STREAMING)
GstElement* _videoSink = nullptr;
GstElement* _thermalVideoSink = nullptr;
#endif
void* _videoSink = nullptr;
void* _thermalVideoSink = nullptr;
VideoSettings* _videoSettings = nullptr;
QString _videoUri;
QString _thermalVideoUri;
set(EXTRA_SOURCES)
set(EXTRA_LIBRARIES)
if (GST_FOUND)
set(EXTRA_SOURCES gstqgc.c gstqgcvideosinkbin.c GStreamer.cc GStreamer.h GstVideoReceiver.cc GstVideoReceiver.h)
set(EXTRA_LIBRARIES qmlglsink ${GST_LIBRARIES})
endif()
add_library(VideoReceiver
${EXTRA_SOURCES}
VideoReceiver.h
)
target_link_libraries(VideoReceiver
PUBLIC
Qt5::Multimedia
Qt5::OpenGL
Qt5::Quick
${EXTRA_LIBRARIES}
)
target_include_directories(VideoReceiver INTERFACE ${CMAKE_CURRENT_SOURCE_DIR})
@@ -14,17 +14,12 @@
* @author Gus Grubba <gus@auterion.com>
*/
#include <QtQml>
#include <QDebug>
#include "VideoReceiver.h"
#if defined(QGC_GST_STREAMING)
#include <gst/gst.h>
#include "GStreamer.h"
#include "GstVideoReceiver.h"
#include "QGCLoggingCategory.h"
QGC_LOGGING_CATEGORY(GstreamerLog, "GstreamerLog")
QGC_LOGGING_CATEGORY(GStreamerLog, "GStreamerLog")
static void qt_gst_log(GstDebugCategory * category,
GstDebugLevel level,
@@ -35,6 +30,8 @@ static void qt_gst_log(GstDebugCategory * category,
GstDebugMessage * message,
gpointer data)
{
Q_UNUSED(data);
if (level > gst_debug_category_get_threshold(category)) {
return;
}
......@@ -46,20 +43,20 @@ static void qt_gst_log(GstDebugCategory * category,
switch (level) {
default:
case GST_LEVEL_ERROR:
log.critical(GstreamerLog, "%s %s", object_info, gst_debug_message_get(message));
log.critical(GStreamerLog, "%s %s", object_info, gst_debug_message_get(message));
break;
case GST_LEVEL_WARNING:
log.warning(GstreamerLog, "%s %s", object_info, gst_debug_message_get(message));
log.warning(GStreamerLog, "%s %s", object_info, gst_debug_message_get(message));
break;
case GST_LEVEL_FIXME:
case GST_LEVEL_INFO:
log.info(GstreamerLog, "%s %s", object_info, gst_debug_message_get(message));
log.info(GStreamerLog, "%s %s", object_info, gst_debug_message_get(message));
break;
case GST_LEVEL_DEBUG:
case GST_LEVEL_LOG:
case GST_LEVEL_TRACE:
case GST_LEVEL_MEMDUMP:
log.debug(GstreamerLog, "%s %s", object_info, gst_debug_message_get(message));
log.debug(GStreamerLog, "%s %s", object_info, gst_debug_message_get(message));
break;
}
......@@ -70,15 +67,11 @@ static void qt_gst_log(GstDebugCategory * category,
#if defined(__ios__)
#include "gst_ios_init.h"
#endif
#else
#include "GLVideoItemStub.h"
#endif
#include "VideoStreaming.h"
#include "VideoReceiver.h"
#if defined(QGC_GST_STREAMING)
G_BEGIN_DECLS
// The static plugins we use
G_BEGIN_DECLS
// The static plugins we use
#if defined(__android__) || defined(__ios__)
GST_PLUGIN_STATIC_DECLARE(coreelements);
GST_PLUGIN_STATIC_DECLARE(playback);
......@@ -102,10 +95,8 @@ static void qt_gst_log(GstDebugCategory * category,
#endif
GST_PLUGIN_STATIC_DECLARE(qmlgl);
GST_PLUGIN_STATIC_DECLARE(qgc);
G_END_DECLS
#endif
G_END_DECLS
#if defined(QGC_GST_STREAMING)
#if (defined(Q_OS_MAC) && defined(QGC_INSTALL_RELEASE)) || defined(Q_OS_WIN)
static void qgcputenv(const QString& key, const QString& root, const QString& path)
{
......@@ -113,12 +104,11 @@ static void qgcputenv(const QString& key, const QString& root, const QString& pa
qputenv(key.toStdString().c_str(), QByteArray(value.toStdString().c_str()));
}
#endif
#endif
void initializeVideoStreaming(int &argc, char* argv[], int gstDebuglevel)
void
GStreamer::initialize(int argc, char* argv[], int debuglevel)
{
#if defined(QGC_GST_STREAMING)
#ifdef Q_OS_MAC
#ifdef Q_OS_MAC
#ifdef QGC_INSTALL_RELEASE
QString currentDir = QCoreApplication::applicationDirPath();
qgcputenv("GST_PLUGIN_SCANNER", currentDir, "/../Frameworks/GStreamer.framework/Versions/1.0/libexec/gstreamer-1.0/gst-plugin-scanner");
......@@ -129,14 +119,14 @@ void initializeVideoStreaming(int &argc, char* argv[], int gstDebuglevel)
qgcputenv("GST_PLUGIN_PATH_1_0", currentDir, "/../Frameworks/GStreamer.framework/Versions/Current/lib/gstreamer-1.0");
qgcputenv("GST_PLUGIN_PATH", currentDir, "/../Frameworks/GStreamer.framework/Versions/Current/lib/gstreamer-1.0");
#endif
#elif defined(Q_OS_WIN)
#elif defined(Q_OS_WIN)
QString currentDir = QCoreApplication::applicationDirPath();
qgcputenv("GST_PLUGIN_PATH", currentDir, "/gstreamer-plugins");
#endif
#endif
//-- If GStreamer debugging is not configured via environment then use internal Qt logging
if (qgetenv("GST_DEBUG").isEmpty()) {
gst_debug_set_default_threshold(static_cast<GstDebugLevel>(gstDebuglevel));
gst_debug_set_default_threshold(static_cast<GstDebugLevel>(debuglevel));
gst_debug_remove_log_function(gst_debug_log_default);
gst_debug_add_log_function(qt_gst_log, nullptr, nullptr);
}
......@@ -149,7 +139,7 @@ void initializeVideoStreaming(int &argc, char* argv[], int gstDebuglevel)
GError* error = nullptr;
if (!gst_init_check(&argc, &argv, &error)) {
qCCritical(VideoReceiverLog) << "gst_init_check() failed: " << error->message;
qCCritical(GStreamerLog) << "gst_init_check() failed: " << error->message;
g_error_free(error);
}
......@@ -196,14 +186,39 @@ void initializeVideoStreaming(int &argc, char* argv[], int gstDebuglevel)
gst_object_unref(sink);
sink = nullptr;
} else {
qCCritical(VideoReceiverLog) << "unable to find qmlglsink - you need to build it yourself and add to GST_PLUGIN_PATH";
qCCritical(GStreamerLog) << "unable to find qmlglsink - you need to build it yourself and add to GST_PLUGIN_PATH";
}
GST_PLUGIN_STATIC_REGISTER(qgc);
#else
qmlRegisterType<GLVideoItemStub>("org.freedesktop.gstreamer.GLVideoItem", 1, 0, "GstGLVideoItem");
Q_UNUSED(argc)
Q_UNUSED(argv)
Q_UNUSED(gstDebuglevel)
#endif
}
void*
GStreamer::createVideoSink(QObject* parent, QQuickItem* widget)
{
Q_UNUSED(parent)
GstElement* sink;
if ((sink = gst_element_factory_make("qgcvideosinkbin", nullptr)) != nullptr) {
g_object_set(sink, "widget", widget, NULL);
} else {
qCritical() << "gst_element_factory_make('qgcvideosinkbin') failed";
}
return sink;
}
void
GStreamer::releaseVideoSink(void* sink)
{
if (sink != nullptr) {
gst_object_unref(GST_ELEMENT(sink));
}
}
VideoReceiver*
GStreamer::createVideoReceiver(QObject* parent)
{
Q_UNUSED(parent)
return new GstVideoReceiver(nullptr);
}
#pragma once
#include <QObject>
#include <QQuickItem>
#include "VideoReceiver.h"
class GStreamer {
public:
static void initialize(int argc, char* argv[], int debuglevel);
static void* createVideoSink(QObject* parent, QQuickItem* widget);
static void releaseVideoSink(void* sink);
static VideoReceiver* createVideoReceiver(QObject* parent);
};
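For orientation, a minimal sketch of how the new facade fits together, assuming a QGC_GST_STREAMING build and a caller that already owns a QQuickItem (for instance a GstGLVideoItem instantiated from QML); the URI, timeout, debug level and helper names are placeholders:
// Sketch only: driving the GStreamer facade directly.
#include <QQuickItem>
#include <QString>
#include "GStreamer.h"
#include "VideoReceiver.h"
struct VideoSession { VideoReceiver* receiver; void* sink; };
VideoSession startVideo(QQuickItem* videoItem, int argc, char* argv[])
{
    GStreamer::initialize(argc, argv, 3);    // register static plugins, route GStreamer logs to Qt
    VideoSession s;
    s.receiver = GStreamer::createVideoReceiver(nullptr);
    s.sink     = GStreamer::createVideoSink(nullptr, videoItem);
    if (s.receiver != nullptr && s.sink != nullptr) {
        s.receiver->start(QStringLiteral("udp://0.0.0.0:5600"), 10);   // placeholder URI and timeout
        s.receiver->startDecoding(s.sink);
    }
    return s;   // on teardown the caller calls stopDecoding(), stop() and GStreamer::releaseVideoSink(s.sink)
}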
......@@ -14,7 +14,7 @@
* @author Gus Grubba <gus@auterion.com>
*/
#include "VideoReceiver.h"
#include "GstVideoReceiver.h"
#include <QDebug>
#include <QUrl>
......@@ -33,9 +33,8 @@ QGC_LOGGING_CATEGORY(VideoReceiverLog, "VideoReceiverLog")
// +-->queue-->_recorderValve[-->_fileSink]
//
VideoReceiver::VideoReceiver(QObject* parent)
: QThread(parent)
#if defined(QGC_GST_STREAMING)
GstVideoReceiver::GstVideoReceiver(QObject* parent)
: VideoReceiver(parent)
, _removingDecoder(false)
, _removingRecorder(false)
, _source(nullptr)
......@@ -51,36 +50,26 @@ VideoReceiver::VideoReceiver(QObject* parent)
, _resetVideoSink(true)
, _videoSinkProbeId(0)
, _udpReconnect_us(5000000)
, _shutdown(false)
#endif
, _streaming(false)
, _decoding(false)
, _recording(false)
, _endOfStream(false)
{
#if defined(QGC_GST_STREAMING)
QThread::start();
connect(&_watchdogTimer, &QTimer::timeout, this, &VideoReceiver::_watchdog);
_apiHandler.start();
_notificationHandler.start();
connect(&_watchdogTimer, &QTimer::timeout, this, &GstVideoReceiver::_watchdog);
_watchdogTimer.start(1000);
#endif
}
VideoReceiver::~VideoReceiver(void)
GstVideoReceiver::~GstVideoReceiver(void)
{
#if defined(QGC_GST_STREAMING)
stop();
_post([this](){
_shutdown = true;
});
QThread::wait();
#endif
_notificationHandler.shutdown();
_apiHandler.shutdown();
}
void
VideoReceiver::start(const QString& uri, unsigned timeout)
GstVideoReceiver::start(const QString& uri, unsigned timeout)
{
#if defined(QGC_GST_STREAMING)
if (!_isOurThread()) {
_post([this, uri, timeout]() {
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this, uri, timeout]() {
start(uri, timeout);
});
return;
......@@ -98,6 +87,8 @@ VideoReceiver::start(const QString& uri, unsigned timeout)
qCDebug(VideoReceiverLog) << "Starting";
_endOfStream = false;
_timeout = timeout;
bool running = false;
......@@ -236,18 +227,13 @@ VideoReceiver::start(const QString& uri, unsigned timeout)
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-started");
qCDebug(VideoReceiverLog) << "Started";
}
#else
Q_UNUSED(uri);
Q_UNUSED(timeout);
#endif
}
void
VideoReceiver::stop(void)
GstVideoReceiver::stop(void)
{
#if defined(QGC_GST_STREAMING)
if (!_isOurThread()) {
_post([this]() {
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this]() {
stop();
});
return;
......@@ -309,22 +295,28 @@ VideoReceiver::stop(void)
if (_streaming) {
_streaming = false;
emit streamingChanged();
qCDebug(VideoReceiverLog) << "Streaming stopped";
_notificationHandler.dispatch([this](){
emit streamingChanged();
});
}
}
qCDebug(VideoReceiverLog) << "Stopped";
#endif
}
void
VideoReceiver::startDecoding(VideoSink* videoSink)
GstVideoReceiver::startDecoding(void* sink)
{
#if defined(QGC_GST_STREAMING)
if (!_isOurThread()) {
if (sink == nullptr) {
qCCritical(VideoReceiverLog) << "VideoSink is NULL";
return;
}
if (_apiHandler.needDispatch()) {
GstElement* videoSink = GST_ELEMENT(sink);
gst_object_ref(videoSink);
_post([this, videoSink]() {
_apiHandler.dispatch([this, videoSink]() mutable {
startDecoding(videoSink);
gst_object_unref(videoSink);
});
......@@ -340,6 +332,8 @@ VideoReceiver::startDecoding(VideoSink* videoSink)
}
}
GstElement* videoSink = GST_ELEMENT(sink);
if(_videoSink != nullptr || _decoding) {
qCDebug(VideoReceiverLog) << "Already decoding!";
return;
......@@ -376,17 +370,13 @@ VideoReceiver::startDecoding(VideoSink* videoSink)
g_object_set(_decoderValve, "drop", FALSE, nullptr);
qCDebug(VideoReceiverLog) << "Decoding started";
#else
Q_UNUSED(videoSink)
#endif
}
void
VideoReceiver::stopDecoding(void)
GstVideoReceiver::stopDecoding(void)
{
#if defined(QGC_GST_STREAMING)
if (!_isOurThread()) {
_post([this]() {
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this]() {
stopDecoding();
});
return;
......@@ -405,15 +395,13 @@ VideoReceiver::stopDecoding(void)
_removingDecoder = true;
_unlinkBranch(_decoderValve);
#endif
}
void
VideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
GstVideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
{
#if defined(QGC_GST_STREAMING)
if (!_isOurThread()) {
_post([this, videoFile, format]() {
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this, videoFile, format]() {
startRecording(videoFile, format);
});
return;
......@@ -421,8 +409,12 @@ VideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
qCDebug(VideoReceiverLog) << "Starting recording";
// exit immediately if we are already recording
if (_pipeline == nullptr || _recording) {
if (_pipeline == nullptr) {
qCDebug(VideoReceiverLog) << "Streaming is not active!";
return;
}
if (_recording) {
qCDebug(VideoReceiverLog) << "Already recording!";
return;
}
......@@ -466,23 +458,18 @@ VideoReceiver::startRecording(const QString& videoFile, FILE_FORMAT format)
g_object_set(_recorderValve, "drop", FALSE, nullptr);
_recording = true;
emit recordingChanged();
qCDebug(VideoReceiverLog) << "Recording started";
#else
Q_UNUSED(videoFile)
Q_UNUSED(format)
#endif
_notificationHandler.dispatch([this](){
emit recordingChanged();
});
}
//-----------------------------------------------------------------------------
void
VideoReceiver::stopRecording(void)
GstVideoReceiver::stopRecording(void)
{
#if defined(QGC_GST_STREAMING)
if (!_isOurThread()) {
_post([this]() {
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this]() {
stopRecording();
});
return;
......@@ -501,38 +488,34 @@ VideoReceiver::stopRecording(void)
_removingRecorder = true;
_unlinkBranch(_recorderValve);
#endif
}
void
VideoReceiver::takeScreenshot(const QString& imageFile)
GstVideoReceiver::takeScreenshot(const QString& imageFile)
{
#if defined(QGC_GST_STREAMING)
if (!_isOurThread()) {
_post([this, imageFile]() {
if (_apiHandler.needDispatch()) {
_apiHandler.dispatch([this, imageFile]() {
takeScreenshot(imageFile);
});
return;
}
// FIXME: AV: record screenshot here
_notificationHandler.dispatch([this](){
emit screenshotComplete();
#else
Q_UNUSED(imageFile);
#endif
});
}
#if defined(QGC_GST_STREAMING)
const char* VideoReceiver::_kFileMux[FILE_FORMAT_MAX - FILE_FORMAT_MIN] = {
const char* GstVideoReceiver::_kFileMux[FILE_FORMAT_MAX - FILE_FORMAT_MIN] = {
"matroskamux",
"qtmux",
"mp4mux"
};
void
VideoReceiver::_watchdog(void)
GstVideoReceiver::_watchdog(void)
{
_post([this](){
_apiHandler.dispatch([this](){
if(_pipeline == nullptr) {
return;
}
......@@ -544,7 +527,10 @@ VideoReceiver::_watchdog(void)
}
if (now - _lastSourceFrameTime > _timeout) {
qCDebug(VideoReceiverLog) << "Stream timeout, no frames for " << now - _lastSourceFrameTime;
_notificationHandler.dispatch([this](){
emit timeout();
});
}
if (_decoding && !_removingDecoder) {
......@@ -553,36 +539,39 @@ VideoReceiver::_watchdog(void)
}
if (now - _lastVideoFrameTime > _timeout * 2) {
qCDebug(VideoReceiverLog) << "Video decoder timeout, no frames for " << now - _lastVideoFrameTime;
_notificationHandler.dispatch([this](){
emit timeout();
});
}
}
});
}
void
VideoReceiver::_handleEOS(void)
GstVideoReceiver::_handleEOS(void)
{
if(_pipeline == nullptr) {
qCWarning(VideoReceiverLog) << "We should not be here";
return;
}
if (!_streaming) {
if (_endOfStream) {
stop();
} else {
if(_decoding && _removingDecoder) {
_shutdownDecodingBranch();
} else if(_recording && _removingRecorder) {
_shutdownRecordingBranch();
} else {
} /*else {
qCWarning(VideoReceiverLog) << "Unexpected EOS!";
stop();
}
}*/
}
}
GstElement*
VideoReceiver::_makeSource(const QString& uri)
GstVideoReceiver::_makeSource(const QString& uri)
{
if (uri.isEmpty()) {
qCCritical(VideoReceiverLog) << "Failed because URI is not specified";
......@@ -742,8 +731,10 @@ VideoReceiver::_makeSource(const QString& uri)
}
GstElement*
VideoReceiver::_makeDecoder(GstCaps* caps, GstElement* videoSink)
GstVideoReceiver::_makeDecoder(GstCaps* caps, GstElement* videoSink)
{
Q_UNUSED(caps);
GstElement* decoder = nullptr;
do {
......@@ -759,7 +750,7 @@ VideoReceiver::_makeDecoder(GstCaps* caps, GstElement* videoSink)
}
GstElement*
VideoReceiver::_makeFileSink(const QString& videoFile, FILE_FORMAT format)
GstVideoReceiver::_makeFileSink(const QString& videoFile, FILE_FORMAT format)
{
GstElement* fileSink = nullptr;
GstElement* mux = nullptr;
......@@ -846,7 +837,7 @@ VideoReceiver::_makeFileSink(const QString& videoFile, FILE_FORMAT format)
}
void
VideoReceiver::_onNewSourcePad(GstPad* pad)
GstVideoReceiver::_onNewSourcePad(GstPad* pad)
{
// FIXME: check for caps - if this is not video stream (and preferably - one of these which we have to support) then simply skip it
if(!gst_element_link(_source, _tee)) {
......@@ -857,7 +848,9 @@ VideoReceiver::_onNewSourcePad(GstPad* pad)
if (!_streaming) {
_streaming = true;
qCDebug(VideoReceiverLog) << "Streaming started";
_notificationHandler.dispatch([this](){
emit streamingChanged();
});
}
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, _eosProbe, this, nullptr);
......@@ -879,7 +872,7 @@ VideoReceiver::_onNewSourcePad(GstPad* pad)
}
void
VideoReceiver::_onNewDecoderPad(GstPad* pad)
GstVideoReceiver::_onNewDecoderPad(GstPad* pad)
{
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-with-new-decoder-pad");
......@@ -889,7 +882,7 @@ VideoReceiver::_onNewDecoderPad(GstPad* pad)
}
bool
VideoReceiver::_addDecoder(GstElement* src)
GstVideoReceiver::_addDecoder(GstElement* src)
{
GstPad* srcpad;
......@@ -942,7 +935,7 @@ VideoReceiver::_addDecoder(GstElement* src)
}
bool
VideoReceiver::_addVideoSink(GstPad* pad)
GstVideoReceiver::_addVideoSink(GstPad* pad)
{
GstCaps* caps = gst_pad_query_caps(pad, nullptr);
......@@ -981,35 +974,36 @@ VideoReceiver::_addVideoSink(GstPad* pad)
}
_decoding = true;
qCDebug(VideoReceiverLog) << "Decoding started";
_notificationHandler.dispatch([this](){
emit decodingChanged();
});
return true;
}
void
VideoReceiver::_noteTeeFrame(void)
GstVideoReceiver::_noteTeeFrame(void)
{
_lastSourceFrameTime = QDateTime::currentSecsSinceEpoch();
}
void
VideoReceiver::_noteVideoSinkFrame(void)
GstVideoReceiver::_noteVideoSinkFrame(void)
{
_lastVideoFrameTime = QDateTime::currentSecsSinceEpoch();
}
void
VideoReceiver::_noteEndOfStream(void)
GstVideoReceiver::_noteEndOfStream(void)
{
if (_streaming) {
_streaming = false;
}
_endOfStream = true;
}
// -Unlink the branch from the src pad
// -Send an EOS event at the beginning of that branch
void
VideoReceiver::_unlinkBranch(GstElement* from)
GstVideoReceiver::_unlinkBranch(GstElement* from)
{
GstPad* src;
......@@ -1053,7 +1047,7 @@ VideoReceiver::_unlinkBranch(GstElement* from)
}
void
VideoReceiver::_shutdownDecodingBranch(void)
GstVideoReceiver::_shutdownDecodingBranch(void)
{
if (_decoder != nullptr) {
GstObject* parent;
......@@ -1097,15 +1091,17 @@ VideoReceiver::_shutdownDecodingBranch(void)
if (_decoding) {
_decoding = false;
emit decodingChanged();
qCDebug(VideoReceiverLog) << "Decoding stopped";
_notificationHandler.dispatch([this](){
emit decodingChanged();
});
}
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-decoding-stopped");
}
void
VideoReceiver::_shutdownRecordingBranch(void)
GstVideoReceiver::_shutdownRecordingBranch(void)
{
gst_bin_remove(GST_BIN(_pipeline), _fileSink);
gst_element_set_state(_fileSink, GST_STATE_NULL);
......@@ -1116,51 +1112,21 @@ VideoReceiver::_shutdownRecordingBranch(void)
if (_recording) {
_recording = false;
emit recordingChanged();
qCDebug(VideoReceiverLog) << "Recording stopped";
_notificationHandler.dispatch([this](){
emit recordingChanged();
});
}
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(_pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-recording-stopped");
}
bool
VideoReceiver::_isOurThread(void)
{
return QThread::currentThread() == (QThread*)this;
}
void
VideoReceiver::_post(Task t)
{
QMutexLocker lock(&_taskQueueSync);
_taskQueue.enqueue(t);
_taskQueueUpdate.wakeOne();
}
void
VideoReceiver::run(void)
{
while(!_shutdown) {
_taskQueueSync.lock();
while (_taskQueue.isEmpty()) {
_taskQueueUpdate.wait(&_taskQueueSync);
}
Task t = _taskQueue.dequeue();
_taskQueueSync.unlock();
t();
}
}
gboolean
VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
GstVideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
{
Q_UNUSED(bus)
Q_ASSERT(msg != nullptr && data != nullptr);
VideoReceiver* pThis = (VideoReceiver*)data;
GstVideoReceiver* pThis = (GstVideoReceiver*)data;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_ERROR:
......@@ -1181,13 +1147,13 @@ VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
error = nullptr;
}
pThis->_post([pThis](){
pThis->_apiHandler.dispatch([pThis](){
pThis->stop();
});
} while(0);
break;
case GST_MESSAGE_EOS:
pThis->_post([pThis](){
pThis->_apiHandler.dispatch([pThis](){
pThis->_handleEOS();
});
break;
......@@ -1208,7 +1174,7 @@ VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
}
if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS) {
pThis->_post([pThis](){
pThis->_apiHandler.dispatch([pThis](){
pThis->_handleEOS();
});
}
......@@ -1225,9 +1191,9 @@ VideoReceiver::_onBusMessage(GstBus* bus, GstMessage* msg, gpointer data)
}
void
VideoReceiver::_onNewPad(GstElement* element, GstPad* pad, gpointer data)
GstVideoReceiver::_onNewPad(GstElement* element, GstPad* pad, gpointer data)
{
VideoReceiver* self = static_cast<VideoReceiver*>(data);
GstVideoReceiver* self = static_cast<GstVideoReceiver*>(data);
if (element == self->_source) {
self->_onNewSourcePad(pad);
......@@ -1239,8 +1205,10 @@ VideoReceiver::_onNewPad(GstElement* element, GstPad* pad, gpointer data)
}
void
VideoReceiver::_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
GstVideoReceiver::_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data)
{
Q_UNUSED(data)
gchar* name;
if ((name = gst_pad_get_name(pad)) == nullptr) {
......@@ -1268,7 +1236,7 @@ VideoReceiver::_wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data
}
void
VideoReceiver::_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
GstVideoReceiver::_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data)
{
bool isRtpPad = false;
......@@ -1334,8 +1302,10 @@ VideoReceiver::_linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpoi
}
gboolean
VideoReceiver::_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
GstVideoReceiver::_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
{
Q_UNUSED(element)
int* probeRes = (int*)user_data;
*probeRes |= 1;
......@@ -1362,8 +1332,12 @@ VideoReceiver::_padProbe(GstElement* element, GstPad* pad, gpointer user_data)
}
gboolean
VideoReceiver::_autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
GstVideoReceiver::_autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
Q_UNUSED(bin)
Q_UNUSED(pad)
Q_UNUSED(element)
GstElement* glupload = (GstElement* )data;
GstPad* sinkpad;
......@@ -1393,8 +1367,12 @@ VideoReceiver::_autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* elem
}
gboolean
VideoReceiver::_autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
GstVideoReceiver::_autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
Q_UNUSED(bin)
Q_UNUSED(pad)
Q_UNUSED(element)
GstElement* glsink = (GstElement* )data;
GstPad* sinkpad;
......@@ -1413,7 +1391,7 @@ VideoReceiver::_autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* e
}
gboolean
VideoReceiver::_autoplugQuery(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
GstVideoReceiver::_autoplugQuery(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data)
{
gboolean ret;
......@@ -1433,13 +1411,13 @@ VideoReceiver::_autoplugQuery(GstElement* bin, GstPad* pad, GstElement* element,
}
GstPadProbeReturn
VideoReceiver::_teeProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
GstVideoReceiver::_teeProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
Q_UNUSED(pad);
Q_UNUSED(pad)
Q_UNUSED(info)
if(user_data != nullptr) {
VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);
GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);
pThis->_noteTeeFrame();
}
......@@ -1447,10 +1425,13 @@ VideoReceiver::_teeProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
}
GstPadProbeReturn
VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
GstVideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
Q_UNUSED(pad)
Q_UNUSED(info)
if(user_data != nullptr) {
VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);
GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);
if (pThis->_resetVideoSink) {
pThis->_resetVideoSink = false;
......@@ -1486,7 +1467,7 @@ VideoReceiver::_videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user
}
GstPadProbeReturn
VideoReceiver::_eosProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
GstVideoReceiver::_eosProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
Q_UNUSED(pad);
Q_ASSERT(user_data != nullptr);
......@@ -1495,7 +1476,7 @@ VideoReceiver::_eosProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
GstEvent* event = gst_pad_probe_info_get_event(info);
if (GST_EVENT_TYPE(event) == GST_EVENT_EOS) {
VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);
GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);
pThis->_noteEndOfStream();
}
}
......@@ -1504,7 +1485,7 @@ VideoReceiver::_eosProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
}
GstPadProbeReturn
VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
GstVideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
if (info == nullptr || user_data == nullptr) {
qCCritical(VideoReceiverLog) << "Invalid arguments";
......@@ -1520,7 +1501,7 @@ VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_
// set media file '0' offset to current timeline position - we don't want to touch other elements in the graph, except those which are downstream!
gst_pad_set_offset(pad, -static_cast<gint64>(buf->pts));
VideoReceiver* pThis = static_cast<VideoReceiver*>(user_data);
GstVideoReceiver* pThis = static_cast<GstVideoReceiver*>(user_data);
qCDebug(VideoReceiverLog) << "Got keyframe, stop dropping buffers";
......@@ -1528,4 +1509,3 @@ VideoReceiver::_keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_
return GST_PAD_PROBE_REMOVE;
}
#endif
......@@ -16,81 +16,87 @@
#pragma once
#include "QGCLoggingCategory.h"
#include <QObject>
#include <QSize>
#include <QTimer>
#include <QTcpSocket>
#include <QThread>
#include <QWaitCondition>
#include <QMutex>
#include <QQueue>
#include <QQuickItem>
#include "VideoReceiver.h"
#if defined(QGC_GST_STREAMING)
#include <gst/gst.h>
typedef GstElement VideoSink;
#else
typedef void VideoSink;
#endif
Q_DECLARE_LOGGING_CATEGORY(VideoReceiverLog)
class VideoReceiver : public QThread
class Worker : public QThread
{
Q_OBJECT
public:
explicit VideoReceiver(QObject* parent = nullptr);
~VideoReceiver(void);
typedef enum {
FILE_FORMAT_MIN = 0,
FILE_FORMAT_MKV = FILE_FORMAT_MIN,
FILE_FORMAT_MOV,
FILE_FORMAT_MP4,
FILE_FORMAT_MAX
} FILE_FORMAT;
Q_PROPERTY(bool streaming READ streaming NOTIFY streamingChanged)
Q_PROPERTY(bool decoding READ decoding NOTIFY decodingChanged)
Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
Q_PROPERTY(QSize videoSize READ videoSize NOTIFY videoSizeChanged)
bool streaming(void) {
return _streaming;
bool needDispatch() {
return QThread::currentThread() != this;
}
bool decoding(void) {
return _decoding;
void dispatch(std::function<void()> t) {
QMutexLocker lock(&_taskQueueSync);
_taskQueue.enqueue(t);
_taskQueueUpdate.wakeOne();
}
bool recording(void) {
return _recording;
void shutdown() {
if (needDispatch()) {
dispatch([this](){
_shutdown = true;
});
QThread::wait();
} else {
QThread::terminate();
}
}
QSize videoSize(void) {
const quint32 size = _videoSize;
return QSize((size >> 16) & 0xFFFF, size & 0xFFFF);
protected:
void run() {
while(!_shutdown) {
_taskQueueSync.lock();
while (_taskQueue.isEmpty()) {
_taskQueueUpdate.wait(&_taskQueueSync);
}
Task t = _taskQueue.dequeue();
_taskQueueSync.unlock();
t();
}
}
signals:
void timeout(void);
void streamingChanged(void);
void decodingChanged(void);
void recordingChanged(void);
void recordingStarted(void);
void videoSizeChanged(void);
void screenshotComplete(void);
private:
typedef std::function<void()> Task;
QWaitCondition _taskQueueUpdate;
QMutex _taskQueueSync;
QQueue<Task> _taskQueue;
bool _shutdown = false;
};
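The Worker thread above replaces the old _post()/_isOurThread()/run() trio. Every public GstVideoReceiver entry point re-dispatches itself onto the _apiHandler worker declared further down, while signals go out through _notificationHandler; a sketch of that idiom, with doSomething() as a hypothetical method name:
// Sketch of the dispatch idiom used throughout GstVideoReceiver.
void GstVideoReceiver::doSomething(void)
{
    if (_apiHandler.needDispatch()) {            // called from a foreign thread?
        _apiHandler.dispatch([this]() {          // re-enter on the API worker thread
            doSomething();
        });
        return;
    }
    // ... GStreamer pipeline manipulation happens here, always on _apiHandler's thread ...
    _notificationHandler.dispatch([this]() {     // signals are emitted from the notification worker
        emit streamingChanged();
    });
}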
class GstVideoReceiver : public VideoReceiver
{
Q_OBJECT
public:
explicit GstVideoReceiver(QObject* parent = nullptr);
~GstVideoReceiver(void);
public slots:
virtual void start(const QString& uri, unsigned timeout);
virtual void stop(void);
virtual void startDecoding(VideoSink* videoSink);
virtual void startDecoding(void* sink);
virtual void stopDecoding(void);
virtual void startRecording(const QString& videoFile, FILE_FORMAT format);
virtual void stopRecording(void);
virtual void takeScreenshot(const QString& imageFile);
#if defined(QGC_GST_STREAMING)
protected slots:
virtual void _watchdog(void);
virtual void _handleEOS(void);
......@@ -116,11 +122,6 @@ protected:
virtual void _shutdownDecodingBranch (void);
virtual void _shutdownRecordingBranch(void);
typedef std::function<void(void)> Task;
bool _isOurThread(void);
void _post(Task t);
void run(void);
private:
static gboolean _onBusMessage(GstBus* bus, GstMessage* message, gpointer user_data);
static void _onNewPad(GstElement* element, GstPad* pad, gpointer data);
......@@ -158,18 +159,14 @@ private:
unsigned _timeout;
QWaitCondition _taskQueueUpdate;
QMutex _taskQueueSync;
QQueue<Task> _taskQueue;
bool _shutdown;
Worker _apiHandler;
Worker _notificationHandler;
static const char* _kFileMux[FILE_FORMAT_MAX - FILE_FORMAT_MIN];
#else
private:
#endif
bool _endOfStream;
std::atomic<bool> _streaming;
std::atomic<bool> _decoding;
std::atomic<bool> _recording;
std::atomic<quint32>_videoSize;
static const char* _kFileMux[FILE_FORMAT_MAX - FILE_FORMAT_MIN];
};
void* createVideoSink(void* widget);
void initializeVideoReceiver(int argc, char* argv[], int debuglevel);
/****************************************************************************
*
* (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
/**
* @file
* @brief QGC Video Receiver
* @author Gus Grubba <gus@auterion.com>
*/
#pragma once
#include <QObject>
#include <QSize>
#include <QQuickItem>
#include <atomic>
class VideoReceiver : public QObject
{
Q_OBJECT
public:
explicit VideoReceiver(QObject* parent = nullptr)
: QObject(parent)
, _streaming(false)
, _decoding(false)
, _recording(false)
, _videoSize(0)
{}
virtual ~VideoReceiver(void) {}
typedef enum {
FILE_FORMAT_MIN = 0,
FILE_FORMAT_MKV = FILE_FORMAT_MIN,
FILE_FORMAT_MOV,
FILE_FORMAT_MP4,
FILE_FORMAT_MAX
} FILE_FORMAT;
Q_PROPERTY(bool streaming READ streaming NOTIFY streamingChanged)
Q_PROPERTY(bool decoding READ decoding NOTIFY decodingChanged)
Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
Q_PROPERTY(QSize videoSize READ videoSize NOTIFY videoSizeChanged)
bool streaming(void) {
return _streaming;
}
bool decoding(void) {
return _decoding;
}
bool recording(void) {
return _recording;
}
QSize videoSize(void) {
const quint32 size = _videoSize;
return QSize((size >> 16) & 0xFFFF, size & 0xFFFF);
}
signals:
void timeout(void);
void streamingChanged(void);
void decodingChanged(void);
void recordingChanged(void);
void recordingStarted(void);
void videoSizeChanged(void);
void screenshotComplete(void);
public slots:
virtual void start(const QString& uri, unsigned timeout) = 0;
virtual void stop(void) = 0;
virtual void startDecoding(void* sink) = 0;
virtual void stopDecoding(void) = 0;
virtual void startRecording(const QString& videoFile, FILE_FORMAT format) = 0;
virtual void stopRecording(void) = 0;
virtual void takeScreenshot(const QString& imageFile) = 0;
protected:
std::atomic<bool> _streaming;
std::atomic<bool> _decoding;
std::atomic<bool> _recording;
std::atomic<quint32>_videoSize;
};
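Callers can now program against this interface without pulling in any GStreamer headers; a short sketch, assuming the receiver and sink are obtained from the QGCCorePlugin factory hooks shown later in this change and that the timeout value is a placeholder:
// Sketch: consuming the abstract VideoReceiver interface only.
#include <QDebug>
#include <QObject>
#include "VideoReceiver.h"
void attachVideo(VideoReceiver* receiver, void* sink, const QString& uri)
{
    QObject::connect(receiver, &VideoReceiver::decodingChanged, [receiver]() {
        qDebug() << "decoding:" << receiver->decoding();
    });
    QObject::connect(receiver, &VideoReceiver::timeout, [receiver]() {
        receiver->stop();                        // restart policy is left to the caller
    });
    receiver->start(uri, 10);                    // placeholder timeout
    receiver->startDecoding(sink);               // sink comes from createVideoSink()
}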
......@@ -122,22 +122,30 @@ VideoEnabled {
DEFINES += \
QGC_GST_STREAMING
INCLUDEPATH += \
$$PWD
iOSBuild {
OBJECTIVE_SOURCES += \
$$PWD/iOS/gst_ios_init.m
INCLUDEPATH += \
$$PWD/iOS
$$PWD/gst_ios_init.m
}
HEADERS += \
$$PWD/GStreamer.h \
$$PWD/GstVideoReceiver.h \
$$PWD/VideoReceiver.h
SOURCES += \
$$PWD/gstqgcvideosinkbin.c \
$$PWD/gstqgc.c
$$PWD/gstqgc.c \
$$PWD/GStreamer.cc \
$$PWD/GstVideoReceiver.cc
include($$PWD/../../qmlglsink.pri)
} else {
LinuxBuild|MacBuild|iOSBuild|WindowsBuild|AndroidBuild {
message("Skipping support for video streaming (GStreamer libraries not installed)")
message("Installation instructions here: https://github.com/mavlink/qgroundcontrol/blob/master/src/VideoStreaming/README.md")
message("Installation instructions here: https://github.com/mavlink/qgroundcontrol/blob/master/src/VideoReceiver/README.md")
} else {
message("Skipping support for video streaming (Unsupported platform)")
}
......
set(EXTRA_LIBRARIES)
if (GST_FOUND)
set(EXTRA_LIBRARIES ${GST_LIBRARIES})
endif()
add_library(VideoStreaming
GLVideoItemStub.cc
GLVideoItemStub.h
gstqgc.c
gstqgcvideosinkbin.c
SubtitleWriter.cc
SubtitleWriter.h
VideoReceiver.cc
VideoReceiver.h
VideoStreaming.cc
VideoStreaming.h
VideoManager.cc
VideoManager.h
)
target_link_libraries(VideoStreaming
PRIVATE
gst_plugins_good
PUBLIC
qgc
Qt5::Multimedia
Qt5::OpenGL
${EXTRA_LIBRARIES}
)
target_include_directories(VideoStreaming INTERFACE ${CMAKE_CURRENT_SOURCE_DIR})
/****************************************************************************
*
* (c) 2009-2020 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
/**
* @file
* @brief QGC Video Streaming Initialization
* @author Gus Grubba <gus@auterion.com>
*/
#pragma once
extern void initializeVideoStreaming (int &argc, char *argv[], int gstDebuglevel);
......@@ -16,7 +16,10 @@
#include "AppMessages.h"
#include "QmlObjectListModel.h"
#include "VideoManager.h"
#if defined(QGC_GST_STREAMING)
#include "GStreamer.h"
#include "VideoReceiver.h"
#endif
#include "QGCLoggingCategory.h"
#include "QGCCameraManager.h"
......@@ -440,7 +443,23 @@ VideoManager* QGCCorePlugin::createVideoManager(QGCApplication *app, QGCToolbox
VideoReceiver* QGCCorePlugin::createVideoReceiver(QObject* parent)
{
return new VideoReceiver(parent);
#if defined(QGC_GST_STREAMING)
return GStreamer::createVideoReceiver(parent);
#else
Q_UNUSED(parent)
return nullptr;
#endif
}
void* QGCCorePlugin::createVideoSink(QObject* parent, QQuickItem* widget)
{
#if defined(QGC_GST_STREAMING)
return GStreamer::createVideoSink(parent, widget);
#else
Q_UNUSED(parent)
Q_UNUSED(widget)
return nullptr;
#endif
}
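Because both hooks are virtual, a downstream plugin can substitute its own receiver and sink; a hedged sketch in which MyCorePlugin and makeMyVideoReceiver() are hypothetical names:
// Sketch: a downstream plugin overriding the new factory hooks.
#include "QGCCorePlugin.h"
VideoReceiver* makeMyVideoReceiver(QObject* parent);    // hypothetical plugin-specific factory
class MyCorePlugin : public QGCCorePlugin
{
    Q_OBJECT
public:
    using QGCCorePlugin::QGCCorePlugin;
    VideoReceiver* createVideoReceiver(QObject* parent) override {
        return makeMyVideoReceiver(parent);             // replaces the GStreamer-backed default
    }
    void* createVideoSink(QObject* parent, QQuickItem* widget) override {
        Q_UNUSED(parent)
        Q_UNUSED(widget)
        return nullptr;                                 // VideoManager then logs "createVideoSink() failed"
    }
};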
bool QGCCorePlugin::guidedActionsControllerLogging() const
......
......@@ -32,9 +32,11 @@ class Vehicle;
class LinkInterface;
class QmlObjectListModel;
class VideoReceiver;
class VideoSink;
class PlanMasterController;
class QGCCameraManager;
class QGCCameraControl;
class QQuickItem;
class QGCCorePlugin : public QGCTool
{
......@@ -113,6 +115,8 @@ public:
virtual VideoManager* createVideoManager(QGCApplication* app, QGCToolbox* toolbox);
/// Allows the plugin to override the creation of VideoReceiver.
virtual VideoReceiver* createVideoReceiver(QObject* parent);
/// Allows the plugin to override the creation of VideoSink.
virtual void* createVideoSink(QObject* parent, QQuickItem* widget);
/// Allows the plugin to see all mavlink traffic to a vehicle
/// @return true: Allow vehicle to continue processing, false: Vehicle should not process message
......