Commit afca96f8 authored by Jacob Walser's avatar Jacob Walser

Move recording button to flight view

parent 782c92c4
...@@ -260,6 +260,42 @@ QGCView { ...@@ -260,6 +260,42 @@ QGCView {
visible: singleVehicleView.checked visible: singleVehicleView.checked
} }
// Button to start/stop video recording
Item {
z: _flightVideoPipControl.z + 1
anchors.margins: ScreenTools.defaultFontPixelHeight / 2
anchors.bottom: _flightVideo.bottom
anchors.right: _flightVideo.right
height: ScreenTools.defaultFontPixelHeight * 2
width: height
visible: QGroundControl.videoManager.videoRunning
opacity: 0.75
Rectangle {
anchors.top: parent.top
anchors.bottom: parent.bottom
width: height
radius: QGroundControl.videoManager.videoReceiver.recording ? 0 : height
color: "red"
}
QGCColoredImage {
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.horizontalCenter: parent.horizontalCenter
width: height * 0.625
sourceSize.width: width
source: "/qmlimages/CameraIcon.svg"
fillMode: Image.PreserveAspectFit
color: "white"
}
MouseArea {
anchors.fill: parent
onClicked: QGroundControl.videoManager.videoReceiver.recording ? QGroundControl.videoManager.videoReceiver.stopRecording() : QGroundControl.videoManager.videoReceiver.startRecording()
}
}
MultiVehicleList { MultiVehicleList {
anchors.margins: _margins anchors.margins: _margins
anchors.top: singleMultiSelector.bottom anchors.top: singleMultiSelector.bottom
......
...@@ -27,7 +27,7 @@ ...@@ -27,7 +27,7 @@
static const char* kVideoSourceKey = "VideoSource"; static const char* kVideoSourceKey = "VideoSource";
static const char* kVideoUDPPortKey = "VideoUDPPort"; static const char* kVideoUDPPortKey = "VideoUDPPort";
static const char* kVideoRTSPUrlKey = "VideoRTSPUrl"; static const char* kVideoRTSPUrlKey = "VideoRTSPUrl";
static const char* kVideoSavePathKey = "VideoSaveDir"; static const char* kVideoSavePathKey = "VideoSavePath";
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
static const char* kUDPStream = "UDP Video Stream"; static const char* kUDPStream = "UDP Video Stream";
static const char* kRTSPStream = "RTSP Video Stream"; static const char* kRTSPStream = "RTSP Video Stream";
...@@ -249,7 +249,6 @@ void VideoManager::_updateTimer() ...@@ -249,7 +249,6 @@ void VideoManager::_updateTimer()
} }
else else
{ {
// if(_videoSurface && _videoSurface->lastFrame()) {
if(_videoSurface && _videoReceiver->streaming()) { if(_videoSurface && _videoReceiver->streaming()) {
qDebug() << _videoSurface->lastFrame(); qDebug() << _videoSurface->lastFrame();
if(!_videoRunning) if(!_videoRunning)
......
...@@ -29,6 +29,7 @@ GstElement* VideoReceiver::_tee = NULL; ...@@ -29,6 +29,7 @@ GstElement* VideoReceiver::_tee = NULL;
// -EOS has appeared on the bus of the temporary pipeline // -EOS has appeared on the bus of the temporary pipeline
// -At this point all of the recording elements have been flushed, and the video file has been finalized // -At this point all of the recording elements have been flushed, and the video file has been finalized
// -Now we can remove the temporary pipeline and its elements // -Now we can remove the temporary pipeline and its elements
#if defined(QGC_GST_STREAMING)
gboolean VideoReceiver::_eosCB(GstBus* bus, GstMessage* message, gpointer user_data) gboolean VideoReceiver::_eosCB(GstBus* bus, GstMessage* message, gpointer user_data)
{ {
Q_UNUSED(bus); Q_UNUSED(bus);
...@@ -55,11 +56,13 @@ gboolean VideoReceiver::_eosCB(GstBus* bus, GstMessage* message, gpointer user_d ...@@ -55,11 +56,13 @@ gboolean VideoReceiver::_eosCB(GstBus* bus, GstMessage* message, gpointer user_d
return true; return true;
} }
#endif
// -Unlink the recording branch from the tee in the main pipeline // -Unlink the recording branch from the tee in the main pipeline
// -Create a second temporary pipeline, and place the recording branch elements into that pipeline // -Create a second temporary pipeline, and place the recording branch elements into that pipeline
// -Setup watch and handler for EOS event on the temporary pipeline's bus // -Setup watch and handler for EOS event on the temporary pipeline's bus
// -Send an EOS event at the beginning of that pipeline and set up a callback for // -Send an EOS event at the beginning of that pipeline and set up a callback for
#if defined(QGC_GST_STREAMING)
GstPadProbeReturn VideoReceiver::_unlinkCB(GstPad* pad, GstPadProbeInfo* info, gpointer user_data) GstPadProbeReturn VideoReceiver::_unlinkCB(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{ {
Q_UNUSED(pad); Q_UNUSED(pad);
...@@ -101,6 +104,7 @@ GstPadProbeReturn VideoReceiver::_unlinkCB(GstPad* pad, GstPadProbeInfo* info, g ...@@ -101,6 +104,7 @@ GstPadProbeReturn VideoReceiver::_unlinkCB(GstPad* pad, GstPadProbeInfo* info, g
return GST_PAD_PROBE_REMOVE; return GST_PAD_PROBE_REMOVE;
} }
#endif
// When we finish our pipeline will look like this: // When we finish our pipeline will look like this:
// //
...@@ -115,16 +119,16 @@ GstPadProbeReturn VideoReceiver::_unlinkCB(GstPad* pad, GstPadProbeInfo* info, g ...@@ -115,16 +119,16 @@ GstPadProbeReturn VideoReceiver::_unlinkCB(GstPad* pad, GstPadProbeInfo* info, g
// +--------------------------------------+ // +--------------------------------------+
void VideoReceiver::startRecording(void) void VideoReceiver::startRecording(void)
{ {
#if defined(QGC_GST_STREAMING)
// exit immediately if we are already recording // exit immediately if we are already recording
if(_pipeline == NULL || _recording) { if(_pipeline == NULL || _recording) {
return; return;
} }
_sink = g_new0(Sink, 1); _sink = g_new0(Sink, 1);
_sink->teepad = gst_element_get_request_pad(_tee, "src_%u");
_sink->teepad = gst_element_get_request_pad(_tee, "src_%u"); _sink->queue = gst_element_factory_make("queue", NULL);
_sink->queue = gst_element_factory_make("queue", NULL); _sink->mux = gst_element_factory_make("matroskamux", NULL);
_sink->mux = gst_element_factory_make("matroskamux", NULL);
_sink->filesink = gst_element_factory_make("filesink", NULL); _sink->filesink = gst_element_factory_make("filesink", NULL);
_sink->removing = false; _sink->removing = false;
...@@ -154,10 +158,12 @@ void VideoReceiver::startRecording(void) ...@@ -154,10 +158,12 @@ void VideoReceiver::startRecording(void)
_recording = true; _recording = true;
emit recordingChanged(); emit recordingChanged();
#endif
} }
void VideoReceiver::stopRecording(void) void VideoReceiver::stopRecording(void)
{ {
#if defined(QGC_GST_STREAMING)
// exit immediately if we are not recording // exit immediately if we are not recording
if(_pipeline == NULL || !_recording) { if(_pipeline == NULL || !_recording) {
return; return;
...@@ -167,6 +173,7 @@ void VideoReceiver::stopRecording(void) ...@@ -167,6 +173,7 @@ void VideoReceiver::stopRecording(void)
_recording = false; _recording = false;
emit recordingChanged(); emit recordingChanged();
#endif
} }
VideoReceiver::VideoReceiver(QObject* parent) VideoReceiver::VideoReceiver(QObject* parent)
...@@ -211,16 +218,16 @@ void VideoReceiver::setVideoSink(GstElement* sink) ...@@ -211,16 +218,16 @@ void VideoReceiver::setVideoSink(GstElement* sink)
#endif #endif
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
static void newPadCB(GstElement * element, GstPad* pad, gpointer data) static void newPadCB(GstElement* element, GstPad* pad, gpointer data)
{ {
gchar *name; gchar* name;
name = gst_pad_get_name(pad); name = gst_pad_get_name(pad);
g_print("A new pad %s was created\n", name); g_print("A new pad %s was created\n", name);
GstCaps * p_caps = gst_pad_get_pad_template_caps (pad); GstCaps* p_caps = gst_pad_get_pad_template_caps (pad);
gchar * description = gst_caps_to_string(p_caps); gchar* description = gst_caps_to_string(p_caps);
qDebug() << p_caps << ", " << description; qDebug() << p_caps << ", " << description;
g_free(description); g_free(description);
GstElement * p_rtph264depay = GST_ELEMENT(data); GstElement* p_rtph264depay = GST_ELEMENT(data);
if(gst_element_link_pads(element, name, p_rtph264depay, "sink") == false) if(gst_element_link_pads(element, name, p_rtph264depay, "sink") == false)
qCritical() << "newPadCB : failed to link elements\n"; qCritical() << "newPadCB : failed to link elements\n";
g_free(name); g_free(name);
...@@ -272,7 +279,6 @@ void VideoReceiver::_timeout() ...@@ -272,7 +279,6 @@ void VideoReceiver::_timeout()
} }
#endif #endif
// When we finish our pipeline will look like this: // When we finish our pipeline will look like this:
// //
// +-->queue-->decoder-->_videosink // +-->queue-->decoder-->_videosink
...@@ -313,7 +319,7 @@ void VideoReceiver::start() ...@@ -313,7 +319,7 @@ void VideoReceiver::start()
GstElement* decoder = NULL; GstElement* decoder = NULL;
GstElement* queue1 = NULL; GstElement* queue1 = NULL;
// Pads to link queues and tee // Pads to link queue and tee
GstPad* teeSrc1 = NULL; // tee source pad 1 GstPad* teeSrc1 = NULL; // tee source pad 1
GstPad* q1Sink = NULL; // queue1 sink pad GstPad* q1Sink = NULL; // queue1 sink pad
...@@ -375,16 +381,17 @@ void VideoReceiver::start() ...@@ -375,16 +381,17 @@ void VideoReceiver::start()
gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue1, decoder, _videoSink, NULL); gst_bin_add_many(GST_BIN(_pipeline), dataSource, demux, parser, _tee, queue1, decoder, _videoSink, NULL);
// if(isUdp) { if(isUdp) {
// res = gst_element_link_many(dataSource, demux, parser, decoder, tee, _videoSink, NULL); // Link the pipeline in front of the tee
// } else { if(!gst_element_link_many(dataSource, demux, parser, _tee, NULL)) {
// res = gst_element_link_many(demux, parser, decoder, tee, _videoSink, NULL); qCritical() << "Unable to link datasource and tee.";
// } break;
}
// Link the pipeline in front of the tee } else {
if(!gst_element_link_many(dataSource, demux, parser, _tee, NULL)) { if(!gst_element_link_many(demux, parser, _tee, NULL)) {
qCritical() << "Unable to link datasource and tee."; qCritical() << "Unable to link datasource and tee.";
break; break;
}
} }
// Link the videostream to queue1 // Link the videostream to queue1
...@@ -498,11 +505,8 @@ void VideoReceiver::setVideoSavePath(const QString & path) ...@@ -498,11 +505,8 @@ void VideoReceiver::setVideoSavePath(const QString & path)
#if defined(QGC_GST_STREAMING) #if defined(QGC_GST_STREAMING)
void VideoReceiver::_onBusMessage(GstMessage* msg) void VideoReceiver::_onBusMessage(GstMessage* msg)
{ {
//qDebug() << "Got bus message";
switch (GST_MESSAGE_TYPE(msg)) { switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_EOS: case GST_MESSAGE_EOS:
qDebug() << "Got EOS";
stop(); stop();
break; break;
case GST_MESSAGE_ERROR: case GST_MESSAGE_ERROR:
......
...@@ -541,48 +541,6 @@ Item { ...@@ -541,48 +541,6 @@ Item {
onClicked: flightModesMenu.popup() onClicked: flightModesMenu.popup()
} }
} // QGCLabel - Flight mode selector } // QGCLabel - Flight mode selector
Rectangle {
// anchors.margins: ScreenTools.defaultFontPixelHeight / 2
anchors.top: parent.top
anchors.bottom: parent.bottom
width: 1
color: qgcPal.text
visible: QGroundControl.videoManager.videoRunning
}
//-------------------------------------------------------------------------
//-- Video Recording
Item {
anchors.top: parent.top
anchors.bottom: parent.bottom
width: height
visible: QGroundControl.videoManager.videoRunning
Rectangle {
anchors.top: parent.top
anchors.bottom: parent.bottom
width: height
radius: QGroundControl.videoManager.videoReceiver.recording ? 0 : height
color: colorRed
}
QGCColoredImage {
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.horizontalCenter: parent.horizontalCenter
width: height * 0.625
sourceSize.width: width
source: "/qmlimages/CameraIcon.svg"
fillMode: Image.PreserveAspectFit
color: colorWhite
}
MouseArea {
anchors.fill: parent
onClicked: QGroundControl.videoManager.videoReceiver.recording? QGroundControl.videoManager.videoReceiver.stopRecording() : QGroundControl.videoManager.videoReceiver.startRecording()
}
}
} // Row - Vehicle indicators } // Row - Vehicle indicators
Image { Image {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment