Commit e685642b authored by Gus Grubba

Move Text To Speech to QTextToSpeech

This needs Qt 5.9. Do not merge it yet.
parent 82484b49
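This change drops the per-platform speech backends shown in the diff below (Android TextToSpeech over JNI, Windows SAPI, Linux eSpeak, and the macOS/iOS speech APIs) in favour of Qt's QTextToSpeech module, which is why Qt 5.9 is required. A minimal sketch of the pattern GAudioOutput adopts, for illustration only and not taken verbatim from this commit:

// Minimal sketch, assuming Qt 5.9+ and QT += texttospeech in the .pro file.
#include <QCoreApplication>
#include <QTextToSpeech>

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);
    QTextToSpeech tts;
    // Quit once the engine returns to Ready after finishing the utterance.
    QObject::connect(&tts, &QTextToSpeech::stateChanged,
                     [&app](QTextToSpeech::State state) {
        if (state == QTextToSpeech::Ready)
            app.quit();
    });
    tts.say("QGroundControl text to speech test");
    return app.exec();
}

The same stateChanged/Ready signal is what the new GAudioOutput uses below to drain its queue of pending texts.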
@@ -45,15 +45,13 @@ import android.widget.Toast;
import android.util.Log;
import android.os.PowerManager;
import android.view.WindowManager;
//-- Text To Speech
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
import com.hoho.android.usbserial.driver.*;
import org.qtproject.qt5.android.bindings.QtActivity;
import org.qtproject.qt5.android.bindings.QtApplication;
public class QGCActivity extends QtActivity implements TextToSpeech.OnInitListener
public class QGCActivity extends QtActivity
{
public static int BAD_PORT = 0;
private static QGCActivity m_instance;
@@ -66,7 +64,6 @@ public class QGCActivity extends QtActivity implements TextToSpeech.OnInitListen
private BroadcastReceiver m_UsbReceiver = null;
private final static ExecutorService m_Executor = Executors.newSingleThreadExecutor();
private static final String TAG = "QGC_QGCActivity";
private static TextToSpeech m_tts;
private static PowerManager.WakeLock m_wl;
public static Context m_context;
@@ -111,17 +108,9 @@ public class QGCActivity extends QtActivity implements TextToSpeech.OnInitListen
Log.i(TAG, "Instance created");
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Text To Speech
// Piggybacks on this activity to provide TTS to QGC
//
/////////////////////////////////////////////////////////////////////////////////////////////////////////
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
m_tts = new TextToSpeech(this,this);
PowerManager pm = (PowerManager)m_instance.getSystemService(Context.POWER_SERVICE);
m_wl = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "QGroundControl");
if(m_wl != null) {
@@ -140,7 +129,6 @@ public class QGCActivity extends QtActivity implements TextToSpeech.OnInitListen
m_wl.release();
Log.i(TAG, "SCREEN_BRIGHT_WAKE_LOCK released.");
}
m_tts.shutdown();
} catch(Exception e) {
Log.e(TAG, "Exception onDestroy()");
}
@@ -150,12 +138,6 @@ public class QGCActivity extends QtActivity implements TextToSpeech.OnInitListen
public void onInit(int status) {
}
public static void say(String msg)
{
Log.i(TAG, "Say: " + msg);
m_tts.speak(msg, TextToSpeech.QUEUE_FLUSH, null);
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Find all current devices that match the device filter described in the androidmanifest.xml and the
......
@@ -224,7 +224,8 @@ QT += \
sql \
svg \
widgets \
xml
xml \
texttospeech
# Multimedia only used if UVC is enabled
!contains (DEFINES, QGC_DISABLE_UVC) {
@@ -562,7 +563,6 @@ HEADERS += \
src/Settings/VideoSettings.h \
src/Vehicle/MAVLinkLogManager.h \
src/VehicleSetup/JoystickConfigController.h \
src/audio/QGCAudioWorker.h \
src/comm/LinkConfiguration.h \
src/comm/LinkInterface.h \
src/comm/LinkManager.h \
@@ -657,7 +657,6 @@ HEADERS += \
iOSBuild {
OBJECTIVE_SOURCES += \
src/audio/QGCAudioWorker_iOS.mm \
src/MobileScreenMgr.mm \
}
@@ -741,7 +740,6 @@ SOURCES += \
src/Settings/VideoSettings.cc \
src/Vehicle/MAVLinkLogManager.cc \
src/VehicleSetup/JoystickConfigController.cc \
src/audio/QGCAudioWorker.cpp \
src/comm/LinkConfiguration.cc \
src/comm/LinkInterface.cc \
src/comm/LinkManager.cc \
......
@@ -7,73 +7,132 @@
*
****************************************************************************/
/**
* @file
* @brief Implementation of audio output
*
* @author Lorenz Meier <mavteam@student.ethz.ch>
* @author Thomas Gubler <thomasgubler@gmail.com>
*
*/
#include <QApplication>
#include <QDebug>
#include <QRegularExpression>
#include "GAudioOutput.h"
#include "QGCApplication.h"
#include "QGC.h"
#include "SettingsManager.h"
#if defined __android__
#include <QtAndroidExtras/QtAndroidExtras>
#include <QtAndroidExtras/QAndroidJniObject>
#endif
GAudioOutput::GAudioOutput(QGCApplication* app, QGCToolbox* toolbox)
: QGCTool(app, toolbox)
#ifndef __android__
, thread(new QThread())
, worker(new QGCAudioWorker())
#endif
{
#ifndef __android__
worker->moveToThread(thread);
connect(this, &GAudioOutput::textToSpeak, worker, &QGCAudioWorker::say);
connect(thread, &QThread::finished, thread, &QObject::deleteLater);
connect(thread, &QThread::finished, worker, &QObject::deleteLater);
thread->start();
#endif
_tts = new QTextToSpeech(this);
connect(_tts, &QTextToSpeech::stateChanged, this, &GAudioOutput::_stateChanged);
}
GAudioOutput::~GAudioOutput()
{
#ifndef __android__
thread->quit();
#endif
}
bool GAudioOutput::say(const QString& inText)
{
bool muted = qgcApp()->toolbox()->settingsManager()->appSettings()->audioMuted()->rawValue().toBool();
muted |= qgcApp()->runningUnitTests();
if (!muted && !qgcApp()->runningUnitTests()) {
#if defined __android__
#if defined QGC_SPEECH_ENABLED
static const char V_jniClassName[] {"org/mavlink/qgroundcontrol/QGCActivity"};
QAndroidJniEnvironment env;
if (env->ExceptionCheck()) {
env->ExceptionDescribe();
env->ExceptionClear();
QString text = fixTextMessageForAudio(inText);
if(_tts->state() == QTextToSpeech::Speaking) {
if(!_texts.contains(text)) {
//-- Some arbitrary limit
if(_texts.size() > 20) {
_texts.removeFirst();
}
_texts.append(text);
}
} else {
_tts->say(text);
}
QString text = QGCAudioWorker::fixTextMessageForAudio(inText);
QAndroidJniObject javaMessage = QAndroidJniObject::fromString(text);
QAndroidJniObject::callStaticMethod<void>(V_jniClassName, "say", "(Ljava/lang/String;)V", javaMessage.object<jstring>());
#endif
#else
emit textToSpeak(inText);
#endif
}
return true;
}
void GAudioOutput::_stateChanged(QTextToSpeech::State state)
{
if(state == QTextToSpeech::Ready) {
if(_texts.size()) {
QString text = _texts.first();
_texts.removeFirst();
_tts->say(text);
}
}
}
bool GAudioOutput::getMillisecondString(const QString& string, QString& match, int& number) {
static QRegularExpression re("([0-9]+ms)");
QRegularExpressionMatchIterator i = re.globalMatch(string);
while (i.hasNext()) {
QRegularExpressionMatch qmatch = i.next();
if (qmatch.hasMatch()) {
match = qmatch.captured(0);
number = qmatch.captured(0).replace("ms", "").toInt();
return true;
}
}
return false;
}
QString GAudioOutput::fixTextMessageForAudio(const QString& string) {
QString match;
QString newNumber;
QString result = string;
//-- Look for codified terms
if(result.contains("ERR ", Qt::CaseInsensitive)) {
result.replace("ERR ", "error ", Qt::CaseInsensitive);
}
if(result.contains("ERR:", Qt::CaseInsensitive)) {
result.replace("ERR:", "error.", Qt::CaseInsensitive);
}
if(result.contains("POSCTL", Qt::CaseInsensitive)) {
result.replace("POSCTL", "Position Control", Qt::CaseInsensitive);
}
if(result.contains("ALTCTL", Qt::CaseInsensitive)) {
result.replace("ALTCTL", "Altitude Control", Qt::CaseInsensitive);
}
if(result.contains("AUTO_RTL", Qt::CaseInsensitive)) {
result.replace("AUTO_RTL", "auto Return To Launch", Qt::CaseInsensitive);
} else if(result.contains("RTL", Qt::CaseInsensitive)) {
result.replace("RTL", "Return To Launch", Qt::CaseInsensitive);
}
if(result.contains("ACCEL ", Qt::CaseInsensitive)) {
result.replace("ACCEL ", "accelerometer ", Qt::CaseInsensitive);
}
if(result.contains("RC_MAP_MODE_SW", Qt::CaseInsensitive)) {
result.replace("RC_MAP_MODE_SW", "RC mode switch", Qt::CaseInsensitive);
}
if(result.contains("REJ.", Qt::CaseInsensitive)) {
result.replace("REJ.", "Rejected", Qt::CaseInsensitive);
}
if(result.contains("WP", Qt::CaseInsensitive)) {
result.replace("WP", "way point", Qt::CaseInsensitive);
}
if(result.contains("CMD", Qt::CaseInsensitive)) {
result.replace("CMD", "command", Qt::CaseInsensitive);
}
if(result.contains("COMPID", Qt::CaseInsensitive)) {
result.replace("COMPID", "component eye dee", Qt::CaseInsensitive);
}
if(result.contains(" params ", Qt::CaseInsensitive)) {
result.replace(" params ", " parameters ", Qt::CaseInsensitive);
}
if(result.contains(" id ", Qt::CaseInsensitive)) {
result.replace(" id ", " eye dee ", Qt::CaseInsensitive);
}
if(result.contains(" ADSB ", Qt::CaseInsensitive)) {
result.replace(" ADSB ", " Hey Dee Ess Bee ", Qt::CaseInsensitive);
}
int number;
if(getMillisecondString(string, match, number) && number > 1000) {
if(number < 60000) {
int seconds = number / 1000;
newNumber = QString("%1 second%2").arg(seconds).arg(seconds > 1 ? "s" : "");
} else {
int minutes = number / 60000;
int seconds = (number - (minutes * 60000)) / 1000;
if (!seconds) {
newNumber = QString("%1 minute%2").arg(minutes).arg(minutes > 1 ? "s" : "");
} else {
newNumber = QString("%1 minute%2 and %3 second%4").arg(minutes).arg(minutes > 1 ? "s" : "").arg(seconds).arg(seconds > 1 ? "s" : "");
}
}
result.replace(match, newNumber);
}
return result;
}
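To illustrate the millisecond rewriting performed by fixTextMessageForAudio() above, a hypothetical standalone check (not part of the commit):

// Illustrative only: a hypothetical check of the static helper added above.
#include "GAudioOutput.h"
#include <QDebug>

void fixTextExample()
{
    QString in  = QStringLiteral("Mission item REJ. after 90000ms");
    QString out = GAudioOutput::fixTextMessageForAudio(in);
    // "REJ." becomes "Rejected"; 90000ms (>= 60000) becomes minutes and seconds.
    qDebug() << out;  // "Mission item Rejected after 1 minute and 30 seconds"
}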
@@ -7,75 +7,35 @@
*
****************************************************************************/
/**
* @file
* @brief Definition of audio output
*
* @author Lorenz Meier <mavteam@student.ethz.ch>
*
*/
#ifndef GAUDIOOUTPUT_H
#define GAUDIOOUTPUT_H
#pragma once
#include <QObject>
#include <QTimer>
#include <QThread>
#include <QStringList>
#include <QTextToSpeech>
#include "QGCAudioWorker.h"
#include "QGCToolbox.h"
class QGCApplication;
/**
* @brief Audio Output (speech synthesizer and "beep" output)
* This class follows the singleton design pattern
* @see http://en.wikipedia.org/wiki/Singleton_pattern
*/
class GAudioOutput : public QGCTool
{
Q_OBJECT
public:
GAudioOutput(QGCApplication* app, QGCToolbox* toolbox);
~GAudioOutput();
/** @brief List available voices */
QStringList listVoices(void);
enum
{
VOICE_MALE = 0,
VOICE_FEMALE
} QGVoice;
enum AUDIO_SEVERITY
{
AUDIO_SEVERITY_EMERGENCY = 0,
AUDIO_SEVERITY_ALERT = 1,
AUDIO_SEVERITY_CRITICAL = 2,
AUDIO_SEVERITY_ERROR = 3,
AUDIO_SEVERITY_WARNING = 4,
AUDIO_SEVERITY_NOTICE = 5,
AUDIO_SEVERITY_INFO = 6,
AUDIO_SEVERITY_DEBUG = 7
};
static bool getMillisecondString (const QString& string, QString& match, int& number);
static QString fixTextMessageForAudio (const QString& string);
public slots:
/** @brief Say this text */
bool say(const QString& text);
bool say (const QString& text);
signals:
bool textToSpeak(QString text);
void beepOnce();
private slots:
void _stateChanged (QTextToSpeech::State state);
protected:
#if !defined __android__
QThread* thread;
QGCAudioWorker* worker;
#endif
QTextToSpeech* _tts;
QStringList _texts;
};
#endif // AUDIOOUTPUT_H
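A usage sketch for the GAudioOutput class declared above; the audioOutput() accessor on the toolbox is an assumption made here for illustration and is not part of this diff:

// Usage sketch; assumes QGCToolbox exposes this tool through an audioOutput() accessor.
#include "GAudioOutput.h"
#include "QGCApplication.h"

void announceLowBattery()
{
    // say() skips speaking when audio is muted or unit tests are running.
    qgcApp()->toolbox()->audioOutput()->say(QStringLiteral("Low battery"));
}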
#include <QSettings>
#include <QDebug>
#include <QCoreApplication>
#include <QFile>
#include <QRegularExpression>
#include "QGC.h"
#include "QGCAudioWorker.h"
#include "GAudioOutput.h"
#if (defined __macos__) && defined QGC_SPEECH_ENABLED
#include <ApplicationServices/ApplicationServices.h>
#include <MacTypes.h>
void macSpeak(const char* words)
{
static SpeechChannel sc = NULL;
while (SpeechBusy()) {
QGC::SLEEP::msleep(100);
}
if (sc == NULL) {
Float32 volume = 1.0;
NewSpeechChannel(NULL, &sc);
CFNumberRef volumeRef = CFNumberCreate(NULL, kCFNumberFloat32Type, &volume);
SetSpeechProperty(sc, kSpeechVolumeProperty, volumeRef);
CFRelease(volumeRef);
}
CFStringRef strRef = CFStringCreateWithCString(NULL, words, kCFStringEncodingUTF8);
SpeakCFString(sc, strRef, NULL);
CFRelease(strRef);
}
#endif
#if (defined __ios__) && defined QGC_SPEECH_ENABLED
extern void iOSSpeak(QString msg);
#endif
// Windows speech synthesis uses SAPI and is only built with the MSVC compiler
#if defined _MSC_VER && defined QGC_SPEECH_ENABLED
// Documentation: http://msdn.microsoft.com/en-us/library/ee125082%28v=VS.85%29.aspx
#include <sapi.h>
#endif
#if defined Q_OS_LINUX && !defined __android__ && defined QGC_SPEECH_ENABLED
// Using eSpeak for speech synthesis: following https://github.com/mondhs/espeak-sample/blob/master/sampleSpeak.cpp
#include <espeak/speak_lib.h>
#endif
#define QGC_GAUDIOOUTPUT_KEY QString("QGC_AUDIOOUTPUT_")
QGCAudioWorker::QGCAudioWorker(QObject *parent) :
QObject(parent),
voiceIndex(0),
#if defined _MSC_VER && defined QGC_SPEECH_ENABLED
pVoice(NULL),
#endif
emergency(false),
muted(false)
{
// Load settings
QSettings settings;
muted = settings.value(QGC_GAUDIOOUTPUT_KEY + "muted", muted).toBool();
}
void QGCAudioWorker::init()
{
#if defined Q_OS_LINUX && !defined __android__ && defined QGC_SPEECH_ENABLED
espeak_Initialize(AUDIO_OUTPUT_PLAYBACK, 500, NULL, 0); // initialize for playback with 500ms buffer and no options (see speak_lib.h)
espeak_VOICE *espeak_voice = espeak_GetCurrentVoice();
espeak_voice->languages = "en-uk"; // Default to British English
espeak_voice->identifier = NULL; // no specific voice file specified
espeak_voice->name = "klatt"; // espeak voice name
espeak_voice->gender = 2; // Female
espeak_voice->age = 0; // age not specified
espeak_SetVoiceByProperties(espeak_voice);
#endif
#if defined _MSC_VER && defined QGC_SPEECH_ENABLED
if (FAILED(::CoInitialize(NULL)))
{
qDebug() << "ERROR: Creating COM object for audio output failed!";
}
else
{
HRESULT hr = CoCreateInstance(CLSID_SpVoice, NULL, CLSCTX_ALL, IID_ISpVoice, (void **)&pVoice);
if (FAILED(hr))
{
qDebug() << "ERROR: Initializing voice for audio output failed!";
}
}
#endif
}
QGCAudioWorker::~QGCAudioWorker()
{
#if defined _MSC_VER && defined QGC_SPEECH_ENABLED
if (pVoice) {
pVoice->Release();
pVoice = NULL;
}
::CoUninitialize();
#endif
}
void QGCAudioWorker::say(QString inText)
{
#ifdef __android__
Q_UNUSED(inText);
#else
static bool threadInit = false;
if (!threadInit) {
threadInit = true;
init();
}
if (!muted)
{
QString text = fixTextMessageForAudio(inText);
#if defined _MSC_VER && defined QGC_SPEECH_ENABLED
HRESULT hr = pVoice->Speak(text.toStdWString().c_str(), SPF_DEFAULT, NULL);
if (FAILED(hr)) {
qDebug() << "Speak failed, HR:" << QString("%1").arg(hr, 0, 16);
}
#elif defined Q_OS_LINUX && defined QGC_SPEECH_ENABLED
// Set size of string for espeak: +1 for the null-character
unsigned int espeak_size = strlen(text.toStdString().c_str()) + 1;
espeak_Synth(text.toStdString().c_str(), espeak_size, 0, POS_CHARACTER, 0, espeakCHARS_AUTO, NULL, NULL);
espeak_Synchronize();
#elif (defined __macos__) && defined QGC_SPEECH_ENABLED
macSpeak(text.toStdString().c_str());
#elif (defined __ios__) && defined QGC_SPEECH_ENABLED
iOSSpeak(text);
#else
// Make sure there isn't an unused variable warning when speech output is disabled
Q_UNUSED(inText);
#endif
}
#endif // __android__
}
void QGCAudioWorker::mute(bool mute)
{
if (mute != muted)
{
this->muted = mute;
QSettings settings;
settings.setValue(QGC_GAUDIOOUTPUT_KEY + "muted", this->muted);
// emit mutedChanged(muted);
}
}
bool QGCAudioWorker::isMuted()
{
return this->muted;
}
bool QGCAudioWorker::_getMillisecondString(const QString& string, QString& match, int& number) {
static QRegularExpression re("([0-9]+ms)");
QRegularExpressionMatchIterator i = re.globalMatch(string);
while (i.hasNext()) {
QRegularExpressionMatch qmatch = i.next();
if (qmatch.hasMatch()) {
match = qmatch.captured(0);
number = qmatch.captured(0).replace("ms", "").toInt();
return true;
}
}
return false;
}
QString QGCAudioWorker::fixTextMessageForAudio(const QString& string) {
QString match;
QString newNumber;
QString result = string;
//-- Look for codified terms
if(result.contains("ERR ", Qt::CaseInsensitive)) {
result.replace("ERR ", "error ", Qt::CaseInsensitive);
}
if(result.contains("ERR:", Qt::CaseInsensitive)) {
result.replace("ERR:", "error.", Qt::CaseInsensitive);
}
if(result.contains("POSCTL", Qt::CaseInsensitive)) {
result.replace("POSCTL", "Position Control", Qt::CaseInsensitive);
}
if(result.contains("ALTCTL", Qt::CaseInsensitive)) {
result.replace("ALTCTL", "Altitude Control", Qt::CaseInsensitive);
}
if(result.contains("AUTO_RTL", Qt::CaseInsensitive)) {
result.replace("AUTO_RTL", "auto Return To Launch", Qt::CaseInsensitive);
} else if(result.contains("RTL", Qt::CaseInsensitive)) {
result.replace("RTL", "Return To Launch", Qt::CaseInsensitive);
}
if(result.contains("ACCEL ", Qt::CaseInsensitive)) {
result.replace("ACCEL ", "accelerometer ", Qt::CaseInsensitive);
}
if(result.contains("RC_MAP_MODE_SW", Qt::CaseInsensitive)) {
result.replace("RC_MAP_MODE_SW", "RC mode switch", Qt::CaseInsensitive);
}
if(result.contains("REJ.", Qt::CaseInsensitive)) {
result.replace("REJ.", "Rejected", Qt::CaseInsensitive);
}
if(result.contains("WP", Qt::CaseInsensitive)) {
result.replace("WP", "way point", Qt::CaseInsensitive);
}
if(result.contains("CMD", Qt::CaseInsensitive)) {
result.replace("CMD", "command", Qt::CaseInsensitive);
}
if(result.contains("COMPID", Qt::CaseInsensitive)) {
result.replace("COMPID", "component eye dee", Qt::CaseInsensitive);
}
if(result.contains(" params ", Qt::CaseInsensitive)) {
result.replace(" params ", " parameters ", Qt::CaseInsensitive);
}
if(result.contains(" id ", Qt::CaseInsensitive)) {
result.replace(" id ", " eye dee ", Qt::CaseInsensitive);
}
if(result.contains(" ADSB ", Qt::CaseInsensitive)) {
result.replace(" ADSB ", " Hey Dee Ess Bee ", Qt::CaseInsensitive);
}
int number;
if(_getMillisecondString(string, match, number) && number > 1000) {
if(number < 60000) {
int seconds = number / 1000;
newNumber = QString("%1 second%2").arg(seconds).arg(seconds > 1 ? "s" : "");
} else {
int minutes = number / 60000;
int seconds = (number - (minutes * 60000)) / 1000;
if (!seconds) {
newNumber = QString("%1 minute%2").arg(minutes).arg(minutes > 1 ? "s" : "");
} else {
newNumber = QString("%1 minute%2 and %3 second%4").arg(minutes).arg(minutes > 1 ? "s" : "").arg(seconds).arg(seconds > 1 ? "s" : "");
}
}
result.replace(match, newNumber);
}
// qDebug() << "Speech: " << result;
return result;
}
#ifndef QGCAUDIOWORKER_H
#define QGCAUDIOWORKER_H
#include <QObject>
#include <QTimer>
#if defined _MSC_VER && defined QGC_SPEECH_ENABLED
// Documentation: http://msdn.microsoft.com/en-us/library/ee125082%28v=VS.85%29.aspx
#include <basetyps.h>
#include <sapi.h>
#endif
class QGCAudioWorker : public QObject
{
Q_OBJECT
public:
explicit QGCAudioWorker(QObject *parent = 0);
~QGCAudioWorker();
void mute(bool mute);
bool isMuted();
void init();
static QString fixTextMessageForAudio(const QString& string);
signals:
public slots:
/** @brief Say this text */
void say(QString text);
protected:
int voiceIndex; ///< The index of the flite voice to use (awb, slt, rms)
#if defined _MSC_VER && defined QGC_SPEECH_ENABLED
ISpVoice *pVoice;
#endif
bool emergency; ///< Emergency status flag
QTimer *emergencyTimer;
bool muted;
private:
static bool _getMillisecondString(const QString& string, QString& match, int& number);
};
#endif // QGCAUDIOWORKER_H
/****************************************************************************
*
* (c) 2009-2016 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
*
* QGroundControl is licensed according to the terms in the file
* COPYING.md in the root of the source code directory.
*
****************************************************************************/
/**
* @file
* @brief TTS for iOS
*
* @author Gus Grubba <mavlink@grubba.com>
*
*/
#include <QString>
#include "QGC.h"
#if (defined __ios__) && defined QGC_SPEECH_ENABLED
#import <AVFoundation/AVSpeechSynthesis.h>
class SpeakIOS
{
public:
SpeakIOS ();
~SpeakIOS ();
void speak (QString msg );
private:
AVSpeechSynthesizer *_synth;
};
SpeakIOS::SpeakIOS()
: _synth([[AVSpeechSynthesizer alloc] init])
{
}
SpeakIOS::~SpeakIOS()
{
[_synth release];
}
void SpeakIOS::speak(QString msg)
{
while ([_synth isSpeaking]) {
QGC::SLEEP::msleep(100);
}
NSString *msg_ns = [NSString stringWithCString:msg.toStdString().c_str() encoding:[NSString defaultCStringEncoding]];
AVSpeechUtterance *utterance = [[[AVSpeechUtterance alloc] initWithString: msg_ns] autorelease];
AVSpeechSynthesisVoice* currentVoice = [AVSpeechSynthesisVoice voiceWithLanguage:[AVSpeechSynthesisVoice currentLanguageCode]];
utterance.voice = currentVoice;
utterance.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"en-US"];
utterance.rate = 0.5;
[_synth speakUtterance:utterance];
}
//-- The one and only static singleton
SpeakIOS kSpeakIOS;
void iOSSpeak(QString msg)
{
kSpeakIOS.speak(msg);
}
#endif