diff --git a/CHANGELOG.md b/CHANGELOG.md
deleted file mode 100644
index 9824752..0000000
--- a/CHANGELOG.md
+++ /dev/null
@@ -1,13 +0,0 @@
-## [project-title] Changelog
-
-
-# x.y.z (yyyy-mm-dd)
-
-*Features*
-* ...
-
-*Bug Fixes*
-* ...
-
-*Breaking Changes*
-* ...
diff --git a/README.md b/README.md
index 677a51e..81b5f03 100644
--- a/README.md
+++ b/README.md
@@ -4,12 +4,18 @@ This project hosts the **samples** for the Cognitive Services Speech Devices SDK
## Features
-This repository hosts samples that help you to get started with several features of the Speech Devices SDK. Please note the Speech Devices SDK only works with the specific devices. At this point, it only works with [Roobo dev kits](http://ddk.roobo.com/).
+This repository hosts samples that help you to get started with several features of the Speech Devices SDK. Please note the Speech Devices SDK only works with specific devices. At this point, it only works with [Roobo dev kits](http://ddk.roobo.com/) and [Azure Kinect DK](https://azure.microsoft.com/en-us/services/kinect-dk/).
## Getting Started
-Currently only samples for Android devices are provided in this repository. Check out the [sample code walkthrough](https://github.com/Azure-Samples/Cognitive-Services-Speech-Devices-SDK/blob/master/Samples/Android/Speech%20Devices%20SDK%20Starter%20App/Sample_Code_Walkthrough.md) for more details. More OS support will be added later.
+Check out the [sample code walkthrough](https://github.com/Azure-Samples/Cognitive-Services-Speech-Devices-SDK/blob/master/Samples/Android/Speech%20Devices%20SDK%20Starter%20App/Sample_Code_Walkthrough.md) for details on the sample code.
-Also here's how you can [get the Speech Devices SDK](https://aka.ms/sdsdk-get), and [get started with the Speech Devices SDK](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/speech-devices-sdk-qsg).
+Here's how you can [get the Speech Devices SDK](https://aka.ms/sdsdk-get).
+
+The following quickstarts demonstrate how to use sample applications.
+
+* [Android](https://docs.microsoft.com/en-us/azure/cognitive-services/Speech-Service/speech-devices-sdk-android-quickstart)
+* [Linux](https://docs.microsoft.com/en-us/azure/cognitive-services/Speech-Service/speech-devices-sdk-linux-quickstart)
+* [Windows](https://docs.microsoft.com/en-us/azure/cognitive-services/Speech-Service/speech-devices-sdk-windows-quickstart)
Cognitive Services Speech Devices SDK supports customized wake words for key word spotting. Check out this guide to [create a customized wake words](https://aka.ms/sdsdk-kws) for your device.
diff --git a/Samples/Android/Speech Devices SDK Starter App/CHANGELOG.md b/Samples/Android/Speech Devices SDK Starter App/CHANGELOG.md
deleted file mode 100644
index 9824752..0000000
--- a/Samples/Android/Speech Devices SDK Starter App/CHANGELOG.md
+++ /dev/null
@@ -1,13 +0,0 @@
-## [project-title] Changelog
-
-
-# x.y.z (yyyy-mm-dd)
-
-*Features*
-* ...
-
-*Bug Fixes*
-* ...
-
-*Breaking Changes*
-* ...
diff --git a/Samples/Android/Speech Devices SDK Starter App/README.md b/Samples/Android/Speech Devices SDK Starter App/README.md
deleted file mode 100644
index 677a51e..0000000
--- a/Samples/Android/Speech Devices SDK Starter App/README.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# Samples Repository for the Cognitive Services Speech Devices SDK
-
-This project hosts the **samples** for the Cognitive Services Speech Devices SDK.
-
-## Features
-
-This repository hosts samples that help you to get started with several features of the Speech Devices SDK. Please note the Speech Devices SDK only works with the specific devices. At this point, it only works with [Roobo dev kits](http://ddk.roobo.com/).
-
-## Getting Started
-
-Currently only samples for Android devices are provided in this repository. Check out the [sample code walkthrough](https://github.com/Azure-Samples/Cognitive-Services-Speech-Devices-SDK/blob/master/Samples/Android/Speech%20Devices%20SDK%20Starter%20App/Sample_Code_Walkthrough.md) for more details. More OS support will be added later.
-
-Also here's how you can [get the Speech Devices SDK](https://aka.ms/sdsdk-get), and [get started with the Speech Devices SDK](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/speech-devices-sdk-qsg).
-
-Cognitive Services Speech Devices SDK supports customized wake words for key word spotting. Check out this guide to [create a customized wake words](https://aka.ms/sdsdk-kws) for your device.
diff --git a/Samples/Android/Speech Devices SDK Starter App/example/app/build.gradle b/Samples/Android/Speech Devices SDK Starter App/example/app/build.gradle
index 1b99257..17a17ab 100644
--- a/Samples/Android/Speech Devices SDK Starter App/example/app/build.gradle
+++ b/Samples/Android/Speech Devices SDK Starter App/example/app/build.gradle
@@ -9,7 +9,7 @@ android {
}
defaultConfig {
- applicationId "com.microsoft.coginitiveservices.speech.samples.sdsdkstarterapp"
+ applicationId "com.microsoft.cognitiveservices.speech.samples.sdsdkstarterapp"
minSdkVersion 22
targetSdkVersion 27
versionCode 1
@@ -40,4 +40,4 @@ dependencies {
androidTestImplementation 'com.android.support.test:runner:1.0.1'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
implementation files('src/main/jniLibs/com.microsoft.cognitiveservices.speech.jar')
- }
+}
diff --git a/Samples/Android/Speech Devices SDK Starter App/example/app/src/androidTest/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/ExampleInstrumentedTest.java b/Samples/Android/Speech Devices SDK Starter App/example/app/src/androidTest/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/ExampleInstrumentedTest.java
index 8ef90b3..cdb5118 100644
--- a/Samples/Android/Speech Devices SDK Starter App/example/app/src/androidTest/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/ExampleInstrumentedTest.java
+++ b/Samples/Android/Speech Devices SDK Starter App/example/app/src/androidTest/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/ExampleInstrumentedTest.java
@@ -28,13 +28,13 @@ public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getTargetContext();
- assertEquals("com.microsoft.coginitiveservices.speech.samples.sdsdkstarterapp", appContext.getPackageName());
+ assertEquals("com.microsoft.cognitiveservices.speech.samples.sdsdkstarterapp", appContext.getPackageName());
}
@Test
public void runSpeechSDKtests() {
loadTestProperties("/data/local/tmp/tests/test-java-unittests.properties");
- //tests.runner.Runner.mainRunner("tests.unit.AllUnitTests");
+ tests.runner.Runner.mainRunner("tests.unit.AllUnitTests");
}
@SuppressWarnings("deprecation")
diff --git a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/AndroidManifest.xml b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/AndroidManifest.xml
index 2d68ec3..2e05f29 100644
--- a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/AndroidManifest.xml
+++ b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/AndroidManifest.xml
@@ -5,7 +5,6 @@
-
+
@@ -24,14 +27,9 @@
-
-
\ No newline at end of file
diff --git a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/conversation.java b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/Conversation.java
similarity index 89%
rename from Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/conversation.java
rename to Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/Conversation.java
index a0fe638..cdf69d9 100644
--- a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/conversation.java
+++ b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/Conversation.java
@@ -12,6 +12,7 @@
import android.util.Pair;
import android.view.Menu;
import android.view.MenuItem;
+import android.view.WindowManager;
import android.widget.ListView;
import android.widget.TextView;
@@ -27,6 +28,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
+import java.net.URI;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
@@ -37,10 +39,10 @@
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-public class conversation extends AppCompatActivity {
+public class Conversation extends AppCompatActivity {
private HashMap signatureMap = new HashMap<>();
private HashMap colorMap = new HashMap<>();
- private TextView IntermediateTextView;
+ private TextView intermediateTextView;
private static final String CTSKey = "";
private static final String CTSRegion="";// Region may be "centralus" or "eastasia"
private SpeechConfig speechConfig = null;
@@ -91,7 +93,7 @@ public boolean onOptionsItemSelected(MenuItem item)
Log.i(logTag, "Participants enrollment");
String[] keyArray = signatureMap.keySet().toArray(new String[signatureMap.size()]);
- colorMap.put("?", getColor());
+ colorMap.put("Guest", getColor());
for (int i = 1; i <= signatureMap.size(); i++)
{
while (colorMap.size() < i + 1)
@@ -129,8 +131,8 @@ protected void onCreate(Bundle savedInstanceState)
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_conversation);
Toolbar toolbar = findViewById(R.id.CTStoolbar);
- IntermediateTextView = findViewById(R.id.IntermediateView);
- IntermediateTextView.setMovementMethod(new ScrollingMovementMethod());
+ intermediateTextView = findViewById(R.id.IntermediateView);
+ intermediateTextView.setMovementMethod(new ScrollingMovementMethod());
setSupportActionBar(toolbar);
Properties prop = new Properties();
InputStream participantIs = null;
@@ -151,7 +153,7 @@ protected void onCreate(Bundle savedInstanceState)
// check if we have a valid endpoint
///////////////////////////////////////////////////
if (CTSRegion.startsWith("<") || CTSRegion.endsWith(">")) {
- appendTextLine( "Error: Replace CTSRegion with your speech subscription key's service region and re-compile!", true);
+ appendTextLine( "Error: Replace CTSRegion with your actual speech subscription key's service region and re-compile!", true);
return;
}
@@ -159,8 +161,8 @@ protected void onCreate(Bundle savedInstanceState)
{
// example/participants.properties is for storing participants' voice signatures, please push the file under folder /video on DDK device.
participantIs = new FileInputStream("/video/participants.properties");
- prop.load(participantIs);
- participantList = prop.getProperty("PARTICIPANTSLIST");
+ prop.load(participantIs);
+ participantList = prop.getProperty("PARTICIPANTSLIST");
}
catch (Exception io)
{
@@ -181,7 +183,7 @@ protected void onCreate(Bundle savedInstanceState)
if (participantList.length() == 0)
{
Log.i(logTag, "Please put participants file in /video/participants.properties");
- appendTextLine("Please save the participants' voice signatures in file-participants.properties, and push the file under folder /video", true);
+                appendTextLine("Please save the participants' voice signatures in a file named participants.properties, and push the file under folder /video", true);
}
else
{
@@ -196,9 +198,10 @@ protected void onCreate(Bundle savedInstanceState)
}
}
- }
+ getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
+ }
- private void eventHandler(ConversationTranscriptionEventArgs e)
+ private void recognizingEventHandler(ConversationTranscriptionEventArgs e)
{
final String text = e.getResult().getText();
final String speakerId = e.getResult().getUserId().equals("Unidentified") ? "..." : e.getResult().getUserId();
@@ -216,9 +219,9 @@ private void eventHandler(ConversationTranscriptionEventArgs e)
{
if (transcriptions.containsKey(key))
{
- if (transcriptions.get(key).getResult().getReason() == ResultReason.RecognizingSpeech)
+ if (transcriptions.get(key).getResult().getReason() == ResultReason.RecognizedSpeech)
{
- Log.e(logTag, "Two utterances occur at the same time. Offset: " + offset + "; text: " + text);
+ Log.e(logTag, "Two utterances occurred at the same time. Offset: " + offset + "; text: " + text);
}
}
transcriptions.put(key, e);
@@ -232,7 +235,7 @@ private void startRecognizeMeeting(ConversationTranscriber t)
{
t.sessionStarted.addEventListener((o, e) -> Log.i(logTag, "Session started event. Start recognition"));
- t.recognizing.addEventListener((o, e) -> eventHandler(e));
+ t.recognizing.addEventListener((o, e) -> recognizingEventHandler(e));
t.recognized.addEventListener((o, e) -> {
final String text = e.getResult().getText();
@@ -244,7 +247,7 @@ private void startRecognizeMeeting(ConversationTranscriber t)
if (!text.isEmpty() && !speakerId.equals("$ref$"))
{
- final SpeakerData data = new SpeakerData(speakerId, colorMap.get(speakerId.equals("Guest") ? "?" : speakerId));
+ final SpeakerData data = new SpeakerData(speakerId, colorMap.get(speakerId));
final Transcription transcription = new Transcription(text, data, offset);
runOnUiThread(() ->
{
@@ -336,23 +339,23 @@ private void setRecognizedText()
private void appendTextLine(final String s, final Boolean erase)
{
- conversation.this.runOnUiThread(() -> {
+ Conversation.this.runOnUiThread(() -> {
if (erase)
{
- IntermediateTextView.setText(s);
+ intermediateTextView.setText(s);
}
else
{
- String txt = IntermediateTextView.getText().toString();
- IntermediateTextView.setText(String.format("%s\n%s", txt, s));
+ String txt = intermediateTextView.getText().toString();
+ intermediateTextView.setText(String.format("%s\n%s", txt, s));
}
- final Layout layout = IntermediateTextView.getLayout();
+ final Layout layout = intermediateTextView.getLayout();
if (layout != null) {
- int scrollDelta = layout.getLineBottom(IntermediateTextView.getLineCount() - 1)
- -IntermediateTextView.getScrollY() -IntermediateTextView.getHeight();
+ int scrollDelta = layout.getLineBottom(intermediateTextView.getLineCount() - 1)
+ -intermediateTextView.getScrollY() -intermediateTextView.getHeight();
if (scrollDelta > 0)
- IntermediateTextView.scrollBy(0, scrollDelta);
+ intermediateTextView.scrollBy(0, scrollDelta);
}
});
}
@@ -385,6 +388,6 @@ private interface OnTaskCompletedListener
private void displayException(Exception ex)
{
- IntermediateTextView.setText(String.format("%s\n%s", ex.getMessage(), TextUtils.join("\n", ex.getStackTrace())));
+ intermediateTextView.setText(String.format("%s\n%s", ex.getMessage(), TextUtils.join("\n", ex.getStackTrace())));
}
}
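
The messages above tell the user to save the participants' voice signatures in participants.properties and to push that file to the /video folder on the device; Conversation.java then reads it from /video/participants.properties. A minimal sketch of doing that with adb, assuming the dev kit is connected and visible to `adb devices`:

```shell
# copy the participants file to the folder the sample reads from
adb push participants.properties /video/participants.properties

# confirm the file is where Conversation.java expects it
adb shell ls /video
```
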
diff --git a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/LanguageCode.java b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/LanguageCode.java
index 108be61..aa76faf 100644
--- a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/LanguageCode.java
+++ b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/LanguageCode.java
@@ -1,7 +1,7 @@
package com.microsoft.cognitiveservices.speech.samples.sdsdkstarterapp;
import java.util.HashMap;
-
+import java.util.Map;
public class LanguageCode {
private static HashMap mapRecolanguageCode = new HashMap(){
diff --git a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/listLanguage.java b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/ListLanguage.java
similarity index 98%
rename from Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/listLanguage.java
rename to Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/ListLanguage.java
index ab57a9d..086dbc3 100644
--- a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/listLanguage.java
+++ b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/ListLanguage.java
@@ -14,7 +14,7 @@
import static com.microsoft.cognitiveservices.speech.samples.sdsdkstarterapp.MainActivity.SELECT_RECOGNIZE_LANGUAGE_REQUEST;
import static com.microsoft.cognitiveservices.speech.samples.sdsdkstarterapp.MainActivity.SELECT_TRANSLATE_LANGUAGE_REQUEST;
-public class listLanguage extends AppCompatActivity {
+public class ListLanguage extends AppCompatActivity {
private ListView listViewlanguage;
private final String[] recolanguage = {"English (United States)","German (Germany)","Chinese (Mandarin, simplified)","English (India)","Spanish (Spain)","French (France)","Italian (Italy)","Portuguese (Brazil)" ,"Russian (Russia)"};
private final String[] tranlanguage = {"Afrikaans", "Arabic", "Bangla", "Bosnian (Latin)", "Bulgarian", "Cantonese (Traditional)", "Catalan", "Chinese Simplified", "Chinese Traditional", "Croatian", "Czech", "Danish", "Dutch", "English", "Estonian", "Fijian", "Filipino", "Finnish", "French", "German", "Greek", "Haitian Creole", "Hebrew", "Hindi", "Hmong Daw", "Hungarian", "Indonesian", "Italian", "Japanese", "Kiswahili", "Klingon", "Klingon (plqaD)", "Korean", "Latvian", "Lithuanian", "Malagasy", "Malay", "Maltese", "Norwegian", "Persian", "Polish", "Portuguese", "Queretaro Otomi", "Romanian", "Russian", "Samoan", "Serbian (Cyrillic)", "Serbian (Latin)", "Slovak", "Slovenian", "Spanish", "Swedish", "Tahitian", "Tamil", "Thai", "Tongan", "Turkish", "Ukrainian", "Urdu", "Vietnamese", "Welsh", "Yucatec Maya"};
@@ -57,7 +57,6 @@ public void onItemClick(AdapterView<?> parent, final View view,
setResult(RESULT_OK, sendIntent);
finish();
}
-
});
}
}
diff --git a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/MainActivity.java b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/MainActivity.java
index 08453bf..72316af 100644
--- a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/MainActivity.java
+++ b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/java/com/microsoft/cognitiveservices/speech/samples/sdsdkstarterapp/MainActivity.java
@@ -78,8 +78,8 @@ public class MainActivity extends AppCompatActivity {
private TextView translateLanguageTextView;
private Toolbar mainToolbar;
private final HashMap intentIdMap = new HashMap<>();
- private static String languageRecognition = "en-US";
- private static String translateLanguage = "zh-Hans";
+ private static String LanguageRecognition = "en-US";
+ private static String TranslateLanguage = "zh-Hans";
static final int SELECT_RECOGNIZE_LANGUAGE_REQUEST = 0;
static final int SELECT_TRANSLATE_LANGUAGE_REQUEST = 1;
@@ -101,7 +101,7 @@ public static SpeechConfig getSpeechConfig() {
// PMA parameters
speechConfig.setProperty("DeviceGeometry", DeviceGeometry);
speechConfig.setProperty("SelectedGeometry", SelectedGeometry);
- speechConfig.setSpeechRecognitionLanguage(languageRecognition);
+ speechConfig.setSpeechRecognitionLanguage(LanguageRecognition);
return speechConfig;
}
@@ -112,13 +112,13 @@ public boolean onCreateOptionsMenu(Menu menu){
public boolean onOptionsItemSelected(MenuItem item){
switch(item.getItemId()){
case R.id.RecoLanguage : {
- Intent selectLanguageIntent = new Intent(this,listLanguage.class);
+ Intent selectLanguageIntent = new Intent(this,ListLanguage.class);
selectLanguageIntent.putExtra("RecognizeOrTranslate", SELECT_RECOGNIZE_LANGUAGE_REQUEST);
startActivityForResult(selectLanguageIntent, SELECT_RECOGNIZE_LANGUAGE_REQUEST);
return true;
}
case R.id.TranLanguage :{
- Intent selectLanguageIntent = new Intent(this, listLanguage.class);
+ Intent selectLanguageIntent = new Intent(this, ListLanguage.class);
selectLanguageIntent.putExtra("RecognizeOrTranslate", SELECT_TRANSLATE_LANGUAGE_REQUEST);
startActivityForResult(selectLanguageIntent, SELECT_TRANSLATE_LANGUAGE_REQUEST);
return true;
@@ -153,7 +153,7 @@ protected void onCreate(Bundle savedInstanceState) {
// check if we have a valid key
///////////////////////////////////////////////////
if (SpeechSubscriptionKey.startsWith("<") || SpeechSubscriptionKey.endsWith(">")) {
- recognizedTextView.setText( "Error: Replace SpeechSubscriptionKey with your actual speech subscription key and re-compile!");
+ recognizedTextView.setText( "Error: Replace SpeechSubscriptionKey with your actual speech subscription key and re-compile!");
return;
}
///////////////////////////////////////////////////
@@ -186,7 +186,7 @@ protected void onCreate(Bundle savedInstanceState) {
try {
- Log.i(logTag, languageRecognition);
+ Log.i(logTag, LanguageRecognition);
final SpeechRecognizer reco = new SpeechRecognizer(this.getSpeechConfig(), this.getAudioConfig());
reco.recognizing.addEventListener((o, speechRecognitionResultEventArgs) -> {
@@ -398,7 +398,7 @@ public void onClick(View view) {
final SpeechConfig speechIntentConfig = SpeechConfig.fromSubscription(LuisSubscriptionKey, LuisRegion);
speechIntentConfig.setProperty("DeviceGeometry", DeviceGeometry);
speechIntentConfig.setProperty("SelectedGeometry", SelectedGeometry);
- speechIntentConfig.setSpeechRecognitionLanguage(languageRecognition);
+ speechIntentConfig.setSpeechRecognitionLanguage(LanguageRecognition);
IntentRecognizer reco = new IntentRecognizer(speechIntentConfig, getAudioConfig());
LanguageUnderstandingModel intentModel = LanguageUnderstandingModel.fromAppId(LuisAppId);
@@ -483,7 +483,7 @@ public void onClick(View view) {
content.add("");
try {
final SpeechConfig intentSpeechConfig = SpeechConfig.fromSubscription(LuisSubscriptionKey, LuisRegion);
- intentSpeechConfig.setSpeechRecognitionLanguage(languageRecognition);
+ intentSpeechConfig.setSpeechRecognitionLanguage(LanguageRecognition);
intentSpeechConfig.setProperty("DeviceGeometry", DeviceGeometry);
intentSpeechConfig.setProperty("SelectedGeometry", SelectedGeometry);
reco = new IntentRecognizer(intentSpeechConfig, getAudioConfig());
@@ -561,7 +561,7 @@ public void onClick(View view) {
///////////////////////////////////////////////////
ctsButton.setOnClickListener(view ->{
if(!checkSystemTime()) return;
- Intent meetingIntent = new Intent(this, conversation.class);
+ Intent meetingIntent = new Intent(this, Conversation.class);
startActivity(meetingIntent);
});
@@ -604,9 +604,9 @@ public void onClick(final View view) {
try {
content.clear();
final SpeechTranslationConfig translationSpeechConfig = SpeechTranslationConfig.fromSubscription(SpeechSubscriptionKey, SpeechRegion);
- translationSpeechConfig.addTargetLanguage(languageRecognition);
- translationSpeechConfig.addTargetLanguage(translateLanguage);
- translationSpeechConfig.setSpeechRecognitionLanguage(languageRecognition);
+ translationSpeechConfig.addTargetLanguage(LanguageRecognition);
+ translationSpeechConfig.addTargetLanguage(TranslateLanguage);
+ translationSpeechConfig.setSpeechRecognitionLanguage(LanguageRecognition);
translationSpeechConfig.setProperty("DeviceGeometry", DeviceGeometry);
translationSpeechConfig.setProperty("SelectedGeometry", SelectedGeometry);
reco = new TranslationRecognizer(translationSpeechConfig, getAudioConfig());
@@ -712,14 +712,14 @@ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == SELECT_RECOGNIZE_LANGUAGE_REQUEST) {
if (resultCode == RESULT_OK) {
String language = data.getStringExtra("language");
- languageRecognition = getCode(0,language);
+ LanguageRecognition = getCode(0,language);
recognizeLanguageTextView.setText(language);
}
}
if (requestCode == SELECT_TRANSLATE_LANGUAGE_REQUEST) {
if (resultCode == RESULT_OK) {
String language = data.getStringExtra("language");
- translateLanguage = getCode(1,language);
+ TranslateLanguage = getCode(1,language);
translateLanguageTextView.setText(language);
}
}
diff --git a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/res/values/styles.xml b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/res/values/styles.xml
index ece794b..121c3d2 100644
--- a/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/res/values/styles.xml
+++ b/Samples/Android/Speech Devices SDK Starter App/example/app/src/main/res/values/styles.xml
@@ -25,4 +25,5 @@
+
diff --git a/Samples/Android/Speech Devices SDK Starter App/example/settings.gradle b/Samples/Android/Speech Devices SDK Starter App/example/settings.gradle
index e7b4def..9d495b3 100644
--- a/Samples/Android/Speech Devices SDK Starter App/example/settings.gradle
+++ b/Samples/Android/Speech Devices SDK Starter App/example/settings.gradle
@@ -1 +1 @@
-include ':app'
+include ':app'
\ No newline at end of file
diff --git a/Samples/Windows_Linux/SampleDemo/.gitignore b/Samples/Windows_Linux/SampleDemo/.gitignore
new file mode 100644
index 0000000..fb5ea22
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/.gitignore
@@ -0,0 +1,5 @@
+/bin/
+/target/
+*.classpath
+*.project
+*.settings/
diff --git a/Samples/Windows_Linux/SampleDemo/CONTRIBUTING.md b/Samples/Windows_Linux/SampleDemo/CONTRIBUTING.md
new file mode 100644
index 0000000..64d27bb
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/CONTRIBUTING.md
@@ -0,0 +1,76 @@
+# Contributing to Cognitive Services Speech Devices SDK
+
+This project welcomes contributions and suggestions. Most contributions require you to agree to a
+Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
+the rights to use your contribution. For details, visit https://cla.microsoft.com.
+
+When you submit a pull request, a CLA-bot will automatically determine whether you need to provide
+a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions
+provided by the bot. You will only need to do this once across all repos using our CLA.
+
+This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
+contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+
+ - [Code of Conduct](#coc)
+ - [Issues and Bugs](#issue)
+ - [Feature Requests](#feature)
+ - [Submission Guidelines](#submit)
+
+## Code of Conduct
+Help us keep this project open and inclusive. Please read and follow our [Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+
+## Found an Issue?
+If you find a bug in the source code or a mistake in the documentation, you can help us by
+[submitting an issue](#submit-issue) to the GitHub Repository. Even better, you can
+[submit a Pull Request](#submit-pr) with a fix.
+
+## Want a Feature?
+You can *request* a new feature by [submitting an issue](#submit-issue) to the GitHub
+Repository. If you would like to *implement* a new feature, please submit an issue with
+a proposal for your work first, to be sure that we can use it.
+
+* **Small Features** can be crafted and directly [submitted as a Pull Request](#submit-pr).
+
+## Submission Guidelines
+
+### Submitting an Issue
+Before you submit an issue, search the archive, maybe your question was already answered.
+
+If your issue appears to be a bug, and hasn't been reported, open a new issue.
+Help us to maximize the effort we can spend fixing issues and adding new
+features, by not reporting duplicate issues. Providing the following information will increase the
+chances of your issue being dealt with quickly:
+
+* **Overview of the Issue** - if an error is being thrown a non-minified stack trace helps
+* **Version** - what version is affected (e.g. 0.1.2)
+* **Motivation for or Use Case** - explain what you are trying to do and why the current behavior is a bug for you
+* **Browsers and Operating System** - is this a problem with all browsers?
+* **Reproduce the Error** - provide a live example or an unambiguous set of steps
+* **Related Issues** - has a similar issue been reported before?
+* **Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might be
+ causing the problem (line of code or commit)
+
+You can file new issues by providing the above information at the corresponding repository's issues link: https://github.com/[organization-name]/[repository-name]/issues/new.
+
+### Submitting a Pull Request (PR)
+Before you submit your Pull Request (PR) consider the following guidelines:
+
+* Search the repository (https://github.com/[organization-name]/[repository-name]/pulls) for an open or closed PR
+ that relates to your submission. You don't want to duplicate effort.
+
+* Make your changes in a new git fork (see the sketch after these steps for a typical sequence of commands):
+
+* Commit your changes using a descriptive commit message
+* Push your fork to GitHub:
+* In GitHub, create a pull request
+* If we suggest changes then:
+ * Make the required updates.
+ * Rebase your fork and force push to your GitHub repository (this will update your Pull Request):
+
+ ```shell
+ git rebase master -i
+ git push -f
+ ```
+
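+The steps above assume the usual fork-and-branch workflow but leave the exact commands out. A minimal sketch, where `<URL of your fork>`, `<repository-name>`, and the branch name `my-fix` are placeholders:
+
+```shell
+# clone your fork and create a topic branch for the change
+git clone <URL of your fork>
+cd <repository-name>
+git checkout -b my-fix
+
+# stage and commit with a descriptive message, then push the branch to your fork
+git add .
+git commit -m "Describe the change"
+git push origin my-fix
+```
+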
+That's it! Thank you for your contribution!
diff --git a/Samples/Windows_Linux/SampleDemo/LICENSE.md b/Samples/Windows_Linux/SampleDemo/LICENSE.md
new file mode 100644
index 0000000..f140945
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/LICENSE.md
@@ -0,0 +1,77 @@
+# Speech Devices SDK license agreement
+
+## MICROSOFT SOFTWARE LICENSE TERMS
+SPEECH DEVICES SOFTWARE DEVELOPMENT KIT (SDK)
+
+> IF YOU LIVE IN (OR ARE A BUSINESS WITH YOUR PRINCIPAL PLACE OF BUSINESS IN) THE UNITED STATES, PLEASE READ THE "BINDING ARBITRATION AND CLASS ACTION WAIVER" SECTION BELOW. IT AFFECTS HOW DISPUTES ARE RESOLVED.
+
+These license terms are an agreement between you and Microsoft Corporation (or one of its affiliates). They apply to the software named above and any Microsoft services or software updates (except to the extent such services or updates are accompanied by new or additional terms, in which case those different terms apply prospectively and do not alter your or Microsoft's rights relating to pre-updated software or services). **IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.**
+
+**1\. INSTALLATION AND USE RIGHTS.**
+
+a) General. You may install and use any number of copies of the software on your devices, solely to evaluate and test it for your internal business purposes. You may not use the software in a live operating environment unless Microsoft permits you to do so under another agreement.
+
+b) Included Microsoft Applications. The software may include other Microsoft applications. These license terms apply to those included applications, if any, unless other license terms are provided with the other Microsoft applications.
+
+c) Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the Third Party Notices file(s) accompanying the software.
+
+d) Package Managers. The software may include package managers, like NuGet, that give you the option to download other Microsoft and third-party software packages to use with your application. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute, license or provide any warranties for any of the third-party packages.
+
+e) If you are a direct competitor, and you access or use the software for purposes of competitive benchmarking, analysis, or intelligence gathering, you waive as against Microsoft, its subsidiaries, and its affiliated companies (including prospectively) any competitive use, access, and benchmarking test restrictions in the terms governing your software to the extent your terms of use are, or purport to be, more restrictive than Microsoft's terms. If you do not waive any such purported restrictions in the terms governing your software, you are not allowed to access or use this software, and will not do so.
+
+**2\. SCOPE OF LICENSE. The software is licensed, not sold. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you will not (and have no right to):**
+
+a) work around any technical limitations in the software that only allow you to use it in certain ways;
+
+b) reverse engineer, decompile or disassemble the software;
+
+c) remove, minimize, block, or modify any notices of Microsoft or its suppliers in the software;
+
+d) use the software in any way that is against the law or to create or propagate malware; or
+
+e) share, publish, or lend the software (except for any distributable code, and then subject to the applicable terms above), provide the software as a stand-alone hosted solution for others to use, or transfer the software or this agreement to any third party.
+
+f) You must use the software only in combination with Microsoft Azure Cognitive Services, Speech service
+
+**3\. DATA COLLECTION.** This software may interact with other Microsoft products that collect data that is transmitted to Microsoft. To learn more about how Microsoft processes personal data we collect, please see the Microsoft Privacy Statement at http://go.microsoft.com/fwlink/?LinkId=248681.
+
+**4\. PRE-RELEASE SOFTWARE.** The software is a pre-release version. It may not operate correctly. It may be different from the commercially released version.
+
+**5\. FEEDBACK.** If you give feedback about the software to Microsoft, you give to Microsoft, without charge, the right to use, share and commercialize your feedback in any way and for any purpose. You will not give feedback that is subject to a license that requires Microsoft to license its software or documentation to third parties because Microsoft includes your feedback in them. These rights survive this agreement.
+
+**6\. PERIOD.** This agreement is effective on your acceptance and may be terminated by you or Microsoft at any time.
+
+**7\. TERMINATION.** Without prejudice to any other rights, Microsoft may terminate this agreement if you fail to comply with any of its terms or conditions. In such event, you must destroy all copies of the software and all of its component parts.
+
+**8\. EXPORT RESTRICTIONS.** You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end users, and end use. For further information on export restrictions, visit (aka.ms/exporting).
+
+**9\. SUPPORT SERVICES.** Microsoft is not obligated under this agreement to provide any support services for the software. Any support provided is "as is", "with all faults", and without warranty of any kind.
+
+**10\. UPDATES.** The software may periodically check for updates, and download and install them for you. You may obtain updates only from Microsoft or authorized sources. Microsoft may need to update your system to provide you with updates. You agree to receive these automatic updates without any additional notice. Updates may not include or support all existing software features, services, or peripheral devices.
+
+**11\. BINDING ARBITRATION AND CLASS ACTION WAIVER.** This Section applies if you live in (or, if a business, your principal place of business is in) the United States. If you and Microsoft have a dispute, you and Microsoft agree to try for 60 days to resolve it informally. If you and Microsoft can't, you and Microsoft agree to binding individual arbitration before the American Arbitration Association under the Federal Arbitration Act ("FAA"), and not to sue in court in front of a judge or jury. Instead, a neutral arbitrator will decide. Class action lawsuits, class-wide arbitrations, private attorney-general actions, and any other proceeding where someone acts in a representative capacity are not allowed; nor is combining individual proceedings without the consent of all parties. The complete Arbitration Agreement contains more terms and is at aka.ms/arb-agreement-1. You and Microsoft agree to these terms.
+
+**12\. ENTIRE AGREEMENT.** This agreement, and any other terms Microsoft may provide for supplements, updates, or third-party applications, is the entire agreement for the software.
+
+**13\. APPLICABLE LAW AND PLACE TO RESOLVE DISPUTES.** If you acquired the software in the United States or Canada, the laws of the state or province where you live (or, if a business, where your principal place of business is located) govern the interpretation of this agreement, claims for its breach, and all other claims (including consumer protection, unfair competition, and tort claims), regardless of conflict of laws principles, except that the FAA governs everything related to arbitration. If you acquired the software in any other country, its laws apply, except that the FAA governs everything related to arbitration. If U.S. federal jurisdiction exists, you and Microsoft consent to exclusive jurisdiction and venue in the federal court in King County, Washington for all disputes heard in court (excluding arbitration). If not, you and Microsoft consent to exclusive jurisdiction and venue in the Superior Court of King County, Washington for all disputes heard in court (excluding arbitration).
+
+**14\. CONSUMER RIGHTS; REGIONAL VARIATIONS.** This agreement describes certain legal rights. You may have other rights, including consumer rights, under the laws of your state, province, or country. Separate and apart from your relationship with Microsoft, you may also have rights with respect to the party from which you acquired the software. This agreement does not change those other rights if the laws of your state, province, or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or mandatory country law applies, then the following provisions apply to you:
+
+a) Australia. You have statutory guarantees under the Australian Consumer Law and nothing in this agreement is intended to affect those rights.
+
+b) Canada. If you acquired this software in Canada, you may stop receiving updates by turning off the automatic update feature, disconnecting your device from the Internet (if and when you re-connect to the Internet, however, the software will resume checking for and installing updates), or uninstalling the software. The product documentation, if any, may also specify how to turn off updates for your specific device or software.
+
+c) Germany and Austria.
+
+> (i) Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the licensed software.
+
+> (ii) Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in case of death or personal or physical injury, Microsoft is liable according to the statutory law.
+
+> Subject to the foregoing clause (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). In other cases of slight negligence, Microsoft will not be liable for slight negligence.
+
+**15\. DISCLAIMER OF WARRANTY. THE SOFTWARE IS LICENSED "AS IS." YOU BEAR THE RISK OF USING IT. MICROSOFT GIVES NO EXPRESS WARRANTIES, GUARANTEES, OR CONDITIONS. TO THE EXTENT PERMITTED UNDER APPLICABLE LAWS, MICROSOFT EXCLUDES ALL IMPLIED WARRANTIES, INCLUDING MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT.**
+
+**16\. LIMITATION ON AND EXCLUSION OF DAMAGES. IF YOU HAVE ANY BASIS FOR RECOVERING DAMAGES DESPITE THE PRECEDING DISCLAIMER OF WARRANTY, YOU CAN RECOVER FROM MICROSOFT AND ITS SUPPLIERS ONLY DIRECT DAMAGES UP TO U.S. $5.00. YOU CANNOT RECOVER ANY OTHER DAMAGES, INCLUDING CONSEQUENTIAL, LOST PROFITS, SPECIAL, INDIRECT OR INCIDENTAL DAMAGES.**
+
+**This limitation applies to (a) anything related to the software, services, content (including code) on third party Internet sites, or third-party applications; and (b) claims for breach of contract, warranty, guarantee, or condition; strict liability, negligence, or other tort; or any other claim; in each case to the extent permitted by applicable law.**
+
+**It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your state, province, or country may not allow the exclusion or limitation of incidental, consequential, or other damages.**
\ No newline at end of file
diff --git a/Samples/Windows_Linux/SampleDemo/LUIS-example.json b/Samples/Windows_Linux/SampleDemo/LUIS-example.json
new file mode 100644
index 0000000..e9ad501
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/LUIS-example.json
@@ -0,0 +1,84 @@
+{
+ "luis_schema_version": "3.0.0",
+ "versionId": "0.1",
+ "name": "ddkmusicdemo",
+ "desc": "ddk music demo",
+ "culture": "en-us",
+ "intents": [
+ {
+ "name": "None"
+ },
+ {
+ "name": "play music"
+ },
+ {
+ "name": "stop"
+ }
+ ],
+ "entities": [
+ {
+ "name": "song",
+ "roles": []
+ }
+ ],
+ "composites": [],
+ "closedLists": [],
+ "patternAnyEntities": [],
+ "regex_entities": [],
+ "prebuiltEntities": [],
+ "model_features": [],
+ "regex_features": [],
+ "patterns": [],
+ "utterances": [
+ {
+ "text": "pause",
+ "intent": "stop",
+ "entities": []
+ },
+ {
+ "text": "play music",
+ "intent": "play music",
+ "entities": []
+ },
+ {
+ "text": "play music for me",
+ "intent": "play music",
+ "entities": []
+ },
+ {
+ "text": "play music please",
+ "intent": "play music",
+ "entities": []
+ },
+ {
+ "text": "play song",
+ "intent": "play music",
+ "entities": []
+ },
+ {
+ "text": "shut up",
+ "intent": "stop",
+ "entities": []
+ },
+ {
+ "text": "start music",
+ "intent": "play music",
+ "entities": []
+ },
+ {
+ "text": "stop",
+ "intent": "stop",
+ "entities": []
+ },
+ {
+ "text": "stop playing",
+ "intent": "stop",
+ "entities": []
+ },
+ {
+ "text": "turn off",
+ "intent": "stop",
+ "entities": []
+ }
+ ]
+}
\ No newline at end of file
diff --git a/Samples/Windows_Linux/SampleDemo/ThirdPartyNotices.md b/Samples/Windows_Linux/SampleDemo/ThirdPartyNotices.md
new file mode 100644
index 0000000..d5ea41a
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/ThirdPartyNotices.md
@@ -0,0 +1,544 @@
+THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
+Do Not Translate or Localize
+
+This file provides information regarding components that are being relicensed to you by Microsoft under Microsoft's software licensing terms. Microsoft reserves all rights not expressly granted herein, whether by implication, estoppel or otherwise.
+
+Microsoft is offering you a license to use the following components with Speech Devices SDK subject to the terms of the Microsoft software license terms for Speech Devices SDK products (the "Microsoft Program").
+
+1. kgabis/parson version b87a27c
+2. Azure/azure-c-shared-utility version ed84cdb
+3. catchorg/Catch2 version d2d8455
+4. curl/curl version 6d7d0eb
+5. curl/curl version 7.21.3
+6. openssl/openssl version b2758a2
+7. nlohmann/json version d2dd27d
+8. tinyalsa/tinyalsa version df11091
+9. xianyi/OpenBLAS version 5f998ef
+10. openfst version 1.6.5
+11. kaldi-asr/kaldi version eba50e4
+
+
+1. %% kgabis/parson NOTICES AND INFORMATION BEGIN HERE
+=========================================
+MIT License
+
+Copyright (c) 2012 - 2017 Krzysztof Gabis
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+=========================================
+END OF kgabis/parson NOTICES AND INFORMATION
+
+2. %% Azure/azure-c-shared-utility NOTICES AND INFORMATION BEGIN HERE
+=========================================
+Microsoft Azure IoT SDKs
+Copyright (c) Microsoft Corporation
+All rights reserved.
+MIT License
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the ""Software""), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+=========================================
+END OF Azure/azure-c-shared-utility NOTICES AND INFORMATION
+
+3. %% catchorg/Catch2 NOTICES AND INFORMATION BEGIN HERE
+=========================================
+Boost Software License - Version 1.0 - August 17th, 2003
+
+Permission is hereby granted, free of charge, to any person or organization
+obtaining a copy of the software and accompanying documentation covered by
+this license (the "Software") to use, reproduce, display, distribute,
+execute, and transmit the Software, and to prepare derivative works of the
+Software, and to permit third-parties to whom the Software is furnished to
+do so, all subject to the following:
+
+The copyright notices in the Software and this entire statement, including
+the above license grant, this restriction and the following disclaimer,
+must be included in all copies of the Software, in whole or in part, and
+all derivative works of the Software, unless such copies or derivative
+works are solely in the form of machine-executable object code generated by
+a source language processor.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
+SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
+FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+
+=========================================
+END OF catchorg/Catch2 NOTICES AND INFORMATION
+
+
+4. %% curl/curl NOTICES AND INFORMATION BEGIN HERE
+=========================================
+COPYRIGHT AND PERMISSION NOTICE
+
+Copyright (c) 1996 - 2018, Daniel Stenberg, <daniel@haxx.se>, and many
+contributors, see the THANKS file.
+
+All rights reserved.
+
+Permission to use, copy, modify, and distribute this software for any purpose
+with or without fee is hereby granted, provided that the above copyright
+notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN
+NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+OR OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the name of a copyright holder shall not
+be used in advertising or otherwise to promote the sale, use or other dealings
+in this Software without prior written authorization of the copyright holder.
+
+
+=========================================
+END OF curl/curl NOTICES AND INFORMATION
+
+
+5. %% curl/curl NOTICES AND INFORMATION BEGIN HERE
+=========================================
+COPYRIGHT AND PERMISSION NOTICE
+
+Copyright (c) 1996 - 2010, Daniel Stenberg, <daniel@haxx.se>.
+
+All rights reserved.
+
+Permission to use, copy, modify, and distribute this software for any purpose
+with or without fee is hereby granted, provided that the above copyright
+notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN
+NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+OR OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the name of a copyright holder shall not
+be used in advertising or otherwise to promote the sale, use or other dealings
+in this Software without prior written authorization of the copyright holder.
+
+=========================================
+END OF curl/curl NOTICES AND INFORMATION
+
+
+6. %% openssl/openssl NOTICES AND INFORMATION BEGIN HERE
+=========================================
+The OpenSSL toolkit stays under a double license, i.e. both the conditions of
+ the OpenSSL License and the original SSLeay license apply to the toolkit.
+ See below for the actual license texts.
+
+ OpenSSL License
+ ---------------
+
+/* ====================================================================
+ * Copyright (c) 1998-2017 The OpenSSL Project. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ *
+ * 3. All advertising materials mentioning features or use of this
+ * software must display the following acknowledgment:
+ * "This product includes software developed by the OpenSSL Project
+ * for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+ *
+ * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+ * endorse or promote products derived from this software without
+ * prior written permission. For written permission, please contact
+ * openssl-core@openssl.org.
+ *
+ * 5. Products derived from this software may not be called "OpenSSL"
+ * nor may "OpenSSL" appear in their names without prior written
+ * permission of the OpenSSL Project.
+ *
+ * 6. Redistributions of any form whatsoever must retain the following
+ * acknowledgment:
+ * "This product includes software developed by the OpenSSL Project
+ * for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+ * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
+ * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ * OF THE POSSIBILITY OF SUCH DAMAGE.
+ * ====================================================================
+ *
+ * This product includes cryptographic software written by Eric Young
+ * (eay@cryptsoft.com). This product includes software written by Tim
+ * Hudson (tjh@cryptsoft.com).
+ *
+ */
+
+ Original SSLeay License
+ -----------------------
+
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscapes SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are aheared to. The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the rouines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publically available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+=========================================
+END OF openssl/openssl NOTICES AND INFORMATION
+
+7. %% nlohmann/json NOTICES AND INFORMATION BEGIN HERE
+=========================================
+MIT License
+
+Copyright (c) 2013-2018 Niels Lohmann
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+range-v3
+Boost Software License - Version 1.0 - August 17th, 2003
+
+Copyright Eric Niebler 2013-2014
+
+Permission is hereby granted, free of charge, to any person or organization
+obtaining a copy of the software and accompanying documentation covered by
+this license (the "Software") to use, reproduce, display, distribute,
+execute, and transmit the Software, and to prepare derivative works of the
+Software, and to permit third-parties to whom the Software is furnished to
+do so, all subject to the following:
+
+The copyright notices in the Software and this entire statement, including
+the above license grant, this restriction and the following disclaimer,
+must be included in all copies of the Software, in whole or in part, and
+all derivative works of the Software, unless such copies or derivative
+works are solely in the form of machine-executable object code generated by
+a source language processor.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
+SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
+FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+Files with sample code from IETF RFC 7049
+
+Copyright (c) 2013 IETF Trust and the persons identified as authors
+of the code. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+Redistributions of source code must retain the above copyright notice,
+this list of conditions and the following disclaimer.
+
+Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+Neither the name of Internet Society, IETF or IETF Trust, nor the names of
+specific contributors, may be used to endorse or promote products derived
+from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+=========================================
+END OF nlohmann/json NOTICES AND INFORMATION
+
+8. %% tinyalsa/tinyalsa NOTICES AND INFORMATION BEGIN HERE
+=========================================
+Copyright 2011, The Android Open Source Project
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of The Android Open Source Project nor the names of
+ its contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+THIS SOFTWARE IS PROVIDED BY The Android Open Source Project ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL The Android Open Source Project BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
+
+=========================================
+END OF tinyalsa/tinyalsa NOTICES AND INFORMATION
+
+9. %% xianyi/OpenBLAS version #1e729d7ba20cee417259934da9424b50e2d41548 NOTICES AND INFORMATION BEGIN HERE
+=========================================
+OpenBLAS
+
+Copyright (c) 2011-2014, The OpenBLAS Project
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ 3. Neither the name of the OpenBLAS project nor the names of
+ its contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+GotoBLAS2
+
+Copyright 2009, 2010 The University of Texas at Austin.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT AUSTIN ``AS IS''
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT
+AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+The views and conclusions contained in the software and documentation
+are those of the authors and should not be interpreted as representing
+official policies, either expressed or implied, of The University of
+Texas at Austin.
+
+
+File with code "adapted from" Google performance tools
+* Copyright (c) 2007, Google Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * ---
+ * Author: Craig Silverstein
+ */
+
+=========================================
+END OF xianyi/OpenBLAS version #1e729d7ba20cee417259934da9424b50e2d41548 NOTICES AND INFORMATION
+
+10. %% openfst NOTICES AND INFORMATION BEGIN HERE
+=========================================
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use these files except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Copyright 2005-2015 Google, Inc.
+
+=========================================
+END OF openfst NOTICES AND INFORMATION
+
+11. %% kaldi-asr/kaldi NOTICES AND INFORMATION BEGIN HERE
+=========================================
+Copyright (c) kaldi-asr contributors
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use these files except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+=========================================
+END OF kaldi-asr/kaldi NOTICES AND INFORMATION
+
diff --git a/Samples/Windows_Linux/SampleDemo/kws.table b/Samples/Windows_Linux/SampleDemo/kws.table
new file mode 100644
index 0000000..f7ca328
Binary files /dev/null and b/Samples/Windows_Linux/SampleDemo/kws.table differ
diff --git a/Samples/Windows_Linux/SampleDemo/participants.properties b/Samples/Windows_Linux/SampleDemo/participants.properties
new file mode 100644
index 0000000..ac47024
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/participants.properties
@@ -0,0 +1,2 @@
+PARTICIPANTSLIST=
+
\ No newline at end of file
diff --git a/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/Cts.java b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/Cts.java
new file mode 100644
index 0000000..901bd03
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/Cts.java
@@ -0,0 +1,451 @@
+package com.microsoft.cognitiveservices.speech.samples;
+
+import java.awt.Color;
+import java.awt.Dimension;
+import java.awt.EventQueue;
+
+import javax.swing.JFrame;
+import javax.swing.JPanel;
+import javax.swing.JScrollPane;
+import javax.swing.border.EmptyBorder;
+
+import com.microsoft.cognitiveservices.speech.PropertyId;
+import com.microsoft.cognitiveservices.speech.ResultReason;
+import com.microsoft.cognitiveservices.speech.SpeechConfig;
+import com.microsoft.cognitiveservices.speech.audio.AudioConfig;
+import com.microsoft.cognitiveservices.speech.conversation.ConversationTranscriber;
+import com.microsoft.cognitiveservices.speech.conversation.ConversationTranscriptionEventArgs;
+import com.microsoft.cognitiveservices.speech.conversation.Participant;
+import com.microsoft.cognitiveservices.speech.conversation.User;
+
+import java.awt.GridBagLayout;
+import java.awt.SystemColor;
+
+import javax.swing.JMenu;
+import java.awt.GridBagConstraints;
+import javax.swing.JTextArea;
+import java.awt.event.ActionEvent;
+import java.awt.event.ActionListener;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Properties;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import javax.swing.JMenuBar;
+import javax.swing.JMenuItem;
+import javax.swing.border.BevelBorder;
+import javax.swing.ScrollPaneConstants;
+import java.awt.Font;
+
+public class Cts extends JFrame {
+
+ private static final long serialVersionUID = 1L;
+
+ private JPanel contentPane;
+ private JTextArea interResultTextArea;
+ private ConversationTranscriber transcriber = null;
+ private boolean meetingStarted = false;
+    private final HashMap<Pair<String, BigInteger>, ConversationTranscriptionEventArgs> transcriptions = new HashMap<>();
+ private JMenuItem startMenuItem, stopMenuItem;
+ private static final String CTSKey = "";
+ private static final String CTSRegion = "centralus";// Region may be "centralus" or "eastasia"
+ private static final Boolean DefaultGeometry = false;// Set to false for "Circular6+1" microphone device
+ private static String DeviceGeometry = "Circular6+1"; // "Circular6+1", "Linear4",
+ private static String SelectedGeometry = "Raw"; // "Raw"
+ private SpeechConfig speechConfig = null;
+    private HashMap<String, String> signatureMap = new HashMap<>();
+    public ArrayList<CtsResult> finalResultsList = new ArrayList<>();
+    public ArrayList<String> finalTranscriptions = new ArrayList<>();
+ private JTextArea finalResultTextArea;
+ private final File jarLocation = new File(ClassLoader.getSystemClassLoader().getResource(".").getPath());
+ private final int finalTextArea = 0;
+ private final int intermediaTextArea = 1;
+
+ /**
+ * Launch the application.
+ */
+ public static void main(String[] args) {
+ EventQueue.invokeLater(new Runnable() {
+ public void run() {
+ try {
+ Cts CtsFrame = new Cts();
+ CtsFrame.setVisible(true);
+ CtsFrame.setLocationRelativeTo(null);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ });
+ }
+
+ /**
+ * Create the frame.
+ */
+ public Cts() {
+ // put the participants.properties in the same path as this application runnable
+ // Jar file
+ String participantsProp = jarLocation.getAbsolutePath() + File.separator + "participants.properties";
+        // log.text will be saved in the same path as this application's runnable Jar file
+ String logPath = jarLocation.getAbsolutePath() + File.separator + "log.text";
+
+ setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
+
+ FrameDisplay fd = new FrameDisplay();
+ setBounds(100, 100, fd.getWidth(), fd.getLength());
+ Font menuFont = fd.getMenuFont();
+ Font textFont = fd.getTextFont();
+ Font boldTextFont = fd.getBoldTextFont();
+
+
+ JMenuBar menuBar = new JMenuBar();
+ setJMenuBar(menuBar);
+ JMenu sessionMenu = new JMenu("Session");
+ sessionMenu.getAccessibleContext().setAccessibleDescription("Start or Stop Session");
+ menuBar.add(sessionMenu);
+ menuBar.setFont(menuFont);
+ sessionMenu.setFont(menuFont);
+
+ startMenuItem = new JMenuItem("Start");
+ startMenuItem.setFont(menuFont);
+ sessionMenu.add(startMenuItem);
+
+ stopMenuItem = new JMenuItem("Stop");
+ stopMenuItem.setEnabled(false);
+ stopMenuItem.setFont(menuFont);
+ sessionMenu.add(stopMenuItem);
+
+ JMenu returnMenu = new JMenu("Return");
+ returnMenu.getAccessibleContext().setAccessibleDescription("Return");
+ returnMenu.setFont(menuFont);
+ menuBar.add(returnMenu);
+
+ JMenuItem returnMenuItem = new JMenuItem("Return");
+ returnMenuItem.setFont(menuFont);
+ returnMenu.add(returnMenuItem);
+
+ contentPane = new JPanel();
+ contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
+ setContentPane(contentPane);
+ GridBagLayout gblContentPane = new GridBagLayout();
+ gblContentPane.columnWidths = new int[] { fd.getWidth() };
+ gblContentPane.rowHeights = new int[] { fd.getLength() * 3 / 5, fd.getLength() * 2 / 5 };
+
+ gblContentPane.columnWeights = new double[] { 1.0, Double.MIN_VALUE };
+ gblContentPane.rowWeights = new double[] { 1.0, 1.0 };
+ contentPane.setLayout(gblContentPane);
+
+ finalResultTextArea = new JTextArea(40, 1);
+ finalResultTextArea.setLineWrap(true);
+ finalResultTextArea.setWrapStyleWord(true);
+ finalResultTextArea.setEditable(false);
+ finalResultTextArea.setFont(boldTextFont);
+ finalResultTextArea.setForeground(Color.BLACK);
+ finalResultTextArea.setBackground(new Color(211, 211, 211));
+
+ JScrollPane finalscrollPane = new JScrollPane(finalResultTextArea);
+ finalscrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
+ finalscrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
+ finalscrollPane.setViewportBorder(new BevelBorder(BevelBorder.LOWERED, null, null, null, null));
+ finalscrollPane.setPreferredSize(new Dimension(0, 0));
+ GridBagConstraints gbcScrollPane = new GridBagConstraints();
+ gbcScrollPane.anchor = GridBagConstraints.NORTH;
+ gbcScrollPane.gridy = 0;
+ gbcScrollPane.gridwidth = 2;
+ gbcScrollPane.fill = GridBagConstraints.BOTH;
+ gbcScrollPane.gridx = 0;
+ getContentPane().add(finalscrollPane, gbcScrollPane);
+
+ interResultTextArea = new JTextArea(25, 1);
+ interResultTextArea.setFont(textFont);
+ interResultTextArea.setLineWrap(true);
+ interResultTextArea.setWrapStyleWord(true);
+ interResultTextArea.setEditable(false);
+ interResultTextArea.setForeground(SystemColor.DARK_GRAY);
+ interResultTextArea.setBackground(new Color(255, 255, 255));
+
+ JScrollPane interScrollPane = new JScrollPane(interResultTextArea);
+ interScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
+ interScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
+ interScrollPane.setViewportBorder(new BevelBorder(BevelBorder.LOWERED, null, null, null, null));
+ interScrollPane.setPreferredSize(new Dimension(0, 0));
+ GridBagConstraints gbcInterScrollPane = new GridBagConstraints();
+ gbcInterScrollPane.gridwidth = 2;
+ gbcInterScrollPane.anchor = GridBagConstraints.NORTH;
+ gbcInterScrollPane.fill = GridBagConstraints.BOTH;
+ gbcInterScrollPane.gridy = 1;
+ gbcInterScrollPane.gridx = 0;
+ getContentPane().add(interScrollPane, gbcInterScrollPane);
+
+ startMenuItem.addActionListener(new ActionListener() {
+ @Override
+ public void actionPerformed(ActionEvent e) {
+
+ meetingStarted = true;
+ clearTextBox(intermediaTextArea);
+ clearTextBox(finalTextArea);
+ finalResultsList.clear();
+ transcriptions.clear();
+ finalTranscriptions.clear();
+
+ speechConfig = SpeechConfig.fromSubscription(CTSKey, CTSRegion);
+ if (!DefaultGeometry) {
+ speechConfig.setProperty("DeviceGeometry", DeviceGeometry);
+ speechConfig.setProperty("SelectedGeometry", SelectedGeometry);
+ }
+ speechConfig.setProperty(PropertyId.Speech_LogFilename, logPath);
+
+ try {
+ transcriber = new ConversationTranscriber(speechConfig, AudioConfig.fromDefaultMicrophoneInput());
+
+ transcriber.setConversationId("MeetingTest");
+ System.out.println("Participants enrollment");
+
+ for (String userId : signatureMap.keySet()) {
+ User user = User.fromUserId(userId);
+ transcriber.addParticipant(user);
+ Participant participant = Participant.from(userId, "en-US", signatureMap.get(userId));
+ transcriber.addParticipant(participant);
+ System.out.println("add participant: " + userId);
+ }
+ startRecognizeMeeting(transcriber);
+ startMenuItem.setEnabled(false);
+ stopMenuItem.setEnabled(true);
+ meetingStarted = true;
+ } catch (Exception ex) {
+ System.out.println(ex.getMessage());
+ displayException(ex);
+ }
+ }
+ });
+
+ stopMenuItem.addActionListener(new ActionListener() {
+ @Override
+ public void actionPerformed(ActionEvent e) {
+ stopClicked();
+ meetingStarted = false;
+ }
+ });
+
+ returnMenuItem.addActionListener(new ActionListener() {
+ @Override
+ public void actionPerformed(ActionEvent e) {
+ if (meetingStarted) {
+ stopClicked();
+ meetingStarted = false;
+ }
+ FunctionsList.MainFrame.setVisible(true);
+ dispose();
+ }
+ });
+
+ FileInputStream participantIs = null;
+ String participantList = new String();
+ Properties prop = new Properties();
+ try {
+
+ participantIs = new FileInputStream(participantsProp);
+ prop.load(participantIs);
+ participantList = prop.getProperty("PARTICIPANTSLIST");
+ } catch (Exception io) {
+ io.printStackTrace();
+ } finally {
+ if (participantIs != null) {
+ try {
+ participantIs.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ if (participantList.length() == 0) {
+ System.out.println("Cannot find participants' voice signatures");
+            setRecognizedText("Please save the participants' voice signatures in a file named participants.properties",
+ intermediaTextArea);
+ } else {
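+            // Each stored entry has the form <userId@voiceSignature>; split on '<', '@' and '>' to rebuild the signature map.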
+ while (participantList.length() != 0) {
+ String aName = participantList.substring(participantList.indexOf('<') + 1,
+ participantList.indexOf('@'));
+ String aSign = participantList.substring(participantList.indexOf('@') + 1,
+ participantList.indexOf('>'));
+ signatureMap.put(aName, aSign);
+ System.out.println(aName);
+ System.out.println(aSign);
+ participantList = participantList.substring(participantList.indexOf('>') + 1);
+ }
+ }
+
+ }
+
+ private void startRecognizeMeeting(ConversationTranscriber t) {
+ try {
+ t.sessionStarted.addEventListener((o, e) -> System.out.println("Session started event. Start recognition"));
+
+ t.recognizing.addEventListener((o, e) -> recognizingEventHandler(e));
+
+ t.recognized.addEventListener((o, e) -> {
+ final String text = e.getResult().getText();
+ final String speakerId = e.getResult().getUserId().equals("Unidentified") ? "Guest"
+ : e.getResult().getUserId();
+ final BigInteger offset = e.getResult().getOffset();
+
+ final String result = speakerId + " : " + text;
+ System.out.println("TransResult final result received: " + result + "; Tick: " + offset);
+
+ if (!text.isEmpty() && !speakerId.equals("$ref$")) {
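+                    // Keep only genuine utterances and re-sort them by audio offset so the final transcript stays chronological.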
+ final CtsResult finalResult = new CtsResult(offset, speakerId + " : " + text, speakerId);
+ finalResultsList.add(finalResult);
+ Collections.sort(finalResultsList);
+ System.out.println("Display final result : " + Integer.toString(finalResultsList.size()));
+ finalTranscriptions.clear();
+
+ for (int i = 0; i < finalResultsList.size(); i++) {
+ finalTranscriptions.add(finalResultsList.get(i).getResult());
+ }
+ appendTextLine(FunctionsList.TextJoin("\n\n", finalTranscriptions), true, finalTextArea);
+
+ }
+ });
+
+ t.canceled.addEventListener((o, e) -> {
+ System.out.println("CANCELED: Reason=" + e.getReason() + ", ErrorCode=" + e.getErrorCode()
+ + ", ErrorDetails=" + e.getErrorDetails());
+ });
+
+ t.sessionStopped.addEventListener((o, e) -> System.out.println("Session stopped event. Stop recognition"));
+
+            final Future<Void> task = t.startTranscribingAsync();
+ setOnTaskCompletedListener(task, result -> {
+ long currentTime = Calendar.getInstance().getTimeInMillis();
+ System.out.println("Recognition started. " + currentTime);
+
+ });
+ } catch (Exception ex) {
+ System.out.println(ex.getMessage());
+ displayException(ex);
+ }
+ }
+
+ private void recognizingEventHandler(ConversationTranscriptionEventArgs e) {
+ final String text = e.getResult().getText();
+ final String speakerId = e.getResult().getUserId().equals("Unidentified") ? "..." : e.getResult().getUserId();
+ final BigInteger offset = e.getResult().getOffset();
+
+ System.out.println(
+ "TransResult " + "Intermediate" + " result received: " + speakerId + ": " + text + "; Tick: " + offset);
+
+        Pair<String, BigInteger> key = new Pair<>(speakerId, offset);
+
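+        // Intermediate hypotheses are keyed by (speaker, offset): empty text or the internal "$ref$" speaker removes the entry, otherwise the newest hypothesis replaces the previous one.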
+ if (text.isEmpty() || speakerId.equals("$ref$")) {
+ transcriptions.remove(key);
+ } else {
+ if (transcriptions.containsKey(key)) {
+ if (transcriptions.get(key).getResult().getReason() == ResultReason.RecognizingSpeech) {
+ System.out.println("Two utterances occurred at the same time. Offset: " + offset + "; text: " + text);
+ }
+ }
+ transcriptions.put(key, e);
+ }
+ setRecognizingText();
+ }
+
+ private void setRecognizingText() {
+        ArrayList<Pair<BigInteger, String>> outputEvent = new ArrayList<>();
+
+ for (ConversationTranscriptionEventArgs event : transcriptions.values()) {
+ final String speakerId = event.getResult().getUserId().equals("Unidentified") ? "..."
+ : event.getResult().getUserId();
+ final BigInteger offset = event.getResult().getOffset();
+ outputEvent.add(new Pair<>(offset, speakerId + " : " + event.getResult().getText()));
+ }
+
+ Collections.sort(outputEvent,
+ (bigIntegerStringPair, t1) -> bigIntegerStringPair.getKey().compareTo(t1.getKey()));
+
+ ArrayList outputMessage = new ArrayList<>();
+        for (Pair<BigInteger, String> event : outputEvent) {
+ outputMessage.add(event.getValue());
+ }
+ appendTextLine(FunctionsList.TextJoin("\n", outputMessage), true, intermediaTextArea);
+
+ }
+
+ private void stopClicked() {
+ try {
+ transcriber.stopTranscribingAsync().get();
+            final Future<Void> task = transcriber.endConversationAsync();
+ setOnTaskCompletedListener(task, result -> {
+ System.out.println("Recognition stopped.");
+ meetingStarted = false;
+ stopMenuItem.setEnabled(false);
+ startMenuItem.setEnabled(true);
+ });
+
+ } catch (Exception ex) {
+ System.out.println(ex.getMessage());
+ displayException(ex);
+ }
+ }
+
+ private void clearTextBox(int textAreaId) {
+ appendTextLine("", true, textAreaId);
+ }
+
+ private void setRecognizedText(String s, int textAreaId) {
+ appendTextLine(s, true, textAreaId);
+ }
+
+ private void appendTextLine(final String s, final Boolean erase, int textAreaId) {
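+        // textAreaId 0 targets the final-results pane, 1 the intermediate-results pane (see finalTextArea / intermediaTextArea).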
+ switch (textAreaId) {
+ case 0: {
+ if (erase) {
+ finalResultTextArea.setText(s);
+ } else {
+ String txt = finalResultTextArea.getText().toString();
+ finalResultTextArea.setText(String.format("%s\n%s", txt, s));
+ }
+ finalResultTextArea.setCaretPosition(finalResultTextArea.getDocument().getLength());
+ break;
+ }
+ case 1: {
+ if (erase) {
+ interResultTextArea.setText(s);
+ } else {
+ String txt = interResultTextArea.getText().toString();
+ interResultTextArea.setText(String.format("%s\n%s", txt, s));
+ }
+ interResultTextArea.setCaretPosition(interResultTextArea.getDocument().getLength());
+ break;
+ }
+ default:
+ }
+ }
+
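+    // Waits for the Future on a background thread from the shared executor, then hands the result to the listener.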
+    private <T> void setOnTaskCompletedListener(Future<T> task, OnTaskCompletedListener<T> listener) {
+ s_executorService.submit(() -> {
+ T result = task.get();
+ listener.onCompleted(result);
+ return null;
+ });
+ }
+
+    private interface OnTaskCompletedListener<T> {
+ void onCompleted(T taskResult);
+ }
+
+ protected static ExecutorService s_executorService;
+ static {
+ s_executorService = Executors.newCachedThreadPool();
+ }
+
+ private void displayException(Exception ex) {
+ interResultTextArea.setText(String.format("%s\n%s", ex.getMessage(), ex.getStackTrace().toString()));
+ }
+
+}
diff --git a/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/CtsResult.java b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/CtsResult.java
new file mode 100644
index 0000000..d67a7ba
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/CtsResult.java
@@ -0,0 +1,33 @@
+package com.microsoft.cognitiveservices.speech.samples;
+
+import java.math.BigInteger;
+
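+/** One finalized transcription line; natural ordering is by audio offset so results can be sorted chronologically. */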
+public class CtsResult implements Comparable<CtsResult> {
+ BigInteger offset;
+ String result;
+ String speakerId;
+
+ public CtsResult(BigInteger a, String b, String c) {
+ offset = a;
+ result = b;
+ speakerId = c;
+ }
+
+ public BigInteger getOffset() {
+ return offset;
+ }
+
+ public String getResult() {
+ return result;
+ }
+
+ public String getSpeakerId() {
+ return speakerId;
+ }
+
+ @Override
+ public int compareTo(CtsResult o) {
+ return offset.compareTo(o.getOffset());
+ }
+
+}
diff --git a/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/FrameDisplay.java b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/FrameDisplay.java
new file mode 100644
index 0000000..1ab4002
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/FrameDisplay.java
@@ -0,0 +1,45 @@
+package com.microsoft.cognitiveservices.speech.samples;
+
+import java.awt.Dimension;
+import java.awt.Font;
+import java.awt.Toolkit;
+
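+/** Sizes the application frame relative to the screen (75% of its height, 30% of its width) and derives font sizes from the frame width. */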
+class FrameDisplay {
+ private Dimension screenSize;
+ private int frameLength;
+ private int frameWidth;
+
+ public FrameDisplay() {
+ screenSize = Toolkit.getDefaultToolkit().getScreenSize();
+ }
+
+ public int getLength() {
+ frameLength = (int) Math.round(screenSize.getHeight() * 0.75);
+ return frameLength;
+ }
+
+ public int getWidth() {
+ frameWidth = (int) Math.round(screenSize.getWidth() * 0.3);
+ return frameWidth;
+ }
+
+ public Font getButtonFont() {
+ return new Font("Tahoma", Font.BOLD, frameWidth / 30);
+ }
+
+ public Font getLabelFont() {
+ return new Font("Tahoma", Font.BOLD, frameWidth / 35);
+ }
+
+ public Font getTextFont() {
+ return new Font("Monospaced", Font.PLAIN, frameWidth / 35);
+ }
+
+ public Font getBoldTextFont() {
+ return new Font("Monospaced", Font.BOLD, frameWidth / 35);
+ }
+
+ public Font getMenuFont() {
+ return new Font("Tahoma", Font.BOLD, frameWidth / 35);
+ }
+}
\ No newline at end of file
diff --git a/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/FunctionsList.java b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/FunctionsList.java
new file mode 100644
index 0000000..9aa92a0
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/FunctionsList.java
@@ -0,0 +1,910 @@
+package com.microsoft.cognitiveservices.speech.samples;
+
+import static com.microsoft.cognitiveservices.speech.ResultReason.RecognizedKeyword;
+import static com.microsoft.cognitiveservices.speech.ResultReason.RecognizingSpeech;
+
+import java.awt.Color;
+import java.awt.Dimension;
+import java.awt.EventQueue;
+import java.awt.Font;
+import java.awt.GridBagConstraints;
+import java.awt.GridBagLayout;
+import java.awt.Insets;
+import java.awt.event.ActionEvent;
+import java.awt.event.ActionListener;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import javax.swing.JButton;
+import javax.swing.JComboBox;
+import javax.swing.JFrame;
+import javax.swing.JLabel;
+import javax.swing.JPanel;
+import javax.swing.JScrollPane;
+import javax.swing.JTextArea;
+import javax.swing.ScrollPaneConstants;
+import javax.swing.SwingConstants;
+import javax.swing.border.BevelBorder;
+import javax.swing.border.EmptyBorder;
+
+import com.microsoft.cognitiveservices.speech.CancellationDetails;
+import com.microsoft.cognitiveservices.speech.KeywordRecognitionModel;
+import com.microsoft.cognitiveservices.speech.PropertyId;
+import com.microsoft.cognitiveservices.speech.ResultReason;
+import com.microsoft.cognitiveservices.speech.SpeechConfig;
+import com.microsoft.cognitiveservices.speech.SpeechRecognitionResult;
+import com.microsoft.cognitiveservices.speech.SpeechRecognizer;
+import com.microsoft.cognitiveservices.speech.audio.AudioConfig;
+import com.microsoft.cognitiveservices.speech.intent.IntentRecognitionResult;
+import com.microsoft.cognitiveservices.speech.intent.IntentRecognizer;
+import com.microsoft.cognitiveservices.speech.intent.LanguageUnderstandingModel;
+import com.microsoft.cognitiveservices.speech.translation.SpeechTranslationConfig;
+import com.microsoft.cognitiveservices.speech.translation.TranslationRecognizer;
+
+public class FunctionsList extends JFrame {
+
+ private static final long serialVersionUID = 1L;
+ static FunctionsList MainFrame;
+ private GridBagLayout gblContentPane;
+ private JLabel recoLanguageLabel;
+    private JComboBox<String> recoLanguageComboBox;
+ private JButton recoOnceButton;
+ private JButton recoContButton;
+ private JButton recoKwsButton;
+ private JButton recoIntentButton;
+ private JButton recoIntentkwsButton;
+ private JButton ctsButton;
+ private JButton translateButton;
+    private JComboBox<String> tranLanguageComboBox;
+ private JTextArea recoResultTextArea;
+ private static String RecoLanguage;
+    private final HashMap<String, String> intentIdMap = new HashMap<>();
+ private static String LogPath = new String();
+ private final static File JarLocation = new File(ClassLoader.getSystemClassLoader().getResource(".").getPath());
+
+ // Subscription
+ private static final String SpeechSubscriptionKey = "";
+ private static final String SpeechRegion = "westus"; // You can change this if your speech region is different.
+ private static final String LuisSubscriptionKey = "";
+ private static final String LuisRegion = "westus2"; // you can change this, if you want to test the intent, and your
+ // LUIS region is different.
+ private static final String LuisAppId = "";
+
+ private static String Keyword = "computer";
+ private static String KeywordModel = new String();
+ private static final Boolean DefaultGeometry = false;
+ private static String DeviceGeometry = "Circular6+1"; // "Circular6+1", "Linear4",
+ private static String SelectedGeometry = "Circular6+1"; // "Circular6+1", "Circular3+1", "Linear4", "Linear2"
+
+ private static String SampleAudioInput = new String();
+
+ private JPanel contentPane;
+ private final String[] recolanguage = { "English (United States)", "German (Germany)",
+ "Chinese (Mandarin, simplified)", "English (India)", "Spanish (Spain)", "French (France)",
+ "Italian (Italy)", "Portuguese (Brazil)", "Russian (Russia)" };
+ private final String[] tranlanguage = { "Afrikaans", "Arabic", "Bangla", "Bosnian (Latin)", "Bulgarian",
+ "Cantonese (Traditional)", "Catalan", "Chinese Simplified", "Chinese Traditional", "Croatian", "Czech",
+ "Danish", "Dutch", "English", "Estonian", "Fijian", "Filipino", "Finnish", "French", "German", "Greek",
+ "Haitian Creole", "Hebrew", "Hindi", "Hmong Daw", "Hungarian", "Indonesian", "Italian", "Japanese",
+ "Kiswahili", "Klingon", "Klingon (plqaD)", "Korean", "Latvian", "Lithuanian", "Malagasy", "Malay",
+ "Maltese", "Norwegian", "Persian", "Polish", "Portuguese", "Queretaro Otomi", "Romanian", "Russian",
+ "Samoan", "Serbian (Cyrillic)", "Serbian (Latin)", "Slovak", "Slovenian", "Spanish", "Swedish", "Tahitian",
+ "Tamil", "Thai", "Tongan", "Turkish", "Ukrainian", "Urdu", "Vietnamese", "Welsh", "Yucatec Maya" };
+
+ private AudioConfig getAudioConfig() {
+
+ if (new File(SampleAudioInput).exists()) {
+ recoResultTextArea.setText(recoResultTextArea.getText() + "\nInfo: Using AudioFile " + SampleAudioInput);
+
+ // run from a file
+ return AudioConfig.fromWavFileInput(SampleAudioInput);
+ }
+
+ // run from the microphone
+ return AudioConfig.fromDefaultMicrophoneInput();
+ }
+
+ public static SpeechConfig getSpeechConfig() {
+ SpeechConfig speechConfig = SpeechConfig.fromSubscription(SpeechSubscriptionKey, SpeechRegion);
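+        // For a non-default microphone array, tell the SDK the device's physical geometry and which subset of microphones to use.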
+ if (!DefaultGeometry) {
+ speechConfig.setProperty("DeviceGeometry", DeviceGeometry);
+ speechConfig.setProperty("SelectedGeometry", SelectedGeometry);
+ }
+ speechConfig.setSpeechRecognitionLanguage(RecoLanguage);
+ speechConfig.setProperty(PropertyId.Speech_LogFilename, LogPath);
+
+ return speechConfig;
+ }
+
+ /**
+ * Launch the application.
+ */
+ public static void main(String[] args) {
+ EventQueue.invokeLater(new Runnable() {
+ public void run() {
+ try {
+ MainFrame = new FunctionsList();
+ MainFrame.setVisible(true);
+ MainFrame.setLocationRelativeTo(null);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ });
+ }
+
+ /**
+ * Create the frame.
+ */
+ public FunctionsList() {
+
+ // put the keyword model file-kws.table in the same path as this application
+ // runnable Jar file
+ KeywordModel = JarLocation.getAbsolutePath() + File.separator + "kws.table";
+        // log.text will be saved in the same path as this application's runnable Jar file
+ LogPath = JarLocation.getAbsolutePath() + File.separator + "log.text";
+ // Note: point this to a wav file in case you don't want to
+ // use the microphone. It will be used automatically, if
+ // the file exists on disk.
+ SampleAudioInput = JarLocation.getAbsolutePath() + File.separator + "kws-computer.wav";
+
+ FrameDisplay fd = new FrameDisplay();
+
+ setTitle("Speech Recognition Application");
+ setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
+ setBounds(100, 100, fd.getWidth(), fd.getLength());
+ Font buttonFont = fd.getButtonFont();
+ Font labelFont = fd.getLabelFont();
+ Font textFont = fd.getTextFont();
+
+ contentPane = new JPanel();
+ contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
+ setContentPane(contentPane);
+ gblContentPane = new GridBagLayout();
+
+ gblContentPane.columnWidths = new int[] { fd.getWidth() / 2 - 2, 0, fd.getWidth() / 2 - 2, 0 };
+ gblContentPane.rowHeights = new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
+ gblContentPane.columnWeights = new double[] { 1.0, 1.0, 0.0, Double.MIN_VALUE };
+ gblContentPane.rowWeights = new double[] { 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, Double.MIN_VALUE };
+ contentPane.setLayout(gblContentPane);
+
+ recoLanguageLabel = new JLabel("Recognize Language: ");
+ recoLanguageLabel.setVerticalAlignment(SwingConstants.TOP);
+ recoLanguageLabel.setForeground(Color.GRAY);
+ recoLanguageLabel.setFont(labelFont);
+ recoLanguageLabel.setHorizontalAlignment(JLabel.LEFT);
+ GridBagConstraints gbcRecoLanguageLabel = new GridBagConstraints();
+ gbcRecoLanguageLabel.anchor = GridBagConstraints.EAST;
+ gbcRecoLanguageLabel.insets = new Insets(0, 0, 5, 5);
+ gbcRecoLanguageLabel.gridx = 0;
+ gbcRecoLanguageLabel.gridy = 0;
+ contentPane.add(recoLanguageLabel, gbcRecoLanguageLabel);
+
+        recoLanguageComboBox = new JComboBox<>(recolanguage);
+ recoLanguageComboBox.setFont(labelFont);
+ GridBagConstraints gbcRecoLanguageComboBox = new GridBagConstraints();
+ gbcRecoLanguageComboBox.gridwidth = 2;
+ gbcRecoLanguageComboBox.insets = new Insets(0, 0, 5, 0);
+ gbcRecoLanguageComboBox.fill = GridBagConstraints.HORIZONTAL;
+ gbcRecoLanguageComboBox.gridx = 1;
+ gbcRecoLanguageComboBox.gridy = 0;
+ contentPane.add(recoLanguageComboBox, gbcRecoLanguageComboBox);
+
+ recoOnceButton = new JButton("Recognize Once");
+ recoOnceButton.setFont(buttonFont);
+ GridBagConstraints gbcRecoOnceButton = new GridBagConstraints();
+ gbcRecoOnceButton.insets = new Insets(0, 0, 5, 0);
+ gbcRecoOnceButton.fill = GridBagConstraints.HORIZONTAL;
+ gbcRecoOnceButton.gridwidth = 3;
+ gbcRecoOnceButton.gridx = 0;
+ gbcRecoOnceButton.gridy = 1;
+
+ contentPane.add(recoOnceButton, gbcRecoOnceButton);
+
+ recoContButton = new JButton("Recognize Continuously");
+ recoContButton.setFont(buttonFont);
+ GridBagConstraints gbcRecoContButton = new GridBagConstraints();
+ gbcRecoContButton.insets = new Insets(0, 0, 5, 0);
+ gbcRecoContButton.fill = GridBagConstraints.HORIZONTAL;
+ gbcRecoContButton.gridwidth = 3;
+ gbcRecoContButton.gridx = 0;
+ gbcRecoContButton.gridy = 2;
+ contentPane.add(recoContButton, gbcRecoContButton);
+
+ recoKwsButton = new JButton("Recognize With Wake Word");
+ recoKwsButton.setFont(buttonFont);
+ GridBagConstraints gbcRecoKwsButton = new GridBagConstraints();
+ gbcRecoKwsButton.insets = new Insets(0, 0, 5, 0);
+ gbcRecoKwsButton.fill = GridBagConstraints.HORIZONTAL;
+ gbcRecoKwsButton.gridwidth = 3;
+ gbcRecoKwsButton.gridx = 0;
+ gbcRecoKwsButton.gridy = 3;
+ contentPane.add(recoKwsButton, gbcRecoKwsButton);
+
+ recoIntentButton = new JButton("Recognize Intent");
+ recoIntentButton.setFont(buttonFont);
+ GridBagConstraints gbcRecoIntentButton = new GridBagConstraints();
+ gbcRecoIntentButton.insets = new Insets(0, 0, 5, 0);
+ gbcRecoIntentButton.fill = GridBagConstraints.HORIZONTAL;
+ gbcRecoIntentButton.gridwidth = 3;
+ gbcRecoIntentButton.gridx = 0;
+ gbcRecoIntentButton.gridy = 4;
+ contentPane.add(recoIntentButton, gbcRecoIntentButton);
+
+ recoIntentkwsButton = new JButton("Recognize Intent With Wake Word");
+ recoIntentkwsButton.setFont(buttonFont);
+ GridBagConstraints gbcRecoIntentkwsButton = new GridBagConstraints();
+ gbcRecoIntentkwsButton.insets = new Insets(0, 0, 5, 0);
+ gbcRecoIntentkwsButton.fill = GridBagConstraints.HORIZONTAL;
+ gbcRecoIntentkwsButton.gridwidth = 3;
+ gbcRecoIntentkwsButton.gridx = 0;
+ gbcRecoIntentkwsButton.gridy = 5;
+ contentPane.add(recoIntentkwsButton, gbcRecoIntentkwsButton);
+
+ ctsButton = new JButton("Conversation Transcription");
+ ctsButton.setFont(buttonFont);
+ GridBagConstraints gbcCtsButton = new GridBagConstraints();
+ gbcCtsButton.insets = new Insets(0, 0, 5, 0);
+ gbcCtsButton.fill = GridBagConstraints.HORIZONTAL;
+ gbcCtsButton.gridwidth = 3;
+ gbcCtsButton.gridx = 0;
+ gbcCtsButton.gridy = 6;
+ contentPane.add(ctsButton, gbcCtsButton);
+
+ translateButton = new JButton("Translate to: ");
+ translateButton.setFont(buttonFont);
+ GridBagConstraints gbcTranslateButton = new GridBagConstraints();
+ gbcTranslateButton.fill = GridBagConstraints.HORIZONTAL;
+ gbcTranslateButton.insets = new Insets(0, 0, 5, 5);
+ gbcTranslateButton.gridx = 0;
+ gbcTranslateButton.gridy = 7;
+ contentPane.add(translateButton, gbcTranslateButton);
+
+        tranLanguageComboBox = new JComboBox<>(tranlanguage);
+ tranLanguageComboBox.setSelectedIndex(7);
+ tranLanguageComboBox.setFont(labelFont);
+ GridBagConstraints gbcTranLanguageComboBox = new GridBagConstraints();
+ gbcTranLanguageComboBox.insets = new Insets(0, 0, 5, 0);
+ gbcTranLanguageComboBox.gridwidth = 2;
+ gbcTranLanguageComboBox.fill = GridBagConstraints.HORIZONTAL;
+ gbcTranLanguageComboBox.gridx = 1;
+ gbcTranLanguageComboBox.gridy = 7;
+ contentPane.add(tranLanguageComboBox, gbcTranLanguageComboBox);
+
+ recoResultTextArea = new JTextArea();
+ recoResultTextArea.setEditable(false);
+ recoResultTextArea.setLineWrap(true);
+ recoResultTextArea.setWrapStyleWord(true);
+ recoResultTextArea.setFont(textFont);
+ GridBagConstraints gbcRecoResultTextArea = new GridBagConstraints();
+ gbcRecoResultTextArea.gridwidth = 3;
+ gbcRecoResultTextArea.insets = new Insets(0, 0, 0, 5);
+ gbcRecoResultTextArea.fill = GridBagConstraints.BOTH;
+ gbcRecoResultTextArea.gridx = 0;
+ gbcRecoResultTextArea.gridy = 8;
+
+ JScrollPane scrollPane = new JScrollPane(recoResultTextArea);
+
+ scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
+ scrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
+ scrollPane.setViewportBorder(new BevelBorder(BevelBorder.LOWERED, null, null, null, null));
+ scrollPane.setPreferredSize(new Dimension(0, 0));
+
+ contentPane.add(scrollPane, gbcRecoResultTextArea);
+
+ ///////////////////////////////////////////////////
+ // check if we have a valid key
+ ///////////////////////////////////////////////////
+ if (SpeechSubscriptionKey.startsWith("<") || SpeechSubscriptionKey.endsWith(">")) {
+ recoResultTextArea.setText(
+ "Error: Replace SpeechSubscriptionKey with your actual speech subscription key and re-compile!");
+ return;
+ }
+ ///////////////////////////////////////////////////
+ // check if we have a valid microphone parameter
+ ///////////////////////////////////////////////////
+ if (!DefaultGeometry) {
+ if (DeviceGeometry.startsWith("<") || DeviceGeometry.endsWith(">")) {
+ recoResultTextArea
+ .setText("Error: Replace DeviceGeometry with your actual microphone parameter and re-compile");
+ return;
+ }
+ if (SelectedGeometry.startsWith("<") || SelectedGeometry.endsWith(">")) {
+ recoResultTextArea
+ .setText("Error: Replace SelectedGeometry with your actual select parameter and re-compile!");
+ return;
+ }
+ }
+ if (LuisSubscriptionKey.startsWith("<") || LuisSubscriptionKey.endsWith(">")) {
+ recoResultTextArea.setText(recoResultTextArea.getText()
+ + "\nWarning: Replace LuisSubscriptionKey with your actual Luis subscription key to use Intents!");
+ }
+
+ ///////////////////////////////////////////////////
+ // recognize Once with intermediate results
+ ///////////////////////////////////////////////////
+
+ recoOnceButton.addActionListener(new ActionListener() {
+ public void actionPerformed(ActionEvent e) {
+
+ RecoLanguage = LanguageCode.getCode(0,
+ recoLanguageComboBox.getItemAt(recoLanguageComboBox.getSelectedIndex()).toString());
+ disableButtons();
+ clearTextBox();
+ try {
+ System.out.println(" Speech Recognize Once , recognize language: " + RecoLanguage);
+ final SpeechRecognizer reco = new SpeechRecognizer(getSpeechConfig(), getAudioConfig());
+ reco.recognizing.addEventListener((o, speechRecognitionResultEventArgs) -> {
+ final String s = speechRecognitionResultEventArgs.getResult().getText();
+ System.out.println("Intermediate result received: " + s);
+ setRecognizedText(s);
+ });
+
+                    final Future<SpeechRecognitionResult> task = reco.recognizeOnceAsync();
+ setOnTaskCompletedListener(task, result -> {
+ final String s = result.getText();
+ reco.close();
+ System.out.println("Recognizer returned: " + s);
+ setRecognizedText(s);
+ enableButtons();
+ });
+ } catch (Exception ex) {
+ System.out.println(ex.getMessage());
+ displayException(ex);
+ }
+
+ }
+ });
+
+ ///////////////////////////////////////////////////
+ // recognize continuously
+ ///////////////////////////////////////////////////
+ recoContButton.addActionListener(new ActionListener() {
+ private boolean continuousListeningStarted = false;
+ private SpeechRecognizer reco = null;
+ private String buttonText = "";
+            private ArrayList<String> content = new ArrayList<>();
+
+ public void actionPerformed(ActionEvent e) {
+ RecoLanguage = LanguageCode.getCode(0,
+ recoLanguageComboBox.getItemAt(recoLanguageComboBox.getSelectedIndex()).toString());
+ disableButtons();
+
+ if (continuousListeningStarted) {
+ if (reco != null) {
+                        final Future<Void> task = reco.stopContinuousRecognitionAsync();
+ setOnTaskCompletedListener(task, result -> {
+ System.out.println("Continuous recognition stopped.");
+ recoContButton.setText(buttonText);
+ enableButtons();
+ continuousListeningStarted = false;
+ });
+ } else {
+ continuousListeningStarted = false;
+ }
+
+ return;
+ }
+
+ clearTextBox();
+
+ try {
+ content.clear();
+ reco = new SpeechRecognizer(getSpeechConfig(), getAudioConfig());
+ reco.recognizing.addEventListener((o, speechRecognitionResultEventArgs) -> {
+ final String s = speechRecognitionResultEventArgs.getResult().getText();
+ System.out.println("Intermediate result received: " + s);
+ content.add(s);
+ setRecognizedText(TextJoin(" ", content));
+ content.remove(content.size() - 1);
+ });
+
+ reco.recognized.addEventListener((o, speechRecognitionResultEventArgs) -> {
+ final String s = speechRecognitionResultEventArgs.getResult().getText();
+ System.out.println("Final result received: " + s);
+ content.add(s);
+ setRecognizedText(TextJoin(" ", content));
+ });
+
+                    final Future<Void> task = reco.startContinuousRecognitionAsync();
+ setOnTaskCompletedListener(task, result -> {
+ continuousListeningStarted = true;
+ buttonText = recoContButton.getText().toString();
+ recoContButton.setText("Stop");
+ recoContButton.setEnabled(true);
+
+ });
+ } catch (Exception ex) {
+ System.out.println(ex.getMessage());
+ displayException(ex);
+ }
+ }
+ });
+
+ ///////////////////////////////////////////////////
+ // recognize with wake word
+ ///////////////////////////////////////////////////
+ recoKwsButton.addActionListener(new ActionListener() {
+
+ private static final String delimiter = "\n";
+            private final ArrayList<String> content = new ArrayList<>();
+ private boolean continuousListeningStarted = false;
+ private SpeechRecognizer reco = null;
+ private String buttonText = "";
+
+ public void actionPerformed(ActionEvent e) {
+
+ RecoLanguage = LanguageCode.getCode(0,
+ recoLanguageComboBox.getItemAt(recoLanguageComboBox.getSelectedIndex()).toString());
+ disableButtons();
+
+ if (continuousListeningStarted) {
+ if (reco != null) {
+                        final Future<Void> task = reco.stopKeywordRecognitionAsync();
+ setOnTaskCompletedListener(task, result -> {
+ System.out.println("Continuous recognition stopped.");
+ recoKwsButton.setText(buttonText);
+ enableButtons();
+ continuousListeningStarted = false;
+ });
+ } else {
+ continuousListeningStarted = false;
+ }
+ return;
+ }
+
+ clearTextBox();
+
+ content.clear();
+ content.add("");
+ content.add("");
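+                // content[0] holds the wake-word status line; each later entry is one numbered utterance, the last one being the utterance currently in progress.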
+ try {
+ reco = new SpeechRecognizer(getSpeechConfig(), getAudioConfig());
+ reco.sessionStarted.addEventListener((o, sessionEventArgs) -> {
+ System.out.println(
+ "got a session (" + sessionEventArgs.getSessionId() + ")event: sessionStarted");
+ content.set(0, "KeywordModel `" + Keyword + "` detected");
+ setRecognizedText(TextJoin(delimiter, content));
+
+ });
+
+ reco.sessionStopped.addEventListener((o, sessionEventArgs) -> System.out
+ .println("got a session (" + sessionEventArgs.getSessionId() + ")event: sessionStopped"));
+
+ reco.recognizing.addEventListener((o, intermediateResultEventArgs) -> {
+ final String s = intermediateResultEventArgs.getResult().getText();
+ ResultReason rr = intermediateResultEventArgs.getResult().getReason();
+                        System.out.println("got an intermediate result: " + s + " result reason: " + rr.toString());
+ if (rr == RecognizingSpeech) {
+ Integer index = content.size() - 2;
+ content.set(index + 1, index.toString() + ". " + s);
+ setRecognizedText(TextJoin(delimiter, content));
+ }
+ });
+ reco.recognized.addEventListener((o, finalResultEventArgs) -> {
+ String s = finalResultEventArgs.getResult().getText();
+ ResultReason rr = finalResultEventArgs.getResult().getReason();
+
+ if (rr == RecognizedKeyword) {
+ content.add("");
+ }
+
+ if (!s.isEmpty()) {
+ Integer index = content.size() - 2;
+ content.set(index + 1, index.toString() + ". " + s);
+ content.set(0, "say `" + Keyword + "`...");
+ setRecognizedText(TextJoin(delimiter, content));
+ System.out.println("got a final result: " + " " + Integer.toString(index + 1) + " " + s
+ + " result reason:" + rr.toString());
+ }
+
+ });
+
+ File kwsFile = new File(KeywordModel);
+ if (kwsFile.exists() && kwsFile.isFile()) {
+ final KeywordRecognitionModel keywordRecognitionModel = KeywordRecognitionModel
+ .fromFile(KeywordModel);
+                        final Future<Void> task = reco.startKeywordRecognitionAsync(keywordRecognitionModel);
+ setOnTaskCompletedListener(task, result -> {
+ content.set(0, "say `" + Keyword + "`...");
+ setRecognizedText(TextJoin(delimiter, content));
+ continuousListeningStarted = true;
+
+ buttonText = recoKwsButton.getText().toString();
+ recoKwsButton.setText("Stop");
+ recoKwsButton.setEnabled(true);
+ });
+ } else {
+ recoResultTextArea.setText("Error: can not find the keyword table file");
+ return;
+ }
+
+ } catch (Exception ex) {
+ System.out.println(ex.getMessage());
+ displayException(ex);
+
+ }
+ }
+ });
+
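+        // Local intent ids ("1", "2") mapped to the LUIS intent names that are registered with each IntentRecognizer below.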
+ intentIdMap.put("1", "play music");
+ intentIdMap.put("2", "stop");
+ ///////////////////////////////////////////////////
+ // recognize intent
+ ///////////////////////////////////////////////////
+ recoIntentButton.addActionListener(new ActionListener() {
+
+ public void actionPerformed(ActionEvent e) {
+                final ArrayList<String> content = new ArrayList<>();
+
+ RecoLanguage = LanguageCode.getCode(0,
+ recoLanguageComboBox.getItemAt(recoLanguageComboBox.getSelectedIndex()).toString());
+ disableButtons();
+ clearTextBox();
+
+ content.add("");
+ content.add("");
+ try {
+ final SpeechConfig speechIntentConfig = SpeechConfig.fromSubscription(LuisSubscriptionKey,
+ LuisRegion);
+ if (!DefaultGeometry) {
+ speechIntentConfig.setProperty("DeviceGeometry", DeviceGeometry);
+ speechIntentConfig.setProperty("SelectedGeometry", SelectedGeometry);
+ }
+ speechIntentConfig.setSpeechRecognitionLanguage(RecoLanguage);
+ speechIntentConfig.setProperty(PropertyId.Speech_LogFilename, LogPath);
+ IntentRecognizer reco = new IntentRecognizer(speechIntentConfig, getAudioConfig());
+
+ LanguageUnderstandingModel intentModel = LanguageUnderstandingModel.fromAppId(LuisAppId);
+                    for (Map.Entry<String, String> entry : intentIdMap.entrySet()) {
+ reco.addIntent(intentModel, entry.getValue(), entry.getKey());
+ System.out.println("intent: " + entry.getValue() + " Intent Id: " + entry.getKey());
+ }
+
+ reco.recognizing.addEventListener((o, intentRecognitionResultEventArgs) -> {
+ final String s = intentRecognitionResultEventArgs.getResult().getText();
+ System.out.println("Intermediate result received: " + s);
+ content.set(0, s);
+ setRecognizedText(TextJoin("\n", content));
+ });
+
+                    final Future<IntentRecognitionResult> task = reco.recognizeOnceAsync();
+ setOnTaskCompletedListener(task, result -> {
+ System.out.println("Intent recognition stopped.");
+ String s = result.getText();
+
+ if (result.getReason() != ResultReason.RecognizedIntent) {
+ String errorDetails = (result.getReason() == ResultReason.Canceled)
+ ? CancellationDetails.fromResult(result).getErrorDetails()
+ : "";
+ s = "Intent failed with " + result.getReason()
+ + ". Did you enter your Language Understanding subscription?"
+ + System.lineSeparator() + errorDetails;
+ }
+ String intentId = result.getIntentId();
+ System.out.println("IntentId: " + intentId);
+ String intent = "";
+ if (intentIdMap.containsKey(intentId)) {
+ intent = intentIdMap.get(intentId);
+ }
+
+ System.out.println("S: " + s + ", intent: " + intent);
+ content.set(0, s);
+ content.set(1, " [intent: " + intent + "]");
+ reco.close();
+ setRecognizedText(TextJoin("\n", content));
+ enableButtons();
+ });
+ } catch (Exception ex) {
+ System.out.println(ex.getMessage());
+ displayException(ex);
+ }
+ }
+ });
+
+ ///////////////////////////////////////////////////
+ // recognize intent with wake word
+ ///////////////////////////////////////////////////
+ recoIntentkwsButton.addActionListener(new ActionListener() {
+
+ private static final String delimiter = "\n";
+            private final ArrayList<String> content = new ArrayList<>();
+ private boolean continuousListeningStarted = false;
+ private IntentRecognizer reco = null;
+ private String buttonText = "";
+
+ public void actionPerformed(ActionEvent e) {
+
+ RecoLanguage = LanguageCode.getCode(0,
+ recoLanguageComboBox.getItemAt(recoLanguageComboBox.getSelectedIndex()).toString());
+ disableButtons();
+
+ if (continuousListeningStarted) {
+ if (reco != null) {
+ final Future<Void> task = reco.stopKeywordRecognitionAsync();
+ setOnTaskCompletedListener(task, result -> {
+ System.out.println("Continuous recognition stopped.");
+ recoIntentkwsButton.setText(buttonText);
+ enableButtons();
+ continuousListeningStarted = false;
+ });
+ } else {
+ continuousListeningStarted = false;
+ }
+
+ return;
+ }
+
+ clearTextBox();
+
+ content.clear();
+ content.add("");
+ content.add("");
+ try {
+ final SpeechConfig intentSpeechConfig = SpeechConfig.fromSubscription(LuisSubscriptionKey,
+ LuisRegion);
+ intentSpeechConfig.setSpeechRecognitionLanguage(RecoLanguage);
+ if (!DefaultGeometry) {
+ intentSpeechConfig.setProperty("DeviceGeometry", DeviceGeometry);
+ intentSpeechConfig.setProperty("SelectedGeometry", SelectedGeometry);
+ }
+ intentSpeechConfig.setProperty(PropertyId.Speech_LogFilename, LogPath);
+ reco = new IntentRecognizer(intentSpeechConfig, getAudioConfig());
+
+ LanguageUnderstandingModel intentModel = LanguageUnderstandingModel.fromAppId(LuisAppId);
+ for (Map.Entry<String, String> entry : intentIdMap.entrySet()) {
+ reco.addIntent(intentModel, entry.getValue(), entry.getKey());
+ System.out.println("intent: " + entry.getValue() + " Intent Id: " + entry.getKey());
+ }
+
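+ // The sample reports the detected keyword as soon as a recognition session starts.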
+ reco.sessionStarted.addEventListener((o, sessionEventArgs) -> {
+ System.out.println(
+ "got a session (" + sessionEventArgs.getSessionId() + ") event: sessionStarted");
+ content.set(0, "KeywordModel `" + Keyword + "` detected");
+ setRecognizedText(TextJoin(delimiter, content));
+ });
+
+ reco.sessionStopped.addEventListener((o, sessionEventArgs) -> System.out
+ .println("got a session (" + sessionEventArgs.getSessionId() + ") event: sessionStopped"));
+
+ reco.recognizing.addEventListener((o, intermediateResultEventArgs) -> {
+ final String s = intermediateResultEventArgs.getResult().getText();
+ ResultReason rr = intermediateResultEventArgs.getResult().getReason();
+ System.out.println("got a intermediate result: " + s + " result reason:" + rr.toString());
+ if (rr == RecognizingSpeech) {
+ Integer index = content.size() - 2;
+ content.set(index + 1, index.toString() + ". " + s);
+ setRecognizedText(TextJoin(delimiter, content));
+ }
+ });
+
+ reco.recognized.addEventListener((o, finalResultEventArgs) -> {
+ String s = finalResultEventArgs.getResult().getText();
+ String intentId = finalResultEventArgs.getResult().getIntentId();
+ System.out.println("IntentId: " + intentId);
+ String intent = "";
+ if (intentIdMap.containsKey(intentId)) {
+ intent = intentIdMap.get(intentId);
+ }
+
+ ResultReason rr = finalResultEventArgs.getResult().getReason();
+ System.out.println("got a final result: " + s + " result reason:" + rr.toString());
+ if (rr == RecognizedKeyword) {
+ content.add("");
+ }
+ if (!s.isEmpty()) {
+ Integer index = content.size() - 2;
+ content.set(index + 1, index.toString() + ". " + s + " [intent: " + intent + "]");
+ content.set(0, "say `" + Keyword + "`...");
+ setRecognizedText(TextJoin(delimiter, content));
+ }
+ });
+
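+ // Keyword spotting needs the offline keyword model (.table file) generated for the chosen wake word.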
+ File kwsFile = new File(KeywordModel);
+ if (kwsFile.exists() && kwsFile.isFile()) {
+
+ final KeywordRecognitionModel keywordRecognitionModel = KeywordRecognitionModel
+ .fromFile(KeywordModel);
+ final Future<Void> task = reco.startKeywordRecognitionAsync(keywordRecognitionModel);
+ setOnTaskCompletedListener(task, result -> {
+ content.set(0, "say `" + Keyword + "`...");
+ setRecognizedText(TextJoin(delimiter, content));
+ continuousListeningStarted = true;
+ buttonText = recoIntentkwsButton.getText().toString();
+ recoIntentkwsButton.setText("Stop");
+ recoIntentkwsButton.setEnabled(true);
+ });
+ } else {
+ recoResultTextArea.setText("Error: can not find the keyword table file");
+ return;
+ }
+ } catch (Exception ex) {
+ System.out.println(ex.getMessage());
+ displayException(ex);
+ }
+ }
+ });
+
+ ///////////////////////////////////////////////////
+ // Conversation Transcription
+ ///////////////////////////////////////////////////
+ ctsButton.addActionListener(new ActionListener() {
+ public void actionPerformed(ActionEvent e) {
+
+ Cts ctsFrame = new Cts();
+ ctsFrame.setLocationRelativeTo(null);
+ ctsFrame.setVisible(true);
+ MainFrame.setVisible(false);
+ }
+ });
+
+ ///////////////////////////////////////////////////
+ // recognize and translate
+ ///////////////////////////////////////////////////
+ translateButton.addActionListener(new ActionListener() {
+
+ private boolean continuousListeningStarted = false;
+ private TranslationRecognizer reco = null;
+ private String buttonText = "";
+ private ArrayList<String> content = new ArrayList<>();
+ private String TranLanguage;
+
+ public void actionPerformed(ActionEvent e) {
+
+ RecoLanguage = LanguageCode.getCode(0,
+ recoLanguageComboBox.getItemAt(recoLanguageComboBox.getSelectedIndex()).toString());
+ TranLanguage = LanguageCode.getCode(1,
+ tranLanguageComboBox.getItemAt(tranLanguageComboBox.getSelectedIndex()).toString());
+ disableButtons();
+ if (continuousListeningStarted) {
+ if (reco != null) {
+ final Future<Void> task = reco.stopContinuousRecognitionAsync();
+ setOnTaskCompletedListener(task, result -> {
+ System.out.println("Continuous recognition stopped.");
+ translateButton.setText(buttonText);
+ enableButtons();
+ continuousListeningStarted = false;
+ });
+ } else {
+ continuousListeningStarted = false;
+ }
+ return;
+ }
+
+ clearTextBox();
+
+ try {
+ content.clear();
+ final SpeechTranslationConfig translationSpeechConfig = SpeechTranslationConfig
+ .fromSubscription(SpeechSubscriptionKey, SpeechRegion);
+ if (!DefaultGeometry) {
+ translationSpeechConfig.setProperty("DeviceGeometry", DeviceGeometry);
+ translationSpeechConfig.setProperty("SelectedGeometry", SelectedGeometry);
+ }
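+ // Both the recognition language and the selected target language are added as translation
+ // targets, so each result carries a translation for both.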
+ translationSpeechConfig.addTargetLanguage(RecoLanguage);
+ translationSpeechConfig.addTargetLanguage(TranLanguage);
+ translationSpeechConfig.setSpeechRecognitionLanguage(RecoLanguage);
+ translationSpeechConfig.setProperty(PropertyId.Speech_LogFilename, LogPath);
+
+ reco = new TranslationRecognizer(translationSpeechConfig, getAudioConfig());
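+ // Partial translations are surfaced through the recognizing event; completed utterances through the recognized event.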
+ reco.recognizing.addEventListener((o, speechRecognitionResultEventArgs) -> {
+ final Map<String, String> translations = speechRecognitionResultEventArgs.getResult()
+ .getTranslations();
+ StringBuffer sb = new StringBuffer();
+ for (String key : translations.keySet()) {
+ sb.append(key + " -> '" + translations.get(key) + "'\n");
+ }
+ final String s = sb.toString();
+
+ System.out.println("Intermediate result received: " + s);
+ content.add(s);
+ setRecognizedText(TextJoin(" ", content));
+ content.remove(content.size() - 1);
+ });
+
+ reco.recognized.addEventListener((o, speechRecognitionResultEventArgs) -> {
+ final Map<String, String> translations = speechRecognitionResultEventArgs.getResult()
+ .getTranslations();
+ StringBuffer sb = new StringBuffer();
+ for (String key : translations.keySet()) {
+ if (!translations.get(key).isEmpty()) {
+ sb.append(key + " -> '" + translations.get(key) + "'\n");
+ }
+ }
+ final String s = sb.toString();
+ System.out.println("Final result received: " + s);
+ if (!s.isEmpty()) {
+ content.add(s);
+ }
+ setRecognizedText(TextJoin(" ", content));
+ });
+
+ final Future<Void> task = reco.startContinuousRecognitionAsync();
+ setOnTaskCompletedListener(task, result -> {
+ continuousListeningStarted = true;
+ buttonText = translateButton.getText().toString();
+ translateButton.setText("Stop");
+ translateButton.setEnabled(true);
+
+ });
+ } catch (Exception ex) {
+ System.out.println(ex.getMessage());
+ displayException(ex);
+ }
+ }
+ });
+ }
+
+ private void displayException(Exception ex) {
+ recoResultTextArea.setText(ex.getMessage() + "\n" + java.util.Arrays.toString(ex.getStackTrace()));
+ }
+
+ private void clearTextBox() {
+ setTextbox("");
+ }
+
+ private void setRecognizedText(final String s) {
+ setTextbox(s);
+ }
+
+ private void setTextbox(final String s) {
+ recoResultTextArea.setText(s);
+ recoResultTextArea.setCaretPosition(recoResultTextArea.getDocument().getLength());
+ }
+
+ private void disableButtons() {
+ recoOnceButton.setEnabled(false);
+ recoContButton.setEnabled(false);
+ recoKwsButton.setEnabled(false);
+ recoIntentButton.setEnabled(false);
+ recoIntentkwsButton.setEnabled(false);
+ ctsButton.setEnabled(false);
+ translateButton.setEnabled(false);
+ }
+
+ private void enableButtons() {
+ recoOnceButton.setEnabled(true);
+ recoContButton.setEnabled(true);
+ recoKwsButton.setEnabled(true);
+ recoIntentButton.setEnabled(true);
+ recoIntentkwsButton.setEnabled(true);
+ ctsButton.setEnabled(true);
+ translateButton.setEnabled(true);
+ }
+
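+ // Joins the accumulated result strings with the given delimiter.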
+ public static String TextJoin(CharSequence delimiter, ArrayList<String> tokens) {
+ final int length = tokens.size();
+ if (length == 0) {
+ return "";
+ }
+ final StringBuilder sb = new StringBuilder();
+ sb.append(tokens.get(0));
+ for (int i = 1; i < length; i++) {
+ sb.append(delimiter);
+ sb.append(tokens.get(i));
+ }
+ return sb.toString();
+ }
+
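+ // Waits for the SDK's Future on the shared background executor and hands the result
+ // to the listener, keeping the UI event thread free.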
+ private <T> void setOnTaskCompletedListener(Future<T> task, OnTaskCompletedListener<T> listener) {
+ s_executorService.submit(() -> {
+ T result = task.get();
+ listener.onCompleted(result);
+ return null;
+ });
+ }
+
+ private interface OnTaskCompletedListener<T> {
+ void onCompleted(T taskResult);
+ }
+
+ protected static ExecutorService s_executorService;
+ static {
+ s_executorService = Executors.newCachedThreadPool();
+ }
+}
diff --git a/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/LanguageCode.java b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/LanguageCode.java
new file mode 100644
index 0000000..c415566
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/LanguageCode.java
@@ -0,0 +1,106 @@
+package com.microsoft.cognitiveservices.speech.samples;
+
+import java.util.HashMap;
+
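+// Maps the display names shown in the language combo boxes to their BCP-47 codes:
+// getCode(0, ...) resolves speech recognition languages, getCode(1, ...) translation targets.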
+public class LanguageCode {
+ private static HashMap<String, String> mapRecolanguageCode = new HashMap<String, String>() {
+
+ private static final long serialVersionUID = 1L;
+
+ {
+ put("English (United States)", "en-US");
+ put("German (Germany)", "de-DE");
+ put("Chinese (Mandarin, simplified)", "zh-CN");
+ put("English (India)", "en-IN");
+ put("Spanish (Spain)", "es-ES");
+ put("French (France)", "fr-FR");
+ put("Italian (Italy)", "it-IT");
+ put("Portuguese (Brazil)", "pt-BR");
+ put("Russian (Russia)", "ru-RU");
+ }
+ };
+ private static HashMap<String, String> mapTranlanguageCode = new HashMap<String, String>() {
+ /**
+ *
+ */
+ private static final long serialVersionUID = 1L;
+
+ {
+ put("Afrikaans", "af");
+ put("Arabic", "ar");
+ put("Bangla", "bn");
+ put("Bosnian (Latin)", "bs");
+ put("Bulgarian", "bg");
+ put("Cantonese (Traditional)", "yue");
+ put("Catalan", "ca");
+ put("Chinese Simplified", "zh-Hans");
+ put("Chinese Traditional", "zh-Hant");
+ put("Croatian", "hr");
+ put("Czech", "cs");
+ put("Danish", "da");
+ put("Dutch", "nl");
+ put("English", "en");
+ put("Estonian", "et");
+ put("Fijian", "fj");
+ put("Filipino", "fil");
+ put("Finnish", "fi");
+ put("French", "fr");
+ put("German", "de");
+ put("Greek", "el");
+ put("Haitian Creole", "ht");
+ put("Hebrew", "he");
+ put("Hindi", "hi");
+ put("Hmong Daw", "mww");
+ put("Hungarian", "hu");
+ put("Indonesian", "id");
+ put("Italian", "it");
+ put("Japanese", "ja");
+ put("Kiswahili", "sw");
+ put("Klingon", "tlh");
+ put("Klingon (plqaD)", "tlh-Qaak");
+ put("Korean", "ko");
+ put("Latvian", "lv");
+ put("Lithuanian", "lt");
+ put("Malagasy", "mg");
+ put("Malay", "ms");
+ put("Maltese", "mt");
+ put("Norwegian", "nb");
+ put("Persian", "fa");
+ put("Polish", "pl");
+ put("Portuguese", "pt");
+ put("Queretaro Otomi", "otq");
+ put("Romanian", "ro");
+ put("Russian", "ru");
+ put("Samoan", "sm");
+ put("Serbian (Cyrillic)", "sr-Cyrl");
+ put("Serbian (Latin)", "sr-Latn");
+ put("Slovak", "sk");
+ put("Slovenian", "sl");
+ put("Spanish", "es");
+ put("Swedish", "sv");
+ put("Tahitian", "ty");
+ put("Tamil", "ta");
+ put("Thai", "th");
+ put("Tongan", "to");
+ put("Turkish", "tr");
+ put("Ukrainian", "uk");
+ put("Urdu", "ur");
+ put("Vietnamese", "vi");
+ put("Welsh", "cy");
+ put("Yucatec Maya", "yua");
+ }
+ };
+
+ public static String getCode(int recOrTran, String language) {
+ switch (recOrTran) {
+ case 0: {
+ return mapRecolanguageCode.get(language);
+ }
+ case 1: {
+ return mapTranlanguageCode.get(language);
+ }
+ }
+ return null;
+ }
+
+}
diff --git a/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/Pair.java b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/Pair.java
new file mode 100644
index 0000000..9370399
--- /dev/null
+++ b/Samples/Windows_Linux/SampleDemo/src/com/microsoft/cognitiveservices/speech/samples/Pair.java
@@ -0,0 +1,50 @@
+package com.microsoft.cognitiveservices.speech.samples;
+
+import java.io.Serializable;
+
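+// Minimal immutable key/value holder with value-based equals/hashCode.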
+public class Pair<Key, Value> implements Serializable {
+ /**
+ *
+ */
+ private static final long serialVersionUID = 1L;
+ private Key key;
+ private Value value;
+
+ public Key getKey() {
+ return key;
+ }
+
+ public Value getValue() {
+ return value;
+ }
+
+ public Pair(Key key, Value value) {
+ this.key = key;
+ this.value = value;
+ }
+
+ public String toString() {
+ return key + "/" + value;
+ }
+
+ public int hashCode() {
+ int hash = 7;
+ hash = 31 * hash + (key != null ? key.hashCode() : 0);
+ hash = 31 * hash + (value != null ? value.hashCode() : 0);
+ return hash;
+ }
+
+ public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o instanceof Pair) {
+ Pair<?, ?> pair = (Pair<?, ?>) o;
+ if (key != null ? !key.equals(pair.key) : pair.key != null)
+ return false;
+ if (value != null ? !value.equals(pair.value) : pair.value != null)
+ return false;
+ return true;
+ }
+ return false;
+ }
+}