This repository has been archived by the owner on Nov 16, 2023. It is now read-only.

Commit

Merge pull request #26 from Azure-Samples/jychoudh/1.6.0_sample_update
Update sample code for version 1.6.0
JingliLin2018 authored Jul 10, 2019
2 parents fd28246 + d9306c5 commit 74e0067
Showing 26 changed files with 2,448 additions and 99 deletions.
13 changes: 0 additions & 13 deletions CHANGELOG.md

This file was deleted.

12 changes: 9 additions & 3 deletions README.md
@@ -4,12 +4,18 @@ This project hosts the **samples** for the Cognitive Services Speech Devices SDK

## Features

This repository hosts samples that help you to get started with several features of the Speech Devices SDK. Please note the Speech Devices SDK only works with the specific devices. At this point, it only works with [Roobo dev kits](http://ddk.roobo.com/).
This repository hosts samples that help you to get started with several features of the Speech Devices SDK. Please note the Speech Devices SDK only works with specific devices. At this point, it works with [Roobo dev kits](http://ddk.roobo.com/) and [Azure Kinect DK](https://azure.microsoft.com/en-us/services/kinect-dk/).

## Getting Started

Currently only samples for Android devices are provided in this repository. Check out the [sample code walkthrough](https://github.com/Azure-Samples/Cognitive-Services-Speech-Devices-SDK/blob/master/Samples/Android/Speech%20Devices%20SDK%20Starter%20App/Sample_Code_Walkthrough.md) for more details. More OS support will be added later.
Check out the [sample code walkthrough](https://github.com/Azure-Samples/Cognitive-Services-Speech-Devices-SDK/blob/master/Samples/Android/Speech%20Devices%20SDK%20Starter%20App/Sample_Code_Walkthrough.md) for details on the sample code.

Also here's how you can [get the Speech Devices SDK](https://aka.ms/sdsdk-get), and [get started with the Speech Devices SDK](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/speech-devices-sdk-qsg).
Here's how you can [get the Speech Devices SDK](https://aka.ms/sdsdk-get).

The following quickstarts demonstrate how to use sample applications.

* [Android](https://docs.microsoft.com/en-us/azure/cognitive-services/Speech-Service/speech-devices-sdk-android-quickstart)
* [Linux](https://docs.microsoft.com/en-us/azure/cognitive-services/Speech-Service/speech-devices-sdk-linux-quickstart)
* [Windows](https://docs.microsoft.com/en-us/azure/cognitive-services/Speech-Service/speech-devices-sdk-windows-quickstart)

Cognitive Services Speech Devices SDK supports customized wake words for keyword spotting. Check out this guide to [create a customized wake word](https://aka.ms/sdsdk-kws) for your device.
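
The wake-word guide linked above comes down to loading a keyword model and starting keyword recognition on a `SpeechRecognizer`. The following is a minimal Java sketch, not part of this commit's sample code, assuming a hypothetical `kws.table` model file downloaded from that guide and placeholder subscription values:

```java
import com.microsoft.cognitiveservices.speech.KeywordRecognitionModel;
import com.microsoft.cognitiveservices.speech.ResultReason;
import com.microsoft.cognitiveservices.speech.SpeechConfig;
import com.microsoft.cognitiveservices.speech.SpeechRecognizer;

public class WakeWordSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder values; replace with a real subscription key and region.
        SpeechConfig config = SpeechConfig.fromSubscription("<subscription key>", "<region>");

        // Uses the default microphone input.
        SpeechRecognizer recognizer = new SpeechRecognizer(config);

        recognizer.recognized.addEventListener((s, e) -> {
            if (e.getResult().getReason() == ResultReason.RecognizedKeyword) {
                System.out.println("Wake word detected: " + e.getResult().getText());
            } else if (e.getResult().getReason() == ResultReason.RecognizedSpeech) {
                System.out.println("Recognized: " + e.getResult().getText());
            }
        });

        // "kws.table" is a hypothetical path to the model file produced by the
        // wake-word creation guide (https://aka.ms/sdsdk-kws).
        KeywordRecognitionModel model = KeywordRecognitionModel.fromFile("kws.table");
        recognizer.startKeywordRecognitionAsync(model).get();

        System.out.println("Listening for the wake word; press Enter to stop.");
        System.in.read();
        recognizer.stopKeywordRecognitionAsync().get();
        recognizer.close();
    }
}
```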
13 changes: 0 additions & 13 deletions Samples/Android/Speech Devices SDK Starter App/CHANGELOG.md

This file was deleted.

15 changes: 0 additions & 15 deletions Samples/Android/Speech Devices SDK Starter App/README.md

This file was deleted.

@@ -9,7 +9,7 @@ android {

}
defaultConfig {
applicationId "com.microsoft.coginitiveservices.speech.samples.sdsdkstarterapp"
applicationId "com.microsoft.cognitiveservices.speech.samples.sdsdkstarterapp"
minSdkVersion 22
targetSdkVersion 27
versionCode 1
@@ -40,4 +40,4 @@ dependencies {
androidTestImplementation 'com.android.support.test:runner:1.0.1'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
implementation files('src/main/jniLibs/com.microsoft.cognitiveservices.speech.jar')
}
}
@@ -28,13 +28,13 @@ public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getTargetContext();

assertEquals("com.microsoft.coginitiveservices.speech.samples.sdsdkstarterapp", appContext.getPackageName());
assertEquals("com.microsoft.cognitiveservices.speech.samples.sdsdkstarterapp", appContext.getPackageName());
}

@Test
public void runSpeechSDKtests() {
loadTestProperties("/data/local/tmp/tests/test-java-unittests.properties");
//tests.runner.Runner.mainRunner("tests.unit.AllUnitTests");
tests.runner.Runner.mainRunner("tests.unit.AllUnitTests");
}

@SuppressWarnings("deprecation")
@@ -5,7 +5,6 @@
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />


<application
android:allowBackup="true"
@@ -14,6 +13,10 @@
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.AppCompat.Light.NoActionBar">
<activity
android:name=".Conversation"
android:label="Conversation"
android:theme="@style/NoActionBar"></activity>
<activity
android:name=".MainActivity"
android:theme="@style/AppTheme.NoActionBar">
@@ -24,14 +27,9 @@
</intent-filter>
</activity>
<activity
android:name=".listLanguage"
android:label="@string/title_activity_list_language"
android:name=".ListLanguage"
android:label="ListLanguage"
android:theme="@style/AppTheme.NoActionBar" />
<activity
android:name=".conversation"
android:label="@string/title_activity_conversation"
android:theme="@style/NoActionBar">
</activity>
</application>

</manifest>
@@ -12,6 +12,7 @@
import android.util.Pair;
import android.view.Menu;
import android.view.MenuItem;
import android.view.WindowManager;
import android.widget.ListView;
import android.widget.TextView;

@@ -27,6 +28,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.net.URI;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
@@ -37,10 +39,10 @@
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class conversation extends AppCompatActivity {
public class Conversation extends AppCompatActivity {
private HashMap<String, String> signatureMap = new HashMap<>();
private HashMap<String, Integer> colorMap = new HashMap<>();
private TextView IntermediateTextView;
private TextView intermediateTextView;
private static final String CTSKey = "<Conversation Transcription Service Key>";
private static final String CTSRegion="<Conversation Transcription Service Region>";// Region may be "centralus" or "eastasia"
private SpeechConfig speechConfig = null;
@@ -91,7 +93,7 @@ public boolean onOptionsItemSelected(MenuItem item)
Log.i(logTag, "Participants enrollment");

String[] keyArray = signatureMap.keySet().toArray(new String[signatureMap.size()]);
colorMap.put("?", getColor());
colorMap.put("Guest", getColor());
for (int i = 1; i <= signatureMap.size(); i++)
{
while (colorMap.size() < i + 1)
@@ -129,8 +131,8 @@ protected void onCreate(Bundle savedInstanceState)
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_conversation);
Toolbar toolbar = findViewById(R.id.CTStoolbar);
IntermediateTextView = findViewById(R.id.IntermediateView);
IntermediateTextView.setMovementMethod(new ScrollingMovementMethod());
intermediateTextView = findViewById(R.id.IntermediateView);
intermediateTextView.setMovementMethod(new ScrollingMovementMethod());
setSupportActionBar(toolbar);
Properties prop = new Properties();
InputStream participantIs = null;
@@ -151,16 +153,16 @@ protected void onCreate(Bundle savedInstanceState)
// check if we have a valid endpoint
///////////////////////////////////////////////////
if (CTSRegion.startsWith("<") || CTSRegion.endsWith(">")) {
appendTextLine( "Error: Replace CTSRegion with your speech subscription key's service region and re-compile!", true);
appendTextLine( "Error: Replace CTSRegion with your actual speech subscription key's service region and re-compile!", true);
return;
}

try
{
// example/participants.properties is for storing participants' voice signatures, please push the file under folder /video on DDK device.
participantIs = new FileInputStream("/video/participants.properties");
prop.load(participantIs);
participantList = prop.getProperty("PARTICIPANTSLIST");
prop.load(participantIs);
participantList = prop.getProperty("PARTICIPANTSLIST");
}
catch (Exception io)
{
@@ -181,7 +183,7 @@ protected void onCreate(Bundle savedInstanceState)
if (participantList.length() == 0)
{
Log.i(logTag, "Please put participants file in /video/participants.properties");
appendTextLine("Please save the participants' voice signatures in file-participants.properties, and push the file under folder /video", true);
appendTextLine("Please save the participants' voice signatures in file named participants.properties, and push the file under folder /video", true);
}
else
{
@@ -196,9 +198,10 @@
}
}

}
getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
}

private void eventHandler(ConversationTranscriptionEventArgs e)
private void recognizingEventHandler(ConversationTranscriptionEventArgs e)
{
final String text = e.getResult().getText();
final String speakerId = e.getResult().getUserId().equals("Unidentified") ? "..." : e.getResult().getUserId();
@@ -216,9 +219,9 @@ private void eventHandler(ConversationTranscriptionEventArgs e)
{
if (transcriptions.containsKey(key))
{
if (transcriptions.get(key).getResult().getReason() == ResultReason.RecognizingSpeech)
if (transcriptions.get(key).getResult().getReason() == ResultReason.RecognizedSpeech)
{
Log.e(logTag, "Two utterances occur at the same time. Offset: " + offset + "; text: " + text);
Log.e(logTag, "Two utterances occurred at the same time. Offset: " + offset + "; text: " + text);
}
}
transcriptions.put(key, e);
@@ -232,7 +235,7 @@ private void startRecognizeMeeting(ConversationTranscriber t)
{
t.sessionStarted.addEventListener((o, e) -> Log.i(logTag, "Session started event. Start recognition"));

t.recognizing.addEventListener((o, e) -> eventHandler(e));
t.recognizing.addEventListener((o, e) -> recognizingEventHandler(e));

t.recognized.addEventListener((o, e) -> {
final String text = e.getResult().getText();
@@ -244,7 +247,7 @@ private void startRecognizeMeeting(ConversationTranscriber t)

if (!text.isEmpty() && !speakerId.equals("$ref$"))
{
final SpeakerData data = new SpeakerData(speakerId, colorMap.get(speakerId.equals("Guest") ? "?" : speakerId));
final SpeakerData data = new SpeakerData(speakerId, colorMap.get(speakerId));
final Transcription transcription = new Transcription(text, data, offset);
runOnUiThread(() ->
{
@@ -336,23 +339,23 @@ private void setRecognizedText()

private void appendTextLine(final String s, final Boolean erase)
{
conversation.this.runOnUiThread(() -> {
Conversation.this.runOnUiThread(() -> {
if (erase)
{
IntermediateTextView.setText(s);
intermediateTextView.setText(s);
}
else
{
String txt = IntermediateTextView.getText().toString();
IntermediateTextView.setText(String.format("%s\n%s", txt, s));
String txt = intermediateTextView.getText().toString();
intermediateTextView.setText(String.format("%s\n%s", txt, s));
}

final Layout layout = IntermediateTextView.getLayout();
final Layout layout = intermediateTextView.getLayout();
if (layout != null) {
int scrollDelta = layout.getLineBottom(IntermediateTextView.getLineCount() - 1)
-IntermediateTextView.getScrollY() -IntermediateTextView.getHeight();
int scrollDelta = layout.getLineBottom(intermediateTextView.getLineCount() - 1)
-intermediateTextView.getScrollY() -intermediateTextView.getHeight();
if (scrollDelta > 0)
IntermediateTextView.scrollBy(0, scrollDelta);
intermediateTextView.scrollBy(0, scrollDelta);
}
});
}
@@ -385,6 +388,6 @@ private interface OnTaskCompletedListener<T>

private void displayException(Exception ex)
{
IntermediateTextView.setText(String.format("%s\n%s", ex.getMessage(), TextUtils.join("\n", ex.getStackTrace())));
intermediateTextView.setText(String.format("%s\n%s", ex.getMessage(), TextUtils.join("\n", ex.getStackTrace())));
}
}
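
The handler and class renames above (eventHandler to recognizingEventHandler, conversation to Conversation) leave the transcriber's two-phase event model unchanged: `recognizing` delivers partial hypotheses while `recognized` delivers final utterances tagged with a resolved user ID. Below is a stripped-down Java sketch of that wiring, assuming a `ConversationTranscriber` has already been created and enrolled the way the sample does; the package name used for the import is an assumption from later SDK releases, and the 1.6-era sample may import the class from a different namespace.

```java
import com.microsoft.cognitiveservices.speech.ResultReason;
// Assumption: later SDK releases expose ConversationTranscriber from this package;
// the 1.6-era sample may use a different (preview) namespace.
import com.microsoft.cognitiveservices.speech.transcription.ConversationTranscriber;

final class TranscriberWiring {
    // Mirrors the event wiring in startRecognizeMeeting(); transcriber construction
    // and participant enrollment are assumed to happen elsewhere.
    static void wire(ConversationTranscriber t) {
        t.sessionStarted.addEventListener((o, e) ->
                System.out.println("Session started; transcription is running"));

        // Partial hypotheses: text may still change and the speaker may be "Unidentified".
        t.recognizing.addEventListener((o, e) ->
                System.out.println("[recognizing] " + e.getResult().getUserId()
                        + ": " + e.getResult().getText()));

        // Final utterances: text is stable and the speaker has been matched against
        // the enrolled voice signatures (or reported as a guest).
        t.recognized.addEventListener((o, e) -> {
            if (e.getResult().getReason() == ResultReason.RecognizedSpeech
                    && !e.getResult().getText().isEmpty()) {
                System.out.println("[recognized] " + e.getResult().getUserId()
                        + ": " + e.getResult().getText());
            }
        });
    }
}
```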
@@ -1,7 +1,7 @@
package com.microsoft.cognitiveservices.speech.samples.sdsdkstarterapp;

import java.util.HashMap;

import java.util.Map;

public class LanguageCode {
private static HashMap<String, String> mapRecolanguageCode = new HashMap<String, String>(){
@@ -14,7 +14,7 @@
import static com.microsoft.cognitiveservices.speech.samples.sdsdkstarterapp.MainActivity.SELECT_RECOGNIZE_LANGUAGE_REQUEST;
import static com.microsoft.cognitiveservices.speech.samples.sdsdkstarterapp.MainActivity.SELECT_TRANSLATE_LANGUAGE_REQUEST;

public class listLanguage extends AppCompatActivity {
public class ListLanguage extends AppCompatActivity {
private ListView listViewlanguage;
private final String[] recolanguage = {"English (United States)","German (Germany)","Chinese (Mandarin, simplified)","English (India)","Spanish (Spain)","French (France)","Italian (Italy)","Portuguese (Brazil)" ,"Russian (Russia)"};
private final String[] tranlanguage = {"Afrikaans", "Arabic", "Bangla", "Bosnian (Latin)", "Bulgarian", "Cantonese (Traditional)", "Catalan", "Chinese Simplified", "Chinese Traditional", "Croatian", "Czech", "Danish", "Dutch", "English", "Estonian", "Fijian", "Filipino", "Finnish", "French", "German", "Greek", "Haitian Creole", "Hebrew", "Hindi", "Hmong Daw", "Hungarian", "Indonesian", "Italian", "Japanese", "Kiswahili", "Klingon", "Klingon (plqaD)", "Korean", "Latvian", "Lithuanian", "Malagasy", "Malay", "Maltese", "Norwegian", "Persian", "Polish", "Portuguese", "Queretaro Otomi", "Romanian", "Russian", "Samoan", "Serbian (Cyrillic)", "Serbian (Latin)", "Slovak", "Slovenian", "Spanish", "Swedish", "Tahitian", "Tamil", "Thai", "Tongan", "Turkish", "Ukrainian", "Urdu", "Vietnamese", "Welsh", "Yucatec Maya"};
@@ -57,7 +57,6 @@ public void onItemClick(AdapterView<?> parent, final View view,
setResult(RESULT_OK, sendIntent);
finish();
}

});
}
}

