private void decideDirection(ArrayList<String> matches) {
    // Find the first recognized phrase that matches a known command.
    int position = -1;
    for (String phrase : matches) {
        position = commands.indexOf(phrase.toLowerCase());
        if (position != -1) {
            break;
        }
    }
    if (position != -1) {
        String detected = commands.get(position).toUpperCase();
        Utilities.setDirectionImage(detected, ivDirection, bt);
        textToSpeech.speak("EXECUTING COMMAND " + detected, TextToSpeech.QUEUE_FLUSH, null);
        ((TextView) findViewById(R.id.tvResults)).setText(detected);
        command = detected;
    } else {
        // No match: stop the robot and announce it.
        Utilities.setDirectionImage("STOP", ivDirection, bt);
        textToSpeech.speak("NO COMMAND DETECTED STOPPING", TextToSpeech.QUEUE_FLUSH, null);
        ((TextView) findViewById(R.id.tvResults)).setText("STOP");
        command = "STOP";
    }
}
@Override public void onInit(int status) { if (status == TextToSpeech.SUCCESS) { Log.d("flow", "success"); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { textToSpeech.speak(voiceResponse, TextToSpeech.QUEUE_FLUSH, null, null); } else { textToSpeech.speak(voiceResponse, TextToSpeech.QUEUE_ADD, null); } } }
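// A minimal sketch of a version-gated speak helper; the TtsCompat class and method
// names are assumptions, not part of the snippet above. Unlike the branch above it
// keeps the queue mode identical on both API paths and forwards a caller-supplied
// utterance id, using the Bundle overload on API 21+ and the deprecated HashMap
// overload below that.
import android.os.Build;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
import java.util.HashMap;

public final class TtsCompat {
    private TtsCompat() {}

    /** Speaks text with QUEUE_FLUSH on any API level, tagging it with utteranceId. */
    @SuppressWarnings("deprecation")
    public static int speakFlush(TextToSpeech tts, String text, String utteranceId) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // API 21+ overload takes a Bundle and an explicit utterance id.
            return tts.speak(text, TextToSpeech.QUEUE_FLUSH, (Bundle) null, utteranceId);
        }
        // Pre-21 overload carries the utterance id inside the params map.
        HashMap<String, String> params = new HashMap<>();
        params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, utteranceId);
        return tts.speak(text, TextToSpeech.QUEUE_FLUSH, params);
    }
}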
public void speak(String string, Runnable onDone) {
    Log.d(TAG, "speak:" + string + ",onDone=" + onDone + ",this.onDone=" + this.onDone + ",this=" + this);
    if (onDone == null) {
        throw new IllegalArgumentException();
    }
    synchronized (utteranceProgressListener) {
        if (this.onDone != null) {
            throw new IllegalStateException();
        }
        if (speechCanceled) {
            skipButton.post(onDone);
            return;
        }
        this.onDone = onDone;
        speechStarted = false;
    }
    skipButton.post(showSkipButton);
    HashMap<String, String> params = new HashMap<String, String>();
    params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, string);
    if (textToSpeech.speak(string, TextToSpeech.QUEUE_ADD, params) != TextToSpeech.SUCCESS) {
        synchronized (utteranceProgressListener) {
            skipButton.post(hideSkipButton);
            if (this.onDone != null) {
                skipButton.post(this.onDone);
                this.onDone = null;
            }
        }
    }
}
private void sendComment() {
    String comment = getComment();
    String userinput = getUserInput();
    if (userinput != null) {
        appendComment(BotAdapter.Type.Voice, userinput);
        etComment.setText(null);
        btnSendComment.setCurrentState(SendRequestButton.STATE_DONE);
    }
    if (comment != null) {
        appendComment(BotAdapter.Type.Voice, comment);
        etComment.setText(null);
        btnSendComment.setCurrentState(SendRequestButton.STATE_DONE);
        /*if (comment.equals("Calling")) // let's try and open Skype...
        {
            callSkype();
        } else*/
        // TODO remove after fix message
        tts.speak(comment, TextToSpeech.QUEUE_FLUSH, null, "1");
    }
}
public void onHandleMessage(Message msg) {
    if (msg.sendingUid == 1) { // TODO replace 1 with enum
        if (dialogManager.getPrintCamera() == dialogManager.PRINT_CAMERA) {
            appendComment(BotAdapter.Type.Camera, msg.getData().getString("message"));
        } else { // do all this only if no camera demo
            if (lastpicture != null) {
                // relevant on every second picture; assumes 2 pictures per round!!
                if (lastpicture.equals(msg.getData().getString("message"))) { // no change
                    appendComment(BotAdapter.Type.Camera, msg.getData().getString("message") + "no change");
                } else { // a change reflects an emergency - so switch to help
                    appendComment(BotAdapter.Type.Camera, "change -> help");
                    tts.speak("Please say help if help is needed", TextToSpeech.QUEUE_FLUSH, null, "1");
                    dialogManager.setHelpFromCamera();
                }
                lastpicture = null; // for next round of two TODO handle reset event
            } else { // first picture in round
                lastpicture = msg.getData().getString("message");
            }
        }
    }
    if (msg.sendingUid == 2) { // TODO replace 2
        // place holder
    }
    if (msg.sendingUid == 3) { // TODO replace 3
        lastpicture = null;
    }
}
public void speak() {
    String say = "";
    for (String s : words) {
        say += s.toLowerCase();
    }
    Log.d("SAY: ", say);
    txtS.speak(say, TextToSpeech.QUEUE_FLUSH, null);
}
@Override
public void handleMessage(Message message) {
    switch (message.what) {
        case WHAT_SPEAK:
            String utterance = (String) message.obj;
            mTts.speak(utterance, QUEUING_MODE_INTERRUPT, null);
            return;
        case WHAT_STOP_SPEAK:
            mTts.stop();
            return;
        case WHAT_START_TTS:
            mTts = new TextToSpeech(mContext, new TextToSpeech.OnInitListener() {
                @Override
                public void onInit(int status) {
                    // register here since to add earcons the TTS must be initialized
                    // the receiver is called immediately with the current ringer mode
                    registerBroadCastReceiver();
                }
            });
            return;
        case WHAT_SHUTDOWN_TTS:
            mTts.shutdown();
            return;
        case WHAT_PLAY_EARCON:
            int resourceId = message.arg1;
            playEarcon(resourceId);
            return;
        case WHAT_STOP_PLAY_EARCON:
            mTts.stop();
            return;
    }
}
public void onClick(View v) {
    Log.i("3", "3");
    this.UpdateView();
    Log.i("3", "3");
    if (v == nextone) {
        if (currentnum < numoflist) {
            currentnum++;
            this.UpdateView();
        }
    } else if (v == add) {
        DataAccess data = new DataAccess(studyWord.this);
        ArrayList<Word> attention = new ArrayList<Word>();
        attention = data.QueryAttention("SPELLING ='" + list.get(currentnum).getSpelling() + "'", null);
        if (attention.size() == 0) {
            data.InsertIntoAttention(list.get(currentnum));
            Toast.makeText(studyWord.this, "Added to the vocabulary notebook", Toast.LENGTH_SHORT).show();
        } else {
            Toast.makeText(studyWord.this, "This word is already in the vocabulary notebook!", Toast.LENGTH_SHORT).show();
        }
    } else if (v == beforeone) {
        if (currentnum > 0) { // guard the lower bound, mirroring the nextone check
            currentnum--;
            this.UpdateView();
        }
    }
    Log.i("3", "3");
    if (v == speak) {
        tts.speak(list.get(currentnum).getSpelling(), TextToSpeech.QUEUE_ADD, null);
    }
    Log.i("3", "3");
}
private void handleClick(int index) {
    String text = ESLPhrases.get(index);
    if (ttsLoaded) {
        tts.setSpeechRate(0.6f);
        tts.speak(text, TextToSpeech.QUEUE_FLUSH, null);
    }
}
int speak(String text, int queueMode, HashMap<String, String> params) {
    if (queueMode == TextToSpeech.QUEUE_FLUSH) {
        if (trace) {
            System.err.println("speak (mute: " + mute + "): " + text);
        }
        // speak directly
        if (mute) {
            return speakWithMute(text, queueMode, params);
        } else {
            return textToSpeech.speak(text, queueMode, params);
        }
    } else {
        if (!cueSet.contains(text)) {
            if (trace) {
                System.err.println("buffer speak: " + text);
            }
            cueSet.add(text);
            cueList.add(new Entry(text, params));
        } else {
            if (trace) {
                System.err.println("skip buffer (duplicate) speak: " + text);
            }
        }
        return 0;
    }
}
public void repeat() {
    if (flag) {
        text = (EditText) findViewById(R.id.tts);
        String toSpeak = text.getText().toString();
        Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
        t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null, "mytts");
    }
}
private void sayHello() {
    // Select a random hello.
    int helloLength = HELLOS.length;
    String hello = HELLOS[RANDOM.nextInt(helloLength)];
    mTts.speak(
            hello,
            TextToSpeech.QUEUE_FLUSH, // Drop all pending entries in the playback queue.
            null);
}
private void displayAlert(String displayMessage) {
    alertMessage.setMessage(displayMessage);
    if (TTS == 1) {
        talker.setLanguage(Locale.US);
        talker.speak(displayMessage, TextToSpeech.QUEUE_ADD, null);
    }
    alertMessage.show();
}
@SuppressWarnings("deprecation") public void speakThis(String message) { Integer currentMessageId = ++lastMessageQueued; // Toast.makeText(appContect, message, Toast.LENGTH_SHORT).show(); HashMap<String, String> map = new HashMap<String, String>(); map.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, currentMessageId.toString()); ttobj.speak(message, TextToSpeech.QUEUE_ADD, map); }
@Override
public void increment() {
    mRepCounter += 0.5;
    tvCounter.setText("" + mRepCounter);
    // Announce the count only on whole repetitions (every second half-rep).
    if (mRepCounter % 1 == 0) {
        mTextToSpeech.speak("" + (int) mRepCounter, TextToSpeech.QUEUE_FLUSH, null);
    }
}
public void onInit(final int status) {
    if (status == TextToSpeech.SUCCESS) {
        final int result = mTts.setLanguage(Locale.US);
        mTts.setSpeechRate(0.9F);
        mTts.setPitch(0.9F);
        if (result != TextToSpeech.LANG_MISSING_DATA && result != TextToSpeech.LANG_NOT_SUPPORTED) {
            mTts.speak(spokenText, TextToSpeech.QUEUE_FLUSH, null);
        }
    }
}
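// The locale check above, pulled into a small standalone helper as a minimal sketch;
// the SafeSpeaker class and speakIfSupported() name are assumptions. It only speaks
// when setLanguage() reports the voice data as present and supported.
import android.speech.tts.TextToSpeech;
import java.util.Locale;

public final class SafeSpeaker {
    private SafeSpeaker() {}

    /** Returns true if the utterance was queued; false if the locale is unusable. */
    @SuppressWarnings("deprecation")
    public static boolean speakIfSupported(TextToSpeech tts, Locale locale, String text) {
        int result = tts.setLanguage(locale);
        if (result == TextToSpeech.LANG_MISSING_DATA
                || result == TextToSpeech.LANG_NOT_SUPPORTED) {
            return false; // caller can fall back to another locale or show a message
        }
        return tts.speak(text, TextToSpeech.QUEUE_FLUSH, null) == TextToSpeech.SUCCESS;
    }
}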
public void onInit(int status) {
    Toast t = Toast.makeText(getApplicationContext(), A_dire, Toast.LENGTH_SHORT);
    t.show();
    mTts.speak(A_dire, TextToSpeech.QUEUE_FLUSH, null);
    if (!last_init) {
        android.os.SystemClock.sleep(1000);
        startVoiceRecognitionCycle();
    } else {
        fin();
    }
}
// We need setOnUtteranceCompletedListener for compatibility with Android 2.x
@SuppressWarnings("deprecation")
public void sayText(final String s, final OnTextToSpeechCompletedListener onTextToSpeechCompletedListener) {
    /* If there is a user defined audio, speak it and return. */
    if (speakWord.speakWord(s)) {
        // This enables auto speak for user defined audio files.
        speakWord.setOnCompletedListener(new OnCompletedListener() {
            @Override
            public void onCompleted() {
                if (onTextToSpeechCompletedListener != null) {
                    onTextToSpeechCompletedListener.onTextToSpeechCompleted(s);
                }
            }
        });
        return;
    }

    /* Otherwise, speak the content. */
    Log.v(TAG, "say it!");

    // This is slightly different from AMStringUtils.stripHTML since we replace the <br>
    // with a period to let it have a short pause.
    // Replace break with period
    String processed_str = s.replaceAll("\\<br\\>", ". ");
    // Remove HTML tags
    processed_str = processed_str.replaceAll("\\<.*?>", "");
    // Remove [] and their content
    processed_str = processed_str.replaceAll("\\[.*?\\]", "");
    // Remove XML/HTML entities such as &amp;
    processed_str = processed_str.replaceAll("&.*?;", "");

    HashMap<String, String> params = new HashMap<String, String>();
    params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, "id");
    myTTS.setOnUtteranceCompletedListener(new TextToSpeech.OnUtteranceCompletedListener() {
        @Override
        public void onUtteranceCompleted(String utteranceId) {
            if (onTextToSpeechCompletedListener != null) {
                onTextToSpeechCompletedListener.onTextToSpeechCompleted(s);
            }
        }
    });

    speakLock.lock();
    Log.i(TAG, "processed_str is \"" + processed_str + "\"");
    myTTS.speak(processed_str, TextToSpeech.QUEUE_FLUSH, params);
    speakLock.unlock();
}
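// On API 15+ the deprecated setOnUtteranceCompletedListener() call above could be
// replaced with an UtteranceProgressListener (import android.speech.tts.UtteranceProgressListener).
// A minimal sketch, assuming it sits inside sayText() and reuses the myTTS field,
// the onTextToSpeechCompletedListener parameter, and the final String s; note that
// onDone() is invoked off the main thread.
myTTS.setOnUtteranceProgressListener(new UtteranceProgressListener() {
    @Override
    public void onStart(String utteranceId) {
        // speech for this utterance has started
    }

    @Override
    public void onDone(String utteranceId) {
        if (onTextToSpeechCompletedListener != null) {
            onTextToSpeechCompletedListener.onTextToSpeechCompleted(s);
        }
    }

    @Override
    public void onError(String utteranceId) {
        // treat an engine error like completion so callers are not left waiting
        onDone(utteranceId);
    }
});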
/*
 * (non-Javadoc)
 * @see android.app.Activity#onKeyDown(int, android.view.KeyEvent)
 */
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
    switch (keyCode) {
        case KeyEvent.KEYCODE_DPAD_CENTER:
        case KeyEvent.KEYCODE_ENTER:
            Log.v(TAG, "reading battery state");
            speech.speak(statusText, TextToSpeech.QUEUE_FLUSH, null);
            return true;
        default:
            return super.onKeyDown(keyCode, event);
    }
}
@Override
public boolean handleMessage(Message msg) {
    switch (msg.what) {
        case Bluetooth.MESSAGE_STATE_CHANGE:
            Log.d(TAG, "MESSAGE_STATE_CHANGE: " + msg.arg1);
            break;
        case Bluetooth.MESSAGE_WRITE:
            Log.d(TAG, "MESSAGE_WRITE ");
            break;
        case Bluetooth.MESSAGE_READ:
            byte[] readBuf = (byte[]) msg.obj;
            String strIncom = new String(readBuf, 0, msg.arg1);
            sb.append(strIncom);
            int endOfLineIndex = sb.indexOf("\r\n"); // determine the end-of-line
            if (endOfLineIndex > 0) { // if end-of-line,
                String sbprint = sb.substring(0, endOfLineIndex); // extract string
                sb.delete(0, sb.length()); // and clear
                Log.d("READ_FROM_ARDUINO", sbprint + "");
                try {
                    distanceToObstacle = Utilities.normalizeReadingsFromDistanceSensor(
                            Integer.parseInt(sbprint), distanceToObstacle);
                    Log.d("READ_FROM_ARDUINO_NORM", distanceToObstacle + "");
                    tvDistance.setText(distanceToObstacle + "cm");
                    if (distanceToObstacle < 40 && !(command.equals("STOP"))) {
                        Utilities.setDirectionImage("STOP", ivDirection, bt);
                        textToSpeech.speak("YOU ARE ABOUT TO HIT AN OBSTACLE STOPPING",
                                TextToSpeech.QUEUE_FLUSH, null);
                        ((TextView) findViewById(R.id.tvResults)).setText("STOP");
                        command = "STOP";
                    }
                    // distanceToObstacle = Integer.parseInt(sbprint);
                } catch (Exception e) {
                    e.printStackTrace();
                    Log.e(TAG, "handleMessage: CRASH CONVERSION");
                }
            }
            break;
        case Bluetooth.MESSAGE_DEVICE_NAME:
            Log.d(TAG, "MESSAGE_DEVICE_NAME " + msg);
            break;
        case Bluetooth.MESSAGE_TOAST:
            Log.d(TAG, "MESSAGE_TOAST " + msg);
            break;
    }
    return false;
}
private void speak(String string, boolean append, String utteranceId) {
    if (tts != null && shouldEnableTTS()) {
        final int queueType = append ? TextToSpeech.QUEUE_ADD : TextToSpeech.QUEUE_FLUSH;
        mTtsParams.clear();
        if (utteranceId != null) {
            mTtsParams.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, utteranceId);
        }
        tts.speak(string, queueType, mTtsParams);
    }
}
public void emit() {
    if (cueSet.isEmpty()) {
        return;
    }
    if (mute && requestFocus()) {
        for (Entry e : cueList) {
            final String utId = getId(e.text);
            outstanding.add(utId);
            HashMap<String, String> params = e.params;
            if (params == null) {
                params = new HashMap<String, String>();
            }
            params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, utId);
            int res = textToSpeech.speak(e.text, TextToSpeech.QUEUE_ADD, params);
            if (res == TextToSpeech.ERROR) {
                System.err.println("res == ERROR emit() text: " + e.text + ", utId: " + utId
                        + ") outstanding.size(): " + outstanding.size());
                outstanding.remove(utId);
            }
        }
        if (outstanding.isEmpty()) {
            audioManager.abandonAudioFocus(null);
        }
    } else {
        for (Entry e : cueList) {
            textToSpeech.speak(e.text, TextToSpeech.QUEUE_ADD, e.params);
        }
    }
    cueSet.clear();
    cueList.clear();
}
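// A minimal standalone sketch of the buffer-and-flush pattern used by speak() and
// emit() above, with hypothetical names (CueBuffer, add, flush): duplicate cue texts
// are dropped while buffered, and flush() hands the surviving cues to the engine in
// insertion order with QUEUE_ADD.
import android.speech.tts.TextToSpeech;
import java.util.LinkedHashSet;
import java.util.Set;

public class CueBuffer {
    private final TextToSpeech tts;
    // LinkedHashSet gives both de-duplication and stable ordering.
    private final Set<String> pending = new LinkedHashSet<>();

    public CueBuffer(TextToSpeech tts) {
        this.tts = tts;
    }

    /** Buffers a cue; duplicates of an already-pending cue are ignored. */
    public synchronized void add(String text) {
        pending.add(text);
    }

    /** Queues all pending cues with QUEUE_ADD and clears the buffer. */
    @SuppressWarnings("deprecation")
    public synchronized void flush() {
        for (String text : pending) {
            tts.speak(text, TextToSpeech.QUEUE_ADD, null);
        }
        pending.clear();
    }
}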
public void readMessage(Message msg) {
    StringBuilder ttsText = new StringBuilder();
    if (msg.channelIds > 0) {
        ttsText.append(getString(R.string.channel) + " ");
    }
    if (msg.actor != null) {
        ttsText.append(msg.actor.name);
    } else {
        ttsText.append(getString(R.string.server));
    }
    ttsText.append(": ");
    ttsText.append(Html.fromHtml(msg.message).toString());
    tts.speak(ttsText.toString(), TextToSpeech.QUEUE_ADD, null);
}
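// Html.fromHtml(String) as used above is deprecated from API 24. A minimal sketch of
// a version-aware helper for stripping markup before speaking; the TtsTextUtil class
// and plainText() name are assumptions.
import android.os.Build;
import android.text.Html;

public final class TtsTextUtil {
    private TtsTextUtil() {}

    /** Converts an HTML message to plain text suitable for speaking. */
    @SuppressWarnings("deprecation")
    public static String plainText(String html) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            // API 24+ overload takes an explicit parsing-mode flag.
            return Html.fromHtml(html, Html.FROM_HTML_MODE_LEGACY).toString();
        }
        return Html.fromHtml(html).toString();
    }
}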
private void displayWord(Word word) {
    // Set Values on the Screen
    tvWord.setText(word.getWord());
    tvDefinition.setText(word.getDefinition());

    // Set User Specific Values on the Screen
    rbMyRating.setRating(dbh.getMyWord(word).getMyRating());

    // Set Progress Bar
    pbProgress.setProgress(index + 1);

    // Speak Up the word
    if (settings.getBoolean("prefPronunce", false)) {
        tts.speak(word.getWord().toString(), TextToSpeech.QUEUE_FLUSH, null);
    }
}
/**
 * Updates the selection rectangle and attempts to shift focus away from the provided view.
 * Clears active TTS.
 *
 * @param view the view that lost selection
 */
private void announceSelectionLost(View view) {
    // TODO(alanv): Add an additional TYPE_VIEW_HOVER_OFF event type that
    // fires a KickBack vibration and (probably) clears active TalkBack
    // utterances.
    if (mSpeechAvailable) {
        mTTS.speak("", TextToSpeech.QUEUE_FLUSH, null);
    }
    if (mVibrator != null) {
        if (view.isFocusable()) {
            mVibrator.vibrate(mFocusLostFocusablePattern, -1);
        } else {
            mVibrator.vibrate(mFocusLostPattern, -1);
        }
    }
    mSelectedRect.setEmpty();
}
/**
 * Attempt to start speaking an utterance. If it returns true, will call back on start and end.
 *
 * @param utteranceId A unique id for this utterance so that callbacks can be tied to a
 *     particular utterance.
 * @param text The text to speak.
 * @param lang The language code for the text (e.g., "en-US").
 * @param rate The speech rate, in the units expected by Android TextToSpeech.
 * @param pitch The speech pitch, in the units expected by Android TextToSpeech.
 * @param volume The speech volume, in the units expected by Android TextToSpeech.
 * @return true on success.
 */
@CalledByNative
private boolean speak(int utteranceId, String text, String lang, float rate, float pitch, float volume) {
    assert mInitialized;
    if (!lang.equals(mCurrentLanguage)) {
        mTextToSpeech.setLanguage(new Locale(lang));
        mCurrentLanguage = lang;
    }
    mTextToSpeech.setSpeechRate(rate);
    mTextToSpeech.setPitch(pitch);
    HashMap<String, String> params = new HashMap<String, String>();
    if (volume != 1.0) {
        params.put(TextToSpeech.Engine.KEY_PARAM_VOLUME, Double.toString(volume));
    }
    params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, Integer.toString(utteranceId));
    int result = mTextToSpeech.speak(text, TextToSpeech.QUEUE_FLUSH, params);
    return (result == TextToSpeech.SUCCESS);
}
public static void speak(String text, String loc) {
    int result = mTts.setLanguage(new Locale(loc));
    if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
        Toast.makeText(
                mReviewer.get(),
                mReviewer.get().getString(R.string.no_tts_available_message) + " (" + loc + ")",
                Toast.LENGTH_LONG)
                .show();
        Timber.e("Error loading locale " + loc);
    } else {
        if (mTts.isSpeaking()) {
            Timber.d("tts engine appears to be busy... clearing queue");
            stopTts();
            // sTextQueue.add(new String[] { text, loc });
        }
        Timber.d("tts text '%s' to be played for locale (%s)", text, loc);
        mTts.speak(mTextToSpeak, TextToSpeech.QUEUE_FLUSH, mTtsParams);
    }
}
@Override
public void onInit(int status) {
    if (status == TextToSpeech.SUCCESS) {
        int result = textTalker.setLanguage(Locale.US);
        if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
            Toast.makeText(this, "Language not supported", Toast.LENGTH_LONG).show();
        }
    } else {
        Log.e("TTS", "Initialization Failed");
    }
    textTalker.speak(yy, TextToSpeech.QUEUE_FLUSH, null);
    t.setText(yy);
}
/**
 * Requests audio focus before speaking; if no focus is granted nothing is said.
 *
 * @param text the text to speak
 * @param queueMode TextToSpeech.QUEUE_FLUSH or TextToSpeech.QUEUE_ADD
 * @param params optional TTS parameters; the utterance id is added here
 * @return the result of TextToSpeech.speak(), or TextToSpeech.ERROR if focus was denied
 */
private int speakWithMute(String text, int queueMode, HashMap<String, String> params) {
    if (requestFocus()) {
        final String utId = getId(text);
        outstanding.add(utId);
        if (params == null) {
            params = new HashMap<String, String>();
        }
        params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, utId);
        int res = textToSpeech.speak(text, queueMode, params);
        if (res == TextToSpeech.ERROR) {
            outstanding.remove(utId);
        }
        if (outstanding.isEmpty()) {
            audioManager.abandonAudioFocus(null);
        }
        return res;
    }
    System.err.println("Could not get audio focus.");
    return TextToSpeech.ERROR;
}
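// A minimal sketch of the transient audio-focus request that speakWithMute() relies
// on; the FocusHelper class and its method names are assumptions. It uses the
// pre-API-26 AudioManager calls (deprecated but still available) so that other audio,
// such as music, ducks while the announcement plays.
import android.media.AudioManager;

public class FocusHelper {
    private final AudioManager audioManager;
    // No-op listener; it only serves as the identity for the request/abandon calls.
    private final AudioManager.OnAudioFocusChangeListener listener =
            new AudioManager.OnAudioFocusChangeListener() {
                @Override
                public void onAudioFocusChange(int focusChange) {
                    // nothing to do for short announcements
                }
            };

    public FocusHelper(AudioManager audioManager) {
        this.audioManager = audioManager;
    }

    /** Asks other audio to duck while the TTS announcement plays. */
    @SuppressWarnings("deprecation")
    public boolean requestFocus() {
        int result = audioManager.requestAudioFocus(
                listener,
                AudioManager.STREAM_MUSIC,
                AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK);
        return result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
    }

    /** Gives focus back once all outstanding utterances have completed. */
    @SuppressWarnings("deprecation")
    public void abandonFocus() {
        audioManager.abandonAudioFocus(listener);
    }
}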
private void handleControls() {
    // @todo add code to read joysticks
    // Run wheels in tank mode (note: The joystick goes negative when pushed forwards, so negate it)
    double left = 0.0;
    double right = 0.0;
    left = -gamepad1.left_stick_y; // (note: The joystick goes negative when pushed forwards, so negate it)
    right = -gamepad1.right_stick_y;
    if (debugmode) telemetry.addData("LJoystickRaw", "%.2f", left);
    if (debugmode) telemetry.addData("RJoystickRaw", "%.2f", right);

    left = scaleMotorPower(enforceDeadZone(left)); // don't move unless far enough from zero
    right = scaleMotorPower(enforceDeadZone(right)); // because physical 'dead stick' may not be seen as zero
    if (debugmode) telemetry.addData("LMotorSpeed", "%.2f", left);
    if (debugmode) telemetry.addData("RMotorSpeed", "%.2f", right);

    leftMotorSpeed = left;
    rightMotorSpeed = right;

    // now handle buttons
    if (gamepad1.y) { // if button is down now
        if (gamepad1YisReleased) { // was it previously released?
            // so this is done only once for each press and release
            gamepad1YisReleased = false; // if so, remember that it is down, not released
            debugmode = !debugmode; // and toggle the debug mode
            String sentence = String.format("%s is %s.", " Debug ", debugmode);
            textToSpeech.speak(sentence, TextToSpeech.QUEUE_FLUSH, null);
        }
    } else { // if button is not down now
        gamepad1YisReleased = true; // remember that button has been released
    }
}
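// The enforceDeadZone() and scaleMotorPower() helpers referenced above are not shown.
// A hedged sketch of one common implementation, where the names, threshold, and
// scaling curve are all assumptions rather than the original code: stick values
// inside a small dead zone are treated as zero, and the rest are square-scaled so
// small movements give fine control while full deflection still reaches 1.0.
private static final double DEAD_ZONE = 0.05; // assumed threshold

private double enforceDeadZone(double input) {
    // Ignore tiny readings from a physically centered stick.
    return Math.abs(input) < DEAD_ZONE ? 0.0 : input;
}

private double scaleMotorPower(double input) {
    // Square the magnitude but keep the sign, then clip to the motor range [-1, 1].
    double scaled = Math.signum(input) * input * input;
    return Math.max(-1.0, Math.min(1.0, scaled));
}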