예제 #1
0
        @Override
        public void onResults(Bundle results) {
          // Final recognition results: display the word list, fire the "cheese"
          // shutter trigger, spawn floating words, then restart listening.
          lawg.d("[RecognitionListener] onResults()");
          ArrayList<String> resultsArray =
              results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
          // CONFIDENCE_SCORES is an optional extra and may be null or shorter
          // than the results list, so it must be guarded before indexing.
          float[] confidenceScores = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
          // Bug fix: RESULTS_RECOGNITION can be absent; the old code would NPE here.
          if (resultsArray == null) {
            startListening(); // keep the recognition session alive
            return;
          }
          StringBuilder sb = new StringBuilder();
          for (int i = 0; i < resultsArray.size(); i++) {
            String word = resultsArray.get(i);
            sb.append(word);
            if (i != resultsArray.size() - 1) sb.append(", ");
            // equalsIgnoreCase avoids the locale-sensitive toLowerCase() pitfall
            // (e.g. Turkish dotless-i) for the "cheese" trigger word.
            if ("cheese".equalsIgnoreCase(word)) {
              captureStillPicture();
            }
          }
          lawg.d("Words detected: \n" + sb.toString());
          mDisplayText1.setText(sb.toString());

          for (int i = 0; i < resultsArray.size(); i++) {
            String word = resultsArray.get(i);
            // Bug fix: unchecked confidenceScores[i] crashed when the recognizer
            // omitted scores; fall back to 0 confidence instead.
            float score =
                (confidenceScores != null && i < confidenceScores.length)
                    ? confidenceScores[i]
                    : 0f;
            addFloatingWord(word, score);
          }

          // repeat listen
          startListening();
        }
예제 #2
0
 /**
  * Kicks off speech recognition if the device supports it and the RECORD_AUDIO
  * permission has been granted; otherwise surfaces a toast to the user.
  */
 private void startSpeechRecognizer() {
   lawg.d("startSpeechRecognizer()");
   boolean available =
       SpeechRecognizer.isRecognitionAvailable(this)
           && hasPermission(Manifest.permission.RECORD_AUDIO);
   if (!available) {
     Toast.makeText(this, "Speech Recognition Not Available", Toast.LENGTH_SHORT).show();
     return;
   }
   lawg.d("Speech Recognition detected as being available");
   startListening();
 }
예제 #3
0
 /** Routes button taps to the speech and camera controls. */
 @Override
 public void onClick(View v) {
   final int id = v.getId();
   if (id == R.id.start_speech_button) {
     startListening();
   } else if (id == R.id.stop_speech_button) {
     mSpeechRecognizer.stopListening();
   } else if (id == R.id.start_camera_button) {
     // The camera2 path is only used on API levels that support it.
     if (Build.VERSION.SDK_INT >= CAMERA_2_API_LIMIT) {
       lawg.d("Has camera permission? " + hasPermission(Manifest.permission.CAMERA));
       if (hasPermission(Manifest.permission.CAMERA)) {
         startCamera2(mTextureView.getWidth(), mTextureView.getHeight());
       } else {
         ActivityCompat.requestPermissions(
             CameraActivity.this,
             new String[] {Manifest.permission.CAMERA},
             CAMERA_PERMISSIONS_REQUEST);
       }
     }
   } else if (id == R.id.stop_camera_button) {
     stopCamera2();
   } else if (id == R.id.capture_camera_button) {
     captureStillPicture();
   }
 }
예제 #4
0
 /** Notifies the user of a recognition error, then restarts listening. */
 @Override
 public void onError(int error) {
   if (DEBUG_RECOGNITION) {
     lawg.d("[RecognitionListener] onError() errorCode: " + error);
   }
   Toast.makeText(CameraActivity.this, "Speech Recognition Error", Toast.LENGTH_SHORT).show();
   // Restart so recognition keeps running after a transient failure.
   startListening();
 }
예제 #5
0
 /**
  * (Re)creates the SpeechRecognizer and starts a listening session with
  * partial results enabled.
  */
 public void startListening() {
   lawg.d("startListening()");
   // A SpeechRecognizer cannot be reliably reused after an error, so destroy
   // any previous instance and create a fresh one for every session.
   if (mSpeechRecognizer != null) {
     mSpeechRecognizer.destroy();
   }
   // Mute system streams so the recognizer's start/stop beeps do not play on
   // every automatic restart.
   // NOTE(review): nothing in this block ever unmutes these streams — confirm a
   // matching unmute exists (e.g. in onPause/onDestroy). Also, setStreamMute is
   // deprecated on API 23+ in favor of adjustStreamVolume with ADJUST_MUTE.
   mAudioManager.setStreamMute(AudioManager.STREAM_MUSIC, true);
   mAudioManager.setStreamMute(AudioManager.STREAM_ALARM, true);
   mAudioManager.setStreamMute(AudioManager.STREAM_NOTIFICATION, true);
   mAudioManager.setStreamMute(AudioManager.STREAM_SYSTEM, true);
   mAudioManager.setStreamMute(AudioManager.STREAM_RING, true);
   mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
   mSpeechRecognizer.setRecognitionListener(mRecognitionListener);
   Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
   // Bug fix: EXTRA_LANGUAGE_MODEL is documented as required for
   // ACTION_RECOGNIZE_SPEECH; some recognizers reject intents without it.
   recognizerIntent.putExtra(
       RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
   recognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
   mSpeechRecognizer.startListening(recognizerIntent);
 }
예제 #6
0
 /**
  * Logs the outcome of a permission request and, for RECORD_AUDIO, retries
  * starting the speech recognizer (which re-checks the permission itself).
  */
 @Override
 public void onRequestPermissionsResult(
     int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
   boolean granted =
       grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED;
   // Bug fix: the old code concatenated the int[] directly, which logs the
   // array's identity hash instead of its contents; `granted` was also unused.
   String resultText = java.util.Arrays.toString(grantResults) + " granted: " + granted;
   switch (requestCode) {
     case CAMERA_PERMISSIONS_REQUEST:
       lawg.d("CAMERA permission response " + resultText);
       break;
     case RECORD_AUDIO_PERMISSIONS_REQUEST:
       lawg.d("RECORD_AUDIO permission response " + resultText);
       startSpeechRecognizer();
       break;
   }
 }
예제 #7
0
 /** Opens the camera once the preview surface exists, requesting CAMERA first if needed. */
 @Override
 public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
   boolean cameraAllowed = hasPermission(Manifest.permission.CAMERA);
   lawg.d("Has camera permission? " + cameraAllowed);
   if (!cameraAllowed) {
     ActivityCompat.requestPermissions(
         CameraActivity.this,
         new String[] {Manifest.permission.CAMERA},
         CAMERA_PERMISSIONS_REQUEST);
     return;
   }
   startCamera2(width, height);
 }
예제 #8
0
 /** Requests every runtime permission the activity needs that is not yet granted. */
 private void checkPermissions() {
   lawg.d("checkPermissions()");
   // Decomposition: the three identical check-then-request stanzas are folded
   // into one helper so adding a permission is a one-line change.
   requestPermissionIfMissing(
       Manifest.permission.CAMERA, "camera", CAMERA_PERMISSIONS_REQUEST);
   requestPermissionIfMissing(
       Manifest.permission.RECORD_AUDIO, "record audio", RECORD_AUDIO_PERMISSIONS_REQUEST);
   requestPermissionIfMissing(
       Manifest.permission.WRITE_EXTERNAL_STORAGE,
       "write external files",
       WRITE_EXTERNAL_STORAGE_PERMISSIONS_REQUEST);
 }

 /**
  * Requests {@code permission} with {@code requestCode} if it is not yet granted.
  * {@code label} is only used in the debug log line.
  */
 private void requestPermissionIfMissing(String permission, String label, int requestCode) {
   if (!hasPermission(permission)) {
     lawg.d("No " + label + " permissions granted, requesting now -");
     ActivityCompat.requestPermissions(
         CameraActivity.this, new String[] {permission}, requestCode);
   }
 }
예제 #9
0
 /**
  * Scans in-flight (partial) hypotheses so the "cheese" shutter trigger fires
  * without waiting for final results.
  */
 @Override
 public void onPartialResults(Bundle partialResults) {
   ArrayList<String> resultsArray =
       partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
   // Bug fix: the partial-results bundle may not contain RESULTS_RECOGNITION;
   // the old code would NPE on resultsArray.size().
   if (resultsArray == null) {
     return;
   }
   StringBuilder sb = new StringBuilder();
   for (int i = 0; i < resultsArray.size(); i++) {
     String word = resultsArray.get(i);
     sb.append(word);
     if (i != resultsArray.size() - 1) sb.append(", ");
     // equalsIgnoreCase avoids the locale-sensitive toLowerCase() pitfall
     // (e.g. Turkish dotless-i) for the "cheese" trigger word.
     if ("cheese".equalsIgnoreCase(word)) {
       captureStillPicture();
     }
   }
   if (sb.length() > 0) {
     lawg.d("onPartialResults() Words detected: \n" + sb.toString());
   }
 }
예제 #10
0
  /**
   * Configures outputs for the given preview size and opens the camera2 device,
   * guarded by {@code mCameraOpenCloseLock} to serialize open/close.
   */
  @TargetApi(CAMERA_2_API_LIMIT)
  private void startCamera2(int width, int height) {
    lawg.d("startCamera2()");

    setUpCameraOutputs(width, height);
    configureTransform(width, height);
    mCamera2Manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
      if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
        throw new RuntimeException("Time out waiting to lock camera opening.");
      }
      mCamera2Manager.openCamera(mCameraId, mCamera2StateCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
      // Bug fix: openCamera failed after the semaphore was acquired, so the
      // state callback will never release it; release here or every later
      // open attempt times out.
      mCameraOpenCloseLock.release();
      e.printStackTrace();
    } catch (InterruptedException e) {
      // Restore the interrupt flag before converting to an unchecked throw.
      Thread.currentThread().interrupt();
      throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    } catch (SecurityException se) {
      // Same leak as above: the lock was held when openCamera threw.
      mCameraOpenCloseLock.release();
      lawg.e("SecurityException " + se);
      se.printStackTrace();
    }
  }
예제 #11
0
  /** Restarts the background thread, speech recognizer, and camera preview. */
  @Override
  protected void onResume() {
    super.onResume();
    startBackgroundThread();
    startSpeechRecognizer();

    // When the screen is turned off and back on, the SurfaceTexture is already
    // available and "onSurfaceTextureAvailable" will not fire again, so open
    // the camera directly; otherwise install the listener and wait for the
    // surface to become ready.
    if (!mTextureView.isAvailable()) {
      mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
      return;
    }
    lawg.d("Has camera permission? " + hasPermission(Manifest.permission.CAMERA));
    if (hasPermission(Manifest.permission.CAMERA)) {
      startCamera2(mTextureView.getWidth(), mTextureView.getHeight());
    } else {
      ActivityCompat.requestPermissions(
          CameraActivity.this,
          new String[] {Manifest.permission.CAMERA},
          CAMERA_PERMISSIONS_REQUEST);
    }
  }
예제 #12
0
 /** Recognizer is primed for speech; logs only when verbose debugging is on. */
 @Override
 public void onReadyForSpeech(Bundle params) {
   if (DEBUG_RECOGNITION) {
     lawg.d("[RecognitionListener] onReadyForSpeech()");
   }
 }
예제 #13
0
 /** Raw audio buffer callback; logs only when verbose debugging is on. */
 @Override
 public void onBufferReceived(byte[] buffer) {
   if (DEBUG_RECOGNITION) {
     lawg.d("onBufferReceived()");
   }
 }
예제 #14
0
 /**
  * User stopped speaking; logs only when verbose debugging is on. Restarting
  * the session is handled from onResults/onError, not here.
  */
 @Override
 public void onEndOfSpeech() {
   if (DEBUG_RECOGNITION) {
     lawg.d("[RecognitionListener] onEndOfSpeech()");
   }
 }
예제 #15
0
 /** Reserved recognizer event hook; logs only when verbose debugging is on. */
 @Override
 public void onEvent(int eventType, Bundle params) {
   if (DEBUG_RECOGNITION) {
     lawg.d("onEvent()");
   }
 }
예제 #16
0
 /** User started speaking; logs only when verbose debugging is on. */
 @Override
 public void onBeginningOfSpeech() {
   if (DEBUG_RECOGNITION) {
     lawg.d("[RecognitionListener] onBeginningOfSpeech()");
   }
 }
예제 #17
0
  /**
   * Builds {@code mCapture2Callback}, the camera2 capture-session callback that
   * drives the still-capture state machine stored in {@code mState}.
   */
  @TargetApi(CAMERA_2_API_LIMIT)
  private void initCaptureCallback() {
    lawg.d("initCaptureCallback()");
    mCapture2Callback =
        new CameraCaptureSession.CaptureCallback() {
          // Advances mState based on the latest AF/AE metadata. Invoked from both
          // onCaptureProgressed (partial results) and onCaptureCompleted (totals),
          // so it must tolerate results with missing keys.
          private void process(CaptureResult result) {
            switch (mState) {
              case STATE_PREVIEW:
                {
                  // We have nothing to do when the camera preview is working normally.
                  break;
                }
              case STATE_WAITING_LOCK:
                {
                  // Waiting for auto-focus to lock before taking the picture.
                  Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                  if (afState == null) {
                    // Device reports no AF state at all; capture immediately.
                    captureStillPicture();
                  } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState
                      || CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
                    // CONTROL_AE_STATE can be null on some devices
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                      // Exposure already converged (or unreported) — shoot now.
                      mState = STATE_PICTURE_TAKEN;
                      captureStillPicture();
                    } else {
                      // Exposure not ready; run the AE precapture sequence first.
                      runPrecaptureSequence();
                    }
                  }
                  break;
                }
              case STATE_WAITING_PRECAPTURE:
                {
                  // CONTROL_AE_STATE can be null on some devices
                  Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                  // NOTE(review): CONTROL_AE_STATE_FLASH_REQUIRED is referenced via
                  // CaptureRequest here but CaptureResult elsewhere — both inherit
                  // the same constant; CaptureResult would be more consistent.
                  if (aeState == null
                      || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE
                      || aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
                    mState = STATE_WAITING_NON_PRECAPTURE;
                  }
                  break;
                }
              case STATE_WAITING_NON_PRECAPTURE:
                {
                  // CONTROL_AE_STATE can be null on some devices
                  Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                  // Precapture metering finished — safe to take the still picture.
                  if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                    mState = STATE_PICTURE_TAKEN;
                    captureStillPicture();
                  }
                  break;
                }
            }
          }

          @Override
          public void onCaptureProgressed(
              @NonNull CameraCaptureSession session,
              @NonNull CaptureRequest request,
              @NonNull CaptureResult partialResult) {
            process(partialResult);
          }

          @Override
          public void onCaptureCompleted(
              @NonNull CameraCaptureSession session,
              @NonNull CaptureRequest request,
              @NonNull TotalCaptureResult result) {
            process(result);
          }
        };
  }