@Override
  protected Void doInBackground(Void... params) {

    int bufferSize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);

    AudioRecord audioRecord =
        new AudioRecord(
            controller.getAudioSource(),
            frequency,
            channelConfiguration,
            audioEncoding,
            bufferSize);

    try {

      short[] buffer = new short[blockSize];

      audioRecord.startRecording();

      while (controller.isStarted()) {
        int bufferReadSize = audioRecord.read(buffer, 0, blockSize);

        DataBlock dataBlock = new DataBlock(buffer, blockSize, bufferReadSize);

        blockingQueue.put(dataBlock);
      }

    } catch (Throwable t) {
      Log.e("AudioRecord", "Recording failed", t);
    } finally {
      // Always stop and release, even if the loop exited with an exception.
      audioRecord.stop();
      audioRecord.release();
    }

    return null;
  }
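The example above assumes a DataBlock value type and a blockingQueue that hands blocks to a consumer thread; neither is shown. A minimal sketch of what such a holder could look like (the defensive copy and the accessor names are assumptions):

public class DataBlock {
  private final short[] samples;
  private final int blockSize;
  private final int readSize;

  public DataBlock(short[] buffer, int blockSize, int readSize) {
    // Copy the samples: the producer reuses its buffer on the next read.
    this.samples = java.util.Arrays.copyOf(buffer, blockSize);
    this.blockSize = blockSize;
    this.readSize = readSize;
  }

  public short[] getSamples() { return samples; }

  public int getBlockSize() { return blockSize; }

  public int getReadSize() { return readSize; }
}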
Example #2
  /**
   * Gets an AudioRecord object using the current recording settings
   *
   * @param context the Context used to look up the saved recording settings
   */
  public static AudioRecord getRecorder(Context context) throws IllegalArgumentException {
    int sampleRate = PreferenceHelper.getSampleRate(context);
    int bufferSize = PreferenceHelper.getBufferSize(context);
    int bufferSizeAdjuster = PreferenceHelper.getBufferSizeAdjuster(context);
    int audioSource = AudioSource.MIC;

    Log.i(
        TAG_AUDIOHELPER,
        String.format(
            "Initializing AudioRecord from saved configuration. sample rate: %d, buffer: %d, buffer adjuster: %d",
            sampleRate, bufferSize, bufferSizeAdjuster));

    AudioRecord recorder =
        new AudioRecord(
            audioSource,
            sampleRate,
            Constants.DEFAULT_CHANNEL_CONFIG,
            Constants.DEFAULT_PCM_FORMAT,
            bufferSize * bufferSizeAdjuster);

    if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
      // Free the native resources before bailing out.
      recorder.release();
      throw new IllegalArgumentException(
          String.format(
              "unable to initialize AudioRecord instance, sample rate: %d, channels: %d, buffer: %d",
              sampleRate, Constants.DEFAULT_CHANNEL_CONFIG, bufferSize));
    }

    return recorder;
  }
  private boolean initRecord() {
    mRecordBufSize =
        AudioRecord.getMinBufferSize(
            SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

    if (mRecordBufSize != AudioRecord.ERROR_BAD_VALUE) {
      if (mRecordBufSize < SAMPLE_RATE) {
        mRecordBufSize = SAMPLE_RATE;
      }

      mRecorder =
          new AudioRecord(
              MediaRecorder.AudioSource.MIC,
              SAMPLE_RATE,
              AudioFormat.CHANNEL_IN_MONO,
              AudioFormat.ENCODING_PCM_16BIT,
              mRecordBufSize);

      if (mRecorder.getState() == AudioRecord.STATE_INITIALIZED) {
        mRecorder.startRecording();
        return true;
      }

      // Initialization failed: release the half-built recorder and fall through.
      Log.w(TAG, "Could not start recording.");
      mRecorder.release();
      mRecorder = null;
    } else {
      mRecordBufSize = 0;
    }

    return false;
  }
  public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);

    byte[] buffer = new byte[BUFFER_SIZE];
    DatagramPacket packet;
    InetAddress serverAddress;

    try {
      // Resolve the target host once, not on every packet.
      serverAddress = InetAddress.getByName(_server);
      int minBuf = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
      recorder =
          new AudioRecord(
              MediaRecorder.AudioSource.MIC, sampleRate, channelConfig, audioFormat, minBuf);
      recorder.startRecording();
    } catch (Exception ex) {
      Log.e("AudioStream", "Could not set up the recorder", ex);
      return;
    }

    while (recordAudio) {
      try {
        int read = recorder.read(buffer, 0, buffer.length);
        if (read <= 0) {
          continue;
        }

        // Send only the bytes actually read, not the whole buffer.
        packet = new DatagramPacket(buffer, read, serverAddress, _port);
        socket.send(packet);
      } catch (Exception ex) {
        Log.e("AudioStream", "Failed to send audio packet", ex);
      }
    }
  }
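Only the sending side appears above. For context, a hedged sketch of a matching receiver, assuming the sender's sampleRate and a mono 16-bit stream (socket setup and teardown simplified):

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import java.net.DatagramPacket;
import java.net.DatagramSocket;

// Sketch of the receiving end: plays raw PCM packets as they arrive.
public class AudioReceiver {
  public void receiveLoop(int port, int sampleRate) throws Exception {
    int minBuf = AudioTrack.getMinBufferSize(
        sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack track = new AudioTrack(
        AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        minBuf, AudioTrack.MODE_STREAM);
    track.play();

    DatagramSocket socket = new DatagramSocket(port);
    byte[] buf = new byte[minBuf];
    DatagramPacket packet = new DatagramPacket(buf, buf.length);
    while (!Thread.interrupted()) {
      socket.receive(packet); // blocks until a datagram arrives
      track.write(packet.getData(), 0, packet.getLength());
    }
    socket.close();
    track.stop();
    track.release();
  }
}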
  public void run() {
    // start the encoder
    encoder.startEncoding();
    try {
      audioRecord.startRecording();
    } catch (IllegalStateException e) {
      this.isRecording = false;
      return;
    }

    this.isRecording = true;
    while (isRecording) {
      bufferRead = audioRecord.read(samples, 0, bufferSize);
      if (bufferRead > 0) {
        // add the data to the encoder
        encoder.addData(samples, bufferRead);
      }
      try {
        // read() blocks until samples are available, so this sleep mainly
        // paces the loop rather than throttling capture.
        Thread.sleep(20);
      } catch (InterruptedException e) {
        e.printStackTrace();
      }
    }
    audioRecord.stop();
    encoder.stopEncoding();
  }
 private boolean startRecording() {
   boolean success = false;
   recBufSize =
       AudioRecord.getMinBufferSize(
           SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
   // Check for ERROR_BAD_VALUE (a negative code) before applying the floor,
   // otherwise the error would be silently overwritten with SAMPLE_RATE.
   if (recBufSize != AudioRecord.ERROR_BAD_VALUE) {
     if (recBufSize < SAMPLE_RATE) {
       recBufSize = SAMPLE_RATE;
     }
     recorder =
         new AudioRecord(
             MediaRecorder.AudioSource.MIC,
             SAMPLE_RATE,
             AudioFormat.CHANNEL_IN_MONO,
             AudioFormat.ENCODING_PCM_16BIT,
             recBufSize);
     if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
       recorder.startRecording();
       success = true;
     } else {
       try {
         recorder.release();
         recorder = null;
       } catch (Exception e) {
         e.printStackTrace();
       }
     }
   }
   return success;
 }
 private void stopMicRecording(AudioRecord record) {
   if (record != null) {
     record.stop();
     record.release();
     appendLogTextView(mHandler, "stop recording ok.\n");
   }
 }
    public void run() {
      super.run();
      ar.startRecording();
      // buffer used for reading
      byte[] buffer = new byte[bs];
      isRun = true;
      while (isRun) {
        int r = ar.read(buffer, 0, bs);
        if (r <= 0) {
          continue; // read error or no data yet
        }
        int v = 0;
        // Pull the buffer contents and accumulate the sum of squares.
        // Note: these are raw bytes, so each 16-bit sample is split across
        // two bytes and the result is only a rough level estimate.
        for (int i = 0; i < r; i++) {
          // No arithmetic optimization here, to keep the code easy to follow.
          v += buffer[i] * buffer[i];
          // v += (buffer[i] << 1);
        }
        // Sum of squares divided by the number of bytes read gives the level.
        // You can measure the white-noise floor and normalize samples against it.
        // To act on this value, post it via sendMessage and handle it in a Handler.
        // Log.d("spl", String.valueOf(v / (float) r));
        value = v / (float) r;
        // value = (int) (Math.abs((int)(v /(float)r)/10000) >> 1);
        dB = 10 * Math.log10(v / (double) r);

        // MainActivity.SetText(value,dB);
        Log.d("Data:", String.valueOf(value) + "    " + String.valueOf(dB));
        handler.post(
            new Runnable() {
              @Override
              public void run() {
                SetText(value, dB);
              }
            });
      }
      ar.stop();
    }
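Because the loop above squares raw bytes, each 16-bit sample is split in two and the level is only approximate. A sketch of the same meter reading short samples instead, reusing the ar and bs from the example (the method name is made up):

// Same level meter, but reading 16-bit samples so each squared value is
// one full PCM sample rather than half of one. 'ar' and 'bs' are the
// AudioRecord and byte buffer size from the snippet above.
double readLevelDb(android.media.AudioRecord ar, int bs) {
  short[] samples = new short[bs / 2]; // bs is in bytes, 2 bytes per sample
  int n = ar.read(samples, 0, samples.length);
  if (n <= 0) {
    return Double.NEGATIVE_INFINITY; // read error or no data
  }
  long sumSquares = 0;
  for (int i = 0; i < n; i++) {
    sumSquares += (long) samples[i] * samples[i];
  }
  // Mean square -> dB, matching the 10 * log10(v / r) form above.
  return 10 * Math.log10(sumSquares / (double) n);
}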
  public void close() {
    recordAudio = false;

    if (recorder != null) {
      recorder.stop();
      recorder.release();
      recorder = null;
    }

    socket.close();
  }
  public void run() {

    synchronized (this) {
      if (!mPlay || mParams == null) {
        return;
      }
      mRunning = true;
    }

    int trackBufSize =
        android.media.AudioTrack.getMinBufferSize(mParams.rate, mParams.outch, mParams.encoding);
    AudioTrack track =
        new AudioTrack(
            AudioManager.STREAM_VOICE_CALL,
            mParams.rate,
            mParams.outch,
            mParams.encoding,
            trackBufSize,
            AudioTrack.MODE_STREAM);
    track.play();
    appendLogTextView(mHandler, "play playback ok.\n");

    if (mParams.source == AUDIO_SOURCE_WAVE_FILE) {
      // Log.d("sakalog", "play " + mParams.wavefile);
      // WaveDataInputStream wavein = new WaveDataInputStream(mParams.wavefile);
      Log.d("sakalog", "play " + mResources.getResourceName(mParams.waveid));
      WaveDataInputStream wavein = new WaveDataInputStream(mParams.waveid);
      byte[] wave = new byte[512];
      int size = wavein.read(wave, wave.length);
      track.write(wave, 0, size);
      while (mPlay && ((size = wavein.read(wave, wave.length)) > -1)) {
        track.write(wave, 0, size);
      }
      wavein.close();
    } else {
      Log.d("sakalog", "play ");
      AudioRecord record = startMicRecording(mParams.rate, mParams.inch, mParams.encoding);
      byte[] buf = new byte[trackBufSize];
      while (mPlay) {
        int size = record.read(buf, 0, buf.length);
        track.write(buf, 0, size);
      }
      stopMicRecording(record);
    }

    track.stop();
    track.release();
    appendLogTextView(mHandler, "stop playback ok.\n");

    synchronized (this) {
      mPlay = false;
      mRunning = false;
    }
  }
 protected void onDestroy() {
   mFlag = false; // signal the worker loop to exit before tearing down
   mAudioRecod.stop();
   mAudioRecod.release();
   mAudioRecod = null;
   mAudioTrack.stop();
   mAudioTrack.release();
   mAudioTrack = null;
   unregisterReceiver(mReceiver);
   super.onDestroy();
 }
  /** Stops the stream. */
  public synchronized void stop() {
    if (mStreaming) {

      Log.d(TAG, "Interrupting threads...");
      mThread.interrupt();
      mAudioRecord.stop();
      mAudioRecord.release();
      mAudioRecord = null;

      super.stop();
    }
  }
Example #13
 public void run() {
   audioRecord.startRecording();
   AudioCodec.iLbcinit(20);
   this.isRecording = true;
   StartSendAudio(); // presumably blocks while audio is captured and sent
   if (audioRecord != null) {
     audioRecord.stop();
     audioRecord.release(); // release resources
     audioRecord = null;
   }
   isRecording = false;
 }
    private void startAudioRecording(File outputFile) throws IOException {
      if (outputFile != null) dataOutput = new AudioWavOutputStream(outputFile, 1, samplingRate);

      final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
      final int FORMAT = AudioFormat.ENCODING_PCM_16BIT;
      int bufferSize = AudioRecord.getMinBufferSize(samplingRate, CHANNEL_CONFIG, FORMAT) * 4;
      audioRecord =
          new AudioRecord(
              MediaRecorder.AudioSource.MIC, samplingRate, CHANNEL_CONFIG, FORMAT, bufferSize);

      audioRecord.startRecording();
    }
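The read loop that feeds dataOutput between start and stop is not shown. A minimal sketch, assuming AudioWavOutputStream behaves like a normal OutputStream and that a volatile flag is cleared by stopAudioRecording() (the flag and method names are assumptions):

// Assumed run flag, cleared by stopAudioRecording() before audioRecord.stop().
private volatile boolean recording = true;

// Pulls PCM from audioRecord and streams it into the WAV output.
// 'bufferSize' is assumed to be the value computed in startAudioRecording().
private void recordLoop(int bufferSize) throws IOException {
  byte[] buffer = new byte[bufferSize];
  while (recording) {
    int read = audioRecord.read(buffer, 0, buffer.length);
    if (read > 0 && dataOutput != null) {
      dataOutput.write(buffer, 0, read); // the stream writes the WAV header itself
    }
  }
}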
 private void stopAudioRecording() {
   audioRecord.stop();
   if (dataOutput != null) {
     try {
       dataOutput.close();
     } catch (IOException e) {
       e.printStackTrace();
     }
     dataOutput = null;
   }
   audioRecord.release();
 }
  @Override
  public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
    Modulation.initDecoder(); // init demodulator
    Modulation.initProcess();
    recdata = new short[recBufSize / 2];

    while (!Thread.interrupted() && !mStopRecording) {
      if (recorder == null) {
        break;
      }
      nread = recorder.read(recdata, 0, recBufSize / 2);

      if (nread > 0) {
        retval = Modulation.process(recdata, nread);
        if (retval == 2) {
          String str = "";
          byte[] result = Modulation.getResult();
          try {
            str = new String(result, "UTF-8");
          } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
          }

          if (mStopRecording) {
            continue;
          }

          Message msg = mHandler.obtainMessage();
          msg.what = mEventId;
          msg.obj = str;

          mHandler.sendMessage(msg);
          try {
            // After delivering a message, sleep briefly so the main thread
            // gets a chance to stop recording immediately.
            Thread.sleep(200);
          } catch (InterruptedException e) {
            continue;
          }
        }
      }
    }
    try {
      recorder.stop();
      recorder.release();
    } catch (Exception e) {
      e.printStackTrace();
    }
    recorder = null;
    Modulation.releaseDecoder();
  }
Example #17
  @Override
  protected void onStop() {
    super.onStop();
    Log.v(ac_tag, "onStop");

    if (audioRecord != null) {
      // Stop only if the recorder is actually recording.
      if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
        audioRecord.stop();
      }
      audioRecord.release();
      audioRecord = null;
    }
  }
  public void stopRecording() {
    if (null != recorder) {
      isRecording = false;

      recorder.stop();
      recorder.release();

      recorder = null;
      recordingThread = null;
      copyWaveFile(getTempFilename(), getFilename());
      deleteTempFile();
    }
  }
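copyWaveFile() and the filename helpers are not shown. The usual implementation prepends a 44-byte RIFF/WAVE header to the raw PCM in the temp file; a hedged sketch, assuming mono 16-bit PCM and the class's RECORDER_SAMPLERATE and bufferSize fields:

private void copyWaveFile(String inFilename, String outFilename) {
  // Assumed format: must match the AudioRecord configuration above.
  long sampleRate = RECORDER_SAMPLERATE;
  int channels = 1;
  long byteRate = 16 * sampleRate * channels / 8;

  try (FileInputStream in = new FileInputStream(inFilename);
      FileOutputStream out = new FileOutputStream(outFilename)) {
    long audioLen = in.getChannel().size();
    writeWaveFileHeader(out, audioLen, audioLen + 36, sampleRate, channels, byteRate);
    byte[] data = new byte[bufferSize];
    int read;
    while ((read = in.read(data)) != -1) {
      out.write(data, 0, read);
    }
  } catch (IOException e) {
    Log.d("SAN", e.getMessage());
  }
}

private void writeWaveFileHeader(FileOutputStream out, long audioLen, long dataLen,
    long sampleRate, int channels, long byteRate) throws IOException {
  // Standard 44-byte RIFF/WAVE header for uncompressed PCM.
  java.nio.ByteBuffer h =
      java.nio.ByteBuffer.allocate(44).order(java.nio.ByteOrder.LITTLE_ENDIAN);
  h.put("RIFF".getBytes()).putInt((int) dataLen).put("WAVE".getBytes());
  h.put("fmt ".getBytes())
      .putInt(16)                              // PCM sub-chunk size
      .putShort((short) 1)                     // audio format: 1 = PCM
      .putShort((short) channels)
      .putInt((int) sampleRate)
      .putInt((int) byteRate)
      .putShort((short) (channels * 16 / 8))   // block align
      .putShort((short) 16);                   // bits per sample
  h.put("data".getBytes()).putInt((int) audioLen);
  out.write(h.array());
}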
Example #19
  @Override
  protected void onResume() {
    Log.v(ac_tag, "onResume");
    super.onResume();

    Calendar c = Calendar.getInstance();
    int weekIndex = c.get(Calendar.DAY_OF_WEEK) - 1;

    this.timeView.setText(
        decimalFormat.format(c.get(Calendar.HOUR_OF_DAY))
            + ":"
            + decimalFormat.format(c.get(Calendar.MINUTE)));
    this.dateView.setText(
        decimalFormat.format(c.get(Calendar.MONTH) + 1)
            + "月"
            + decimalFormat.format(c.get(Calendar.DAY_OF_MONTH))
            + "日  "
            + weekDaysName[weekIndex]);

    this.mPassView.setVisibility(View.GONE);

    this.pbCircle.setVisibility(View.INVISIBLE);
    this.progressView.setAlpha(1f);
    this.progressView.setVisibility(View.VISIBLE);
    this.progressView.setText("正在初始化...");
    PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
    boolean isScreenOn = pm.isScreenOn();
    if (isScreenOn) {
      if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        isRecording = false;
      } else {
        if (!isRecording) {
          audioRecord.startRecording();
          isRecording = true;
        }
        STask = new SecondsRoundTask();

        ATask = new AuthenTask();

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
          STask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
          ATask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
        } else {
          STask.execute();
          ATask.execute();
        }
      }
    }
  }
 /** Starts the recording, and sets the state to RECORDING. Call after prepare(). */
 public void start() {
   if (state == State.READY) {
     if (rUncompressed) {
       payloadSize = 0;
       audioRecorder.startRecording();
       // Initial read primes the recorder (return value unused); subsequent
       // reads are presumably driven elsewhere, e.g. by an update listener.
       audioRecorder.read(buffer, 0, buffer.length);
     } else {
       mediaRecorder.start();
     }
     state = State.RECORDING;
   } else {
     Log.e(ExtAudioRecorder.class.getName(), "start() called on illegal state");
     state = State.ERROR;
   }
 }
Example #21
 @Override
 protected void onPause() {
   super.onPause();
   thread.interrupt();
   thread = null;
   try {
     if (audio != null) {
       audio.stop();
       audio.release();
       audio = null;
     }
   } catch (Exception e) {
     e.printStackTrace();
   }
 }
 public AndroidAudioRecorder(int samplingRate, boolean isMono) {
   int channelConfig = isMono ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
   int minBufferSize =
       AudioRecord.getMinBufferSize(samplingRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
   recorder =
       new AudioRecord(
           MediaRecorder.AudioSource.MIC,
           samplingRate,
           channelConfig,
           AudioFormat.ENCODING_PCM_16BIT,
           minBufferSize);
   if (recorder.getState() != AudioRecord.STATE_INITIALIZED)
     throw new GdxRuntimeException(
         "Unable to initialize AudioRecorder.\nDo you have the RECORD_AUDIO permission?");
   recorder.startRecording();
 }
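The exception message names the usual cause of an uninitialized recorder: a missing RECORD_AUDIO permission. On Android 6.0+ the manifest entry alone is not enough, so the hosting Activity also needs a runtime check; a sketch using the androidx support classes (the class and method names here are made up):

import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

public final class MicPermission {
  // Arbitrary request code, matched in onRequestPermissionsResult().
  public static final int REQUEST_RECORD_AUDIO = 1;

  // Returns true if recording may start now; otherwise asks the user first.
  public static boolean ensure(Activity activity) {
    if (ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO)
        != PackageManager.PERMISSION_GRANTED) {
      ActivityCompat.requestPermissions(
          activity, new String[] {Manifest.permission.RECORD_AUDIO}, REQUEST_RECORD_AUDIO);
      return false; // wait for the callback before constructing the recorder
    }
    return true;
  }
}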
Example #23
  protected StampedAudio readMic() {
    StampedAudio stampedAudio = null;

    if (_mic != null) {
      int timestamp = getOutboundTimestamp();
      // read is blocking.
      int bufferRead = _mic.read(_micFramebuff, 0, _micFramebuff.length);
      Log.verb(
          this.getClass().getSimpleName()
              + ".readMic(): length="
              + _micFramebuff.length
              + ", bufferRead="
              + bufferRead);

      short[] sframe = _micFramebuff;
      // short[] seframe = effectIn(sframe);
      byte[] tbuff = _encoder.encode_frame(sframe);

      if (tbuff != null) {
        stampedAudio = _audio.getCleanStampedAudio();
        stampedAudio.setStampAndBytes(tbuff, 0, tbuff.length, timestamp);
        _audio.saveReadStampedAudio(stampedAudio);

        _countFrames++;
      }
    }
    return stampedAudio;
  }
Example #24
 protected void destroy() {
   Log.debug(this.getClass().getSimpleName() + ".destroy(): ");
   if (_mic != null) {
     _mic.release();
     Log.debug("\tmic released");
   }
 }
  private void init() {
    // Recording needs an input channel mask (CHANNEL_IN_*); CHANNEL_OUT_*
    // values are only valid for playback.
    mBuffer =
        AudioRecord.getMinBufferSize(
            44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

    mAudioRecod =
        new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            44100,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            mBuffer);

    mAudioTrack =
        new AudioTrack(
            AudioManager.STREAM_VOICE_CALL,
            44100,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            mBuffer,
            AudioTrack.MODE_STREAM);

    mInBytes = new byte[mBuffer];
    mOutBytes = new byte[mBuffer];
    mLinkedList = new LinkedList<byte[]>();
  }
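init() only builds the recorder and the track; the loop that moves data between them lives elsewhere in the class. A minimal sketch of such a pump using the fields above, assuming the mFlag cleared in onDestroy() earlier is its run flag:

// Mic-to-speaker pump: read from the recorder, write straight to the track.
private void loopback() {
  mAudioRecod.startRecording();
  mAudioTrack.play();
  while (mFlag) {
    int read = mAudioRecod.read(mInBytes, 0, mBuffer);
    if (read > 0) {
      mAudioTrack.write(mInBytes, 0, read);
    }
  }
}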
  public void startRecording() {
    recorder =
        new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            RECORDER_SAMPLERATE,
            RECORDER_CHANNELS,
            RECORDER_AUDIO_ENCODING,
            bufferSize);

    recorder.startRecording();

    isRecording = true;

    recordingThread =
        new Thread(
            new Runnable() {

              @Override
              public void run() {
                writeAudioDataToFile();
              }
            },
            "AudioRecorder Thread");

    recordingThread.start();
  }
  // STEP 4: setup recording
  public void listen() {
    recorder.startRecording();

    tarsosFormat =
        new be.hogent.tarsos.dsp.AudioFormat(
            (float) SAMPLE_RATE, // sample rate
            16, // bit depth
            1, // channels
            true, // signed samples?
            false // big endian?
            );

    Thread listeningThread =
        new Thread(
            new Runnable() {

              @Override
              public void run() {
                while (mIsRecording) {
                  int bufferReadResult = recorder.read(buffer, 0, buffer.length);
                  AudioEvent audioEvent = new AudioEvent(tarsosFormat, bufferReadResult);
                  audioEvent.setFloatBufferWithByteBuffer(buffer);
                  mPitchProcessor.process(audioEvent);
                }
                recorder.stop();
              }
            });

    listeningThread.start();
  }
Example #28
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_blackout);

    // Hide the status bar.
    View decorView = getWindow().getDecorView();
    int uiOptions = View.SYSTEM_UI_FLAG_FULLSCREEN;
    decorView.setSystemUiVisibility(uiOptions);

    // Sets screen brightness to lowest possible setting
    WindowManager.LayoutParams params = getWindow().getAttributes();
    params.screenBrightness = 0;
    getWindow().setAttributes(params);

    // Ensures the device won't lock while the app is running
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    // Prepare to listen for audio from the user: compute the minimum
    // record buffer size for the chosen sample rate and format.
    try {
      bufferSize =
          AudioRecord.getMinBufferSize(
              sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    } catch (Exception e) {
      android.util.Log.e("TrackingFlow", "Exception", e);
    }
  }
  private void writeAudioDataToFile() {
    byte[] data = new byte[bufferSize];
    String filename = getTempFilename();
    FileOutputStream os = null;

    try {
      os = new FileOutputStream(filename);
    } catch (FileNotFoundException e) {
      Log.d("SAN", e.getMessage());
    }

    int read = 0;

    if (null != os) {
      while (isRecording) {
        read = recorder.read(data, 0, bufferSize);

        if (read > 0) {
          try {
            // Write only the bytes actually read, not the whole buffer.
            os.write(data, 0, read);
          } catch (IOException e) {
            Log.d("SAN", e.getMessage());
          }
        }
      }

      try {
        os.close();
      } catch (IOException e) {
        Log.d("SAN", e.getMessage());
      }
    }
  }
 @Override
 public void read(short[] samples, int offset, int numSamples) {
   int read = 0;
   while (read != numSamples) {
     int result = recorder.read(samples, offset + read, numSamples - read);
     if (result < 0) {
       break; // read returned an error code; avoid spinning forever
     }
     read += result;
   }
 }