Example #1
  protected StampedAudio readMic() {
    StampedAudio stampedAudio = null;

    if (_mic != null) {
      int timestamp = getOutboundTimestamp();
      // read is blocking.
      int bufferRead = _mic.read(_micFramebuff, 0, _micFramebuff.length);
      Log.verb(
          this.getClass().getSimpleName()
              + ".readMic(): length="
              + _micFramebuff.length
              + ", bufferRead="
              + bufferRead);

      short[] sframe = _micFramebuff;
      // short[] seframe = effectIn(sframe);
      // Encode the captured PCM frame; the encoder may return null (no packet for this frame).
      byte[] tbuff = _encoder.encode_frame(sframe);

      if (tbuff != null) {
        // Wrap the encoded bytes and their outbound timestamp in a StampedAudio
        // and hand it back to the audio layer.
        stampedAudio = _audio.getCleanStampedAudio();
        stampedAudio.setStampAndBytes(tbuff, 0, tbuff.length, timestamp);
        _audio.saveReadStampedAudio(stampedAudio);

        _countFrames++;
      }
    }
    return stampedAudio;
  }
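The method above follows a simple read, encode, stamp, queue pattern. The self-contained sketch below restates that pattern without the library types; the frame size, the 20 ms interval, the pass-through encodeFrame() and the Frame holder are all illustrative assumptions, not part of the API used in the example.

import java.util.ArrayDeque;
import java.util.Deque;

// Minimal sketch of the read -> encode -> stamp -> queue pattern from readMic().
// Frame, encodeFrame() and FRAME_INTERVAL_MS are hypothetical stand-ins.
public class CapturePatternSketch {

  static final int FRAME_INTERVAL_MS = 20; // assumed frame interval, in ms

  // Stand-in for StampedAudio: the encoded bytes plus their outbound timestamp.
  static class Frame {
    final byte[] payload;
    final int timestamp;

    Frame(byte[] payload, int timestamp) {
      this.payload = payload;
      this.timestamp = timestamp;
    }
  }

  // Stand-in for _encoder.encode_frame(): packs 16-bit samples into bytes unchanged.
  static byte[] encodeFrame(short[] samples) {
    byte[] out = new byte[samples.length * 2];
    for (int i = 0; i < samples.length; i++) {
      out[2 * i] = (byte) (samples[i] >> 8);
      out[2 * i + 1] = (byte) samples[i];
    }
    return out;
  }

  public static void main(String[] args) {
    Deque<Frame> outbound = new ArrayDeque<>();
    int countFrames = 0;

    // Pretend three frames of silence were read from the microphone.
    for (int n = 0; n < 3; n++) {
      short[] micFrame = new short[160];                // e.g. 20 ms at 8 kHz
      int timestamp = countFrames * FRAME_INTERVAL_MS;  // same arithmetic as getOutboundTimestamp()
      byte[] encoded = encodeFrame(micFrame);
      outbound.add(new Frame(encoded, timestamp));      // mirrors saveReadStampedAudio()
      countFrames++;
    }

    for (Frame f : outbound) {
      System.out.println("frame: bytes=" + f.payload.length + " timestamp=" + f.timestamp);
    }
  }
}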
Example #2
  public void run() {
    // android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
    // _mic.startRecording();
    // Capture start time and the per-frame interval used to pace the loop.
    long then = getTime();
    long ftime = _audio.getFrameInterval();
    while (_me != null) {

      readMic();
      long now = getTime();
      // Frame _countFrames is due one frame interval per captured frame after the start time.
      long nexpected = (ftime * _countFrames) + then;
      long nap = (nexpected - now);
      if (_countFrames % 50 == 0) Log.debug("would sleep " + nap);
      // The computed pacing is only logged; the loop sleeps a fixed 1 ms instead.
      // (Sleeping nap directly would also need clamping: Thread.sleep throws
      // IllegalArgumentException for negative values.)
      nap = 1;
      try {
        Thread.sleep(nap);
      } catch (InterruptedException ex) {
        Log.verb(
            this.getClass().getSimpleName() + ".run(): InterruptedException: " + ex.getMessage());
      }
    }
  }
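The pacing arithmetic in run() says that frame n is due at then + n * ftime, so nap is the time left until that deadline. A small worked example, assuming a 20 ms frame interval (an assumption, not stated in the code above):

public class PacingSketch {
  public static void main(String[] args) {
    long then = 1_000;       // hypothetical capture start time, in ms
    long ftime = 20;         // assumed frame interval, in ms
    long countFrames = 50;   // frames already read
    long now = 1_990;        // hypothetical current time, in ms

    long nexpected = (ftime * countFrames) + then; // 1000 + 20*50 = 2000 ms: when frame 50 is due
    long nap = nexpected - now;                    // 2000 - 1990 = 10 ms left until that deadline

    // run() only logs this value and sleeps 1 ms instead; a negative nap would mean
    // capture is running behind schedule.
    System.out.println("nexpected=" + nexpected + " nap=" + nap);
  }
}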
Example #3
  protected int getOutboundTimestamp() {
    // Outbound timestamp in the frame-interval time base: frames read so far
    // times the frame interval. The (int) cast applies to _countFrames before
    // the multiplication.
    return (int) _countFrames * _audio.getFrameInterval();
  }
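A worked instance of that formula, with a 20 ms frame interval assumed purely for illustration:

  // Assuming _audio.getFrameInterval() == 20 (ms):
  //   after 250 frames, the outbound timestamp is (int) 250 * 20 = 5000,
  //   i.e. the frame that starts 5 seconds into the capture.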