コード例 #1
0
ファイル: Microphone.java プロジェクト: jyao6/respeakerApp
 /*
  * Drains the recorder into the WAV file while recording is active.
  * Appends to filePath and tracks fileSize as bytes are written.
  */
 private void writeToFile() {
   byte[] audioData = new byte[RECORDER_BUFFER_SIZE];
   BufferedOutputStream outputStream = null;
   try {
     // Append mode so successive flushes extend the same file.
     outputStream = new BufferedOutputStream(new FileOutputStream(filePath, true));
     while (isRecording) {
       // read() returns the number of bytes actually read, or a negative
       // error code on failure.
       int readCode = recorder.read(audioData, 0, RECORDER_BUFFER_SIZE);
       if (readCode > 0) {
         try {
           // Write only the bytes actually read; writing the whole buffer
           // would pad the file with stale data on a short read.
           outputStream.write(audioData, 0, readCode);
           fileSize += readCode;
         } catch (IOException e) {
           Log.e("Microphone", "Error writing audio data to file.");
         }
       }
     }
   } catch (FileNotFoundException e) {
     Log.e("Microphone", "Output file not found.");
   } finally {
     // Close in finally so the stream is released on every exit path.
     if (outputStream != null) {
       try {
         outputStream.close();
       } catch (IOException e) {
         Log.e("Microphone", "Error closing audiowriter.");
       }
     }
   }
 }
コード例 #2
0
    /**
     * Capture loop: reads raw bytes from the recorder, computes the mean
     * square as a linear volume estimate plus a dB value, and posts them to
     * the UI via the handler until isRun is cleared.
     */
    public void run() {
      super.run();
      ar.startRecording();
      // Reusable read buffer.
      byte[] buffer = new byte[bs];
      isRun = true;
      while (isRun) {
        int r = ar.read(buffer, 0, bs);
        // read() returns 0 when no data is available and a negative error
        // code on failure; skip both to avoid dividing by zero / NaN below.
        if (r <= 0) {
          continue;
        }
        // Long accumulator: an int sum of squares can overflow on large
        // buffers. Only the r bytes actually read are summed (the original
        // scanned the whole buffer, including stale tail data).
        long v = 0;
        for (int i = 0; i < r; i++) {
          v += buffer[i] * buffer[i];
        }
        // Mean square -> linear volume; 10*log10(mean square) -> decibels.
        value = v / (float) r;
        dB = 10 * Math.log10(v / (double) r);

        Log.d("Data:", String.valueOf(value) + "    " + String.valueOf(dB));
        handler.post(
            new Runnable() {
              @Override
              public void run() {
                SetText(value, dB);
              }
            });
      }
      ar.stop();
    }
コード例 #3
0
 /**
  * Blocking read: fills {@code samples} starting at {@code offset} with
  * exactly {@code numSamples} samples, looping over short reads. Stops early
  * if the recorder reports an error.
  */
 @Override
 public void read(short[] samples, int offset, int numSamples) {
   int read = 0;
   while (read != numSamples) {
     int result = recorder.read(samples, offset + read, numSamples - read);
     // read() returns a negative error code on failure; the original added
     // it to 'read', walking the offset backwards and looping forever.
     if (result < 0) {
       break;
     }
     read += result;
   }
 }
コード例 #4
0
ファイル: AndroidAudioMic.java プロジェクト: e6/PhonoSDK
  /**
   * Reads one frame from the microphone, encodes it, and hands the stamped
   * result to the audio layer. Returns null when no mic is available or the
   * encoder produced no output for this frame.
   */
  protected StampedAudio readMic() {
    if (_mic == null) {
      return null;
    }

    int timestamp = getOutboundTimestamp();
    // Blocking read of one microphone frame.
    int bufferRead = _mic.read(_micFramebuff, 0, _micFramebuff.length);
    Log.verb(
        this.getClass().getSimpleName()
            + ".readMic(): length="
            + _micFramebuff.length
            + ", bufferRead="
            + bufferRead);

    // Encode the captured PCM frame.
    byte[] encoded = _encoder.encode_frame(_micFramebuff);
    if (encoded == null) {
      return null;
    }

    // Stamp the encoded bytes and register them with the audio layer.
    StampedAudio stampedAudio = _audio.getCleanStampedAudio();
    stampedAudio.setStampAndBytes(encoded, 0, encoded.length, timestamp);
    _audio.saveReadStampedAudio(stampedAudio);
    _countFrames++;

    return stampedAudio;
  }
コード例 #5
0
 /**
  * Probes the candidate sampling rates until one initializes, then records
  * from it while 'recording' is set, notifying the handler whenever the
  * level crosses minVolume. Releases the recorder on every path.
  */
 private void analyze() {
   for (int i = 0; i < samplingRates.length; i++) {
     // Minimum buffer size the hardware allows for this rate.
     int minSize =
         AudioRecord.getMinBufferSize(
             samplingRates[i],
             AudioFormat.CHANNEL_CONFIGURATION_MONO,
             AudioFormat.ENCODING_PCM_16BIT);
     AudioRecord ar =
         new AudioRecord(
             MediaRecorder.AudioSource.MIC,
             samplingRates[i],
             AudioFormat.CHANNEL_CONFIGURATION_MONO,
             AudioFormat.ENCODING_PCM_16BIT,
             minSize);
     if (ar.getState() == AudioRecord.STATE_INITIALIZED) {
       short[] buff = new short[minSize];
       ar.startRecording();
       while (recording) {
         // read() returns the number of shorts actually delivered, or a
         // negative error code; scan only that prefix (the original scanned
         // the whole buffer, including stale samples).
         int read = ar.read(buff, 0, minSize);
         for (int j = 0; j < read; j++) {
           // Notify once per sample that exceeds the volume threshold.
           if (Math.abs(buff[j]) > minVolume) {
             handler.sendEmptyMessage(0);
           }
         }
       }
       ar.stop();
       ar.release();
       // A working rate was found — stop probing (the original emulated
       // break by assigning i = samplingRates.length).
       break;
     }
     ar.release();
   }
 }
コード例 #6
0
 /**
  * Periodic AudioRecord callback: drains the recorder into {@code buffer},
  * appends the buffer to the output file, and tracks the peak amplitude in
  * {@code cAmplitude}.
  *
  * NOTE(review): the return value of read() is ignored and the full buffer
  * is always written — on a short read this appends stale bytes. Confirm the
  * notification period guarantees a complete buffer per callback.
  */
 public void onPeriodicNotification(AudioRecord recorder) {
   audioRecorder.read(buffer, 0, buffer.length); // Fill buffer
   try {
     randomAccessWriter.write(buffer); // Write buffer to file
     payloadSize += buffer.length;
     if (bSamples == 16) {
       for (int i = 0; i < buffer.length / 2; i++) { // 16bit sample size
         // Reassemble each 16-bit sample from its two bytes (byte order is
         // whatever getShort(), defined elsewhere, implements).
         short curSample = getShort(buffer[i * 2], buffer[i * 2 + 1]);
         if (curSample > cAmplitude) { // Check amplitude
           cAmplitude = curSample;
         }
       }
     } else { // 8bit sample size
       for (int i = 0; i < buffer.length; i++) {
         if (buffer[i] > cAmplitude) { // Check amplitude
           cAmplitude = buffer[i];
         }
       }
     }
   } catch (IOException e) {
     Log.e(
         ExtAudioRecorder.class.getName(),
         "Error occured in updateListener, recording is aborted");
     // stop();
   }
 }
コード例 #7
0
  /**
   * Capture loop: starts the encoder and the recorder, then feeds captured
   * samples to the encoder until isRecording is cleared.
   */
  public void run() {
    // Start the encoder before any audio arrives.
    encoder.startEncoding();
    try {
      audioRecord.startRecording();
    } catch (IllegalStateException e) {
      // Recorder was not ready to record; abort.
      this.isRecording = false;
      return;
    }

    this.isRecording = true;
    while (isRecording) {
      bufferRead = audioRecord.read(samples, 0, bufferSize);
      if (bufferRead > 0) {
        // Feed only the samples actually read to the encoder.
        encoder.addData(samples, bufferRead);
      }
      try {
        // Short pause between reads to avoid a tight spin.
        Thread.sleep(20);
      } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can observe the interrupt
        // (the original swallowed it with printStackTrace only).
        Thread.currentThread().interrupt();
      }
    }
    audioRecord.stop();
    encoder.stopEncoding();
  }
コード例 #8
0
  /**
   * Capture thread: records from the microphone and streams each read chunk
   * to the configured server as a UDP packet until recordAudio is cleared.
   */
  public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);

    byte[] buffer = new byte[BUFFER_SIZE];

    try {
      int minBuf = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
      recorder =
          new AudioRecord(
              MediaRecorder.AudioSource.MIC, sampleRate, channelConfig, audioFormat, minBuf);
      recorder.startRecording();
    } catch (Exception ex) {
      // Recorder could not be created or started; without it every loop
      // iteration below would NPE (the original's empty catch hid this),
      // so give up now.
      return;
    }

    while (recordAudio) {
      try {
        // Send only the bytes actually read; read() returns a negative
        // error code on failure.
        int read = recorder.read(buffer, 0, buffer.length);
        if (read > 0) {
          DatagramPacket packet =
              new DatagramPacket(buffer, read, InetAddress.getByName(_server), _port);
          socket.send(packet);
        }
      } catch (Exception ex) {
        // Transient network/recorder error — keep trying while recording.
      }
    }

    // Release the native recorder resources when done.
    recorder.stop();
    recorder.release();
  }
コード例 #9
0
  /**
   * Background capture task: records fixed-size blocks and hands them to the
   * consumer queue until the controller stops. The recorder is always stopped
   * and released, even when startup fails.
   */
  @Override
  protected Void doInBackground(Void... params) {

    int bufferSize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);

    AudioRecord audioRecord =
        new AudioRecord(
            controller.getAudioSource(),
            frequency,
            channelConfiguration,
            audioEncoding,
            bufferSize);

    try {

      short[] buffer = new short[blockSize];

      audioRecord.startRecording();

      while (controller.isStarted()) {
        int bufferReadSize = audioRecord.read(buffer, 0, blockSize);

        DataBlock dataBlock = new DataBlock(buffer, blockSize, bufferReadSize);

        blockingQueue.put(dataBlock);
      }

    } catch (Throwable t) {
      // Log.e("AudioRecord", "Recording Failed");
    } finally {
      // The original called stop() outside the try: if startRecording()
      // failed, stop() threw an uncaught IllegalStateException, and the
      // recorder was never released.
      try {
        audioRecord.stop();
      } catch (IllegalStateException ignored) {
        // Recorder never started — nothing to stop.
      }
      audioRecord.release();
    }

    return null;
  }
コード例 #10
0
  /**
   * Streams raw recorder data into a temp file while isRecording is set.
   * Logs (but does not propagate) file errors, matching the original
   * best-effort behavior.
   */
  private void writeAudioDataToFile() {
    byte data[] = new byte[bufferSize];
    String filename = getTempFilename();
    FileOutputStream os = null;

    try {
      os = new FileOutputStream(filename);
    } catch (FileNotFoundException e) {
      Log.d("SAN", e.getMessage());
    }

    if (null != os) {
      while (isRecording) {
        // read() returns the byte count actually delivered, or a negative
        // error code.
        int read = recorder.read(data, 0, bufferSize);

        if (read > 0) {
          try {
            // Write only the valid prefix; the original wrote the whole
            // buffer, padding the file with stale data on short reads.
            os.write(data, 0, read);
          } catch (IOException e) {
            Log.d("SAN", e.getMessage());
          }
        }
      }

      try {
        os.close();
      } catch (IOException e) {
        Log.d("SAN", e.getMessage());
      }
    }
  }
コード例 #11
0
  /**
   * Playback thread body: opens an AudioTrack and streams either a bundled
   * wave resource or live microphone input into it until mPlay is cleared,
   * then releases the track and clears the run flags.
   */
  public void run() {

    synchronized (this) {
      // Bail out if playback was cancelled before the thread started or no
      // parameters were supplied.
      if (!mPlay || mParams == null) {
        return;
      }
      mRunning = true;
    }

    int trackBufSize =
        android.media.AudioTrack.getMinBufferSize(mParams.rate, mParams.outch, mParams.encoding);
    AudioTrack track =
        new AudioTrack(
            AudioManager.STREAM_VOICE_CALL,
            mParams.rate,
            mParams.outch,
            mParams.encoding,
            trackBufSize,
            AudioTrack.MODE_STREAM);
    track.play();
    appendLogTextView(mHandler, "play playback ok.\n");

    if (mParams.source == AUDIO_SOURCE_WAVE_FILE) {
      // Stream the wave resource to the track in 512-byte chunks.
      // Log.d("sakalog", "play " + mParams.wavefile);
      // WaveDataInputStream wavein = new WaveDataInputStream(mParams.wavefile);
      Log.d("sakalog", "play " + mResources.getResourceName(mParams.waveid));
      WaveDataInputStream wavein = new WaveDataInputStream(mParams.waveid);
      byte[] wave = new byte[512];
      // NOTE(review): the first chunk is written without checking for EOF
      // (read() may return -1) — confirm WaveDataInputStream's contract
      // guarantees data on the first read.
      int size = wavein.read(wave, wave.length);
      track.write(wave, 0, size);
      while (mPlay && ((size = wavein.read(wave, wave.length)) > -1)) {
        track.write(wave, 0, size);
      }
      wavein.close();
    } else {
      // Loopback mode: copy microphone input straight to the playback track.
      Log.d("sakalog", "play ");
      AudioRecord record = startMicRecording(mParams.rate, mParams.inch, mParams.encoding);
      byte[] buf = new byte[trackBufSize];
      while (mPlay) {
        int size = record.read(buf, 0, buf.length);
        track.write(buf, 0, size);
      }
      stopMicRecording(record);
    }

    track.stop();
    track.release();
    appendLogTextView(mHandler, "stop playback ok.\n");

    synchronized (this) {
      mPlay = false;
      mRunning = false;
    }
  }
コード例 #12
0
 /**
  * Blocking-fills {@code buffer} with exactly {@code bytesToRead} bytes from
  * the recorder, looping over partial reads. Returns true on success, false
  * if recording stopped or the recorder reported an error first.
  */
 private boolean readData(byte[] buffer, final int bytesToRead) {
   int filled = 0;
   while (running.get()) {
     int chunk = audioRecord.read(buffer, filled, bytesToRead - filled);
     // Negative result is an AudioRecord error code.
     if (chunk < 0) {
       return false;
     }
     filled += chunk;
     if (filled == bytesToRead) {
       return true;
     }
   }
   return false;
 }
コード例 #13
0
  /**
   * Demodulation loop: captures audio and feeds it to the Modulation decoder
   * until interrupted or mStopRecording is set; each fully decoded message is
   * forwarded to mHandler as a UTF-8 string.
   */
  @Override
  public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
    Modulation.initDecoder(); // init demodulator
    Modulation.initProcess();
    // recBufSize is in bytes; the short[] holds half as many samples.
    recdata = new short[recBufSize / 2];

    while (!Thread.interrupted() && !mStopRecording) {
      if (recorder == null) {
        break;
      }
      nread = recorder.read(recdata, 0, recBufSize / 2);

      if (nread > 0) {
        // retval == 2 signals that a complete message has been demodulated.
        retval = Modulation.process(recdata, nread);
        if (retval == 2) {
          String str = "";
          byte[] result = Modulation.getResult();
          try {
            str = new String(result, "UTF-8");
          } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
          }

          // Drop the decoded message if a stop was requested meanwhile.
          if (mStopRecording) {
            continue;
          }

          Message msg = mHandler.obtainMessage();
          msg.what = mEventId;
          msg.obj = str;

          mHandler.sendMessage(msg);
          try {
            // when receive a message, sleep a little while;
            // so the main thread has a chance to stop recording immediately
            Thread.sleep(200);
          } catch (InterruptedException e) {
            continue;
          }
        }
      }
    }
    // Tear down the recorder; stop()/release() can throw if it never
    // started, or NPE if the loop exited because recorder became null —
    // both are swallowed here by design.
    try {
      recorder.stop();
      recorder.release();
    } catch (Exception e) {
      e.printStackTrace();
    }
    recorder = null;
    Modulation.releaseDecoder();
  }
コード例 #14
0
 /**
  * Pumps microphone samples into the recognizer while the interface is in
  * the RUNNING state, updating audio_state to reflect read success/failure.
  */
 @Override
 public void run() {
   while (state == AcommsInterface.RUNNING) {
     int size = audioRecord.read(readBuffer, 0, readBuffer.length);
     // read() reports errors as negative codes — not only
     // ERROR_INVALID_OPERATION. The original allocated new short[size] for
     // the other negative codes and crashed with NegativeArraySizeException.
     if (size >= 0) {
       audio_state = AcommsInterface.RUNNING;
       short[] datas = new short[size];
       System.arraycopy(readBuffer, 0, datas, 0, size);
       acomms.writeRecog(datas);
     } else {
       audio_state = AcommsInterface.INITIALLING_FAILE;
     }
   }
 }
コード例 #15
0
ファイル: AudioSender.java プロジェクト: cwt8805/MonitorS09
 /**
  * Capture/encode loop: reads PCM from the recorder, runs echo cancellation,
  * iLBC-encodes the result, and appends it to the current output file.
  * Rolls over to a new file whenever newFileFLag is raised, and closes the
  * file when recording ends.
  */
 private void StartSendAudio() {
   // 8 kHz 16-bit capture; iLBC 20 ms encoder/decoder (iLbcEncSize = 320 bytes).
   int bytesRecord = 0, bytesEncoded = 0;
   long t_capture = 0;
   long t_process = 0;
   long delay = 0;
   short[] ecBuffer = new short[iLbcEncSize / 2];
   short[] outBuffer = new short[iLbcEncSize / 2];
   byte[] pcmOut = new byte[iLbcEncSize];
   byte[] samples = new byte[iLbcEncSize];
   byte[] data = new byte[iLbcDecsize];
   while (isRecording) {
     // Roll over to a new output file when requested.
     if (newFileFLag) {
       newFileFLag = false;
       try {
         braf = new RandomAccessFile(soundFNameString, "rw");
       } catch (FileNotFoundException e) {
         e.printStackTrace();
       }
     }
     // No open file yet: do nothing but keep the loop alive.
     if (braf == null) continue;
     t_capture = (new Date()).getTime();
     bytesRecord = audioRecord.read(samples, 0, iLbcEncSize);
     // Reinterpret the little-endian PCM bytes as 16-bit samples.
     ByteBuffer.wrap(samples).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(ecBuffer);
     t_process = (new Date()).getTime();
     // Echo-cancellation delay = local processing time + receive-path delay.
     delay = (t_process - t_capture) + m_aecm.recvDelay;
     try {
       m_aecm.echoCancellation(
           ecBuffer, null, outBuffer, (short) (iLbcEncSize / 2), (short) (delay + 40));
     } catch (Exception e1) {
       e1.printStackTrace();
     }
     if (AudioRecord.ERROR_INVALID_OPERATION != bytesRecord) {
       // Pack the echo-cancelled samples back into bytes, encode, append.
       ByteBuffer.wrap(pcmOut).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().put(outBuffer);
       bytesEncoded = AudioCodec.iLbcencode(pcmOut, 0, bytesRecord, data, 0);
       try {
         braf.write(data, 0, bytesEncoded);
       } catch (IOException e) {
         e.printStackTrace();
       }
     }
     AudioCodec.iMVRSleepMS(10);
   }
   // Close the file before dropping the reference — the original leaked the
   // file descriptor by only nulling the field.
   if (braf != null) {
     try {
       braf.close();
     } catch (IOException e) {
       e.printStackTrace();
     }
   }
   braf = null;
   soundFNameString = null;
   newFileFLag = false;
 }
コード例 #16
0
  /**
   * Pitch-detection loop: reads PCM from the microphone, counts zero
   * crossings per buffer, and derives the dominant frequency until
   * 'recording' is cleared. Stops and releases the recorder on exit.
   */
  @Override
  public void run() {
    // PCM samples from the microphone.
    short data[];

    // Capture buffer size.
    int bufferSize;

    // Number of sign changes in the current buffer.
    int zeroCrossings;

    bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, ENCODING);
    recorder =
        new AudioRecord(
            AudioSource.MIC, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, ENCODING, bufferSize);

    data = new short[bufferSize];

    recording = true;

    while (recording) {
      // Only use the recorder once it initialized successfully.
      if (recorder.getState() == android.media.AudioRecord.STATE_INITIALIZED) {
        // Start capture if it is not already running.
        if (recorder.getRecordingState() == android.media.AudioRecord.RECORDSTATE_STOPPED)
          recorder.startRecording();
        // Capture is running: process one buffer.
        else {
          recorder.read(data, 0, bufferSize);

          zeroCrossings = 0;

          // Count sign changes to estimate the dominant frequency.
          for (int i = 0; i < bufferSize - 1; i++) {
            if (data[i] > 0 && data[i + 1] <= 0) zeroCrossings++;
            if (data[i] < 0 && data[i + 1] >= 0) zeroCrossings++;
          }

          frequency = SAMPLE_RATE * zeroCrossings / bufferSize;
        }
      }
    }
    // Stop capture. The original compared getState() against
    // RECORDSTATE_RECORDING — a constant from the getRecordingState() family
    // — so the condition was never true and stop() never ran.
    if (recorder.getRecordingState() == android.media.AudioRecord.RECORDSTATE_RECORDING)
      recorder.stop();

    recorder.release();
    recorder = null;
  }
コード例 #17
0
 /** Starts the recording, and sets the state to RECORDING. Call after prepare(). */
 public void start() {
   // Guard clause: anything but READY is an illegal state for start().
   if (state != State.READY) {
     Log.e(ExtAudioRecorder.class.getName(), "start() called on illegal state");
     state = State.ERROR;
     return;
   }
   if (rUncompressed) {
     payloadSize = 0;
     audioRecorder.startRecording();
     // One read whose result is discarded — drains whatever the audio
     // hardware already buffered before this call.
     audioRecorder.read(buffer, 0, buffer.length);
   } else {
     mediaRecorder.start();
   }
   state = State.RECORDING;
 }
コード例 #18
0
 /** Starts the recording, and sets the state to RECORDING. Call after prepare(). */
 public void start() {
   // Guard clause: anything but READY is an illegal state for start().
   if (state != State.READY) {
     Log.e(WavAudioRecorder.class.getName(), "start() called on illegal state");
     state = State.ERROR;
     return;
   }
   payloadSize = 0;
   audioRecorder.startRecording();
   // One read whose result is discarded — flushes any data the audio
   // hardware buffered before this call. (Original TODO: confirm whether
   // this is actually necessary.)
   audioRecorder.read(buffer, 0, buffer.length);
   state = State.RECORDING;
 }
コード例 #19
0
    /**
     * Reads media data from this <tt>PullBufferStream</tt> into a specific <tt>Buffer</tt> with
     * blocking.
     *
     * @param buffer the <tt>Buffer</tt> in which media data is to be read from this
     *     <tt>PullBufferStream</tt>
     * @throws IOException if anything goes wrong while reading media data from this
     *     <tt>PullBufferStream</tt> into the specified <tt>buffer</tt>
     * @see javax.media.protocol.PullBufferStream#read(javax.media.Buffer)
     */
    public void read(Buffer buffer) throws IOException {
      // Raise the thread priority once, on the first read this thread performs.
      if (setThreadPriority) {
        setThreadPriority = false;
        setThreadPriority();
      }

      // Reuse the Buffer's existing byte[] when it is large enough;
      // otherwise allocate a fresh one and install it on the Buffer.
      Object data = buffer.getData();
      int length = this.length;

      if (data instanceof byte[]) {
        if (((byte[]) data).length < length) data = null;
      } else data = null;
      if (data == null) {
        data = new byte[length];
        buffer.setData(data);
      }

      int toRead = length;
      byte[] bytes = (byte[]) data;
      int offset = 0;

      buffer.setLength(0);
      // Block until a full 'length' bytes are read, recording stops, or the
      // recorder reports an error. Partial reads accumulate via offset.
      while (toRead > 0) {
        int read;

        // Synchronized so the recording state cannot change mid-check/read.
        synchronized (this) {
          if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING)
            read = audioRecord.read(bytes, offset, toRead);
          else break;
        }

        if (read < 0) {
          // Negative return is an AudioRecord error code — surface it.
          throw new IOException(
              AudioRecord.class.getName() + "#read(byte[], int, int) returned " + read);
        } else {
          buffer.setLength(buffer.getLength() + read);
          offset += read;
          toRead -= read;
        }
      }
      buffer.setOffset(0);

      // Apply software gain.
      if (gainControl != null) {
        BasicVolumeControl.applyGain(gainControl, bytes, buffer.getOffset(), buffer.getLength());
      }
    }
コード例 #20
0
  /**
   * Listens for an acknowledgement after playback: demodulates microphone
   * input while mIsPlaying is set, notifying mHandler whenever a complete
   * message is decoded, then releases the recorder and the decoder.
   */
  private void listenAck() {
    Modulation.initDecoder(); // for record
    Modulation.setListenMode('p');

    // mRecordBufSize is in bytes; the short[] holds half as many samples.
    mRecordData = new short[mRecordBufSize / 2];

    while (!Thread.interrupted() && mIsPlaying) {

      int readBytes = mRecorder.read(mRecordData, 0, mRecordBufSize / 2);

      if (readBytes > 0) {
        // ret == 2 signals that a complete message has been demodulated.
        int ret = Modulation.process(mRecordData, readBytes);
        if (ret == 2) {
          String str = "";
          byte[] result = Modulation.getResult();
          try {
            str = new String(result, "UTF-8");

            Log.d(TAG, "recorder ProcessData GetResult : " + str);
          } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
          }

          // Notify the handler that an ACK was received.
          Message msg = mHandler.obtainMessage();
          msg.what = Modulation.MODULATION_HANDLER_RECV_NOTIFY_ACK;
          mHandler.sendMessage(msg);
          // break;
        }
      } else {
        // readBytes <= 0 is an empty read or an AudioRecord error code.
        Log.d(TAG, "recorder read ret : " + readBytes);
      }
    }

    // Tear down the recorder; stop() can throw if capture never started.
    try {
      if (mRecorder != null) {
        mRecorder.stop();
        mRecorder.release();
        mRecorder = null;
      }
    } catch (Exception e) {
      e.printStackTrace();
    }

    Modulation.setListenMode('r');
    Modulation.releaseDecoder();
  }
コード例 #21
0
ファイル: Client.java プロジェクト: uttp/SocketTcp
 /**
  * Capture loop: streams raw microphone bytes to the socket output stream
  * until isRecording is cleared.
  */
 public void run() {
   init();
   Log.i("c", "2");
   audioRecord.startRecording();
   while (isRecording) {
     Log.i("c", "3");
     int readLen = audioRecord.read(bufSend, 0, bufferSizeInBytes);
     // read() returns a negative error code on failure; passing that to
     // write() throws IndexOutOfBoundsException (the original never checked).
     if (readLen <= 0) {
       continue;
     }
     try {
       sout.write(bufSend, 0, readLen);
       sout.flush();
     } catch (IOException e) {
       e.printStackTrace();
     }
   }
   audioRecord.stop();
 }
コード例 #22
0
ファイル: VoiceRecorder.java プロジェクト: yonosuke/daily
 /**
  * Records until recordFlag is cleared or MAX_RECORD_TIME elapses, appending
  * captured PCM bytes to byteStream.
  */
 @Override
 public void run() {
   startRecording();
   startRecordTime = System.currentTimeMillis();
   byteStream = new ByteArrayOutputStream();
   byte[] buffer = new byte[bufferSize];
   while (recordFlag && System.currentTimeMillis() - startRecordTime < MAX_RECORD_TIME) {
     try {
       Thread.sleep(1);
       // Append only the bytes actually read; the original ignored the
       // read() result and appended the whole buffer, duplicating stale
       // data on short reads (and negative error codes were never handled).
       int read = audioRecord.read(buffer, 0, buffer.length);
       if (read > 0) {
         byteStream.write(buffer, 0, read);
       }
     } catch (InterruptedException e) {
       // Preserve the interrupt flag for the caller.
       Thread.currentThread().interrupt();
       e.printStackTrace();
     }
   }
   stopRecording();
 }
コード例 #23
0
  /**
   * Trigger-and-record loop: waits for a sample to reach AMPLITUDE_DETECTOR,
   * then copies REC_LENGTH_S seconds of audio into mRecordedData and hands it
   * to processRecordedData(), repeating while mIsListening is set.
   */
  private void startListening() {

    // Total number of samples in one recording window.
    int recordedDataSize = (int) (SAMPLE_FREQ * REC_LENGTH_S);
    fft = new FloatFFT_1D(recordedDataSize);
    mRecordedData = new short[recordedDataSize];

    int bufferSize =
        AudioRecord.getMinBufferSize(
            SAMPLE_FREQ, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

    short[] readAudioData = new short[bufferSize];

    AudioRecord audioRecord =
        new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            SAMPLE_FREQ,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize);

    audioRecord.startRecording();

    // Write cursor into mRecordedData for the current window.
    int indexInRecordedData = 0;
    while (mIsListening) {
      int readDataSize = audioRecord.read(readAudioData, 0, bufferSize);
      for (int i = 0; i < readDataSize; i++) {

        // Start a recording window on the first loud sample, unless a
        // previous window is still being processed.
        if (readAudioData[i] >= AMPLITUDE_DETECTOR && !mIsProcessing) mIsRecording = true;

        if (mIsRecording == true && indexInRecordedData < recordedDataSize) {
          mRecordedData[indexInRecordedData++] = readAudioData[i];
        } else if (indexInRecordedData >= recordedDataSize) {
          // Window full: reset the trigger and hand off the captured data.
          mIsRecording = false;
          indexInRecordedData = 0;
          processRecordedData();
        }
      }
    }

    audioRecord.stop();
    audioRecord.release();
  }
コード例 #24
0
ファイル: WaveRecorder.java プロジェクト: ajburma/Applied-DSP
  /**
   * Capture thread body: (re)starts the recorder, then pushes fixed-size
   * frames into the output queue until recording stops, finishing with a
   * STOP marker for downstream consumers.
   */
  public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

    try {
      if (recorder != null) {
        // NOTE(review): stop() before startRecording() presumably resets a
        // recorder left running by a previous session — confirm intent.
        recorder.stop();
        recorder.startRecording();
        isRecording.set(true);
      }
    } catch (IllegalStateException e) {
      isRecording.set(false);
    }

    try {
      loop:
      while (true) {
        if (isRecording.get()) {
          // Fresh array per frame so each WaveFrame owns its own buffer.
          abuf_short = new short[Settings.blockSize];
          recorder.read(abuf_short, 0, Settings.blockSize);
          output.put(new WaveFrame(abuf_short, currentFrame));
          currentFrame++;
        } else {
          // Signal consumers that no more frames are coming.
          output.put(Settings.STOP);
          break loop;
        }
      }
    } catch (InterruptedException e) {
      // put() was interrupted: restore the flag and fall through to cleanup.
      Thread.currentThread().interrupt();
      e.printStackTrace();
    }

    try {
      if (recorder != null) {
        recorder.stop();
        recorder.release();
        recorder = null;
      }
    } catch (IllegalStateException e) {
      e.printStackTrace();
    }
  }
コード例 #25
0
  /**
   * Capture loop with a two-buffer delay queue: each captured buffer is
   * queued, and once at least two are queued the oldest is written to dout.
   */
  public void run() {
    try {
      byte[] bytes_pkg;
      m_in_rec.startRecording();
      while (m_keep_running) {
        m_in_rec.read(m_in_bytes, 0, m_in_buf_size);
        bytes_pkg = m_in_bytes.clone();
        if (m_in_q.size() >= 2) {
          // The original called removeFirst() twice inside one write()
          // statement, silently discarding every second buffer; dequeue
          // once and use that buffer's own length.
          byte[] oldest = m_in_q.removeFirst();
          dout.write(oldest, 0, oldest.length);
        }
        m_in_q.add(bytes_pkg);
      }
      m_in_rec.stop();
      m_in_rec = null;
      m_in_bytes = null;
      dout.close();

    } catch (Exception e) {
      e.printStackTrace();
    }
  }
コード例 #26
0
 // periodic updates on the progress of the record head
 public void onPeriodicNotification(AudioRecord recorder) {
   if (State.STOPPED == state) {
     Log.d(WavAudioRecorder.this.getClass().getName(), "recorder stopped");
     return;
   }
   // Drain the recorder; read() returns the byte count actually delivered,
   // or a negative error code.
   int numOfBytes = audioRecorder.read(buffer, 0, buffer.length);
   if (numOfBytes <= 0) {
     return;
   }
   try {
     // Persist only the bytes actually read — the original always wrote the
     // full buffer, padding the WAV payload with stale data on short reads
     // and overstating payloadSize.
     randomAccessWriter.write(buffer, 0, numOfBytes);
     payloadSize += numOfBytes;
   } catch (IOException e) {
     Log.e(
         WavAudioRecorder.class.getName(),
         "Error occured in updateListener, recording is aborted");
     e.printStackTrace();
   }
 }
コード例 #27
0
ファイル: TunerEngine.java プロジェクト: konecnyna/RickGPS
  /**
   * FFT tuner loop: reads audio chunks, extracts the current pitch, and on a
   * detection posts the callback and briefly pauses capture. Exits when the
   * line reports no more data.
   */
  public void run() { // fft

    targetDataLine_.startRecording();
    bufferRead = new byte[READ_BUFFERSIZE];
    int n = -1;
    while ((n = targetDataLine_.read(bufferRead, 0, READ_BUFFERSIZE)) > 0) {
      long start = System.currentTimeMillis();
      currentFrequency = processSampleData(bufferRead, SAMPLE_RATE);
      // Elapsed processing time. The original computed
      // currentTimeMillis() - 1, which logged a raw timestamp instead of a
      // duration (the intended 'l' start variable was commented out).
      double temp = System.currentTimeMillis() - start;
      Log.d("processtime", "process time  = " + temp);
      if (currentFrequency > 0) {
        // Pitch detected: notify the UI, then pause capture briefly.
        mHandler.post(callback);
        try {
          targetDataLine_.stop();
          Thread.sleep(20);
          targetDataLine_.startRecording();
        } catch (InterruptedException e) {
          // Preserve the interrupt flag for the caller.
          Thread.currentThread().interrupt();
        }
      }
    }
  }
コード例 #28
0
ファイル: Blackout.java プロジェクト: Cpotr/HowTu
  /** Functionality that gets the sound level out of the sample */
  private void readAudioBuffer() {

    try {
      short[] buffer = new short[bufferSize];

      if (audio != null) {

        // Sense the voice...
        int bufferReadResult = audio.read(buffer, 0, bufferSize);
        // read() can return 0 or a negative error code; the original then
        // divided by it (division by zero or a meaningless average) and
        // averaged the whole stale buffer.
        if (bufferReadResult <= 0) {
          return;
        }
        double sumLevel = 0;
        for (int i = 0; i < bufferReadResult; i++) {
          sumLevel += buffer[i];
        }
        lastLevel = Math.abs((sumLevel / bufferReadResult));
      }

    } catch (Exception e) {
      e.printStackTrace();
    }
  }
コード例 #29
0
    /**
     * Background FFT task: captures blocks of samples, normalizes them to
     * [-1, 1), Fourier-transforms each block, and publishes the result until
     * the task is cancelled. Always stops and releases the recorder.
     */
    @Override
    protected Void doInBackground(Void... params) {
      try {
        int bufferSize =
            AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);

        AudioRecord audioRecord =
            new AudioRecord(
                MediaRecorder.AudioSource.MIC,
                frequency,
                channelConfiguration,
                audioEncoding,
                bufferSize);

        short[] buffer = new short[blockSize];
        double[] toTransform = new double[blockSize];

        audioRecord.startRecording();

        try {
          while (recordTask != null) {
            int bufferReadResult = audioRecord.read(buffer, 0, blockSize);

            // Normalize the signed 16-bit samples actually read.
            for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
              toTransform[i] = buffer[i] / 32768.0; // signed
              // 16bit
            }

            transformer.ft(toTransform);
            publishProgress(toTransform);
          }
        } finally {
          audioRecord.stop();
          // Free the native recorder resources — the original never
          // called release().
          audioRecord.release();
        }
      } catch (Throwable t) {
        Log.e("AudioRecord", "Recording Failed");
      }

      return null;
    }
コード例 #30
0
  /**
   * Reads one frame from the recorder into {@code buffer} and returns it, or
   * null when the frame's average absolute amplitude is below the silence
   * threshold.
   */
  public byte[] getFrameBytes() {
    audioRecord.read(buffer, 0, frameByteSize);

    // Analyze the sound level of the frame.
    int totalAbsValue = 0;
    float averageAbsValue = 0.0f;

    for (int i = 0; i < frameByteSize; i += 2) {
      // Reassemble the little-endian 16-bit sample. The low byte must be
      // masked with 0xff: the original let buffer[i] sign-extend, which
      // corrupted the high byte whenever the low byte was >= 0x80.
      short sample = (short) ((buffer[i] & 0xff) | (buffer[i + 1] << 8));
      totalAbsValue += Math.abs(sample);
    }
    averageAbsValue = totalAbsValue / frameByteSize / 2;

    // Treat frames below the threshold as silence: no input.
    if (averageAbsValue < 30) {
      return null;
    }

    return buffer;
  }