Example #1
  public void pauseAudio() {
    if (mAudioTrack == null) return;

    // Pause playback and discard any queued-but-unplayed data.
    mAudioTrack.pause();
    mAudioTrack.flush();
    // Stop the periodic task from asking the native side for more audio.
    reqThreadrunning = false;
  }
Example #2
  public void writeAudio(ByteBuffer audioData, int offset, int len) {
    if (mAudioTrack == null) return;
    if (!reqThreadrunning) return;

    // Copy the requested slice into the staging array allocated by initAudio().
    audioData.position(offset);
    audioData.get(mAudioData, 0, len);
    // Every 128th write, drop anything still queued before writing the new block.
    if (sync++ % 128 == 0) mAudioTrack.flush();
    mAudioTrack.write(mAudioData, 0, len);
  }
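
writeAudio() copies the requested slice of the incoming ByteBuffer into the mAudioData staging array allocated in initAudio() (Example #4), so len must not exceed that array's length. As a rough illustration of a caller, where every name that does not appear in the examples is an assumption:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Hypothetical caller: hands one block of interleaved 16-bit stereo PCM to writeAudio().
// QuakeGVRAudioHandler is an assumed name for the class that owns these example methods.
final class PcmFeeder {
  static void pushPcm(QuakeGVRAudioHandler audio, byte[] pcm) {
    ByteBuffer buf = ByteBuffer.allocateDirect(pcm.length).order(ByteOrder.nativeOrder());
    buf.put(pcm);
    // writeAudio() resets the buffer position itself, so only offset and length matter here.
    audio.writeAudio(buf, 0, pcm.length);
  }
}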
Example #3
  public void terminateAudio() {
    if (mAudioTrack != null) {
      // Discard queued data and free the underlying native resources.
      mAudioTrack.flush();
      mAudioTrack.release();
      mAudioTrack = null;
    }

    reqThreadrunning = false;

    // Shut down the executor that drives GLES3JNILib.requestAudioData().
    if (stpe != null) {
      stpe.shutdown();
      stpe = null;
    }
  }
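
Note that the scheduled task from Example #4 may still be invoking GLES3JNILib.requestAudioData() while terminateAudio() runs. A stricter variant (an assumption, not part of the original code) would stop requests and wait for the executor before releasing the track:

  // Sketch of a defensive shutdown, assuming it lives in the same class as the examples
  // (so it can use the same fields and the existing TimeUnit import).
  public void terminateAudioSafely() throws InterruptedException {
    reqThreadrunning = false;            // stop new requestAudioData() calls
    if (stpe != null) {
      stpe.shutdown();
      stpe.awaitTermination(1, TimeUnit.SECONDS);
      stpe = null;
    }
    if (mAudioTrack != null) {
      mAudioTrack.pause();
      mAudioTrack.flush();
      mAudioTrack.release();
      mAudioTrack = null;
    }
  }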
Example #4
  public void initAudio(int size) {
    if (mAudioTrack != null) return;
    size /= 8;

    // Staging buffer reused by writeAudio().
    mAudioData = new byte[size];
    int sampleFreq = 44100;

    // Never allocate less than the platform's minimum streaming buffer.
    int bufferSize =
        Math.max(
            size,
            AudioTrack.getMinBufferSize(
                sampleFreq,
                AudioFormat.CHANNEL_OUT_STEREO,
                AudioFormat.ENCODING_PCM_16BIT));
    mAudioTrack =
        new QuakeGVRAudioTrack(
            AudioManager.STREAM_MUSIC,
            sampleFreq,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize,
            AudioTrack.MODE_STREAM);
    mAudioTrack.play();

    // Duration of one buffer in nanoseconds:
    // size bytes / (2 channels * 2 bytes per sample * sampleFreq frames per second).
    long sleeptime = (size * 1000000000L) / (2 * 2 * sampleFreq);
    stpe = new ScheduledThreadPoolExecutor(5);
    // Ask the native layer for a fresh block of audio once per buffer period.
    stpe.scheduleAtFixedRate(
        new Runnable() {
          @Override
          public void run() {
            if (reqThreadrunning) {
              GLES3JNILib.requestAudioData();
            }
          }
        },
        0,
        sleeptime,
        TimeUnit.NANOSECONDS);
  }
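
For reference, the methods in these examples share a handful of fields. A minimal sketch of the surrounding declarations follows; only the field names come from the examples, while the class name, modifiers, and exact types are inferred from usage:

import java.nio.ByteBuffer;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

// Class name and field modifiers are assumptions, not taken from the project.
public class QuakeGVRAudioHandler {
  private QuakeGVRAudioTrack mAudioTrack;      // AudioTrack subclass created in initAudio()
  private byte[] mAudioData;                   // staging buffer filled by writeAudio()
  private volatile boolean reqThreadrunning;   // gates the periodic requestAudioData() calls
  private int sync;                            // write counter driving the periodic flush
  private ScheduledThreadPoolExecutor stpe;    // runs the audio-request task

  // initAudio(), writeAudio(), pauseAudio(), resumeAudio(), terminateAudio() as shown above.
}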
Example #5
  public void resumeAudio() {
    if (mAudioTrack == null) return;

    // Restart playback and let the periodic task request audio data again.
    mAudioTrack.play();
    reqThreadrunning = true;
  }
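
Taken together, the five methods map naturally onto the Android Activity lifecycle. A rough wiring sketch, where the Activity name, the field name, and the initAudio() argument are all illustrative assumptions:

import android.app.Activity;
import android.os.Bundle;

public class GameActivity extends Activity {
  private QuakeGVRAudioHandler mAudio;   // owner of the example methods (name assumed)

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    mAudio = new QuakeGVRAudioHandler();
    mAudio.initAudio(32768);             // argument is illustrative only
  }

  @Override
  protected void onResume() {
    super.onResume();
    mAudio.resumeAudio();
  }

  @Override
  protected void onPause() {
    mAudio.pauseAudio();
    super.onPause();
  }

  @Override
  protected void onDestroy() {
    mAudio.terminateAudio();
    super.onDestroy();
  }
}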