Example #1
  /** Signals that a PooledThread has started. Creates the Thread's line and buffer. */
  protected void threadStarted() {
    // wait for the SoundManager constructor to finish
    synchronized (this) {
      try {
        wait();
      } catch (InterruptedException ex) {
        // ignore; continue with line creation even if the wait is interrupted
      }
    }

    // use a short, 100ms (1/10th sec) buffer for filters that
    // change in real-time
    int bufferSize =
        playbackFormat.getFrameSize() * Math.round(playbackFormat.getSampleRate() / 10);

    // create, open, and start the line
    SourceDataLine line;
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
    try {
      line = (SourceDataLine) AudioSystem.getLine(lineInfo);
      line.open(playbackFormat, bufferSize);
    } catch (LineUnavailableException ex) {
      // the line is unavailable - signal to end this thread
      Thread.currentThread().interrupt();
      return;
    }

    line.start();

    // create the buffer
    byte[] buffer = new byte[bufferSize];

    // set this thread's locals
    localLine.set(line);
    localBuffer.set(buffer);
  }
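The 100 ms figure in the comment above follows directly from frameSize * round(sampleRate / 10). A minimal sketch of that arithmetic, assuming a common 44.1 kHz, 16-bit stereo playback format (the format itself is an assumption, not taken from the example):

import javax.sound.sampled.AudioFormat;

public class BufferSizeSketch {
  public static void main(String[] args) {
    // assumed format: 44.1 kHz, 16-bit samples, 2 channels, signed, little-endian
    AudioFormat playbackFormat = new AudioFormat(44100f, 16, 2, true, false);
    // frame size = 2 channels * 2 bytes = 4 bytes; sampleRate / 10 = 4410 frames (~100 ms)
    int bufferSize =
        playbackFormat.getFrameSize() * Math.round(playbackFormat.getSampleRate() / 10);
    System.out.println(bufferSize + " bytes per 100 ms"); // prints 17640
  }
}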
Example #2
 public void play(InputStream source) {
   int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10);
   byte[] buffer = new byte[bufferSize];
   SourceDataLine line;
   try {
     DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
     line = (SourceDataLine) AudioSystem.getLine(info);
     line.open(format, bufferSize);
   } catch (LineUnavailableException e) {
     e.printStackTrace();
     return;
   }
   line.start();
   try {
     int numBytesRead = 0;
     while (numBytesRead != -1) {
       numBytesRead = source.read(buffer, 0, buffer.length);
       if (numBytesRead != -1) line.write(buffer, 0, numBytesRead);
     }
   } catch (IOException e) {
     e.printStackTrace();
   }
   line.drain();
   line.close();
 }
Example #3
  public static Clip loadClip(URL url) {
    Clip clip = null;
    String fnm = "" + url;
    try {
      AudioInputStream stream = AudioSystem.getAudioInputStream(url);
      AudioFormat format = stream.getFormat();

      if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
          || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
        AudioFormat newFormat =
            new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),
                format.getSampleSizeInBits() * 2,
                format.getChannels(),
                format.getFrameSize() * 2,
                format.getFrameRate(),
                true); // big endian
        stream = AudioSystem.getAudioInputStream(newFormat, stream);
        // System.out.println("Converted Audio format: " + newFormat);
        format = newFormat;
      }

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      // make sure sound system supports data line
      if (!AudioSystem.isLineSupported(info)) {
        System.out.println("Unsupported Clip File: " + fnm);
        return null;
      }
      // get clip line resource
      clip = (Clip) AudioSystem.getLine(info);
      clip.open(stream); // open the sound file as a clip
      stream.close(); // we're done with the input stream
      // duration (in secs) of the clip
      double duration = clip.getMicrosecondLength() / 1000000.0; // new
      if (duration <= 1.0) {
        System.out.println("WARNING. Duration <= 1 sec : " + duration + " secs");
        System.out.println(
            "         The clip in " + fnm + " may not play in J2SE 1.5 -- make it longer");
      }
      // else
      //  System.out.println(fnm + ": Duration: " + duration + " secs");
    } // end of try block
    catch (UnsupportedAudioFileException audioException) {
      System.out.println("Unsupported audio file: " + fnm);
    } catch (LineUnavailableException noLineException) {
      System.out.println("No audio line available for : " + fnm);
    } catch (IOException ioException) {
      System.out.println("Could not read: " + fnm);
    } catch (Exception e) {
      System.out.println("Problem with " + fnm);
    }
    return clip;
  } // end of loadClip()
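A hedged usage sketch for loadClip(): this hypothetical main() is assumed to sit in the same class, so that loadClip() and the usual java.io, java.net, and javax.sound.sampled imports are in scope, and "beep.wav" is an assumed file name. Clip.start() returns immediately, so the sketch sleeps for the clip's length before closing it.

  public static void main(String[] args) throws Exception {
    URL url = new File("beep.wav").toURI().toURL(); // assumed file name
    Clip clip = loadClip(url);
    if (clip != null) {
      clip.start();                                     // start() returns at once; playback is asynchronous
      Thread.sleep(clip.getMicrosecondLength() / 1000); // microseconds -> milliseconds
      clip.close();
    }
  }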
Example #4
 public byte[] getSamples(AudioInputStream stream) {
   int length = (int) (stream.getFrameLength() * format.getFrameSize());
   byte[] samples = new byte[length];
   DataInputStream in = new DataInputStream(stream);
   try {
     in.readFully(samples);
   } catch (IOException e) {
     e.printStackTrace();
   }
   return samples;
 }
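One caveat about the length computation above: AudioInputStream.getFrameLength() returns AudioSystem.NOT_SPECIFIED (-1) for streams of unknown length (for example, one wrapped around a TargetDataLine), which would make length negative and the allocation fail. A small guard sketch, assuming the same stream and format variables as in the example and the javax.sound.sampled.AudioSystem import:

   long frames = stream.getFrameLength();
   if (frames == AudioSystem.NOT_SPECIFIED) {
     throw new IllegalArgumentException("stream length unknown; read it incrementally instead");
   }
   int length = (int) (frames * format.getFrameSize());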
Example #5
  // get the audio samples
  private byte[] getAudioSamples(AudioInputStream MusicStream, AudioFormat format) {
    int AudioSampleLengh = (int) (MusicStream.getFrameLength() * format.getFrameSize());
    byte[] aAudioSamples = new byte[AudioSampleLengh];
    DataInputStream dataInputStream = new DataInputStream(MusicStream);

    try {
      dataInputStream.readFully(aAudioSamples);
    } catch (Exception e) {
      e.printStackTrace();
    }

    return aAudioSamples;
  }
Example #6
  /**
   * Constructs an audio input stream that reads its data from the target data line indicated. The
   * format of the stream is the same as that of the target data line, and the length is
   * AudioSystem#NOT_SPECIFIED.
   *
   * @param line the target data line from which this stream obtains its data.
   * @see AudioSystem#NOT_SPECIFIED
   */
  public AudioInputStream(TargetDataLine line) {

    TargetDataLineInputStream tstream = new TargetDataLineInputStream(line);
    format = line.getFormat();
    frameLength = AudioSystem.NOT_SPECIFIED;
    frameSize = format.getFrameSize();

    if (frameSize == AudioSystem.NOT_SPECIFIED || frameSize <= 0) {
      frameSize = 1;
    }
    this.stream = tstream;
    framePos = 0;
    markpos = 0;
  }
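Because the stream built by this constructor has no defined length, it is typically used to funnel a capture line into AudioSystem.write(). A minimal recording sketch, assuming a mono 44.1 kHz capture format and an output file name; AudioSystem.write() blocks until the line is stopped and closed (for example from another thread or a shutdown hook):

import java.io.File;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.TargetDataLine;

public class RecordSketch {
  public static void main(String[] args) throws Exception {
    AudioFormat format = new AudioFormat(44100f, 16, 1, true, false); // assumed capture format
    TargetDataLine line = AudioSystem.getTargetDataLine(format);
    line.open(format);
    line.start();
    // frameLength is AudioSystem.NOT_SPECIFIED, so write() records until the line is closed
    AudioInputStream in = new AudioInputStream(line);
    AudioSystem.write(in, AudioFileFormat.Type.WAVE, new File("capture.wav"));
  }
}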
Example #7
 private void soundAbspielen(File sound) {
   if (!läuft) return;
   try {
     audioInputStream = AudioSystem.getAudioInputStream(sound);
     af = audioInputStream.getFormat();
     size = (int) (af.getFrameSize() * audioInputStream.getFrameLength());
     audio = new byte[size];
     info = new DataLine.Info(Clip.class, af, size);
     // read() may return fewer bytes than requested, so loop until the whole clip is buffered
     int offset = 0;
     while (offset < size) {
       int bytesRead = audioInputStream.read(audio, offset, size - offset);
       if (bytesRead == -1) break;
       offset += bytesRead;
     }
     clip = (Clip) AudioSystem.getLine(info);
     clip.open(af, audio, 0, size);
     clip.start();
   } catch (Exception e) {
     e.printStackTrace();
   }
 }
Example #8
  public boolean load(File file) {

    this.file = file;

    if (file != null && file.isFile()) {
      try {
        errStr = null;
        audioInputStream = AudioSystem.getAudioInputStream(file);

        fileName = file.getName();

        format = audioInputStream.getFormat();

      } catch (Exception ex) {
        reportStatus(ex.toString());
        return false;
      }
    } else {
      reportStatus("Audio file required.");
      return false;
    }

    numChannels = format.getChannels();
    sampleRate = (double) format.getSampleRate();
    sampleBitSize = format.getSampleSizeInBits();
    long frameLength = audioInputStream.getFrameLength();
    long milliseconds = (long) ((frameLength * 1000) / audioInputStream.getFormat().getFrameRate());
    double audioFileDuration = milliseconds / 1000.0;

    if (audioFileDuration > MAX_AUDIO_DURATION) duration = MAX_AUDIO_DURATION;
    else duration = audioFileDuration;

    frameLength = (long) Math.floor((duration / audioFileDuration) * (double) frameLength);

    try {
      audioBytes = new byte[(int) frameLength * format.getFrameSize()];
      // read() may return fewer bytes than requested, so loop until the buffer is full
      int offset = 0;
      while (offset < audioBytes.length) {
        int n = audioInputStream.read(audioBytes, offset, audioBytes.length - offset);
        if (n == -1) break;
        offset += n;
      }
    } catch (Exception ex) {
      reportStatus(ex.toString());
      return false;
    }

    getAudioData();

    return true;
  }
Example #9
  /**
   * Constructs an audio input stream that has the requested format and length in sample frames,
   * using audio data from the specified input stream.
   *
   * @param stream the stream on which this <code>AudioInputStream</code> object is based
   * @param format the format of this stream's audio data
   * @param length the length in sample frames of the data in this stream
   */
  public AudioInputStream(InputStream stream, AudioFormat format, long length) {

    super();

    this.format = format;
    this.frameLength = length;
    this.frameSize = format.getFrameSize();

    // any frameSize that is not well-defined causes this stream to be read in bytes
    if (this.frameSize == AudioSystem.NOT_SPECIFIED || frameSize <= 0) {
      this.frameSize = 1;
    }

    this.stream = stream;
    framePos = 0;
    markpos = 0;
  }
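This constructor is the usual way to turn a raw sample array (like the one returned by getSamples() in Example #4) back into a stream; note that the length argument is counted in frames, not bytes. A small sketch, assuming the byte array really holds whole frames in the given format:

import java.io.ByteArrayInputStream;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;

public class WrapSamplesSketch {
  // wraps a raw PCM byte array back into an AudioInputStream
  static AudioInputStream wrap(byte[] samples, AudioFormat format) {
    long frames = samples.length / format.getFrameSize(); // length is given in frames, not bytes
    return new AudioInputStream(new ByteArrayInputStream(samples), format, frames);
  }
}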
Example #10
  // Plays an au, aiff, or wav audio stream; this method is essentially the code from the original post
  private synchronized void play() {
    ByteArrayInputStream aMusicInputStream;
    AudioFormat format;
    AudioInputStream musicInputStream;
    byte[] audioSamples;
    SourceDataLine line;
    try {
      File MusicFile = new File(m_filename);

      musicInputStream = AudioSystem.getAudioInputStream(MusicFile); // get the file's audio input stream
      format = musicInputStream.getFormat(); // get the format of the audio input stream
      audioSamples = getAudioSamples(musicInputStream, format); // get the audio samples

      aMusicInputStream = new ByteArrayInputStream(audioSamples);
      int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10);
      byte[] buffer = new byte[bufferSize];
      try {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, bufferSize);
      } catch (LineUnavailableException e) {
        e.printStackTrace();
        return;
      }

      if (!line.isRunning()) {
        line.start();
      }

      int numBytesRead = 0;
      while (numBytesRead != -1 && !m_stopped) {
        numBytesRead = aMusicInputStream.read(buffer, 0, buffer.length);
        if (numBytesRead != -1) {
          line.write(buffer, 0, numBytesRead);
        }
      }
      line.drain();
      line.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
Example #11
  public static void main(String[] args) throws Exception {
    float[] testarray = new float[1024];
    for (int i = 0; i < 1024; i++) {
      double ii = i / 1024.0;
      ii = ii * ii;
      testarray[i] = (float) Math.sin(10 * ii * 2 * Math.PI);
      testarray[i] += (float) Math.sin(1.731 + 2 * ii * 2 * Math.PI);
      testarray[i] += (float) Math.sin(0.231 + 6.3 * ii * 2 * Math.PI);
      testarray[i] *= 0.3;
    }

    // Check conversion using PCM_FLOAT
    for (int big = 0; big < 2; big += 1)
      for (int bits = 32; bits <= 64; bits += 32) {
        AudioFormat frm =
            new AudioFormat(
                AudioFloatConverter.PCM_FLOAT, 44100, bits, 1, bits / 8, 44100, big == 1);
        byte[] buff = new byte[testarray.length * frm.getFrameSize()];
        float[] testarray2 = new float[testarray.length];
        AudioFloatConverter conv = AudioFloatConverter.getConverter(frm);
        conv.toByteArray(testarray, buff);
        conv.toFloatArray(buff, testarray2);
        for (int i = 0; i < testarray2.length; i++) {
          if (Math.abs(testarray[i] - testarray2[i]) > 0.05)
            throw new RuntimeException(
                "Conversion failed for " + frm + " , arrays not equal enough!\n");
        }
      }

    // Check conversion from float2byte and byte2float.
    for (int big = 0; big < 2; big += 1)
      for (int signed = 0; signed < 2; signed += 1)
        for (int bits = 6; bits <= 40; bits += 2) {
          AudioFormat frm = new AudioFormat(44100, bits, 1, signed == 1, big == 1);
          byte[] buff = new byte[testarray.length * frm.getFrameSize()];
          float[] testarray2 = new float[testarray.length];
          AudioFloatConverter conv = AudioFloatConverter.getConverter(frm);
          conv.toByteArray(testarray, buff);
          conv.toFloatArray(buff, testarray2);
          for (int i = 0; i < testarray2.length; i++) {
            if (Math.abs(testarray[i] - testarray2[i]) > 0.05)
              throw new RuntimeException(
                  "Conversion failed for " + frm + " , arrays not equal enough!\n");
          }
        }

    // Check big/little
    for (int big = 0; big < 2; big += 1)
      for (int signed = 0; signed < 2; signed += 1)
        for (int bits = 6; bits <= 40; bits += 2) {
          AudioFormat frm = new AudioFormat(44100, bits, 1, signed == 1, big == 1);
          byte[] buff = new byte[testarray.length * frm.getFrameSize()];
          AudioFloatConverter conv = AudioFloatConverter.getConverter(frm);
          conv.toByteArray(testarray, buff);
          byte[] buff2 = new byte[testarray.length * frm.getFrameSize()];
          int fs = frm.getFrameSize();
          for (int i = 0; i < buff2.length; i += fs) {
            for (int j = 0; j < fs; j++) {
              buff2[i + (fs - j - 1)] = buff[i + j];
            }
          }
          float[] testarray2 = new float[testarray.length];
          AudioFormat frm2 = new AudioFormat(44100, bits, 1, signed == 1, big == 0);
          AudioFloatConverter.getConverter(frm2).toFloatArray(buff2, testarray2);
          for (int i = 0; i < testarray2.length; i++) {
            if (Math.abs(testarray[i] - testarray2[i]) > 0.05) {
              throw new RuntimeException(
                  "Conversion failed for " + frm + " to " + frm2 + " , arrays not equal enough!\n");
            }
          }
        }

    // Check signed/unsigned
    for (int big = 0; big < 2; big += 1)
      for (int signed = 0; signed < 2; signed += 1)
        for (int bits = 6; bits <= 40; bits += 2) {
          AudioFormat frm = new AudioFormat(44100, bits, 1, signed == 1, big == 1);
          byte[] b = new byte[testarray.length * frm.getFrameSize()];
          AudioFloatConverter conv = AudioFloatConverter.getConverter(frm);
          conv.toByteArray(testarray, b);
          int fs = frm.getFrameSize();
          if (big == 1) {
            for (int i = 0; i < b.length; i += fs)
              b[i] = (b[i] >= 0) ? (byte) (0x80 | b[i]) : (byte) (0x7F & b[i]);
          } else {
            for (int i = (0 + fs - 1); i < b.length; i += fs)
              b[i] = (b[i] >= 0) ? (byte) (0x80 | b[i]) : (byte) (0x7F & b[i]);
          }
          float[] testarray2 = new float[testarray.length];
          AudioFormat frm2 = new AudioFormat(44100, bits, 1, signed == 0, big == 1);
          AudioFloatConverter.getConverter(frm2).toFloatArray(b, testarray2);
          for (int i = 0; i < testarray2.length; i++) {
            if (Math.abs(testarray[i] - testarray2[i]) > 0.05) {
              throw new RuntimeException(
                  "Conversion failed for " + frm + " to " + frm2 + " , arrays not equal enough!\n");
            }
          }
        }

    // Check that conversions 32->24, 24->16, 16->8 yield the same float data
    AudioFormat frm = new AudioFormat(44100, 40, 1, true, true);
    byte[] b = new byte[testarray.length * frm.getFrameSize()];
    AudioFloatConverter.getConverter(frm).toByteArray(testarray, b);
    for (int bits = 6; bits <= 40; bits += 2) {
      AudioFormat frm2 = new AudioFormat(44100, bits, 1, true, true);
      byte[] b2 = new byte[testarray.length * frm2.getFrameSize()];
      int fs1 = frm.getFrameSize();
      int fs2 = frm2.getFrameSize();
      int ii = 0;
      for (int i = 0; i < b.length; i += fs1) for (int j = 0; j < fs2; j++) b2[ii++] = b[i + j];
      float[] testarray2 = new float[testarray.length];
      AudioFloatConverter.getConverter(frm2).toFloatArray(b2, testarray2);
      for (int i = 0; i < testarray2.length; i++) {
        if (Math.abs(testarray[i] - testarray2[i]) > 0.05) {
          throw new RuntimeException(
              "Conversion failed for " + frm + " to " + frm2 + " , arrays not equal enough!\n");
        }
      }
    }
  }