Ejemplo n.º 1
0
  /**
   * Signals that a PooledThread has started: waits for the owning SoundManager's
   * constructor to finish, then creates, opens, and starts this thread's
   * SourceDataLine and playback buffer, publishing both via thread-locals.
   */
  protected void threadStarted() {
    // Wait for the SoundManager constructor to finish (it notifies this object).
    synchronized (this) {
      try {
        wait();
      } catch (InterruptedException ex) {
        // Fix: the original swallowed the interrupt in an empty catch. An
        // interrupt here is a shutdown request, so restore the flag (same
        // convention as the LineUnavailableException path below) and bail out.
        Thread.currentThread().interrupt();
        return;
      }
    }

    // Use a short, 100ms (1/10th sec) buffer for filters that change in real-time.
    int bufferSize =
        playbackFormat.getFrameSize() * Math.round(playbackFormat.getSampleRate() / 10);

    // Create, open, and start the line.
    SourceDataLine line;
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
    try {
      line = (SourceDataLine) AudioSystem.getLine(lineInfo);
      line.open(playbackFormat, bufferSize);
    } catch (LineUnavailableException ex) {
      // The line is unavailable - signal to end this thread.
      Thread.currentThread().interrupt();
      return;
    }

    line.start();

    // Create this thread's playback buffer.
    byte[] buffer = new byte[bufferSize];

    // Set this thread's locals so later playback calls find line and buffer.
    localLine.set(line);
    localBuffer.set(buffer);
  }
Ejemplo n.º 2
0
 /**
  * Streams audio from {@code source} to a freshly opened SourceDataLine using the
  * field {@code format}, blocking until the stream is exhausted. Uses a ~100ms
  * buffer. If no line is available the error is printed and the method returns.
  *
  * @param source raw audio data matching {@code format}; not closed by this method
  */
 public void play(InputStream source) {
   // 100ms (1/10th sec) buffer.
   int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10);
   byte[] buffer = new byte[bufferSize];
   SourceDataLine line;
   try {
     DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
     line = (SourceDataLine) AudioSystem.getLine(info);
     line.open(format, bufferSize);
   } catch (LineUnavailableException e) {
     e.printStackTrace();
     return;
   }
   line.start();
   try {
     int numBytesRead = 0;
     while (numBytesRead != -1) {
       numBytesRead = source.read(buffer, 0, buffer.length);
       if (numBytesRead != -1) {
         line.write(buffer, 0, numBytesRead);
       }
     }
     // Let queued audio finish before closing.
     line.drain();
   } catch (IOException e) {
     e.printStackTrace();
   } finally {
     // Fix: the original closed the line outside a finally, leaking it on any
     // unchecked exception thrown during the read/write loop.
     line.close();
   }
 }
Ejemplo n.º 3
0
  /**
   * Loads the audio resource at {@code url} into a Clip, converting ULAW/ALAW
   * encodings to 16-bit big-endian PCM first (Clip lines typically cannot play
   * ULAW/ALAW directly). Returns {@code null} if the format is unsupported, no
   * line is available, or any I/O error occurs; failures are logged to stdout
   * rather than thrown.
   *
   * @param url location of the audio resource
   * @return an opened Clip ready to play, or {@code null} on any failure
   */
  public static Clip loadClip(URL url) {
    Clip clip = null;
    String fnm = "" + url; // string form of the URL, used only in messages
    try {
      AudioInputStream stream = AudioSystem.getAudioInputStream(url);
      AudioFormat format = stream.getFormat();

      // ULAW/ALAW are 8-bit companded encodings; expand to 16-bit signed PCM
      // (hence sample size and frame size are doubled).
      if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
          || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
        AudioFormat newFormat =
            new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),
                format.getSampleSizeInBits() * 2,
                format.getChannels(),
                format.getFrameSize() * 2,
                format.getFrameRate(),
                true); // big endian
        stream = AudioSystem.getAudioInputStream(newFormat, stream);
        // System.out.println("Converted Audio format: " + newFormat);
        format = newFormat;
      }

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      // make sure sound system supports data line
      if (!AudioSystem.isLineSupported(info)) {
        System.out.println("Unsupported Clip File: " + fnm);
        return null;
      }
      // get clip line resource
      clip = (Clip) AudioSystem.getLine(info);
      clip.open(stream); // open the sound file as a clip (reads it fully)
      stream.close(); // we're done with the input stream
      // duration (in secs) of the clip
      double duration = clip.getMicrosecondLength() / 1000000.0; // new
      // NOTE(review): very short clips reportedly failed to play under J2SE 1.5,
      // hence this warning — confirm whether it still applies on modern JREs.
      if (duration <= 1.0) {
        System.out.println("WARNING. Duration <= 1 sec : " + duration + " secs");
        System.out.println(
            "         The clip in " + fnm + " may not play in J2SE 1.5 -- make it longer");
      }
      // else
      //  System.out.println(fnm + ": Duration: " + duration + " secs");
    } // end of try block
    catch (UnsupportedAudioFileException audioException) {
      System.out.println("Unsupported audio file: " + fnm);
    } catch (LineUnavailableException noLineException) {
      System.out.println("No audio line available for : " + fnm);
    } catch (IOException ioException) {
      System.out.println("Could not read: " + fnm);
    } catch (Exception e) {
      System.out.println("Problem with " + fnm);
    }
    return clip; // null if any catch branch above ran before assignment
  } // end of loadClip()
Ejemplo n.º 4
0
  /**
   * Reports the ULAW output formats this plugin can produce for the given input.
   * With no input (or a non-audio input) a generic ULAW format is advertised;
   * an unmatched input yields a single-element array containing {@code null},
   * per JMF plugin convention.
   *
   * @param in the candidate input format, may be {@code null}
   * @return the supported output formats for {@code in}
   */
  public Format[] getSupportedOutputFormats(Format in) {
    // No constraint given: advertise generic ULAW.
    if (in == null) {
      return new Format[] {new AudioFormat(AudioFormat.ULAW)};
    }

    // Input is not one of our supported formats: {null} array per convention.
    if (matches(in, inputFormats) == null) {
      return new Format[1];
    }

    // Non-audio input: fall back to generic ULAW.
    if (!(in instanceof AudioFormat)) {
      return new Format[] {new AudioFormat(AudioFormat.ULAW)};
    }

    // Mirror the input's sample rate, size, and channel count in ULAW.
    final AudioFormat audio = (AudioFormat) in;
    final AudioFormat ulaw =
        new AudioFormat(
            AudioFormat.ULAW,
            audio.getSampleRate(),
            audio.getSampleSizeInBits(),
            audio.getChannels());
    return new Format[] {ulaw};
  }
Ejemplo n.º 5
0
  /**
   * Loads an audio file into memory: opens it as an AudioInputStream, records its
   * format/channel/rate/bit-size fields, caps the loaded duration at
   * {@code MAX_AUDIO_DURATION} seconds, reads the (possibly truncated) sample
   * bytes into {@code audioBytes}, and triggers {@code getAudioData()}.
   *
   * @param file the audio file to load; must be an existing regular file
   * @return {@code true} on success; {@code false} on any failure (reported via
   *     {@code reportStatus})
   */
  public boolean load(File file) {

    this.file = file;

    if (file != null && file.isFile()) {
      try {
        errStr = null;
        audioInputStream = AudioSystem.getAudioInputStream(file);

        fileName = file.getName();

        format = audioInputStream.getFormat();

      } catch (Exception ex) {
        reportStatus(ex.toString());
        return false;
      }
    } else {
      reportStatus("Audio file required.");
      return false;
    }

    numChannels = format.getChannels();
    sampleRate = (double) format.getSampleRate();
    sampleBitSize = format.getSampleSizeInBits();
    long frameLength = audioInputStream.getFrameLength();
    // NOTE(review): assumes a known, positive frame rate; a NOT_SPECIFIED (-1)
    // frame rate would make this duration nonsense — confirm input formats.
    long milliseconds = (long) ((frameLength * 1000) / audioInputStream.getFormat().getFrameRate());
    double audioFileDuration = milliseconds / 1000.0;

    // Cap the loaded portion at MAX_AUDIO_DURATION seconds.
    if (audioFileDuration > MAX_AUDIO_DURATION) duration = MAX_AUDIO_DURATION;
    else duration = audioFileDuration;

    // Scale the frame count down to the capped duration.
    frameLength = (int) Math.floor((duration / audioFileDuration) * (double) frameLength);

    try {
      audioBytes = new byte[(int) frameLength * format.getFrameSize()];
      // Fix: a single read() may legally return fewer bytes than requested,
      // which silently truncated the audio. Loop until the buffer is full or EOF.
      int offset = 0;
      while (offset < audioBytes.length) {
        int n = audioInputStream.read(audioBytes, offset, audioBytes.length - offset);
        if (n == -1) {
          break; // EOF: keep whatever we got
        }
        offset += n;
      }
    } catch (Exception ex) {
      reportStatus(ex.toString());
      return false;
    }

    getAudioData();

    return true;
  }
Ejemplo n.º 6
0
  /**
   * Builds the single ALAW/RTP output format corresponding to the given input,
   * copying only its sample rate (output is fixed at 8-bit mono byte-array data).
   * The result is cached in {@code supportedOutputFormats} before being returned.
   *
   * @param in the input format; must be an AudioFormat
   * @return a one-element array holding the matching ALAW/RTP format
   */
  protected Format[] getMatchingOutputFormats(Format in) {
    final AudioFormat input = (AudioFormat) in;
    final AudioFormat alawRtp =
        new AudioFormat(
            Constants.ALAW_RTP,
            input.getSampleRate(),
            8, // sample size in bits
            1, // mono
            Format.NOT_SPECIFIED,
            Format.NOT_SPECIFIED,
            8, // frame size in bits
            Format.NOT_SPECIFIED,
            Format.byteArray);
    supportedOutputFormats = new AudioFormat[] {alawRtp};
    return supportedOutputFormats;
  }
Ejemplo n.º 7
0
  /**
   * Reads an entire audio stream into a direct ByteBuffer and wraps it as
   * WAVData for OpenAL, choosing the AL_FORMAT_* constant from the stream's
   * channel count and sample size (defaults to AL_FORMAT_MONO8 for other
   * combinations). 16-bit samples are byte-swapped on big-endian platforms,
   * since OpenAL expects little-endian PCM. The stream is always closed.
   *
   * @param aIn the audio stream to consume
   * @return the buffered sample data plus format/size/frequency metadata
   * @throws IOException if reading the stream fails
   */
  private static WAVData readFromStream(AudioInputStream aIn)
      throws UnsupportedAudioFileException, IOException {
    try {
      ReadableByteChannel aChannel = Channels.newChannel(aIn);
      AudioFormat fmt = aIn.getFormat();
      int numChannels = fmt.getChannels();
      int bits = fmt.getSampleSizeInBits();

      // Map (bits, channels) to the OpenAL buffer format; mono-8 is the fallback.
      int format = AL_FORMAT_MONO8;
      if ((bits == 8) && (numChannels == 1)) {
        format = AL_FORMAT_MONO8;
      } else if ((bits == 16) && (numChannels == 1)) {
        format = AL_FORMAT_MONO16;
      } else if ((bits == 8) && (numChannels == 2)) {
        format = AL_FORMAT_STEREO8;
      } else if ((bits == 16) && (numChannels == 2)) {
        format = AL_FORMAT_STEREO16;
      }

      int freq = Math.round(fmt.getSampleRate());
      // NOTE(review): available() is only an estimate for some stream types;
      // assumed accurate for WAV data here — confirm for other containers.
      int size = aIn.available();
      ByteBuffer buffer = ByteBuffer.allocateDirect(size);
      while (buffer.remaining() > 0) {
        // Fix: the original ignored the channel's return value; an early EOF
        // (read() == -1 with the buffer not yet full) spun this loop forever.
        if (aChannel.read(buffer) == -1) {
          break;
        }
      }
      buffer.rewind();

      // Must byte swap on big endian platforms
      // Thanks to swpalmer on javagaming.org forums for hint at fix
      if ((bits == 16) && (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN)) {
        int len = buffer.remaining();
        for (int i = 0; i < len; i += 2) {
          byte a = buffer.get(i);
          byte b = buffer.get(i + 1);
          buffer.put(i, b);
          buffer.put(i + 1, a);
        }
      }

      return new WAVData(buffer, format, size, freq, false);
    } finally {
      // Fix: the original only closed the stream on success, leaking it when
      // an exception was thrown mid-read.
      aIn.close();
    }
  }
Ejemplo n.º 8
0
  // 播放au,aiff,wav音乐流, 这个函数基本完全为帖子上的代码
  private synchronized void play() {
    ByteArrayInputStream aMusicInputStream;
    AudioFormat format;
    AudioInputStream musicInputStream;
    byte[] audioSamples;
    SourceDataLine line;
    try {
      File MusicFile = new File(m_filename);

      musicInputStream = AudioSystem.getAudioInputStream(MusicFile); // 取得文件的音频输入流
      format = musicInputStream.getFormat(); // 取得音频输入流的格式
      audioSamples = getAudioSamples(musicInputStream, format); // 取得音频样本

      aMusicInputStream = new ByteArrayInputStream(audioSamples);
      int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10);
      byte[] buffer = new byte[bufferSize];
      try {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, bufferSize);
      } catch (LineUnavailableException e) {
        e.printStackTrace();
        return;
      }

      if (!line.isRunning()) {
        line.start();
      }

      int numBytesRead = 0;
      while (numBytesRead != -1 && !m_stopped) {
        numBytesRead = aMusicInputStream.read(buffer, 0, buffer.length);
        if (numBytesRead != -1) {
          line.write(buffer, 0, numBytesRead);
        }
      }
      line.drain();
      line.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
  /**
   * Reports the output formats this codec supports for the given input. With no
   * input constraint the full output list is returned. Inputs that are not
   * 8-bit mono ALAW (with NOT_SPECIFIED wildcards allowed) yield a {null}
   * array after logging a warning; a matching input yields its ALAW/RTP twin.
   *
   * @param input the candidate input format, may be {@code null}
   * @return matching output formats, or {@code new Format[] {null}} on mismatch
   */
  @Override
  public Format[] getSupportedOutputFormats(Format input) {
    // No constraint: advertise everything we can emit.
    if (input == null) {
      return outputFormats;
    }

    if (!(input instanceof AudioFormat)) {
      logger.warning(
          this.getClass().getSimpleName()
              + ".getSupportedOutputFormats: input format does not match, returning format array of {null} for "
              + input); // this can cause an NPE in JMF if it ever
      // happens.
      return new Format[] {null};
    }

    final AudioFormat inputCast = (AudioFormat) input;

    // The input must be ALAW, 8-bit, mono, 8-bit frames — each property may
    // also be left unspecified (wildcard).
    final boolean alawEncoding = inputCast.getEncoding().equals(AudioFormat.ALAW);
    final boolean sampleSizeOk =
        inputCast.getSampleSizeInBits() == 8
            || inputCast.getSampleSizeInBits() == Format.NOT_SPECIFIED;
    final boolean channelsOk =
        inputCast.getChannels() == 1 || inputCast.getChannels() == Format.NOT_SPECIFIED;
    final boolean frameSizeOk =
        inputCast.getFrameSizeInBits() == 8
            || inputCast.getFrameSizeInBits() == Format.NOT_SPECIFIED;

    if (!(alawEncoding && sampleSizeOk && channelsOk && frameSizeOk)) {
      logger.warning(
          this.getClass().getSimpleName()
              + ".getSupportedOutputFormats: input format does not match, returning format array of {null} for "
              + input); // this can cause an NPE in JMF if it ever
      // happens.
      return new Format[] {null};
    }

    // Matching input: emit the ALAW/RTP counterpart of the incoming format.
    final AudioFormat result =
        new AudioFormat(
            BonusAudioFormatEncodings.ALAW_RTP,
            inputCast.getSampleRate(),
            8,
            1,
            inputCast.getEndian(),
            inputCast.getSigned(),
            8,
            inputCast.getFrameRate(),
            inputCast.getDataType());
    return new Format[] {result};
  }
 /**
  * Creates the audio player thread named "Haven audio player". The thread is
  * marked as a daemon so it cannot keep the JVM alive on its own, and the
  * output sample rate is cached from the shared format.
  * NOTE(review): assumes the enclosing class's {@code fmt} field is already
  * initialized when this constructor runs — confirm initialization order.
  */
 Player() {
   super("Haven audio player");
   setDaemon(true);
   srate = (int) fmt.getSampleRate();
 }
Ejemplo n.º 11
0
  /**
   * Entry point for a small MIDI-triggered sample mixer demo. Parses
   * {@code -m <midi device>}, {@code -a <audio device>}, and
   * {@code -l <latency ms>} flags, loads the sample files listed in
   * {@code sounds}, wires an AudioMixer to a SoundcardSink, optionally listens
   * for MIDI NOTE ON events to trigger samples, and plays a sample per ENTER
   * key press until 'q' is entered.
   * NOTE(review): assumes {@code sounds} is non-empty — {@code src[0]} below
   * would otherwise throw; confirm. {@code Integer.parseInt} on bad flag values
   * propagates NumberFormatException, which is acceptable for a demo main.
   */
  public static void main(String[] args) throws Exception {
    int midiDev = -1; // -1 = no MIDI input
    int audioDev = -1; // -1 = default audio device
    int latencyInMillis = 70;

    // parse arguments
    int argi = 0;
    while (argi < args.length) {
      String arg = args[argi];
      if (arg.equals("-h")) {
        printUsageAndExit();
      } else if (arg.equals("-m")) {
        argi++;
        if (argi >= args.length) {
          printUsageAndExit();
        }
        midiDev = Integer.parseInt(args[argi]);
      } else if (arg.equals("-a")) {
        argi++;
        if (argi >= args.length) {
          printUsageAndExit();
        }
        audioDev = Integer.parseInt(args[argi]);
      } else if (arg.equals("-l")) {
        argi++;
        if (argi >= args.length) {
          printUsageAndExit();
        }
        latencyInMillis = Integer.parseInt(args[argi]);
      } else {
        printUsageAndExit();
      }
      argi++;
    }

    // load samples
    final AudioFileSource[] src = new AudioFileSource[sounds.length];
    for (int i = 0; i < sounds.length; i++) {
      src[i] = new AudioFileSource(new File(sounds[i]));
    }
    // define the first source's audioformat as the master format
    final AudioFormat format = src[0].getFormat();

    // set up mixer
    final AudioMixer mixer = new AudioMixer(format.getChannels(), format.getSampleRate());

    // set up soundcard (sink)
    SoundcardSink sink = new SoundcardSink();
    // open the sink and connect it with the mixer
    sink.open(audioDev, latencyInMillis, format, mixer);
    try {

      // do we want to open a MIDI port?
      MidiIn midi = null;
      if (midiDev >= 0) {
        // start MIDI IN
        midi = new MidiIn();
        midi.setListener(
            new MidiIn.Listener() {
              public void midiInPlayed(int status, int data1, int data2) {
                // only react to NOTE ON messages with velocity > 0
                if (((status & 0xF0) == 0x90) && (data2 > 0)) {
                  // note number picks the sample, wrapping past the end
                  AudioFileSource newSrc = src[data1 % src.length].makeClone();
                  mixer.addAudioStream(newSrc);
                  serviceMixer(mixer);
                }
              }

              public void midiInPlayed(byte[] message) {
                // nothing to do for long MIDI messages
              }
            });
        midi.open(midiDev);
      } else {
        Debug.debug("No MIDI.");
      }
      try {

        // start the sink -- from now on, the mixer is polled for new
        // data
        sink.start();

        System.out.println("Press ENTER for a sound, 'q'+ENTER to quit.");
        int currSrc = 0;
        while (true) {
          char c = (char) System.in.read();
          if (c == 'q') {
            break;
          }
          // cycle through the loaded samples, one per keypress
          AudioFileSource newSrc = src[(currSrc++) % src.length].makeClone();
          mixer.addAudioStream(newSrc);
          serviceMixer(mixer);
        }
      } finally {
        // clean-up: close MIDI first so no new samples get triggered
        if (midi != null) {
          midi.close();
        }
      }
    } finally {
      sink.close();
    }

    Debug.debug("done");
  }