예제 #1
1
  /**
   * Creates an AudioInputStream from a sound from an input stream, converted to this
   * manager's playback format.
   *
   * @param is the raw sound data; wrapped in a BufferedInputStream when mark/reset is
   *     not supported, because AudioSystem.getAudioInputStream needs mark support to
   *     probe the file format
   * @return the converted stream, or null if the sound could not be read or converted
   */
  public AudioInputStream getAudioInputStream(InputStream is) {

    try {
      if (!is.markSupported()) {
        is = new BufferedInputStream(is);
      }
      // open the source stream
      AudioInputStream source = AudioSystem.getAudioInputStream(is);

      // convert to playback format
      return AudioSystem.getAudioInputStream(playbackFormat, source);
    } catch (UnsupportedAudioFileException | IOException | IllegalArgumentException ex) {
      // All three failure modes are handled identically: report and fall through to
      // the null return, so callers treat any unreadable sound as "no sound".
      ex.printStackTrace();
    }

    return null;
  }
예제 #2
0
  /**
   * Starts playback of a sampled-audio stream.
   *
   * <p>Converts ALAW/ULAW data to PCM, obtains a source data line for the stream's
   * format, registers the channel in {@code infos}, and hands the data off to a
   * DataPusher thread. Returns silently when the stream cannot be converted or no
   * line supports its format.
   *
   * @param as the sampled audio to play
   * @param in the original input stream, used as the channel key in {@code infos}
   * @throws UnsupportedAudioFileException if the stream cannot be played
   * @throws LineUnavailableException if no data line can be opened
   */
  private synchronized void startSampled(AudioInputStream as, InputStream in)
      throws UnsupportedAudioFileException, LineUnavailableException {

    // ALAW/ULAW streams must be converted to PCM before a line will accept them.
    as = Toolkit.getPCMConvertedAudioInputStream(as);
    if (as == null) {
      // conversion failed - nothing we can play
      return;
    }

    DataLine.Info lineSpec = new DataLine.Info(SourceDataLine.class, as.getFormat());
    if (!AudioSystem.isLineSupported(lineSpec)) {
      // no installed mixer can render this format
      return;
    }

    SourceDataLine line = (SourceDataLine) AudioSystem.getLine(lineSpec);
    DataPusher pusher = new DataPusher(line, as);

    // register the channel before starting so openChannel() sees it as playing
    infos.addElement(new Info(null, in, pusher));

    pusher.start();
  }
예제 #3
0
  /**
   * Signals that a PooledThread has started. Creates the Thread's line and buffer.
   *
   * <p>Blocks until the SoundManager constructor notifies, then opens a SourceDataLine
   * in the playback format with a ~100ms buffer and stores the line and a matching
   * byte buffer in this thread's thread-locals. If the thread is interrupted while
   * waiting, or no line is available, the thread is flagged for termination via its
   * interrupt status and no locals are set.
   */
  protected void threadStarted() {
    // wait for the SoundManager constructor to finish
    synchronized (this) {
      try {
        wait();
      } catch (InterruptedException ex) {
        // Restore the interrupt status instead of swallowing it, and bail out -
        // this matches the LineUnavailableException path below, which uses the
        // interrupt flag as the "end this thread" signal.
        Thread.currentThread().interrupt();
        return;
      }
    }

    // use a short, 100ms (1/10th sec) buffer for filters that
    // change in real-time
    int bufferSize =
        playbackFormat.getFrameSize() * Math.round(playbackFormat.getSampleRate() / 10);

    // create, open, and start the line
    SourceDataLine line;
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
    try {
      line = (SourceDataLine) AudioSystem.getLine(lineInfo);
      line.open(playbackFormat, bufferSize);
    } catch (LineUnavailableException ex) {
      // the line is unavailable - signal to end this thread
      Thread.currentThread().interrupt();
      return;
    }

    line.start();

    // create the buffer
    byte[] buffer = new byte[bufferSize];

    // set this thread's locals
    localLine.set(line);
    localBuffer.set(buffer);
  }
예제 #4
0
  /** Does any clean up before closing: unpauses waiting threads and shuts the mixer. */
  protected void cleanUp() {
    // make sure no thread is left blocked on a pause
    setPaused(false);

    // closing the default mixer stops any sounds that are still playing
    Mixer defaultMixer = AudioSystem.getMixer(null);
    if (defaultMixer.isOpen()) {
      defaultMixer.close();
    }
  }
예제 #5
0
 /**
  * Loads a WAV file from {@code path + name}, starts playing it at the given volume,
  * and registers the clip in {@code wavMap} under its name.
  *
  * @param name file name, resolved relative to {@code path}
  * @param loop if true, the clip loops until stopped
  * @param volume linear volume factor (1.0 = unity gain); converted to decibels for
  *     the MASTER_GAIN control, so it must be positive
  * @throws FileNotFoundException if the file does not exist
  * @throws IOException on a read error
  * @throws UnsupportedAudioFileException if the file is not valid audio
  * @throws LineUnavailableException if no line can play the clip
  */
 private static void playWav(String name, boolean loop, double volume)
     throws FileNotFoundException, IOException, UnsupportedAudioFileException,
         LineUnavailableException {
   Clip clip = AudioSystem.getClip();
   // Clip.open loads the whole stream into memory, so the stream can (and should)
   // be closed as soon as the clip has been opened.
   try (AudioInputStream ais = AudioSystem.getAudioInputStream(new File(path + name))) {
     clip.open(ais);
   }
   if (loop) {
     clip.loop(Clip.LOOP_CONTINUOUSLY);
   }
   // Convert the linear volume factor to decibels: dB = 20 * log10(volume).
   ((FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN))
       .setValue((float) (20.0 * Math.log10(volume)));
   clip.start();
   wavMap.put(name, clip);
 }
  /**
   * Renders the module-level {@code sequence} through a software synthesizer and writes
   * the result as a WAVE file to the given output stream.
   *
   * @param os destination for the WAVE data
   * @param format audio format to render in
   * @param info synthesizer properties passed to {@code openStream}
   * @throws Exception on any synthesis, MIDI, or I/O failure
   */
  public static void render(OutputStream os, AudioFormat format, Map<String, Object> info)
      throws Exception {
    AudioSynthesizer synth = (AudioSynthesizer) new SoftSynthesizer();
    AudioInputStream stream = synth.openStream(format, info);
    Receiver recv = synth.getReceiver();

    // Replace the synthesizer's default soundbank with the configured one.
    Soundbank defsbk = synth.getDefaultSoundbank();
    if (defsbk != null) {
      synth.unloadAllInstruments(defsbk);
    }
    synth.loadAllInstruments(soundbank);

    // Push the whole sequence into the synthesizer up front; the stream then
    // produces the rendered samples on demand.
    double totalTime = 5;
    send(sequence, recv);

    // Bound the (otherwise endless) synth stream: sequence time plus a
    // 4-second decay tail, expressed in frames.
    long len = (long) (stream.getFormat().getFrameRate() * (totalTime + 4));
    // try-with-resources guarantees the stream is closed even if write() throws.
    try (AudioInputStream bounded = new AudioInputStream(stream, stream.getFormat(), len)) {
      AudioSystem.write(bounded, AudioFileFormat.Type.WAVE, os);
    }
  }
예제 #7
0
  /**
   * Opens an audio channel for the given input stream and starts playback.
   *
   * <p>If the stream is already registered in {@code infos} the call is a no-op.
   * Otherwise the stream type is detected - an {@code AudioStream} or
   * {@code AudioDataStream} wrapper is unwrapped directly; anything else is probed
   * first as sampled audio, then as MIDI, then falls back to raw 8kHz mono ULAW.
   * Every failure is swallowed and the method returns without opening a channel.
   *
   * @param in the stream to play
   */
  public synchronized void openChannel(InputStream in) {

    if (DEBUG) {
      System.out.println("AudioDevice: openChannel");
      System.out.println("input stream =" + in);
    }

    Info info = null;

    // is this already playing?  if so, then just return
    for (int i = 0; i < infos.size(); i++) {
      info = (AudioDevice.Info) infos.elementAt(i);
      if (info.in == in) {

        return;
      }
    }

    AudioInputStream as = null;

    if (in instanceof AudioStream) {

      if (((AudioStream) in).midiformat != null) {

        // it's a midi file
        try {
          startMidi(((AudioStream) in).stream, in);
        } catch (Exception e) {
          // best-effort: a stream that fails to start is simply not played
          return;
        }

      } else if (((AudioStream) in).ais != null) {

        // it's sampled audio
        try {
          startSampled(((AudioStream) in).ais, in);
        } catch (Exception e) {
          return;
        }
      }
    } else if (in instanceof AudioDataStream) {
      if (in instanceof ContinuousAudioDataStream) {
        // looping stream: total length is unknown, so mark it NOT_SPECIFIED
        try {
          AudioInputStream ais =
              new AudioInputStream(
                  in, ((AudioDataStream) in).getAudioData().format, AudioSystem.NOT_SPECIFIED);
          startSampled(ais, in);
        } catch (Exception e) {
          return;
        }
      } else {
        // one-shot stream: length is the size of the underlying data buffer
        try {
          AudioInputStream ais =
              new AudioInputStream(
                  in,
                  ((AudioDataStream) in).getAudioData().format,
                  ((AudioDataStream) in).getAudioData().buffer.length);
          startSampled(ais, in);
        } catch (Exception e) {
          return;
        }
      }
    } else {
      // Unknown stream type: buffer it so the format probes below can
      // mark/reset while sniffing the header.
      BufferedInputStream bis = new BufferedInputStream(in, 1024);

      try {

        // first probe: sampled audio (WAV/AIFF/AU/...)
        try {
          as = AudioSystem.getAudioInputStream(bis);
        } catch (IOException ioe) {
          return;
        }

        startSampled(as, in);

      } catch (UnsupportedAudioFileException e) {

        // not sampled audio - second probe: MIDI
        try {
          try {
            MidiFileFormat mff = MidiSystem.getMidiFileFormat(bis);
          } catch (IOException ioe1) {
            return;
          }

          startMidi(bis, in);

        } catch (InvalidMidiDataException e1) {

          // $$jb:08.01.99: adding this section to make some of our other
          // legacy classes work.....
          // not MIDI either, special case handling for all others:
          // assume headerless 8kHz mono ULAW, the format of the legacy .au classes

          AudioFormat defformat =
              new AudioFormat(AudioFormat.Encoding.ULAW, 8000, 8, 1, 1, 8000, true);
          try {
            AudioInputStream defaif =
                new AudioInputStream(bis, defformat, AudioSystem.NOT_SPECIFIED);
            startSampled(defaif, in);
          } catch (UnsupportedAudioFileException es) {
            return;
          } catch (LineUnavailableException es2) {
            return;
          }

        } catch (MidiUnavailableException e2) {

          // could not open sequence
          return;
        }

      } catch (LineUnavailableException e) {

        return;
      }
    }

    // don't forget adjust for a new stream.
    // wake up any thread waiting on this device so it notices the new channel
    notify();
  }
예제 #8
0
 /**
  * Gets the maximum number of simultaneous sounds with the specified AudioFormat that the default
  * mixer can play.
  *
  * @param playbackFormat the format the sounds would be played in
  * @return the default mixer's line limit for a SourceDataLine in that format;
  *     AudioSystem.NOT_SPECIFIED means the mixer imposes no fixed limit
  */
 public static int getMaxSimultaneousSounds(AudioFormat playbackFormat) {
   Mixer defaultMixer = AudioSystem.getMixer(null);
   DataLine.Info lineSpec = new DataLine.Info(SourceDataLine.class, playbackFormat);
   return defaultMixer.getMaxLines(lineSpec);
 }