Example #1
1
  /** Creates an AudioInputStream, in the playback format, from a sound read from an input stream. */
  public AudioInputStream getAudioInputStream(InputStream is) {

    try {
      // AudioSystem.getAudioInputStream(InputStream) requires mark/reset support,
      // so wrap the stream in a BufferedInputStream if needed
      if (!is.markSupported()) {
        is = new BufferedInputStream(is);
      }
      // open the source stream
      AudioInputStream source = AudioSystem.getAudioInputStream(is);

      // convert to playback format
      return AudioSystem.getAudioInputStream(playbackFormat, source);
    } catch (UnsupportedAudioFileException ex) {
      ex.printStackTrace();
    } catch (IOException ex) {
      ex.printStackTrace();
    } catch (IllegalArgumentException ex) {
      ex.printStackTrace();
    }

    return null;
  }
Example #2
0
  public void run() {
    try {
      AudioInputStream ais = AudioSystem.getAudioInputStream(soundFile);
      AudioFormat format = ais.getFormat();
      //    System.out.println("Format: " + format);
      DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
      SourceDataLine source = (SourceDataLine) AudioSystem.getLine(info);
      source.open(format);
      source.start();
      int read = 0;
      byte[] audioData = new byte[16384];
      while (read > -1) {
        read = ais.read(audioData, 0, audioData.length);
        if (read >= 0) {
          source.write(audioData, 0, read);
        }
      }
      donePlaying = true;

      source.drain();
      source.close();
    } catch (Exception exc) {
      System.out.println("error: " + exc.getMessage());
      exc.printStackTrace();
    }
  }
Example #3
0
  /** Save the double array as a sound file (using .wav or .au format). */
  public static void save(String filename, double[] input) {
    // assumes 44,100 samples per second
    // use 16-bit audio, mono, signed PCM, little Endian
    AudioFormat format = new AudioFormat(SAMPLE_RATE, 16, 1, true, false);
    byte[] data = new byte[2 * input.length];
    for (int i = 0; i < input.length; i++) {
      int temp = (short) (input[i] * MAX_16_BIT);
      data[2 * i + 0] = (byte) temp;
      data[2 * i + 1] = (byte) (temp >> 8);
    }

    // now save the file
    try {
      ByteArrayInputStream bais = new ByteArrayInputStream(data);
      AudioInputStream ais = new AudioInputStream(bais, format, input.length);
      if (filename.endsWith(".wav") || filename.endsWith(".WAV")) {
        AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(filename));
      } else if (filename.endsWith(".au") || filename.endsWith(".AU")) {
        AudioSystem.write(ais, AudioFileFormat.Type.AU, new File(filename));
      } else {
        throw new RuntimeException("File format not supported: " + filename);
      }
    } catch (Exception e) {
      System.out.println(e);
      System.exit(1);
    }
  }
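A short usage sketch for the routine above; the demoSave name and the tone parameters are illustrative, and SAMPLE_RATE is assumed to be the 44,100 Hz documented in the comments of save().

  // Illustrative usage only (not from the original source): write one second of a
  // 440 Hz sine tone with the save() routine above, assuming SAMPLE_RATE is 44100.
  public static void demoSave() {
    double[] tone = new double[44100];
    for (int i = 0; i < tone.length; i++) {
      tone[i] = 0.5 * Math.sin(2 * Math.PI * 440.0 * i / 44100.0);
    }
    save("tone440.wav", tone);
  }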
Example #4
0
  /*
   * Return data as a byte array.
   */
  private static byte[] readByte(String filename) {
    byte[] data = null;
    AudioInputStream ais = null;
    try {

      // try to read from file
      File file = new File(filename);
      if (file.exists()) {
        ais = AudioSystem.getAudioInputStream(file);
        data = new byte[ais.available()];
        ais.read(data);
      }

      // try to read from URL
      else {
        URL url = StdAudio.class.getResource(filename);
        ais = AudioSystem.getAudioInputStream(url);
        data = new byte[ais.available()];
        ais.read(data);
      }
    } catch (Exception e) {
      System.out.println(e.getMessage());
      throw new RuntimeException("Could not read " + filename);
    }

    return data;
  }
Example #5
0
  public void beginExecution() {

    AudioFormat audioFormat =
        new AudioFormat(
            AudioFormat.Encoding.PCM_SIGNED, 44100.0F, 16, 1, numChannels, 44100.0F, false);
    // System.out.println("AudioPlayer.playAudioInts audio format: " + audioFormat );

    DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
    if (!AudioSystem.isLineSupported(dataLineInfo)) {
      System.out.println("AudioPlayer.playAudioInts does not " + " handle this type of audio.");
      return;
    }

    try {
      SourceDataLine sourceLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);

      sourceLine.open(audioFormat);
    } catch (LineUnavailableException e) {
      e.printStackTrace();
    }

    chunkIndex = 0;

    InitialExecution = true;
  }
Example #6
0
 /* Routine to play the exploding sound */
 public void explode() {
   File sound = new File("explosion.wav");
   try {
     Clip clip = AudioSystem.getClip();
     clip.open(AudioSystem.getAudioInputStream(sound)); // open the specified clip
     clip.start(); // play it
   } catch (Exception e) {
     System.out.println(e);
   }
 }
Example #7
0
  public static Clip loadClip(URL url) {
    Clip clip = null;
    String fnm = "" + url;
    try {
      AudioInputStream stream = AudioSystem.getAudioInputStream(url);
      AudioFormat format = stream.getFormat();

      if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
          || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
        AudioFormat newFormat =
            new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),
                format.getSampleSizeInBits() * 2,
                format.getChannels(),
                format.getFrameSize() * 2,
                format.getFrameRate(),
                true); // big endian
        stream = AudioSystem.getAudioInputStream(newFormat, stream);
        // System.out.println("Converted Audio format: " + newFormat);
        format = newFormat;
      }

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      // make sure sound system supports data line
      if (!AudioSystem.isLineSupported(info)) {
        System.out.println("Unsupported Clip File: " + fnm);
        return null;
      }
      // get clip line resource
      clip = (Clip) AudioSystem.getLine(info);
      clip.open(stream); // open the sound file as a clip
      stream.close(); // we're done with the input stream
      // duration (in secs) of the clip
      double duration = clip.getMicrosecondLength() / 1000000.0; // new
      if (duration <= 1.0) {
        System.out.println("WARNING. Duration <= 1 sec : " + duration + " secs");
        System.out.println(
            "         The clip in " + fnm + " may not play in J2SE 1.5 -- make it longer");
      }
      // else
      //  System.out.println(fnm + ": Duration: " + duration + " secs");
    } // end of try block
    catch (UnsupportedAudioFileException audioException) {
      System.out.println("Unsupported audio file: " + fnm);
    } catch (LineUnavailableException noLineException) {
      System.out.println("No audio line available for : " + fnm);
    } catch (IOException ioException) {
      System.out.println("Could not read: " + fnm);
    } catch (Exception e) {
      System.out.println("Problem with " + fnm);
    }
    return clip;
  } // end of loadClip()
Example #8
0
 /* Routine to play the theme */
 public void playTheme() {
   File sound = new File("Death on the Dance Floor_1.wav");
   try {
     Clip clip = AudioSystem.getClip();
     clip.open(AudioSystem.getAudioInputStream(sound)); // open the specified clip
     clip.start(); // start it
     clip.loop(Clip.LOOP_CONTINUOUSLY); // loop it
   } catch (Exception e) {
     System.out.println(e);
   }
 }
Example #9
0
 private void loadAndPlayAudio(String audioResource) {
   try (AudioInputStream soundStream =
       AudioSystem.getAudioInputStream(getClass().getResource(audioResource))) {
     DataLine.Info info = new DataLine.Info(Clip.class, soundStream.getFormat());
     Clip clip = (Clip) AudioSystem.getLine(info);
     clip.open(soundStream);
     clip.start();
   } catch (UnsupportedAudioFileException | IOException | LineUnavailableException ex) {
     ex.printStackTrace();
   }
 }
 public static void main(String[] args) {
   try {
     byte[] wav = Base64Coder.base64ToBinary(getWaveLiteral().toCharArray(), 0, WAV_SIZE);
     InputStream is = new ByteArrayInputStream(wav);
     AudioFormat fmt = AudioSystem.getAudioFileFormat(is).getFormat();
     AudioInputStream sound = AudioSystem.getAudioInputStream(is);
     DataLine.Info info = new DataLine.Info(Clip.class, fmt);
     Clip clip = (Clip) AudioSystem.getLine(info);
     clip.open(sound);
     clip.start();
     Thread.sleep(3000);
   } catch (Exception ex) {
     ex.printStackTrace();
   }
 }
Example #11
0
 public void addSound(String fileName, boolean loop) {
   try {
     input = AudioSystem.getAudioInputStream(new File(fileName));
     audio = AudioSystem.getClip();
     audio.open(input);
     clips.add(audio);
     loops.add(loop);
   } catch (IOException e) {
     System.out.println("Audio File Not Found");
   } catch (UnsupportedAudioFileException e) {
     System.out.println("Audio File Type Not Supported, brah");
   } catch (LineUnavailableException e) {
     // TODO Auto-generated catch block
     System.out.println("Serious audio problem Line Unavailable");
   }
 }
Example #12
0
 // Plays the given sound file if the player is still running ("soundAbspielen" = play sound, "läuft" = running)
 private void soundAbspielen(File sound) {
   if (!läuft) return;
   try {
     audioInputStream = AudioSystem.getAudioInputStream(sound);
     af = audioInputStream.getFormat();
     size = (int) (af.getFrameSize() * audioInputStream.getFrameLength());
     audio = new byte[size];
     info = new DataLine.Info(Clip.class, af, size);
     audioInputStream.read(audio, 0, size);
     clip = (Clip) AudioSystem.getLine(info);
     clip.open(af, audio, 0, size);
     clip.start();
   } catch (Exception e) {
     e.printStackTrace();
   }
 }
Example #13
0
  /** Signals that a PooledThread has started. Creates the Thread's line and buffer. */
  protected void threadStarted() {
    // wait for the SoundManager constructor to finish
    synchronized (this) {
      try {
        wait();
      } catch (InterruptedException ex) {
      }
    }

    // use a short, 100ms (1/10th sec) buffer for filters that
    // change in real-time
    int bufferSize =
        playbackFormat.getFrameSize() * Math.round(playbackFormat.getSampleRate() / 10);

    // create, open, and start the line
    SourceDataLine line;
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
    try {
      line = (SourceDataLine) AudioSystem.getLine(lineInfo);
      line.open(playbackFormat, bufferSize);
    } catch (LineUnavailableException ex) {
      // the line is unavailable - signal to end this thread
      Thread.currentThread().interrupt();
      return;
    }

    line.start();

    // create the buffer
    byte[] buffer = new byte[bufferSize];

    // set this thread's locals
    localLine.set(line);
    localBuffer.set(buffer);
  }
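For reference, a worked sketch of the 100 ms buffer-size arithmetic used above, assuming 44.1 kHz, 16-bit, stereo PCM; the helper name and format object are illustrative, not part of the original class.

  // Illustrative only (not from the original source): the same 100 ms buffer-size
  // arithmetic for 44.1 kHz, 16-bit, stereo PCM.
  // frameSize = 2 bytes/sample * 2 channels = 4 bytes,
  // so 4 * Math.round(44100 / 10) = 4 * 4410 = 17640 bytes (about 100 ms of audio).
  static int exampleBufferSize() {
    AudioFormat fmt = new AudioFormat(44100f, 16, 2, true, false);
    return fmt.getFrameSize() * Math.round(fmt.getSampleRate() / 10);
  }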
Example #14
0
 /**
  * This method loads a (.wav) file into a WAVData object.
  *
  * @param filename The name of the (.wav) file
  * @return a WAVData object containing the audio data
  * @throws UnsupportedAudioFileException if the format of the audio is not supported.
  * @throws IOException If the file cannot be found or some other IO error occurs
  */
 public static WAVData loadFromFile(String filename)
     throws UnsupportedAudioFileException, IOException {
   WAVData result = null;
   File soundFile = new File(filename);
   AudioInputStream aIn = AudioSystem.getAudioInputStream(soundFile);
   return readFromStream(aIn);
 }
Example #15
0
  // open up an audio stream
  private static void init() {
    try {
      // 44,100 samples per second, 16-bit audio, mono, signed PCM, little
      // Endian
      AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false);
      DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

      line = (SourceDataLine) AudioSystem.getLine(info);
      line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE);

      // the internal buffer is a fraction of the actual buffer size; this choice is
      // arbitrary: it gets divided because we can't expect the buffered data to line
      // up exactly with when the sound card decides to push out its samples
      buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE / 3];
      listeners = new HashSet<AudioEventListener>();
    } catch (Exception e) {
      System.err.println("Error initializing StdAudio audio system:");
      e.printStackTrace();
      System.exit(1);
    }

    // no sound gets made before this call
    line.start();
  }
Example #16
0
  /**
   * Create preview component.
   *
   * @param type type
   * @param comboBox the options.
   * @param prefSize the preferred size
   * @return the component.
   */
  private static Component createPreview(int type, final JComboBox comboBox, Dimension prefSize) {
    JComponent preview = null;

    if (type == DeviceConfigurationComboBoxModel.AUDIO) {
      Object selectedItem = comboBox.getSelectedItem();

      if (selectedItem instanceof AudioSystem) {
        AudioSystem audioSystem = (AudioSystem) selectedItem;

        if (!NoneAudioSystem.LOCATOR_PROTOCOL.equalsIgnoreCase(audioSystem.getLocatorProtocol())) {
          preview = new TransparentPanel(new GridBagLayout());
          createAudioSystemControls(audioSystem, preview);
        }
      }
    } else if (type == DeviceConfigurationComboBoxModel.VIDEO) {
      JLabel noPreview =
          new JLabel(
              NeomediaActivator.getResources().getI18NString("impl.media.configform.NO_PREVIEW"));

      noPreview.setHorizontalAlignment(SwingConstants.CENTER);
      noPreview.setVerticalAlignment(SwingConstants.CENTER);

      preview = createVideoContainer(noPreview);
      preview.setPreferredSize(prefSize);

      Object selectedItem = comboBox.getSelectedItem();
      CaptureDeviceInfo device = null;
      if (selectedItem instanceof DeviceConfigurationComboBoxModel.CaptureDevice)
        device = ((DeviceConfigurationComboBoxModel.CaptureDevice) selectedItem).info;

      Exception exception;
      try {
        createVideoPreview(device, preview);
        exception = null;
      } catch (IOException ex) {
        exception = ex;
      } catch (MediaException ex) {
        exception = ex;
      }
      if (exception != null) {
        logger.error("Failed to create preview for device " + device, exception);
        device = null;
      }
    }

    return preview;
  }
  private void playAudio(int store)
  //  PRE:  store must correspond to one of the existing stores.
  //  POST: Plays the audio associated with store. All files should be located in
  //        the \audio\ file path.
  {
    AudioInputStream audio; // The audio stream.
    String filePath; // The file path to the audio file.
    File audioFile; // The audio file.
    Clip audioClip; // The audio clip.

    switch (store) // create file path for audio associated with store
    {
      case -1:
        filePath = ".\\audio\\click.wav";
        break;

      case 0:
        filePath = ".\\audio\\weaponSmith.wav";
        break;

      case 1:
        filePath = ".\\audio\\armorSmith.wav";
        break;

      case 2:
        filePath = ".\\audio\\accessoryMerchant.wav";
        break;

      default:
        filePath = ".\\audio\\generalMerchant.wav";
    }

    audioFile = null;

    try {
      audioClip = AudioSystem.getClip();
      audioFile = new File(filePath);
      audio = AudioSystem.getAudioInputStream(audioFile);
      audioClip.open(audio);
      audioClip.start();
    } catch (Exception e) // if there was an error playing the clip
    {
      System.err.println("Error playing clip!");
    }
  }
Example #18
0
 // Constructor: each enum constant is constructed with its own sound file.
 SoundEffect(String soundFileName) {
   try {
     // Use URL (instead of File) to read from disk and JAR.
     URL url = this.getClass().getClassLoader().getResource(soundFileName);
     // Set up an audio input stream piped from the sound file.
     AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(url);
     // Get a clip resource.
     clip = AudioSystem.getClip();
     // Open audio clip and load samples from the audio input stream.
     clip.open(audioInputStream);
   } catch (UnsupportedAudioFileException e) {
     e.printStackTrace();
   } catch (IOException e) {
     e.printStackTrace();
   } catch (LineUnavailableException e) {
     e.printStackTrace();
   }
 }
 public static void Play() {
   try {
     // Open an audio input stream.
     File soundFile = new File("foo.wav");
     AudioInputStream audioIn = AudioSystem.getAudioInputStream(soundFile);
     // Get a sound clip resource.
     Clip clip = AudioSystem.getClip();
     // Open audio clip and load samples from the audio input stream.
     clip.open(audioIn);
     clip.start();
   } catch (UnsupportedAudioFileException e) {
     e.printStackTrace();
   } catch (IOException e) {
     e.printStackTrace();
   } catch (LineUnavailableException e) {
     e.printStackTrace();
   }
 }
Example #20
0
  /** Does any clean up before closing. */
  protected void cleanUp() {
    // signal to unpause
    setPaused(false);

    // close the mixer (stops any running sounds)
    Mixer mixer = AudioSystem.getMixer(null);
    if (mixer.isOpen()) {
      mixer.close();
    }
  }
Example #21
0
  // Plays au, aiff, and wav audio streams; this method is essentially the code from the forum post
  private synchronized void play() {
    ByteArrayInputStream aMusicInputStream;
    AudioFormat format;
    AudioInputStream musicInputStream;
    byte[] audioSamples;
    SourceDataLine line;
    try {
      File MusicFile = new File(m_filename);

      musicInputStream = AudioSystem.getAudioInputStream(MusicFile); // get the audio input stream for the file
      format = musicInputStream.getFormat(); // get the format of the audio input stream
      audioSamples = getAudioSamples(musicInputStream, format); // get the audio samples

      aMusicInputStream = new ByteArrayInputStream(audioSamples);
      int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10);
      byte[] buffer = new byte[bufferSize];
      try {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, bufferSize);
      } catch (LineUnavailableException e) {
        e.printStackTrace();
        return;
      }

      if (!line.isRunning()) {
        line.start();
      }

      int numBytesRead = 0;
      while (numBytesRead != -1 && !m_stopped) {
        numBytesRead = aMusicInputStream.read(buffer, 0, buffer.length);
        if (numBytesRead != -1) {
          line.write(buffer, 0, numBytesRead);
        }
      }
      line.drain();
      line.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
Example #22
0
 private static void playWav(String name, boolean loop, double volume)
     throws FileNotFoundException, IOException, UnsupportedAudioFileException,
         LineUnavailableException {
   AudioInputStream ais = AudioSystem.getAudioInputStream(new File(path + name));
   Clip clip = AudioSystem.getClip();
   clip.open(ais);
   if (loop) {
     clip.loop(-1);
   }
   ((FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN))
       .setValue((float) (Math.log(volume) / Math.log(10.) * 20.));
   clip.start();
   wavMap.put(name, clip);
   //        // open the sound file as a Java input stream
   //        InputStream in = new FileInputStream(path + name);
   //        // create an audiostream from the inputstream
   //        AudioStream audioStream = new AudioStream(in);
   //        // play the audio clip with the audioplayer class
   //        AudioPlayer.player.start(audioStream);
   //        wavMap.put(name, audioStream);
 }
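The MASTER_GAIN line above maps a linear volume factor to decibels; a minimal sketch of the same conversion, using a hypothetical helper name not found in the original source. playWav could then pass linearToDecibels(volume) to the gain control.

  // Hypothetical helper (not from the original source) showing the gain conversion
  // used above: a linear volume of 1.0 maps to 0 dB, 0.5 to roughly -6 dB.
  private static float linearToDecibels(double volume) {
    return (float) (20.0 * Math.log10(volume));
  }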
     /** Configures echo cancellation, automatic gain control, and noise suppression effects. */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    private void configureEffects() {
      if (!AndroidUtils.hasAPI(16)) return;

      AudioSystem audioSystem =
          AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_AUDIORECORD);

      // Creates echo canceler if available
      if (AcousticEchoCanceler.isAvailable()) {
        AcousticEchoCanceler echoCanceller =
            AcousticEchoCanceler.create(audioRecord.getAudioSessionId());
        if (echoCanceller != null) {
          echoCanceller.setEnableStatusListener(this);
          echoCanceller.setEnabled(audioSystem.isEchoCancel());
          logger.info("Echo cancellation: " + echoCanceller.getEnabled());
        }
      }

      // Automatic gain control
      if (AutomaticGainControl.isAvailable()) {
        AutomaticGainControl agc = AutomaticGainControl.create(audioRecord.getAudioSessionId());
        if (agc != null) {
          agc.setEnableStatusListener(this);
          agc.setEnabled(audioSystem.isAutomaticGainControl());
          logger.info("Auto gain control: " + agc.getEnabled());
        }
      }

      // Creates noise suppressor if available
      if (NoiseSuppressor.isAvailable()) {
        NoiseSuppressor noiseSuppressor = NoiseSuppressor.create(audioRecord.getAudioSessionId());
        if (noiseSuppressor != null) {
          noiseSuppressor.setEnableStatusListener(this);
          noiseSuppressor.setEnabled(audioSystem.isDenoise());
          logger.info("Noise suppressor: " + noiseSuppressor.getEnabled());
        }
      }
    }
Example #24
0
 public MakoVM(int[] m) {
   this.m = m;
   try {
     AudioFormat format = new AudioFormat(8000f, 8, 1, false, false);
     DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
     soundLine = (SourceDataLine) AudioSystem.getLine(info);
     soundLine.open(format, 670);
     soundLine.start();
   } catch (IllegalArgumentException e) {
     System.out.println("Unable to initialize sound.");
   } catch (LineUnavailableException e) {
     e.printStackTrace();
   }
 }
Example #25
0
  public boolean load(File file) {

    this.file = file;

    if (file != null && file.isFile()) {
      try {
        errStr = null;
        audioInputStream = AudioSystem.getAudioInputStream(file);

        fileName = file.getName();

        format = audioInputStream.getFormat();

      } catch (Exception ex) {
        reportStatus(ex.toString());
        return false;
      }
    } else {
      reportStatus("Audio file required.");
      return false;
    }

    numChannels = format.getChannels();
    sampleRate = (double) format.getSampleRate();
    sampleBitSize = format.getSampleSizeInBits();
    long frameLength = audioInputStream.getFrameLength();
    long milliseconds = (long) ((frameLength * 1000) / audioInputStream.getFormat().getFrameRate());
    double audioFileDuration = milliseconds / 1000.0;

    if (audioFileDuration > MAX_AUDIO_DURATION) duration = MAX_AUDIO_DURATION;
    else duration = audioFileDuration;

    frameLength = (int) Math.floor((duration / audioFileDuration) * (double) frameLength);

    try {
      audioBytes = new byte[(int) frameLength * format.getFrameSize()];
      audioInputStream.read(audioBytes);
    } catch (Exception ex) {
      reportStatus(ex.toString());
      return false;
    }

    getAudioData();

    return true;
  }
Example #26
0
  public boolean play() {

    try {
      if (playState != STOPPED) playStop();

      if (audioBytes == null) return false;

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      clip = (Clip) AudioSystem.getLine(info);
      clip.addLineListener(new ClipListener());

      long clipStart = (long) (audioBytes.length * getStartTime() / (getDuration() * 1000.0));
      long clipEnd = (long) (audioBytes.length * getEndTime() / (getDuration() * 1000.0));
      if ((clipEnd - clipStart) > MAX_CLIP_LENGTH) clipEnd = clipStart + MAX_CLIP_LENGTH;
      byte[] clipBytes = new byte[(int) (clipEnd - clipStart)];
      System.arraycopy(audioBytes, (int) clipStart, clipBytes, 0, clipBytes.length);
      clip.open(format, clipBytes, 0, clipBytes.length);

      FloatControl panControl = (FloatControl) clip.getControl(FloatControl.Type.PAN);

      panControl.setValue((float) panSetting / 100.0f);

      double value = (double) gainSetting;

      FloatControl gainControl = (FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN);
      float dB = (float) (Math.log(value == 0.0 ? 0.0001 : value) / Math.log(10.0) * 20.0);
      gainControl.setValue(dB);
      double playStartTime = (player.getSeekTime() / 100) * (playGetLength());
      clip.setMicrosecondPosition((long) playStartTime);

      clip.start();

      playState = PLAYING;

      return true;

    } catch (Exception ex) {
      ex.printStackTrace();
      playState = STOPPED;
      clip = null;
      return false;
    }
  }
Example #27
0
 public SoundMixer(int id, Mixer.Info info) {
   this.id = id;
   this.info = info;
   this.mixer = AudioSystem.getMixer(info);
 }
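A hedged sketch (not from the original class) of how SoundMixer instances might be built by enumerating the installed mixers via the standard AudioSystem.getMixerInfo() call; the listSoundMixers name is illustrative.

 // Illustrative only (not from the original source): wrap every installed mixer.
 public static java.util.List<SoundMixer> listSoundMixers() {
   Mixer.Info[] infos = AudioSystem.getMixerInfo();
   java.util.List<SoundMixer> mixers = new java.util.ArrayList<SoundMixer>();
   for (int i = 0; i < infos.length; i++) {
     mixers.add(new SoundMixer(i, infos[i]));
   }
   return mixers;
 }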
Example #28
0
 /**
  * This method loads a (.wav) file into a WAVData object.
  *
  * @param stream An InputStream for the .WAV file.
  * @return a WAVData object containing the audio data
  * @throws UnsupportedAudioFileException if the format of the audio is not supported.
  * @throws IOException If the file cannot be found or some other IO error occurs
  */
 public static WAVData loadFromStream(InputStream stream)
     throws UnsupportedAudioFileException, IOException {
   WAVData result = null;
   AudioInputStream aIn = AudioSystem.getAudioInputStream(stream);
   return readFromStream(aIn);
 }
Example #29
0
 /**
  * Gets the maximum number of simultaneous sounds with the specified AudioFormat that the default
  * mixer can play.
  */
 public static int getMaxSimultaneousSounds(AudioFormat playbackFormat) {
   DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
   Mixer mixer = AudioSystem.getMixer(null);
   return mixer.getMaxLines(lineInfo);
 }
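A short usage sketch, assuming CD-quality 16-bit stereo PCM; the wrapper name and format are illustrative. Note that Mixer.getMaxLines may return AudioSystem.NOT_SPECIFIED when the default mixer does not report a fixed limit.

  // Illustrative only (not from the original source):
  public static void printMaxSimultaneousSounds() {
    // 44.1 kHz, 16-bit, stereo, signed, little-endian PCM
    AudioFormat cdFormat = new AudioFormat(44100f, 16, 2, true, false);
    int maxSounds = getMaxSimultaneousSounds(cdFormat);
    if (maxSounds == AudioSystem.NOT_SPECIFIED) {
      System.out.println("Default mixer reports no fixed line limit");
    } else {
      System.out.println("Up to " + maxSounds + " simultaneous sounds");
    }
  }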
Example #30
0
  /**
   * Creates the UI controls which are to control the details of a specific <tt>AudioSystem</tt>.
   *
   * @param audioSystem the <tt>AudioSystem</tt> for which the UI controls to control its details
   *     are to be created
   * @param container the <tt>JComponent</tt> into which the UI controls which are to control the
   *     details of the specified <tt>audioSystem</tt> are to be added
   */
  public static void createAudioSystemControls(AudioSystem audioSystem, JComponent container) {
    GridBagConstraints constraints = new GridBagConstraints();

    constraints.anchor = GridBagConstraints.NORTHWEST;
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.weighty = 0;

    int audioSystemFeatures = audioSystem.getFeatures();
    boolean featureNotifyAndPlaybackDevices =
        ((audioSystemFeatures & AudioSystem.FEATURE_NOTIFY_AND_PLAYBACK_DEVICES) != 0);

    constraints.gridx = 0;
    constraints.insets = new Insets(3, 0, 3, 3);
    constraints.weightx = 0;

    constraints.gridy = 0;
    container.add(
        new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_CAPTURE)), constraints);
    if (featureNotifyAndPlaybackDevices) {
      constraints.gridy = 2;
      container.add(
          new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK)), constraints);
      constraints.gridy = 3;
      container.add(
          new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_NOTIFY)), constraints);
    }

    constraints.gridx = 1;
    constraints.insets = new Insets(3, 3, 3, 0);
    constraints.weightx = 1;

    JComboBox captureCombo = null;

    if (featureNotifyAndPlaybackDevices) {
      captureCombo = new JComboBox();
      captureCombo.setEditable(false);
      captureCombo.setModel(
          new DeviceConfigurationComboBoxModel(
              captureCombo,
              mediaService.getDeviceConfiguration(),
              DeviceConfigurationComboBoxModel.AUDIO_CAPTURE));
      constraints.gridy = 0;
      container.add(captureCombo, constraints);
    }

    int anchor = constraints.anchor;
    SoundLevelIndicator capturePreview =
        new SoundLevelIndicator(
            SimpleAudioLevelListener.MIN_LEVEL, SimpleAudioLevelListener.MAX_LEVEL);

    constraints.anchor = GridBagConstraints.CENTER;
    constraints.gridy = (captureCombo == null) ? 0 : 1;
    container.add(capturePreview, constraints);
    constraints.anchor = anchor;

    constraints.gridy = GridBagConstraints.RELATIVE;

    if (featureNotifyAndPlaybackDevices) {
      JComboBox playbackCombo = new JComboBox();

      playbackCombo.setEditable(false);
      playbackCombo.setModel(
          new DeviceConfigurationComboBoxModel(
              captureCombo,
              mediaService.getDeviceConfiguration(),
              DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK));
      container.add(playbackCombo, constraints);

      JComboBox notifyCombo = new JComboBox();

      notifyCombo.setEditable(false);
      notifyCombo.setModel(
          new DeviceConfigurationComboBoxModel(
              captureCombo,
              mediaService.getDeviceConfiguration(),
              DeviceConfigurationComboBoxModel.AUDIO_NOTIFY));
      container.add(notifyCombo, constraints);
    }

    if ((AudioSystem.FEATURE_ECHO_CANCELLATION & audioSystemFeatures) != 0) {
      final SIPCommCheckBox echoCancelCheckBox =
          new SIPCommCheckBox(
              NeomediaActivator.getResources().getI18NString("impl.media.configform.ECHOCANCEL"));

      /*
       * First set the selected state, then add the listener, so that merely
       * showing the default value to the user does not cause it to be saved
       * as a modification.
       */
      echoCancelCheckBox.setSelected(mediaService.getDeviceConfiguration().isEchoCancel());
      echoCancelCheckBox.addItemListener(
          new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
              mediaService.getDeviceConfiguration().setEchoCancel(echoCancelCheckBox.isSelected());
            }
          });
      container.add(echoCancelCheckBox, constraints);
    }

    if ((AudioSystem.FEATURE_DENOISE & audioSystemFeatures) != 0) {
      final SIPCommCheckBox denoiseCheckBox =
          new SIPCommCheckBox(
              NeomediaActivator.getResources().getI18NString("impl.media.configform.DENOISE"));

      /*
       * First set the selected state, then add the listener, so that merely
       * showing the default value to the user does not cause it to be saved
       * as a modification.
       */
      denoiseCheckBox.setSelected(mediaService.getDeviceConfiguration().isDenoise());
      denoiseCheckBox.addItemListener(
          new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
              mediaService.getDeviceConfiguration().setDenoise(denoiseCheckBox.isSelected());
            }
          });
      container.add(denoiseCheckBox, constraints);
    }

    createAudioPreview(audioSystem, captureCombo, capturePreview);
  }