Example No. 1
  /*
   * Reads the given audio file (from the file system or, if it is not found there,
   * from the classpath) and returns its contents as a byte array.
   */
  private static byte[] readByte(String filename) {
    byte[] data = null;
    AudioInputStream ais = null;
    try {

      // try to read from file
      File file = new File(filename);
      if (file.exists()) {
        ais = AudioSystem.getAudioInputStream(file);
        data = new byte[ais.available()];
        ais.read(data);
      }

      // try to read from URL
      else {
        URL url = StdAudio.class.getResource(filename);
        ais = AudioSystem.getAudioInputStream(url);
        data = new byte[ais.available()];
        ais.read(data);
      }
    } catch (Exception e) {
      // preserve the original failure as the cause instead of only printing its message
      throw new RuntimeException("Could not read " + filename, e);
    }

    return data;
  }
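
Both read paths above size the buffer with ais.available() and issue a single read(), which is not guaranteed to return the whole stream. A minimal sketch of a more defensive variant, assuming Java 9 or later (readAllBytes() loops internally until the stream is exhausted); the class and method names are illustrative:

  import java.io.File;
  import java.io.IOException;
  import javax.sound.sampled.AudioInputStream;
  import javax.sound.sampled.AudioSystem;
  import javax.sound.sampled.UnsupportedAudioFileException;

  public class ReadAllAudioBytes {

    // Reads every byte of the decoded audio stream without relying on available().
    static byte[] readFully(File file) throws IOException, UnsupportedAudioFileException {
      try (AudioInputStream ais = AudioSystem.getAudioInputStream(file)) {
        return ais.readAllBytes();
      }
    }

    public static void main(String[] args) throws Exception {
      byte[] data = readFully(new File(args[0]));
      System.out.println("Read " + data.length + " bytes");
    }
  }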
Example No. 2
  public void beginExecution() {

    AudioFormat audioFormat =
        new AudioFormat(
            AudioFormat.Encoding.PCM_SIGNED, 44100.0F, 16, 1, numChannels, 44100.0F, false);
    // System.out.println("AudioPlayer.playAudioInts audio format: " + audioFormat );

    DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
    if (!AudioSystem.isLineSupported(dataLineInfo)) {
      System.out.println("AudioPlayer.playAudioInts does not " + " handle this type of audio.");
      return;
    }

    try {
      SourceDataLine sourceLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);

      sourceLine.open(audioFormat);
    } catch (LineUnavailableException e) {
      e.printStackTrace();
    }

    chunkIndex = 0;

    InitialExecution = true;
  }
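
Note that sourceLine above is a local variable that is opened but never started or stored, so the actual playback presumably happens elsewhere in the class. For context, a complete open/start/write/drain/close cycle of a SourceDataLine looks roughly like the standalone sketch below (hard-coded 44.1 kHz, 16-bit, mono PCM; not part of the class above):

  import javax.sound.sampled.AudioFormat;
  import javax.sound.sampled.AudioSystem;
  import javax.sound.sampled.DataLine;
  import javax.sound.sampled.LineUnavailableException;
  import javax.sound.sampled.SourceDataLine;

  public class PlaySilence {
    public static void main(String[] args) throws LineUnavailableException {
      // 44.1 kHz, 16-bit, mono, signed PCM, little endian
      AudioFormat format = new AudioFormat(44100.0f, 16, 1, true, false);
      DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
      if (!AudioSystem.isLineSupported(info)) return;

      SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
      line.open(format);
      line.start();                                    // no sound is produced before start()

      byte[] oneSecondOfSilence = new byte[44100 * 2]; // 2 bytes per mono 16-bit frame
      line.write(oneSecondOfSilence, 0, oneSecondOfSilence.length);

      line.drain();                                    // block until the buffer is empty
      line.close();
    }
  }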
Example No. 3
  /** Save the double array as a sound file (using .wav or .au format). */
  public static void save(String filename, double[] input) {
    // assumes 44,100 samples per second
    // use 16-bit audio, mono, signed PCM, little Endian
    AudioFormat format = new AudioFormat(SAMPLE_RATE, 16, 1, true, false);
    byte[] data = new byte[2 * input.length];
    for (int i = 0; i < input.length; i++) {
      int temp = (short) (input[i] * MAX_16_BIT);
      data[2 * i + 0] = (byte) temp;
      data[2 * i + 1] = (byte) (temp >> 8);
    }

    // now save the file
    try {
      ByteArrayInputStream bais = new ByteArrayInputStream(data);
      AudioInputStream ais = new AudioInputStream(bais, format, input.length);
      if (filename.endsWith(".wav") || filename.endsWith(".WAV")) {
        AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(filename));
      } else if (filename.endsWith(".au") || filename.endsWith(".AU")) {
        AudioSystem.write(ais, AudioFileFormat.Type.AU, new File(filename));
      } else {
        throw new RuntimeException("File format not supported: " + filename);
      }
    } catch (Exception e) {
      System.out.println(e);
      System.exit(1);
    }
  }
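
A hypothetical driver for save(), assuming it lives in the same class and that SAMPLE_RATE is 44100 and MAX_16_BIT is 32767 (the usual StdAudio constants): it synthesizes one second of a 440 Hz sine wave and writes it as a WAV file.

  public static void main(String[] args) {
    int sampleRate = 44100;                    // must match the SAMPLE_RATE used by save()
    double[] tone = new double[sampleRate];    // one second of audio
    for (int i = 0; i < tone.length; i++) {
      tone[i] = 0.5 * Math.sin(2 * Math.PI * 440 * i / (double) sampleRate);
    }
    save("tone440.wav", tone);                 // file name chosen for illustration
  }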
Example No. 4
  public static Clip loadClip(URL url) {
    Clip clip = null;
    String fnm = "" + url;
    try {
      AudioInputStream stream = AudioSystem.getAudioInputStream(url);
      AudioFormat format = stream.getFormat();

      if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
          || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
        AudioFormat newFormat =
            new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),
                format.getSampleSizeInBits() * 2,
                format.getChannels(),
                format.getFrameSize() * 2,
                format.getFrameRate(),
                true); // big endian
        stream = AudioSystem.getAudioInputStream(newFormat, stream);
        // System.out.println("Converted Audio format: " + newFormat);
        format = newFormat;
      }

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      // make sure sound system supports data line
      if (!AudioSystem.isLineSupported(info)) {
        System.out.println("Unsupported Clip File: " + fnm);
        return null;
      }
      // get clip line resource
      clip = (Clip) AudioSystem.getLine(info);
      clip.open(stream); // open the sound file as a clip
      stream.close(); // we're done with the input stream
      // duration (in secs) of the clip
      double duration = clip.getMicrosecondLength() / 1000000.0; // new
      if (duration <= 1.0) {
        System.out.println("WARNING. Duration <= 1 sec : " + duration + " secs");
        System.out.println(
            "         The clip in " + fnm + " may not play in J2SE 1.5 -- make it longer");
      }
      // else
      //  System.out.println(fnm + ": Duration: " + duration + " secs");
    } // end of try block
    catch (UnsupportedAudioFileException audioException) {
      System.out.println("Unsupported audio file: " + fnm);
    } catch (LineUnavailableException noLineException) {
      System.out.println("No audio line available for : " + fnm);
    } catch (IOException ioException) {
      System.out.println("Could not read: " + fnm);
    } catch (Exception e) {
      System.out.println("Problem with " + fnm);
    }
    return clip;
  } // end of loadClip()
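
A hypothetical caller, placed in the same class as loadClip(); the class name SoundDemo and the resource name /beep.wav are placeholders:

  public static void playOnce() {
    URL url = SoundDemo.class.getResource("/beep.wav");  // illustrative resource name
    Clip clip = loadClip(url);
    if (clip != null) {
      clip.setFramePosition(0);   // rewind in case the clip was played before
      clip.start();               // Clip playback is asynchronous
    }
  }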
Example No. 5
  /**
   * Checks whether the playback and notification configurations share the same device.
   *
   * @return true if audio output and notifications use the same device, false otherwise.
   */
  public boolean audioOutAndNotificationsShareSameDevice() {
    AudioSystem audioSystem = getDeviceConfiguration().getAudioSystem();
    CaptureDeviceInfo notify = audioSystem.getSelectedDevice(AudioSystem.DataFlow.NOTIFY);
    CaptureDeviceInfo playback = audioSystem.getSelectedDevice(AudioSystem.DataFlow.PLAYBACK);

    if (notify == null) return (playback == null);
    if (playback == null) return false;
    return notify.getLocator().equals(playback.getLocator());
  }
Example No. 6
  // open up an audio stream
  private static void init() {
    try {
      // 44,100 samples per second, 16-bit audio, mono, signed PCM, little
      // Endian
      AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false);
      DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

      line = (SourceDataLine) AudioSystem.getLine(info);
      line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE);

      // The internal buffer is a fraction of the actual buffer size (the choice
      // of divisor is arbitrary); it is divided because we can't expect the
      // buffered data to line up exactly with when the sound card decides to
      // push out its samples.
      buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE / 3];
      listeners = new HashSet<AudioEventListener>();
    } catch (Exception e) {
      System.err.println("Error initializing StdAudio audio system:");
      e.printStackTrace();
      System.exit(1);
    }

    // no sound gets made before this call
    line.start();
  }
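
A sketch of the companion play routine that typically accompanies this kind of init(): it queues one sample in [-1, +1] and flushes the internal buffer to the line when full. It assumes the static fields line and buffer from init(), plus a MAX_16_BIT constant of 32767 and a bufferSize counter, none of which are shown above.

  private static int bufferSize = 0;               // how many bytes of buffer are filled

  private static void play(double sample) {
    // clip to [-1, +1], then scale to a signed 16-bit value
    if (sample < -1.0) sample = -1.0;
    if (sample > +1.0) sample = +1.0;
    short s = (short) (MAX_16_BIT * sample);

    // little endian: low byte first
    buffer[bufferSize++] = (byte) s;
    buffer[bufferSize++] = (byte) (s >> 8);

    // hand the buffer to the sound card once it is full
    if (bufferSize >= buffer.length) {
      line.write(buffer, 0, buffer.length);
      bufferSize = 0;
    }
  }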
Example No. 7
  private void soundAbspielen(File sound) {
    if (!läuft) return;
    try {
      // decode the file and copy all of its frames into a byte array
      audioInputStream = AudioSystem.getAudioInputStream(sound);
      af = audioInputStream.getFormat();
      size = (int) (af.getFrameSize() * audioInputStream.getFrameLength());
      audio = new byte[size];
      info = new DataLine.Info(Clip.class, af, size);
      audioInputStream.read(audio, 0, size);

      // open the buffered data as a Clip and start asynchronous playback
      clip = (Clip) AudioSystem.getLine(info);
      clip.open(af, audio, 0, size);
      clip.start();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
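
If the caller needs to block until playback ends instead of returning immediately, a LineListener on the STOP event can be used. A self-contained sketch (the class name BlockingClipPlayer is illustrative):

  import java.io.File;
  import java.util.concurrent.CountDownLatch;
  import javax.sound.sampled.AudioInputStream;
  import javax.sound.sampled.AudioSystem;
  import javax.sound.sampled.Clip;
  import javax.sound.sampled.LineEvent;

  public class BlockingClipPlayer {
    public static void play(File sound) throws Exception {
      try (AudioInputStream in = AudioSystem.getAudioInputStream(sound)) {
        Clip clip = AudioSystem.getClip();       // avoids building a DataLine.Info by hand
        CountDownLatch done = new CountDownLatch(1);
        clip.addLineListener(event -> {
          if (event.getType() == LineEvent.Type.STOP) done.countDown();
        });
        clip.open(in);                           // Clip.open(AudioInputStream) buffers the data itself
        clip.start();
        done.await();                            // wait for the STOP event
        clip.close();
      }
    }
  }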
Example No. 8
  /**
   * Creates an SCAudioClip from the given URI and adds it to the list of available audios.
   *
   * @param uri the path where the audio file can be found
   * @param playback whether to use the playback device
   * @return a newly created <tt>SCAudioClip</tt> for <tt>uri</tt>
   */
  public SCAudioClip createAudio(String uri, boolean playback) {
    SCAudioClip audio;

    synchronized (audiosSyncRoot) {
      final AudioKey key = new AudioKey(uri, playback);

      /*
       * While we want to reuse the SCAudioClip instances, they may be
       * used by a single user at a time. That's why we'll forget about
       * them while they are in use and we'll reclaim them when they are
       * no longer in use.
       */
      audio = (audios == null) ? null : audios.remove(key);

      if (audio == null) {
        try {
          AudioSystem audioSystem = getDeviceConfiguration().getAudioSystem();

          if (audioSystem == null) {
            audio = new JavaSoundClipImpl(uri, this);
          } else if (NoneAudioSystem.LOCATOR_PROTOCOL.equalsIgnoreCase(
              audioSystem.getLocatorProtocol())) {
            audio = null;
          } else {
            audio = new AudioSystemClipImpl(uri, this, audioSystem, playback);
          }
        } catch (Throwable t) {
          if (t instanceof ThreadDeath) throw (ThreadDeath) t;
          else {
            /*
             * Could not initialize a new SCAudioClip instance to be
             * played.
             */
            return null;
          }
        }
      }

      /*
       * Make sure that the SCAudioClip will be reclaimed for reuse when
       * it is no longer in use.
       */
      if (audio != null) {
        if (audios == null) audios = new HashMap<AudioKey, SCAudioClip>();

        /*
         * We have to return it to the Map which was active at the time the
         * SCAudioClip was initialized because that Map may have become
         * invalid if the playback or notify audio device changed.
         */
        final Map<AudioKey, SCAudioClip> finalAudios = audios;
        final SCAudioClip finalAudio = audio;

        audio =
            new SCAudioClip() {
              /**
               * Evaluates a specific <tt>loopCondition</tt> as defined by {@link
               * SCAudioClip#play(int,Callable)}.
               *
               * @param loopCondition the <tt>Callable&lt;Boolean&gt;</tt> which represents the
               *     <tt>loopCondition</tt> to be evaluated
               * @return {@link Boolean#FALSE} if <tt>loopCondition</tt> is <tt>null</tt>;
               *     otherwise, the value returned by invoking {@link Callable#call()} on the
               *     specified <tt>loopCondition</tt>
               * @throws Exception if the specified <tt>loopCondition</tt> throws an
               *     <tt>Exception</tt>
               */
              private Boolean evaluateLoopCondition(Callable<Boolean> loopCondition)
                  throws Exception {
                /*
                 * SCAudioClip.play(int,Callable<Boolean>) is
                 * documented to play the SCAudioClip once only if
                 * the loopCondition is null. The same will be
                 * accomplished by returning Boolean.FALSE.
                 */
                return (loopCondition == null) ? Boolean.FALSE : loopCondition.call();
              }

              /**
               * {@inheritDoc}
               *
               * <p>Returns the wrapped <tt>SCAudioClip</tt> to the cache from which it was
               *     earlier retrieved, in order to allow its reuse.
               */
              @Override
              protected void finalize() throws Throwable {
                try {
                  // lock on the same monitor used elsewhere to guard the audios map
                  synchronized (audiosSyncRoot) {
                    finalAudios.put(key, finalAudio);
                  }
                } finally {
                  super.finalize();
                }
              }

              public void play() {
                /*
                 * SCAudioClip.play() is documented to behave as if
                 * loopInterval is negative and/or loopCondition is
                 * null. We have to take care that this instance
                 * does not get garbage collected until the
                 * finalAudio finishes playing so we will delegate
                 * to this instance's implementation of
                 * SCAudioClip.play(int,Callable<Boolean>) instead
                 * of to the finalAudio's.
                 */
                play(-1, null);
              }

              public void play(int loopInterval, final Callable<Boolean> finalLoopCondition) {
                /*
                 * We have to make sure that this instance does not
                 * get garbage collected before the finalAudio
                 * finishes playing. The argument loopCondition of
                 * the method
                 * SCAudioClip.play(int,Callable<Boolean>) will
                 * live/be referenced during that time so we will
                 * use it to hold on to this instance.
                 */
                Callable<Boolean> loopCondition =
                    new Callable<Boolean>() {
                      public Boolean call() throws Exception {
                        return evaluateLoopCondition(finalLoopCondition);
                      }
                    };

                finalAudio.play(loopInterval, loopCondition);
              }

              public void stop() {
                finalAudio.stop();
              }

              /**
               * Determines whether this audio is started i.e. a <tt>play</tt> method was invoked
               * and no subsequent <tt>stop</tt> has been invoked yet.
               *
               * @return <tt>true</tt> if this audio is started; otherwise, <tt>false</tt>
               */
              public boolean isStarted() {
                return finalAudio.isStarted();
              }
            };
      }
    }

    return audio;
  }
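
A hypothetical call site, using only the SCAudioClip methods that appear above (the URI string is a placeholder; real locator formats depend on the audio system in use):

  SCAudioClip ring = createAudio("file:/path/to/ring.wav", false);  // placeholder URI
  if (ring != null && !ring.isStarted()) {
    ring.play();   // documented above as equivalent to play(-1, null): play once, no looping
  }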