Example #1
  /** Signals that a PooledThread has started. Creates the Thread's line and buffer. */
  protected void threadStarted() {
    // wait for the SoundManager constructor to finish
    synchronized (this) {
      try {
        wait();
      } catch (InterruptedException ex) {
        // ignore; an interrupt here simply ends the wait early
      }
    }

    // use a short, 100ms (1/10th sec) buffer for filters that
    // change in real-time
    int bufferSize =
        playbackFormat.getFrameSize() * Math.round(playbackFormat.getSampleRate() / 10);

    // create, open, and start the line
    SourceDataLine line;
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
    try {
      line = (SourceDataLine) AudioSystem.getLine(lineInfo);
      line.open(playbackFormat, bufferSize);
    } catch (LineUnavailableException ex) {
      // the line is unavailable - signal to end this thread
      Thread.currentThread().interrupt();
      return;
    }

    line.start();

    // create the buffer
    byte[] buffer = new byte[bufferSize];

    // set this thread's locals
    localLine.set(line);
    localBuffer.set(buffer);
  }
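
The method above relies on SoundManager state that is not shown here: playbackFormat, localLine, and localBuffer. A minimal sketch of how those fields are presumably declared, inferred from the usage above rather than taken from the original source:

  // Assumed SoundManager fields (a sketch inferred from the usage above, not the original source)
  private AudioFormat playbackFormat; // set in the SoundManager constructor before the pooled threads start
  private ThreadLocal<SourceDataLine> localLine = new ThreadLocal<SourceDataLine>(); // per-thread playback line
  private ThreadLocal<byte[]> localBuffer = new ThreadLocal<byte[]>(); // per-thread copy buffer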
Example #2
  // open up an audio stream
  private static void init() {
    try {
      // 44,100 samples per second, 16-bit audio, mono, signed PCM, little
      // Endian
      AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false);
      DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

      line = (SourceDataLine) AudioSystem.getLine(info);
      line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE);

      // the internal buffer is a fraction of the actual buffer size (the
      // choice of one third is arbitrary); it is divided because we can't
      // expect the buffered data to line up exactly with when the sound
      // card decides to push out its samples
      buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE / 3];
      listeners = new HashSet<AudioEventListener>();
    } catch (Exception e) {
      System.err.println("Error initializing StdAudio audio system:");
      e.printStackTrace();
      System.exit(1);
    }

    // no sound gets made before this call
    line.start();
  }
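
Once init() has opened and started the line, samples reach the sound card by being converted to 16-bit little-endian PCM, accumulated in the shared buffer, and flushed to the line when the buffer fills. A hedged sketch of such a helper, assuming the static line and buffer fields above plus a write-position field; this is an illustration, not the actual StdAudio implementation:

  // Hypothetical helper showing how a sample in [-1, 1] would be pushed to the line opened in init()
  private static int bufferIndex = 0; // assumed write position into the shared buffer

  private static void playSample(double sample) {
    // clamp to [-1, 1] and scale to a signed 16-bit value
    short s = (short) (Short.MAX_VALUE * Math.max(-1.0, Math.min(1.0, sample)));
    buffer[bufferIndex++] = (byte) s;        // low byte first: little-endian PCM
    buffer[bufferIndex++] = (byte) (s >> 8); // high byte
    if (bufferIndex >= buffer.length) {      // buffer full: hand it to the sound card
      line.write(buffer, 0, buffer.length);
      bufferIndex = 0;
    }
  }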
Example #3
  public void run() {
    try {
      AudioInputStream ais = AudioSystem.getAudioInputStream(soundFile);
      AudioFormat format = ais.getFormat();
      //    System.out.println("Format: " + format);
      DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
      SourceDataLine source = (SourceDataLine) AudioSystem.getLine(info);
      source.open(format);
      source.start();
      byte[] audioData = new byte[16384];
      int read;
      // copy the stream to the line until end of stream (read() returns -1)
      while ((read = ais.read(audioData, 0, audioData.length)) > -1) {
        source.write(audioData, 0, read);
      }
      donePlaying = true;

      source.drain();
      source.close();
    } catch (Exception exc) {
      System.out.println("error: " + exc.getMessage());
      exc.printStackTrace();
    }
  }
Example #4
  public void beginExecution() {

    AudioFormat audioFormat =
        new AudioFormat(
            AudioFormat.Encoding.PCM_SIGNED, 44100.0F, 16, 1, numChannels, 44100.0F, false);
    // System.out.println("AudioPlayer.playAudioInts audio format: " + audioFormat );

    DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
    if (!AudioSystem.isLineSupported(dataLineInfo)) {
      System.out.println("AudioPlayer.playAudioInts does not " + " handle this type of audio.");
      return;
    }

    try {
      // presumably the opened line is stored in a field and started once playback begins
      SourceDataLine sourceLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);

      sourceLine.open(audioFormat);
    } catch (LineUnavailableException e) {
      e.printStackTrace();
    }

    chunkIndex = 0;

    InitialExecution = true;
  }
Example #5
  public static Clip loadClip(URL url) {
    Clip clip = null;
    String fnm = "" + url;
    try {
      AudioInputStream stream = AudioSystem.getAudioInputStream(url);
      AudioFormat format = stream.getFormat();

      if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
          || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
        AudioFormat newFormat =
            new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),
                format.getSampleSizeInBits() * 2,
                format.getChannels(),
                format.getFrameSize() * 2,
                format.getFrameRate(),
                true); // big endian
        stream = AudioSystem.getAudioInputStream(newFormat, stream);
        // System.out.println("Converted Audio format: " + newFormat);
        format = newFormat;
      }

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      // make sure sound system supports data line
      if (!AudioSystem.isLineSupported(info)) {
        System.out.println("Unsupported Clip File: " + fnm);
        return null;
      }
      // get clip line resource
      clip = (Clip) AudioSystem.getLine(info);
      clip.open(stream); // open the sound file as a clip
      stream.close(); // we're done with the input stream
      // duration (in secs) of the clip
      double duration = clip.getMicrosecondLength() / 1000000.0; // new
      if (duration <= 1.0) {
        System.out.println("WARNING. Duration <= 1 sec : " + duration + " secs");
        System.out.println(
            "         The clip in " + fnm + " may not play in J2SE 1.5 -- make it longer");
      }
      // else
      //  System.out.println(fnm + ": Duration: " + duration + " secs");
    } // end of try block
    catch (UnsupportedAudioFileException audioException) {
      System.out.println("Unsupported audio file: " + fnm);
    } catch (LineUnavailableException noLineException) {
      System.out.println("No audio line available for : " + fnm);
    } catch (IOException ioException) {
      System.out.println("Could not read: " + fnm);
    } catch (Exception e) {
      System.out.println("Problem with " + fnm);
    }
    return clip;
  } // end of loadClip()
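
Typical usage of loadClip() from an instance method, with an illustrative resource path:

  // Illustrative usage; the resource path is hypothetical
  Clip dingClip = loadClip(getClass().getResource("/sounds/ding.wav"));
  if (dingClip != null) {
    dingClip.setFramePosition(0); // rewind in case the clip has been played before
    dingClip.start();             // start() returns immediately; playback runs in the background
  }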
Example #6
 private void loadAndPlayAudio(String audioResource) {
   try (AudioInputStream soundStream =
       AudioSystem.getAudioInputStream(getClass().getResource(audioResource))) {
     DataLine.Info info = new DataLine.Info(Clip.class, soundStream.getFormat());
     Clip clip = (Clip) AudioSystem.getLine(info);
     clip.open(soundStream);
     clip.start();
   } catch (UnsupportedAudioFileException | IOException | LineUnavailableException ex) {
     ex.printStackTrace();
   }
 }
Example #7
 public MakoVM(int[] m) {
   this.m = m;
   try {
     AudioFormat format = new AudioFormat(8000f, 8, 1, false, false);
     DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
     soundLine = (SourceDataLine) AudioSystem.getLine(info);
     soundLine.open(format, 670);
     soundLine.start();
   } catch (IllegalArgumentException e) {
     System.out.println("Unable to initialize sound.");
   } catch (LineUnavailableException e) {
     e.printStackTrace();
   }
 }
Example #8
 public static void main(String[] args) {
   try {
     byte[] wav = Base64Coder.base64ToBinary(getWaveLiteral().toCharArray(), 0, WAV_SIZE);
     InputStream is = new ByteArrayInputStream(wav);
     AudioFormat fmt = AudioSystem.getAudioFileFormat(is).getFormat();
     AudioInputStream sound = AudioSystem.getAudioInputStream(is);
     DataLine.Info info = new DataLine.Info(Clip.class, fmt);
     Clip clip = (Clip) AudioSystem.getLine(info);
     clip.open(sound);
     clip.start();
     Thread.sleep(3000);
   } catch (Exception ex) {
     ex.printStackTrace();
   }
 }
Example #9
 // loads an entire sound file into memory and plays it as a Clip
 // ("soundAbspielen" = "play sound"; "läuft" = "running")
 private void soundAbspielen(File sound) {
   if (!läuft) return;
   try {
     audioInputStream = AudioSystem.getAudioInputStream(sound);
     af = audioInputStream.getFormat();
     size = (int) (af.getFrameSize() * audioInputStream.getFrameLength());
     audio = new byte[size];
     info = new DataLine.Info(Clip.class, af, size);
      // read() may return fewer bytes than requested, so loop until the whole file is buffered
      int bytesRead = 0;
      while (bytesRead < size) {
        int n = audioInputStream.read(audio, bytesRead, size - bytesRead);
        if (n < 0) break;
        bytesRead += n;
      }
     clip = (Clip) AudioSystem.getLine(info);
     clip.open(af, audio, 0, size);
     clip.start();
   } catch (Exception e) {
     e.printStackTrace();
   }
 }
Example #10
  public boolean play() {

    try {
      if (playState != STOPPED) playStop();

      if (audioBytes == null) return false;

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      clip = (Clip) AudioSystem.getLine(info);
      clip.addLineListener(new ClipListener());

      long clipStart = (long) (audioBytes.length * getStartTime() / (getDuration() * 1000.0));
      long clipEnd = (long) (audioBytes.length * getEndTime() / (getDuration() * 1000.0));
      if ((clipEnd - clipStart) > MAX_CLIP_LENGTH) clipEnd = clipStart + MAX_CLIP_LENGTH;
      byte[] clipBytes = new byte[(int) (clipEnd - clipStart)];
      System.arraycopy(audioBytes, (int) clipStart, clipBytes, 0, clipBytes.length);
      clip.open(format, clipBytes, 0, clipBytes.length);

      FloatControl panControl = (FloatControl) clip.getControl(FloatControl.Type.PAN);

      panControl.setValue((float) panSetting / 100.0f);

      double value = (double) gainSetting;

      FloatControl gainControl = (FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN);
      // convert the linear gain value to decibels: dB = 20 * log10(gain)
      float dB = (float) (Math.log(value == 0.0 ? 0.0001 : value) / Math.log(10.0) * 20.0);
      gainControl.setValue(dB);
      double playStartTime = (player.getSeekTime() / 100) * (playGetLength());
      clip.setMicrosecondPosition((long) playStartTime);

      clip.start();

      playState = PLAYING;

      return true;

    } catch (Exception ex) {
      ex.printStackTrace();
      playState = STOPPED;
      clip = null;
      return false;
    }
  }
Example #11
  // Plays au, aiff, and wav audio streams; this method is essentially the code from the original forum post
  private synchronized void play() {
    ByteArrayInputStream aMusicInputStream;
    AudioFormat format;
    AudioInputStream musicInputStream;
    byte[] audioSamples;
    SourceDataLine line;
    try {
      File musicFile = new File(m_filename);

      musicInputStream = AudioSystem.getAudioInputStream(musicFile); // get an audio input stream for the file
      format = musicInputStream.getFormat(); // get the format of the audio stream
      audioSamples = getAudioSamples(musicInputStream, format); // read the audio samples into memory

      aMusicInputStream = new ByteArrayInputStream(audioSamples);
      int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10);
      byte[] buffer = new byte[bufferSize];
      try {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, bufferSize);
      } catch (LineUnavailableException e) {
        e.printStackTrace();
        return;
      }

      if (!line.isRunning()) {
        line.start();
      }

      int numBytesRead = 0;
      while (numBytesRead != -1 && !m_stopped) {
        numBytesRead = aMusicInputStream.read(buffer, 0, buffer.length);
        if (numBytesRead != -1) {
          line.write(buffer, 0, numBytesRead);
        }
      }
      line.drain();
      line.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
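
The play() method above depends on a getAudioSamples(...) helper that is not shown. A minimal sketch of what such a helper presumably does, reading the whole stream into a byte array sized from the frame length and frame size (an assumption, not the original code; it relies on java.io.DataInputStream):

  // Hedged sketch of the missing helper: buffers the entire stream in memory
  private byte[] getAudioSamples(AudioInputStream stream, AudioFormat format) throws IOException {
    int length = (int) (stream.getFrameLength() * format.getFrameSize());
    byte[] samples = new byte[length];
    DataInputStream in = new DataInputStream(stream);
    in.readFully(samples); // read() can return short counts, so readFully() is used here
    return samples;
  }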