Example #1
  public static Clip loadClip(URL url) {
    Clip clip = null;
    String fnm = "" + url;
    try {
      AudioInputStream stream = AudioSystem.getAudioInputStream(url);
      AudioFormat format = stream.getFormat();

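      // convert ULAW/ALAW formats to PCM, doubling the sample and frame size
      // (typically 8-bit law-encoded data becomes 16-bit signed PCM)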
      if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
          || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
        AudioFormat newFormat =
            new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),
                format.getSampleSizeInBits() * 2,
                format.getChannels(),
                format.getFrameSize() * 2,
                format.getFrameRate(),
                true); // big endian
        stream = AudioSystem.getAudioInputStream(newFormat, stream);
        // System.out.println("Converted Audio format: " + newFormat);
        format = newFormat;
      }

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      // make sure sound system supports data line
      if (!AudioSystem.isLineSupported(info)) {
        System.out.println("Unsupported Clip File: " + fnm);
        return null;
      }
      // get clip line resource
      clip = (Clip) AudioSystem.getLine(info);
      clip.open(stream); // open the sound file as a clip
      stream.close(); // we're done with the input stream
      // duration (in secs) of the clip
      double duration = clip.getMicrosecondLength() / 1000000.0; // new
      if (duration <= 1.0) {
        System.out.println("WARNING. Duration <= 1 sec : " + duration + " secs");
        System.out.println(
            "         The clip in " + fnm + " may not play in J2SE 1.5 -- make it longer");
      }
      // else
      //  System.out.println(fnm + ": Duration: " + duration + " secs");
    } // end of try block
    catch (UnsupportedAudioFileException audioException) {
      System.out.println("Unsupported audio file: " + fnm);
    } catch (LineUnavailableException noLineException) {
      System.out.println("No audio line available for : " + fnm);
    } catch (IOException ioException) {
      System.out.println("Could not read: " + fnm);
    } catch (Exception e) {
      System.out.println("Problem with " + fnm);
    }
    return clip;
  } // end of loadClip()
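A minimal usage sketch for loadClip(). Both the class name ClipLoader (standing in for whichever class holds the method above) and the resource path /sounds/bleep.wav are placeholders, not taken from the original code.

  public static void main(String[] args) throws Exception {
    // hypothetical resource path; any short WAV on the classpath will do
    java.net.URL url = ClipLoader.class.getResource("/sounds/bleep.wav");
    javax.sound.sampled.Clip clip = ClipLoader.loadClip(url); // ClipLoader is a placeholder name
    if (clip != null) {
      clip.start(); // playback runs asynchronously
      Thread.sleep(clip.getMicrosecondLength() / 1000); // wait roughly until it finishes
      clip.close();
    }
  }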
Example #2
  public boolean load(File file) {

    this.file = file;

    if (file != null && file.isFile()) {
      try {
        errStr = null;
        audioInputStream = AudioSystem.getAudioInputStream(file);

        fileName = file.getName();

        format = audioInputStream.getFormat();

      } catch (Exception ex) {
        reportStatus(ex.toString());
        return false;
      }
    } else {
      reportStatus("Audio file required.");
      return false;
    }

    numChannels = format.getChannels();
    sampleRate = (double) format.getSampleRate();
    sampleBitSize = format.getSampleSizeInBits();
    long frameLength = audioInputStream.getFrameLength();
    long milliseconds = (long) ((frameLength * 1000) / format.getFrameRate());
    double audioFileDuration = milliseconds / 1000.0;

    if (audioFileDuration > MAX_AUDIO_DURATION) duration = MAX_AUDIO_DURATION;
    else duration = audioFileDuration;

    frameLength = (int) Math.floor((duration / audioFileDuration) * (double) frameLength);

    try {
      audioBytes = new byte[(int) frameLength * format.getFrameSize()];
      // read() may return fewer bytes than requested, so keep reading until the
      // buffer is full or the stream ends
      int offset = 0;
      while (offset < audioBytes.length) {
        int bytesRead = audioInputStream.read(audioBytes, offset, audioBytes.length - offset);
        if (bytesRead == -1) {
          break;
        }
        offset += bytesRead;
      }
    } catch (Exception ex) {
      reportStatus(ex.toString());
      return false;
    }

    getAudioData();

    return true;
  }
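The body of getAudioData() is not shown above. As a rough sketch, unpacking audioBytes into sample values could look like the helper below, assuming the data is 16-bit little-endian signed PCM (an assumption, not something the original code guarantees).

  // Sketch only: assumes audioBytes holds 16-bit little-endian signed PCM.
  private int[] toSamples16LE(byte[] audioBytes) {
    int[] samples = new int[audioBytes.length / 2];
    for (int i = 0; i < samples.length; i++) {
      int lo = audioBytes[2 * i] & 0xFF; // low byte, treated as unsigned
      int hi = audioBytes[2 * i + 1];    // high byte, keeps the sign
      samples[i] = (hi << 8) | lo;       // recombine into a signed 16-bit value
    }
    return samples;
  }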
Example #3
  private static WAVData readFromStream(AudioInputStream aIn)
      throws UnsupportedAudioFileException, IOException {
    ReadableByteChannel aChannel = Channels.newChannel(aIn);
    AudioFormat fmt = aIn.getFormat();
    int numChannels = fmt.getChannels();
    int bits = fmt.getSampleSizeInBits();
    int format = AL_FORMAT_MONO8;
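    // default; replaced below when the bit depth / channel count matches a supported combination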

    if ((bits == 8) && (numChannels == 1)) {
      format = AL_FORMAT_MONO8;
    } else if ((bits == 16) && (numChannels == 1)) {
      format = AL_FORMAT_MONO16;
    } else if ((bits == 8) && (numChannels == 2)) {
      format = AL_FORMAT_STEREO8;
    } else if ((bits == 16) && (numChannels == 2)) {
      format = AL_FORMAT_STEREO16;
    }

    int freq = Math.round(fmt.getSampleRate());
    int size = aIn.available();
    ByteBuffer buffer = ByteBuffer.allocateDirect(size);
    while (buffer.remaining() > 0) {
      // stop reading if the channel hits end-of-stream before 'size' bytes arrive
      if (aChannel.read(buffer) == -1) {
        break;
      }
    }
    buffer.rewind();

    // Must byte swap on big endian platforms
    // Thanks to swpalmer on javagaming.org forums for hint at fix
    if ((bits == 16) && (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN)) {
      int len = buffer.remaining();
      for (int i = 0; i < len; i += 2) {
        byte a = buffer.get(i);
        byte b = buffer.get(i + 1);
        buffer.put(i, b);
        buffer.put(i + 1, a);
      }
    }

    WAVData result = new WAVData(buffer, format, size, freq, false);
    aIn.close();

    return result;
  }
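A brief sketch of how readFromStream() might be called from inside the same class. The helper loadWAV() is hypothetical, not part of the original code; pass it the path of whatever WAV file you want to load.

  // Sketch: load a WAV file into a WAVData buffer using readFromStream() above.
  private static WAVData loadWAV(String path) throws UnsupportedAudioFileException, IOException {
    AudioInputStream in = AudioSystem.getAudioInputStream(new File(path));
    return readFromStream(in); // readFromStream() closes the stream when it is done
  }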
Example #4
  public static void main(String[] args) throws Exception {
    int midiDev = -1;
    int audioDev = -1;
    int latencyInMillis = 70;

    // parse arguments
    int argi = 0;
    while (argi < args.length) {
      String arg = args[argi];
      if (arg.equals("-h")) {
        printUsageAndExit();
      } else if (arg.equals("-m")) {
        argi++;
        if (argi >= args.length) {
          printUsageAndExit();
        }
        midiDev = Integer.parseInt(args[argi]);
      } else if (arg.equals("-a")) {
        argi++;
        if (argi >= args.length) {
          printUsageAndExit();
        }
        audioDev = Integer.parseInt(args[argi]);
      } else if (arg.equals("-l")) {
        argi++;
        if (argi >= args.length) {
          printUsageAndExit();
        }
        latencyInMillis = Integer.parseInt(args[argi]);
      } else {
        printUsageAndExit();
      }
      argi++;
    }

    // load samples
    final AudioFileSource[] src = new AudioFileSource[sounds.length];
    for (int i = 0; i < sounds.length; i++) {
      src[i] = new AudioFileSource(new File(sounds[i]));
    }
    // define the first source's audioformat as the master format
    final AudioFormat format = src[0].getFormat();

    // set up mixer
    final AudioMixer mixer = new AudioMixer(format.getChannels(), format.getSampleRate());

    // set up soundcard (sink)
    SoundcardSink sink = new SoundcardSink();
    // open the sink and connect it with the mixer
    sink.open(audioDev, latencyInMillis, format, mixer);
    try {

      // do we want to open a MIDI port?
      MidiIn midi = null;
      if (midiDev >= 0) {
        // start MIDI IN
        midi = new MidiIn();
        midi.setListener(
            new MidiIn.Listener() {
              public void midiInPlayed(int status, int data1, int data2) {
                // only react to NOTE ON messages with velocity > 0
                if (((status & 0xF0) == 0x90) && (data2 > 0)) {
                  AudioFileSource newSrc = src[data1 % src.length].makeClone();
                  mixer.addAudioStream(newSrc);
                  serviceMixer(mixer);
                }
              }

              public void midiInPlayed(byte[] message) {
                // nothing to do for long MIDI messages
              }
            });
        midi.open(midiDev);
      } else {
        Debug.debug("No MIDI.");
      }
      try {

        // start the sink -- from now on, the mixer is polled for new
        // data
        sink.start();

        System.out.println("Press ENTER for a sound, 'q'+ENTER to quit.");
        int currSrc = 0;
        while (true) {
          int c = System.in.read();
          // quit on 'q' or when standard input is closed (read() returns -1)
          if (c == 'q' || c == -1) {
            break;
          }
          AudioFileSource newSrc = src[(currSrc++) % src.length].makeClone();
          mixer.addAudioStream(newSrc);
          serviceMixer(mixer);
        }
      } finally {
        // clean-up
        if (midi != null) {
          midi.close();
        }
      }
    } finally {
      sink.close();
    }

    Debug.debug("done");
  }
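For reference, the test inside midiInPlayed() isolates NOTE ON messages; here is the same check pulled out as a standalone sketch (isNoteOn is a hypothetical helper, not part of the original code).

  // Sketch: the NOTE ON filter used by the MIDI listener above.
  static boolean isNoteOn(int status, int velocity) {
    return (status & 0xF0) == 0x90 // high nibble 0x9 = NOTE ON; low nibble is the channel
        && velocity > 0;           // velocity 0 is conventionally treated as NOTE OFF
  }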