Example no. 1
 public AudioSample getAudioSample(String filename, int bufferSize) {
   AudioInputStream ais = getAudioInputStream(filename);
   if (ais != null) {
     AudioMetaData meta = null;
     AudioFormat format = ais.getFormat();
     FloatSampleBuffer samples = null;
     if (format instanceof MpegAudioFormat) {
       AudioFormat baseFormat = format;
       format =
           new AudioFormat(
               AudioFormat.Encoding.PCM_SIGNED,
               baseFormat.getSampleRate(),
               16,
               baseFormat.getChannels(),
               baseFormat.getChannels() * 2,
               baseFormat.getSampleRate(),
               false);
       // converts the stream to PCM audio from mp3 audio
       ais = getAudioInputStream(format, ais);
       // get a map of properties so we can find out how long it is
       Map<String, Object> props = getID3Tags(filename);
       // there is a property called mp3.length.bytes, but that is
       // the length in bytes of the mp3 file, which will of course
       // be much shorter than the decoded version. so we use the
       // duration of the file to figure out how many bytes the
       // decoded file will be.
       long dur = ((Long) props.get("duration")).longValue();
       int toRead = (int) AudioUtils.millis2Bytes(dur / 1000, format);
       samples = loadFloatAudio(ais, toRead);
       meta = new MP3MetaData(filename, dur / 1000, props);
     } else {
       samples = loadFloatAudio(ais, (int) ais.getFrameLength() * format.getFrameSize());
       long length = AudioUtils.frames2Millis(samples.getSampleCount(), format);
       meta = new BasicMetaData(filename, length);
     }
     AudioSynthesizer out =
         getAudioSynthesizer(
             format.getChannels(),
             bufferSize,
             format.getSampleRate(),
             format.getSampleSizeInBits());
     if (out != null) {
       SampleSignal ssig = new SampleSignal(samples);
       out.setAudioSignal(ssig);
       return new JSAudioSample(meta, ssig, out);
     } else {
       error("Couldn't acquire an output.");
     }
   }
   return null;
 }
Example no. 2
  public AudioInputStream getAudioInputStream(
      AudioFormat.Encoding targetEncoding, AudioInputStream sourceStream) {
    AudioFormat sourceFormat = sourceStream.getFormat();
    AudioFormat.Encoding sourceEncoding = sourceFormat.getEncoding();

    if (sourceEncoding.equals(targetEncoding)) {
      return sourceStream;
    } else {
      AudioFormat targetFormat = null;
      if (!isConversionSupported(targetEncoding, sourceStream.getFormat())) {
        throw new IllegalArgumentException(
            "Unsupported conversion: "
                + sourceStream.getFormat().toString()
                + " to "
                + targetEncoding.toString());
      }
      if (AudioFormat.Encoding.ULAW.equals(sourceEncoding)
          && AudioFormat.Encoding.PCM_SIGNED.equals(targetEncoding)) {
        targetFormat =
            new AudioFormat(
                targetEncoding,
                sourceFormat.getSampleRate(),
                16,
                sourceFormat.getChannels(),
                2 * sourceFormat.getChannels(),
                sourceFormat.getSampleRate(),
                sourceFormat.isBigEndian());
      } else if (AudioFormat.Encoding.PCM_SIGNED.equals(sourceEncoding)
          && AudioFormat.Encoding.ULAW.equals(targetEncoding)) {
        targetFormat =
            new AudioFormat(
                targetEncoding,
                sourceFormat.getSampleRate(),
                8,
                sourceFormat.getChannels(),
                sourceFormat.getChannels(),
                sourceFormat.getSampleRate(),
                false);
      } else {
        throw new IllegalArgumentException(
            "Unsupported conversion: "
                + sourceStream.getFormat().toString()
                + " to "
                + targetEncoding.toString());
      }

      return getAudioInputStream(targetFormat, sourceStream);
    }
  }
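For comparison, the stock JavaSound API can perform the same encoding-targeted conversion in a single call; a minimal sketch under the assumption that the input is a ULAW or ALAW file on disk (the helper name and file argument are illustrative, not part of the example above):

  // Hedged sketch: decode a ULAW/ALAW source to signed PCM via AudioSystem.
  // Assumes 'file' points at an audio file JavaSound can open (e.g. an 8 kHz ULAW .au file).
  public static AudioInputStream toPcmSigned(java.io.File file) throws Exception {
    AudioInputStream encoded = AudioSystem.getAudioInputStream(file);
    // AudioSystem chooses a matching PCM_SIGNED target format itself
    return AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, encoded);
  }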
Example no. 3
  /*  public AudioFormat[] getOutputFormats(AudioFormat inputFormat) { */
  private AudioFormat[] getOutputFormats(AudioFormat inputFormat) {

    Vector formats = new Vector();
    AudioFormat format;

    if ((inputFormat.getSampleSizeInBits() == 16)
        && AudioFormat.Encoding.PCM_SIGNED.equals(inputFormat.getEncoding())) {
      format =
          new AudioFormat(
              AudioFormat.Encoding.ULAW,
              inputFormat.getSampleRate(),
              8,
              inputFormat.getChannels(),
              inputFormat.getChannels(),
              inputFormat.getSampleRate(),
              false);
      formats.addElement(format);
    }

    if (AudioFormat.Encoding.ULAW.equals(inputFormat.getEncoding())) {
      format =
          new AudioFormat(
              AudioFormat.Encoding.PCM_SIGNED,
              inputFormat.getSampleRate(),
              16,
              inputFormat.getChannels(),
              inputFormat.getChannels() * 2,
              inputFormat.getSampleRate(),
              false);
      formats.addElement(format);

      format =
          new AudioFormat(
              AudioFormat.Encoding.PCM_SIGNED,
              inputFormat.getSampleRate(),
              16,
              inputFormat.getChannels(),
              inputFormat.getChannels() * 2,
              inputFormat.getSampleRate(),
              true);
      formats.addElement(format);
    }

    AudioFormat[] formatArray = new AudioFormat[formats.size()];
    for (int i = 0; i < formatArray.length; i++) {
      formatArray[i] = (AudioFormat) (formats.elementAt(i));
    }
    return formatArray;
  }
Example no. 4
  /**
   * Construct a RawAudioFormat from an AudioFormat, assuming a WAV header of size WAV_HEADER_SIZE
   * (44) bytes.
   *
   * @param af AudioFormat (e.g. from AudioSystem.getAudioFileFormat(File)).
   */
  public RawAudioFormat(AudioFormat af) throws IOException {
    sr = (int) af.getFrameRate();
    br = af.getSampleSizeInBits();
    fs = br / 8;

    if (af.getChannels() > 1) throw new IOException("multi-channel files are not supported");

    if (af.getEncoding() == AudioFormat.Encoding.PCM_SIGNED) {
      signed = true;
      alaw = false;
      ulaw = false;
      hs = WAV_HEADER_SIZE;
    }
    if (af.getEncoding() == AudioFormat.Encoding.PCM_UNSIGNED) {
      signed = false;
      alaw = false;
      ulaw = false;
      hs = WAV_HEADER_SIZE;
    }
    if (af.getEncoding() == AudioFormat.Encoding.ALAW) {
      alaw = true;
      signed = true;
      ulaw = false;
      hs = WAV_HEADER_SIZE2;
    }
    if (af.getEncoding() == AudioFormat.Encoding.ULAW) {
      ulaw = true;
      signed = true;
      alaw = false;
      hs = WAV_HEADER_SIZE2;
    }
  }
Example no. 5
 private static boolean checkConversion(
     AudioFormat srcFormat, AudioFormat.Encoding targetEncoding, boolean neg) {
   AudioInputStream srcStream =
       new AudioInputStream(new ByteArrayInputStream(new byte[0]), srcFormat, -1);
   boolean couldConvert = true;
   try {
     AudioInputStream targetStream = AudioSystem.getAudioInputStream(targetEncoding, srcStream);
     // always a failure if src bits != target bits, or src channels !=
     // target channels
     AudioFormat targetFormat = targetStream.getFormat();
     if (!isSameBitsChannelSampleRate(srcFormat, targetFormat)) {
       System.out.println("ERROR");
       System.out.println(
           "  converted stream has "
               + targetFormat.getChannels()
               + " channels, "
               + targetFormat.getSampleSizeInBits()
               + " bits, and "
               + targetFormat.getSampleRate()
               + "Hz, "
               + " but source stream had "
               + srcFormat.getChannels()
               + " channels, "
               + srcFormat.getSampleSizeInBits()
               + " bits, and "
               + srcFormat.getSampleRate()
               + "Hz");
       return false;
     }
   } catch (Exception e) {
     couldConvert = false;
   }
   if (couldConvert == neg) {
     System.out.println("ERROR");
     System.out.println(
         "  can"
             + ((!couldConvert) ? "not" : "")
             + " convert from "
             + srcFormat
             + " to "
             + targetEncoding);
     return false;
   }
   System.out.println("OK");
   return true;
 }
Example no. 6
 /**
  * Test method for {@link net.sourceforge.gjtapi.protocols.JavaSoundParser#parse(java.net.URL)} .
  *
  * @exception Exception test failed.
  */
 @Test
 public void testParse() throws Exception {
   final URL url = new URL("playback://audio?rate=8000&channels=2&encoding=pcm");
   AudioFormat format = JavaSoundParser.parse(url);
   Assert.assertEquals(new Float(8000.0), new Float(format.getSampleRate()));
   Assert.assertEquals(2, format.getChannels());
   Assert.assertEquals(AudioFormat.Encoding.PCM_SIGNED, format.getEncoding());
 }
Example no. 7
 public AudioRecordingStream getAudioRecordingStream(String filename, int bufferSize) {
   AudioRecordingStream mstream = null;
    if (getAudioInputStream(filename) != null)
     debug("Reading from " + getAudioInputStream(filename).getClass().toString());
   if (getAudioInputStream(filename) != null) {
     debug("File format is: " + getAudioInputStream(filename).getFormat().toString());
     AudioFormat format = getAudioInputStream(filename).getFormat();
     // special handling for mp3 files because
     // they need to be converted to PCM
     if (format instanceof MpegAudioFormat) {
       AudioFormat baseFormat = format;
       format =
           new AudioFormat(
               AudioFormat.Encoding.PCM_SIGNED,
               baseFormat.getSampleRate(),
               16,
               baseFormat.getChannels(),
               baseFormat.getChannels() * 2,
               baseFormat.getSampleRate(),
               false);
       // converts the stream to PCM audio from mp3 audio
       AudioInputStream decAis = getAudioInputStream(format, getAudioInputStream(filename));
       // source data line is for sending the file audio out to the
       // speakers
       SourceDataLine line = getSourceDataLine(format, bufferSize);
       if (decAis != null && line != null) {
         Map<String, Object> props = getID3Tags(filename);
         long lengthInMillis = -1;
         if (props.containsKey("duration")) {
           Long dur = (Long) props.get("duration");
           if (dur.longValue() > 0) {
             lengthInMillis = dur.longValue() / 1000;
           }
         }
         MP3MetaData meta = new MP3MetaData(filename, lengthInMillis, props);
         mstream =
             new JSMPEGAudioRecordingStream(
                 this, meta, getAudioInputStream(filename), decAis, line, bufferSize);
       }
     } // format instanceof MpegAudioFormat
     else {
       // source data line is for sending the file audio out to the
       // speakers
       SourceDataLine line = getSourceDataLine(format, bufferSize);
       if (line != null) {
         long length =
             AudioUtils.frames2Millis(getAudioInputStream(filename).getFrameLength(), format);
         BasicMetaData meta = new BasicMetaData(filename, length);
         mstream =
             new JSPCMAudioRecordingStream(
                 this, meta, getAudioInputStream(filename), line, bufferSize);
       }
     } // else
   } // ais != null
   return mstream;
 }
Example no. 8
 private static boolean checkDirect(AudioFormat srcFormat, boolean neg) {
   AudioFormat targetFormat =
       new AudioFormat(
           srcFormat.getSampleRate(),
           srcFormat.getSampleSizeInBits(),
           srcFormat.getChannels(),
           true,
           false);
   return checkConversion(srcFormat, targetFormat, neg);
 }
 /**
  * Store an AudioFormat
  *
  * @param audioFormat
  */
 public AudioFormatTransport(AudioFormat audioFormat) {
   _channels = audioFormat.getChannels();
   _encoding = audioFormat.getEncoding().toString();
   _frameRate = audioFormat.getFrameRate();
   _frameSize = audioFormat.getFrameSize();
   _sampleRate = audioFormat.getSampleRate();
   _sampleSizeInBits = audioFormat.getSampleSizeInBits();
   _isBigEndian = audioFormat.isBigEndian();
   _properties = audioFormat.properties();
 }
Example no. 10
  public static Clip loadClip(URL url) {
    Clip clip = null;
    String fnm = "" + url;
    try {
      AudioInputStream stream = AudioSystem.getAudioInputStream(url);
      AudioFormat format = stream.getFormat();

      if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
          || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
        AudioFormat newFormat =
            new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),
                format.getSampleSizeInBits() * 2,
                format.getChannels(),
                format.getFrameSize() * 2,
                format.getFrameRate(),
                true); // big endian
        stream = AudioSystem.getAudioInputStream(newFormat, stream);
        // System.out.println("Converted Audio format: " + newFormat);
        format = newFormat;
      }

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      // make sure sound system supports data line
      if (!AudioSystem.isLineSupported(info)) {
        System.out.println("Unsupported Clip File: " + fnm);
        return null;
      }
      // get clip line resource
      clip = (Clip) AudioSystem.getLine(info);
      clip.open(stream); // open the sound file as a clip
      stream.close(); // we're done with the input stream
      // duration (in secs) of the clip
      double duration = clip.getMicrosecondLength() / 1000000.0; // new
      if (duration <= 1.0) {
        System.out.println("WARNING. Duration <= 1 sec : " + duration + " secs");
        System.out.println(
            "         The clip in " + fnm + " may not play in J2SE 1.5 -- make it longer");
      }
      // else
      //  System.out.println(fnm + ": Duration: " + duration + " secs");
    } // end of try block
    catch (UnsupportedAudioFileException audioException) {
      System.out.println("Unsupported audio file: " + fnm);
    } catch (LineUnavailableException noLineException) {
      System.out.println("No audio line available for : " + fnm);
    } catch (IOException ioException) {
      System.out.println("Could not read: " + fnm);
    } catch (Exception e) {
      System.out.println("Problem with " + fnm);
    }
    return clip;
  } // end of loadClip()
Example no. 11
  public void open(AudioInputStream stream) throws IOException, LineUnavailableException {

    AudioInputStream is1;
    format = stream.getFormat();

    if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
      is1 = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, stream);
    } else {
      is1 = stream;
    }
    format = is1.getFormat();
    InputStream is2;
    if (parent != null) {
      ProgressMonitorInputStream pmis =
          new ProgressMonitorInputStream(parent, "Loading track..", is1);
      pmis.getProgressMonitor().setMillisToPopup(0);
      is2 = pmis;
    } else {
      is2 = is1;
    }

    byte[] buf = new byte[1 << 16]; // 64 KiB read buffer
    int totalRead = 0;
    int numRead = 0;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    while ((numRead = is2.read(buf, 0, buf.length)) > -1) {
      baos.write(buf, 0, numRead);
      totalRead += numRead;
    }
    is2.close();
    audioData = baos.toByteArray();
    AudioFormat afTemp;
    if (format.getChannels() < 2) {
      afTemp =
          new AudioFormat(
              format.getEncoding(),
              format.getSampleRate(),
              format.getSampleSizeInBits(),
              2,
              format.getSampleSizeInBits() * 2 / 8, // calculate frame size
              format.getFrameRate(),
              format.isBigEndian());
    } else {
      afTemp = format;
    }

    setLoopPoints(0, audioData.length);
    dataLine = AudioSystem.getSourceDataLine(afTemp);
    dataLine.open();
    inputStream = new ByteArrayInputStream(audioData);
  }
Example no. 12
  // http://stackoverflow.com/questions/13789063/get-sound-from-a-url-with-java
  private void playMP3(final String url) {

    try {

      // Create the JavaFX Panel for the WebView
      JFXPanel fxPanel = new JFXPanel();
      fxPanel.setLocation(new Point(0, 0));

      // Initialize the webView in a JavaFX-Thread
      Platform.runLater(
          new Runnable() {
            public void run() {
              MediaPlayer player = new MediaPlayer(new Media(url));
              player.play();
            }
          });

      if (true) return; // early exit: the JavaSound decode path below is unreachable, kept only for reference

      AudioInputStream in = AudioSystem.getAudioInputStream(new URL(url));
      AudioFormat baseFormat = in.getFormat();
      AudioFormat decodedFormat =
          new AudioFormat(
              AudioFormat.Encoding.PCM_SIGNED,
              baseFormat.getSampleRate(),
              16,
              baseFormat.getChannels(),
              baseFormat.getChannels() * 2,
              baseFormat.getSampleRate(),
              false);
      AudioInputStream din = AudioSystem.getAudioInputStream(decodedFormat, in);
      DataLine.Info info = new DataLine.Info(SourceDataLine.class, decodedFormat);
      SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
      if (line != null) {
        line.open(decodedFormat);
        byte[] data = new byte[4096];
        // Start
        line.start();

        int nBytesRead;
        while ((nBytesRead = din.read(data, 0, data.length)) != -1) {
          line.write(data, 0, nBytesRead);
        }
        // Stop
        line.drain();
        line.stop();
        line.close();
        din.close();
      }
    } catch (Exception e) {
      App.debug("playing MP3 failed " + url + " " + e.toString());
    }
  }
 private AudioInputStream toLittleEndian(AudioInputStream ais) {
   AudioFormat format = ais.getFormat();
   AudioFormat targetFormat =
       new AudioFormat(
           format.getEncoding(),
           format.getSampleRate(),
           format.getSampleSizeInBits(),
           format.getChannels(),
           format.getFrameSize(),
           format.getFrameRate(),
           false);
   return AudioSystem.getAudioInputStream(targetFormat, ais);
 }
 private void adjustConfigurations(AudioFormat format) {
   int sampleRate = (int) format.getSampleRate();
   int sampleSize = (int) format.getSampleSizeInBits();
   int channels = (int) format.getChannels();
   // int blockSize = sc.getMaxBlockSize();
   /*
    * sc = new StreamConfiguration(channels, blockSize, blockSize,
    * sampleRate, sampleSize);
    */
   sc.setSampleRate(sampleRate);
   sc.setBitsPerSample(sampleSize);
   sc.setChannelCount(channels);
 }
Example no. 15
 /**
  * Inits a DataLine.<br>
  * We check if the line supports Gain and Pan controls.
  *
  * <p>From the AudioInputStream, i.e. from the sound file, we fetch information about the format
  * of the audio data. This includes the sampling frequency, the number of channels and the size of
  * the samples. That information is needed to ask JavaSound for a suitable output line for this
  * audio file. Furthermore, we have to give JavaSound a hint about how big the internal buffer for
  * the line should be. Here, we say AudioSystem.NOT_SPECIFIED, signaling that we don't care about
  * the exact size; JavaSound will use some default value for the buffer size.
  */
 protected void createLine() throws LineUnavailableException {
   log.info("Create Line");
   if (m_line == null) {
     AudioFormat sourceFormat = m_audioInputStream.getFormat();
     log.info("Create Line : Source format : " + sourceFormat.toString());
     int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
     if (nSampleSizeInBits <= 0) {
       nSampleSizeInBits = 16;
     }
     if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW)
         || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW)) {
       nSampleSizeInBits = 16;
     }
     if (nSampleSizeInBits != 8) {
       nSampleSizeInBits = 16;
     }
     AudioFormat targetFormat =
         new AudioFormat(
             AudioFormat.Encoding.PCM_SIGNED,
             sourceFormat.getSampleRate(),
             nSampleSizeInBits,
             sourceFormat.getChannels(),
             sourceFormat.getChannels() * (nSampleSizeInBits / 8),
             sourceFormat.getSampleRate(),
             false);
     log.info("Create Line : Target format: " + targetFormat);
     // Keep a reference on encoded stream to progress notification.
     m_encodedaudioInputStream = m_audioInputStream;
     try {
       // Get total length in bytes of the encoded stream.
       encodedLength = m_encodedaudioInputStream.available();
     } catch (IOException e) {
       log.log(Level.SEVERE, "Cannot get m_encodedaudioInputStream.available()", e);
     }
     // Create decoded stream.
     m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
     AudioFormat audioFormat = m_audioInputStream.getFormat();
     DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, -1);
     Mixer mixer = getMixer(m_mixerName);
     if (mixer != null) {
       log.info("Mixer : " + mixer.getMixerInfo().toString());
       m_line = (SourceDataLine) mixer.getLine(info);
     } else {
       m_line = (SourceDataLine) AudioSystem.getLine(info);
       m_mixerName = null;
     }
     log.info("Line : " + m_line.toString());
     log.info("Line Info : " + m_line.getLineInfo().toString());
     log.info("Line AudioFormat: " + m_line.getFormat().toString());
   }
 }
Example no. 16
 /**
  * Numbers used here are verbatim from Javazoom
  *
  * @param baseFormat
  * @return
  */
 private AudioFormat getDecodedFormat(AudioFormat baseFormat) {
   // Do we need to "decode" the base format?
   if (AudioFormat.Encoding.PCM_SIGNED.equals(baseFormat.getEncoding())
       || AudioFormat.Encoding.PCM_UNSIGNED.equals(baseFormat.getEncoding())) {
     return baseFormat;
   }
   return new AudioFormat(
       AudioFormat.Encoding.PCM_SIGNED,
       baseFormat.getSampleRate(),
       16,
       baseFormat.getChannels(),
       baseFormat.getChannels() * 2,
       baseFormat.getSampleRate(),
       false);
 }
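In the other examples of this collection the decoded format is handed straight back to AudioSystem to obtain a PCM stream; a minimal sketch of that pattern (the decode helper name is an assumption, getDecodedFormat is the method above):

  // Hedged sketch: wrap an encoded stream in a PCM-decoded stream using getDecodedFormat.
  private AudioInputStream decode(AudioInputStream encodedStream) {
    AudioFormat decodedFormat = getDecodedFormat(encodedStream.getFormat());
    return AudioSystem.getAudioInputStream(decodedFormat, encodedStream);
  }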
Example no. 17
 public AudioRecording getAudioRecordingClip(String filename) {
   Clip clip = null;
   AudioMetaData meta = null;
   AudioInputStream ais = getAudioInputStream(filename);
   if (ais != null) {
     AudioFormat format = ais.getFormat();
     if (format instanceof MpegAudioFormat) {
       AudioFormat baseFormat = format;
       format =
           new AudioFormat(
               AudioFormat.Encoding.PCM_SIGNED,
               baseFormat.getSampleRate(),
               16,
               baseFormat.getChannels(),
               baseFormat.getChannels() * 2,
               baseFormat.getSampleRate(),
               false);
       // converts the stream to PCM audio from mp3 audio
       ais = getAudioInputStream(format, ais);
     }
     DataLine.Info info = new DataLine.Info(Clip.class, ais.getFormat());
     if (AudioSystem.isLineSupported(info)) {
       // Obtain and open the line.
       try {
         clip = (Clip) AudioSystem.getLine(info);
         clip.open(ais);
       } catch (Exception e) {
         error("Error obtaining Javasound Clip: " + e.getMessage());
         return null;
       }
       Map<String, Object> props = getID3Tags(filename);
       long lengthInMillis = -1;
       if (props.containsKey("duration")) {
         Long dur = (Long) props.get("duration");
         lengthInMillis = dur.longValue() / 1000;
       }
       meta = new MP3MetaData(filename, lengthInMillis, props);
     } else {
       error("File format not supported.");
       return null;
     }
   }
    if (clip == null) {
      // getAudioInputStream failed, so there is nothing to play
      return null;
    }
    if (meta == null) {
      // this means we're dealing with not-an-mp3
      meta = new BasicMetaData(filename, clip.getMicrosecondLength() / 1000);
    }
   return new JSAudioRecordingClip(clip, meta);
 }
Example no. 18
  public boolean load(File file) {

    this.file = file;

    if (file != null && file.isFile()) {
      try {
        errStr = null;
        audioInputStream = AudioSystem.getAudioInputStream(file);

        fileName = file.getName();

        format = audioInputStream.getFormat();

      } catch (Exception ex) {
        reportStatus(ex.toString());
        return false;
      }
    } else {
      reportStatus("Audio file required.");
      return false;
    }

    numChannels = format.getChannels();
    sampleRate = (double) format.getSampleRate();
    sampleBitSize = format.getSampleSizeInBits();
    long frameLength = audioInputStream.getFrameLength();
    long milliseconds = (long) ((frameLength * 1000) / audioInputStream.getFormat().getFrameRate());
    double audioFileDuration = milliseconds / 1000.0;

    if (audioFileDuration > MAX_AUDIO_DURATION) duration = MAX_AUDIO_DURATION;
    else duration = audioFileDuration;

    frameLength = (int) Math.floor((duration / audioFileDuration) * (double) frameLength);

    try {
      audioBytes = new byte[(int) frameLength * format.getFrameSize()];
      audioInputStream.read(audioBytes);
    } catch (Exception ex) {
      reportStatus(ex.toString());
      return false;
    }

    getAudioData();

    return true;
  }
  public void write(AudioInputStream stream, RIFFWriter writer) throws IOException {

    RIFFWriter fmt_chunk = writer.writeChunk("fmt ");

    AudioFormat format = stream.getFormat();
    fmt_chunk.writeUnsignedShort(3); // WAVE_FORMAT_IEEE_FLOAT
    fmt_chunk.writeUnsignedShort(format.getChannels());
    fmt_chunk.writeUnsignedInt((int) format.getSampleRate());
    fmt_chunk.writeUnsignedInt(((int) format.getFrameRate()) * format.getFrameSize());
    fmt_chunk.writeUnsignedShort(format.getFrameSize());
    fmt_chunk.writeUnsignedShort(format.getSampleSizeInBits());
    fmt_chunk.close();
    RIFFWriter data_chunk = writer.writeChunk("data");
    byte[] buff = new byte[1024];
    int len;
    while ((len = stream.read(buff, 0, buff.length)) != -1) data_chunk.write(buff, 0, len);
    data_chunk.close();
  }
Example no. 20
  private static WAVData readFromStream(AudioInputStream aIn)
      throws UnsupportedAudioFileException, IOException {
    ReadableByteChannel aChannel = Channels.newChannel(aIn);
    AudioFormat fmt = aIn.getFormat();
    int numChannels = fmt.getChannels();
    int bits = fmt.getSampleSizeInBits();
    int format = AL_FORMAT_MONO8;

    if ((bits == 8) && (numChannels == 1)) {
      format = AL_FORMAT_MONO8;
    } else if ((bits == 16) && (numChannels == 1)) {
      format = AL_FORMAT_MONO16;
    } else if ((bits == 8) && (numChannels == 2)) {
      format = AL_FORMAT_STEREO8;
    } else if ((bits == 16) && (numChannels == 2)) {
      format = AL_FORMAT_STEREO16;
    }

    int freq = Math.round(fmt.getSampleRate());
    int size = aIn.available();
    ByteBuffer buffer = ByteBuffer.allocateDirect(size);
    while (buffer.remaining() > 0) {
      aChannel.read(buffer);
    }
    buffer.rewind();

    // Must byte swap on big endian platforms
    // Thanks to swpalmer on javagaming.org forums for hint at fix
    if ((bits == 16) && (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN)) {
      int len = buffer.remaining();
      for (int i = 0; i < len; i += 2) {
        byte a = buffer.get(i);
        byte b = buffer.get(i + 1);
        buffer.put(i, b);
        buffer.put(i + 1, a);
      }
    }

    WAVData result = new WAVData(buffer, format, size, freq, false);
    aIn.close();

    return result;
  }
Example no. 21
 public AudioRecording getAudioRecording(String filename) {
   AudioMetaData meta = null;
   AudioInputStream ais = getAudioInputStream(filename);
   byte[] samples;
   if (ais != null) {
     AudioFormat format = ais.getFormat();
     if (format instanceof MpegAudioFormat) {
       AudioFormat baseFormat = format;
       format =
           new AudioFormat(
               AudioFormat.Encoding.PCM_SIGNED,
               baseFormat.getSampleRate(),
               16,
               baseFormat.getChannels(),
               baseFormat.getChannels() * 2,
               baseFormat.getSampleRate(),
               false);
       // converts the stream to PCM audio from mp3 audio
       ais = getAudioInputStream(format, ais);
        // get a map of properties so we can find out how long it is
       Map<String, Object> props = getID3Tags(filename);
       // there is a property called mp3.length.bytes, but that is
       // the length in bytes of the mp3 file, which will of course
       // be much shorter than the decoded version. so we use the
       // duration of the file to figure out how many bytes the
       // decoded file will be.
       long dur = ((Long) props.get("duration")).longValue();
       int toRead = (int) AudioUtils.millis2Bytes(dur / 1000, format);
       samples = loadByteAudio(ais, toRead);
       meta = new MP3MetaData(filename, dur / 1000, props);
     } else {
       samples = loadByteAudio(ais, (int) ais.getFrameLength() * format.getFrameSize());
       long length = AudioUtils.bytes2Millis(samples.length, format);
       meta = new BasicMetaData(filename, length);
     }
     SourceDataLine line = getSourceDataLine(format, 2048);
     if (line != null) {
       return new JSAudioRecording(this, samples, line, meta);
     }
   }
   return null;
 }
  /** Convert javax.sound.sampled.AudioFormat to javax.media.format.AudioFormat. */
  public static AudioFormat convertFormat(javax.sound.sampled.AudioFormat format) {

    Encoding encoding = format.getEncoding();
    int channels = format.getChannels();
    float frameRate = format.getFrameRate();
    int frameSize = format.getFrameSize() < 0 ? format.getFrameSize() : (format.getFrameSize() * 8);
    float sampleRate = format.getSampleRate();
    int sampleSize = format.getSampleSizeInBits();

    int endian = format.isBigEndian() ? AudioFormat.BIG_ENDIAN : AudioFormat.LITTLE_ENDIAN;

    int signed = AudioFormat.NOT_SPECIFIED;
    String encodingString = AudioFormat.LINEAR;

    if (encoding == Encoding.PCM_SIGNED) {
      signed = AudioFormat.SIGNED;
      encodingString = AudioFormat.LINEAR;
    } else if (encoding == Encoding.PCM_UNSIGNED) {
      signed = AudioFormat.UNSIGNED;
      encodingString = AudioFormat.LINEAR;
    } else if (encoding == Encoding.ALAW) {
      encodingString = AudioFormat.ALAW;
    } else if (encoding == Encoding.ULAW) {
      encodingString = AudioFormat.ULAW;
    } else {
      encodingString = encoding.toString();
    }

    AudioFormat jmfFormat =
        new AudioFormat(
            encodingString,
            (double) sampleRate,
            sampleSize,
            channels,
            endian,
            signed,
            frameSize,
            frameRate,
            AudioFormat.byteArray);

    return jmfFormat;
  }
  /**
   * Private method to load audio file
   *
   * @throws LineUnavailableException
   * @throws IOException
   * @throws UnsupportedAudioFileException
   */
  private void load() throws LineUnavailableException, UnsupportedAudioFileException, IOException {
    // From URL
    audioStream = AudioSystem.getAudioInputStream(audioURL);

    // At present, ALAW and ULAW encodings must be converted
    // to PCM_SIGNED before it can be played
    AudioFormat audioFormat = audioStream.getFormat();

    DataLine.Info info =
        new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);

    if (!AudioSystem.isLineSupported(info)) {
      AudioFormat sourceFormat = audioFormat;
      AudioFormat targetFormat =
          new AudioFormat(
              AudioFormat.Encoding.PCM_SIGNED,
              sourceFormat.getSampleRate(),
              16,
              sourceFormat.getChannels(),
              sourceFormat.getChannels() * (16 / 8),
              sourceFormat.getSampleRate(),
              false);
      audioStream = AudioSystem.getAudioInputStream(targetFormat, audioStream);
      audioFormat = audioStream.getFormat();
    }

    // Create the clip
    info =
        new DataLine.Info(
            Clip.class,
            audioFormat,
            ((int) audioStream.getFrameLength() * audioFormat.getFrameSize()));
    audioClip = (Clip) AudioSystem.getLine(info);

    // Add a listener for line events
    audioClip.addLineListener(this);

    // This method does not return until the audio file is completely
    // loaded
    audioClip.open(audioStream);
  }
  private void initializeOggStream() throws IOException {
    vorbis_info vorbisInfo = new vorbis_info();
    vorbisenc encoder = new vorbisenc();

    if (!encoder.vorbis_encode_init_vbr(
        vorbisInfo,
        audioFormat.getChannels(),
        (int) audioFormat.getSampleRate(),
        HIGH_QUALITY_256_KB)) {
      throw new IOException("Failed to initialize Vorbis encoder.");
    }

    vorbis_comment vorbisComment = new vorbis_comment();
    vorbisComment.vorbis_comment_add_tag(ENCODER_TAG_NAME, ENCODER_TAG_CONTENT);

    vorbisDspState = new vorbis_dsp_state();

    if (!vorbisDspState.vorbis_analysis_init(vorbisInfo)) {
      throw new IOException("Failed to initialize Vorbis DSP state.");
    }

    vorbisBlock = new vorbis_block(vorbisDspState);

    java.util.Random generator = new java.util.Random(); // need to randomize seed
    oggStreamState = new ogg_stream_state(generator.nextInt(256));

    ogg_packet header = new ogg_packet();
    ogg_packet header_comm = new ogg_packet();
    ogg_packet header_code = new ogg_packet();

    vorbisDspState.vorbis_analysis_headerout(vorbisComment, header, header_comm, header_code);

    oggStreamState.ogg_stream_packetin(header); // automatically placed in its own page
    oggStreamState.ogg_stream_packetin(header_comm);
    oggStreamState.ogg_stream_packetin(header_code);
  }
Example no. 25
 private static Clip loadFile(String path) {
   try {
     path = ClassLoader.getSystemResource(path).getPath();
     path = path.substring(1, path.length()).replaceAll("%20", " ");
     InputStream in = new FileInputStream(path);
     InputStream bin = new BufferedInputStream(in);
     AudioInputStream ais = AudioSystem.getAudioInputStream(bin);
     AudioFormat baseFormat = ais.getFormat();
     AudioFormat decodeFormat =
         new AudioFormat(
             AudioFormat.Encoding.PCM_SIGNED,
             baseFormat.getSampleRate(),
             16,
             baseFormat.getChannels(),
             baseFormat.getChannels() * 2,
             baseFormat.getSampleRate(),
             false);
      AudioInputStream dais = AudioSystem.getAudioInputStream(decodeFormat, ais);
      // open the Clip on the decoded PCM stream instead of discarding it
      Clip clip = AudioSystem.getClip();
      clip.open(dais);
      return clip;
   } catch (Exception ex) {
     System.out.println(ex);
   }
   return null;
 }
Example no. 26
 public int millisecondsToBytes(AudioFormat fmt, int time) {
   return (int)
       (time * (fmt.getSampleRate() * fmt.getChannels() * fmt.getSampleSizeInBits()) / 8000.0);
 }
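As a quick sanity check of the arithmetic above: for an assumed CD-quality format (44.1 kHz, 16-bit, stereo), 1000 ms works out to 44100 * 2 * 16 / 8 = 176400 bytes. A self-contained sketch, with the format chosen purely for illustration:

  // Hedged sketch: verify the millisecondsToBytes arithmetic for an assumed CD-quality format.
  public static void main(String[] args) {
    javax.sound.sampled.AudioFormat fmt =
        new javax.sound.sampled.AudioFormat(44100f, 16, 2, true, false); // 44.1 kHz, 16-bit, stereo, signed, little-endian
    int time = 1000; // milliseconds
    int bytes = (int) (time * (fmt.getSampleRate() * fmt.getChannels() * fmt.getSampleSizeInBits()) / 8000.0);
    System.out.println(bytes); // prints 176400
  }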
Example no. 27
 public long getLongFramePosition() {
   return dataLine.getLongFramePosition() * 2 / format.getChannels();
 }
Example no. 28
  private InputStream getFileStream(WaveFileFormat waveFileFormat, InputStream audioStream)
      throws IOException {
    // private method ... assumes audioFileFormat is a supported file type

    // WAVE header fields
    AudioFormat audioFormat = waveFileFormat.getFormat();
    int headerLength = waveFileFormat.getHeaderSize();
    int riffMagic = WaveFileFormat.RIFF_MAGIC;
    int waveMagic = WaveFileFormat.WAVE_MAGIC;
    int fmtMagic = WaveFileFormat.FMT_MAGIC;
    int fmtLength = WaveFileFormat.getFmtChunkSize(waveFileFormat.getWaveType());
    short wav_type = (short) waveFileFormat.getWaveType();
    short channels = (short) audioFormat.getChannels();
    short sampleSizeInBits = (short) audioFormat.getSampleSizeInBits();
    int sampleRate = (int) audioFormat.getSampleRate();
    int frameSizeInBytes = audioFormat.getFrameSize();
    int frameRate = (int) audioFormat.getFrameRate();
    int avgBytesPerSec = channels * sampleSizeInBits * sampleRate / 8;
    short blockAlign = (short) ((sampleSizeInBits / 8) * channels);
    int dataMagic = WaveFileFormat.DATA_MAGIC;
    int dataLength = waveFileFormat.getFrameLength() * frameSizeInBytes;
    int length = waveFileFormat.getByteLength();
    int riffLength = dataLength + headerLength - 8;

    byte header[] = null;
    ByteArrayInputStream headerStream = null;
    ByteArrayOutputStream baos = null;
    DataOutputStream dos = null;
    SequenceInputStream waveStream = null;

    AudioFormat audioStreamFormat = null;
    AudioFormat.Encoding encoding = null;
    InputStream codedAudioStream = audioStream;

    // if audioStream is an AudioInputStream and we need to convert, do it here...
    if (audioStream instanceof AudioInputStream) {
      audioStreamFormat = ((AudioInputStream) audioStream).getFormat();

      encoding = audioStreamFormat.getEncoding();

      if (AudioFormat.Encoding.PCM_SIGNED.equals(encoding)) {
        if (sampleSizeInBits == 8) {
          wav_type = WaveFileFormat.WAVE_FORMAT_PCM;
          // plug in the transcoder to convert from PCM_SIGNED to PCM_UNSIGNED
          codedAudioStream =
              AudioSystem.getAudioInputStream(
                  new AudioFormat(
                      AudioFormat.Encoding.PCM_UNSIGNED,
                      audioStreamFormat.getSampleRate(),
                      audioStreamFormat.getSampleSizeInBits(),
                      audioStreamFormat.getChannels(),
                      audioStreamFormat.getFrameSize(),
                      audioStreamFormat.getFrameRate(),
                      false),
                  (AudioInputStream) audioStream);
        }
      }
      if ((AudioFormat.Encoding.PCM_SIGNED.equals(encoding) && audioStreamFormat.isBigEndian())
          || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)
              && !audioStreamFormat.isBigEndian())
          || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)
              && audioStreamFormat.isBigEndian())) {
        if (sampleSizeInBits != 8) {
          wav_type = WaveFileFormat.WAVE_FORMAT_PCM;
          // plug in the transcoder to convert to PCM_SIGNED_LITTLE_ENDIAN
          codedAudioStream =
              AudioSystem.getAudioInputStream(
                  new AudioFormat(
                      AudioFormat.Encoding.PCM_SIGNED,
                      audioStreamFormat.getSampleRate(),
                      audioStreamFormat.getSampleSizeInBits(),
                      audioStreamFormat.getChannels(),
                      audioStreamFormat.getFrameSize(),
                      audioStreamFormat.getFrameRate(),
                      false),
                  (AudioInputStream) audioStream);
        }
      }
    }

    // Now push the header into a stream, concat, and return the new SequenceInputStream

    baos = new ByteArrayOutputStream();
    dos = new DataOutputStream(baos);

    // we write in littleendian...
    dos.writeInt(riffMagic);
    dos.writeInt(big2little(riffLength));
    dos.writeInt(waveMagic);
    dos.writeInt(fmtMagic);
    dos.writeInt(big2little(fmtLength));
    dos.writeShort(big2littleShort(wav_type));
    dos.writeShort(big2littleShort(channels));
    dos.writeInt(big2little(sampleRate));
    dos.writeInt(big2little(avgBytesPerSec));
    dos.writeShort(big2littleShort(blockAlign));
    dos.writeShort(big2littleShort(sampleSizeInBits));
    // $$fb 2002-04-16: Fix for 4636355: RIFF audio headers could be _more_ spec compliant
    if (wav_type != WaveFileFormat.WAVE_FORMAT_PCM) {
      // add length 0 for "codec specific data length"
      dos.writeShort(0);
    }

    dos.writeInt(dataMagic);
    dos.writeInt(big2little(dataLength));

    dos.close();
    header = baos.toByteArray();
    headerStream = new ByteArrayInputStream(header);
    waveStream = new SequenceInputStream(headerStream, new NoCloseInputStream(codedAudioStream));

    return waveStream;
  }
Example no. 29
 /**
  * Inits AudioInputStream and AudioFileFormat from the data source.
  *
  * @throws BasicPlayerException
  */
 protected void initAudioInputStream() throws BasicPlayerException {
   try {
     reset();
     notifyEvent(BasicPlayerEvent.OPENING, getEncodedStreamPosition(), -1, m_dataSource);
     if (m_dataSource instanceof URL) {
       initAudioInputStream((URL) m_dataSource);
     } else if (m_dataSource instanceof File) {
       initAudioInputStream((File) m_dataSource);
     } else if (m_dataSource instanceof InputStream) {
       initAudioInputStream((InputStream) m_dataSource);
     }
     createLine();
     // Notify listeners with AudioFileFormat properties.
     Map properties = null;
     if (m_audioFileFormat instanceof TAudioFileFormat) {
       // Tritonus SPI compliant audio file format.
       properties = ((TAudioFileFormat) m_audioFileFormat).properties();
       // Clone the Map because it is not mutable.
       properties = deepCopy(properties);
     } else {
       properties = new HashMap();
     }
     // Add JavaSound properties.
     if (m_audioFileFormat.getByteLength() > 0) {
       properties.put("audio.length.bytes", new Integer(m_audioFileFormat.getByteLength()));
     }
     if (m_audioFileFormat.getFrameLength() > 0) {
       properties.put("audio.length.frames", new Integer(m_audioFileFormat.getFrameLength()));
     }
     if (m_audioFileFormat.getType() != null) {
       properties.put("audio.type", (m_audioFileFormat.getType().toString()));
     }
     // Audio format.
     AudioFormat audioFormat = m_audioFileFormat.getFormat();
     if (audioFormat.getFrameRate() > 0) {
       properties.put("audio.framerate.fps", new Float(audioFormat.getFrameRate()));
     }
     if (audioFormat.getFrameSize() > 0) {
       properties.put("audio.framesize.bytes", new Integer(audioFormat.getFrameSize()));
     }
     if (audioFormat.getSampleRate() > 0) {
       properties.put("audio.samplerate.hz", new Float(audioFormat.getSampleRate()));
     }
     if (audioFormat.getSampleSizeInBits() > 0) {
       properties.put("audio.samplesize.bits", new Integer(audioFormat.getSampleSizeInBits()));
     }
     if (audioFormat.getChannels() > 0) {
       properties.put("audio.channels", new Integer(audioFormat.getChannels()));
     }
     if (audioFormat instanceof TAudioFormat) {
       // Tritonus SPI compliant audio format.
       Map addproperties = ((TAudioFormat) audioFormat).properties();
       properties.putAll(addproperties);
     }
     // Add SourceDataLine
     properties.put("basicplayer.sourcedataline", m_line);
     Iterator<BasicPlayerListener> it = laucher.getBasicPlayerListeners().iterator();
     while (it.hasNext()) {
       BasicPlayerListener bpl = it.next();
       bpl.opened(m_dataSource, properties);
     }
     m_status = OPENED;
     notifyEvent(BasicPlayerEvent.OPENED, getEncodedStreamPosition(), -1, null);
   } catch (LineUnavailableException e) {
     throw new BasicPlayerException(e);
   } catch (UnsupportedAudioFileException e) {
     throw new BasicPlayerException(e);
   } catch (IOException e) {
     throw new BasicPlayerException(e);
   }
 }
Example no. 30
  public static final ALoad create(File f) throws AudioException {
    try {
      // stream
      AudioInputStream ais = AudioSystem.getAudioInputStream(f);

      // make difference between encoded and non-encoded streams, because mp3
      // doesn't give streamlength information :-(
      boolean isEncoded =
          !ais.getFormat().getEncoding().equals(AudioFormat.Encoding.PCM_UNSIGNED)
              && !ais.getFormat().getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)
              && !ais.getFormat().getEncoding().equals(AudioFormat.Encoding.ALAW)
              && !ais.getFormat().getEncoding().equals(AudioFormat.Encoding.ULAW);

      AudioFormat.Encoding targetEncoding = AudioFormat.Encoding.PCM_SIGNED;
      if (isEncoded) {
        ais = AudioSystem.getAudioInputStream(targetEncoding, ais); // this is required for mp3
      }
      AudioFormat af = ais.getFormat();

      // count size...
      int sl = 0;

      if (isEncoded)
      // if (false)
      {
        int st = 0;
        byte a[] = new byte[4096];

        try {
          while ((st = ais.read(a, 0, a.length - 1)) >= 0) {
            sl += st;
          }
          sl /= ais.getFormat().getChannels() * ais.getFormat().getSampleSizeInBits() / 8;
        } catch (IOException ioe) {
          ioe.printStackTrace();
        }
      } else {
        sl =
            (int)
                (ais.getFrameLength()
                    * af.getFrameSize()
                    / af.getChannels()
                    / (af.getSampleSizeInBits() >> 3));
      }

      // stream for data loading
      ais = AudioSystem.getAudioInputStream(f);
      targetEncoding = AudioFormat.Encoding.PCM_SIGNED;
      if (isEncoded) {
        ais = AudioSystem.getAudioInputStream(targetEncoding, ais); // this is required for mp3
      }

      af = ais.getFormat();
      Debug.println(3, "audioformat = " + af.toString());

      // search the correct loader...
      for (int i = 0; i < classList.size(); i++) {
        ALoad l = classList.get(i);
        if (l.supports(af)) {
          l = l.duplicate();
          l.setAudioInputStream(ais, sl);
          l.setFile(f);
          return l;
        }
      }

      Debug.println(3, "unsupported audioformat = " + af.toString());
      throw new AudioException("unsupportedAudioFormat");
    } catch (UnsupportedAudioFileException uafe) {
      Debug.printStackTrace(5, uafe);
      throw new AudioException("unsupportedAudioFormat");
    } catch (IOException ioe) {
      Debug.printStackTrace(5, ioe);
      throw new AudioException("unsupportedAudioFormat");
    }
  }