Example #1
    UlawCodecStream(AudioInputStream stream, AudioFormat outputFormat) {
      super(stream, outputFormat, AudioSystem.NOT_SPECIFIED);

      AudioFormat inputFormat = stream.getFormat();

      // throw an IllegalArgumentException if not ok
      if (!(isConversionSupported(outputFormat, inputFormat))) {
        throw new IllegalArgumentException(
            "Unsupported conversion: " + inputFormat.toString() + " to " + outputFormat.toString());
      }

      // $$fb 2002-07-18: fix for 4714846: JavaSound ULAW (8-bit) encoder erroneously depends on
      // endian-ness
      boolean PCMIsBigEndian;

      // determine whether we are encoding or decoding
      if (AudioFormat.Encoding.ULAW.equals(inputFormat.getEncoding())) {
        encode = false;
        encodeFormat = inputFormat;
        decodeFormat = outputFormat;
        PCMIsBigEndian = outputFormat.isBigEndian();
      } else {
        encode = true;
        encodeFormat = outputFormat;
        decodeFormat = inputFormat;
        PCMIsBigEndian = inputFormat.isBigEndian();
        tempBuffer = new byte[tempBufferSize];
      }

      // setup tables according to byte order
      if (PCMIsBigEndian) {
        tabByte1 = ULAW_TABH;
        tabByte2 = ULAW_TABL;
        highByte = 0;
        lowByte = 1;
      } else {
        tabByte1 = ULAW_TABL;
        tabByte2 = ULAW_TABH;
        highByte = 1;
        lowByte = 0;
      }

      // set the AudioInputStream length in frames if we know it
      frameLength = stream.getFrameLength();
      // set framePos to zero
      framePos = 0;
      frameSize = inputFormat.getFrameSize();
      if (frameSize == AudioSystem.NOT_SPECIFIED) {
        frameSize = 1;
      }
    }
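This codec stream is normally obtained indirectly, by asking AudioSystem for a converted stream rather than constructing it by hand. A minimal sketch of requesting a ULAW-to-PCM conversion; the file name is a placeholder and any registered ULAW codec may be selected:

import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class UlawToPcmSketch {
  public static void main(String[] args) throws Exception {
    // "voice.au" is a placeholder for a ULAW-encoded file.
    AudioInputStream ulawStream = AudioSystem.getAudioInputStream(new File("voice.au"));
    AudioFormat ulaw = ulawStream.getFormat();
    // Build a 16-bit signed PCM format at the same sample rate and channel count.
    AudioFormat pcm = new AudioFormat(
        AudioFormat.Encoding.PCM_SIGNED,
        ulaw.getSampleRate(),
        16,
        ulaw.getChannels(),
        ulaw.getChannels() * 2,
        ulaw.getSampleRate(),
        false);
    // AudioSystem selects a registered codec (such as the stream above) to do the conversion.
    AudioInputStream pcmStream = AudioSystem.getAudioInputStream(pcm, ulawStream);
    System.out.println("Decoded format: " + pcmStream.getFormat());
  }
}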
Example #2
 TargetDataLine getTargetDataLine(AudioFormat format, int bufferSize) {
   TargetDataLine line = null;
   DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
   if (AudioSystem.isLineSupported(info)) {
     try {
       if (inputMixer == null) {
         line = (TargetDataLine) AudioSystem.getLine(info);
       } else {
         line = (TargetDataLine) inputMixer.getLine(info);
       }
       line.open(format, bufferSize * format.getFrameSize());
       debug(
           "TargetDataLine buffer size is "
               + line.getBufferSize()
               + "\n"
               + "TargetDataLine format is "
               + line.getFormat().toString()
               + "\n"
               + "TargetDataLine info is "
               + line.getLineInfo().toString());
     } catch (Exception e) {
       error("Error acquiring TargetDataLine: " + e.getMessage());
     }
   } else {
     error("Unable to return a TargetDataLine: unsupported format - " + format.toString());
   }
   return line;
 }
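A hedged sketch of how a capture line like the one returned above is typically consumed once acquired; the format, buffer size and loop count are illustrative assumptions:

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.TargetDataLine;

public class CaptureSketch {
  public static void main(String[] args) throws Exception {
    AudioFormat format = new AudioFormat(44100f, 16, 1, true, false);
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
    line.open(format, 1024 * format.getFrameSize());
    line.start();
    byte[] buffer = new byte[1024 * format.getFrameSize()];
    // Read a handful of buffers; a real capture loop would run until stopped.
    for (int i = 0; i < 10; i++) {
      int read = line.read(buffer, 0, buffer.length);
      System.out.println("Captured " + read + " bytes");
    }
    line.stop();
    line.close();
  }
}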
Example #3
 SourceDataLine getSourceDataLine(AudioFormat format, int bufferSize) {
   SourceDataLine line = null;
   DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
   if (AudioSystem.isLineSupported(info)) {
     try {
       if (outputMixer == null) {
         line = (SourceDataLine) AudioSystem.getLine(info);
       } else {
         line = (SourceDataLine) outputMixer.getLine(info);
       }
       // the buffer size multiplier below was tuned, after much trial and error on Linux,
       // to keep latency reasonable; leave it alone.
       line.open(format, bufferSize * format.getFrameSize() * 4);
       if (line.isOpen()) {
         debug(
             "SourceDataLine is "
                 + line.getClass().toString()
                 + "\n"
                 + "Buffer size is "
                 + line.getBufferSize()
                 + " bytes.\n"
                 + "Format is "
                 + line.getFormat().toString()
                 + ".");
         return line;
       }
     } catch (LineUnavailableException e) {
       error("Couldn't open the line: " + e.getMessage());
     }
   }
   error("Unable to return a SourceDataLine: unsupported format - " + format.toString());
   return line;
 }
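For completeness, a minimal playback sketch showing how a SourceDataLine obtained this way is normally fed; the file name is a placeholder and uncompressed PCM input is assumed:

import java.io.File;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.SourceDataLine;

public class PlaybackSketch {
  public static void main(String[] args) throws Exception {
    AudioInputStream in = AudioSystem.getAudioInputStream(new File("clip.wav"));
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, in.getFormat());
    SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
    line.open(in.getFormat());
    line.start();
    byte[] buffer = new byte[4096];
    int read;
    while ((read = in.read(buffer, 0, buffer.length)) != -1) {
      line.write(buffer, 0, read); // blocks until the data has been queued
    }
    line.drain(); // let any queued audio finish playing
    line.close();
    in.close();
  }
}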
Example #4
  /**
   * Sets the InputStream from which this StreamDataSource reads.
   *
   * @param inputStream the InputStream from which audio data comes
   * @param streamName the name of the InputStream
   */
  public void setInputStream(AudioInputStream inputStream, String streamName) {
    dataStream = inputStream;
    streamEndReached = false;
    utteranceEndSent = false;
    utteranceStarted = false;

    AudioFormat format = inputStream.getFormat();
    sampleRate = (int) format.getSampleRate();
    bigEndian = format.isBigEndian();

    String s = format.toString();
    logger.finer("input format is " + s);

    if (format.getSampleSizeInBits() % 8 != 0)
      throw new Error("StreamDataSource: bits per sample must be a multiple of 8.");
    bytesPerValue = format.getSampleSizeInBits() / 8;

    // determine whether the samples are signed or unsigned PCM

    AudioFormat.Encoding encoding = format.getEncoding();
    if (encoding.equals(AudioFormat.Encoding.PCM_SIGNED)) signedData = true;
    else if (encoding.equals(AudioFormat.Encoding.PCM_UNSIGNED)) signedData = false;
    else throw new RuntimeException("used file encoding is not supported");

    totalValuesRead = 0;
  }
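The method above only accepts signed or unsigned PCM with a whole number of bytes per sample. A hedged sketch of preparing a stream that passes those checks before handing it over; the class and helper names are hypothetical:

import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class PcmStreamPrep {
  // Returns a stream whose encoding the setInputStream() checks above will accept.
  static AudioInputStream toPcm(File f) throws Exception {
    AudioInputStream raw = AudioSystem.getAudioInputStream(f);
    AudioFormat.Encoding enc = raw.getFormat().getEncoding();
    if (AudioFormat.Encoding.PCM_SIGNED.equals(enc)
        || AudioFormat.Encoding.PCM_UNSIGNED.equals(enc)) {
      return raw;
    }
    // Anything else (ULAW, ALAW, MP3, ...) is converted to signed PCM first;
    // the codec picks the sample size, typically 16 bits.
    return AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, raw);
  }
}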
Example #5
 /**
  * Initializes a DataLine.<br>
  * We check whether the line supports Gain and Pan controls.
  *
  * <p>From the AudioInputStream, i.e. from the sound file, we fetch information about the format
  * of the audio data: the sampling frequency, the number of channels and the size of the samples.
  * This information is needed to ask JavaSound for a suitable output line for this audio file.
  * Furthermore, we have to give JavaSound a hint about how big the internal buffer for the line
  * should be. Here, we pass AudioSystem.NOT_SPECIFIED, signaling that we don't care about the
  * exact size; JavaSound will use some default value for the buffer size.
  */
 protected void createLine() throws LineUnavailableException {
   log.info("Create Line");
   if (m_line == null) {
     AudioFormat sourceFormat = m_audioInputStream.getFormat();
     log.info("Create Line : Source format : " + sourceFormat.toString());
     int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
     if (nSampleSizeInBits <= 0) {
       nSampleSizeInBits = 16;
     }
     if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW)
         || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW)) {
       nSampleSizeInBits = 16;
     }
     if (nSampleSizeInBits != 8) {
       nSampleSizeInBits = 16;
     }
     AudioFormat targetFormat =
         new AudioFormat(
             AudioFormat.Encoding.PCM_SIGNED,
             sourceFormat.getSampleRate(),
             nSampleSizeInBits,
             sourceFormat.getChannels(),
             sourceFormat.getChannels() * (nSampleSizeInBits / 8),
             sourceFormat.getSampleRate(),
             false);
     log.info("Create Line : Target format: " + targetFormat);
     // Keep a reference on encoded stream to progress notification.
     m_encodedaudioInputStream = m_audioInputStream;
     try {
       // Get total length in bytes of the encoded stream.
       encodedLength = m_encodedaudioInputStream.available();
     } catch (IOException e) {
       log.log(Level.SEVERE, "Cannot get m_encodedaudioInputStream.available()", e);
     }
     // Create decoded stream.
     m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
     AudioFormat audioFormat = m_audioInputStream.getFormat();
     DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, -1);
     Mixer mixer = getMixer(m_mixerName);
     if (mixer != null) {
       log.info("Mixer : " + mixer.getMixerInfo().toString());
       m_line = (SourceDataLine) mixer.getLine(info);
     } else {
       m_line = (SourceDataLine) AudioSystem.getLine(info);
       m_mixerName = null;
     }
     log.info("Line : " + m_line.toString());
     log.info("Line Info : " + m_line.getLineInfo().toString());
     log.info("Line AudioFormat: " + m_line.getFormat().toString());
   }
 }
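The method above builds a signed PCM target format, lets AudioSystem decode into it, and only then asks for a line. A condensed sketch of the same decode-then-open pattern; the class and method names are assumptions for illustration:

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.SourceDataLine;

public class DecodedLineSketch {
  static SourceDataLine openDecodedLine(AudioInputStream encodedStream) throws Exception {
    AudioFormat src = encodedStream.getFormat();
    int bits = src.getSampleSizeInBits();
    // Mirror the logic above: everything except plain 8-bit PCM is decoded to 16 bits.
    if (bits != 8
        || AudioFormat.Encoding.ULAW.equals(src.getEncoding())
        || AudioFormat.Encoding.ALAW.equals(src.getEncoding())) {
      bits = 16;
    }
    AudioFormat target = new AudioFormat(
        AudioFormat.Encoding.PCM_SIGNED,
        src.getSampleRate(),
        bits,
        src.getChannels(),
        src.getChannels() * (bits / 8),
        src.getSampleRate(),
        false);
    AudioInputStream decoded = AudioSystem.getAudioInputStream(target, encodedStream);
    // AudioSystem.NOT_SPECIFIED (-1) leaves the internal buffer size up to JavaSound.
    DataLine.Info info =
        new DataLine.Info(SourceDataLine.class, decoded.getFormat(), AudioSystem.NOT_SPECIFIED);
    SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
    line.open(decoded.getFormat());
    return line;
  }
}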
Example #6
  public static final ALoad create(File f) throws AudioException {
    try {
      // stream
      AudioInputStream ais = AudioSystem.getAudioInputStream(f);

      // distinguish encoded from non-encoded streams, because mp3
      // doesn't provide stream length information :-(
      boolean isEncoded =
          !ais.getFormat().getEncoding().equals(AudioFormat.Encoding.PCM_UNSIGNED)
              && !ais.getFormat().getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)
              && !ais.getFormat().getEncoding().equals(AudioFormat.Encoding.ALAW)
              && !ais.getFormat().getEncoding().equals(AudioFormat.Encoding.ULAW);

      AudioFormat.Encoding targetEncoding = AudioFormat.Encoding.PCM_SIGNED;
      if (isEncoded) {
        ais = AudioSystem.getAudioInputStream(targetEncoding, ais); // this is required for mp3
      }
      AudioFormat af = ais.getFormat();

      // count size...
      int sl = 0;

      if (isEncoded) {
        int st = 0;
        byte a[] = new byte[4096];

        try {
          while ((st = ais.read(a, 0, a.length)) >= 0) {
            sl += st;
          }
          sl /= ais.getFormat().getChannels() * ais.getFormat().getSampleSizeInBits() / 8;
        } catch (IOException ioe) {
          ioe.printStackTrace();
        }
      } else {
        sl =
            (int)
                (ais.getFrameLength()
                    * af.getFrameSize()
                    / af.getChannels()
                    / (af.getSampleSizeInBits() >> 3));
      }

      // stream for data loading
      ais = AudioSystem.getAudioInputStream(f);
      targetEncoding = AudioFormat.Encoding.PCM_SIGNED;
      if (isEncoded) {
        ais = AudioSystem.getAudioInputStream(targetEncoding, ais); // this is required for mp3
      }

      af = ais.getFormat();
      Debug.println(3, "audioformat = " + af.toString());

      // search the correct loader...
      for (int i = 0; i < classList.size(); i++) {
        ALoad l = classList.get(i);
        if (l.supports(af)) {
          l = l.duplicate();
          l.setAudioInputStream(ais, sl);
          l.setFile(f);
          return l;
        }
      }

      Debug.println(3, "unsupported audioformat = " + af.toString());
      throw new AudioException("unsupportedAudioFormat");
    } catch (UnsupportedAudioFileException uafe) {
      Debug.printStackTrace(5, uafe);
      throw new AudioException("unsupportedAudioFormat");
    } catch (IOException ioe) {
      Debug.printStackTrace(5, ioe);
      throw new AudioException("unsupportedAudioFormat");
    }
  }
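For uncompressed streams, the arithmetic in the else branch reduces to the frame count, since a PCM frame holds channels * (sampleSizeInBits / 8) bytes. A small sanity check on that identity; the file name is a placeholder:

import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class SampleCountCheck {
  public static void main(String[] args) throws Exception {
    AudioInputStream ais = AudioSystem.getAudioInputStream(new File("clip.wav"));
    AudioFormat af = ais.getFormat();
    long frames = ais.getFrameLength();
    long samplesPerChannel =
        frames * af.getFrameSize() / af.getChannels() / (af.getSampleSizeInBits() >> 3);
    // For PCM, frameSize == channels * bytesPerSample, so both numbers agree.
    System.out.println(frames + " frames, " + samplesPerChannel + " samples per channel");
  }
}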