/**
 * Construct a RawAudioFormat from an AudioFormat, assuming a WAV header of size
 * WAV_HEADER_SIZE (44) bytes.
 *
 * @param af AudioFormat (e.g. from AudioSystem.getAudioFileFormat(File)).
 */
public RawAudioFormat(AudioFormat af) throws IOException {
  sr = (int) af.getFrameRate();
  br = af.getSampleSizeInBits();
  fs = br / 8;
  if (af.getChannels() > 1)
    throw new IOException("multi-channel files are not supported");
  if (af.getEncoding() == AudioFormat.Encoding.PCM_SIGNED) {
    signed = true;
    alaw = false;
    ulaw = false;
    hs = WAV_HEADER_SIZE;
  }
  if (af.getEncoding() == AudioFormat.Encoding.PCM_UNSIGNED) {
    signed = false;
    alaw = false;
    ulaw = false;
    hs = WAV_HEADER_SIZE;
  }
  if (af.getEncoding() == AudioFormat.Encoding.ALAW) {
    alaw = true;
    signed = true;
    ulaw = false;
    hs = WAV_HEADER_SIZE2;
  }
  if (af.getEncoding() == AudioFormat.Encoding.ULAW) {
    ulaw = true;
    signed = true;
    alaw = false;
    hs = WAV_HEADER_SIZE2;
  }
}
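// Hypothetical usage sketch for the constructor above (the demo class and file name are
// illustrative assumptions, not part of the original source): obtain the AudioFormat via
// AudioSystem.getAudioFileFormat and hand it to RawAudioFormat.
import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;

public class RawAudioFormatDemo {
  public static void main(String[] args) throws Exception {
    File wav = new File("speech.wav"); // illustrative file name
    // getAudioFileFormat only inspects the header; it does not read the audio data
    AudioFormat af = AudioSystem.getAudioFileFormat(wav).getFormat();
    RawAudioFormat raw = new RawAudioFormat(af); // throws IOException for multi-channel input
  }
}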
public static Clip loadClip(URL url) {
  Clip clip = null;
  String fnm = "" + url;
  try {
    AudioInputStream stream = AudioSystem.getAudioInputStream(url);
    AudioFormat format = stream.getFormat();
    if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
        || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
      AudioFormat newFormat =
          new AudioFormat(
              AudioFormat.Encoding.PCM_SIGNED,
              format.getSampleRate(),
              format.getSampleSizeInBits() * 2,
              format.getChannels(),
              format.getFrameSize() * 2,
              format.getFrameRate(),
              true); // big endian
      stream = AudioSystem.getAudioInputStream(newFormat, stream);
      // System.out.println("Converted Audio format: " + newFormat);
      format = newFormat;
    }
    DataLine.Info info = new DataLine.Info(Clip.class, format);
    // make sure sound system supports data line
    if (!AudioSystem.isLineSupported(info)) {
      System.out.println("Unsupported Clip File: " + fnm);
      return null;
    }
    // get clip line resource
    clip = (Clip) AudioSystem.getLine(info);
    clip.open(stream); // open the sound file as a clip
    stream.close(); // we're done with the input stream
    // duration (in secs) of the clip
    double duration = clip.getMicrosecondLength() / 1000000.0; // new
    if (duration <= 1.0) {
      System.out.println("WARNING. Duration <= 1 sec : " + duration + " secs");
      System.out.println(
          " The clip in " + fnm + " may not play in J2SE 1.5 -- make it longer");
    }
    // else
    //   System.out.println(fnm + ": Duration: " + duration + " secs");
  } // end of try block
  catch (UnsupportedAudioFileException audioException) {
    System.out.println("Unsupported audio file: " + fnm);
  } catch (LineUnavailableException noLineException) {
    System.out.println("No audio line available for : " + fnm);
  } catch (IOException ioException) {
    System.out.println("Could not read: " + fnm);
  } catch (Exception e) {
    System.out.println("Problem with " + fnm);
  }
  return clip;
} // end of loadClip()
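// A hypothetical caller for loadClip above, assumed to sit in the same class. The resource
// name "beep.wav" is an illustrative assumption. Clip.start() returns immediately, so the
// sketch blocks until playback has finished before closing the clip.
public static void main(String[] args) throws Exception {
  java.net.URL url = Thread.currentThread().getContextClassLoader().getResource("beep.wav");
  Clip clip = loadClip(url);
  if (clip != null) {
    clip.start();
    Thread.sleep(clip.getMicrosecondLength() / 1000); // microseconds -> milliseconds
    clip.close();
  }
}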
public void open(AudioInputStream stream) throws IOException, LineUnavailableException {
  AudioInputStream is1;
  format = stream.getFormat();
  if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
    is1 = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, stream);
  } else {
    is1 = stream;
  }
  format = is1.getFormat();
  InputStream is2;
  if (parent != null) {
    ProgressMonitorInputStream pmis =
        new ProgressMonitorInputStream(parent, "Loading track..", is1);
    pmis.getProgressMonitor().setMillisToPopup(0);
    is2 = pmis;
  } else {
    is2 = is1;
  }
  byte[] buf = new byte[1 << 16]; // 64 KiB buffer ("2 ^ 16" is XOR in Java, i.e. 18 bytes)
  int totalRead = 0;
  int numRead = 0;
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  numRead = is2.read(buf);
  while (numRead > -1) {
    baos.write(buf, 0, numRead);
    totalRead += numRead; // count bytes actually written, never the terminating -1
    numRead = is2.read(buf, 0, buf.length);
  }
  is2.close();
  audioData = baos.toByteArray();
  AudioFormat afTemp;
  if (format.getChannels() < 2) {
    afTemp =
        new AudioFormat(
            format.getEncoding(),
            format.getSampleRate(),
            format.getSampleSizeInBits(),
            2,
            format.getSampleSizeInBits() * 2 / 8, // calculate frame size
            format.getFrameRate(),
            format.isBigEndian());
  } else {
    afTemp = format;
  }
  setLoopPoints(0, audioData.length);
  dataLine = AudioSystem.getSourceDataLine(afTemp);
  dataLine.open();
  inputStream = new ByteArrayInputStream(audioData);
}
public AudioFormat[] getTargetFormats(
    AudioFormat.Encoding targetEncoding, AudioFormat sourceFormat) {
  if ((AudioFormat.Encoding.PCM_SIGNED.equals(targetEncoding)
          && AudioFormat.Encoding.ULAW.equals(sourceFormat.getEncoding()))
      || (AudioFormat.Encoding.ULAW.equals(targetEncoding)
          && AudioFormat.Encoding.PCM_SIGNED.equals(sourceFormat.getEncoding()))) {
    return getOutputFormats(sourceFormat);
  } else {
    return new AudioFormat[0];
  }
}
/**
 * Inits a DataLine.<br>
 * We check if the line supports Gain and Pan controls.
 *
 * <p>From the AudioInputStream, i.e. from the sound file, we fetch information about the
 * format of the audio data. This information includes the sampling frequency, the number of
 * channels and the size of the samples, and it is needed to ask JavaSound for a suitable
 * output line for this audio file. Furthermore, we have to give JavaSound a hint about how
 * big the internal buffer for the line should be. Here, we say AudioSystem.NOT_SPECIFIED,
 * signaling that we don't care about the exact size. JavaSound will use some default value
 * for the buffer size.
 */
protected void createLine() throws LineUnavailableException {
  log.info("Create Line");
  if (m_line == null) {
    AudioFormat sourceFormat = m_audioInputStream.getFormat();
    log.info("Create Line : Source format : " + sourceFormat.toString());
    int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
    if (nSampleSizeInBits <= 0) {
      nSampleSizeInBits = 16;
    }
    if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW)
        || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW)) {
      nSampleSizeInBits = 16;
    }
    if (nSampleSizeInBits != 8) {
      nSampleSizeInBits = 16;
    }
    AudioFormat targetFormat =
        new AudioFormat(
            AudioFormat.Encoding.PCM_SIGNED,
            sourceFormat.getSampleRate(),
            nSampleSizeInBits,
            sourceFormat.getChannels(),
            sourceFormat.getChannels() * (nSampleSizeInBits / 8),
            sourceFormat.getSampleRate(),
            false);
    log.info("Create Line : Target format: " + targetFormat);
    // Keep a reference on the encoded stream for progress notification.
    m_encodedaudioInputStream = m_audioInputStream;
    try {
      // Get total length in bytes of the encoded stream.
      encodedLength = m_encodedaudioInputStream.available();
    } catch (IOException e) {
      log.log(Level.SEVERE, "Cannot get m_encodedaudioInputStream.available()", e);
    }
    // Create decoded stream.
    m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
    AudioFormat audioFormat = m_audioInputStream.getFormat();
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, -1);
    Mixer mixer = getMixer(m_mixerName);
    if (mixer != null) {
      log.info("Mixer : " + mixer.getMixerInfo().toString());
      m_line = (SourceDataLine) mixer.getLine(info);
    } else {
      m_line = (SourceDataLine) AudioSystem.getLine(info);
      m_mixerName = null;
    }
    log.info("Line : " + m_line.toString());
    log.info("Line Info : " + m_line.getLineInfo().toString());
    log.info("Line AudioFormat: " + m_line.getFormat().toString());
  }
}
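// A minimal, self-contained sketch of the pattern explained above (decode to signed 16-bit
// PCM, ask the AudioSystem for a matching SourceDataLine, then stream the data). The file
// name and read buffer size are illustrative assumptions, not values from the original player.
import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.SourceDataLine;

public class LinePlaybackSketch {
  public static void main(String[] args) throws Exception {
    AudioInputStream in = AudioSystem.getAudioInputStream(new File("track.wav"));
    AudioFormat src = in.getFormat();
    AudioFormat pcm =
        new AudioFormat(
            AudioFormat.Encoding.PCM_SIGNED,
            src.getSampleRate(),
            16,
            src.getChannels(),
            src.getChannels() * 2, // frame size = channels * 2 bytes per 16-bit sample
            src.getSampleRate(),
            false);
    AudioInputStream decoded = AudioSystem.getAudioInputStream(pcm, in);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, pcm, AudioSystem.NOT_SPECIFIED);
    SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
    line.open(pcm); // let JavaSound pick a default internal buffer size
    line.start();
    byte[] buf = new byte[4096];
    int n;
    while ((n = decoded.read(buf, 0, buf.length)) != -1) {
      line.write(buf, 0, n);
    }
    line.drain();
    line.close();
    decoded.close();
  }
}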
/*
public AudioFormat[] getOutputFormats(AudioFormat inputFormat) {
*/
private AudioFormat[] getOutputFormats(AudioFormat inputFormat) {
  Vector formats = new Vector();
  AudioFormat format;
  if ((inputFormat.getSampleSizeInBits() == 16)
      && AudioFormat.Encoding.PCM_SIGNED.equals(inputFormat.getEncoding())) {
    format =
        new AudioFormat(
            AudioFormat.Encoding.ULAW,
            inputFormat.getSampleRate(),
            8,
            inputFormat.getChannels(),
            inputFormat.getChannels(),
            inputFormat.getSampleRate(),
            false);
    formats.addElement(format);
  }
  if (AudioFormat.Encoding.ULAW.equals(inputFormat.getEncoding())) {
    format =
        new AudioFormat(
            AudioFormat.Encoding.PCM_SIGNED,
            inputFormat.getSampleRate(),
            16,
            inputFormat.getChannels(),
            inputFormat.getChannels() * 2,
            inputFormat.getSampleRate(),
            false);
    formats.addElement(format);
    format =
        new AudioFormat(
            AudioFormat.Encoding.PCM_SIGNED,
            inputFormat.getSampleRate(),
            16,
            inputFormat.getChannels(),
            inputFormat.getChannels() * 2,
            inputFormat.getSampleRate(),
            true);
    formats.addElement(format);
  }
  AudioFormat[] formatArray = new AudioFormat[formats.size()];
  for (int i = 0; i < formatArray.length; i++) {
    formatArray[i] = (AudioFormat) (formats.elementAt(i));
  }
  return formatArray;
}
/**
 * Numbers used here are verbatim from Javazoom
 *
 * @param baseFormat
 * @return
 */
private AudioFormat getDecodedFormat(AudioFormat baseFormat) {
  // Do we need to "decode" the base format?
  if (AudioFormat.Encoding.PCM_SIGNED.equals(baseFormat.getEncoding())
      || AudioFormat.Encoding.PCM_UNSIGNED.equals(baseFormat.getEncoding())) {
    return baseFormat;
  }
  return new AudioFormat(
      AudioFormat.Encoding.PCM_SIGNED,
      baseFormat.getSampleRate(),
      16,
      baseFormat.getChannels(),
      baseFormat.getChannels() * 2,
      baseFormat.getSampleRate(),
      false);
}
/**
 * Sets the InputStream from which this StreamDataSource reads.
 *
 * @param inputStream the InputStream from which audio data comes
 * @param streamName the name of the InputStream
 */
public void setInputStream(AudioInputStream inputStream, String streamName) {
  dataStream = inputStream;
  streamEndReached = false;
  utteranceEndSent = false;
  utteranceStarted = false;
  AudioFormat format = inputStream.getFormat();
  sampleRate = (int) format.getSampleRate();
  bigEndian = format.isBigEndian();
  String s = format.toString();
  logger.finer("input format is " + s);
  if (format.getSampleSizeInBits() % 8 != 0)
    throw new Error("StreamDataSource: bits per sample must be a multiple of 8.");
  bytesPerValue = format.getSampleSizeInBits() / 8;
  // test whether all files in the stream have the same format
  AudioFormat.Encoding encoding = format.getEncoding();
  if (encoding.equals(AudioFormat.Encoding.PCM_SIGNED)) signedData = true;
  else if (encoding.equals(AudioFormat.Encoding.PCM_UNSIGNED)) signedData = false;
  else throw new RuntimeException("used file encoding is not supported");
  totalValuesRead = 0;
}
private void doPlay(File file) {
  AudioInputStream actualAudioIn = null;
  this.file = file;
  try (AudioInputStream audioIn = AudioSystem.getAudioInputStream(file)) {
    AudioFormat baseFormat = audioIn.getFormat();
    System.out.println(baseFormat.getEncoding());
    AudioFormat decodedFormat = getDecodedFormat(baseFormat);
    // make new audio in stream based on decoded format
    actualAudioIn = AudioSystem.getAudioInputStream(decodedFormat, audioIn);
    // get data from audio system
    line = getLine(decodedFormat);
    line.addLineListener(this);
    doPlay(decodedFormat, actualAudioIn);
    audioIn.close();
  } catch (Exception e) {
    logger.log(Level.WARNING, "Exception playing file '" + file.getName() + "'", e);
  } finally {
    if (actualAudioIn != null) {
      try {
        actualAudioIn.close();
      } catch (IOException e) {
      }
    }
    if (line != null) line.close();
  }
}
/**
 * Test method for {@link net.sourceforge.gjtapi.protocols.JavaSoundParser#parse(java.net.URL)}.
 *
 * @exception Exception test failed.
 */
@Test
public void testParse() throws Exception {
  final URL url = new URL("playback://audio?rate=8000&channels=2&encoding=pcm");
  AudioFormat format = JavaSoundParser.parse(url);
  Assert.assertEquals(new Float(8000.0), new Float(format.getSampleRate()));
  Assert.assertEquals(2, format.getChannels());
  Assert.assertEquals(AudioFormat.Encoding.PCM_SIGNED, format.getEncoding());
}
/**
 * Store an AudioFormat
 *
 * @param audioFormat
 */
public AudioFormatTransport(AudioFormat audioFormat) {
  _channels = audioFormat.getChannels();
  _encoding = audioFormat.getEncoding().toString();
  _frameRate = audioFormat.getFrameRate();
  _frameSize = audioFormat.getFrameSize();
  _sampleRate = audioFormat.getSampleRate();
  _sampleSizeInBits = audioFormat.getSampleSizeInBits();
  _isBigEndian = audioFormat.isBigEndian();
  _properties = audioFormat.properties();
}
public AudioFormat.Encoding[] getTargetEncodings(AudioFormat sourceFormat) {
  if (AudioFormat.Encoding.PCM_SIGNED.equals(sourceFormat.getEncoding())) {
    if (sourceFormat.getSampleSizeInBits() == 16) {
      AudioFormat.Encoding enc[] = new AudioFormat.Encoding[1];
      enc[0] = AudioFormat.Encoding.ULAW;
      return enc;
    } else {
      return new AudioFormat.Encoding[0];
    }
  } else if (AudioFormat.Encoding.ULAW.equals(sourceFormat.getEncoding())) {
    if (sourceFormat.getSampleSizeInBits() == 8) {
      AudioFormat.Encoding enc[] = new AudioFormat.Encoding[1];
      enc[0] = AudioFormat.Encoding.PCM_SIGNED;
      return enc;
    } else {
      return new AudioFormat.Encoding[0];
    }
  } else {
    return new AudioFormat.Encoding[0];
  }
}
UlawCodecStream(AudioInputStream stream, AudioFormat outputFormat) {
  super(stream, outputFormat, AudioSystem.NOT_SPECIFIED);
  AudioFormat inputFormat = stream.getFormat();
  // throw an IllegalArgumentException if not ok
  if (!(isConversionSupported(outputFormat, inputFormat))) {
    throw new IllegalArgumentException(
        "Unsupported conversion: " + inputFormat.toString() + " to " + outputFormat.toString());
  }
  // $$fb 2002-07-18: fix for 4714846: JavaSound ULAW (8-bit) encoder erroneously depends on
  // endian-ness
  boolean PCMIsBigEndian;
  // determine whether we are encoding or decoding
  if (AudioFormat.Encoding.ULAW.equals(inputFormat.getEncoding())) {
    encode = false;
    encodeFormat = inputFormat;
    decodeFormat = outputFormat;
    PCMIsBigEndian = outputFormat.isBigEndian();
  } else {
    encode = true;
    encodeFormat = outputFormat;
    decodeFormat = inputFormat;
    PCMIsBigEndian = inputFormat.isBigEndian();
    tempBuffer = new byte[tempBufferSize];
  }
  // setup tables according to byte order
  if (PCMIsBigEndian) {
    tabByte1 = ULAW_TABH;
    tabByte2 = ULAW_TABL;
    highByte = 0;
    lowByte = 1;
  } else {
    tabByte1 = ULAW_TABL;
    tabByte2 = ULAW_TABH;
    highByte = 1;
    lowByte = 0;
  }
  // set the AudioInputStream length in frames if we know it
  if (stream instanceof AudioInputStream) {
    frameLength = ((AudioInputStream) stream).getFrameLength();
  }
  // set framePos to zero
  framePos = 0;
  frameSize = inputFormat.getFrameSize();
  if (frameSize == AudioSystem.NOT_SPECIFIED) {
    frameSize = 1;
  }
}
private AudioInputStream toLittleEndian(AudioInputStream ais) {
  AudioFormat format = ais.getFormat();
  AudioFormat targetFormat =
      new AudioFormat(
          format.getEncoding(),
          format.getSampleRate(),
          format.getSampleSizeInBits(),
          format.getChannels(),
          format.getFrameSize(),
          format.getFrameRate(),
          false);
  return AudioSystem.getAudioInputStream(targetFormat, ais);
}
public AudioInputStream getAudioInputStream(
    AudioFormat.Encoding targetEncoding, AudioInputStream sourceStream) {
  AudioFormat sourceFormat = sourceStream.getFormat();
  AudioFormat.Encoding sourceEncoding = sourceFormat.getEncoding();
  if (sourceEncoding.equals(targetEncoding)) {
    return sourceStream;
  } else {
    AudioFormat targetFormat = null;
    if (!isConversionSupported(targetEncoding, sourceStream.getFormat())) {
      throw new IllegalArgumentException(
          "Unsupported conversion: "
              + sourceStream.getFormat().toString()
              + " to "
              + targetEncoding.toString());
    }
    if (AudioFormat.Encoding.ULAW.equals(sourceEncoding)
        && AudioFormat.Encoding.PCM_SIGNED.equals(targetEncoding)) {
      targetFormat =
          new AudioFormat(
              targetEncoding,
              sourceFormat.getSampleRate(),
              16,
              sourceFormat.getChannels(),
              2 * sourceFormat.getChannels(),
              sourceFormat.getSampleRate(),
              sourceFormat.isBigEndian());
    } else if (AudioFormat.Encoding.PCM_SIGNED.equals(sourceEncoding)
        && AudioFormat.Encoding.ULAW.equals(targetEncoding)) {
      targetFormat =
          new AudioFormat(
              targetEncoding,
              sourceFormat.getSampleRate(),
              8,
              sourceFormat.getChannels(),
              sourceFormat.getChannels(),
              sourceFormat.getSampleRate(),
              false);
    } else {
      throw new IllegalArgumentException(
          "Unsupported conversion: "
              + sourceStream.getFormat().toString()
              + " to "
              + targetEncoding.toString());
    }
    return getAudioInputStream(targetFormat, sourceStream);
  }
}
WaveFileFormat(
    AudioFileFormat.Type type, int lengthInBytes, AudioFormat format, int lengthInFrames) {
  super(type, lengthInBytes, format, lengthInFrames);
  AudioFormat.Encoding encoding = format.getEncoding();
  if (encoding.equals(AudioFormat.Encoding.ALAW)) {
    waveType = WAVE_FORMAT_ALAW;
  } else if (encoding.equals(AudioFormat.Encoding.ULAW)) {
    waveType = WAVE_FORMAT_MULAW;
  } else if (encoding.equals(AudioFormat.Encoding.PCM_SIGNED)
      || encoding.equals(AudioFormat.Encoding.PCM_UNSIGNED)) {
    waveType = WAVE_FORMAT_PCM;
  } else {
    waveType = WAVE_FORMAT_UNKNOWN;
  }
}
/** Convert javax.sound.sampled.AudioFormat to javax.media.format.AudioFormat. */
public static AudioFormat convertFormat(javax.sound.sampled.AudioFormat format) {
  Encoding encoding = format.getEncoding();
  int channels = format.getChannels();
  float frameRate = format.getFrameRate();
  int frameSize =
      format.getFrameSize() < 0 ? format.getFrameSize() : (format.getFrameSize() * 8);
  float sampleRate = format.getSampleRate();
  int sampleSize = format.getSampleSizeInBits();
  int endian = format.isBigEndian() ? AudioFormat.BIG_ENDIAN : AudioFormat.LITTLE_ENDIAN;
  int signed = AudioFormat.NOT_SPECIFIED;
  String encodingString = AudioFormat.LINEAR;
  if (encoding == Encoding.PCM_SIGNED) {
    signed = AudioFormat.SIGNED;
    encodingString = AudioFormat.LINEAR;
  } else if (encoding == Encoding.PCM_UNSIGNED) {
    signed = AudioFormat.UNSIGNED;
    encodingString = AudioFormat.LINEAR;
  } else if (encoding == Encoding.ALAW) {
    encodingString = AudioFormat.ALAW;
  } else if (encoding == Encoding.ULAW) {
    encodingString = AudioFormat.ULAW;
  } else {
    encodingString = encoding.toString();
  }
  AudioFormat jmfFormat =
      new AudioFormat(
          encodingString,
          (double) sampleRate,
          sampleSize,
          channels,
          endian,
          signed,
          frameSize,
          frameRate,
          AudioFormat.byteArray);
  return jmfFormat;
}
@Override
public AudioFileFormat.Type[] getAudioFileTypes(AudioInputStream stream) {
  AudioFileFormat.Type[] filetypes = new AudioFileFormat.Type[types.length];
  System.arraycopy(types, 0, filetypes, 0, types.length);
  // make sure we can write this stream
  AudioFormat format = stream.getFormat();
  AudioFormat.Encoding encoding = format.getEncoding();
  if (AudioFormat.Encoding.ALAW.equals(encoding)
      || AudioFormat.Encoding.ULAW.equals(encoding)
      || AudioFormat.Encoding.PCM_SIGNED.equals(encoding)
      || AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)) {
    return filetypes;
  }
  return new AudioFileFormat.Type[0];
}
public float[] extractFloatDataFromAmplitudeByteArray(AudioFormat format, byte[] audioBytes) {
  // convert
  audioData = null;
  if (format.getSampleSizeInBits() == 16) {
    int nlengthInSamples = audioBytes.length / 2;
    audioData = new float[nlengthInSamples];
    if (format.isBigEndian()) {
      for (int i = 0; i < nlengthInSamples; i++) {
        /* First byte is MSB (high order) */
        int MSB = audioBytes[2 * i];
        /* Second byte is LSB (low order) */
        int LSB = audioBytes[2 * i + 1];
        audioData[i] = MSB << 8 | (255 & LSB);
      }
    } else {
      for (int i = 0; i < nlengthInSamples; i++) {
        /* First byte is LSB (low order) */
        int LSB = audioBytes[2 * i];
        /* Second byte is MSB (high order) */
        int MSB = audioBytes[2 * i + 1];
        audioData[i] = MSB << 8 | (255 & LSB);
      }
    }
  } else if (format.getSampleSizeInBits() == 8) {
    int nlengthInSamples = audioBytes.length;
    audioData = new float[nlengthInSamples];
    if (format.getEncoding().toString().startsWith("PCM_SIGN")) {
      for (int i = 0; i < audioBytes.length; i++) {
        audioData[i] = audioBytes[i];
      }
    } else {
      for (int i = 0; i < audioBytes.length; i++) {
        audioData[i] = audioBytes[i] - 128;
      }
    }
  } // end of if..else
  // System.out.println("PCM Returned===============" + audioData.length);
  return audioData;
}
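// A hypothetical caller for the extractor above: read all frames of a PCM file into a byte
// array and convert them to floats. The enclosing class name AmplitudeExtractor is an
// assumption for illustration, and the normalization step is an addition, not original code.
import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class AmplitudeDemo {
  public static void main(String[] args) throws Exception {
    try (AudioInputStream in = AudioSystem.getAudioInputStream(new File(args[0]))) {
      AudioFormat format = in.getFormat();
      byte[] audioBytes = in.readAllBytes(); // Java 9+: drains the whole stream
      float[] samples =
          new AmplitudeExtractor().extractFloatDataFromAmplitudeByteArray(format, audioBytes);
      // Normalize raw sample values into [-1, 1] for display or analysis
      float scale = (float) Math.pow(2, format.getSampleSizeInBits() - 1);
      for (int i = 0; i < samples.length; i++) {
        samples[i] /= scale;
      }
      System.out.println("Read " + samples.length + " samples");
    }
  }
}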
private void getAudioData() {
  if (format.getSampleSizeInBits() == 16) {
    nlengthInSamples = audioBytes.length / 2;
    audioData = new int[nlengthInSamples];
    if (format.isBigEndian()) {
      for (int i = 0; i < nlengthInSamples; i++) {
        // First byte is MSB (high order)
        int MSB = (int) audioBytes[2 * i];
        // Second byte is LSB (low order)
        int LSB = (int) audioBytes[2 * i + 1];
        audioData[i] = MSB << 8 | (255 & LSB);
      }
    } else {
      for (int i = 0; i < nlengthInSamples; i++) {
        // First byte is LSB (low order)
        int LSB = (int) audioBytes[2 * i];
        // Second byte is MSB (high order)
        int MSB = (int) audioBytes[2 * i + 1];
        audioData[i] = MSB << 8 | (255 & LSB);
      }
    }
  } else {
    if (format.getSampleSizeInBits() == 8) {
      nlengthInSamples = audioBytes.length;
      audioData = new int[nlengthInSamples];
      if (format.getEncoding().toString().startsWith("PCM_SIGN")) {
        for (int i = 0; i < audioBytes.length; i++) {
          audioData[i] = audioBytes[i];
        }
      } else {
        for (int i = 0; i < audioBytes.length; i++) {
          audioData[i] = audioBytes[i] - 128;
        }
      }
    }
  }
}
/**
 * Determines the single largest sample size of all channels of the current clip. This can be
 * handy for determining a fraction to scale visual representations.
 *
 * @return Double between 0 & 1 representing the maximum signal level of any channel.
 */
public double getLargestSampleSize() {
  int largest = 0;
  int current;
  boolean signed = (format.getEncoding() == AudioFormat.Encoding.PCM_SIGNED);
  int bitDepth = format.getSampleSizeInBits();
  boolean bigEndian = format.isBigEndian();
  int samples = audioData.length * 8 / bitDepth;
  if (signed) {
    if (bitDepth / 8 == 2) {
      if (bigEndian) {
        for (int cc = 0; cc < samples; cc++) {
          current = (audioData[cc * 2] * 256 + (audioData[cc * 2 + 1] & 0xFF));
          if (Math.abs(current) > largest) {
            largest = Math.abs(current);
          }
        }
      } else {
        for (int cc = 0; cc < samples; cc++) {
          current = (audioData[cc * 2 + 1] * 256 + (audioData[cc * 2] & 0xFF));
          if (Math.abs(current) > largest) {
            largest = Math.abs(current);
          }
        }
      }
    } else {
      for (int cc = 0; cc < samples; cc++) {
        current = (audioData[cc] & 0xFF);
        if (Math.abs(current) > largest) {
          largest = Math.abs(current);
        }
      }
    }
  } else {
    if (bitDepth / 8 == 2) {
      if (bigEndian) {
        for (int cc = 0; cc < samples; cc++) {
          current = (audioData[cc * 2] * 256 + (audioData[cc * 2 + 1] - 0x80));
          if (Math.abs(current) > largest) {
            largest = Math.abs(current);
          }
        }
      } else {
        for (int cc = 0; cc < samples; cc++) {
          current = (audioData[cc * 2 + 1] * 256 + (audioData[cc * 2] - 0x80));
          if (Math.abs(current) > largest) {
            largest = Math.abs(current);
          }
        }
      }
    } else {
      for (int cc = 0; cc < samples; cc++) {
        if (audioData[cc] > 0) {
          current = (audioData[cc] - 0x80);
        } else {
          current = (audioData[cc] + 0x80);
        }
        if (Math.abs(current) > largest) {
          largest = Math.abs(current);
        }
      }
    }
  } // audioData
  logger.log(
      Level.FINEST, "Max signal level: " + (double) largest / (Math.pow(2, bitDepth - 1)));
  return (double) largest / (Math.pow(2, bitDepth - 1));
}
/**
 * Returns the AudioFileFormat describing the file that will be written from this
 * AudioInputStream. Throws IllegalArgumentException if not supported.
 */
private AudioFileFormat getAudioFileFormat(AudioFileFormat.Type type, AudioInputStream stream) {
  if (!isFileTypeSupported(type, stream)) {
    throw new IllegalArgumentException("File type " + type + " not supported.");
  }
  AudioFormat format = null;
  WaveFileFormat fileFormat = null;
  AudioFormat.Encoding encoding = AudioFormat.Encoding.PCM_SIGNED;
  AudioFormat streamFormat = stream.getFormat();
  AudioFormat.Encoding streamEncoding = streamFormat.getEncoding();
  float sampleRate;
  int sampleSizeInBits;
  int channels;
  int frameSize;
  float frameRate;
  int fileSize;
  int waveType = WaveFileFormat.WAVE_FORMAT_PCM;
  if (AudioFormat.Encoding.ALAW.equals(streamEncoding)
      || AudioFormat.Encoding.ULAW.equals(streamEncoding)) {
    encoding = streamEncoding;
    sampleSizeInBits = streamFormat.getSampleSizeInBits();
    if (streamEncoding.equals(AudioFormat.Encoding.ALAW)) {
      waveType = WaveFileFormat.WAVE_FORMAT_ALAW;
    } else {
      waveType = WaveFileFormat.WAVE_FORMAT_MULAW;
    }
  } else if (streamFormat.getSampleSizeInBits() == 8) {
    encoding = AudioFormat.Encoding.PCM_UNSIGNED;
    sampleSizeInBits = 8;
  } else {
    encoding = AudioFormat.Encoding.PCM_SIGNED;
    sampleSizeInBits = streamFormat.getSampleSizeInBits();
  }
  format =
      new AudioFormat(
          encoding,
          streamFormat.getSampleRate(),
          sampleSizeInBits,
          streamFormat.getChannels(),
          streamFormat.getFrameSize(),
          streamFormat.getFrameRate(),
          false); // WAVE is little endian
  if (stream.getFrameLength() != AudioSystem.NOT_SPECIFIED) {
    fileSize =
        (int) stream.getFrameLength() * streamFormat.getFrameSize()
            + WaveFileFormat.getHeaderSize(waveType);
  } else {
    fileSize = AudioSystem.NOT_SPECIFIED;
  }
  fileFormat =
      new WaveFileFormat(
          AudioFileFormat.Type.WAVE, fileSize, format, (int) stream.getFrameLength());
  return fileFormat;
}
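// The private method above describes the WAVE file about to be written. A minimal sketch of
// the public API that ends up exercising this machinery: converting a readable audio file to
// WAV via AudioSystem.write, which delegates to a registered AudioFileWriter. File names are
// illustrative assumptions.
import java.io.File;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class WavWriterDemo {
  public static void main(String[] args) throws Exception {
    try (AudioInputStream in = AudioSystem.getAudioInputStream(new File("input.au"))) {
      int bytesWritten = AudioSystem.write(in, AudioFileFormat.Type.WAVE, new File("output.wav"));
      System.out.println("Wrote " + bytesWritten + " bytes");
    }
  }
}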
private InputStream getFileStream(WaveFileFormat waveFileFormat, InputStream audioStream)
    throws IOException {
  // private method ... assumes audioFileFormat is a supported file type
  // WAVE header fields
  AudioFormat audioFormat = waveFileFormat.getFormat();
  int headerLength = waveFileFormat.getHeaderSize();
  int riffMagic = WaveFileFormat.RIFF_MAGIC;
  int waveMagic = WaveFileFormat.WAVE_MAGIC;
  int fmtMagic = WaveFileFormat.FMT_MAGIC;
  int fmtLength = WaveFileFormat.getFmtChunkSize(waveFileFormat.getWaveType());
  short wav_type = (short) waveFileFormat.getWaveType();
  short channels = (short) audioFormat.getChannels();
  short sampleSizeInBits = (short) audioFormat.getSampleSizeInBits();
  int sampleRate = (int) audioFormat.getSampleRate();
  int frameSizeInBytes = audioFormat.getFrameSize();
  int frameRate = (int) audioFormat.getFrameRate();
  int avgBytesPerSec = channels * sampleSizeInBits * sampleRate / 8;
  short blockAlign = (short) ((sampleSizeInBits / 8) * channels);
  int dataMagic = WaveFileFormat.DATA_MAGIC;
  int dataLength = waveFileFormat.getFrameLength() * frameSizeInBytes;
  int length = waveFileFormat.getByteLength();
  int riffLength = dataLength + headerLength - 8;

  byte header[] = null;
  ByteArrayInputStream headerStream = null;
  ByteArrayOutputStream baos = null;
  DataOutputStream dos = null;
  SequenceInputStream waveStream = null;

  AudioFormat audioStreamFormat = null;
  AudioFormat.Encoding encoding = null;
  InputStream codedAudioStream = audioStream;

  // if audioStream is an AudioInputStream and we need to convert, do it here...
  if (audioStream instanceof AudioInputStream) {
    audioStreamFormat = ((AudioInputStream) audioStream).getFormat();
    encoding = audioStreamFormat.getEncoding();
    if (AudioFormat.Encoding.PCM_SIGNED.equals(encoding)) {
      if (sampleSizeInBits == 8) {
        wav_type = WaveFileFormat.WAVE_FORMAT_PCM;
        // plug in the transcoder to convert from PCM_SIGNED to PCM_UNSIGNED
        codedAudioStream =
            AudioSystem.getAudioInputStream(
                new AudioFormat(
                    AudioFormat.Encoding.PCM_UNSIGNED,
                    audioStreamFormat.getSampleRate(),
                    audioStreamFormat.getSampleSizeInBits(),
                    audioStreamFormat.getChannels(),
                    audioStreamFormat.getFrameSize(),
                    audioStreamFormat.getFrameRate(),
                    false),
                (AudioInputStream) audioStream);
      }
    }
    if ((AudioFormat.Encoding.PCM_SIGNED.equals(encoding) && audioStreamFormat.isBigEndian())
        || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)
            && !audioStreamFormat.isBigEndian())
        || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)
            && audioStreamFormat.isBigEndian())) {
      if (sampleSizeInBits != 8) {
        wav_type = WaveFileFormat.WAVE_FORMAT_PCM;
        // plug in the transcoder to convert to PCM_SIGNED_LITTLE_ENDIAN
        codedAudioStream =
            AudioSystem.getAudioInputStream(
                new AudioFormat(
                    AudioFormat.Encoding.PCM_SIGNED,
                    audioStreamFormat.getSampleRate(),
                    audioStreamFormat.getSampleSizeInBits(),
                    audioStreamFormat.getChannels(),
                    audioStreamFormat.getFrameSize(),
                    audioStreamFormat.getFrameRate(),
                    false),
                (AudioInputStream) audioStream);
      }
    }
  }
  // Now push the header into a stream, concat, and return the new SequenceInputStream
  baos = new ByteArrayOutputStream();
  dos = new DataOutputStream(baos);
  // we write in littleendian...
  dos.writeInt(riffMagic);
  dos.writeInt(big2little(riffLength));
  dos.writeInt(waveMagic);
  dos.writeInt(fmtMagic);
  dos.writeInt(big2little(fmtLength));
  dos.writeShort(big2littleShort(wav_type));
  dos.writeShort(big2littleShort(channels));
  dos.writeInt(big2little(sampleRate));
  dos.writeInt(big2little(avgBytesPerSec));
  dos.writeShort(big2littleShort(blockAlign));
  dos.writeShort(big2littleShort(sampleSizeInBits));
  // $$fb 2002-04-16: Fix for 4636355: RIFF audio headers could be _more_ spec compliant
  if (wav_type != WaveFileFormat.WAVE_FORMAT_PCM) {
    // add length 0 for "codec specific data length"
    dos.writeShort(0);
  }
  dos.writeInt(dataMagic);
  dos.writeInt(big2little(dataLength));
  dos.close();
  header = baos.toByteArray();
  headerStream = new ByteArrayInputStream(header);
  waveStream = new SequenceInputStream(headerStream, new NoCloseInputStream(codedAudioStream));
  return waveStream;
}
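// big2little and big2littleShort are referenced above but not shown in this excerpt. A
// plausible sketch of what they do (byte swapping, since DataOutputStream writes big-endian
// while RIFF/WAVE headers are little-endian); the real implementations may differ.
private static int big2little(int i) {
  // reverse the byte order of a 32-bit value
  return Integer.reverseBytes(i);
}

private static short big2littleShort(short s) {
  // reverse the byte order of a 16-bit value
  return Short.reverseBytes(s);
}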