@Override
public Format[] getSupportedOutputFormats(Format input)
{
    if (input == null)
        return outputFormats;

    if (!(input instanceof AudioFormat))
    {
        logger.warning(this.getClass().getSimpleName()
                + ".getSupportedOutputFormats: input format does not match, returning format array of {null} for "
                + input); // this can cause an NPE in JMF if it ever happens.
        return new Format[] {null};
    }

    final AudioFormat inputCast = (AudioFormat) input;
    if (!inputCast.getEncoding().equals(AudioFormat.ALAW)
            || (inputCast.getSampleSizeInBits() != 8
                    && inputCast.getSampleSizeInBits() != Format.NOT_SPECIFIED)
            || (inputCast.getChannels() != 1
                    && inputCast.getChannels() != Format.NOT_SPECIFIED)
            || (inputCast.getFrameSizeInBits() != 8
                    && inputCast.getFrameSizeInBits() != Format.NOT_SPECIFIED))
    {
        logger.warning(this.getClass().getSimpleName()
                + ".getSupportedOutputFormats: input format does not match, returning format array of {null} for "
                + input); // this can cause an NPE in JMF if it ever happens.
        return new Format[] {null};
    }

    final AudioFormat result = new AudioFormat(
            BonusAudioFormatEncodings.ALAW_RTP,
            inputCast.getSampleRate(),
            8,
            1,
            inputCast.getEndian(),
            inputCast.getSigned(),
            8,
            inputCast.getFrameRate(),
            inputCast.getDataType());
    return new Format[] {result};
}
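// Illustrative sketch, not part of the original codec: shows how a caller
// might use getSupportedOutputFormats(...) during format negotiation. The
// method below and its local names are hypothetical; it only assumes the
// surrounding class is a JMF Codec with the logger field used above.
private void sketchOutputFormatNegotiation()
{
    // 8 kHz, 8-bit, mono ALAW input, as accepted by the checks above.
    final AudioFormat alawInput =
            new AudioFormat(AudioFormat.ALAW, 8000.0, 8, 1);
    final Format[] candidates = getSupportedOutputFormats(alawInput);
    // For a matching input, the single candidate is an ALAW/RTP AudioFormat
    // that could then be handed to setOutputFormat(...); for a non-matching
    // input the array contains only null.
    if (candidates.length > 0 && candidates[0] != null)
        logger.info("Negotiated output format: " + candidates[0]);
}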
/**
 * Converts a JMF {@link AudioFormat} to the equivalent
 * {@link javax.sound.sampled.AudioFormat}.
 */
public static javax.sound.sampled.AudioFormat convertFormat(AudioFormat format)
{
    String encodingString = format.getEncoding();
    int channels = format.getChannels();
    double frameRate = format.getFrameRate();
    int frameSize = format.getFrameSizeInBits() / 8;
    double sampleRate = format.getSampleRate();
    int sampleSize = format.getSampleSizeInBits();
    boolean endian = (format.getEndian() == AudioFormat.BIG_ENDIAN);
    int signed = format.getSigned();

    Encoding encoding;
    if (AudioFormat.LINEAR.equals(encodingString))
    {
        switch (signed)
        {
        case AudioFormat.SIGNED:
            encoding = Encoding.PCM_SIGNED;
            break;
        case AudioFormat.UNSIGNED:
            encoding = Encoding.PCM_UNSIGNED;
            break;
        default:
            encoding = Encoding.PCM_SIGNED; // TODO: return null
        }
    }
    else if (AudioFormat.ALAW.equals(encodingString))
    {
        encoding = Encoding.ALAW;
    }
    else if (AudioFormat.ULAW.equals(encodingString))
    {
        encoding = Encoding.ULAW;
    }
    else if (toMpegEncoding(encodingString) != null)
    {
        encoding = toMpegEncoding(encodingString);
    }
    else if (toVorbisEncoding(encodingString) != null)
    {
        encoding = toVorbisEncoding(encodingString);
    }
    else
    {
        encoding = new CustomEncoding(encodingString);
    }

    final javax.sound.sampled.AudioFormat sampledFormat;
    if (encoding == Encoding.PCM_SIGNED)
    {
        sampledFormat = new javax.sound.sampled.AudioFormat(
                (float) sampleRate, sampleSize, channels, true, endian);
    }
    else if (encoding == Encoding.PCM_UNSIGNED)
    {
        sampledFormat = new javax.sound.sampled.AudioFormat(
                (float) sampleRate, sampleSize, channels, false, endian);
    }
    else if (encoding instanceof MpegEncoding)
    {
        // TODO: perhaps we should use reflection to avoid class not found
        // problems if javazoom is not in the classpath.
        return new MpegAudioFormat(encoding, (float) sampleRate, sampleSize,
                channels,
                // signed,
                frameSize, (float) frameRate, endian, new HashMap());
    }
    else if (encoding instanceof VorbisEncoding)
    {
        // TODO: perhaps we should use reflection to avoid class not found
        // problems if javazoom is not in the classpath.
        return new VorbisAudioFormat(encoding, (float) sampleRate, sampleSize,
                channels,
                // signed,
                frameSize, (float) frameRate, endian, new HashMap());
    }
    else
    {
        sampledFormat = new javax.sound.sampled.AudioFormat(encoding,
                (float) sampleRate, sampleSize, channels, frameSize,
                (float) frameRate, endian);
    }
    return sampledFormat;
}
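// Illustrative sketch, not part of the original utility: shows convertFormat
// feeding a javax.sound.sampled playback line. The helper name is
// hypothetical; it assumes only the standard AudioSystem/SourceDataLine API.
private static javax.sound.sampled.SourceDataLine openPlaybackLine(AudioFormat jmfFormat)
        throws javax.sound.sampled.LineUnavailableException
{
    // e.g. jmfFormat = new AudioFormat(AudioFormat.LINEAR, 44100.0, 16, 2,
    //          AudioFormat.LITTLE_ENDIAN, AudioFormat.SIGNED)
    final javax.sound.sampled.AudioFormat sampled = convertFormat(jmfFormat);
    final javax.sound.sampled.SourceDataLine line =
            javax.sound.sampled.AudioSystem.getSourceDataLine(sampled);
    line.open(sampled); // ready for start() and write(...)
    return line;
}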