SourceDataLine getSourceDataLine(AudioFormat format, int bufferSize) {
  SourceDataLine line = null;
  DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
  if (AudioSystem.isLineSupported(info)) {
    try {
      if (outputMixer == null) {
        line = (SourceDataLine) AudioSystem.getLine(info);
      } else {
        line = (SourceDataLine) outputMixer.getLine(info);
      }
      // The 4x multiplier was tuned by hand to keep latency acceptable on Linux;
      // don't change it without re-testing on every platform.
      line.open(format, bufferSize * format.getFrameSize() * 4);
      if (line.isOpen()) {
        debug(
            "SourceDataLine is "
                + line.getClass().toString()
                + "\n"
                + "Buffer size is "
                + line.getBufferSize()
                + " bytes.\n"
                + "Format is "
                + line.getFormat().toString()
                + ".");
        return line;
      }
    } catch (LineUnavailableException e) {
      error("Couldn't open the line: " + e.getMessage());
    }
  }
  error("Unable to return a SourceDataLine: unsupported format - " + format.toString());
  return line;
}
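// A minimal usage sketch, not part of the original source: it assumes the getSourceDataLine()
// helper above is in scope and simply plays one buffer of silence. The 44.1 kHz stereo format,
// the buffer size of 1024 frames, and the method name are illustrative assumptions; only
// standard javax.sound.sampled calls are used.
void playOneSilentBufferSketch() {
  AudioFormat format = new AudioFormat(44100f, 16, 2, true, false); // PCM signed, little endian
  SourceDataLine line = getSourceDataLine(format, 1024);            // helper defined above
  if (line != null) {
    line.start();
    byte[] silence = new byte[1024 * format.getFrameSize()];        // one buffer of zeros
    line.write(silence, 0, silence.length);
    line.drain();                                                   // wait for playback to finish
    line.close();
  }
}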
TargetDataLine getTargetDataLine(AudioFormat format, int bufferSize) {
  TargetDataLine line = null;
  DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
  if (AudioSystem.isLineSupported(info)) {
    try {
      if (inputMixer == null) {
        line = (TargetDataLine) AudioSystem.getLine(info);
      } else {
        line = (TargetDataLine) inputMixer.getLine(info);
      }
      line.open(format, bufferSize * format.getFrameSize());
      debug(
          "TargetDataLine buffer size is "
              + line.getBufferSize()
              + "\n"
              + "TargetDataLine format is "
              + line.getFormat().toString()
              + "\n"
              + "TargetDataLine info is "
              + line.getLineInfo().toString());
    } catch (Exception e) {
      error("Error acquiring TargetDataLine: " + e.getMessage());
    }
  } else {
    error("Unable to return a TargetDataLine: unsupported format - " + format.toString());
  }
  return line;
}
public SonarSoundEngine(int maxChannels) throws LineUnavailableException {
  silentSample = new SonarSample(new float[] {0}, 44100);
  Mixer mixer = AudioSystem.getMixer(null);

  sdl = (SourceDataLine) mixer.getLine(new Line.Info(SourceDataLine.class));
  sdl.open(new AudioFormat(rate, 16, 2, true, false), bufferSize * 2 * 2 * 2 * 2 * 2);
  soundBuffer.order(ByteOrder.LITTLE_ENDIAN);
  sdl.start();

  try {
    /*
    FloatControl volumeControl =
        (FloatControl) sdl.getControl(FloatControl.Type.MASTER_GAIN);
    volumeControl.setValue(volumeControl.getMaximum());
    */
  } catch (IllegalArgumentException e) {
    // System.out.println("Failed to set the sound volume");
  }

  listenerMixer = new ListenerMixer(maxChannels);

  leftBuf = new float[bufferSize];
  rightBuf = new float[bufferSize];

  Thread thread = new Thread(this);
  thread.setDaemon(true);
  thread.setPriority(10);
  thread.start();
}
public SourceDataLine getOutputLine(AudioFormat format) throws LineUnavailableException {
  SourceDataLine out;
  DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
  out = (SourceDataLine) mixer.getLine(info);
  out.open(format, out.getBufferSize());
  return out;
}
public TargetDataLine getInputLine(AudioFormat format) throws LineUnavailableException {
  TargetDataLine in;
  DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
  in = (TargetDataLine) mixer.getLine(info);
  in.open(format, in.getBufferSize());
  return in;
}
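// A minimal recording sketch, not part of the original source: it assumes the getInputLine()
// helper above and reads roughly one second of audio into a byte array. The mono 16-bit format
// and the method name are illustrative assumptions; only standard TargetDataLine calls are used.
byte[] recordOneSecondSketch() throws LineUnavailableException {
  AudioFormat format = new AudioFormat(44100f, 16, 1, true, false);
  TargetDataLine in = getInputLine(format);
  in.start();
  byte[] data = new byte[(int) format.getSampleRate() * format.getFrameSize()]; // ~1 second
  int offset = 0;
  while (offset < data.length) {
    int read = in.read(data, offset, data.length - offset);
    if (read <= 0) {
      break; // line stopped or no data available
    }
    offset += read;
  }
  in.stop();
  in.close();
  return data;
}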
@Ignore
@Test
public void testSilenceWriter()
    throws UnsupportedAudioFileException, InterruptedException, LineUnavailableException,
        FileNotFoundException {
  float sampleRate = 44100;
  int bufferSize = 1024;
  int overlap = 0;

  // List the available mixers.
  int index = 0;
  int selectedMixerIndex = 4;
  for (Mixer.Info mixer : AudioSystem.getMixerInfo()) {
    System.out.println(index + ": " + Shared.toLocalString(mixer));
    index++;
  }
  Mixer.Info selectedMixer = AudioSystem.getMixerInfo()[selectedMixerIndex];
  System.out.println("Selected mixer: " + Shared.toLocalString(selectedMixer));

  // Open a line on the selected mixer.
  final Mixer mixer = AudioSystem.getMixer(selectedMixer);
  final AudioFormat format = new AudioFormat(sampleRate, 16, 1, true, true);
  final DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, format);
  TargetDataLine line;
  line = (TargetDataLine) mixer.getLine(dataLineInfo);
  final int numberOfSamples = bufferSize;
  line.open(format, numberOfSamples);
  line.start();
  final AudioInputStream stream = new AudioInputStream(line);

  // Create a new dispatcher.
  AudioDispatcher dispatcher = new AudioDispatcher(stream, bufferSize, overlap);
  WaveformWriter writer = new WaveformWriter(format, "01.file.wav");

  // Add the processors: detect silence and write the waveform to disk.
  dispatcher.addAudioProcessor(new SilenceDetector());
  dispatcher.addAudioProcessor(writer);

  // Run the dispatcher on a new thread.
  new Thread(dispatcher).start();

  Thread.sleep(3000);

  // Swap in a second writer after three seconds.
  dispatcher.removeAudioProcessor(writer);
  writer = new WaveformWriter(format, "02.file.wav");
  dispatcher.addAudioProcessor(writer);

  Thread.sleep(3000);

  dispatcher.stop();
}
private void createMixerChildren(JavaMixer.MixerNode mixerNode) {
  Mixer mixer = mixerNode.getMixer();

  Line.Info[] infosToCheck = getPortInfo(mixer);
  for (Line.Info anInfosToCheck : infosToCheck) {
    if (mixer.isLineSupported(anInfosToCheck)) {
      Port port = null;
      DataLine dLine = null;

      int maxLines = mixer.getMaxLines(anInfosToCheck);
      // Workaround to prevent a JVM crash on Mac OS X (Intel) 1.5.0_07 JVM
      if (maxLines > 0) {
        try {
          if (anInfosToCheck instanceof Port.Info) {
            port = (Port) mixer.getLine(anInfosToCheck);
            port.open();
          } else if (anInfosToCheck instanceof DataLine.Info) {
            dLine = (DataLine) mixer.getLine(anInfosToCheck);
            if (!dLine.isOpen()) {
              dLine.open();
            }
          }
        } catch (LineUnavailableException e) {
          e.printStackTrace();
        } catch (Exception e) {
          // Do nothing.
        }
      }

      if (port != null) {
        JavaMixer.PortNode portNode = new JavaMixer.PortNode(port);
        createPortChildren(portNode);
        mixerNode.add(portNode);
      } else if (dLine != null) {
        JavaMixer.PortNode portNode = new JavaMixer.PortNode(dLine);
        createPortChildren(portNode);
        mixerNode.add(portNode);
      }
    }
  }
}
/**
 * Inits a DataLine.<br>
 * We check if the line supports Gain and Pan controls.
 *
 * <p>From the AudioInputStream, i.e. from the sound file, we fetch information about the format
 * of the audio data. This includes the sampling frequency, the number of channels and the size
 * of the samples. This information is needed to ask JavaSound for a suitable output line for
 * this audio file. Furthermore, we have to give JavaSound a hint about how big the internal
 * buffer for the line should be. Here, we say AudioSystem.NOT_SPECIFIED, signaling that we
 * don't care about the exact size. JavaSound will use some default value for the buffer size.
 */
protected void createLine() throws LineUnavailableException {
  log.info("Create Line");
  if (m_line == null) {
    AudioFormat sourceFormat = m_audioInputStream.getFormat();
    log.info("Create Line : Source format : " + sourceFormat.toString());
    int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
    if (nSampleSizeInBits <= 0) {
      nSampleSizeInBits = 16;
    }
    if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW)
        || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW)) {
      nSampleSizeInBits = 16;
    }
    if (nSampleSizeInBits != 8) {
      nSampleSizeInBits = 16;
    }
    AudioFormat targetFormat =
        new AudioFormat(
            AudioFormat.Encoding.PCM_SIGNED,
            sourceFormat.getSampleRate(),
            nSampleSizeInBits,
            sourceFormat.getChannels(),
            sourceFormat.getChannels() * (nSampleSizeInBits / 8),
            sourceFormat.getSampleRate(),
            false);
    log.info("Create Line : Target format: " + targetFormat);
    // Keep a reference on the encoded stream for progress notification.
    m_encodedaudioInputStream = m_audioInputStream;
    try {
      // Get total length in bytes of the encoded stream.
      encodedLength = m_encodedaudioInputStream.available();
    } catch (IOException e) {
      log.log(Level.SEVERE, "Cannot get m_encodedaudioInputStream.available()", e);
    }
    // Create decoded stream.
    m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
    AudioFormat audioFormat = m_audioInputStream.getFormat();
    // -1 is AudioSystem.NOT_SPECIFIED: let JavaSound choose the buffer size.
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, -1);
    Mixer mixer = getMixer(m_mixerName);
    if (mixer != null) {
      log.info("Mixer : " + mixer.getMixerInfo().toString());
      m_line = (SourceDataLine) mixer.getLine(info);
    } else {
      m_line = (SourceDataLine) AudioSystem.getLine(info);
      m_mixerName = null;
    }
    log.info("Line : " + m_line.toString());
    log.info("Line Info : " + m_line.getLineInfo().toString());
    log.info("Line AudioFormat: " + m_line.getFormat().toString());
  }
}
private static void runVolumeCommand(Closure closure) {
  Mixer.Info[] infos = AudioSystem.getMixerInfo();
  for (Mixer.Info info : infos) {
    Mixer mixer = AudioSystem.getMixer(info);
    if (mixer.isLineSupported(Port.Info.SPEAKER)) {
      Port port;
      try {
        port = (Port) mixer.getLine(Port.Info.SPEAKER);
        port.open();
        if (port.isControlSupported(FloatControl.Type.VOLUME)) {
          FloatControl volume = (FloatControl) port.getControl(FloatControl.Type.VOLUME);
          closure.execute(volume);
        }
        port.close();
      } catch (LineUnavailableException e) {
        logger.error("Cannot access master volume control", e);
      }
    }
  }
}
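// A hypothetical caller sketch, not in the original source: it assumes Closure is a
// single-method callback whose execute(FloatControl) receives the speaker VOLUME control found
// above; if the real Closure interface differs, the anonymous class below must be adapted.
// The control's actual range comes from getMinimum()/getMaximum(), so the sketch clamps to it.
private static void setMasterVolumeSketch(final float level) {
  runVolumeCommand(
      new Closure() {
        public void execute(FloatControl volume) {
          float clamped = Math.max(volume.getMinimum(), Math.min(volume.getMaximum(), level));
          volume.setValue(clamped);
        }
      });
}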
private boolean setupSpeaker(String device) throws LineUnavailableException {
  Mixer.Info[] aInfos = AudioSystem.getMixerInfo();

  for (int i = 0; i < aInfos.length; i++) {
    Mixer.Info mixerInfo = aInfos[i];

    if (GetDataLines.equals(device, mixerInfo) == false) {
      if (Logger.logLevel >= Logger.LOG_MOREINFO) {
        Logger.println("Skipping: " + mixerInfo.getName() + "," + mixerInfo.getDescription());
      }
      continue;
    }

    try {
      Mixer mixer = AudioSystem.getMixer(mixerInfo);
      Line.Info[] infos = mixer.getSourceLineInfo();

      for (int j = 0; j < infos.length; j++) {
        Line line = (Line) mixer.getLine(infos[j]);

        if (line instanceof SourceDataLine) {
          speaker = (SourceDataLine) line;

          if (Logger.logLevel >= Logger.LOG_INFO) {
            Logger.println("Found speaker: " + j);
          }
          break;
        }
      }
    } catch (Exception e) {
      if (Logger.logLevel >= Logger.LOG_MOREINFO) {
        Logger.println("Exception: " + e.getMessage());
      }
    }
  }
  return speaker != null;
}
private void mixerComboBoxActionPerformed() {
  Mixer mixer = AudioSystem.getMixer((Mixer.Info) mixerComboBox.getSelectedItem());
  Line.Info lineInfo = mixer.getSourceLineInfo(new Line.Info(Clip.class))[0];
  boolean volumeSupported;
  boolean panSupported;
  try {
    Line line = mixer.getLine(lineInfo);
    volumeSupported = line.isControlSupported(FloatControl.Type.MASTER_GAIN);
    panSupported = line.isControlSupported(FloatControl.Type.PAN);
  } catch (LineUnavailableException e) {
    volumeSupported = false;
    panSupported = false;
  }
  enableMixerVolumeCheckBox.setEnabled(volumeSupported);
  enableMixerPanCheckBox.setEnabled(panSupported);
}
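// A minimal sketch, not from the original source, showing what the two check boxes above would
// control: once a Clip from the selected mixer is open, MASTER_GAIN (a value in dB) and PAN
// (-1.0 full left to 1.0 full right) can be adjusted through FloatControl. The clip parameter
// and method name are illustrative assumptions; only standard javax.sound.sampled calls appear.
void applyGainAndPanSketch(Clip clip, float gainDb, float pan) {
  if (clip.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
    FloatControl gain = (FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN);
    gain.setValue(Math.max(gain.getMinimum(), Math.min(gain.getMaximum(), gainDb)));
  }
  if (clip.isControlSupported(FloatControl.Type.PAN)) {
    FloatControl panControl = (FloatControl) clip.getControl(FloatControl.Type.PAN);
    panControl.setValue(Math.max(-1.0f, Math.min(1.0f, pan)));
  }
}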
private void setNewMixer(Mixer mixer)
    throws LineUnavailableException, UnsupportedAudioFileException {

  if (dispatcher != null) {
    dispatcher.stop();
  }

  if (fileName == null) {
    final AudioFormat format = new AudioFormat(sampleRate, 16, 1, true, false);
    final DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, format);
    TargetDataLine line;
    line = (TargetDataLine) mixer.getLine(dataLineInfo);
    final int numberOfSamples = bufferSize;
    line.open(format, numberOfSamples);
    line.start();
    final AudioInputStream stream = new AudioInputStream(line);

    // Create a new dispatcher.
    dispatcher = new AudioDispatcher(stream, bufferSize, overlap);
  } else {
    try {
      File audioFile = new File(fileName);
      dispatcher = AudioDispatcher.fromFile(audioFile, bufferSize, overlap);
      AudioFormat format = AudioSystem.getAudioFileFormat(audioFile).getFormat();
      dispatcher.addAudioProcessor(new AudioPlayer(format));
    } catch (IOException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }
  }
  currentMixer = mixer;

  // Add the processors: handle pitch events and feed the FFT processor.
  dispatcher.addAudioProcessor(new PitchProcessor(algo, sampleRate, bufferSize, this));
  dispatcher.addAudioProcessor(fftProcessor);

  // Run the dispatcher on a new thread.
  new Thread(dispatcher, "Audio dispatching").start();
}
private void startFile(File inputFile, Mixer mixer) {
  if (dispatcher != null) {
    dispatcher.stop();
  }
  AudioFormat format;
  int bufferSize = 1024;
  int overlap = 0;
  double sampleRate = 44100;
  try {
    if (inputFile != null) {
      format = AudioSystem.getAudioFileFormat(inputFile).getFormat();
      sampleRate = format.getSampleRate();
    } else {
      format = new AudioFormat((float) sampleRate, 16, 1, true, true);
    }
    inputGain = new GainProcessor(defaultInputGain / 100.0);
    AudioPlayer audioPlayer = new AudioPlayer(format);

    if (inputFile == null) {
      DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, format);
      TargetDataLine line;
      line = (TargetDataLine) mixer.getLine(dataLineInfo);
      line.open(format, bufferSize);
      line.start();
      final AudioInputStream stream = new AudioInputStream(line);
      final TarsosDSPAudioInputStream audioStream = new JVMAudioInputStream(stream);
      dispatcher = new AudioDispatcher(audioStream, bufferSize, overlap);
    } else {
      if (format.getChannels() != 1) {
        dispatcher =
            AudioDispatcherFactory.fromFile(
                inputFile, bufferSize * format.getChannels(), overlap * format.getChannels());
        dispatcher.addAudioProcessor(new MultichannelToMono(format.getChannels(), true));
      } else {
        dispatcher = AudioDispatcherFactory.fromFile(inputFile, bufferSize, overlap);
      }
    }
    flangerEffect =
        new FlangerEffect(
            defaultLength / 1000.0,
            defaultImpact / 100.0,
            sampleRate,
            defaultFrequency / 10.0);
    dispatcher.addAudioProcessor(flangerEffect);
    dispatcher.addAudioProcessor(inputGain);
    dispatcher.addAudioProcessor(new WaveformWriter(format, "flanger.wav"));
    dispatcher.addAudioProcessor(audioPlayer);
    Thread t = new Thread(dispatcher);
    t.start();
  } catch (UnsupportedAudioFileException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  } catch (LineUnavailableException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  }
}
/*
 * Set up the speaker to use the DirectAudioDevice.
 */
private void setupSpeaker() throws LineUnavailableException {
  done = false;

  /*
   * Set up the speaker.
   */
  AudioFormat audioFormat =
      new AudioFormat(
          sampleRate, // Sample rate (Hz)
          MediaManager.BITS_PER_SAMPLE, // Sample size (bits)
          channels, // Channels (2 = stereo)
          true, // Signed
          true); // Big endian (false would mean little endian)

  String device = Utils.getPreference(Speaker.SPEAKER_PREFERENCE);

  if (device != null && device.length() > 0 && device.equalsIgnoreCase("Default") == false) {
    if (setupSpeaker(device) == true) {
      if (Logger.logLevel >= Logger.LOG_INFO) {
        Logger.println("Using specified speaker: " + device);
      }
      startSpeaker(audioFormat);
      return;
    }
    Logger.println("Specified speaker not available: " + device);
  } else {
    if (Logger.logLevel >= Logger.LOG_INFO) {
      Logger.println("Using default speaker");
    }
  }

  DataLine.Info speakerInfo = new DataLine.Info(SourceDataLine.class, audioFormat);

  try {
    speaker = (SourceDataLine) AudioSystem.getLine(speakerInfo);
  } catch (IllegalArgumentException e) {
    Logger.println("GetLine failed " + e.getMessage());
    throw new LineUnavailableException(e.getMessage());
  }

  if (speaker == null) {
    Logger.println("Line unavailable...");
    throw new LineUnavailableException("No audio device for the speaker!");
  }

  if (isDirectAudio(speaker) == false) {
    // Search the other mixers for a SourceDataLine backed by the DirectAudioDevice.
    Mixer.Info[] aInfos = AudioSystem.getMixerInfo();

    for (int i = 0; i < aInfos.length; i++) {
      try {
        Mixer mixer = AudioSystem.getMixer(aInfos[i]);
        SourceDataLine s = (SourceDataLine) mixer.getLine(speakerInfo);

        if (isDirectAudio(s)) {
          speaker = s;
          break;
        }
      } catch (Exception e) {
      }
    }
  }

  if (isDirectAudio(speaker) == false) {
    String s = System.getProperty("java.version");

    if (s.indexOf("1.5.") >= 0) {
      Logger.println("No DirectAudioDevice found for the speaker");
    }
  }

  startSpeaker(audioFormat);
}
@SuppressWarnings("SleepWhileInLoop") @Override boolean bindAudioBuffer(AudioBuffer audioBuffer) { // First check we've been initialised if (!initialised) { return false; } // Wait for AudioBuffer to be loaded, or 20 seconds long startTime = System.currentTimeMillis(); while (audioBuffer.getState() != AudioBuffer.STATE_LOADED && System.currentTimeMillis() - startTime < 20000) { try { Thread.sleep(50); } catch (InterruptedException ex) { } } if (audioBuffer instanceof JavaSoundAudioBuffer && audioBuffer.getState() == AudioBuffer.STATE_LOADED) { // Cast to JavaSoundAudioBuffer to enable easier access to specific methods JavaSoundAudioBuffer buffer = (JavaSoundAudioBuffer) audioBuffer; // Get a JavaSound DataLine and Clip DataLine.Info lineInfo; lineInfo = new DataLine.Info(Clip.class, buffer.getAudioFormat()); Clip newClip; try { newClip = (Clip) mixer.getLine(lineInfo); } catch (LineUnavailableException ex) { log.warn( "Error binding JavaSoundSource (" + this.getSystemName() + ") to AudioBuffer (" + this.getAssignedBufferName() + ") " + ex); return false; } this.clip = newClip; try { clip.open( buffer.getAudioFormat(), buffer.getDataStorageBuffer(), 0, buffer.getDataStorageBuffer().length); } catch (LineUnavailableException ex) { log.warn( "Error binding JavaSoundSource (" + this.getSystemName() + ") to AudioBuffer (" + this.getAssignedBufferName() + ")" + ex); } if (log.isDebugEnabled()) { log.debug( "Bind JavaSoundAudioSource (" + this.getSystemName() + ") to JavaSoundAudioBuffer (" + audioBuffer.getSystemName() + ")"); } return true; } else { log.warn( "AudioBuffer not loaded error when binding JavaSoundSource (" + this.getSystemName() + ") to AudioBuffer (" + this.getAssignedBufferName() + ")"); return false; } }
/**
 * Single audio channel playback with automatic starting and stopping of the underlying
 * SourceDataLine specified by the mixer and mixer channel arguments.
 *
 * <p>Maintains an internal non-blocking audio packet queue and processes this queue 25 times a
 * second (every 40 ms).
 *
 * @param threadPoolManager for assigning buffer processing schedule task
 * @param mixer to obtain source data line
 * @param mixerChannel either mono or left/right stereo
 * @param audioFormat to use during playback
 * @param lineInfo to use when obtaining the source data line
 * @param requestedBufferSize of approximately 1 second of audio
 */
public AudioOutput(
    ThreadPoolManager threadPoolManager,
    Mixer mixer,
    MixerChannel mixerChannel,
    AudioFormat audioFormat,
    Line.Info lineInfo,
    int requestedBufferSize) {
  mThreadPoolManager = threadPoolManager;
  mMixer = mixer;
  mMixerChannel = mixerChannel;

  try {
    mOutput = (SourceDataLine) mMixer.getLine(lineInfo);

    if (mOutput != null) {
      mOutput.open(audioFormat, requestedBufferSize);

      // Start threshold: buffer is full with 10% or less of capacity remaining
      mBufferStartThreshold = (int) (mOutput.getBufferSize() * 0.10);

      // Stop threshold: buffer is empty with 90% or more capacity available
      mBufferStopThreshold = (int) (mOutput.getBufferSize() * 0.90);

      mOutput.addLineListener(this);

      if (mOutput != null) {
        try {
          Control gain = mOutput.getControl(FloatControl.Type.MASTER_GAIN);
          mGainControl = (FloatControl) gain;
        } catch (IllegalArgumentException iae) {
          mLog.warn(
              "Couldn't obtain MASTER GAIN control for stereo line ["
                  + mixer.getMixerInfo().getName()
                  + " | "
                  + getChannelName()
                  + "]");
        }

        try {
          Control mute = mOutput.getControl(BooleanControl.Type.MUTE);
          mMuteControl = (BooleanControl) mute;
        } catch (IllegalArgumentException iae) {
          mLog.warn(
              "Couldn't obtain MUTE control for stereo line ["
                  + mixer.getMixerInfo().getName()
                  + " | "
                  + getChannelName()
                  + "]");
        }

        /* Run the queue processor task every 40 milliseconds or 25 times a second */
        mProcessorTask =
            mThreadPoolManager.scheduleFixedRate(
                ThreadType.AUDIO_PROCESSING, new BufferProcessor(), 40, TimeUnit.MILLISECONDS);
      }

      mAudioStartEvent = new AudioEvent(AudioEvent.Type.AUDIO_STARTED, getChannelName());
      mAudioStopEvent = new AudioEvent(AudioEvent.Type.AUDIO_STOPPED, getChannelName());

      mCanProcessAudio = true;
    }
  } catch (LineUnavailableException e) {
    mLog.error(
        "Couldn't obtain audio source data line for "
            + "audio output - mixer ["
            + mMixer.getMixerInfo().getName()
            + "]");
  }
}
public SoundServer(Mixer mixer, DataLine.Info dataLineInfo, AudioFormat audioFormat) {
  try {
    final TargetDataLine targetDataLine = (TargetDataLine) mixer.getLine(dataLineInfo);
    targetDataLine.open(audioFormat);
    targetDataLine.start();

    try {
      serverSocket = new ServerSocket(20000);
    } catch (IOException e1) {
      e1.printStackTrace();
      return;
    }

    clients = new ArrayList<>();
    streams = new HashMap<>();
    runServer = true;

    acceptRunnable =
        new Runnable() {
          public void run() {
            while (runServer) {
              try {
                Socket socket = serverSocket.accept();
                synchronized (clients) {
                  clients.add(socket);
                  synchronized (streams) {
                    streams.put(socket, socket.getOutputStream());
                  }
                  System.out.println("Client connected from " + socket.getInetAddress());
                }
              } catch (IOException e) {
                e.printStackTrace();
              }
            }
          }
        };

    sendRunnable =
        new Runnable() {
          public void run() {
            System.out.println("Server is running...");
            recordBuffer = new byte[BUFFER_SIZE];
            while (runServer) {
              int count = targetDataLine.read(recordBuffer, 0, BUFFER_SIZE);
              if (count > 0) {
                synchronized (clients) {
                  for (int i = 0; i < clients.size(); i++) {
                    Socket client = clients.get(i);
                    OutputStream os = streams.get(client);
                    try {
                      os.write(recordBuffer, 0, BUFFER_SIZE);
                    } catch (SocketException e) {
                      // Mark the dropped client; it is removed after the send loop.
                      clients.set(i, null);
                      System.out.println("Client connection dropped...");
                    } catch (IOException e) {
                      e.printStackTrace();
                    }
                  }
                  // Remove dropped clients. removeIf avoids the skipped-element bug that
                  // remove(i) inside an ascending index loop would cause.
                  clients.removeIf(c -> c == null);
                }
              }
            }
          }
        };

    new Thread(acceptRunnable).start();
    new Thread(sendRunnable).start();
  } catch (LineUnavailableException lue) {
    lue.printStackTrace();
  }
}
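// A hypothetical client sketch, not part of the original source: it connects to the SoundServer
// above on port 20000 and plays the raw PCM stream it receives. The AudioFormat must match the
// format the server's TargetDataLine was opened with; the 44.1 kHz mono big-endian format, the
// class name, and the 4096-byte buffer are illustrative assumptions, not values from the project.
import java.io.InputStream;
import java.net.Socket;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.SourceDataLine;

public class SoundClientSketch {
  public static void main(String[] args) throws Exception {
    AudioFormat format = new AudioFormat(44100f, 16, 1, true, true);
    SourceDataLine speaker = AudioSystem.getSourceDataLine(format);
    speaker.open(format);
    speaker.start();

    try (Socket socket = new Socket("localhost", 20000);
        InputStream in = socket.getInputStream()) {
      byte[] buffer = new byte[4096];
      int frameSize = format.getFrameSize(); // 2 bytes per mono 16-bit frame
      int filled = 0;
      int read;
      while ((read = in.read(buffer, filled, buffer.length - filled)) != -1) {
        filled += read;
        int writable = filled - (filled % frameSize); // write whole frames only
        if (writable > 0) {
          speaker.write(buffer, 0, writable);
          // Carry any trailing partial frame over to the next pass.
          System.arraycopy(buffer, writable, buffer, 0, filled - writable);
          filled -= writable;
        }
      }
    } finally {
      speaker.drain();
      speaker.close();
    }
  }
}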