public void run() { // get line and buffer from ThreadLocals SourceDataLine line = (SourceDataLine) localLine.get(); byte[] buffer = (byte[]) localBuffer.get(); if (line == null || buffer == null) { // the line is unavailable return; } // copy data to the line try { int numBytesRead = 0; while (numBytesRead != -1) { // if paused, wait until unpaused synchronized (pausedLock) { if (paused) { try { pausedLock.wait(); } catch (InterruptedException ex) { return; } } } // copy data numBytesRead = source.read(buffer, 0, buffer.length); if (numBytesRead != -1) { line.write(buffer, 0, numBytesRead); } } } catch (IOException ex) { ex.printStackTrace(); } }
public int write(byte[] data, int ofs, int len) { int maxTries = 10; int totWrite = 0; int nofs = ofs, nlen = len; while (nofs < ofs + len && maxTries > 0) { int written = 0; if (format.getBits() == 16 && format.getChannels() == 2 && format.getRate() == INPUT_RATE) written = sdl.write(data, nofs, nlen); else written = writeConv(data, nofs, nlen); nofs += written; nlen -= written; totWrite += written; maxTries--; deliveredData += written; // calculations when channels and bits and rate varies... // int divisor = (format.getBits() / 8) * format.getChannels(); // int deliveredTime = // (int) (1000 * deliveredData / (format.getRate() * divisor)); int deliveredTime = (int) (1000 * deliveredData / (INPUT_RATE * 4)); long pos = sdl.getMicrosecondPosition() / 1000; long sleepTime = deliveredTime - pos - bufferTime; if (sleepTime < 0) sleepTime = 0; try { Thread.sleep(sleepTime); } catch (InterruptedException e) { } } return totWrite; }
public static void warp(int repeat) throws LineUnavailableException, InterruptedException { AudioFormat af = new AudioFormat( SAMPLE_RATE, // sampleRate 8, // sampleSizeInBits 1, // channels true, // signed false); // bigEndian SourceDataLine sdl = AudioSystem.getSourceDataLine(af); sdl.open(af); sdl.start(); byte[] buf = new byte[1]; int step; for (int j = 0; j < repeat; j++) { step = 25; for (int i = 0; i < 2000; i++) { if (i < 500) { buf[0] = ((i % step > 0) ? 32 : (byte) 0); if (i % 25 == 0) step--; } else { buf[0] = ((i % step > 0) ? 16 : (byte) 0); if (i % 50 == 0) step++; } sdl.write(buf, 0, 1); } } sdl.drain(); sdl.stop(); sdl.close(); }
/**
 * Plays raw audio read from {@code source} on a freshly opened line matching
 * this player's {@code format}, buffering roughly a tenth of a second at a
 * time. Returns silently if the line cannot be opened.
 *
 * @param source stream of raw audio bytes in {@code format}
 */
public void play(InputStream source) {
  // Buffer roughly a tenth of a second of audio.
  int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10);
  byte[] buffer = new byte[bufferSize];
  SourceDataLine line;
  try {
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    line = (SourceDataLine) AudioSystem.getLine(info);
    line.open(format, bufferSize);
  } catch (LineUnavailableException e) {
    e.printStackTrace();
    return;
  }
  line.start();
  try {
    int numBytesRead = 0;
    while (numBytesRead != -1) {
      numBytesRead = source.read(buffer, 0, buffer.length);
      if (numBytesRead != -1) line.write(buffer, 0, numBytesRead);
    }
  } catch (IOException e) {
    e.printStackTrace();
  } finally {
    // BUG FIX: flush queued audio and release the line even if the read
    // loop fails (the original only reached drain/close on the happy path
    // because the catch fell through — making it explicit and exception-safe).
    line.drain();
    line.close();
  }
}
/** @see com.groovemanager.thread.ProgressThread#processNext() */
protected void processNext() throws Exception {
  // Pull the next chunk from the input stream.
  numBytesRead = in.read(buffer, 0, buffer.length);
  if (numBytesRead == -1) {
    return; // end of stream — nothing to forward
  }
  // Forward the chunk to the analysis line and track frames written.
  analysisLine.write(buffer, 0, numBytesRead);
  written += numBytesRead / in.getFormat().getFrameSize();
}
public void tick() { soundBuffer.clear(); // targetAmplitude = (targetAmplitude - 1) * 0.9f + 1; // targetAmplitude = (targetAmplitude - 1) * 0.9f + 1; synchronized (listenerMixer) { float maxAmplitude = listenerMixer.read(leftBuf, rightBuf, rate); // if (maxAmplitude > targetAmplitude) targetAmplitude = maxAmplitude; } soundBuffer.clear(); float gain = 32000; for (int i = 0; i < bufferSize; i++) { // amplitude += (targetAmplitude - amplitude) / rate; // amplitude = 1; // float gain = 30000; int l = (int) (leftBuf[i] * gain); int r = (int) (rightBuf[i] * gain); if (l > 32767) l = 32767; if (r > 32767) r = 32767; if (l < -32767) l = -32767; if (r < -32767) r = -32767; soundBuffer.putShort((short) l); soundBuffer.putShort((short) r); } sdl.write(soundBuffer.array(), 0, bufferSize * 2 * 2); }
// @Test public void playTest() throws Exception { logger.info("start playTest"); SourceDataLine audioLine = null; MediaAudio samples = beepSamples(); logger.info("sample is ready"); AudioFormat format = new AudioFormat( (float) samples.getSampleRate(), (int) samples.getBytesPerSample() * 8, samples.getChannels(), true, false); DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); audioLine = (SourceDataLine) AudioSystem.getLine(info); audioLine.open(format); logger.info("beepstart"); audioLine.start(); Buffer buffer = samples.getData(0); audioLine.write( buffer.getByteArray(0, samples.getDataPlaneSize(0)), 0, samples.getDataPlaneSize(0)); audioLine.drain(); logger.info("beepend"); audioLine.close(); audioLine = null; }
public static void sound(double hz, int msecs, double vol) throws LineUnavailableException { if (hz <= 0) throw new IllegalArgumentException("Frequency <= 0 hz"); if (msecs <= 0) throw new IllegalArgumentException("Duration <= 0 msecs"); if (vol > 1.0 || vol < 0.0) throw new IllegalArgumentException("Volume out of range 0.0 - 1.0"); byte[] buf = new byte[(int) SAMPLE_RATE * msecs / 1000]; for (int i = 0; i < buf.length; i++) { double angle = i / (SAMPLE_RATE / hz) * 2.0 * Math.PI; buf[i] = (byte) (Math.sin(angle) * 127.0 * vol); } // shape the front and back 10ms of the wave form for (int i = 0; i < SAMPLE_RATE / 100.0 && i < buf.length / 2; i++) { buf[i] = (byte) (buf[i] * i / (SAMPLE_RATE / 100.0)); buf[buf.length - 1 - i] = (byte) (buf[buf.length - 1 - i] * i / (SAMPLE_RATE / 100.0)); } AudioFormat af = new AudioFormat(SAMPLE_RATE, 8, 1, true, false); SourceDataLine sdl = AudioSystem.getSourceDataLine(af); sdl.open(af); sdl.start(); sdl.write(buf, 0, buf.length); sdl.drain(); sdl.close(); }
public void run() { try { AudioInputStream ais = AudioSystem.getAudioInputStream(soundFile); AudioFormat format = ais.getFormat(); // System.out.println("Format: " + format); DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); SourceDataLine source = (SourceDataLine) AudioSystem.getLine(info); source.open(format); source.start(); int read = 0; byte[] audioData = new byte[16384]; while (read > -1) { read = ais.read(audioData, 0, audioData.length); if (read >= 0) { source.write(audioData, 0, read); } } donePlaying = true; source.drain(); source.close(); } catch (Exception exc) { System.out.println("error: " + exc.getMessage()); exc.printStackTrace(); } }
public static void bang() throws LineUnavailableException, InterruptedException { AudioFormat af = new AudioFormat( SAMPLE_RATE, // sampleRate 8, // sampleSizeInBits 1, // channels true, // signed false); // bigEndian SourceDataLine sdl = AudioSystem.getSourceDataLine(af); sdl.open(af); sdl.start(); byte[] buf = new byte[1]; Random r = new Random(); boolean silence = true; for (int i = 0; i < 8000; i++) { while (r.nextInt() % 10 != 0) { buf[0] = silence ? 0 : (byte) Math.abs( r.nextInt() % (int) (1. + 63. * (1. + Math.cos(((double) i) * Math.PI / 8000.)))); i++; sdl.write(buf, 0, 1); } silence = !silence; } sdl.drain(); sdl.stop(); sdl.close(); }
private void doPlay(AudioFormat decodedFormat, AudioInputStream audioIn) throws Exception { byte[] data = new byte[4096]; SourceDataLine localLine = line; if (localLine != null) { // Start line.start(); int nBytesRead = 0; while (nBytesRead != -1) { // if this line was stopped, return to caller who will do cleanup // no space to write - busy wait until available - no need to wait if play was stopped i.e. // line closed while (localLine.available() == 0 && localLine.isOpen()) ; if (!localLine.isOpen()) break; // how much space is available to write on the line // don't read more than that int available = localLine.available(); int maxBytesToRead = (available < data.length) ? available : data.length; nBytesRead = audioIn.read(data, 0, maxBytesToRead); if (nBytesRead != -1) { localLine.write(data, 0, nBytesRead); } } // Stop // stop(); } }
/**
 * Plays the wave file named by {@code filename}, panning hard left or right
 * according to {@code curPosition} when the line supports it. Shows an error
 * dialog if the file is missing.
 */
public void run() {
  File soundFile = new File(filename);
  if (!soundFile.exists()) {
    System.err.println("Wave file not found: " + filename);
    Dialog.erreur(null, "Le fichier " + filename + " n'a pas été trouver.");
    return;
  }

  // Open the audio stream for the file.
  AudioInputStream audioInputStream;
  try {
    audioInputStream = AudioSystem.getAudioInputStream(soundFile);
  } catch (UnsupportedAudioFileException e1) {
    e1.printStackTrace();
    return;
  } catch (IOException e1) {
    e1.printStackTrace();
    return;
  }

  // Acquire and open a source line matching the stream's format.
  AudioFormat format = audioInputStream.getFormat();
  DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
  SourceDataLine auline;
  try {
    auline = (SourceDataLine) AudioSystem.getLine(info);
    auline.open(format);
  } catch (LineUnavailableException e) {
    e.printStackTrace();
    return;
  } catch (Exception e) {
    e.printStackTrace();
    return;
  }

  // Pan hard left or right when requested and supported.
  if (auline.isControlSupported(FloatControl.Type.PAN)) {
    FloatControl pan = (FloatControl) auline.getControl(FloatControl.Type.PAN);
    if (curPosition == Position.RIGHT) {
      pan.setValue(1.0f);
    } else if (curPosition == Position.LEFT) {
      pan.setValue(-1.0f);
    }
  }

  // Pump the stream into the line until EOF, then flush and release it.
  auline.start();
  byte[] chunk = new byte[EXTERNAL_BUFFER_SIZE];
  try {
    for (int count = 0; count != -1; ) {
      count = audioInputStream.read(chunk, 0, chunk.length);
      if (count >= 0) {
        auline.write(chunk, 0, count);
      }
    }
  } catch (IOException e) {
    e.printStackTrace();
    return;
  } finally {
    auline.drain();
    auline.close();
  }
}
/** 列に周波数とバイト列の長さ指定で形を書き込む */ public void writeNote(double frequency, int sampleCount) { byte[] b = new byte[sampleRate]; double amplitude = sampleRate / frequency; // 波長 for (int i = 0; i < b.length; i++) { double r = i / amplitude; b[i] = (byte) ((Math.round(r) % 2 == 0) ? 100 : -100); } // 再生(バイト列をlineに書き込む) line.write(b, 0, b.length); }
public void writeNote(double frequency) { byte[] b = new byte[sampleRate]; for (int i = 0; i < b.length; i++) { double r = i / (sampleRate / frequency); b[i] = (byte) ((Math.round(r) % 2 == 0) ? 100 : -100); } // 再生(バイト列をlineに書き込む) line.write(b, 0, b.length); line.drain(); // 終了まで }
/**
 * Plays the file named by {@code filename} at 5x speed (via the line's
 * SAMPLE_RATE control, when available).
 */
public void run() {
  File soundFile = new File(this.filename);
  if (!soundFile.exists()) {
    System.err.println("nicht gefunden: " + filename);
    return;
  }
  AudioInputStream audioInputStream = null;
  try {
    audioInputStream = AudioSystem.getAudioInputStream(soundFile);
  } catch (UnsupportedAudioFileException e1) {
    e1.printStackTrace();
    return;
  } catch (IOException e1) {
    e1.printStackTrace();
    return;
  }
  AudioFormat format = audioInputStream.getFormat();
  SourceDataLine auline = null;
  DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
  try {
    auline = (SourceDataLine) AudioSystem.getLine(info);
    auline.open(format);
  } catch (LineUnavailableException e) {
    e.printStackTrace();
    return;
  } catch (Exception e) {
    e.printStackTrace();
    return;
  }
  // Speed up playback 5x via the SAMPLE_RATE control.
  // BUG FIX: guard with isControlSupported — getControl throws
  // IllegalArgumentException on lines that don't expose this control,
  // which crashed the thread before playback even started.
  if (auline.isControlSupported(FloatControl.Type.SAMPLE_RATE)) {
    FloatControl rate = (FloatControl) auline.getControl(FloatControl.Type.SAMPLE_RATE);
    rate.setValue(rate.getValue() * 5f);
  }
  auline.start();
  int nBytesRead = 0;
  byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
  try {
    while (nBytesRead != -1) {
      nBytesRead = audioInputStream.read(abData, 0, abData.length);
      if (nBytesRead >= 0) auline.write(abData, 0, nBytesRead);
    }
  } catch (IOException e) {
    e.printStackTrace();
    return;
  } finally {
    auline.drain();
    auline.close();
  }
}
// http://stackoverflow.com/questions/13789063/get-sound-from-a-url-with-java private void playMP3(final String url) { try { // Create the JavaFX Panel for the WebView JFXPanel fxPanel = new JFXPanel(); fxPanel.setLocation(new Point(0, 0)); // Initialize the webView in a JavaFX-Thread Platform.runLater( new Runnable() { public void run() { MediaPlayer player = new MediaPlayer(new Media(url)); player.play(); } }); if (true) return; AudioInputStream in = AudioSystem.getAudioInputStream(new URL(url)); AudioFormat baseFormat = in.getFormat(); AudioFormat decodedFormat = new AudioFormat( AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(), 16, baseFormat.getChannels(), baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false); AudioInputStream din = AudioSystem.getAudioInputStream(decodedFormat, in); DataLine.Info info = new DataLine.Info(SourceDataLine.class, decodedFormat); SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info); if (line != null) { line.open(decodedFormat); byte[] data = new byte[4096]; // Start line.start(); int nBytesRead; while ((nBytesRead = din.read(data, 0, data.length)) != -1) { line.write(data, 0, nBytesRead); } // Stop line.drain(); line.stop(); line.close(); din.close(); } } catch (Exception e) { App.debug("playing MP3 failed " + url + " " + e.toString()); } }
/** バイト列に周波数、ベロシティ指定で波形書き込む */ public void writeWave(byte[] b, double frequency, int velocity) { double amplitude = sampleRate / frequency; // 波長 for (int i = 0; i < b.length; i++) { double r = i / amplitude; int v = (byte) ((Math.round(r) % 2 == 0) ? velocity : -velocity); v += b[i]; v = Math.min(Math.max(v, -128), 127); b[i] = (byte) v; } // 再生(バイト列をlineに書き込む) line.write(b, 0, b.length); }
/**
 * Pumps {@code audioInputStream} to {@code dataLine} one second at a time
 * until end of stream.
 */
private void play(
    AudioInputStream audioInputStream, AudioFormat audioFormat, SourceDataLine dataLine) {
  // One second of audio per buffer (sample rate x frame size).
  byte[] buffer = new byte[(int) audioFormat.getSampleRate() * audioFormat.getFrameSize()];
  for (; ; ) {
    int bytesRead = read(audioInputStream, buffer);
    if (bytesRead == -1) {
      return; // end of stream
    }
    dataLine.write(buffer, 0, bytesRead);
  }
}
/**
 * Plays the given audio file to completion, blocking the caller. Exits the
 * JVM on setup failure (behavior preserved from the original).
 *
 * @param filename the name of the file that is going to be played
 */
public void playSound(String filename) {
  String strFilename = filename;
  try {
    soundFile = new File(strFilename);
  } catch (Exception e) {
    e.printStackTrace();
    System.exit(1);
  }
  try {
    audioStream = AudioSystem.getAudioInputStream(soundFile);
  } catch (Exception e) {
    e.printStackTrace();
    System.exit(1);
  }
  audioFormat = audioStream.getFormat();
  DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
  try {
    sourceLine = (SourceDataLine) AudioSystem.getLine(info);
    sourceLine.open(audioFormat);
  } catch (LineUnavailableException e) {
    e.printStackTrace();
    System.exit(1);
  } catch (Exception e) {
    e.printStackTrace();
    System.exit(1);
  }
  sourceLine.start();
  int nBytesRead = 0;
  byte[] abData = new byte[BUFFER_SIZE];
  while (nBytesRead != -1) {
    try {
      nBytesRead = audioStream.read(abData, 0, abData.length);
    } catch (IOException e) {
      // BUG FIX: the original swallowed the exception and kept looping with a
      // stale nBytesRead, re-writing old data (potentially forever if the
      // stream keeps failing). Stop the playback loop instead.
      e.printStackTrace();
      break;
    }
    if (nBytesRead >= 0) {
      sourceLine.write(abData, 0, nBytesRead);
    }
  }
  sourceLine.drain();
  sourceLine.close();
}
/**
 * Feeds incoming audio bytes to the output line, starting it on first use.
 * Only as many bytes as the line can currently buffer are written; the rest
 * are dropped (a trace message notes when the line is full).
 *
 * @param data incoming audio bytes
 * @return number of bytes actually written, or 0 if no line is open
 */
@Override
public int onAudioData(byte[] data) {
  if (audioLine == null || !audioLine.isOpen()) {
    return 0; // no open line to play on
  }
  if (!audioLine.isRunning()) {
    audioLine.start();
  }
  // Never write more than the line can buffer right now.
  int toWrite = Math.min(audioLine.available(), data.length);
  if (toWrite == audioLine.available()) {
    log.trace("full! toWrite: " + toWrite + " instead of: " + data.length);
  }
  return audioLine.write(data, 0, toWrite);
}
/**
 * Writes audio to the speaker in ~20 ms chunks, applying the current volume
 * to each chunk and throttling so the speaker buffer never fills completely.
 *
 * @param buffer audio bytes to write (volume is applied in-place)
 * @param offset starting offset into {@code buffer}
 * @param length number of bytes to write
 * @return 0 if no speaker is configured; otherwise {@code length}
 */
public synchronized int write(byte[] buffer, int offset, int length) {
  if (speaker == null) {
    return 0;
  }
  start();
  /*
   * Break the buffer up into 20 ms chunks, write as much as we can,
   * then wait until there's room for more.
   */
  int len = length;
  int sleepCount = 0;
  long start = System.currentTimeMillis();
  while (len > 0) {
    int writeLength = Math.min(len, chunkSize);
    // Throttle in packet-period steps until the line has room for a full
    // chunk, unless shutdown was requested via `done`.
    while (!done && available() < chunkSize) {
      try {
        Thread.sleep(RtpPacket.PACKET_PERIOD);
        sleepCount++;
      } catch (InterruptedException e) {
        // NOTE(review): interrupt is ignored here; the outer loop keeps
        // writing until all data is delivered.
      }
    }
    // Apply the current volume in-place, then hand the chunk to the speaker.
    applyVolume(buffer, offset, writeLength);
    speaker.write(buffer, offset, writeLength);
    offset += writeLength;
    len -= writeLength;
  }
  // Diagnostic: report how long we were throttled, if at all.
  if (sleepCount > 0) {
    if (Logger.logLevel >= Logger.LOG_MOREINFO) {
      long elapsed = System.currentTimeMillis() - start;
      Logger.println("write to speaker slept " + sleepCount + " times, " + elapsed + "ms");
    }
  }
  numWrites++;
  return length;
}
/** * Write one sample (between -1.0 and +1.0) to standard audio. If the sample is outside the range, * it will be clipped. */ public static void play(double in) { // clip if outside [-1, +1] if (in < -1.0) in = -1.0; if (in > +1.0) in = +1.0; // convert to bytes short s = (short) (MAX_16_BIT * in); buffer[bufferSize++] = (byte) s; buffer[bufferSize++] = (byte) (s >> 8); // little Endian // send to sound card if buffer is full if (bufferSize >= buffer.length) { line.write(buffer, 0, buffer.length); bufferSize = 0; } }
/** * Writes one sample (between -1.0 and +1.0) to standard audio. If the sample is outside the * range, it will be clipped. * * @param sample the sample to play * @throws IllegalArgumentException if the sample is <tt>Double.NaN</tt> */ public void play(double sample) { // clip if outside [-1, +1] if (Double.isNaN(sample)) throw new IllegalArgumentException("sample is NaN"); if (sample < -1.0) sample = -1.0; if (sample > +1.0) sample = +1.0; // convert to bytes short s = (short) (MAX_16_BIT * sample); buffer[bufferSize++] = (byte) s; buffer[bufferSize++] = (byte) (s >> 8); // little Endian // send to sound card if buffer is full if (bufferSize >= buffer.length) { line.write(buffer, 0, buffer.length); bufferSize = 0; } }
/**
 * This will try to write all the indicated data without ever giving up: either
 * all the data is written or it will loop for ever.
 *
 * @param data audio bytes to write
 * @param ofs starting offset into {@code data}
 * @param len number of bytes to write
 */
private void writeData(byte[] data, int ofs, int len) {
  int count = 0;
  int written = 0;
  while (written < len) {
    written += sdl.write(data, ofs + written, len - written);
    // Take a little break if we couldn't write everything in 100 tries...
    count++;
    if (count >= 100) {
      try {
        Thread.sleep(10);
      } catch (InterruptedException e) {
        // NOTE(review): the interrupt is deliberately ignored so the loop
        // keeps retrying; restoring the flag here would make every later
        // sleep throw immediately and turn this into a busy spin.
      }
      count = 0;
    }
  }
}
/**
 * Capture loop: opens the line at 44.1 kHz 16-bit signed big-endian mono,
 * then forever reads 6300-byte blocks, fans them out to the server, and
 * optionally echoes them to the server output stream. The line is stopped
 * and closed on exit.
 */
public void run() {
  try {
    b = new byte[6300];
    line.open(new AudioFormat(44100, 16, 1, true, true), 6300);
    line.start();
    for (; ; ) {
      // Read one block and distribute it.
      line.read(b, 0, b.length);
      server.writeByteBuffers(b, sourceIndex);
      if (output) {
        serverOutput.write(b, 0, b.length);
      }
    }
  } catch (LineUnavailableException e) {
    e.printStackTrace();
    System.out.println(sourceIndex);
  } finally {
    line.stop();
    line.close();
  }
}
/**
 * Receives audio from {@code receiver} and plays it on the speakers until the
 * thread is interrupted or the input stream ends.
 */
@Override
public void run() {
  try {
    AudioFormat format = settings.getAudioFormat();
    DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
    SourceDataLine speakers = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
    speakers.open(format);
    speakers.start();
    byte[] data = new byte[204800];
    while (!Thread.interrupted()) {
      int numBytesRead = receiver.getInputStream().read(data, 0, 1024);
      // BUG FIX: read() returns -1 at end of stream; the original passed -1
      // straight to SourceDataLine.write(), which throws. Stop cleanly.
      if (numBytesRead == -1) {
        break;
      }
      log.debug("[Player] received {} bytes", numBytesRead);
      speakers.write(data, 0, numBytesRead);
    }
    // Flush remaining audio and release the line (previously leaked).
    speakers.drain();
    speakers.close();
  } catch (LineUnavailableException | IOException e) {
    log.error("[Player] error occurred", e);
  }
}
/**
 * Renders one frame: clears the pixel buffer to the background color, draws
 * the back grid layer, all sprites, then the front grid layer, and flushes
 * any buffered audio samples to the sound line.
 */
public void sync() {
  final int scrollx = m[SX]; // scroll offsets read from machine memory
  final int scrolly = m[SY];
  // Fill the pixel buffer with the clear color.
  java.util.Arrays.fill(p, m[CL]);
  // Background grid pass.
  drawGrid(false, scrollx, scrolly);
  // Sprite table at m[SP]: 256 entries of 4 bytes each — status, tile, x, y.
  for (int sprite = 0; sprite < 1024; sprite += 4) {
    final int status = m[m[SP] + sprite];
    final int tile = m[m[SP] + sprite + 1];
    final int px = m[m[SP] + sprite + 2];
    final int py = m[m[SP] + sprite + 3];
    // Sprite positions are world coordinates; subtract scroll for screen space.
    drawSprite(tile, status, px - scrollx, py - scrolly);
  }
  // Foreground grid pass (drawn over sprites).
  drawGrid(true, scrollx, scrolly);
  // Flush buffered audio, if a line is open and samples are pending.
  if (soundLine != null && apointer > 0) {
    soundLine.write(abuffer, 0, apointer);
    apointer = 0;
  }
}
/**
 * Decodes pending Vorbis PCM and writes it to the output as interleaved
 * 16-bit little-endian samples with clamping. Lazily initializes the decoder
 * state and output on first call.
 *
 * @throws IOException if writing to the output fails
 */
private void pcmOut() throws IOException {
  if (vorbisDspState == null) {
    mpcm = new float[1][][];
    mindex = new int[vorbisInfo.channels];
    vorbisDspState = new DspState();
    vorbisDspState.synthesis_init(vorbisInfo);
    vorbisBlock = new Block(vorbisDspState);
    sampleBuffer = new byte[BUFFER_SIZE * 2];
  }
  if (out == null) {
    openOutput();
  }
  if (vorbisBlock.synthesis(oggPacket) == 0) {
    vorbisDspState.synthesis_blockin(vorbisBlock);
  }
  int n;
  int[] idx = mindex;
  int nch = vorbisInfo.channels;
  // BUG FIX: the per-iteration cap was sampleBuffer.length, but `len` counts
  // samples per channel while the inner loop writes 2 * nch * len BYTES —
  // so large decode batches overran sampleBuffer. Cap by the number of
  // samples per channel that actually fit.
  int maxSamples = sampleBuffer.length / (2 * nch);
  while ((n = vorbisDspState.synthesis_pcmout(mpcm, idx)) > 0) {
    int len = (n < maxSamples ? n : maxSamples);
    float[][] pcm = mpcm[0];
    int off = 0;
    // Interleave channels, converting float samples to clamped 16-bit
    // little-endian PCM.
    for (int i = 0; i < len; i++) {
      for (int ch = 0; ch < nch; ch++) {
        int m = (int) (pcm[ch][idx[ch] + i] * SHORT_RANGE);
        if (m < Short.MIN_VALUE) {
          sampleBuffer[off++] = (byte) 0x00;
          sampleBuffer[off++] = (byte) 0x80;
        } else if (m > Short.MAX_VALUE) {
          sampleBuffer[off++] = (byte) 0xff;
          sampleBuffer[off++] = (byte) 0x7f;
        } else {
          short s = (short) m;
          sampleBuffer[off++] = (byte) s;
          sampleBuffer[off++] = (byte) (s >>> 8);
        }
      }
    }
    out.write(sampleBuffer, 0, 2 * nch * len);
    // Tell the decoder how many samples per channel we consumed.
    vorbisDspState.synthesis_read(len);
  }
}
// 播放au,aiff,wav音乐流, 这个函数基本完全为帖子上的代码 private synchronized void play() { ByteArrayInputStream aMusicInputStream; AudioFormat format; AudioInputStream musicInputStream; byte[] audioSamples; SourceDataLine line; try { File MusicFile = new File(m_filename); musicInputStream = AudioSystem.getAudioInputStream(MusicFile); // 取得文件的音频输入流 format = musicInputStream.getFormat(); // 取得音频输入流的格式 audioSamples = getAudioSamples(musicInputStream, format); // 取得音频样本 aMusicInputStream = new ByteArrayInputStream(audioSamples); int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10); byte[] buffer = new byte[bufferSize]; try { DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); line = (SourceDataLine) AudioSystem.getLine(info); line.open(format, bufferSize); } catch (LineUnavailableException e) { e.printStackTrace(); return; } if (!line.isRunning()) { line.start(); } int numBytesRead = 0; while (numBytesRead != -1 && !m_stopped) { numBytesRead = aMusicInputStream.read(buffer, 0, buffer.length); if (numBytesRead != -1) { line.write(buffer, 0, numBytesRead); } } line.drain(); line.close(); } catch (Exception e) { e.printStackTrace(); } }
/**
 * Play a sound.
 *
 * @param in The <code>AudioInputStream</code> to play.
 * @return True if the stream was played without incident.
 */
private boolean playSound(AudioInputStream in) throws IOException {
  SourceDataLine line = openLine(in.getFormat());
  if (line == null) {
    return false;
  }
  boolean completed = false;
  try {
    startPlaying();
    // Pump the stream until it ends or playback is cancelled.
    for (int count; keepPlaying() && (count = in.read(data)) > 0; ) {
      line.write(data, 0, count);
    }
    completed = true;
  } finally {
    // Always flush and release the line, whatever happened above.
    stopPlaying();
    line.drain();
    line.stop();
    line.close();
  }
  return completed;
}