Beispiel #1
0
  /**
   * Plays a "warp" sound effect: a pulse wave whose duty cycle first shrinks
   * (rising pitch feel) then grows, repeated {@code repeat} times.
   *
   * @param repeat number of times to repeat the effect
   * @throws LineUnavailableException if no audio line matching the format is available
   * @throws InterruptedException declared for caller compatibility; not thrown here
   */
  public static void warp(int repeat) throws LineUnavailableException, InterruptedException {
    AudioFormat af =
        new AudioFormat(
            SAMPLE_RATE, // sampleRate
            8, // sampleSizeInBits
            1, // channels
            true, // signed
            false); // bigEndian
    SourceDataLine sdl = AudioSystem.getSourceDataLine(af);
    sdl.open(af);
    sdl.start();
    try {
      byte[] buf = new byte[1];
      int step;

      for (int j = 0; j < repeat; j++) {
        step = 25;
        for (int i = 0; i < 2000; i++) {
          if (i < 500) {
            // First part: louder pulse; period shrinks every 25 samples
            // (step goes 25 -> 5, so i % step never divides by zero).
            buf[0] = ((i % step > 0) ? 32 : (byte) 0);
            if (i % 25 == 0) step--;
          } else {
            // Second part: quieter pulse; period grows every 50 samples.
            buf[0] = ((i % step > 0) ? 16 : (byte) 0);
            if (i % 50 == 0) step++;
          }
          sdl.write(buf, 0, 1);
        }
      }
      sdl.drain();
    } finally {
      // Fix: always release the audio line, even if write()/drain() throws;
      // previously an exception leaked the open line.
      sdl.stop();
      sdl.close();
    }
  }
  /**
   * Shuts the speaker down exactly once; subsequent calls are no-ops
   * (guarded by the {@code done} flag under this object's monitor).
   */
  public synchronized void done() {
    if (done) {
      return;
    }

    done = true;

    /*
     * There seems to be a bug in the Sun Ray audio system
     * where close() hangs sometimes if there is still data
     * in the speaker buffer.  By sleeping for the time
     * it would take to empty a full buffer (plus some slop),
     * the close() seems to always complete.
     *
     * XXX
     */
    try {
      Thread.sleep(getBufferSizeMillis() + RtpPacket.PACKET_PERIOD);
    } catch (InterruptedException e) {
      // Fix: restore the interrupt flag instead of swallowing it, so callers
      // can still observe the interruption; shutdown proceeds regardless.
      Thread.currentThread().interrupt();
    }

    synchronized (speaker) {
      speaker.flush();
      speaker.stop();
      speaker.close();
    }

    if (Logger.logLevel >= Logger.LOG_MOREINFO) {
      Logger.println("Speaker closed");
    }
  }
Beispiel #3
0
 /**
  * Stops the audio line if one exists and is currently running, recording
  * that the stop was requested explicitly (lineStoppedManually) rather than
  * caused by end-of-stream.
  */
 private void stopLine() {
   if (line != null && line.isRunning()) {
     lineStoppedManually = true;
     line.stop();
   }
 }
Beispiel #4
0
  /**
   * Plays an explosion-like noise burst: random-length runs alternating
   * between silence and noise, with the noise peak amplitude following a
   * cosine envelope over the ~8000-sample duration.
   *
   * @throws LineUnavailableException if no audio line matching the format is available
   * @throws InterruptedException declared for caller compatibility; not thrown here
   */
  public static void bang() throws LineUnavailableException, InterruptedException {
    AudioFormat af =
        new AudioFormat(
            SAMPLE_RATE, // sampleRate
            8, // sampleSizeInBits
            1, // channels
            true, // signed
            false); // bigEndian
    SourceDataLine sdl = AudioSystem.getSourceDataLine(af);
    sdl.open(af);
    sdl.start();
    try {
      byte[] buf = new byte[1];
      Random r = new Random();
      boolean silence = true;
      for (int i = 0; i < 8000; i++) {
        // Each run lasts until r.nextInt() % 10 == 0 (~10 samples on
        // average); note i also advances inside the inner loop.
        while (r.nextInt() % 10 != 0) {
          buf[0] =
              silence
                  ? 0
                  : (byte)
                      Math.abs(
                          r.nextInt()
                              % (int) (1. + 63. * (1. + Math.cos(((double) i) * Math.PI / 8000.))));
          i++;
          sdl.write(buf, 0, 1);
        }
        silence = !silence;
      }
      sdl.drain();
    } finally {
      // Fix: always release the audio line, even if write()/drain() throws;
      // previously an exception leaked the open line.
      sdl.stop();
      sdl.close();
    }
  }
  /**
   * Deactivates the audio session: clears the active flag, then halts and
   * flushes the microphone capture line followed by the speaker playback
   * line (stop first, then discard whatever is still buffered).
   */
  public void deactivate() {
    active = false;

    microphone.stop();
    microphone.flush();

    speaker.stop();
    speaker.flush();
  }
  /**
   * Halts speaker playback. A no-op when no speaker has been created or
   * playback is not currently running.
   */
  public void stop() {
    if (speaker != null && isRunning()) {
      speaker.stop();
    }
  }
 /**
  * Pauses the playback.<br>
  * Player Status = PAUSED.
  */
 protected void pausePlayback() {
   if ((m_line != null) && (m_status == PLAYING)) {
     // m_line.flush() is presumably omitted on purpose so buffered audio
     // survives the pause and resume picks up seamlessly — confirm.
     m_line.stop();
     m_status = PAUSED;
     log.info("pausePlayback() completed");
     notifyEvent(BasicPlayerEvent.PAUSED, getEncodedStreamPosition(), -1, null);
   }
 }
Beispiel #8
0
 /**
  * Stops and closes the underlying data line, then marks this instance as
  * no longer opened (the opened flag is cleared even on failure).
  *
  * @return true if the line was shut down cleanly, false if an error occurred
  */
 public boolean close() {
   boolean clean = true;
   try {
     sdl.stop();
     sdl.close();
   } catch (Exception e) {
     Logger.warning("Could not close or stop SoundDataLine: " + sdl);
     clean = false;
   }
   opened = false;
   return clean;
 }
Beispiel #9
0
  // http://stackoverflow.com/questions/13789063/get-sound-from-a-url-with-java
  /**
   * Plays an MP3 from the given URL via a JavaFX MediaPlayer created on the
   * JavaFX application thread.
   *
   * <p>A JFXPanel is instantiated first solely to initialize the JavaFX
   * runtime before Platform.runLater is used.
   *
   * @param url the MP3 location (file path or http/https URL)
   */
  private void playMP3(final String url) {

    try {

      // Create the JavaFX Panel for the WebView
      JFXPanel fxPanel = new JFXPanel();
      fxPanel.setLocation(new Point(0, 0));

      // Initialize the webView in a JavaFX-Thread
      Platform.runLater(
          new Runnable() {
            public void run() {
              MediaPlayer player = new MediaPlayer(new Media(url));
              player.play();
            }
          });

      // Fix: removed the javax.sound streaming fallback that used to follow
      // here — it was unreachable dead code behind "if (true) return;".
    } catch (Exception e) {
      App.debug("playing MP3 failed " + url + " " + e.toString());
    }
  }
Beispiel #10
0
 /*
  * Taken from the JOrbis Player
  */
 /**
  * Returns a started output line matching the requested channel count and
  * sample rate. When the cached line is missing or its format differs, the
  * old line (if any) is drained and closed and a new one is created.
  */
 private SourceDataLine getOutputLine(int channels, int rate) {
   boolean needNewLine =
       outputLine == null || this.rate != rate || this.channels != channels;
   if (needNewLine) {
     if (outputLine != null) {
       // Let pending audio finish before replacing the line.
       outputLine.drain();
       outputLine.stop();
       outputLine.close();
     }
     initJavaSound(channels, rate);
     outputLine.start();
   }
   return outputLine;
 }
 /**
  * Requests playback stop. Method initiate shutdown and return immediate. Playback stops later.
  */
 public void stop() {
   interruptPlayback(PlayerState.STOPPED);
   currentSeekPositionMcsec = 0;
   currentPlayTimeMcsec = 0;
   // Snapshot the volatile-ish field once so the null check and the call
   // see the same line instance.
   SourceDataLine activeLine = currentDataLine;
   if (activeLine == null) {
     return;
   }
   try {
     activeLine.stop();
   } catch (Throwable ignored) {
     // be silent — the line may already have been torn down elsewhere
   }
 }
Beispiel #12
0
 /**
  * Stops playback: clears the active flag, halts the data line, then waits
  * for the playback thread (if any) to terminate.
  */
 public void stop() {
   logger.log(Level.FINEST, "BigClip.stop()");
   active = false;
   // why did I have this commented out?
   dataLine.stop();
   if (thread != null) {
     try {
       active = false;
       thread.join();
     } catch (InterruptedException wakeAndContinue) {
       // Fix: preserve the interrupt status instead of swallowing it, then
       // continue shutdown without waiting further for the thread.
       Thread.currentThread().interrupt();
     }
   }
 }
Beispiel #13
0
 /**
  * Sets the value of output to newOutput. The output variable specifies whether or not to play the
  * audio of this source on the server.
  *
  * @param newOutput The new value for output.
  */
 public void setOutput(boolean newOutput) {
   output = newOutput;
   if (!output) {
     // Disabling: halt and release the server line.
     serverOutput.stop();
     serverOutput.close();
     return;
   }
   // Enabling: (re)open the line, then start it even if open() failed,
   // matching the original control flow.
   try {
     serverOutput.open();
   } catch (LineUnavailableException e) {
     e.printStackTrace();
   }
   serverOutput.start();
 }
 /**
  * Stops the playback.<br>
  * Player Status = STOPPED.<br>
  * Thread should free Audio ressources.
  */
 protected void stopPlayback() {
   if ((m_status == PLAYING) || (m_status == PAUSED)) {
     if (m_line != null) {
       // Discard buffered audio so the line halts immediately, not after
       // the buffer drains.
       m_line.flush();
       m_line.stop();
     }
     m_status = STOPPED;
     // Wake the playback thread, which waits on the stream monitor while
     // not in PLAYING state.
     synchronized (m_audioInputStream) {
       m_audioInputStream.notifyAll();
     }
     notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null);
     // Stream is closed in a second synchronized block, only after the
     // STOPPED event has been delivered to listeners.
     synchronized (m_audioInputStream) {
       closeStream();
     }
     log.info("stopPlayback() completed");
   }
 }
 /**
  * Resets the player to the UNKNOWN state: closes the input stream (under
  * its monitor), drops all stream/format references, stops and closes the
  * audio line, and clears the cached gain/pan controls.
  */
 protected void reset() {
   m_status = UNKNOWN;
   if (m_audioInputStream != null) {
     // Close under the stream monitor so the playback thread never reads
     // from a stream that is being closed.
     synchronized (m_audioInputStream) {
       closeStream();
     }
   }
   m_audioInputStream = null;
   m_audioFileFormat = null;
   m_encodedaudioInputStream = null;
   encodedLength = -1;
   if (m_line != null) {
     m_line.stop();
     m_line.close();
     m_line = null;
   }
   // Controls belong to the closed line; drop them so they cannot be used.
   m_gainControl = null;
   m_panControl = null;
 }
Beispiel #16
0
    /**
     * Play a sound.
     *
     * @param in The <code>AudioInputStream</code> to play.
     * @return True if the stream was played without incident.
     */
    private boolean playSound(AudioInputStream in) throws IOException {
      SourceDataLine line = openLine(in.getFormat());
      if (line == null) {
        return false;
      }
      boolean completed = false;
      try {
        startPlaying();
        int count;
        while (keepPlaying() && (count = in.read(data)) > 0) {
          line.write(data, 0, count);
        }
        completed = true;
      } finally {
        // Release playback state and the line whether or not reading failed.
        stopPlaying();
        line.drain();
        line.stop();
        line.close();
      }
      return completed;
    }
Beispiel #17
0
    /**
     * Playback loop: opens the source line, copies audio from the input
     * stream to the line until end-of-stream or {@code running} is cleared,
     * then stops, flushes, and closes the line.
     */
    public void run() {
      try {
        sourceDataLine = (SourceDataLine) AudioSystem.getLine(info);
        sourceDataLine.open(format, bufferSize);
        sourceDataLine.start();
      } catch (LineUnavailableException ex) {
        System.out.println("Unable to open the line: " + ex);
        return;
      }
      try {
        byte[] data = new byte[bufferSize];
        int numBytesRead = 0;
        while (running) {
          try {
            if ((numBytesRead = audioInputStrem.read(data)) == -1) break;
            // Fix: write() may consume only part of the buffer; advance the
            // offset on each partial write. The old loop always wrote from
            // offset 0, repeating the start of the buffer.
            int offset = 0;
            while (offset < numBytesRead) {
              offset += sourceDataLine.write(data, offset, numBytesRead - offset);
            }
          } catch (ArrayIndexOutOfBoundsException ae) {
            /**
             * Some capture devices eventually deliver larger buffers than they originally say they
             * would. Catch that and reset the data buffer
             */
            bufferSize = numBytesRead;
            data = new byte[bufferSize];
          } catch (Exception e) {
            e.printStackTrace();
            System.out.println("Error during playback: " + e);
            break;
          }
        }
      } finally {
        // Fix: cleanup now runs in finally, and failures are logged instead
        // of being silently swallowed by an empty catch block.
        try {
          sourceDataLine.stop();
          sourceDataLine.flush();
          sourceDataLine.close();
        } catch (Exception e) {
          e.printStackTrace();
        }
      }
    }
Beispiel #18
0
 /**
  * Plays a pure sine tone.
  *
  * @param hz tone frequency in hertz
  * @param msecs duration in milliseconds
  * @param vol volume scale, 0.0 (silent) to 1.0 (full)
  * @throws LineUnavailableException if no audio line matching the format is available
  */
 public static void tone(int hz, int msecs, double vol) throws LineUnavailableException {
   byte[] buf = new byte[1];
   AudioFormat af =
       new AudioFormat(
           SAMPLE_RATE, // sampleRate
           8, // sampleSizeInBits
           1, // channels
           true, // signed
           false); // bigEndian
   SourceDataLine sdl = AudioSystem.getSourceDataLine(af);
   sdl.open(af);
   sdl.start();
   try {
     // msecs * 8 samples assumes SAMPLE_RATE is 8 kHz — TODO confirm.
     for (int i = 0; i < msecs * 8; i++) {
       double angle = i / (SAMPLE_RATE / hz) * 2.0 * Math.PI;
       buf[0] = (byte) (Math.sin(angle) * 127.0 * vol);
       sdl.write(buf, 0, 1);
     }
     sdl.drain();
   } finally {
     // Fix: always release the audio line, even if write()/drain() throws;
     // previously an exception leaked the open line.
     sdl.stop();
     sdl.close();
   }
 }
  /**
   * Streams the entire audio input stream to a freshly opened source data
   * line, then drains and releases the line and closes the stream. Any
   * failure is reported to stdout/stderr.
   */
  public void run() {
    try {
      AudioFormat audioFormat = audioInputStream.getFormat();
      DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
      SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
      sourceDataLine.open(audioFormat);
      sourceDataLine.start();

      int cnt;
      while ((cnt = audioInputStream.read(tempBuffer, 0, tempBuffer.length)) != -1) {
        if (cnt > 0) {
          sourceDataLine.write(tempBuffer, 0, cnt);
        }
      }
      sourceDataLine.drain();
      sourceDataLine.stop();
      // Fix: close() was called twice in the original; once is enough.
      sourceDataLine.close();
      audioInputStream.close();
    } catch (Exception e) {
      System.out.println("jMusic AudioFilePlayThread error");
      e.printStackTrace();
    }
  }
  /**
   * The playSong method is responsible for opening a Xuggler container to play song at provided
   * location.
   *
   * <p>Decoding runs on the calling thread until the stream ends, the song duration is reached,
   * or {@code alive} is cleared externally. Playback pauses/resumes via {@code state} and
   * {@code LOCK_OBJECT}.
   *
   * @param songURL The location of the song to play (local file path or url)
   */
  public void playSong(String songURL) {

    IContainer container = IContainer.make();

    IContainerFormat format = IContainerFormat.make();

    // Stream format must currently be mp3
    format.setInputFormat("mp3");

    //		int s = container.setInputBufferLength(6270);
    //
    //		if(s < 0){
    //			logger.warn("Input buffer was not set to desired length");
    //		}

    // Probe size value must be >50 for some reason. Native libraries throw an exception if it's
    // <50. Measured in bytes.
    if (container.setProperty("probesize", 50) < 0) {
      logger.warn("Probe size not set for input container.");
    }

    if (container.setProperty("analyzeduration", 1) < 0) {
      logger.warn("Analyze duration not changed for input container.");
    }

    container.setFlag(IContainer.Flags.FLAG_NONBLOCK, true);

    if (container.open(songURL, Type.READ, format, true, false) < 0) {
      throw new IllegalArgumentException("stream not found");
    }

    int numStreams = container.getNumStreams();

    // long streamRec = System.currentTimeMillis();

    logger.info("Number of Audio streams detected {}", numStreams);

    IPacket packet = IPacket.make();
    IStream stream = null;
    IStreamCoder audioCoder = null;

    // Cache of stream index -> coder so queryStreamMetaData (slow) runs at
    // most once per stream.
    Map<Integer, IStreamCoder> knownStreams = new HashMap<Integer, IStreamCoder>();

    // Last timestamp (in seconds) reported to the GUI progress bar.
    long previousValue = 0;

    while (container.readNextPacket(packet) >= 0 && alive) {

      if (packet.isComplete()) {

        if (knownStreams.get(packet.getStreamIndex()) == null) {
          container.queryStreamMetaData(); // This method tends to take awhile when reading a stream
          stream = container.getStream(packet.getStreamIndex());
          knownStreams.put(packet.getStreamIndex(), stream.getStreamCoder());

          audioCoder = knownStreams.get(packet.getStreamIndex());

          audioCoder.setTimeBase(stream.getTimeBase());
        }

        if (!audioCoder.isOpen()) {
          if (audioCoder.open(null, null) < 0) {
            throw new RuntimeException("could not open audio decoder for container");
          }

          // Open the Java sound output lazily, once the decoder is ready.
          openSound(audioCoder);

          // System.out.println("Opening sound  " + (System.currentTimeMillis() - streamRec));
        }

        // System.err.println(audioCoder.getNumDroppedFrames());

        int offset = 0;

        IAudioSamples samples = IAudioSamples.make(1024, audioCoder.getChannels());

        // Decode the packet, possibly in several chunks, writing each
        // completed sample buffer to the sound line.
        while (offset < packet.getSize() && alive) {

          // Wait until the state is playing
          while (state != PlayBack_State.PLAYING) {

            if (state == PlayBack_State.TEARDOWN) {
              break;
            } else {
              try {
                synchronized (LOCK_OBJECT) {
                  // mLine.drain();
                  // Pause: discard buffered audio and halt the line until
                  // another thread notifies LOCK_OBJECT.
                  mLine.flush();
                  mLine.stop();

                  LOCK_OBJECT.wait();

                  mLine.start();
                }
              } catch (InterruptedException e) {
                logger.error("", e);
              }
            }
          }

          int bytesDecoded = audioCoder.decodeAudio(samples, packet, offset);

          if (bytesDecoded < 0) {
            logger.warn("Error occurred decoding audio");
            break;
            // throw new RuntimeException("got error decoding audio");
          }

          offset += bytesDecoded;

          if (samples.isComplete() && alive) {
            playJavaSound(samples);
          }

          // Send the time stamp to the GUI for updating the progress bar
          long newValue = (long) (packet.getTimeStamp() * packet.getTimeBase().getValue());

          // Update GUI every second that the stream is playing
          if (newValue > previousValue) {
            callback.notifyGUISongProgress(newValue);
            callback.isStreaming(true);
            previousValue = newValue;

            if (newValue == streamInfo.getSongDuration()) {
              alive = false;
            }
          }
        }
      }
    }

    // Release the sound line and native decoder/container resources.
    closeJavaSound();

    if (audioCoder != null) {
      audioCoder.close();
      audioCoder = null;
    }
    if (container != null) {
      container.close();
      container = null;
    }
  }
 /**
  * Main loop.
  *
  * <p>Player Status == STOPPED || SEEKING => End of Thread + Freeing Audio Ressources.<br>
  * Player Status == PLAYING => Audio stream data sent to Audio line.<br>
  * Player Status == PAUSED => Waiting for another status.
  */
 public void run() {
   log.info("Thread Running");
   int nBytesRead = 1;
   byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
   int readIndex = 0; // total bytes read into the buffer (NOTE(review): unused below)
   int writeIndex = 0; // total bytes written out (NOTE(review): unused below)
   // Lock stream while playing.
   synchronized (m_audioInputStream) {
     boolean buffering = false;
     // Main play/pause loop.
     while ((nBytesRead != -1)
         && (m_status != STOPPED)
         && (m_status != SEEKING)
         && (m_status != UNKNOWN)) {
       if (m_status == PLAYING) {
         // Play.
         try {
           nBytesRead = m_audioInputStream.read(abData, 0, abData.length);
           if (nBytesRead >= 0) {
             // Copy of the raw PCM chunk handed to progress listeners.
             byte[] pcm = new byte[nBytesRead];
             System.arraycopy(abData, 0, pcm, 0, nBytesRead);
             // available >= bufferSize means the line buffer has run empty
             // (underrun); the Chinese log message says "buffer empty".
             if (m_line.available() >= m_line.getBufferSize()) {
               //                                buffering = true;
               log.fine("缓冲区空虚 : " + m_line.available() + "/" + m_line.getBufferSize());
             }
             //                            if(m_line.available()==0){
             //                                buffering=false;
             //                            }
             if (buffering == false) {
               int nBytesWritten = m_line.write(abData, 0, nBytesRead);
               // Compute position in bytes in encoded stream.
               int nEncodedBytes = getEncodedStreamPosition();
               // Notify listeners
               Iterator<BasicPlayerListener> it = laucher.getBasicPlayerListeners().iterator();
               while (it.hasNext()) {
                 BasicPlayerListener bpl = it.next();
                 if (m_audioInputStream instanceof PropertiesContainer) {
                   // Pass audio parameters such as instant bitrate, ...
                   Map properties = ((PropertiesContainer) m_audioInputStream).properties();
                   bpl.progress(nEncodedBytes, m_line.getMicrosecondPosition(), pcm, properties);
                 } else {
                   bpl.progress(nEncodedBytes, m_line.getMicrosecondPosition(), pcm, empty_map);
                 }
               }
             }
           }
         } catch (IOException e) {
           log.log(Level.SEVERE, "Thread cannot run()", e);
           m_status = STOPPED;
           notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null);
         }
         // Nice CPU usage.
         if (threadSleep > 0) {
           try {
             Thread.sleep(threadSleep);
           } catch (InterruptedException e) {
             log.log(Level.SEVERE, "Thread cannot sleep(" + threadSleep + ")", e);
           }
         }
       } else {
         // Not PLAYING (paused): block on the stream monitor until another
         // thread changes the status and calls notifyAll(). The Chinese log
         // messages say "not playing, waiting indefinitely..." and
         // "status changed, woken up...".
         synchronized (m_audioInputStream) {
           try {
             log.log(Level.INFO, "状态是不正在播放,要无限期的等待了.....");
             m_audioInputStream.wait();
             log.log(Level.INFO, "状态改过来了,等待被唤醒了.......");
           } catch (InterruptedException ex) {
             Logger.getLogger(BasicPlayer.class.getName()).log(Level.SEVERE, null, ex);
           }
         }
         // Pause
         //                    try {
         //                        Thread.sleep(500);
         //                    } catch (InterruptedException e) {
         //                        log.log(Level.SEVERE, "Thread cannot sleep(500)", e);
         //                    }
       }
     }
     // Free audio resources.
     if (m_line != null) {
       m_line.drain();
       m_line.stop();
       m_line.close();
       m_line = null;
     }
     // Notification of "End Of Media"
     if (nBytesRead == -1) {
       notifyEvent(BasicPlayerEvent.EOM, getEncodedStreamPosition(), -1, null);
     }
     // Close stream.
     closeStream();
   }
   m_status = STOPPED;
   notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null);
   log.info("Thread completed");
 }
Beispiel #22
0
  /**
   * Decoder thread main loop: pulls raw bytes from {@code in} into the Ogg
   * sync state and lets {@code pageOut()} process complete pages; on exit
   * (EOF, error, or stop) it closes the input, tears down the audio output
   * and codec state, and wakes any thread waiting on this object.
   */
  public void run() {
    if (Thread.currentThread() != this) {
      throw new IllegalStateException("not this thread");
    }
    try {
      SyncState syncState = this.oggSyncState = new SyncState();
      while (in != null) {
        int off = syncState.buffer(BUFFER_SIZE);
        int n = in.read(syncState.data, off, BUFFER_SIZE);
        if (n > 0) {
          syncState.wrote(n);
          pageOut();
        } else {
          break;
        }
      }
    } catch (EOFException e) {
      // End of stream is the normal termination condition, not a failure.
    } catch (IOException e) {
      failure = e;
      e.printStackTrace();
    } finally {
      try {
        if (in != null) {
          in.close();
          in = null;
        }
      } catch (IOException e) {
        // Fix: record the close error only when no earlier failure exists,
        // so the first (root-cause) exception is preserved. The original
        // condition was inverted (failure != null) and overwrote it.
        if (failure == null) {
          failure = e;
        }
        e.printStackTrace();
      }
      if (out != null) {
        out.stop();
        out.close();
      }
      if (vorbisBlock != null) {
        vorbisBlock.clear();
        vorbisBlock = null;
      }
      if (vorbisDspState != null) {
        vorbisDspState.clear();
        vorbisDspState = null;
      }
      if (vorbisInfo != null) {
        vorbisInfo.clear();
        vorbisInfo = null;
      }
      if (oggStreamState != null) {
        oggStreamState.clear();
        oggStreamState = null;
      }
      if (oggSyncState != null) {
        oggSyncState.clear();
        oggSyncState = null;
      }
      // Wake any thread blocked in wait() on this decoder.
      synchronized (this) {
        notifyAll();
      }
    }
  }
Beispiel #23
0
 /**
  * Close standard audio: blocks until all queued samples have played, then
  * stops the line.
  *
  * <p>NOTE(review): the line itself is never close()d here — presumably so
  * audio can be restarted later; confirm before adding line.close().
  */
 public static void close() {
   line.drain();
   line.stop();
 }
 /**
  * Stop the rendering process. Halts the source line immediately; no
  * drain() is performed, so audio still buffered in the line is not played
  * out first.
  */
 public void stop() {
   logger.info("stopping...");
   sourceLine.stop();
 }