public synchronized void done() {
    // Idempotent shutdown: only the first call does the work.
    if (done) {
      return;
    }

    done = true;

    /*
     * There seems to be a bug in the Sun Ray audio system
     * where close() hangs sometimes if there is still data
     * in the speaker buffer.  By sleeping for the time
     * it would take to empty a full buffer (plus some slop),
     * the close() seems to always complete.
     *
     * XXX
     */
    try {
      Thread.sleep(getBufferSizeMillis() + RtpPacket.PACKET_PERIOD);
    } catch (InterruptedException e) {
      // Restore the interrupt status rather than swallowing it, so code
      // further up the stack can still observe the interruption.
      Thread.currentThread().interrupt();
    }

    // Discard buffered data, halt, and release the line under its own lock.
    synchronized (speaker) {
      speaker.flush();
      speaker.stop();
      speaker.close();
    }

    if (Logger.logLevel >= Logger.LOG_MOREINFO) {
      Logger.println("Speaker closed");
    }
  }
  public void flush() {
    // Nothing to do when no speaker line has been opened.
    if (speaker != null) {
      // Ensure the line is running before discarding its queued data.
      start();
      speaker.flush();
    }
  }
Example #3
0
  /**
   * Stops audio capture and playback.  Clears the active flag, then stops
   * and flushes both the microphone and the speaker lines so any buffered
   * data is discarded.
   */
  public void deactivate() {
    active = false;
    microphone.stop();
    microphone.flush();

    speaker.stop();
    speaker.flush();
  }
Example #4
0
 @Override
 public void onInactive() {
   // No line open -- nothing to release.
   if (audioLine == null) {
     return;
   }
   // Discard queued data, close the line, and drop the reference.
   audioLine.flush();
   audioLine.close();
   audioLine = null;
 }
Example #5
0
  /** Stops the clip if it is playing; no-op when already stopped. */
  public void stop() {
    if (stopped()) {
      return;
    }

    player = null;
    // flush() discards buffered data immediately; a blocking drain() call
    // (which waits for the buffer to empty) was deliberately removed here.
    outputLine.flush();
  }
Example #6
0
 /**
  * Starts audio capture and playback.  Flushes stale buffered data from
  * both lines, starts the speaker, releases any thread blocked on
  * {@code blocker}, then starts the microphone.
  */
 public void activate() {
   active = true;
   microphone.flush();
   speaker.flush();
   speaker.start();
   blocker.release();
   microphone.start();
   // Flushed a second time after start(); presumably to drop anything
   // captured before playback was ready -- TODO confirm this is needed.
   microphone.flush();
 }
 /**
  * Stops the playback.<br>
  * Player Status = STOPPED.<br>
  * The playback thread should free audio resources.
  */
 protected void stopPlayback() {
   // Only meaningful while playing or paused; a no-op in any other state.
   if ((m_status == PLAYING) || (m_status == PAUSED)) {
     if (m_line != null) {
       // Discard queued audio before stopping so the line halts promptly
       // instead of draining what is left in its buffer.
       m_line.flush();
       m_line.stop();
     }
     m_status = STOPPED;
     // Wake any thread waiting on the input stream (e.g. a paused reader).
     synchronized (m_audioInputStream) {
       m_audioInputStream.notifyAll();
     }
     notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null);
     // Close the stream under the same lock used by the reader thread.
     synchronized (m_audioInputStream) {
       closeStream();
     }
     log.info("stopPlayback() completed");
   }
 }
Example #8
0
    /**
     * Opens the source data line, then copies audio from
     * {@code audioInputStrem} to the line until end of stream or
     * {@code running} is cleared.  The line is always stopped, flushed
     * and closed on exit, even if the copy loop fails.
     */
    public void run() {
      try {
        sourceDataLine = (SourceDataLine) AudioSystem.getLine(info);
        sourceDataLine.open(format, bufferSize);
        sourceDataLine.start();
      } catch (LineUnavailableException ex) {
        System.out.println("Unable to open the line: " + ex);
        return;
      }
      try {
        byte[] data = new byte[bufferSize];
        int numBytesRead = 0;
        int written = 0;
        while (running) {
          try {
            if ((numBytesRead = audioInputStrem.read(data)) == -1) break;
            int numBytesRemaining = numBytesRead;
            // write() may accept fewer bytes than offered; loop until the
            // whole chunk has been handed to the line.
            while (numBytesRemaining > 0) {
              written = sourceDataLine.write(data, 0, numBytesRemaining);
              numBytesRemaining -= written;
            }
          } catch (ArrayIndexOutOfBoundsException ae) {
            /*
             * Some capture devices eventually deliver larger buffers than
             * they originally say they would.  Catch that and resize the
             * data buffer to match.
             */
            bufferSize = numBytesRead;
            data = new byte[bufferSize];
          } catch (Exception e) {
            e.printStackTrace();
            System.out.println("Error during playback: " + e);
            break;
          }
        }
      } finally {
        // Always release the line.  The original code swallowed any
        // exception here and could leak the open line if the copy loop
        // threw; cleanup now runs unconditionally.
        sourceDataLine.stop();
        sourceDataLine.flush();
        sourceDataLine.close();
      }
    }
Example #9
0
 /** Discards any audio data currently queued on the underlying line. */
 public void flush() {
   dataLine.flush();
 }
  /**
   * The playSong method is responsible for opening a Xuggler container to play song at provided
   * location.  Decodes packets until end of stream or {@code alive} is cleared, honoring the
   * shared {@code state} for pause/resume, and reports progress through {@code callback}.
   *
   * @param songURL The location of the song to play (local file path or url)
   */
  public void playSong(String songURL) {

    IContainer container = IContainer.make();

    IContainerFormat format = IContainerFormat.make();

    // Stream format must currently be mp3
    format.setInputFormat("mp3");

    //		int s = container.setInputBufferLength(6270);
    //
    //		if(s < 0){
    //			logger.warn("Input buffer was not set to desired length");
    //		}

    // Probe size value must be >50 for some reason. Native libraries throw an exception if it's
    // <50. Measured in bytes.
    if (container.setProperty("probesize", 50) < 0) {
      logger.warn("Probe size not set for input container.");
    }

    if (container.setProperty("analyzeduration", 1) < 0) {
      logger.warn("Analyze duration not changed for input container.");
    }

    container.setFlag(IContainer.Flags.FLAG_NONBLOCK, true);

    if (container.open(songURL, Type.READ, format, true, false) < 0) {
      throw new IllegalArgumentException("stream not found");
    }

    int numStreams = container.getNumStreams();

    // long streamRec = System.currentTimeMillis();

    logger.info("Number of Audio streams detected {}", numStreams);

    IPacket packet = IPacket.make();
    IStream stream = null;
    IStreamCoder audioCoder = null;

    // Cache of stream index -> coder so metadata is queried only once per stream.
    Map<Integer, IStreamCoder> knownStreams = new HashMap<Integer, IStreamCoder>();

    long previousValue = 0;

    while (container.readNextPacket(packet) >= 0 && alive) {

      if (packet.isComplete()) {

        // First packet from an unseen stream: look up its coder and time base.
        if (knownStreams.get(packet.getStreamIndex()) == null) {
          container.queryStreamMetaData(); // This method tends to take awhile when reading a stream
          stream = container.getStream(packet.getStreamIndex());
          knownStreams.put(packet.getStreamIndex(), stream.getStreamCoder());

          audioCoder = knownStreams.get(packet.getStreamIndex());

          audioCoder.setTimeBase(stream.getTimeBase());
        }

        // Lazily open the decoder and the Java sound output on first use.
        if (!audioCoder.isOpen()) {
          if (audioCoder.open(null, null) < 0) {
            throw new RuntimeException("could not open audio decoder for container");
          }

          openSound(audioCoder);

          // System.out.println("Opening sound  " + (System.currentTimeMillis() - streamRec));
        }

        // System.err.println(audioCoder.getNumDroppedFrames());

        int offset = 0;

        IAudioSamples samples = IAudioSamples.make(1024, audioCoder.getChannels());

        // A packet may decode in several chunks; advance offset until consumed.
        while (offset < packet.getSize() && alive) {

          // Wait until the state is playing
          while (state != PlayBack_State.PLAYING) {

            if (state == PlayBack_State.TEARDOWN) {
              break;
            } else {
              // Paused: stop the output line and block until resumed.
              try {
                synchronized (LOCK_OBJECT) {
                  // mLine.drain();
                  mLine.flush();
                  mLine.stop();

                  LOCK_OBJECT.wait();

                  mLine.start();
                }
              } catch (InterruptedException e) {
                logger.error("", e);
              }
            }
          }

          int bytesDecoded = audioCoder.decodeAudio(samples, packet, offset);

          if (bytesDecoded < 0) {
            logger.warn("Error occurred decoding audio");
            break;
            // throw new RuntimeException("got error decoding audio");
          }

          offset += bytesDecoded;

          if (samples.isComplete() && alive) {
            playJavaSound(samples);
          }

          // Send the time stamp to the GUI for updating the progress bar
          long newValue = (long) (packet.getTimeStamp() * packet.getTimeBase().getValue());

          // Update GUI every second that the stream is playing
          if (newValue > previousValue) {
            callback.notifyGUISongProgress(newValue);
            callback.isStreaming(true);
            previousValue = newValue;

            // Reached the known song duration: stop the playback loop.
            if (newValue == streamInfo.getSongDuration()) {
              alive = false;
            }
          }
        }
      }
    }

    // Release the audio output first, then the decoder and container.
    closeJavaSound();

    if (audioCoder != null) {
      audioCoder.close();
      audioCoder = null;
    }
    if (container != null) {
      container.close();
      container = null;
    }
  }
Example #11
0
 @Override
 public void onAudioFlush() {
   // Discard queued audio only when a line is currently open.
   if (audioLine == null) {
     return;
   }
   audioLine.flush();
 }