Example #1
 /**
  * Initializes the AudioInputStream and AudioFileFormat from the data source.
  *
  * @throws BasicPlayerException if the data source cannot be opened or the audio line is unavailable
  */
 protected void initAudioInputStream() throws BasicPlayerException {
   try {
     reset();
     notifyEvent(BasicPlayerEvent.OPENING, getEncodedStreamPosition(), -1, m_dataSource);
     if (m_dataSource instanceof URL) {
       initAudioInputStream((URL) m_dataSource);
     } else if (m_dataSource instanceof File) {
       initAudioInputStream((File) m_dataSource);
     } else if (m_dataSource instanceof InputStream) {
       initAudioInputStream((InputStream) m_dataSource);
     }
     createLine();
     // Notify listeners with AudioFileFormat properties.
     Map properties = null;
     if (m_audioFileFormat instanceof TAudioFileFormat) {
       // Tritonus SPI compliant audio file format.
       properties = ((TAudioFileFormat) m_audioFileFormat).properties();
       // Clone the Map because it is not mutable.
       properties = deepCopy(properties);
     } else {
       properties = new HashMap();
     }
     // Add JavaSound properties.
     if (m_audioFileFormat.getByteLength() > 0) {
       properties.put("audio.length.bytes", new Integer(m_audioFileFormat.getByteLength()));
     }
     if (m_audioFileFormat.getFrameLength() > 0) {
       properties.put("audio.length.frames", new Integer(m_audioFileFormat.getFrameLength()));
     }
     if (m_audioFileFormat.getType() != null) {
       properties.put("audio.type", (m_audioFileFormat.getType().toString()));
     }
     // Audio format.
     AudioFormat audioFormat = m_audioFileFormat.getFormat();
     if (audioFormat.getFrameRate() > 0) {
       properties.put("audio.framerate.fps", new Float(audioFormat.getFrameRate()));
     }
     if (audioFormat.getFrameSize() > 0) {
       properties.put("audio.framesize.bytes", new Integer(audioFormat.getFrameSize()));
     }
     if (audioFormat.getSampleRate() > 0) {
       properties.put("audio.samplerate.hz", new Float(audioFormat.getSampleRate()));
     }
     if (audioFormat.getSampleSizeInBits() > 0) {
       properties.put("audio.samplesize.bits", new Integer(audioFormat.getSampleSizeInBits()));
     }
     if (audioFormat.getChannels() > 0) {
       properties.put("audio.channels", new Integer(audioFormat.getChannels()));
     }
     if (audioFormat instanceof TAudioFormat) {
       // Tritonus SPI compliant audio format.
       Map addproperties = ((TAudioFormat) audioFormat).properties();
       properties.putAll(addproperties);
     }
     // Add SourceDataLine
     properties.put("basicplayer.sourcedataline", m_line);
     Iterator<BasicPlayerListener> it = laucher.getBasicPlayerListeners().iterator();
     while (it.hasNext()) {
       BasicPlayerListener bpl = it.next();
       bpl.opened(m_dataSource, properties);
     }
     m_status = OPENED;
     notifyEvent(BasicPlayerEvent.OPENED, getEncodedStreamPosition(), -1, null);
   } catch (LineUnavailableException e) {
     throw new BasicPlayerException(e);
   } catch (UnsupportedAudioFileException e) {
     throw new BasicPlayerException(e);
   } catch (IOException e) {
     throw new BasicPlayerException(e);
   }
 }
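
For context, here is a minimal sketch of how this open/notify sequence is typically driven from client code. It assumes a JavaZoom-style BasicPlayer facade (package javazoom.jlgui.basicplayer) with open(File), play() and addBasicPlayerListener(...); the exact class and method names in this fork may differ. The properties map delivered to opened(...) is the one built by initAudioInputStream() above.

  import java.io.File;
  import java.util.Map;
  import javazoom.jlgui.basicplayer.BasicController;
  import javazoom.jlgui.basicplayer.BasicPlayer;
  import javazoom.jlgui.basicplayer.BasicPlayerEvent;
  import javazoom.jlgui.basicplayer.BasicPlayerException;
  import javazoom.jlgui.basicplayer.BasicPlayerListener;

  public class OpenExample {
    public static void main(String[] args) throws BasicPlayerException {
      BasicPlayer player = new BasicPlayer(); // assumed facade; this fork may construct it differently
      player.addBasicPlayerListener(new BasicPlayerListener() {
        public void opened(Object stream, Map properties) {
          // Keys such as "audio.samplerate.hz" are put into the map by initAudioInputStream().
          System.out.println("Opened " + stream + ", sample rate = " + properties.get("audio.samplerate.hz"));
        }

        public void progress(int bytesread, long microseconds, byte[] pcmdata, Map properties) {}

        public void stateUpdated(BasicPlayerEvent event) {}

        public void setController(BasicController controller) {}
      });
      player.open(new File("test.mp3")); // open() ends up calling initAudioInputStream()
      player.play();
    }
  }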
Example #2
 /**
  * Main loop.
  *
  * <p>Player Status == STOPPED || SEEKING => End of Thread + Freeing Audio Resources.<br>
  * Player Status == PLAYING => Audio stream data sent to Audio line.<br>
  * Player Status == PAUSED => Waiting for another status.
  */
 public void run() {
   log.info("Thread Running");
   int nBytesRead = 1;
   byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
    int readIndex = 0; // total number of bytes read into the buffer
    int writeIndex = 0; // total number of bytes written out
   // Lock stream while playing.
   synchronized (m_audioInputStream) {
     boolean buffering = false;
     // Main play/pause loop.
     while ((nBytesRead != -1)
         && (m_status != STOPPED)
         && (m_status != SEEKING)
         && (m_status != UNKNOWN)) {
       if (m_status == PLAYING) {
         // Play.
         try {
           nBytesRead = m_audioInputStream.read(abData, 0, abData.length);
           if (nBytesRead >= 0) {
             byte[] pcm = new byte[nBytesRead];
             System.arraycopy(abData, 0, pcm, 0, nBytesRead);
              if (m_line.available() >= m_line.getBufferSize()) {
                // buffering = true;
                log.fine("Buffer underrun: " + m_line.available() + "/" + m_line.getBufferSize());
              }
              // if (m_line.available() == 0) {
              //   buffering = false;
              // }
              if (!buffering) {
               int nBytesWritten = m_line.write(abData, 0, nBytesRead);
               // Compute position in bytes in encoded stream.
               int nEncodedBytes = getEncodedStreamPosition();
               // Notify listeners
               Iterator<BasicPlayerListener> it = laucher.getBasicPlayerListeners().iterator();
               while (it.hasNext()) {
                 BasicPlayerListener bpl = it.next();
                 if (m_audioInputStream instanceof PropertiesContainer) {
                   // Pass audio parameters such as instant bitrate, ...
                   Map properties = ((PropertiesContainer) m_audioInputStream).properties();
                   bpl.progress(nEncodedBytes, m_line.getMicrosecondPosition(), pcm, properties);
                 } else {
                   bpl.progress(nEncodedBytes, m_line.getMicrosecondPosition(), pcm, empty_map);
                 }
               }
             }
           }
         } catch (IOException e) {
           log.log(Level.SEVERE, "Thread cannot run()", e);
           m_status = STOPPED;
           notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null);
         }
         // Nice CPU usage.
         if (threadSleep > 0) {
           try {
             Thread.sleep(threadSleep);
           } catch (InterruptedException e) {
             log.log(Level.SEVERE, "Thread cannot sleep(" + threadSleep + ")", e);
           }
         }
       } else {
         synchronized (m_audioInputStream) {
           try {
              log.log(Level.INFO, "Status is not PLAYING, waiting indefinitely...");
              m_audioInputStream.wait();
              log.log(Level.INFO, "Status changed, woken up from wait...");
           } catch (InterruptedException ex) {
             Logger.getLogger(BasicPlayer.class.getName()).log(Level.SEVERE, null, ex);
           }
         }
          // Pause (previous polling approach, superseded by the wait/notify above):
          // try {
          //   Thread.sleep(500);
          // } catch (InterruptedException e) {
          //   log.log(Level.SEVERE, "Thread cannot sleep(500)", e);
          // }
       }
     }
     // Free audio resources.
     if (m_line != null) {
       m_line.drain();
       m_line.stop();
       m_line.close();
       m_line = null;
     }
     // Notification of "End Of Media"
     if (nBytesRead == -1) {
       notifyEvent(BasicPlayerEvent.EOM, getEncodedStreamPosition(), -1, null);
     }
     // Close stream.
     closeStream();
   }
   m_status = STOPPED;
   notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null);
   log.info("Thread completed");
 }
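
The play loop above streams each decoded chunk to the registered listeners through progress(...). Below is a small sketch of such a listener that simply prints the reported position; it assumes the standard BasicPlayerListener callback signatures (raw Map, position in microseconds taken from m_line.getMicrosecondPosition()).

  import java.util.Map;
  import javazoom.jlgui.basicplayer.BasicController;
  import javazoom.jlgui.basicplayer.BasicPlayerEvent;
  import javazoom.jlgui.basicplayer.BasicPlayerListener;

  /** Prints the playback position reported by the run() loop above. */
  public class ProgressPrinter implements BasicPlayerListener {
    public void progress(int bytesread, long microseconds, byte[] pcmdata, Map properties) {
      // bytesread is the position in the encoded stream; microseconds comes from the SourceDataLine.
      System.out.printf("pos=%.1fs, encoded position=%d bytes, pcm chunk=%d bytes%n",
          microseconds / 1000000.0, bytesread, pcmdata.length);
    }

    public void opened(Object stream, Map properties) {}

    public void stateUpdated(BasicPlayerEvent event) {
      // EOM and STOPPED notifications fired at the end of run() arrive here.
    }

    public void setController(BasicController controller) {}
  }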
Example #3
 /**
  * Returns the registered listeners.
  *
  * @return the collection of registered BasicPlayerListener instances
  */
 public Collection getListeners() {
   return laucher.getBasicPlayerListeners();
 }
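
A short usage sketch for completeness; it assumes the elements of the returned raw Collection are BasicPlayerListener instances, as in the examples above, and that the player class exposing getListeners() is a JavaZoom-style BasicPlayer.

  import java.util.Collection;
  import javazoom.jlgui.basicplayer.BasicPlayer;
  import javazoom.jlgui.basicplayer.BasicPlayerListener;

  public class ListenerDump {
    /** Prints every listener currently registered on the given player. */
    static void dump(BasicPlayer player) {
      Collection listeners = player.getListeners(); // raw Collection, as returned above
      for (Object o : listeners) {
        System.out.println("Registered listener: " + (BasicPlayerListener) o);
      }
    }
  }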