/**
 * Drives the processor toward the requested state and blocks until it is
 * reached or an error is reported.
 *
 * <p>The controller listener (registered elsewhere) is expected to set
 * {@code error} and {@code notify()} this monitor on state changes.
 *
 * @param state target state ({@code Processor.Configured},
 *     {@code Controller.Realized}, {@code Controller.Prefetched} or
 *     {@code Controller.Started})
 * @return {@code true} if the state was reached, {@code false} on error or
 *     interruption
 */
public synchronized boolean waitForState(int state) {
      // Kick off the asynchronous transition matching the requested state.
      switch (state) {
        case Processor.Configured:
          p.configure();
          break;
        case Controller.Realized:
          p.realize();
          break;
        case Controller.Prefetched:
          p.prefetch();
          break;
        case Controller.Started:
          p.start();
          break;
      }

      // Re-check the state once per second (or on notify) until we either
      // reach the target or the listener flags an error.
      while (p.getState() < state && !error) {
        try {
          wait(1000);
        } catch (InterruptedException ie) {
          // Don't swallow the interrupt: restore the flag so callers can
          // observe it, and report failure instead of waiting further.
          Thread.currentThread().interrupt();
          return false;
        }
      }
      // p.removeControllerListener(this);
      return !error;
    }
示例#2
0
  /**
   * Asks {@code p} to move to the given state and blocks until the
   * {@link StateListener} reports success or failure.
   *
   * @param p the processor to drive
   * @param state target state ({@code Processor.Configured} or
   *     {@code Processor.Realized})
   * @return {@code true} if the state was reached, {@code false} on failure
   *     or interruption
   */
  private synchronized boolean waitForState(Processor p, int state) {
    p.addControllerListener(new StateListener());
    failed = false;

    // Call the required method on the processor to start the transition.
    if (state == Processor.Configured) {
      p.configure();
    } else if (state == Processor.Realized) {
      p.realize();
    }

    // Hold the state lock across BOTH the condition check and the wait();
    // in the original the check happened outside the synchronized block, so
    // a notification fired in between was lost (missed-notification race)
    // and this method could hang forever.
    synchronized (getStateLock()) {
      while (p.getState() < state && !failed) {
        try {
          getStateLock().wait();
        } catch (InterruptedException ie) {
          // Preserve the interrupt status for callers.
          Thread.currentThread().interrupt();
          return false;
        }
      }
    }

    return !failed;
  }
示例#3
0
 /**
  * Initializes the decoder. The file will be opened, decoding will start, and the first frame of
  * the movie will be sent to the handlers.
  *
  * @param mediaFile - the URL of the media file to be loaded
  */
 public void init(String mediaFile) {
   try {
     java.net.URL url = new java.net.URL(mediaFile);
     processor = Manager.createProcessor(url);
     processor.addControllerListener(this);
     // configure() is asynchronous; completion is reported to this
     // ControllerListener.
     processor.configure();
   } catch (Exception e) {
     // Don't swallow failures silently: a malformed URL or a missing media
     // handler would otherwise leave the player dead with no diagnostics.
     System.err.println("Movie player initialization failed for " + mediaFile + ": " + e);
   }
 }
示例#4
0
  /**
   * Asks {@code p} to move to the given state and blocks until the
   * {@link StateListener} reports success or failure.
   *
   * @param p the processor to drive
   * @param state target state ({@code Processor.Configured} or
   *     {@code Processor.Realized})
   * @return {@code true} if the state was reached, {@code false} on failure
   *     or interruption
   */
  private static synchronized boolean waitForState(Processor p, int state) {
    p.addControllerListener(new StateListener());
    stateFailed = false;

    // Start the asynchronous transition toward the requested state.
    if (state == Processor.Configured) {
      p.configure();
    } else if (state == Processor.Realized) {
      p.realize();
    }

    // Hold stateLock across BOTH the condition check and the wait(); the
    // original checked p.getState() outside the synchronized block, so a
    // notify() fired in between could be missed and the loop would block
    // indefinitely.
    synchronized (stateLock) {
      while (p.getState() < state && !stateFailed) {
        try {
          stateLock.wait();
        } catch (InterruptedException ie) {
          // Preserve the interrupt status for callers.
          Thread.currentThread().interrupt();
          return false;
        }
      }
    }

    return (!stateFailed);
  }
 /**
  * Configures the wrapped processor and blocks until the Configured state is
  * reached (or the transition fails).
  *
  * @return {@code true} when the processor reached Configured
  */
 public boolean blockingConfigure() {
   // Record the state we are aiming for, then kick off the asynchronous
   // transition and wait for it to complete.
   setState(Processor.Configured);
   Processor processor = (Processor) controller;
   processor.configure();
   return waitForState();
 }
  /**
   * Command-line entry point: captures audio from the first available capture
   * device, encodes it as GSM/RTP and transmits it to the given target until
   * the process is killed.
   *
   * <p>Usage: {@code rtpaudio <targetIP> <targetPort>}
   *
   * @param args target IP address and target UDP port
   */
  public static void main(String[] args) {
    if (args.length != 2) {
      System.out.println("Usage: rtpaudio <targetIP> <targetPort>");
      System.exit(0);
    }

    try {
      // Parse the port once up front so a malformed argument fails fast.
      final int targetPort = Integer.parseInt(args[1]);

      RegistryDefaults.setDefaultFlags(RegistryDefaults.FMJ);

      // create a clean registry
      RegistryDefaults.unRegisterAll(RegistryDefaults.ALL);
      RegistryDefaults.registerAll(RegistryDefaults.FMJ);

      // remove all capture devices
      Vector deviceList = (Vector) CaptureDeviceManager.getDeviceList(null).clone();
      for (int i = 0; i < deviceList.size(); i++) {
        CaptureDeviceInfo cdi = (CaptureDeviceInfo) deviceList.elementAt(i);
        CaptureDeviceManager.removeDevice(cdi);
      }

      // update capture device list
      new net.sf.fmj.media.cdp.javasound.CaptureDevicePlugger().addCaptureDevices();
      PlugInManager.commit();

      deviceList = (Vector) CaptureDeviceManager.getDeviceList(null).clone();
      if ((null == deviceList) || (deviceList.size() == 0)) {
        System.out.println("### ERROR found no audio capture device");
        System.exit(0);
      }

      // enumerate all codecs for diagnostics
      Vector codecList = PlugInManager.getPlugInList(null, null, PlugInManager.CODEC);
      System.out.println("found " + codecList.size() + " codec");
      for (int i = 0; i < codecList.size(); i++) {
        String aCodecClass = (String) codecList.elementAt(i);
        System.out.println("# " + (i + 1) + " " + aCodecClass);
      }

      // fetch first available audio capture device
      deviceList = (Vector) CaptureDeviceManager.getDeviceList(null).clone();
      CaptureDeviceInfo captureDeviceInfo = (CaptureDeviceInfo) deviceList.elementAt(0);
      System.out.println("### using " + captureDeviceInfo.getName());
      System.out.println("### locator " + captureDeviceInfo.getLocator());

      javax.media.protocol.DataSource dataSource =
          javax.media.Manager.createDataSource(
              new javax.media.MediaLocator(captureDeviceInfo.getLocator().toString()));
      // javax.media.protocol.DataSource dataSource =
      // javax.media.Manager.createDataSource(new
      // javax.media.MediaLocator("javasound://"));
      System.out.println("### created datasource " + dataSource.getClass().getName());

      javax.media.control.FormatControl[] formatControls =
          ((javax.media.protocol.CaptureDevice) dataSource).getFormatControls();
      System.out.println("got format control " + formatControls[0].getClass().getName());

      System.out.println("current format is " + formatControls[0].getFormat());

      // set audio capture format: first mono/16-bit format with an
      // unspecified sample rate, narrowed to 8 kHz.
      javax.media.Format[] formats = formatControls[0].getSupportedFormats();
      for (int i = 0; i < formats.length; i++) {
        javax.media.format.AudioFormat af = (javax.media.format.AudioFormat) formats[i];
        if ((af.getChannels() == 1) && (af.getSampleSizeInBits() == 16)) {
          if (af.getSampleRate() == Format.NOT_SPECIFIED) {
            javax.media.format.AudioFormat newAudioFormat =
                new javax.media.format.AudioFormat(
                    af.getEncoding(),
                    8000.0f,
                    javax.media.Format.NOT_SPECIFIED,
                    javax.media.Format.NOT_SPECIFIED);
            // javax.media.format.AudioFormat newAudioFormat = new
            // javax.media.format.AudioFormat(af.getEncoding(),
            // 44100.0f, javax.media.Format.NOT_SPECIFIED,
            // javax.media.Format.NOT_SPECIFIED);
            formatControls[0].setFormat(newAudioFormat.intersects(af));
            break;
          }
        }
      }
      System.out.println("current format is now " + formatControls[0].getFormat());

      // May stay null if the datasource exposes no FPC control; guarded below.
      FrameProcessingControl fpc = null;

      // adjust recording buffer (to adjust latency)
      dataSource.stop();
      Object[] controls = dataSource.getControls();
      for (int i = 0; i < controls.length; i++) {
        String className = controls[i].getClass().getName();
        if (-1 != className.indexOf("JavaSoundBufferControl")) {
          javax.media.control.BufferControl bc = (javax.media.control.BufferControl) controls[i];
          System.out.println(
              "### current javasound buffer length is " + bc.getBufferLength() + " ms");
          bc.setBufferLength(40);
          System.out.println(
              "### current javasound buffer length is " + bc.getBufferLength() + " ms");
        } else if (-1 != className.indexOf("JitterBufferControl")) {
          javax.media.control.BufferControl bc = (javax.media.control.BufferControl) controls[i];
          System.out.println("### current jitter buffer length is " + bc.getBufferLength() + " ms");
          bc.setBufferLength(80);
          System.out.println("### current jitter buffer length is " + bc.getBufferLength() + " ms");
        } else if (-1 != className.indexOf("FPC")) {
          fpc = (FrameProcessingControl) controls[i];
          System.out.println("### found bitrate control " + fpc.getClass());
        }
      }
      dataSource.start();

      // create processor
      javax.media.Processor processor = javax.media.Manager.createProcessor(dataSource);
      System.out.println("### created processor " + processor.getClass().getName());

      // Poll for up to ~10 s for the Configured state (configure() is async).
      processor.configure();
      for (int idx = 0; idx < 100; idx++) {
        if (processor.getState() == Processor.Configured) {
          break;
        }
        Thread.sleep(100);
      }
      System.out.println("### processor state " + processor.getState());

      processor.setContentDescriptor(
          new javax.media.protocol.ContentDescriptor(ContentDescriptor.RAW_RTP));

      javax.media.control.TrackControl[] tracks = processor.getTrackControls();
      // /tracks[0].setFormat(new
      // javax.media.format.AudioFormat(javax.media.format.AudioFormat.ULAW_RTP,
      // 8000, 8, 1));
      tracks[0].setFormat(
          new javax.media.format.AudioFormat(javax.media.format.AudioFormat.GSM_RTP, 8000, 8, 1));

      // Poll for up to ~10 s for the Realized state (realize() is async).
      processor.realize();
      for (int idx = 0; idx < 100; idx++) {
        if (processor.getState() == Controller.Realized) {
          break;
        }
        Thread.sleep(100);
      }
      System.out.println("### processor state " + processor.getState());

      javax.media.protocol.DataSource dataOutput = processor.getDataOutput();
      System.out.println("### processor data output " + dataOutput.getClass().getName());

      // BitRateControl
      BitRateControl bitrateControl = null;

      Object[] controls2 = dataOutput.getControls();
      for (int i = 0; i < controls2.length; i++) {
        if (controls2[i] instanceof BitRateControl) {
          bitrateControl = (BitRateControl) controls2[i];
          System.out.println("### found bitrate control " + bitrateControl.getClass());
          break;
        }
      }

      // PacketSizeControl
      Object[] controls3 = processor.getControls();
      for (int i = 0; i < controls3.length; i++) {
        if (controls3[i] instanceof PacketSizeControl) {
          PacketSizeControl psc = (PacketSizeControl) controls3[i];
          System.out.println("### current packetsize is " + psc.getPacketSize() + " bytes");
          psc.setPacketSize(66);
          System.out.println("### current packetsize is " + psc.getPacketSize() + " bytes");
          break;
        }
      }

      // enumerate all controls of the processor
      Object[] pcontrols = processor.getControls();
      for (int i = 0; i < pcontrols.length; i++) {
        System.out.println("processor control " + i + " " + pcontrols[i]);
      }

      javax.media.rtp.RTPManager rtpManager = javax.media.rtp.RTPManager.newInstance();

      javax.media.rtp.SessionAddress local =
          new javax.media.rtp.SessionAddress(InetAddress.getLocalHost(), targetPort);
      javax.media.rtp.SessionAddress target =
          new javax.media.rtp.SessionAddress(InetAddress.getByName(args[0]), targetPort);

      rtpManager.initialize(local);
      rtpManager.addTarget(target);

      javax.media.rtp.SendStream sendStream = rtpManager.createSendStream(dataOutput, 0);
      sendStream.start();

      processor.start();
      Thread.sleep(1000);

      System.out.println("\n>>>>>>  TRANSMITTING ULAW/RTP AUDIO NOW");
      // Transmit until the process is killed, printing stats every second.
      while (true) {
        Thread.sleep(1000);

        if (null != bitrateControl) {
          TransmissionStats stats = sendStream.getSourceTransmissionStats();
          // fpc may legitimately be null (no FPC control found above), so
          // guard it here instead of risking a NullPointerException.
          String overrun = (fpc == null) ? "n/a" : String.valueOf(fpc.getFramesDropped());
          System.out.println(
              "rtp audio send: bitrate="
                  + bitrateControl.getBitRate()
                  + " (pdu="
                  + stats.getPDUTransmitted()
                  + " bytes="
                  + stats.getBytesTransmitted()
                  + " overrun="
                  + overrun
                  + ")");
        }
      }
    } catch (Exception ex) {
      ex.printStackTrace();
    }

    System.exit(0);
  }
示例#7
0
  /**
   * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
   *
   * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
   */
  @Override
  public void update(ReceiveStreamEvent event) {
    if (event == null) return;
    ReceiveStream receiveStream = event.getReceiveStream();

    if (event instanceof NewReceiveStreamEvent) {
      if (receiveStream == null) {
        logger.warn("NewReceiveStreamEvent: null");
        return;
      }

      final long ssrc = getReceiveStreamSSRC(receiveStream);

      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);

      if (receiveStreamDesc != null) {
        String s = "NewReceiveStreamEvent for an existing SSRC. ";
        if (receiveStream != receiveStreamDesc.receiveStream)
          s += "(but different ReceiveStream object)";
        logger.warn(s);
        return;
      } else receiveStreamDesc = new ReceiveStreamDesc(receiveStream);

      if (logger.isInfoEnabled()) logger.info("New ReceiveStream, ssrc=" + ssrc);

      // Find the format of the ReceiveStream
      DataSource dataSource = receiveStream.getDataSource();
      if (dataSource instanceof PushBufferDataSource) {
        Format format = null;
        PushBufferDataSource pbds = (PushBufferDataSource) dataSource;
        for (PushBufferStream pbs : pbds.getStreams()) {
          if ((format = pbs.getFormat()) != null) break;
        }

        if (format == null) {
          logger.error("Failed to handle new ReceiveStream: " + "Failed to determine format");
          return;
        }

        receiveStreamDesc.format = format;
      } else {
        logger.error("Failed to handle new ReceiveStream: " + "Unsupported DataSource");
        return;
      }

      int rtpClockRate = -1;
      if (receiveStreamDesc.format instanceof AudioFormat)
        rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
      else if (receiveStreamDesc.format instanceof VideoFormat) rtpClockRate = 90000;
      getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

      // create a Processor and configure it
      Processor processor = null;
      try {
        processor = Manager.createProcessor(receiveStream.getDataSource());
      } catch (NoProcessorException npe) {
        logger.error("Failed to create Processor: ", npe);
        return;
      } catch (IOException ioe) {
        logger.error("Failed to create Processor: ", ioe);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Created processor for SSRC=" + ssrc);

      processor.addControllerListener(this);
      receiveStreamDesc.processor = processor;

      final int streamCount;
      synchronized (receiveStreams) {
        receiveStreams.add(receiveStreamDesc);
        streamCount = receiveStreams.size();
      }

      /*
       * XXX TODO IRBABOON
       * This is a terrible hack which works around a failure to realize()
       * some of the Processor-s for audio streams, when multiple streams
       * start nearly simultaneously. The cause of the problem is currently
       * unknown (and synchronizing all FMJ calls in RecorderRtpImpl
       * does not help).
       * XXX TODO NOOBABRI
       */
      if (receiveStreamDesc.format instanceof AudioFormat) {
        final Processor p = processor;
        new Thread() {
          @Override
          public void run() {
            // delay configuring the processors for the different
            // audio streams to decrease the probability that they
            // run together.
            try {
              int ms = 450 * (streamCount - 1);
              logger.warn(
                  "Sleeping for "
                      + ms
                      + "ms before"
                      + " configuring processor for SSRC="
                      + ssrc
                      + " "
                      + System.currentTimeMillis());
              Thread.sleep(ms);
            } catch (InterruptedException ie) {
              // Restore the interrupt flag and fall through to configure().
              Thread.currentThread().interrupt();
            }

            p.configure();
          }
          // BUG FIX: the original called run() directly, which executed the
          // sleep + configure() synchronously on the RTP event-dispatch
          // thread instead of spawning a new one, defeating the staggering
          // hack described above and stalling event delivery. start() runs
          // the body on its own thread as intended.
        }.start();
      } else {
        processor.configure();
      }
    } else if (event instanceof TimeoutEvent) {
      if (receiveStream == null) {
        // TODO: we might want to get the list of ReceiveStream-s from
        // rtpManager and compare it to our list, to see if we should
        // remove a stream.
        logger.warn("TimeoutEvent: null.");
        return;
      }

      // FMJ silently creates new ReceiveStream instances, so we have to
      // recognize them by the SSRC.
      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream));
      if (receiveStreamDesc != null) {
        if (logger.isInfoEnabled()) {
          logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc);
        }

        removeReceiveStream(receiveStreamDesc, true);
      }
    } else if (logger.isInfoEnabled()) {
      // event was already null-checked at the top of the method.
      logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event);
    }
  }