/**
 * Pushes {@code len} bytes back so they will be returned again by subsequent reads.
 *
 * <p>The bytes are defensively copied, so the caller may safely reuse {@code bytes}
 * after this call (resolves the previous "TODO: copy?").
 *
 * @param bytes array holding the bytes to push back; only the first {@code len} are used
 * @param len number of bytes from {@code bytes} to push back
 */
 private void pushback(byte[] bytes, int len) {
   if (pushbackBufferLen == 0) {
     // Copy instead of aliasing the caller's array: the caller may mutate `bytes`
     // after this call, which would otherwise corrupt the pushback buffer.
     final byte[] copy = new byte[len];
     System.arraycopy(bytes, 0, copy, 0, len);
     pushbackBuffer = copy;
     pushbackBufferLen = len;
     pushbackBufferOffset = 0;
   } else {
     // Append the new bytes after the not-yet-consumed pushback data.
     // Note: the unconsumed region starts at pushbackBufferOffset (read() advances
     // it), so we must copy from there — copying from index 0 would replay bytes
     // that were already consumed.
     final byte[] newPushbackBuffer = new byte[pushbackBufferLen + len];
     System.arraycopy(
         pushbackBuffer, pushbackBufferOffset, newPushbackBuffer, 0, pushbackBufferLen);
     System.arraycopy(bytes, 0, newPushbackBuffer, pushbackBufferLen, len);
     pushbackBuffer = newPushbackBuffer;
     pushbackBufferLen = pushbackBufferLen + len;
     pushbackBufferOffset = 0;
   }
 }
  /**
   * Entry point: concatenates one or more input media files into a single output file.
   *
   * <p>Usage: {@code -o <outputURL> <inputURL> ...}
   *
   * @param args command-line arguments; "-o" is followed by the output URL, all other
   *     arguments are treated as input URLs
   */
  public static void main(String[] args) {
    // NOTE: a previous version overwrote `args` with hard-coded debug values here;
    // that debug leftover has been removed so the real command line is honored.
    Vector<String> inputURL = new Vector<String>();
    String outputURL = null;

    if (args.length == 0) {
      prUsage();
    }

    // Parse the arguments: "-o" introduces the output URL, everything else is an input.
    int i = 0;
    while (i < args.length) {

      if (args[i].equals("-o")) {
        i++;
        if (i >= args.length) prUsage();
        outputURL = args[i];
      } else {
        inputURL.addElement(args[i]);
      }
      i++;
    }

    if (inputURL.size() == 0) {
      System.err.println("No input url is specified");
      prUsage();
    }

    if (outputURL == null) {
      System.err.println("No output url is specified");
      prUsage();
    }

    // Generate the input and output media locators.
    MediaLocator iml[] = new MediaLocator[inputURL.size()];
    MediaLocator oml;

    for (i = 0; i < inputURL.size(); i++) {
      if ((iml[i] = createMediaLocator(inputURL.elementAt(i))) == null) {
        // Report the specific URL that failed rather than the whole list.
        System.err.println("Cannot build media locator from: " + inputURL.elementAt(i));
        System.exit(0);
      }
    }

    if ((oml = createMediaLocator(outputURL)) == null) {
      System.err.println("Cannot build media locator from: " + outputURL);
      System.exit(0);
    }

    FilesConcator concat = new FilesConcator();

    if (!concat.doIt(iml, oml)) {
      System.err.println("Failed to concatenate the inputs");
    }

    System.exit(0);
  }
 /**
  * Controller Listener: reacts to events from the concatenation controller.
  * Closes the controller at end of media; exits the VM on controller errors.
  */
 @Override
 public void controllerUpdate(ControllerEvent evt) {
   if (evt instanceof EndOfMediaEvent) {
     // Normal completion: release the controller.
     evt.getSourceController().close();
     return;
   }
   if (evt instanceof ControllerErrorEvent) {
     System.err.println("Failed to concatenate the files.");
     System.exit(-1);
   }
 }
Ejemplo n.º 4
0
  /**
   * Copies the content of the most recently received packet into <tt>data</tt>.
   *
   * @param buffer an optional <tt>Buffer</tt> instance associated with the specified <tt>data</tt>,
   *     <tt>offset</tt> and <tt>length</tt> and provided to the method in case the implementation
   *     would like to provide additional <tt>Buffer</tt> properties such as <tt>flags</tt>
   * @param data the <tt>byte[]</tt> that we'd like to copy the content of the packet to.
   * @param offset the position where we are supposed to start writing in <tt>data</tt>.
   * @param length the number of <tt>byte</tt>s available for writing in <tt>data</tt>.
   * @return the number of bytes read
   * @throws IOException if <tt>length</tt> is less than the size of the packet.
   */
  protected int read(Buffer buffer, byte[] data, int offset, int length) throws IOException {
    if (data == null) throw new NullPointerException("data");

    // A previous I/O error is sticky: report end-of-stream to the caller.
    if (ioError) return -1;

    RawPacket pkt;

    // Atomically take ownership of the latest packet so a concurrent producer
    // cannot hand us a packet that is in the middle of being replaced.
    synchronized (pktSyncRoot) {
      pkt = this.pkt;
      this.pkt = null;
    }

    int pktLength;

    if (pkt == null) {
      // No packet is currently available; report zero bytes read.
      pktLength = 0;
    } else {
      // By default, pkt will be returned to the pool after it was read.
      boolean poolPkt = true;

      try {
        pktLength = pkt.getLength();
        if (length < pktLength) {
          /*
           * If pkt is still the latest RawPacket made available to
           * reading, reinstate it for the next invocation of read;
           * otherwise, return it to the pool.
           */
          poolPkt = false;
          throw new IOException("Input buffer not big enough for " + pktLength);
        } else {
          byte[] pktBuffer = pkt.getBuffer();

          if (pktBuffer == null) {
            throw new NullPointerException(
                "pkt.buffer null, pkt.length " + pktLength + ", pkt.offset " + pkt.getOffset());
          } else {
            System.arraycopy(pkt.getBuffer(), pkt.getOffset(), data, offset, pktLength);
            if (buffer != null) buffer.setFlags(pkt.getFlags());
          }
        }
      } finally {
        if (!poolPkt) {
          // Try to reinstate pkt as the pending packet; if a newer packet arrived
          // in the meantime, fall through and pool pkt instead.
          synchronized (pktSyncRoot) {
            if (this.pkt == null) this.pkt = pkt;
            else poolPkt = true;
          }
        }
        if (poolPkt) {
          // Return pkt to the pool because it was successfully read.
          poolRawPacket(pkt);
        }
      }
    }

    return pktLength;
  }
Ejemplo n.º 5
0
      /**
       * Copies <tt>length</tt> bytes from <tt>buffer</tt> into a reused <tt>RawPacket</tt>,
       * optionally runs it through the reverse transformer, and notifies the matching
       * transfer handler so the resulting packet(s) can be pulled through the
       * corresponding <tt>PushSourceStream</tt>.
       *
       * @param buffer the bytes to write
       * @param offset the offset in <tt>buffer</tt> at which the bytes begin
       * @param length the number of bytes to write
       * @param transform whether to run the packet through the reverse transformer
       * @return the number of bytes written (always <tt>length</tt>)
       */
      public int write(byte[] buffer, int offset, int length, boolean transform) {
        // Reuse the RawPacket in slot 0 (and its backing buffer) across calls.
        RawPacket pkt = rawPacketArray[0];
        if (pkt == null) pkt = new RawPacket();
        rawPacketArray[0] = pkt;

        byte[] pktBuf = pkt.getBuffer();
        if (pktBuf == null || pktBuf.length < length) {
          // Grow the reusable buffer only when the incoming payload does not fit.
          pktBuf = new byte[length];
          pkt.setBuffer(pktBuf);
        }
        System.arraycopy(buffer, offset, pktBuf, 0, length);
        pkt.setOffset(0);
        pkt.setLength(length);

        if (transform) {
          // Pick the RTCP or RTP transformer depending on which stream this is.
          PacketTransformer packetTransformer =
              isControlStream ? rtcpPacketTransformer : rtpPacketTransformer;

          // reverseTransform may produce zero, one, or several packets.
          if (packetTransformer != null)
            rawPacketArray = packetTransformer.reverseTransform(rawPacketArray);
        }

        SourceTransferHandler transferHandler;
        PushSourceStream pushSourceStream;

        try {
          if (isControlStream) {
            transferHandler = controlTransferHandler;
            pushSourceStream = getControlInputStream();
          } else {
            transferHandler = dataTransferHandler;
            pushSourceStream = getDataInputStream();
          }
        } catch (IOException ioe) {
          throw new UndeclaredThrowableException(ioe);
        }

        for (int i = 0; i < rawPacketArray.length; i++) {
          RawPacket packet = rawPacketArray[i];

          // keep the first element for reuse
          if (i != 0) rawPacketArray[i] = null;

          if (packet != null) {
            // Publish the packet as "pending" and let the handler pull it via the stream.
            if (isControlStream) pendingControlPacket = packet;
            else pendingDataPacket = packet;

            if (transferHandler != null) {
              transferHandler.transferData(pushSourceStream);
            }
          }
        }

        return length;
      }
Ejemplo n.º 6
0
  /**
   * ************************************************************** Sample Usage for VideoTransmit
   * class **************************************************************
   *
   * <p>Transmits the given source URL over RTP to {@code <destIP>:<destPort>} for 60
   * seconds, then stops and exits.
   *
   * @param args {@code <sourceURL> <destIP> <destPort>}
   */
  public static void main(String[] args) {
    // We need three parameters to do the transmission
    // For example,
    //   java VideoTransmit file:/C:/media/test.mov  129.130.131.132 42050

    if (args.length < 3) {
      System.err.println("Usage: VideoTransmit <sourceURL> <destIP> <destPort>");
      System.exit(-1);
    }

    // Create a video transmit object with the specified params.
    VideoTransmit vt = new VideoTransmit(new MediaLocator(args[0]), args[1], args[2]);
    // Start the transmission
    String result = vt.start();

    // result will be non-null if there was an error. The return
    // value is a String describing the possible error. Print it.
    if (result != null) {
      System.err.println("Error : " + result);
      System.exit(0);
    }

    System.err.println("Start transmission for 60 seconds...");

    // Transmit for 60 seconds and then close the processor
    // This is a safeguard when using a capture data source
    // so that the capture device will be properly released
    // before quitting.
    // The right thing to do would be to have a GUI with a
    // "Stop" button that would call stop on VideoTransmit
    try {
      // Thread.sleep is static; invoking it via Thread.currentThread() was misleading.
      Thread.sleep(60000);
    } catch (InterruptedException ie) {
      // Preserve the interrupt status instead of silently swallowing it.
      Thread.currentThread().interrupt();
    }

    // Stop the transmission
    vt.stop();

    System.err.println("...transmission ended.");

    System.exit(0);
  }
 /**
  * Reads bytes into {@code buffer}, serving any pushed-back bytes first and only
  * consulting the underlying stream when the pushback buffer is empty.
  */
 private int read(byte[] buffer, int offset, int length) throws IOException {
   if (pushbackBufferLen <= 0) {
     // Nothing pushed back: delegate directly to the wrapped stream.
     return stream.read(buffer, offset, length);
   }
   // Serve as much as possible from the pushback buffer and advance its cursor.
   final int served = Math.min(length, pushbackBufferLen);
   System.arraycopy(pushbackBuffer, pushbackBufferOffset, buffer, offset, served);
   pushbackBufferOffset += served;
   pushbackBufferLen -= served;
   return served;
 }
Ejemplo n.º 8
0
 /**
  * Implements {@link ActiveSpeakerChangedListener#activeSpeakerChanged(long)}. Notifies this
  * <tt>RecorderRtpImpl</tt> that the audio <tt>ReceiveStream</tt> considered active has changed,
  * and that the new active stream has SSRC <tt>ssrc</tt>.
  *
  * @param ssrc the SSRC of the new active stream.
  */
 @Override
 public void activeSpeakerChanged(long ssrc) {
   // Only emit an event if someone is listening.
   if (eventHandler != null) {
     RecorderEvent e = new RecorderEvent();
     e.setAudioSsrc(ssrc);
     // TODO: how do we time this?
     e.setInstant(System.currentTimeMillis());
     e.setType(RecorderEvent.Type.SPEAKER_CHANGED);
     // NOTE(review): MediaType.VIDEO on an *audio* speaker-change event (the SSRC is
     // set via setAudioSsrc above) looks suspicious — confirm whether AUDIO was meant.
     e.setMediaType(MediaType.VIDEO);
     eventHandler.handleEvent(e);
   }
 }
Ejemplo n.º 9
0
  /**
   * Registers a <tt>DatagramPacketFilter</tt> that can drop <tt>DatagramPacket</tt>s
   * before they are turned into <tt>RawPacket</tt>s. Adding a filter that is already
   * registered (by <tt>equals</tt>) is a no-op.
   *
   * @param datagramPacketFilter the filter to register; must not be <tt>null</tt>
   */
  public synchronized void addDatagramPacketFilter(DatagramPacketFilter datagramPacketFilter) {
    if (datagramPacketFilter == null) throw new NullPointerException("datagramPacketFilter");

    if (datagramPacketFilters == null) {
      // First filter: start a new one-element array.
      datagramPacketFilters = new DatagramPacketFilter[] {datagramPacketFilter};
      return;
    }

    // Ignore duplicates.
    for (DatagramPacketFilter existing : datagramPacketFilters) {
      if (datagramPacketFilter.equals(existing)) return;
    }

    // Grow the array by one slot and append the new filter.
    final int oldLength = datagramPacketFilters.length;
    final DatagramPacketFilter[] grown = new DatagramPacketFilter[oldLength + 1];
    System.arraycopy(datagramPacketFilters, 0, grown, 0, oldLength);
    grown[oldLength] = datagramPacketFilter;
    datagramPacketFilters = grown;
  }
Ejemplo n.º 10
0
      /**
       * Reads the pending (most recently written) packet for this stream into
       * <tt>buffer</tt>, copying at most <tt>length</tt> bytes.
       *
       * @param buffer the destination array
       * @param offset the position in <tt>buffer</tt> at which to start writing
       * @param length the maximum number of bytes to copy
       * @return the number of bytes copied (0 if the pending packet has no buffer)
       * @throws IOException declared by the stream contract; not thrown here
       */
      @Override
      public int read(byte[] buffer, int offset, int length) throws IOException {

        // Pick the pending packet for this stream kind (control = RTCP, data = RTP).
        RawPacket pendingPacket;
        if (isControlStream) {
          pendingPacket = pendingControlPacket;
        } else {
          pendingPacket = pendingDataPacket;
        }
        int bytesToRead = 0;
        // NOTE(review): pendingPacket is dereferenced without a null check — this relies
        // on write() always publishing a packet before read() is invoked; confirm.
        byte[] pendingPacketBuffer = pendingPacket.getBuffer();
        if (pendingPacketBuffer != null) {
          int pendingPacketLength = pendingPacket.getLength();
          // Copy no more than what is available and no more than was requested.
          bytesToRead = length > pendingPacketLength ? pendingPacketLength : length;
          System.arraycopy(
              pendingPacketBuffer, pendingPacket.getOffset(), buffer, offset, bytesToRead);
        }
        return bytesToRead;
      }
  /**
   * Create a media locator from the given string.
   *
   * <p>A string that already contains a scheme (e.g. {@code "file:..."}, {@code "http:..."})
   * is used as-is; an absolute path is prefixed with {@code "file:"}; a relative path is
   * resolved against the current working directory.
   *
   * <p>Note: the previous {@code (ml = new MediaLocator(...)) != null} checks were
   * tautological — a constructor invocation never yields {@code null} — so they have
   * been removed. Callers that still check for a {@code null} return remain correct.
   *
   * @param url a URL or a file path (absolute or relative)
   * @return a <tt>MediaLocator</tt> for the given string; never <tt>null</tt>
   */
  @SuppressWarnings("unused")
  static MediaLocator createMediaLocator(String url) {
    // Already looks like a URL with a scheme: use it directly.
    if (url.indexOf(":") > 0) {
      return new MediaLocator(url);
    }

    // Absolute path: just prefix the file scheme.
    if (url.startsWith(File.separator)) {
      return new MediaLocator("file:" + url);
    }

    // Relative path: resolve against the current working directory.
    return new MediaLocator("file:" + System.getProperty("user.dir") + File.separator + url);
  }
Ejemplo n.º 12
0
  /**
   * Lets the user pick a file via the VFS file chooser (starting in the system temp
   * directory) and starts playback of the selected file.
   *
   * <p>Does nothing if JMF is unavailable or a player is already active.
   */
  public void chooseFile() {
    // Bail out if JMF failed to initialize.
    if (!h4JmfPlugin.jmf_ok) return;
    // Refuse to start a second concurrent player.
    if (h4JmfPlugin.playMP3 != null) {
      h4JmfPlugin.cnsl.append("playMP3!=null");
      return;
    }

    String tmpdir = System.getProperty("java.io.tmpdir");
    String[] paths =
        GUIUtilities.showVFSFileDialog(
            view, tmpdir + File.separator, JFileChooser.OPEN_DIALOG, false);
    // if(paths!=null && !paths[0].equals(filename))
    if (paths != null) {
      // The dialog returns at least one path when non-null; only the first is used.
      String filename = paths[0];
      h4JmfPlugin.cnsl.append("filename=" + filename);
      try {
        // NOTE(review): "file://" + filename makes the first path segment parse as a
        // host on some platforms; "file:" + filename (or File.toURI()) is the safer
        // construction — confirm before changing.
        URL url = new URL("file://" + filename);
        player_begin(url);
      } catch (Exception e) {
        // Malformed URL or player startup failure: log and show in the plugin console.
        logger.severe(e.getMessage());
        h4JmfPlugin.cnsl.append(e);
      }
    }
  } // chooseFile
Ejemplo n.º 13
0
  /**
   * Sample: creates a realized Processor producing IMA4 audio and Cinepak video in a
   * QuickTime container, and writes its output to "file://foo.mov" via a DataSink.
   *
   * <p>Exits with status -1 (after printing the cause) on any setup failure, 0 otherwise.
   *
   * @param args unused
   */
  public static void main(String[] args) {

    // ---------------- CUT HERE START ----------------- //

    Format formats[] = new Format[2];
    formats[0] = new AudioFormat(AudioFormat.IMA4);
    formats[1] = new VideoFormat(VideoFormat.CINEPAK);
    FileTypeDescriptor outputType = new FileTypeDescriptor(FileTypeDescriptor.QUICKTIME);
    Processor p = null;

    try {
      p = Manager.createRealizedProcessor(new ProcessorModel(formats, outputType));
    } catch (IOException e) {
      // Report the failure instead of exiting silently.
      System.err.println("Failed to create realized processor: " + e);
      System.exit(-1);
    } catch (NoProcessorException e) {
      System.err.println("No processor available: " + e);
      System.exit(-1);
    } catch (CannotRealizeException e) {
      System.err.println("Cannot realize processor: " + e);
      System.exit(-1);
    }
    // get the output of the processor
    DataSource source = p.getDataOutput();
    // create a File protocol MediaLocator with the location of the file to
    // which bits are to be written
    MediaLocator dest = new MediaLocator("file://foo.mov");
    // create a datasink to do the file writing & open the sink to make sure
    // we can write to it.
    DataSink filewriter = null;
    try {
      filewriter = Manager.createDataSink(source, dest);
      filewriter.open();
    } catch (NoDataSinkException e) {
      System.err.println("No data sink available: " + e);
      System.exit(-1);
    } catch (IOException e) {
      System.err.println("Failed to open data sink: " + e);
      System.exit(-1);
    } catch (SecurityException e) {
      System.err.println("Not permitted to open data sink: " + e);
      System.exit(-1);
    }
    // now start the filewriter and processor
    try {
      filewriter.start();
    } catch (IOException e) {
      System.err.println("Failed to start data sink: " + e);
      System.exit(-1);
    }
    p.start();
    // stop and close the processor when done capturing...
    // close the datasink when EndOfStream event is received...

    // ----------------- CUT HERE END ---------------- //
    sleepMillis(4000);
    p.stop();
    p.close();
    sleepMillis(1000);
    filewriter.close();
    sleepMillis(4000);

    System.exit(0);
  }

  /**
   * Sleeps for the given number of milliseconds, restoring the thread's interrupt status
   * if interrupted. (Thread.sleep is static; the previous
   * Thread.currentThread().sleep(...) form was misleading and swallowed the interrupt.)
   */
  private static void sleepMillis(long millis) {
    try {
      Thread.sleep(millis);
    } catch (InterruptedException ie) {
      Thread.currentThread().interrupt();
    }
  }
Ejemplo n.º 14
0
  /**
   * Creates the video advanced settings.
   *
   * @return video advanced settings panel.
   */
  private static Component createVideoAdvancedSettings() {
    ResourceManagementService resources = NeomediaActivator.getResources();

    final DeviceConfiguration deviceConfig = mediaService.getDeviceConfiguration();

    // Grid of labels/controls on top, reset button pinned to the bottom.
    TransparentPanel centerPanel = new TransparentPanel(new GridBagLayout());
    centerPanel.setMaximumSize(new Dimension(WIDTH, 150));

    JButton resetDefaultsButton =
        new JButton(resources.getI18NString("impl.media.configform.VIDEO_RESET"));
    JPanel resetButtonPanel = new TransparentPanel(new FlowLayout(FlowLayout.RIGHT));
    resetButtonPanel.add(resetDefaultsButton);

    final JPanel centerAdvancedPanel = new TransparentPanel(new BorderLayout());
    centerAdvancedPanel.add(centerPanel, BorderLayout.NORTH);
    centerAdvancedPanel.add(resetButtonPanel, BorderLayout.SOUTH);

    // Column 0: the row labels (resolution, frame rate, packets policy).
    GridBagConstraints constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.anchor = GridBagConstraints.NORTHWEST;
    constraints.insets = new Insets(5, 5, 0, 0);
    constraints.gridx = 0;
    constraints.weightx = 0;
    constraints.weighty = 0;
    constraints.gridy = 0;

    centerPanel.add(
        new JLabel(resources.getI18NString("impl.media.configform.VIDEO_RESOLUTION")), constraints);
    constraints.gridy = 1;
    constraints.insets = new Insets(0, 0, 0, 0);
    final JCheckBox frameRateCheck =
        new SIPCommCheckBox(resources.getI18NString("impl.media.configform.VIDEO_FRAME_RATE"));
    centerPanel.add(frameRateCheck, constraints);
    constraints.gridy = 2;
    constraints.insets = new Insets(5, 5, 0, 0);
    centerPanel.add(
        new JLabel(resources.getI18NString("impl.media.configform.VIDEO_PACKETS_POLICY")),
        constraints);

    // Column 1, row 0: the resolution selector; slot 0 is left null to mean "auto".
    constraints.weightx = 1;
    constraints.gridx = 1;
    constraints.gridy = 0;
    constraints.insets = new Insets(5, 0, 0, 5);
    Object[] resolutionValues = new Object[DeviceConfiguration.SUPPORTED_RESOLUTIONS.length + 1];
    System.arraycopy(
        DeviceConfiguration.SUPPORTED_RESOLUTIONS,
        0,
        resolutionValues,
        1,
        DeviceConfiguration.SUPPORTED_RESOLUTIONS.length);
    final JComboBox sizeCombo = new JComboBox(resolutionValues);
    sizeCombo.setRenderer(new ResolutionCellRenderer());
    sizeCombo.setEditable(false);
    centerPanel.add(sizeCombo, constraints);

    // default value is 20
    final JSpinner frameRate = new JSpinner(new SpinnerNumberModel(20, 5, 30, 1));
    frameRate.addChangeListener(
        new ChangeListener() {
          public void stateChanged(ChangeEvent e) {
            deviceConfig.setFrameRate(
                ((SpinnerNumberModel) frameRate.getModel()).getNumber().intValue());
          }
        });
    constraints.gridy = 1;
    constraints.insets = new Insets(0, 0, 0, 5);
    centerPanel.add(frameRate, constraints);

    // Checkbox toggles between a capped frame rate (spinner value) and unlimited (-1).
    frameRateCheck.addActionListener(
        new ActionListener() {
          public void actionPerformed(ActionEvent e) {
            if (frameRateCheck.isSelected()) {
              deviceConfig.setFrameRate(
                  ((SpinnerNumberModel) frameRate.getModel()).getNumber().intValue());
            } else // unlimited framerate
            deviceConfig.setFrameRate(-1);

            frameRate.setEnabled(frameRateCheck.isSelected());
          }
        });

    // Max bandwidth spinner, persisted on every change.
    final JSpinner videoMaxBandwidth =
        new JSpinner(
            new SpinnerNumberModel(deviceConfig.getVideoMaxBandwidth(), 1, Integer.MAX_VALUE, 1));
    videoMaxBandwidth.addChangeListener(
        new ChangeListener() {
          public void stateChanged(ChangeEvent e) {
            deviceConfig.setVideoMaxBandwidth(
                ((SpinnerNumberModel) videoMaxBandwidth.getModel()).getNumber().intValue());
          }
        });
    constraints.gridx = 1;
    constraints.gridy = 2;
    constraints.insets = new Insets(0, 0, 5, 5);
    centerPanel.add(videoMaxBandwidth, constraints);

    // Reset button restores "auto" resolution, unlimited frame rate and default bandwidth.
    resetDefaultsButton.addActionListener(
        new ActionListener() {
          public void actionPerformed(ActionEvent e) {
            // reset to defaults
            sizeCombo.setSelectedIndex(0);
            frameRateCheck.setSelected(false);
            frameRate.setEnabled(false);
            frameRate.setValue(20);
            // unlimited framerate
            deviceConfig.setFrameRate(-1);
            videoMaxBandwidth.setValue(DeviceConfiguration.DEFAULT_VIDEO_MAX_BANDWIDTH);
          }
        });

    // load selected value or auto
    Dimension videoSize = deviceConfig.getVideoSize();

    // NOTE(review): this uses && — the saved size is shown only when BOTH dimensions
    // differ from the defaults, so a size matching the default in just one dimension
    // falls back to "auto". Confirm this is intended (|| may have been meant).
    if ((videoSize.getHeight() != DeviceConfiguration.DEFAULT_VIDEO_HEIGHT)
        && (videoSize.getWidth() != DeviceConfiguration.DEFAULT_VIDEO_WIDTH))
      sizeCombo.setSelectedItem(deviceConfig.getVideoSize());
    else sizeCombo.setSelectedIndex(0);
    sizeCombo.addActionListener(
        new ActionListener() {
          public void actionPerformed(ActionEvent e) {
            Dimension selectedVideoSize = (Dimension) sizeCombo.getSelectedItem();

            if (selectedVideoSize == null) {
              // the auto value, default one
              selectedVideoSize =
                  new Dimension(
                      DeviceConfiguration.DEFAULT_VIDEO_WIDTH,
                      DeviceConfiguration.DEFAULT_VIDEO_HEIGHT);
            }
            deviceConfig.setVideoSize(selectedVideoSize);
          }
        });

    // Reflect the currently configured frame rate in the checkbox/spinner state.
    frameRateCheck.setSelected(
        deviceConfig.getFrameRate() != DeviceConfiguration.DEFAULT_VIDEO_FRAMERATE);
    frameRate.setEnabled(frameRateCheck.isSelected());

    if (frameRate.isEnabled()) frameRate.setValue(deviceConfig.getFrameRate());

    return centerAdvancedPanel;
  }
Ejemplo n.º 15
0
  /**
   * Demo entry point: captures audio from the first available capture device, encodes it
   * as GSM/RTP and transmits it over RTP to <tt>args[0]:args[1]</tt>, printing
   * transmission statistics once per second until the process is killed.
   *
   * @param args <tt>targetIP targetPort</tt>
   */
  public static void main(String[] args) {
    if (args.length != 2) {
      System.out.println("Usage: rtpaudio <targetIP> <targetPort>");
      System.exit(0);
    }

    try {
      RegistryDefaults.setDefaultFlags(RegistryDefaults.FMJ);

      // create a clean registry
      RegistryDefaults.unRegisterAll(RegistryDefaults.ALL);
      RegistryDefaults.registerAll(RegistryDefaults.FMJ);

      // remove all capture devices
      Vector deviceList = (Vector) CaptureDeviceManager.getDeviceList(null).clone();
      for (int i = 0; i < deviceList.size(); i++) {
        CaptureDeviceInfo cdi = (CaptureDeviceInfo) deviceList.elementAt(i);
        CaptureDeviceManager.removeDevice(cdi);
      }

      // update capture device list
      new net.sf.fmj.media.cdp.javasound.CaptureDevicePlugger().addCaptureDevices();
      PlugInManager.commit();

      deviceList = (Vector) CaptureDeviceManager.getDeviceList(null).clone();
      if ((null == deviceList) || (deviceList.size() == 0)) {
        System.out.println("### ERROR found no audio capture device");
        System.exit(0);
      }

      // enumerate all codec
      Vector codecList = PlugInManager.getPlugInList(null, null, PlugInManager.CODEC);
      System.out.println("found " + codecList.size() + " codec");
      for (int i = 0; i < codecList.size(); i++) {
        String aCodecClass = (String) codecList.elementAt(i);
        System.out.println("# " + (i + 1) + " " + aCodecClass);
      }

      // fetch first available audio capture device
      deviceList = (Vector) CaptureDeviceManager.getDeviceList(null).clone();
      CaptureDeviceInfo captureDeviceInfo = (CaptureDeviceInfo) deviceList.elementAt(0);
      System.out.println("### using " + captureDeviceInfo.getName());
      System.out.println("### locator " + captureDeviceInfo.getLocator());

      javax.media.protocol.DataSource dataSource =
          javax.media.Manager.createDataSource(
              new javax.media.MediaLocator(captureDeviceInfo.getLocator().toString()));
      // javax.media.protocol.DataSource dataSource =
      // javax.media.Manager.createDataSource(new
      // javax.media.MediaLocator("javasound://"));
      System.out.println("### created datasource " + dataSource.getClass().getName());

      javax.media.control.FormatControl[] formatControls =
          ((javax.media.protocol.CaptureDevice) dataSource).getFormatControls();
      System.out.println("got format control " + formatControls[0].getClass().getName());

      System.out.println("current format is " + formatControls[0].getFormat());

      // set audio capture format
      javax.media.Format[] formats = formatControls[0].getSupportedFormats();
      for (int i = 0; i < formats.length; i++) {
        javax.media.format.AudioFormat af = (javax.media.format.AudioFormat) formats[i];
        if ((af.getChannels() == 1) && (af.getSampleSizeInBits() == 16)) {
          if (af.getSampleRate() == Format.NOT_SPECIFIED) {
            // Pick the first mono/16-bit format with an unspecified rate and pin it to 8 kHz.
            javax.media.format.AudioFormat newAudioFormat =
                new javax.media.format.AudioFormat(
                    af.getEncoding(),
                    8000.0f,
                    javax.media.Format.NOT_SPECIFIED,
                    javax.media.Format.NOT_SPECIFIED);
            // javax.media.format.AudioFormat newAudioFormat = new
            // javax.media.format.AudioFormat(af.getEncoding(),
            // 44100.0f, javax.media.Format.NOT_SPECIFIED,
            // javax.media.Format.NOT_SPECIFIED);
            formatControls[0].setFormat(newAudioFormat.intersects(af));
            break;
          }
        }
      }
      System.out.println("current format is now " + formatControls[0].getFormat());

      FrameProcessingControl fpc = null;

      // adujst recording buffer ( to adjust latency )
      dataSource.stop();
      Object[] controls = dataSource.getControls();
      for (int i = 0; i < controls.length; i++) {
        String className = controls[i].getClass().getName();
        if (-1 != className.indexOf("JavaSoundBufferControl")) {
          javax.media.control.BufferControl bc = (javax.media.control.BufferControl) controls[i];
          System.out.println(
              "### current javasound buffer length is " + bc.getBufferLength() + " ms");
          bc.setBufferLength(40);
          System.out.println(
              "### current javasound buffer length is " + bc.getBufferLength() + " ms");
        } else if (-1 != className.indexOf("JitterBufferControl")) {
          javax.media.control.BufferControl bc = (javax.media.control.BufferControl) controls[i];
          System.out.println("### current jitter buffer length is " + bc.getBufferLength() + " ms");
          bc.setBufferLength(80);
          System.out.println("### current jitter buffer length is " + bc.getBufferLength() + " ms");
        } else if (-1 != className.indexOf("FPC")) {
          fpc = (FrameProcessingControl) controls[i];
          System.out.println("### found bitrate control " + fpc.getClass());
        }
      }
      dataSource.start();

      // create processor
      javax.media.Processor processor = javax.media.Manager.createProcessor(dataSource);
      System.out.println("### created processor " + processor.getClass().getName());

      // Poll (up to ~10 s) for the Configured state rather than blocking indefinitely.
      processor.configure();
      for (int idx = 0; idx < 100; idx++) {
        if (processor.getState() == Processor.Configured) {
          break;
        }
        Thread.sleep(100);
      }
      System.out.println("### processor state " + processor.getState());

      processor.setContentDescriptor(
          new javax.media.protocol.ContentDescriptor(ContentDescriptor.RAW_RTP));

      javax.media.control.TrackControl[] tracks = processor.getTrackControls();
      // /tracks[0].setFormat(new
      // javax.media.format.AudioFormat(javax.media.format.AudioFormat.ULAW_RTP,
      // 8000, 8, 1));
      tracks[0].setFormat(
          new javax.media.format.AudioFormat(javax.media.format.AudioFormat.GSM_RTP, 8000, 8, 1));

      // Poll (up to ~10 s) for the Realized state as above.
      processor.realize();
      for (int idx = 0; idx < 100; idx++) {
        if (processor.getState() == Controller.Realized) {
          break;
        }
        Thread.sleep(100);
      }
      System.out.println("### processor state " + processor.getState());

      javax.media.protocol.DataSource dataOutput = processor.getDataOutput();
      System.out.println("### processor data output " + dataOutput.getClass().getName());

      // BitRateControl
      BitRateControl bitrateControl = null;

      Object[] controls2 = dataOutput.getControls();
      for (int i = 0; i < controls2.length; i++) {
        if (controls2[i] instanceof BitRateControl) {
          bitrateControl = (BitRateControl) controls2[i];
          System.out.println("### found bitrate control " + bitrateControl.getClass());
          break;
        }
      }

      // PacketSizeControl
      Object[] controls3 = processor.getControls();
      for (int i = 0; i < controls3.length; i++) {
        if (controls3[i] instanceof PacketSizeControl) {
          PacketSizeControl psc = (PacketSizeControl) controls3[i];
          System.out.println("### current packetsize is " + psc.getPacketSize() + " bytes");
          psc.setPacketSize(66);
          System.out.println("### current packetsize is " + psc.getPacketSize() + " bytes");
          break;
        }
      }

      // enumerate all controls of the processor
      Object[] pcontrols = processor.getControls();
      for (int i = 0; i < pcontrols.length; i++) {
        System.out.println("processor control " + i + " " + pcontrols[i]);
      }

      javax.media.rtp.RTPManager rtpManager = javax.media.rtp.RTPManager.newInstance();

      javax.media.rtp.SessionAddress local =
          new javax.media.rtp.SessionAddress(
              InetAddress.getLocalHost(), Integer.valueOf(args[1]).intValue());
      javax.media.rtp.SessionAddress target =
          new javax.media.rtp.SessionAddress(
              InetAddress.getByName(args[0]), Integer.valueOf(args[1]).intValue());

      rtpManager.initialize(local);
      rtpManager.addTarget(target);

      javax.media.rtp.SendStream sendStream = rtpManager.createSendStream(dataOutput, 0);
      sendStream.start();

      processor.start();
      Thread.sleep(1000);

      System.out.println("\n>>>>>>  TRANSMITTING ULAW/RTP AUDIO NOW");
      // Intentional infinite loop: print stats once per second until the process is killed.
      while (2 > 1) {
        Thread.sleep(1000);

        if (null != bitrateControl) {
          TransmissionStats stats = sendStream.getSourceTransmissionStats();
          // NOTE(review): fpc may still be null here if no "FPC" control was found
          // earlier — only bitrateControl is null-checked. Potential NPE; confirm.
          System.out.println(
              "rtp audio send: bitrate="
                  + bitrateControl.getBitRate()
                  + " (pdu="
                  + stats.getPDUTransmitted()
                  + " bytes="
                  + stats.getBytesTransmitted()
                  + " overrun="
                  + fpc.getFramesDropped()
                  + ")");
        }
      }
    } catch (Exception ex) {
      ex.printStackTrace();
    }

    System.exit(0);
  }
Ejemplo n.º 16
0
  /**
   * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
   * the <tt>Processor</tt>s that this instance uses to transcode media.
   *
   * @param ev the event to handle.
   */
  public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
      return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
      logger.warn("Event from an orphaned processor, ignoring: " + ev);
      return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
      if (logger.isInfoEnabled()) {
        logger.info(
            "Configured processor for ReceiveStream ssrc="
                + desc.ssrc
                + " ("
                + desc.format
                + ")"
                + " "
                + System.currentTimeMillis());
      }

      boolean audio = desc.format instanceof AudioFormat;

      if (audio) {
        ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
        if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
          logger.error(
              "Failed to set the Processor content "
                  + "descriptor to "
                  + AUDIO_CONTENT_DESCRIPTOR
                  + ". Actual result: "
                  + cd);
          removeReceiveStream(desc, false);
          return;
        }
      }

      for (TrackControl track : processor.getTrackControls()) {
        Format trackFormat = track.getFormat();

        if (audio) {
          final long ssrc = desc.ssrc;
          SilenceEffect silenceEffect;
          if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
            silenceEffect = new SilenceEffect(48000);
          } else {
            // We haven't tested that the RTP timestamps survive
            // the journey through the chain when codecs other than
            // opus are in use, so for the moment we rely on FMJ's
            // timestamps for non-opus formats.
            silenceEffect = new SilenceEffect();
          }

          silenceEffect.setListener(
              new SilenceEffect.Listener() {
                boolean first = true;

                @Override
                public void onSilenceNotInserted(long timestamp) {
                  if (first) {
                    first = false;
                    // send event only
                    audioRecordingStarted(ssrc, timestamp);
                  } else {
                    // change file and send event
                    resetRecording(ssrc, timestamp);
                  }
                }
              });
          desc.silenceEffect = silenceEffect;
          AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
          audioLevelEffect.setAudioLevelListener(
              new SimpleAudioLevelListener() {
                @Override
                public void audioLevelChanged(int level) {
                  activeSpeakerDetector.levelChanged(ssrc, level);
                }
              });

          try {
            // We add an effect, which will insert "silence" in
            // place of lost packets.
            track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
          } catch (UnsupportedPlugInException upie) {
            logger.warn("Failed to insert silence effect: " + upie);
            // But do go on, a recording without extra silence is
            // better than nothing ;)
          }
        } else {
          // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
          if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format);
          else {
            logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
            // we currently only support vp8
            removeReceiveStream(desc, false);
            return;
          }
        }
      }

      processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      desc.dataSource = processor.getDataOutput();

      long ssrc = desc.ssrc;
      boolean audio = desc.format instanceof AudioFormat;
      String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

      // XXX '\' on windows?
      String filename = getNextFilename(path + "/" + ssrc, suffix);
      desc.filename = filename;

      DataSink dataSink;
      if (audio) {
        try {
          dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
        } catch (NoDataSinkException ndse) {
          logger.error("Could not create DataSink: " + ndse);
          removeReceiveStream(desc, false);
          return;
        }

      } else {
        dataSink = new WebmDataSink(filename, desc.dataSource);
      }

      if (logger.isInfoEnabled())
        logger.info(
            "Created DataSink ("
                + dataSink
                + ") for SSRC="
                + ssrc
                + ". Output filename: "
                + filename);
      try {
        dataSink.open();
      } catch (IOException e) {
        logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (!audio) {
        final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
        webmDataSink.setSsrc(ssrc);
        webmDataSink.setEventHandler(eventHandler);
        webmDataSink.setKeyFrameControl(
            new KeyFrameControlAdapter() {
              @Override
              public boolean requestKeyFrame(boolean urgent) {
                return requestFIR(webmDataSink);
              }
            });
      }

      try {
        dataSink.start();
      } catch (IOException e) {
        logger.error(
            "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc);

      desc.dataSink = dataSink;

      processor.start();
    } else if (logger.isDebugEnabled()) {
      logger.debug(
          "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
  }
 /**
  * Prints command-line usage to stderr and terminates the JVM.
  *
  * <p>Called both for genuinely malformed invocations and when required
  * arguments are missing (see argument parsing in {@code main}).
  * Never returns.
  */
 static void prUsage() {
   System.err.println("Usage: java FilesConcator -o <output> <input> ...");
   // Fixed: the <output>/<input> descriptions were swapped in the original.
   System.err.println("     <output>: output URL or file name");
   System.err.println("     <input>: input URL or file name");
   // NOTE(review): exit status 0 signals success even though this is an
   // error path — consider exit(1); kept as-is since callers/scripts may
   // rely on the current status.
   System.exit(0);
 }