Example #1
  /**
   * {@inheritDoc}
   *
   * @param format unused, since this implementation records multiple streams using potentially
   *     different formats.
   * @param dirname the path to the directory into which this <tt>Recorder</tt> will store the
   *     recorded media files.
   */
  @Override
  public void start(String format, String dirname) throws IOException, MediaException {
    if (logger.isInfoEnabled()) logger.info("Starting, format=" + format + " " + hashCode());
    path = dirname;

    MediaService mediaService = LibJitsi.getMediaService();

    /*
     * Note that we use only one RTPConnector for both the RTPTranslator
     * and the RTPManager instances. The this.translator will write to its
     * output streams, and this.rtpManager will read from its input streams.
     */
    rtpConnector = new RTPConnectorImpl(redPayloadType, ulpfecPayloadType);

    rtpManager = RTPManager.newInstance();

    /*
     * Add the formats that we know about.
     */
    rtpManager.addFormat(vp8RtpFormat, vp8PayloadType);
    rtpManager.addFormat(opusFormat, opusPayloadType);
    rtpManager.addReceiveStreamListener(this);

    /*
     * Note: When this.rtpManager sends RTCP sender/receiver reports, they
     * will end up being written to its own input stream. This is not
     * expected to cause problems, but might be something to keep an eye on.
     */
    rtpManager.initialize(rtpConnector);

    /*
     * Register a fake call participant.
     * TODO: can we use a more generic MediaStream here?
     */
    streamRTPManager =
        new StreamRTPManager(
            mediaService.createMediaStream(
                new MediaDeviceImpl(new CaptureDeviceInfo(), MediaType.VIDEO)),
            translator);

    streamRTPManager.initialize(rtpConnector);

    rtcpFeedbackSender = translator.getRtcpFeedbackMessageSender();

    translator.addFormat(streamRTPManager, opusFormat, opusPayloadType);

    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, redFormat,
    // redPayloadType);
    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, ulpfecFormat,
    // ulpfecPayloadType);
    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager,
    // mediaFormatImpl.getFormat(), vp8PayloadType);

    started = true;
  }
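For context, a minimal sketch of how a caller might drive this method, assuming a running libjitsi stack; the output directory is hypothetical and error handling is elided:

import org.jitsi.service.libjitsi.LibJitsi;
import org.jitsi.service.neomedia.MediaService;
import org.jitsi.service.neomedia.RTPTranslator;

public class RecorderStartSketch {
  public static void main(String[] args) throws Exception {
    LibJitsi.start(); // bring up the libjitsi media stack
    MediaService mediaService = LibJitsi.getMediaService();

    // In a real deployment the translator would be the one already carrying
    // the conference's RTP traffic; creating a fresh one here is illustrative.
    RTPTranslator translator = mediaService.createRTPTranslator();

    RecorderRtpImpl recorder = new RecorderRtpImpl(translator);
    recorder.start(null, "/tmp/recordings"); // format is unused (see javadoc)

    // ... media flows, one output file per SSRC ...

    recorder.stop();
    LibJitsi.stop();
  }
}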
Example #2
  /**
   * Gets the <tt>MediaFormat</tt>s among the specified <tt>mediaFormats</tt> which have the
   * specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>.
   *
   * @param mediaFormats the <tt>MediaFormat</tt>s from which to filter out only the ones which have
   *     the specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>
   * @param encoding the well-known encoding (name) of the <tt>MediaFormat</tt>s to be retrieved
   * @param clockRate the clock rate of the <tt>MediaFormat</tt>s to be retrieved; {@link
   *     #CLOCK_RATE_NOT_SPECIFIED} if any clock rate is acceptable
   * @return a <tt>List</tt> of the <tt>MediaFormat</tt>s among <tt>mediaFormats</tt> which have the
   *     specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>
   */
  private List<MediaFormat> getMatchingMediaFormats(
      MediaFormat[] mediaFormats, String encoding, double clockRate) {
    /*
     * XXX Use String#equalsIgnoreCase(String) because some clients transmit
     * some of the codecs starting with capital letters.
     */

    /*
     * As per RFC 3551, Section 4.5.2, because of a mistake in RFC 1890 and
     * for backward compatibility, G.722 should always be announced as 8000
     * even though it is wideband. So, if someone is looking for G722/16000,
     * then: Forgive them, for they know not what they do!
     */
    if ("G722".equalsIgnoreCase(encoding) && (16000 == clockRate)) {
      clockRate = 8000;
      if (logger.isInfoEnabled()) logger.info("Suppressing erroneous 16000 announcement for G.722");
    }

    List<MediaFormat> supportedMediaFormats = new ArrayList<MediaFormat>();

    for (MediaFormat mediaFormat : mediaFormats) {
      if (mediaFormat.getEncoding().equalsIgnoreCase(encoding)
          && ((CLOCK_RATE_NOT_SPECIFIED == clockRate)
              || (mediaFormat.getClockRate() == clockRate))) {
        supportedMediaFormats.add(mediaFormat);
      }
    }
    return supportedMediaFormats;
  }
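To make the matching rules concrete without pulling in the libjitsi types, here is a self-contained sketch that mirrors the filter above; the sentinel value of CLOCK_RATE_NOT_SPECIFIED and the Fmt stand-in for MediaFormat are assumptions for illustration:

import java.util.ArrayList;
import java.util.List;

public class FormatMatchSketch {
  static final double CLOCK_RATE_NOT_SPECIFIED = -1; // assumed sentinel value

  /** Minimal stand-in for MediaFormat, for illustration only. */
  static class Fmt {
    final String encoding;
    final double clockRate;

    Fmt(String encoding, double clockRate) {
      this.encoding = encoding;
      this.clockRate = clockRate;
    }
  }

  static List<Fmt> matching(List<Fmt> formats, String encoding, double clockRate) {
    // G.722 quirk from the method above: it is always announced as 8000.
    if ("G722".equalsIgnoreCase(encoding) && clockRate == 16000) clockRate = 8000;

    List<Fmt> result = new ArrayList<Fmt>();
    for (Fmt f : formats) {
      // Case-insensitive encoding match; clock rate checked only if specified.
      if (f.encoding.equalsIgnoreCase(encoding)
          && (clockRate == CLOCK_RATE_NOT_SPECIFIED || f.clockRate == clockRate)) {
        result.add(f);
      }
    }
    return result;
  }
}

Calling matching(formats, "g722", 16000) returns the G722/8000 entry, per the RFC 3551 note in the method.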
Example #3
  private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) {
    if (receiveStream.format instanceof VideoFormat) {
      rtpConnector.packetBuffer.disable(receiveStream.ssrc);
      emptyPacketBuffer(receiveStream.ssrc);
    }

    if (receiveStream.dataSink != null) {
      try {
        receiveStream.dataSink.stop();
      } catch (IOException e) {
        logger.error("Failed to stop DataSink " + e);
      }

      receiveStream.dataSink.close();
    }

    if (receiveStream.processor != null) {
      receiveStream.processor.stop();
      receiveStream.processor.close();
    }

    DataSource dataSource = receiveStream.receiveStream.getDataSource();
    if (dataSource != null) {
      try {
        dataSource.stop();
      } catch (IOException ioe) {
        logger.warn("Failed to stop DataSource");
      }
      dataSource.disconnect();
    }

    synchronized (receiveStreams) {
      receiveStreams.remove(receiveStream);
    }
  }
Example #4
  /**
   * Retransmits a packet to {@link #channel}. If the destination supports the RTX format, the
   * packet will be encapsulated in RTX, otherwise, the packet will be retransmitted as-is.
   *
   * @param pkt the packet to retransmit.
   * @param after the {@code TransformEngine} in the chain of {@code TransformEngine}s of the
   *     associated {@code MediaStream} after which the injection of {@code pkt} is to begin
   * @return {@code true} if the packet was successfully retransmitted, {@code false} otherwise.
   */
  public boolean retransmit(RawPacket pkt, TransformEngine after) {
    boolean destinationSupportsRtx = rtxPayloadType != -1;
    boolean retransmitPlain;

    if (destinationSupportsRtx) {
      long rtxSsrc = getRtxSsrc(pkt);

      if (rtxSsrc == -1) {
        logger.warn(
            "Cannot find SSRC for RTX, retransmitting plain. " + "SSRC=" + pkt.getSSRCAsLong());
        retransmitPlain = true;
      } else {
        retransmitPlain = !encapsulateInRtxAndTransmit(pkt, rtxSsrc, after);
      }
    } else {
      retransmitPlain = true;
    }

    if (retransmitPlain) {
      MediaStream mediaStream = channel.getStream();

      if (mediaStream != null) {
        try {
          mediaStream.injectPacket(pkt, /* data */ true, after);
        } catch (TransmissionFailedException tfe) {
          logger.warn("Failed to retransmit a packet.");
          return false;
        }
      }
    }

    return true;
  }
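For context, a hypothetical caller: a NACK handler that looks up each lost packet in a sender-side cache and hands it to retransmit(). The cache, its sequence-number key, and the null "after" argument (inject at the head of the transform chain) are assumptions for illustration, not part of the class shown here.

import java.util.Map;

public class NackHandlerSketch {
  /** Sketch: retransmit every NACKed packet that is still cached. */
  static void onNack(
      RtxTransformer rtxTransformer, Map<Integer, RawPacket> sentPacketCache, int[] nackedSeqs) {
    for (int seq : nackedSeqs) {
      RawPacket cached = sentPacketCache.get(seq); // assumed seq-indexed cache
      if (cached != null) {
        rtxTransformer.retransmit(cached, /* after */ null);
      }
    }
  }
}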
Example #5
  /**
   * Restarts the recording for a specific SSRC.
   *
   * @param ssrc the SSRC for which to restart recording.
   * @param timestamp the RTP timestamp (from the first RTP packet of the new recording).
   */
  private void resetRecording(long ssrc, long timestamp) {
    ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);

    // we only restart audio recordings
    if (receiveStream != null && receiveStream.format instanceof AudioFormat) {
      String newFilename = getNextFilename(path + "/" + ssrc, AUDIO_FILENAME_SUFFIX);

      // flush the buffer contained in the MP3 encoder
      String s = "trying to flush ssrc=" + ssrc;
      Processor p = receiveStream.processor;
      if (p != null) {
        s += " p!=null";
        for (TrackControl tc : p.getTrackControls()) {
          Object o = tc.getControl(FlushableControl.class.getName());
          if (o != null) ((FlushableControl) o).flush();
        }
      }
      if (logger.isDebugEnabled()) logger.debug(s);

      if (logger.isInfoEnabled()) {
        logger.info("Restarting recording for SSRC=" + ssrc + ". New filename: " + newFilename);
      }

      receiveStream.dataSink.close();
      receiveStream.dataSink = null;

      // flush the FMJ jitter buffer
      // DataSource ds = receiveStream.receiveStream.getDataSource();
      // if (ds instanceof net.sf.fmj.media.protocol.rtp.DataSource)
      //    ((net.sf.fmj.media.protocol.rtp.DataSource)ds).flush();

      receiveStream.filename = newFilename;
      try {
        receiveStream.dataSink =
            Manager.createDataSink(
                receiveStream.dataSource, new MediaLocator("file:" + newFilename));
      } catch (NoDataSinkException ndse) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ndse);
        removeReceiveStream(receiveStream, false);
        // Bail out: dataSink is still null here and must not be opened below.
        return;
      }

      try {
        receiveStream.dataSink.open();
        receiveStream.dataSink.start();
      } catch (IOException ioe) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ioe);
        removeReceiveStream(receiveStream, false);
        return;
      }

      audioRecordingStarted(ssrc, timestamp);
    }
  }
Example #6
  /**
   * Tries to find an SSRC paired with {@code ssrc} in an FID group in one of the channels from
   * {@link #channel}'s {@code Content}. Returns -1 on failure.
   *
   * @param pkt the {@code RawPacket} that holds the RTP packet for which to find a paired SSRC.
   * @return An SSRC paired with {@code ssrc} in an FID group, or -1.
   */
  private long getRtxSsrc(RawPacket pkt) {
    StreamRTPManager receiveRTPManager =
        channel.getStream().getRTPTranslator().findStreamRTPManagerByReceiveSSRC(pkt.getSSRC());

    MediaStreamTrackReceiver receiver = null;
    if (receiveRTPManager != null) {
      MediaStream receiveStream = receiveRTPManager.getMediaStream();
      if (receiveStream != null) {
        receiver = receiveStream.getMediaStreamTrackReceiver();
      }
    }

    if (receiver == null) {
      return -1;
    }

    RTPEncoding encoding = receiver.resolveRTPEncoding(pkt);
    if (encoding == null) {
      logger.warn(
          "encoding_not_found"
              + ",stream_hash="
              + channel.getStream().hashCode()
              + " ssrc="
              + pkt.getSSRCAsLong());
      return -1;
    }

    return encoding.getRTXSSRC();
  }
Example #7
  /**
   * Removes the RTX encapsulation from a packet.
   *
   * @param pkt the packet to remove the RTX encapsulation from.
   * @return the original media packet represented by {@code pkt}, or null if we couldn't
   *     reconstruct the original packet.
   */
  private RawPacket deRtx(RawPacket pkt) {
    boolean success = false;

    if (pkt.getPayloadLength() - pkt.getPaddingSize() < 2) {
      // We need at least 2 bytes to read the OSN field.
      if (logger.isDebugEnabled()) {
        logger.debug("Dropping an incoming RTX packet with padding only: " + pkt);
      }
      return null;
    }

    long mediaSsrc = getPrimarySsrc(pkt);
    if (mediaSsrc != -1) {
      if (rtxAssociatedPayloadType != -1) {
        int osn = pkt.getOriginalSequenceNumber();
        // Remove the RTX header by moving the RTP header two bytes
        // right.
        byte[] buf = pkt.getBuffer();
        int off = pkt.getOffset();
        System.arraycopy(buf, off, buf, off + 2, pkt.getHeaderLength());

        pkt.setOffset(off + 2);
        pkt.setLength(pkt.getLength() - 2);

        pkt.setSSRC((int) mediaSsrc);
        pkt.setSequenceNumber(osn);
        pkt.setPayloadType(rtxAssociatedPayloadType);
        success = true;
      } else {
        logger.warn(
            "RTX packet received, but no APT is defined. Packet "
                + "SSRC "
                + pkt.getSSRCAsLong()
                + ", associated media"
                + " SSRC "
                + mediaSsrc);
      }
    }

    // If we failed to handle the RTX packet, drop it.
    return success ? pkt : null;
  }
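The byte-level layout this method undoes (and which encapsulateInRtxAndTransmit in Example #12 produces) can be checked with a standalone round trip; the 12-byte header length assumes no CSRCs or header extension:

public class RtxOsnSketch {
  public static void main(String[] args) {
    final int headerLength = 12; // minimal RTP header: no CSRCs, no extension
    byte[] payload = {(byte) 0xDE, (byte) 0xAD};
    int osn = 0xABCD; // original sequence number of the retransmitted packet

    // Encapsulate (RFC 4588): header | OSN (2 bytes, big-endian) | payload.
    byte[] buf = new byte[headerLength + 2 + payload.length];
    buf[headerLength] = (byte) ((osn >> 8) & 0xff);
    buf[headerLength + 1] = (byte) (osn & 0xff);
    System.arraycopy(payload, 0, buf, headerLength + 2, payload.length);

    // De-encapsulate, mirroring deRtx(): read the OSN first, then move the
    // RTP header two bytes to the right and advance the packet offset by two.
    int recovered = ((buf[headerLength] & 0xff) << 8) | (buf[headerLength + 1] & 0xff);
    System.arraycopy(buf, 0, buf, 2, headerLength);
    int off = 2;
    int len = buf.length - 2; // header + payload; the OSN is gone

    System.out.println("recovered OSN=0x" + Integer.toHexString(recovered)); // abcd
    System.out.println(
        "payload intact: "
            + (buf[off + headerLength] == (byte) 0xDE)
            + ", length ok: "
            + (len == headerLength + payload.length));
  }
}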
Example #8
  /**
   * Returns the SSRC paired with <tt>ssrc</tt> in an FID source-group, if any. If none is found,
   * returns -1.
   *
   * @return the SSRC paired with <tt>ssrc</tt> in an FID source-group, if any. If none is found,
   *     returns -1.
   */
  private long getPrimarySsrc(RawPacket pkt) {
    MediaStreamTrackReceiver receiver = channel.getStream().getMediaStreamTrackReceiver();

    if (receiver == null) {
      if (logger.isDebugEnabled()) {
        logger.debug("Dropping an incoming RTX packet from an unknown source.");
      }
      return -1;
    }

    RTPEncoding encoding = receiver.resolveRTPEncoding(pkt);
    if (encoding == null) {
      if (logger.isDebugEnabled()) {
        logger.debug("Dropping an incoming RTX packet from an unknown source.");
      }
      return -1;
    }

    return encoding.getPrimarySSRC();
  }
Example #9
 public static CodecInfo getCodecInfo(MediaCodecInfo codecInfo) {
   String[] types = codecInfo.getSupportedTypes();
   for (String type : types) {
     try {
       if (type.equals(MEDIA_CODEC_TYPE_H263)) return new H263CodecInfo(codecInfo);
       else if (type.equals(MEDIA_CODEC_TYPE_H264)) return new H264CodecInfo(codecInfo);
       else if (type.equals(MEDIA_CODEC_TYPE_VP8)) return new VP8CodecInfo(codecInfo);
     } catch (IllegalArgumentException e) {
       logger.error(
           "Error initializing codec info: " + codecInfo.getName() + ", type: " + type, e);
     }
   }
   return null;
 }
Example #10
  @Override
  public void stop() {
    if (started) {
      if (logger.isInfoEnabled()) logger.info("Stopping " + hashCode());

      // remove the recorder from the translator (i.e. stop new packets from
      // being written to rtpConnector)
      if (streamRTPManager != null) streamRTPManager.dispose();

      HashSet<ReceiveStreamDesc> streamsToRemove = new HashSet<ReceiveStreamDesc>();
      synchronized (receiveStreams) {
        streamsToRemove.addAll(receiveStreams);
      }

      for (ReceiveStreamDesc r : streamsToRemove) removeReceiveStream(r, false);

      rtpConnector.rtcpPacketTransformer.close();
      rtpConnector.rtpPacketTransformer.close();
      rtpManager.dispose();

      started = false;
    }
  }
Example #11
  private void emptyPacketBuffer(long ssrc) {
    RawPacket[] pkts = rtpConnector.packetBuffer.emptyBuffer(ssrc);
    RTPConnectorImpl.OutputDataStreamImpl dataStream;

    try {
      dataStream = rtpConnector.getDataOutputStream();
    } catch (IOException ioe) {
      logger.error("Failed to empty packet buffer for SSRC=" + ssrc + ": " + ioe);
      return;
    }
    for (RawPacket pkt : pkts)
      dataStream.write(
          pkt.getBuffer(), pkt.getOffset(), pkt.getLength(), false /* already transformed */);
  }
Example #12
  /**
   * Encapsulates {@code pkt} in the RTX format, using {@code rtxSsrc} as its SSRC, and transmits it
   * to {@link #channel} by injecting it in the {@code MediaStream}.
   *
   * @param pkt the packet to transmit.
   * @param rtxSsrc the SSRC for the RTX stream.
   * @param after the {@code TransformEngine} in the chain of {@code TransformEngine}s of the
   *     associated {@code MediaStream} after which the injection of {@code pkt} is to begin
   * @return {@code true} if the packet was successfully retransmitted, {@code false} otherwise.
   */
  private boolean encapsulateInRtxAndTransmit(RawPacket pkt, long rtxSsrc, TransformEngine after) {
    byte[] buf = pkt.getBuffer();
    int len = pkt.getLength();
    int off = pkt.getOffset();

    byte[] newBuf = new byte[len + 2];
    RawPacket rtxPkt = new RawPacket(newBuf, 0, len + 2);

    int osn = pkt.getSequenceNumber();
    int headerLength = pkt.getHeaderLength();
    int payloadLength = pkt.getPayloadLength();

    // Copy the header.
    System.arraycopy(buf, off, newBuf, 0, headerLength);

    // Set the OSN field.
    newBuf[headerLength] = (byte) ((osn >> 8) & 0xff);
    newBuf[headerLength + 1] = (byte) (osn & 0xff);

    // Copy the payload.
    System.arraycopy(buf, off + headerLength, newBuf, headerLength + 2, payloadLength);

    MediaStream mediaStream = channel.getStream();
    if (mediaStream != null) {
      rtxPkt.setSSRC((int) rtxSsrc);
      rtxPkt.setPayloadType(rtxPayloadType);
      // Only call getNextRtxSequenceNumber() when we're sure we're going
      // to transmit a packet, because it consumes a sequence number.
      rtxPkt.setSequenceNumber(getNextRtxSequenceNumber(rtxSsrc));
      try {
        mediaStream.injectPacket(rtxPkt, /* data */ true, after);
      } catch (TransmissionFailedException tfe) {
        logger.warn("Failed to transmit an RTX packet.");
        return false;
      }
    }

    return true;
  }
Example #13
  static {
    bannedYuvCodecs = new ArrayList<String>();

    // Banned H264 encoders/decoders
    // Crashes
    bannedYuvCodecs.add("OMX.SEC.avc.enc");
    bannedYuvCodecs.add("OMX.SEC.h263.enc");
    // Don't support 3.1 profile used by Jitsi
    bannedYuvCodecs.add("OMX.Nvidia.h264.decode");
    // bannedYuvCodecs.add("OMX.SEC.avc.dec");

    // Banned VP8 encoders/decoders
    bannedYuvCodecs.add("OMX.SEC.vp8.dec");
    // This one works only for res 176x144
    bannedYuvCodecs.add("OMX.google.vpx.encoder");

    for (int codecIndex = 0, codecCount = MediaCodecList.getCodecCount();
        codecIndex < codecCount;
        codecIndex++) {
      MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(codecIndex);
      logger.info(
          "Discovered codec: "
              + codecInfo.getName()
              + "/"
              + Arrays.toString(codecInfo.getSupportedTypes()));
      CodecInfo ci = CodecInfo.getCodecInfo(codecInfo);
      if (ci != null) {
        codecs.add(ci);
        ci.setBanned(bannedYuvCodecs.contains(ci.getName()));
      }
    }
    logger.info("Selected H264 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_H264, true));
    logger.info("Selected H264 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_H264, false));
    logger.info("Selected H263 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_H263, true));
    logger.info("Selected H263 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_H263, false));
    logger.info("Selected VP8 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_VP8, true));
    logger.info("Selected VP8 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_VP8, false));
  }
Example #14
/**
 * Intercepts RTX (RFC-4588) packets coming from an {@link RtpChannel}, and removes their RTX
 * encapsulation. Allows packets to be retransmitted to a channel (using the RTX format if the
 * destination supports it).
 *
 * @author Boris Grozev
 * @author George Politis
 */
public class RtxTransformer extends SinglePacketTransformerAdapter implements TransformEngine {
  /**
   * The {@link Logger} used by the {@link RtxTransformer} class to print debug information. Note
   * that instances should use {@link #logger} instead.
   */
  private static final Logger classLogger = Logger.getLogger(RtxTransformer.class);

  /** The <tt>RtpChannel</tt> for the transformer. */
  private RtpChannel channel;

  /** Maps an RTX SSRC to the last RTP sequence number sent with that SSRC. */
  private final Map<Long, Integer> rtxSequenceNumbers = new HashMap<>();

  /** The {@link Logger} to be used by this instance to print debug information. */
  private final Logger logger;

  /**
   * The payload type number configured for RTX (RFC-4588), or -1 if none is configured (the other
   * end does not support rtx).
   */
  private byte rtxPayloadType = -1;

  /** The "associated payload type" number for RTX. */
  private byte rtxAssociatedPayloadType = -1;

  /**
   * Initializes a new <tt>RtxTransformer</tt> with a specific <tt>RtpChannel</tt>.
   *
   * @param channel the <tt>RtpChannel</tt> for the transformer.
   */
  RtxTransformer(RtpChannel channel) {
    super(RTPPacketPredicate.INSTANCE);

    this.channel = channel;
    this.logger = Logger.getLogger(classLogger, channel.getContent().getConference().getLogger());
  }

  /** Implements {@link PacketTransformer#transform(RawPacket[])}. {@inheritDoc} */
  @Override
  public RawPacket reverseTransform(RawPacket pkt) {
    if (isRtx(pkt)) {
      pkt = deRtx(pkt);
    }

    return pkt;
  }

  /**
   * Determines whether {@code pkt} is an RTX packet.
   *
   * @param pkt the packet to check.
   * @return {@code true} iff {@code pkt} is an RTX packet.
   */
  private boolean isRtx(RawPacket pkt) {
    byte rtxPt = rtxPayloadType;
    return rtxPt != -1 && rtxPt == pkt.getPayloadType();
  }

  /**
   * Removes the RTX encapsulation from a packet.
   *
   * @param pkt the packet to remove the RTX encapsulation from.
   * @return the original media packet represented by {@code pkt}, or null if we couldn't
   *     reconstruct the original packet.
   */
  private RawPacket deRtx(RawPacket pkt) {
    boolean success = false;

    if (pkt.getPayloadLength() - pkt.getPaddingSize() < 2) {
      // We need at least 2 bytes to read the OSN field.
      if (logger.isDebugEnabled()) {
        logger.debug("Dropping an incoming RTX packet with padding only: " + pkt);
      }
      return null;
    }

    long mediaSsrc = getPrimarySsrc(pkt);
    if (mediaSsrc != -1) {
      if (rtxAssociatedPayloadType != -1) {
        int osn = pkt.getOriginalSequenceNumber();
        // Remove the RTX header by moving the RTP header two bytes
        // right.
        byte[] buf = pkt.getBuffer();
        int off = pkt.getOffset();
        System.arraycopy(buf, off, buf, off + 2, pkt.getHeaderLength());

        pkt.setOffset(off + 2);
        pkt.setLength(pkt.getLength() - 2);

        pkt.setSSRC((int) mediaSsrc);
        pkt.setSequenceNumber(osn);
        pkt.setPayloadType(rtxAssociatedPayloadType);
        success = true;
      } else {
        logger.warn(
            "RTX packet received, but no APT is defined. Packet "
                + "SSRC "
                + pkt.getSSRCAsLong()
                + ", associated media"
                + " SSRC "
                + mediaSsrc);
      }
    }

    // If we failed to handle the RTX packet, drop it.
    return success ? pkt : null;
  }

  /** Implements {@link TransformEngine#getRTPTransformer()}. */
  @Override
  public PacketTransformer getRTPTransformer() {
    return this;
  }

  /** Implements {@link TransformEngine#getRTCPTransformer()}. */
  @Override
  public PacketTransformer getRTCPTransformer() {
    return null;
  }

  /**
   * Returns the sequence number to use for a specific RTX packet, which is based on the packet's
   * original sequence number.
   *
   * <p>Because we terminate the RTX format, and with simulcast we might translate RTX packets from
   * multiple SSRCs into the same SSRC, we keep count of the RTX packets (and their sequence
   * numbers) which we sent for each SSRC.
   *
   * @param ssrc the SSRC of the RTX stream for the packet.
   * @return the sequence number which should be used for the next RTX packet sent using SSRC
   *     <tt>ssrc</tt>.
   */
  private int getNextRtxSequenceNumber(long ssrc) {
    Integer seq;
    synchronized (rtxSequenceNumbers) {
      seq = rtxSequenceNumbers.get(ssrc);
      if (seq == null) seq = new Random().nextInt(0xffff);
      else seq++;

      rtxSequenceNumbers.put(ssrc, seq);
    }

    return seq;
  }

  /**
   * Tries to find an SSRC paired with {@code ssrc} in an FID group in one of the channels from
   * {@link #channel}'s {@code Content}. Returns -1 on failure.
   *
   * @param pkt the {@code RawPacket} that holds the RTP packet for which to find a paired SSRC.
   * @return An SSRC paired with {@code ssrc} in an FID group, or -1.
   */
  private long getRtxSsrc(RawPacket pkt) {
    StreamRTPManager receiveRTPManager =
        channel.getStream().getRTPTranslator().findStreamRTPManagerByReceiveSSRC(pkt.getSSRC());

    MediaStreamTrackReceiver receiver = null;
    if (receiveRTPManager != null) {
      MediaStream receiveStream = receiveRTPManager.getMediaStream();
      if (receiveStream != null) {
        receiver = receiveStream.getMediaStreamTrackReceiver();
      }
    }

    if (receiver == null) {
      return -1;
    }

    RTPEncoding encoding = receiver.resolveRTPEncoding(pkt);
    if (encoding == null) {
      logger.warn(
          "encoding_not_found"
              + ",stream_hash="
              + channel.getStream().hashCode()
              + " ssrc="
              + pkt.getSSRCAsLong());
      return -1;
    }

    return encoding.getRTXSSRC();
  }
  /**
   * Retransmits a packet to {@link #channel}. If the destination supports the RTX format, the
   * packet will be encapsulated in RTX, otherwise, the packet will be retransmitted as-is.
   *
   * @param pkt the packet to retransmit.
   * @param after the {@code TransformEngine} in the chain of {@code TransformEngine}s of the
   *     associated {@code MediaStream} after which the injection of {@code pkt} is to begin
   * @return {@code true} if the packet was successfully retransmitted, {@code false} otherwise.
   */
  public boolean retransmit(RawPacket pkt, TransformEngine after) {
    boolean destinationSupportsRtx = rtxPayloadType != -1;
    boolean retransmitPlain;

    if (destinationSupportsRtx) {
      long rtxSsrc = getRtxSsrc(pkt);

      if (rtxSsrc == -1) {
        logger.warn(
            "Cannot find SSRC for RTX, retransmitting plain. " + "SSRC=" + pkt.getSSRCAsLong());
        retransmitPlain = true;
      } else {
        retransmitPlain = !encapsulateInRtxAndTransmit(pkt, rtxSsrc, after);
      }
    } else {
      retransmitPlain = true;
    }

    if (retransmitPlain) {
      MediaStream mediaStream = channel.getStream();

      if (mediaStream != null) {
        try {
          mediaStream.injectPacket(pkt, /* data */ true, after);
        } catch (TransmissionFailedException tfe) {
          logger.warn("Failed to retransmit a packet.");
          return false;
        }
      }
    }

    return true;
  }

  /**
   * Notifies this instance that the dynamic payload types of the associated {@link MediaStream}
   * have changed.
   */
  public void onDynamicPayloadTypesChanged() {
    rtxPayloadType = -1;
    rtxAssociatedPayloadType = -1;

    MediaStream mediaStream = channel.getStream();

    Map<Byte, MediaFormat> mediaFormatMap = mediaStream.getDynamicRTPPayloadTypes();

    Iterator<Map.Entry<Byte, MediaFormat>> it = mediaFormatMap.entrySet().iterator();

    while (it.hasNext() && rtxPayloadType == -1) {
      Map.Entry<Byte, MediaFormat> entry = it.next();
      MediaFormat format = entry.getValue();
      if (!Constants.RTX.equalsIgnoreCase(format.getEncoding())) {
        continue;
      }

      // XXX(gp) we freak out if multiple codecs with RTX support are
      // present.
      rtxPayloadType = entry.getKey();
      rtxAssociatedPayloadType = Byte.parseByte(format.getFormatParameters().get("apt"));
    }
  }

  /**
   * Encapsulates {@code pkt} in the RTX format, using {@code rtxSsrc} as its SSRC, and transmits it
   * to {@link #channel} by injecting it in the {@code MediaStream}.
   *
   * @param pkt the packet to transmit.
   * @param rtxSsrc the SSRC for the RTX stream.
   * @param after the {@code TransformEngine} in the chain of {@code TransformEngine}s of the
   *     associated {@code MediaStream} after which the injection of {@code pkt} is to begin
   * @return {@code true} if the packet was successfully retransmitted, {@code false} otherwise.
   */
  private boolean encapsulateInRtxAndTransmit(RawPacket pkt, long rtxSsrc, TransformEngine after) {
    byte[] buf = pkt.getBuffer();
    int len = pkt.getLength();
    int off = pkt.getOffset();

    byte[] newBuf = new byte[len + 2];
    RawPacket rtxPkt = new RawPacket(newBuf, 0, len + 2);

    int osn = pkt.getSequenceNumber();
    int headerLength = pkt.getHeaderLength();
    int payloadLength = pkt.getPayloadLength();

    // Copy the header.
    System.arraycopy(buf, off, newBuf, 0, headerLength);

    // Set the OSN field.
    newBuf[headerLength] = (byte) ((osn >> 8) & 0xff);
    newBuf[headerLength + 1] = (byte) (osn & 0xff);

    // Copy the payload.
    System.arraycopy(buf, off + headerLength, newBuf, headerLength + 2, payloadLength);

    MediaStream mediaStream = channel.getStream();
    if (mediaStream != null) {
      rtxPkt.setSSRC((int) rtxSsrc);
      rtxPkt.setPayloadType(rtxPayloadType);
      // Only call getNextRtxSequenceNumber() when we're sure we're going
      // to transmit a packet, because it consumes a sequence number.
      rtxPkt.setSequenceNumber(getNextRtxSequenceNumber(rtxSsrc));
      try {
        mediaStream.injectPacket(rtxPkt, /* data */ true, after);
      } catch (TransmissionFailedException tfe) {
        logger.warn("Failed to transmit an RTX packet.");
        return false;
      }
    }

    return true;
  }

  /**
   * Returns the SSRC paired with <tt>ssrc</tt> in an FID source-group, if any. If none is found,
   * returns -1.
   *
   * @return the SSRC paired with <tt>ssrc</tt> in an FID source-group, if any. If none is found,
   *     returns -1.
   */
  private long getPrimarySsrc(RawPacket pkt) {
    MediaStreamTrackReceiver receiver = channel.getStream().getMediaStreamTrackReceiver();

    if (receiver == null) {
      if (logger.isDebugEnabled()) {
        logger.debug("Dropping an incoming RTX packet from an unknown source.");
      }
      return -1;
    }

    RTPEncoding encoding = receiver.resolveRTPEncoding(pkt);
    if (encoding == null) {
      if (logger.isDebugEnabled()) {
        logger.debug("Dropping an incoming RTX packet from an unknown source.");
      }
      return -1;
    }

    return encoding.getPrimarySSRC();
  }
}
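onDynamicPayloadTypesChanged() above derives the RTX mapping from the negotiated payload types. A self-contained sketch of the relation it extracts, using a hypothetical SDP negotiation (VP8 on 96, RTX on 97 with apt=96):

import java.util.HashMap;
import java.util.Map;

public class RtxAptSketch {
  public static void main(String[] args) {
    // Hypothetical negotiation, as it would appear in SDP:
    //   a=rtpmap:96 VP8/90000
    //   a=rtpmap:97 rtx/90000
    //   a=fmtp:97 apt=96
    Map<String, String> fmtp = new HashMap<String, String>();
    fmtp.put("apt", "96");

    byte rtxPayloadType = 97;
    byte rtxAssociatedPayloadType = Byte.parseByte(fmtp.get("apt"));

    // An incoming packet with payload type 97 is RTX; deRtx() rewrites its
    // payload type to 96 after stripping the OSN.
    System.out.println("RTX pt=" + rtxPayloadType + " carries pt=" + rtxAssociatedPayloadType);
  }
}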
Example #15
  /**
   * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
   * the <tt>Processor</tt>s that this instance uses to transcode media.
   *
   * @param ev the event to handle.
   */
  public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
      return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
      logger.warn("Event from an orphaned processor, ignoring: " + ev);
      return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
      if (logger.isInfoEnabled()) {
        logger.info(
            "Configured processor for ReceiveStream ssrc="
                + desc.ssrc
                + " ("
                + desc.format
                + ")"
                + " "
                + System.currentTimeMillis());
      }

      boolean audio = desc.format instanceof AudioFormat;

      if (audio) {
        ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
        if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
          logger.error(
              "Failed to set the Processor content "
                  + "descriptor to "
                  + AUDIO_CONTENT_DESCRIPTOR
                  + ". Actual result: "
                  + cd);
          removeReceiveStream(desc, false);
          return;
        }
      }

      for (TrackControl track : processor.getTrackControls()) {
        Format trackFormat = track.getFormat();

        if (audio) {
          final long ssrc = desc.ssrc;
          SilenceEffect silenceEffect;
          if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
            silenceEffect = new SilenceEffect(48000);
          } else {
            // We haven't tested that the RTP timestamps survive
            // the journey through the chain when codecs other than
            // opus are in use, so for the moment we rely on FMJ's
            // timestamps for non-opus formats.
            silenceEffect = new SilenceEffect();
          }

          silenceEffect.setListener(
              new SilenceEffect.Listener() {
                boolean first = true;

                @Override
                public void onSilenceNotInserted(long timestamp) {
                  if (first) {
                    first = false;
                    // send event only
                    audioRecordingStarted(ssrc, timestamp);
                  } else {
                    // change file and send event
                    resetRecording(ssrc, timestamp);
                  }
                }
              });
          desc.silenceEffect = silenceEffect;
          AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
          audioLevelEffect.setAudioLevelListener(
              new SimpleAudioLevelListener() {
                @Override
                public void audioLevelChanged(int level) {
                  activeSpeakerDetector.levelChanged(ssrc, level);
                }
              });

          try {
            // We add an effect, which will insert "silence" in
            // place of lost packets.
            track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
          } catch (UnsupportedPlugInException upie) {
            logger.warn("Failed to insert silence effect: " + upie);
            // But do go on, a recording without extra silence is
            // better than nothing ;)
          }
        } else {
          // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
          if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format);
          else {
            logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
            // we currently only support vp8
            removeReceiveStream(desc, false);
            return;
          }
        }
      }

      processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      desc.dataSource = processor.getDataOutput();

      long ssrc = desc.ssrc;
      boolean audio = desc.format instanceof AudioFormat;
      String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

      // XXX '\' on windows?
      String filename = getNextFilename(path + "/" + ssrc, suffix);
      desc.filename = filename;

      DataSink dataSink;
      if (audio) {
        try {
          dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
        } catch (NoDataSinkException ndse) {
          logger.error("Could not create DataSink: " + ndse);
          removeReceiveStream(desc, false);
          return;
        }

      } else {
        dataSink = new WebmDataSink(filename, desc.dataSource);
      }

      if (logger.isInfoEnabled())
        logger.info(
            "Created DataSink ("
                + dataSink
                + ") for SSRC="
                + ssrc
                + ". Output filename: "
                + filename);
      try {
        dataSink.open();
      } catch (IOException e) {
        logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (!audio) {
        final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
        webmDataSink.setSsrc(ssrc);
        webmDataSink.setEventHandler(eventHandler);
        webmDataSink.setKeyFrameControl(
            new KeyFrameControlAdapter() {
              @Override
              public boolean requestKeyFrame(boolean urgent) {
                return requestFIR(webmDataSink);
              }
            });
      }

      try {
        dataSink.start();
      } catch (IOException e) {
        logger.error(
            "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc);

      desc.dataSink = dataSink;

      processor.start();
    } else if (logger.isDebugEnabled()) {
      logger.debug(
          "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
  }
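For readers unfamiliar with JMF/FMJ, a minimal sketch of the asynchronous Processor state machine this handler drives: configure, then realize on ConfigureCompleteEvent, then start on RealizeCompleteEvent; the transcoding details are elided.

import javax.media.ConfigureCompleteEvent;
import javax.media.ControllerEvent;
import javax.media.ControllerListener;
import javax.media.Manager;
import javax.media.Processor;
import javax.media.RealizeCompleteEvent;
import javax.media.protocol.DataSource;

public class ProcessorStateSketch implements ControllerListener {
  public void transcode(DataSource dataSource) throws Exception {
    Processor processor = Manager.createProcessor(dataSource);
    processor.addControllerListener(this);
    processor.configure(); // asynchronous; completion is delivered as an event
  }

  @Override
  public void controllerUpdate(ControllerEvent ev) {
    Processor p = (Processor) ev.getSourceController();
    if (ev instanceof ConfigureCompleteEvent) {
      // Content descriptor and per-track formats would be set here.
      p.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      // p.getDataOutput() is now valid and can feed a DataSink.
      p.start();
    }
  }
}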
Example #16
/**
 * A <tt>Recorder</tt> implementation which attaches to an <tt>RTPTranslator</tt>.
 *
 * @author Vladimir Marinov
 * @author Boris Grozev
 */
public class RecorderRtpImpl
    implements Recorder, ReceiveStreamListener, ActiveSpeakerChangedListener, ControllerListener {
  /**
   * The <tt>Logger</tt> used by the <tt>RecorderRtpImpl</tt> class and its instances for logging
   * output.
   */
  private static final Logger logger = Logger.getLogger(RecorderRtpImpl.class);

  // values hard-coded to match Chrome
  // TODO: allow them to be set dynamically
  private static final byte redPayloadType = 116;
  private static final byte ulpfecPayloadType = 117;
  private static final byte vp8PayloadType = 100;
  private static final byte opusPayloadType = 111;
  private static final Format redFormat = new VideoFormat(Constants.RED);
  private static final Format ulpfecFormat = new VideoFormat(Constants.ULPFEC);
  private static final Format vp8RtpFormat = new VideoFormat(Constants.VP8_RTP);
  private static final Format vp8Format = new VideoFormat(Constants.VP8);
  private static final Format opusFormat =
      new AudioFormat(Constants.OPUS_RTP, 48000, Format.NOT_SPECIFIED, Format.NOT_SPECIFIED);

  private static final int FMJ_VIDEO_JITTER_BUFFER_MIN_SIZE = 300;

  /** The <tt>ContentDescriptor</tt> to use when saving audio. */
  private static final ContentDescriptor AUDIO_CONTENT_DESCRIPTOR =
      new ContentDescriptor(FileTypeDescriptor.MPEG_AUDIO);

  /** The suffix for audio file names. */
  private static final String AUDIO_FILENAME_SUFFIX = ".mp3";

  /** The suffix for video file names. */
  private static final String VIDEO_FILENAME_SUFFIX = ".webm";

  static {
    Registry.set("video_jitter_buffer_MIN_SIZE", FMJ_VIDEO_JITTER_BUFFER_MIN_SIZE);
  }

  /** The <tt>RTPTranslator</tt> that this recorder is/will be attached to. */
  private RTPTranslatorImpl translator;

  /**
   * The custom <tt>RTPConnector</tt> that this instance uses to read from {@link #translator} and
   * write to {@link #rtpManager}.
   */
  private RTPConnectorImpl rtpConnector;

  /** Path to the directory where the output files will be stored. */
  private String path;

  /** The <tt>RTCPFeedbackMessageSender</tt> that we use to send RTCP FIR messages. */
  private RTCPFeedbackMessageSender rtcpFeedbackSender;

  /**
   * The {@link RTPManager} instance we use to handle the packets coming from
   * <tt>RTPTranslator</tt>.
   */
  private RTPManager rtpManager;

  /**
   * The instance which should be notified when events related to recordings (such as the start or
   * end of a recording) occur.
   */
  private RecorderEventHandlerImpl eventHandler;

  /**
   * Holds the <tt>ReceiveStreams</tt> added to this instance by {@link #rtpManager} and additional
   * information associated with each one (e.g. the <tt>Processor</tt>, if any, used for it).
   */
  private final HashSet<ReceiveStreamDesc> receiveStreams = new HashSet<ReceiveStreamDesc>();

  private final Set<Long> activeVideoSsrcs = new HashSet<Long>();

  /**
   * The <tt>ActiveSpeakerDetector</tt> which will listen to the audio receive streams of this
   * <tt>RecorderRtpImpl</tt> and notify it about changes to the active speaker via calls to {@link
   * #activeSpeakerChanged(long)}
   */
  private ActiveSpeakerDetector activeSpeakerDetector = null;

  StreamRTPManager streamRTPManager;

  private SynchronizerImpl synchronizer;
  private boolean started = false;

  /**
   * Constructor.
   *
   * @param translator the <tt>RTPTranslator</tt> to which this instance will attach in order to
   *     record media.
   */
  public RecorderRtpImpl(RTPTranslator translator) {
    this.translator = (RTPTranslatorImpl) translator;
    activeSpeakerDetector = new ActiveSpeakerDetectorImpl();
    activeSpeakerDetector.addActiveSpeakerChangedListener(this);
  }

  /** Implements {@link Recorder#addListener(Recorder.Listener)}. */
  @Override
  public void addListener(Listener listener) {}

  /** Implements {@link Recorder#removeListener(Recorder.Listener)}. */
  @Override
  public void removeListener(Listener listener) {}

  /** Implements {@link Recorder#getSupportedFormats()}. */
  @Override
  public List<String> getSupportedFormats() {
    return null;
  }

  /** Implements {@link Recorder#setMute(boolean)}. */
  @Override
  public void setMute(boolean mute) {}

  /**
   * Implements {@link Recorder#getFilename()}. Returns null, since we don't have a (single)
   * associated filename.
   */
  @Override
  public String getFilename() {
    return null;
  }

  /**
   * Sets the instance which should be notified when events related to recordings (such as the start
   * or end of a recording) occur.
   */
  public void setEventHandler(RecorderEventHandler eventHandler) {
    if (this.eventHandler == null
        || (this.eventHandler != eventHandler && this.eventHandler.handler != eventHandler)) {
      if (this.eventHandler == null) this.eventHandler = new RecorderEventHandlerImpl(eventHandler);
      else this.eventHandler.handler = eventHandler;
    }
  }

  /**
   * {@inheritDoc}
   *
   * @param format unused, since this implementation records multiple streams using potentially
   *     different formats.
   * @param dirname the path to the directory into which this <tt>Recorder</tt> will store the
   *     recorded media files.
   */
  @Override
  public void start(String format, String dirname) throws IOException, MediaException {
    if (logger.isInfoEnabled()) logger.info("Starting, format=" + format + " " + hashCode());
    path = dirname;

    MediaService mediaService = LibJitsi.getMediaService();

    /*
     * Note that we use only one RTPConnector for both the RTPTranslator
     * and the RTPManager instances. The this.translator will write to its
     * output streams, and this.rtpManager will read from its input streams.
     */
    rtpConnector = new RTPConnectorImpl(redPayloadType, ulpfecPayloadType);

    rtpManager = RTPManager.newInstance();

    /*
     * Add the formats that we know about.
     */
    rtpManager.addFormat(vp8RtpFormat, vp8PayloadType);
    rtpManager.addFormat(opusFormat, opusPayloadType);
    rtpManager.addReceiveStreamListener(this);

    /*
     * Note: When this.rtpManager sends RTCP sender/receiver reports, they
     * will end up being written to its own input stream. This is not
     * expected to cause problems, but might be something to keep an eye on.
     */
    rtpManager.initialize(rtpConnector);

    /*
     * Register a fake call participant.
     * TODO: can we use a more generic MediaStream here?
     */
    streamRTPManager =
        new StreamRTPManager(
            mediaService.createMediaStream(
                new MediaDeviceImpl(new CaptureDeviceInfo(), MediaType.VIDEO)),
            translator);

    streamRTPManager.initialize(rtpConnector);

    rtcpFeedbackSender = translator.getRtcpFeedbackMessageSender();

    translator.addFormat(streamRTPManager, opusFormat, opusPayloadType);

    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, redFormat,
    // redPayloadType);
    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, ulpfecFormat,
    // ulpfecPayloadType);
    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager,
    // mediaFormatImpl.getFormat(), vp8PayloadType);

    started = true;
  }

  @Override
  public void stop() {
    if (started) {
      if (logger.isInfoEnabled()) logger.info("Stopping " + hashCode());

      // remove the recorder from the translator (i.e. stop new packets from
      // being written to rtpConnector)
      if (streamRTPManager != null) streamRTPManager.dispose();

      HashSet<ReceiveStreamDesc> streamsToRemove = new HashSet<ReceiveStreamDesc>();
      synchronized (receiveStreams) {
        streamsToRemove.addAll(receiveStreams);
      }

      for (ReceiveStreamDesc r : streamsToRemove) removeReceiveStream(r, false);

      rtpConnector.rtcpPacketTransformer.close();
      rtpConnector.rtpPacketTransformer.close();
      rtpManager.dispose();

      started = false;
    }
  }

  /**
   * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
   *
   * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
   */
  @Override
  public void update(ReceiveStreamEvent event) {
    if (event == null) return;
    ReceiveStream receiveStream = event.getReceiveStream();

    if (event instanceof NewReceiveStreamEvent) {
      if (receiveStream == null) {
        logger.warn("NewReceiveStreamEvent: null");
        return;
      }

      final long ssrc = getReceiveStreamSSRC(receiveStream);

      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);

      if (receiveStreamDesc != null) {
        String s = "NewReceiveStreamEvent for an existing SSRC. ";
        if (receiveStream != receiveStreamDesc.receiveStream)
          s += "(but different ReceiveStream object)";
        logger.warn(s);
        return;
      } else receiveStreamDesc = new ReceiveStreamDesc(receiveStream);

      if (logger.isInfoEnabled()) logger.info("New ReceiveStream, ssrc=" + ssrc);

      // Find the format of the ReceiveStream
      DataSource dataSource = receiveStream.getDataSource();
      if (dataSource instanceof PushBufferDataSource) {
        Format format = null;
        PushBufferDataSource pbds = (PushBufferDataSource) dataSource;
        for (PushBufferStream pbs : pbds.getStreams()) {
          if ((format = pbs.getFormat()) != null) break;
        }

        if (format == null) {
          logger.error("Failed to handle new ReceiveStream: " + "Failed to determine format");
          return;
        }

        receiveStreamDesc.format = format;
      } else {
        logger.error("Failed to handle new ReceiveStream: " + "Unsupported DataSource");
        return;
      }

      int rtpClockRate = -1;
      if (receiveStreamDesc.format instanceof AudioFormat)
        rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
      else if (receiveStreamDesc.format instanceof VideoFormat) rtpClockRate = 90000;
      getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

      // create a Processor and configure it
      Processor processor = null;
      try {
        processor = Manager.createProcessor(receiveStream.getDataSource());
      } catch (NoProcessorException npe) {
        logger.error("Failed to create Processor: ", npe);
        return;
      } catch (IOException ioe) {
        logger.error("Failed to create Processor: ", ioe);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Created processor for SSRC=" + ssrc);

      processor.addControllerListener(this);
      receiveStreamDesc.processor = processor;

      final int streamCount;
      synchronized (receiveStreams) {
        receiveStreams.add(receiveStreamDesc);
        streamCount = receiveStreams.size();
      }

      /*
       * XXX TODO IRBABOON
       * This is a terrible hack which works around a failure to realize()
       * some of the Processor-s for audio streams, when multiple streams
       * start nearly simultaneously. The cause of the problem is currently
       * unknown (and synchronizing all FMJ calls in RecorderRtpImpl
       * does not help).
       * XXX TODO NOOBABRI
       */
      if (receiveStreamDesc.format instanceof AudioFormat) {
        final Processor p = processor;
        new Thread() {
          @Override
          public void run() {
            // delay configuring the processors for the different
            // audio streams to decrease the probability that they
            // run together.
            try {
              int ms = 450 * (streamCount - 1);
              logger.warn(
                  "Sleeping for "
                      + ms
                      + "ms before"
                      + " configuring processor for SSRC="
                      + ssrc
                      + " "
                      + System.currentTimeMillis());
              Thread.sleep(ms);
            } catch (Exception e) {
              // ignore the interruption and configure immediately
            }

            p.configure();
          }
          // Use start(), not run(): run() would execute on the current thread
          // and block further ReceiveStreamEvent handling while sleeping.
        }.start();
      } else {
        processor.configure();
      }
    } else if (event instanceof TimeoutEvent) {
      if (receiveStream == null) {
        // TODO: we might want to get the list of ReceiveStream-s from
        // rtpManager and compare it to our list, to see if we should
        // remove a stream.
        logger.warn("TimeoutEvent: null.");
        return;
      }

      // FMJ silently creates new ReceiveStream instances, so we have to
      // recognize them by the SSRC.
      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream));
      if (receiveStreamDesc != null) {
        if (logger.isInfoEnabled()) {
          logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc);
        }

        removeReceiveStream(receiveStreamDesc, true);
      }
    } else if (logger.isInfoEnabled()) {
      logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event);
    }
  }

  private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) {
    if (receiveStream.format instanceof VideoFormat) {
      rtpConnector.packetBuffer.disable(receiveStream.ssrc);
      emptyPacketBuffer(receiveStream.ssrc);
    }

    if (receiveStream.dataSink != null) {
      try {
        receiveStream.dataSink.stop();
      } catch (IOException e) {
        logger.error("Failed to stop DataSink " + e);
      }

      receiveStream.dataSink.close();
    }

    if (receiveStream.processor != null) {
      receiveStream.processor.stop();
      receiveStream.processor.close();
    }

    DataSource dataSource = receiveStream.receiveStream.getDataSource();
    if (dataSource != null) {
      try {
        dataSource.stop();
      } catch (IOException ioe) {
        logger.warn("Failed to stop DataSource");
      }
      dataSource.disconnect();
    }

    synchronized (receiveStreams) {
      receiveStreams.remove(receiveStream);
    }
  }

  /**
   * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
   * the <tt>Processor</tt>s that this instance uses to transcode media.
   *
   * @param ev the event to handle.
   */
  public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
      return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
      logger.warn("Event from an orphaned processor, ignoring: " + ev);
      return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
      if (logger.isInfoEnabled()) {
        logger.info(
            "Configured processor for ReceiveStream ssrc="
                + desc.ssrc
                + " ("
                + desc.format
                + ")"
                + " "
                + System.currentTimeMillis());
      }

      boolean audio = desc.format instanceof AudioFormat;

      if (audio) {
        ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
        if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
          logger.error(
              "Failed to set the Processor content "
                  + "descriptor to "
                  + AUDIO_CONTENT_DESCRIPTOR
                  + ". Actual result: "
                  + cd);
          removeReceiveStream(desc, false);
          return;
        }
      }

      for (TrackControl track : processor.getTrackControls()) {
        Format trackFormat = track.getFormat();

        if (audio) {
          final long ssrc = desc.ssrc;
          SilenceEffect silenceEffect;
          if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
            silenceEffect = new SilenceEffect(48000);
          } else {
            // We haven't tested that the RTP timestamps survive
            // the journey through the chain when codecs other than
            // opus are in use, so for the moment we rely on FMJ's
            // timestamps for non-opus formats.
            silenceEffect = new SilenceEffect();
          }

          silenceEffect.setListener(
              new SilenceEffect.Listener() {
                boolean first = true;

                @Override
                public void onSilenceNotInserted(long timestamp) {
                  if (first) {
                    first = false;
                    // send event only
                    audioRecordingStarted(ssrc, timestamp);
                  } else {
                    // change file and send event
                    resetRecording(ssrc, timestamp);
                  }
                }
              });
          desc.silenceEffect = silenceEffect;
          AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
          audioLevelEffect.setAudioLevelListener(
              new SimpleAudioLevelListener() {
                @Override
                public void audioLevelChanged(int level) {
                  activeSpeakerDetector.levelChanged(ssrc, level);
                }
              });

          try {
            // We add an effect, which will insert "silence" in
            // place of lost packets.
            track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
          } catch (UnsupportedPlugInException upie) {
            logger.warn("Failed to insert silence effect: " + upie);
            // But do go on, a recording without extra silence is
            // better than nothing ;)
          }
        } else {
          // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
          if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format);
          else {
            logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
            // we currently only support vp8
            removeReceiveStream(desc, false);
            return;
          }
        }
      }

      processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      desc.dataSource = processor.getDataOutput();

      long ssrc = desc.ssrc;
      boolean audio = desc.format instanceof AudioFormat;
      String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

      // XXX '\' on windows?
      String filename = getNextFilename(path + "/" + ssrc, suffix);
      desc.filename = filename;

      DataSink dataSink;
      if (audio) {
        try {
          dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
        } catch (NoDataSinkException ndse) {
          logger.error("Could not create DataSink: " + ndse);
          removeReceiveStream(desc, false);
          return;
        }

      } else {
        dataSink = new WebmDataSink(filename, desc.dataSource);
      }

      if (logger.isInfoEnabled())
        logger.info(
            "Created DataSink ("
                + dataSink
                + ") for SSRC="
                + ssrc
                + ". Output filename: "
                + filename);
      try {
        dataSink.open();
      } catch (IOException e) {
        logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (!audio) {
        final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
        webmDataSink.setSsrc(ssrc);
        webmDataSink.setEventHandler(eventHandler);
        webmDataSink.setKeyFrameControl(
            new KeyFrameControlAdapter() {
              @Override
              public boolean requestKeyFrame(boolean urgent) {
                return requestFIR(webmDataSink);
              }
            });
      }

      try {
        dataSink.start();
      } catch (IOException e) {
        logger.error(
            "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc);

      desc.dataSink = dataSink;

      processor.start();
    } else if (logger.isDebugEnabled()) {
      logger.debug(
          "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
  }

  /**
   * Restarts the recording for a specific SSRC.
   *
   * @param ssrc the SSRC for which to restart recording.
   * @param timestamp the RTP timestamp (from the first RTP packet of the new recording).
   */
  private void resetRecording(long ssrc, long timestamp) {
    ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);

    // we only restart audio recordings
    if (receiveStream != null && receiveStream.format instanceof AudioFormat) {
      String newFilename = getNextFilename(path + "/" + ssrc, AUDIO_FILENAME_SUFFIX);

      // flush the buffer contained in the MP3 encoder
      String s = "trying to flush ssrc=" + ssrc;
      Processor p = receiveStream.processor;
      if (p != null) {
        s += " p!=null";
        for (TrackControl tc : p.getTrackControls()) {
          Object o = tc.getControl(FlushableControl.class.getName());
          if (o != null) ((FlushableControl) o).flush();
        }
      }
      if (logger.isDebugEnabled()) logger.debug(s);

      if (logger.isInfoEnabled()) {
        logger.info("Restarting recording for SSRC=" + ssrc + ". New filename: " + newFilename);
      }

      receiveStream.dataSink.close();
      receiveStream.dataSink = null;

      // flush the FMJ jitter buffer
      // DataSource ds = receiveStream.receiveStream.getDataSource();
      // if (ds instanceof net.sf.fmj.media.protocol.rtp.DataSource)
      //    ((net.sf.fmj.media.protocol.rtp.DataSource)ds).flush();

      receiveStream.filename = newFilename;
      try {
        receiveStream.dataSink =
            Manager.createDataSink(
                receiveStream.dataSource, new MediaLocator("file:" + newFilename));
      } catch (NoDataSinkException ndse) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ndse);
        removeReceiveStream(receiveStream, false);
        // without this return, the open() below would throw an NPE, since dataSink is null
        return;
      }

      try {
        receiveStream.dataSink.open();
        receiveStream.dataSink.start();
      } catch (IOException ioe) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ioe);
        removeReceiveStream(receiveStream, false);
        return;
      }

      audioRecordingStarted(ssrc, timestamp);
    }
  }

  private void audioRecordingStarted(long ssrc, long timestamp) {
    ReceiveStreamDesc desc = findReceiveStream(ssrc);
    if (desc == null) return;

    RecorderEvent event = new RecorderEvent();
    event.setType(RecorderEvent.Type.RECORDING_STARTED);
    event.setMediaType(MediaType.AUDIO);
    event.setSsrc(ssrc);
    event.setRtpTimestamp(timestamp);
    event.setFilename(desc.filename);

    if (eventHandler != null) eventHandler.handleEvent(event);
  }

  /**
   * Handles a request from a specific <tt>DataSink</tt> to request a keyframe by sending an RTCP
   * feedback FIR message to the media source.
   *
   * @param dataSink the <tt>DataSink</tt> which requests that a keyframe be requested with a FIR
   *     message.
   * @return <tt>true</tt> if a keyframe was successfully requested, <tt>false</tt> otherwise
   */
  private boolean requestFIR(WebmDataSink dataSink) {
    ReceiveStreamDesc desc = findReceiveStream(dataSink);
    if (desc != null && rtcpFeedbackSender != null) {
      return rtcpFeedbackSender.sendFIR((int) desc.ssrc);
    }

    return false;
  }

  /**
   * Returns "prefix"+"suffix" if a file with that name does not exist. Otherwise, returns the
   * first nonexistent filename of the form "prefix-"+i+"suffix", for an integer i. i is bounded by
   * 1000 to prevent hanging, and on failure to find a nonexistent filename the method returns
   * null.
   *
   * @param prefix the part of the filename before the optional "-"+i counter.
   * @param suffix the part of the filename after the optional "-"+i counter.
   * @return a filename which does not yet exist, or null if none could be found.
   */
  private String getNextFilename(String prefix, String suffix) {
    if (!new File(prefix + suffix).exists()) return prefix + suffix;

    int i = 1;
    String s;
    do {
      s = prefix + "-" + i + suffix;
      if (!new File(s).exists()) return s;
      i++;
    } while (i < 1000); // don't hang indefinitely...

    return null;
  }
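
  /*
   * Illustrative only (not part of the original class; the path is
   * hypothetical): the first call would typically yield "/tmp/rec/1234.mp3";
   * on collision "-1", "-2", ... are appended, and null signals that all
   * candidate names were exhausted.
   */
  @SuppressWarnings("unused")
  private void demoNextFilename() {
    String filename = getNextFilename("/tmp/rec/1234", ".mp3");
    if (filename == null) logger.warn("No free filename found");
  }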

  /**
   * Finds the <tt>ReceiveStreamDesc</tt> with a particular <tt>Processor</tt>.
   *
   * @param processor The <tt>Processor</tt> to match.
   * @return the <tt>ReceiveStreamDesc</tt> with a particular <tt>Processor</tt>, or <tt>null</tt>.
   */
  private ReceiveStreamDesc findReceiveStream(Processor processor) {
    if (processor == null) return null;

    synchronized (receiveStreams) {
      for (ReceiveStreamDesc r : receiveStreams) if (processor.equals(r.processor)) return r;
    }

    return null;
  }

  /**
   * Finds the <tt>ReceiveStreamDesc</tt> with a particular <tt>DataSink</tt>.
   *
   * @param dataSink The <tt>DataSink</tt> to match.
   * @return the <tt>ReceiveStreamDesc</tt> with a particular <tt>DataSink</tt>, or <tt>null</tt>.
   */
  private ReceiveStreamDesc findReceiveStream(DataSink dataSink) {
    if (dataSink == null) return null;

    synchronized (receiveStreams) {
      for (ReceiveStreamDesc r : receiveStreams) if (dataSink.equals(r.dataSink)) return r;
    }

    return null;
  }

  /**
   * Finds the <tt>ReceiveStreamDesc</tt> with a particular SSRC.
   *
   * @param ssrc The SSRC to match.
   * @return the <tt>ReceiveStreamDesc</tt> with a particular SSRC, or <tt>null</tt>.
   */
  private ReceiveStreamDesc findReceiveStream(long ssrc) {
    synchronized (receiveStreams) {
      for (ReceiveStreamDesc r : receiveStreams) if (ssrc == r.ssrc) return r;
    }

    return null;
  }

  /**
   * Gets the SSRC of a <tt>ReceiveStream</tt> as a (non-negative) <tt>long</tt>.
   *
   * <p>FMJ stores the 32-bit SSRC values in <tt>int</tt>s, and the <tt>ReceiveStream.getSSRC()</tt>
   * implementation(s) don't take care of converting the negative <tt>int</tt> values sometimes
   * resulting from reading of a 32-bit field into the correct unsigned <tt>long</tt> value. So do
   * the conversion here.
   *
   * @param receiveStream the <tt>ReceiveStream</tt> for which to get the SSRC.
   * @return the SSRC of <tt>receiveStream</tt> as a (non-negative) <tt>long</tt>.
   */
  private long getReceiveStreamSSRC(ReceiveStream receiveStream) {
    return 0xffffffffL & receiveStream.getSSRC();
  }
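
  /*
   * Illustrative only (not part of the original class; the SSRC value is
   * hypothetical, run with -ea for the assert): a sketch of the
   * sign-extension problem the mask above guards against.
   */
  @SuppressWarnings("unused")
  private static void demoUnsignedSsrc() {
    int ssrc = 0xCAFEBABE; // a 32-bit value with the top bit set reads as a negative int
    long withoutMask = ssrc; // sign-extends to 0xFFFFFFFFCAFEBABE
    long withMask = 0xffffffffL & ssrc; // 3405691582L, the correct unsigned value
    assert withoutMask < 0 && withMask == 3405691582L;
  }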

  /**
   * Implements {@link ActiveSpeakerChangedListener#activeSpeakerChanged(long)}. Notifies this
   * <tt>RecorderRtpImpl</tt> that the audio <tt>ReceiveStream</tt> considered active has changed,
   * and that the new active stream has SSRC <tt>ssrc</tt>.
   *
   * @param ssrc the SSRC of the new active stream.
   */
  @Override
  public void activeSpeakerChanged(long ssrc) {
    if (eventHandler != null) {
      RecorderEvent e = new RecorderEvent();
      e.setAudioSsrc(ssrc);
      // TODO: how do we time this?
      e.setInstant(System.currentTimeMillis());
      e.setType(RecorderEvent.Type.SPEAKER_CHANGED);
      e.setMediaType(MediaType.VIDEO);
      eventHandler.handleEvent(e);
    }
  }

  private void handleRtpPacket(RawPacket pkt) {
    if (pkt != null && pkt.getPayloadType() == vp8PayloadType) {
      int ssrc = pkt.getSSRC();
      if (!activeVideoSsrcs.contains(ssrc & 0xffffffffL)) {
        synchronized (activeVideoSsrcs) {
          if (!activeVideoSsrcs.contains(ssrc & 0xffffffffL)) {
            activeVideoSsrcs.add(ssrc & 0xffffffffL);
            rtcpFeedbackSender.sendFIR(ssrc);
          }
        }
      }
    }
  }

  private void handleRtcpPacket(RawPacket pkt) {
    getSynchronizer().addRTCPPacket(pkt);
    // eventHandler may be null (it is null-checked elsewhere), so guard the nudge
    if (eventHandler != null) eventHandler.nudge();
  }

  public SynchronizerImpl getSynchronizer() {
    if (synchronizer == null) synchronizer = new SynchronizerImpl();
    return synchronizer;
  }

  public void setSynchronizer(Synchronizer synchronizer) {
    if (synchronizer instanceof SynchronizerImpl) {
      this.synchronizer = (SynchronizerImpl) synchronizer;
    }
  }

  public void connect(Recorder recorder) {
    if (!(recorder instanceof RecorderRtpImpl)) return;

    ((RecorderRtpImpl) recorder).setSynchronizer(getSynchronizer());
  }

  private void emptyPacketBuffer(long ssrc) {
    RawPacket[] pkts = rtpConnector.packetBuffer.emptyBuffer(ssrc);
    RTPConnectorImpl.OutputDataStreamImpl dataStream;

    try {
      dataStream = rtpConnector.getDataOutputStream();
    } catch (IOException ioe) {
      logger.error("Failed to empty packet buffer for SSRC=" + ssrc + ": " + ioe);
      return;
    }
    for (RawPacket pkt : pkts)
      dataStream.write(
          pkt.getBuffer(), pkt.getOffset(), pkt.getLength(), false /* already transformed */);
  }
  /** The <tt>RTPConnector</tt> implementation used by this <tt>RecorderRtpImpl</tt>. */
  private class RTPConnectorImpl implements RTPConnector {
    private PushSourceStreamImpl controlInputStream;
    private OutputDataStreamImpl controlOutputStream;

    private PushSourceStreamImpl dataInputStream;
    private OutputDataStreamImpl dataOutputStream;

    private SourceTransferHandler dataTransferHandler;
    private SourceTransferHandler controlTransferHandler;

    private RawPacket pendingDataPacket = new RawPacket();
    private RawPacket pendingControlPacket = new RawPacket();

    private PacketTransformer rtpPacketTransformer = null;
    private PacketTransformer rtcpPacketTransformer = null;

    /** The PacketBuffer instance which we use as a jitter buffer. */
    private PacketBuffer packetBuffer;

    private RTPConnectorImpl(byte redPT, byte ulpfecPT) {
      packetBuffer = new PacketBuffer();
      // The chain of transformers will be applied in reverse order for
      // incoming packets.
      TransformEngine transformEngine =
          new TransformEngineChain(
              new TransformEngine[] {
                packetBuffer,
                new TransformEngineImpl(),
                new CompoundPacketEngine(),
                new FECTransformEngine(ulpfecPT, (byte) -1),
                new REDTransformEngine(redPT, (byte) -1)
              });

      rtpPacketTransformer = transformEngine.getRTPTransformer();
      rtcpPacketTransformer = transformEngine.getRTCPTransformer();
    }

    private RTPConnectorImpl() {}

    @Override
    public void close() {
      try {
        if (dataOutputStream != null) dataOutputStream.close();
        if (controlOutputStream != null) controlOutputStream.close();
      } catch (IOException ioe) {
        throw new UndeclaredThrowableException(ioe);
      }
    }

    @Override
    public PushSourceStream getControlInputStream() throws IOException {
      if (controlInputStream == null) {
        controlInputStream = new PushSourceStreamImpl(true);
      }

      return controlInputStream;
    }

    @Override
    public OutputDataStream getControlOutputStream() throws IOException {
      if (controlOutputStream == null) {
        controlOutputStream = new OutputDataStreamImpl(true);
      }

      return controlOutputStream;
    }

    @Override
    public PushSourceStream getDataInputStream() throws IOException {
      if (dataInputStream == null) {
        dataInputStream = new PushSourceStreamImpl(false);
      }

      return dataInputStream;
    }

    @Override
    public OutputDataStreamImpl getDataOutputStream() throws IOException {
      if (dataOutputStream == null) {
        dataOutputStream = new OutputDataStreamImpl(false);
      }

      return dataOutputStream;
    }

    @Override
    public double getRTCPBandwidthFraction() {
      return -1;
    }

    @Override
    public double getRTCPSenderBandwidthFraction() {
      return -1;
    }

    @Override
    public int getReceiveBufferSize() {
      // TODO Auto-generated method stub
      return 0;
    }

    @Override
    public int getSendBufferSize() {
      // TODO Auto-generated method stub
      return 0;
    }

    @Override
    public void setReceiveBufferSize(int arg0) throws IOException {
      // TODO Auto-generated method stub

    }

    @Override
    public void setSendBufferSize(int arg0) throws IOException {
      // TODO Auto-generated method stub
    }

    private class OutputDataStreamImpl implements OutputDataStream {
      boolean isControlStream;
      private RawPacket[] rawPacketArray = new RawPacket[1];

      public OutputDataStreamImpl(boolean isControlStream) {
        this.isControlStream = isControlStream;
      }

      public int write(byte[] buffer, int offset, int length) {
        return write(buffer, offset, length, true);
      }

      public int write(byte[] buffer, int offset, int length, boolean transform) {
        RawPacket pkt = rawPacketArray[0];
        if (pkt == null) pkt = new RawPacket();
        rawPacketArray[0] = pkt;

        byte[] pktBuf = pkt.getBuffer();
        if (pktBuf == null || pktBuf.length < length) {
          pktBuf = new byte[length];
          pkt.setBuffer(pktBuf);
        }
        System.arraycopy(buffer, offset, pktBuf, 0, length);
        pkt.setOffset(0);
        pkt.setLength(length);

        if (transform) {
          PacketTransformer packetTransformer =
              isControlStream ? rtcpPacketTransformer : rtpPacketTransformer;

          if (packetTransformer != null)
            rawPacketArray = packetTransformer.reverseTransform(rawPacketArray);
        }

        SourceTransferHandler transferHandler;
        PushSourceStream pushSourceStream;

        try {
          if (isControlStream) {
            transferHandler = controlTransferHandler;
            pushSourceStream = getControlInputStream();
          } else {
            transferHandler = dataTransferHandler;
            pushSourceStream = getDataInputStream();
          }
        } catch (IOException ioe) {
          throw new UndeclaredThrowableException(ioe);
        }

        for (int i = 0; i < rawPacketArray.length; i++) {
          RawPacket packet = rawPacketArray[i];

          // keep the first element for reuse
          if (i != 0) rawPacketArray[i] = null;

          if (packet != null) {
            if (isControlStream) pendingControlPacket = packet;
            else pendingDataPacket = packet;

            if (transferHandler != null) {
              transferHandler.transferData(pushSourceStream);
            }
          }
        }

        return length;
      }

      public void close() throws IOException {}
    }

    /**
     * A dummy implementation of {@link PushSourceStream}.
     *
     * @author Vladimir Marinov
     */
    private class PushSourceStreamImpl implements PushSourceStream {

      private boolean isControlStream = false;

      public PushSourceStreamImpl(boolean isControlStream) {
        this.isControlStream = isControlStream;
      }

      /** Not implemented because there are currently no uses of the underlying functionality. */
      @Override
      public boolean endOfStream() {
        return false;
      }

      /** Not implemented because there are currently no uses of the underlying functionality. */
      @Override
      public ContentDescriptor getContentDescriptor() {
        return null;
      }

      /** Not implemented because there are currently no uses of the underlying functionality. */
      @Override
      public long getContentLength() {
        return 0;
      }

      /** Not implemented because there are currently no uses of the underlying functionality. */
      @Override
      public Object getControl(String arg0) {
        return null;
      }

      /** Not implemented because there are currently no uses of the underlying functionality. */
      @Override
      public Object[] getControls() {
        return null;
      }

      /** Returns the length of the pending packet, if any, as a hint for the buffer size needed by {@link #read(byte[], int, int)}. */
      @Override
      public int getMinimumTransferSize() {
        if (isControlStream) {
          if (pendingControlPacket.getBuffer() != null) {
            return pendingControlPacket.getLength();
          }
        } else {
          if (pendingDataPacket.getBuffer() != null) {
            return pendingDataPacket.getLength();
          }
        }

        return 0;
      }

      @Override
      public int read(byte[] buffer, int offset, int length) throws IOException {

        RawPacket pendingPacket;
        if (isControlStream) {
          pendingPacket = pendingControlPacket;
        } else {
          pendingPacket = pendingDataPacket;
        }
        int bytesToRead = 0;
        byte[] pendingPacketBuffer = pendingPacket.getBuffer();
        if (pendingPacketBuffer != null) {
          int pendingPacketLength = pendingPacket.getLength();
          bytesToRead = length > pendingPacketLength ? pendingPacketLength : length;
          System.arraycopy(
              pendingPacketBuffer, pendingPacket.getOffset(), buffer, offset, bytesToRead);
        }
        return bytesToRead;
      }

      /**
       * {@inheritDoc}
       *
       * <p>We keep the first non-null <tt>SourceTransferHandler</tt> that was set, because we don't
       * want it to be overwritten when we initialize a second <tt>RTPManager</tt> with this
       * <tt>RTPConnector</tt>.
       *
       * <p>See {@link RecorderRtpImpl#start(String, String)}
       */
      @Override
      public void setTransferHandler(SourceTransferHandler transferHandler) {
        if (isControlStream) {
          if (RTPConnectorImpl.this.controlTransferHandler == null) {
            RTPConnectorImpl.this.controlTransferHandler = transferHandler;
          }
        } else {
          if (RTPConnectorImpl.this.dataTransferHandler == null) {
            RTPConnectorImpl.this.dataTransferHandler = transferHandler;
          }
        }
      }
    }

    /**
     * A transform engine implementation which allows <tt>RecorderRtpImpl</tt> to intercept RTP and
     * RTCP packets on their way in.
     */
    private class TransformEngineImpl implements TransformEngine {
      SinglePacketTransformer rtpTransformer =
          new SinglePacketTransformer() {
            @Override
            public RawPacket transform(RawPacket pkt) {
              return pkt;
            }

            @Override
            public RawPacket reverseTransform(RawPacket pkt) {
              RecorderRtpImpl.this.handleRtpPacket(pkt);
              return pkt;
            }

            @Override
            public void close() {}
          };

      SinglePacketTransformer rtcpTransformer =
          new SinglePacketTransformer() {
            @Override
            public RawPacket transform(RawPacket pkt) {
              return pkt;
            }

            @Override
            public RawPacket reverseTransform(RawPacket pkt) {
              RecorderRtpImpl.this.handleRtcpPacket(pkt);
              if (pkt != null && pkt.getRTCPPayloadType() == 203) {
                // An RTCP BYE packet. Remove the receive stream before
                // it gets to FMJ, because we want to, for example,
                // flush the packet buffer before that.

                long ssrc = pkt.getRTCPSSRC() & 0xffffffffL;
                if (logger.isInfoEnabled()) logger.info("RTCP BYE for SSRC=" + ssrc);

                ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);
                if (receiveStream != null) removeReceiveStream(receiveStream, false);
              }

              return pkt;
            }

            @Override
            public void close() {}
          };

      @Override
      public PacketTransformer getRTPTransformer() {
        return rtpTransformer;
      }

      @Override
      public PacketTransformer getRTCPTransformer() {
        return rtcpTransformer;
      }
    }
  }

  private class RecorderEventHandlerImpl implements RecorderEventHandler {
    private RecorderEventHandler handler;
    private final Set<RecorderEvent> pendingEvents = new HashSet<RecorderEvent>();

    private RecorderEventHandlerImpl(RecorderEventHandler handler) {
      this.handler = handler;
    }

    @Override
    public boolean handleEvent(RecorderEvent ev) {
      if (ev == null) return true;
      if (RecorderEvent.Type.RECORDING_STARTED.equals(ev.getType())) {
        long instant = getSynchronizer().getLocalTime(ev.getSsrc(), ev.getRtpTimestamp());
        if (instant != -1) {
          ev.setInstant(instant);
          return handler.handleEvent(ev);
        } else {
          pendingEvents.add(ev);
          return true;
        }
      }
      return handler.handleEvent(ev);
    }

    private void nudge() {
      for (Iterator<RecorderEvent> iter = pendingEvents.iterator(); iter.hasNext(); ) {
        RecorderEvent ev = iter.next();
        long instant = getSynchronizer().getLocalTime(ev.getSsrc(), ev.getRtpTimestamp());
        if (instant != -1) {
          iter.remove();
          ev.setInstant(instant);
          handler.handleEvent(ev);
        }
      }
    }

    @Override
    public void close() {
      for (RecorderEvent ev : pendingEvents) handler.handleEvent(ev);
    }
  }

  /** Represents a <tt>ReceiveStream</tt> for the purposes of this <tt>RecorderRtpImpl</tt>. */
  private class ReceiveStreamDesc {
    /**
     * The actual <tt>ReceiveStream</tt> which is represented by this <tt>ReceiveStreamDesc</tt>.
     */
    private ReceiveStream receiveStream;

    /** The SSRC of the stream. */
    long ssrc;

    /**
     * The <tt>Processor</tt> used to transcode this receive stream into a format appropriate for
     * saving to a file.
     */
    private Processor processor;

    /** The <tt>DataSink</tt> which saves the <tt>this.dataSource</tt> to a file. */
    private DataSink dataSink;

    /**
     * The <tt>DataSource</tt> for this receive stream which is to be saved using a
     * <tt>DataSink</tt> (i.e. the <tt>DataSource</tt> "after" all needed transcoding is done).
     */
    private DataSource dataSource;

    /** The name of the file into which this stream is being saved. */
    private String filename;

    /** The (original) format of this receive stream. */
    private Format format;

    /** The <tt>SilenceEffect</tt> used for this stream (for audio streams only). */
    private SilenceEffect silenceEffect;

    private ReceiveStreamDesc(ReceiveStream receiveStream) {
      this.receiveStream = receiveStream;
      this.ssrc = getReceiveStreamSSRC(receiveStream);
    }
  }
}
Example #17
  /**
   * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
   *
   * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
   */
  @Override
  public void update(ReceiveStreamEvent event) {
    if (event == null) return;
    ReceiveStream receiveStream = event.getReceiveStream();

    if (event instanceof NewReceiveStreamEvent) {
      if (receiveStream == null) {
        logger.warn("NewReceiveStreamEvent: null");
        return;
      }

      final long ssrc = getReceiveStreamSSRC(receiveStream);

      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);

      if (receiveStreamDesc != null) {
        String s = "NewReceiveStreamEvent for an existing SSRC. ";
        if (receiveStream != receiveStreamDesc.receiveStream)
          s += "(but different ReceiveStream object)";
        logger.warn(s);
        return;
      } else receiveStreamDesc = new ReceiveStreamDesc(receiveStream);

      if (logger.isInfoEnabled()) logger.info("New ReceiveStream, ssrc=" + ssrc);

      // Find the format of the ReceiveStream
      DataSource dataSource = receiveStream.getDataSource();
      if (dataSource instanceof PushBufferDataSource) {
        Format format = null;
        PushBufferDataSource pbds = (PushBufferDataSource) dataSource;
        for (PushBufferStream pbs : pbds.getStreams()) {
          if ((format = pbs.getFormat()) != null) break;
        }

        if (format == null) {
          logger.error("Failed to handle new ReceiveStream: " + "Failed to determine format");
          return;
        }

        receiveStreamDesc.format = format;
      } else {
        logger.error("Failed to handle new ReceiveStream: " + "Unsupported DataSource");
        return;
      }

      int rtpClockRate = -1;
      if (receiveStreamDesc.format instanceof AudioFormat)
        rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
      else if (receiveStreamDesc.format instanceof VideoFormat) rtpClockRate = 90000;
      getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

      // create a Processor and configure it
      Processor processor = null;
      try {
        processor = Manager.createProcessor(receiveStream.getDataSource());
      } catch (NoProcessorException npe) {
        logger.error("Failed to create Processor: ", npe);
        return;
      } catch (IOException ioe) {
        logger.error("Failed to create Processor: ", ioe);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Created processor for SSRC=" + ssrc);

      processor.addControllerListener(this);
      receiveStreamDesc.processor = processor;

      final int streamCount;
      synchronized (receiveStreams) {
        receiveStreams.add(receiveStreamDesc);
        streamCount = receiveStreams.size();
      }

      /*
       * XXX TODO IRBABOON
       * This is a terrible hack which works around a failure to realize()
       * some of the Processor-s for audio streams, when multiple streams
       * start nearly simultaneously. The cause of the problem is currently
       * unknown (and synchronizing all FMJ calls in RecorderRtpImpl
       * does not help).
       * XXX TODO NOOBABRI
       */
      if (receiveStreamDesc.format instanceof AudioFormat) {
        final Processor p = processor;
        new Thread() {
          @Override
          public void run() {
            // delay configuring the processors for the different
            // audio streams to decrease the probability that they
            // run together.
            try {
              int ms = 450 * (streamCount - 1);
              logger.warn(
                  "Sleeping for "
                      + ms
                      + "ms before"
                      + " configuring processor for SSRC="
                      + ssrc
                      + " "
                      + System.currentTimeMillis());
              Thread.sleep(ms);
            } catch (Exception e) {
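              // ignored: worst case configure() below simply runs without
              // the intended delay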
            }

            p.configure();
          }
        }.start();
      } else {
        processor.configure();
      }
    } else if (event instanceof TimeoutEvent) {
      if (receiveStream == null) {
        // TODO: we might want to get the list of ReceiveStream-s from
        // rtpManager and compare it to our list, to see if we should
        // remove a stream.
        logger.warn("TimeoutEvent: null.");
        return;
      }

      // FMJ silently creates new ReceiveStream instances, so we have to
      // recognize them by the SSRC.
      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream));
      if (receiveStreamDesc != null) {
        if (logger.isInfoEnabled()) {
          logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc);
        }

        removeReceiveStream(receiveStreamDesc, true);
      }
    } else if (logger.isInfoEnabled()) {
      logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event);
    }
  }
Example #18
/**
 * Class used to manage codecs information for <tt>MediaCodec</tt>.
 *
 * @author Pawel Domas
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public abstract class CodecInfo {
  /** The logger */
  private static final Logger logger = Logger.getLogger(CodecInfo.class);

  /**
   * The mime type of H.264-encoded media data as defined by Android's <tt>MediaCodec</tt> class.
   */
  public static final String MEDIA_CODEC_TYPE_H264 = "video/avc";

  /** The mime type of VP8-encoded media data as defined by Android's <tt>MediaCodec</tt> class. */
  public static final String MEDIA_CODEC_TYPE_VP8 = "video/x-vnd.on2.vp8";

  /**
   * The mime type of H.263-encoded media data as defined by Android's <tt>MediaCodec</tt> class.
   */
  public static final String MEDIA_CODEC_TYPE_H263 = "video/3gpp";

  /** List of codecs banned for crashing or otherwise misbehaving. */
  private static final List<String> bannedYuvCodecs;

  /** List of all codecs discovered in the system. */
  private static final List<CodecInfo> codecs = new ArrayList<CodecInfo>();

  static {
    bannedYuvCodecs = new ArrayList<String>();

    // Banned H264/H263 encoders/decoders
    // Crash
    bannedYuvCodecs.add("OMX.SEC.avc.enc");
    bannedYuvCodecs.add("OMX.SEC.h263.enc");
    // Doesn't support the 3.1 profile used by Jitsi
    bannedYuvCodecs.add("OMX.Nvidia.h264.decode");
    // bannedYuvCodecs.add("OMX.SEC.avc.dec");

    // Banned VP8 encoders/decoders
    bannedYuvCodecs.add("OMX.SEC.vp8.dec");
    // This one works only for res 176x144
    bannedYuvCodecs.add("OMX.google.vpx.encoder");

    for (int codecIndex = 0, codecCount = MediaCodecList.getCodecCount();
        codecIndex < codecCount;
        codecIndex++) {
      MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(codecIndex);
      logger.info(
          "Discovered codec: "
              + codecInfo.getName()
              + "/"
              + Arrays.toString(codecInfo.getSupportedTypes()));
      CodecInfo ci = CodecInfo.getCodecInfo(codecInfo);
      if (ci != null) {
        codecs.add(ci);
        ci.setBanned(bannedYuvCodecs.contains(ci.getName()));
      }
    }
    logger.info("Selected H264 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_H264, true));
    logger.info("Selected H264 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_H264, false));
    logger.info("Selected H263 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_H263, true));
    logger.info("Selected H263 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_H263, false));
    logger.info("Selected VP8 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_VP8, true));
    logger.info("Selected VP8 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_VP8, false));
  }

  /** <tt>MediaCodecInfo</tt> encapsulated by this instance. */
  protected final MediaCodecInfo codecInfo;

  /** <tt>MediaCodecInfo.CodecCapabilities</tt> encapsulated by this instance. */
  protected final MediaCodecInfo.CodecCapabilities caps;

  /** List of color formats supported by subject <tt>MediaCodec</tt>. */
  protected final ArrayList<CodecColorFormat> colors;

  /** Media type of this <tt>CodecInfo</tt>. */
  private final String mediaType;

  /** Profile levels supported by subject <tt>MediaCodec</tt>. */
  private ProfileLevel[] profileLevels;

  /**
   * Flag which indicates that this codec is known to cause problems and is disabled (it will be
   * ignored during the codec selection phase).
   */
  private boolean banned;

  /**
   * Creates a new instance of <tt>CodecInfo</tt> that will encapsulate the given <tt>codecInfo</tt>.
   *
   * @param codecInfo the codec info object to encapsulate.
   * @param mediaType media type of the codec
   */
  public CodecInfo(MediaCodecInfo codecInfo, String mediaType) {
    this.codecInfo = codecInfo;
    this.mediaType = mediaType;
    this.caps = codecInfo.getCapabilitiesForType(mediaType);

    this.colors = new ArrayList<CodecColorFormat>();
    int[] colorFormats = caps.colorFormats;
    for (int colorFormat : colorFormats) {
      colors.add(CodecColorFormat.fromInt(colorFormat));
    }
  }

  /**
   * Returns codec name that can be used to obtain <tt>MediaCodec</tt>.
   *
   * @return codec name that can be used to obtain <tt>MediaCodec</tt>.
   */
  public String getName() {
    return codecInfo.getName();
  }

  /**
   * Finds the first non-banned codec for the given <tt>mimeType</tt>.
   *
   * @param mimeType the mime type of the codec.
   * @param isEncoder <tt>true</tt> if an encoder should be returned, <tt>false</tt> for a decoder.
   * @return the codec for the given <tt>mimeType</tt>, or <tt>null</tt> if none was found.
   */
  public static CodecInfo getCodecForType(String mimeType, boolean isEncoder) {
    for (CodecInfo codec : codecs) {
      if (!codec.isBanned()
          && codec.mediaType.equals(mimeType)
          && codec.codecInfo.isEncoder() == isEncoder) {
        return codec;
      }
    }
    return null;
  }
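
  /*
   * Usage sketch (not part of the original class): select the first
   * non-banned VP8 encoder discovered by the static initializer, if any.
   */
  public static void logSelectedVp8Encoder() {
    CodecInfo vp8Encoder = getCodecForType(MEDIA_CODEC_TYPE_VP8, true);
    if (vp8Encoder == null) logger.warn("No usable VP8 encoder found");
    else logger.info("Using VP8 encoder: " + vp8Encoder.getName());
  }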

  /**
   * Returns the list of detected codecs.
   *
   * @return the list of detected codecs.
   */
  public static List<CodecInfo> getSupportedCodecs() {
    return Collections.unmodifiableList(codecs);
  }

  /**
   * Returns the list of profiles supported.
   *
   * @return the list of profiles supported.
   */
  protected abstract Profile[] getProfileSet();

  /**
   * Returns the list of supported levels.
   *
   * @return the list of supported levels.
   */
  protected abstract Level[] getLevelSet();

  private Profile getProfile(int profileInt) {
    for (Profile p : getProfileSet()) {
      if (p.value == profileInt) return p;
    }
    return new Profile("Unknown", profileInt);
  }

  private Level getLevel(int levelInt) {
    for (Level l : getLevelSet()) {
      if (l.value == levelInt) return l;
    }
    return new Level("Unknown", levelInt);
  }

  public ProfileLevel[] getProfileLevels() {
    if (profileLevels == null) {
      MediaCodecInfo.CodecProfileLevel[] plArray = caps.profileLevels;
      profileLevels = new ProfileLevel[plArray.length];
      for (int i = 0; i < profileLevels.length; i++) {
        Profile p = getProfile(plArray[i].profile);
        Level l = getLevel(plArray[i].level);
        profileLevels[i] = new ProfileLevel(p, l);
      }
    }
    return profileLevels;
  }

  @Override
  public String toString() {
    StringBuilder colorStr = new StringBuilder("\ncolors:\n");
    for (int i = 0; i < colors.size(); i++) {
      colorStr.append(colors.get(i));
      if (i != colors.size() - 1) colorStr.append(", \n");
    }

    StringBuilder plStr = new StringBuilder("\nprofiles:\n");
    ProfileLevel[] profiles = getProfileLevels();
    for (int i = 0; i < profiles.length; i++) {
      plStr.append(profiles[i].toString());
      if (i != profiles.length - 1) plStr.append(", \n");
    }

    return codecInfo.getName() + "(" + getLibjitsiEncoding() + ")" + colorStr + plStr;
  }

  public static CodecInfo getCodecInfo(MediaCodecInfo codecInfo) {
    String[] types = codecInfo.getSupportedTypes();
    for (String type : types) {
      try {
        if (type.equals(MEDIA_CODEC_TYPE_H263)) return new H263CodecInfo(codecInfo);
        else if (type.equals(MEDIA_CODEC_TYPE_H264)) return new H264CodecInfo(codecInfo);
        else if (type.equals(MEDIA_CODEC_TYPE_VP8)) return new VP8CodecInfo(codecInfo);
      } catch (IllegalArgumentException e) {
        logger.error(
            "Error initializing codec info: " + codecInfo.getName() + ", type: " + type, e);
      }
    }
    return null;
  }

  public void setBanned(boolean banned) {
    this.banned = banned;
  }

  public boolean isBanned() {
    return banned;
  }

  public boolean isEncoder() {
    return codecInfo.isEncoder();
  }

  public boolean isNominated() {
    return getCodecForType(mediaType, isEncoder()) == this;
  }

  public String getLibjitsiEncoding() {
    if (mediaType.equals(MEDIA_CODEC_TYPE_H263)) {
      return Constants.H263P;
    } else if (mediaType.equals(MEDIA_CODEC_TYPE_H264)) {
      return Constants.H264;
    } else if (mediaType.equals(MEDIA_CODEC_TYPE_VP8)) {
      return Constants.VP8;
    } else {
      return mediaType;
    }
  }

  public static class ProfileLevel {
    private final Profile profile;
    private final Level level;

    public ProfileLevel(Profile p, Level l) {
      this.profile = p;
      this.level = l;
    }

    @Override
    public String toString() {
      return "P: " + profile.toString() + " L: " + level.toString();
    }
  }

  public static class Profile {
    private final int value;

    private final String name;

    public Profile(String name, int value) {
      this.value = value;
      this.name = name;
    }

    @Override
    public String toString() {
      return name + "(0x" + Integer.toString(value, 16) + ")";
    }
  }

  public static class Level {
    private final int value;

    private final String name;

    public Level(String name, int value) {
      this.value = value;
      this.name = name;
    }

    @Override
    public String toString() {
      return name + "(0x" + Integer.toString(value, 16) + ")";
    }
  }

  static class H264CodecInfo extends CodecInfo {
    private final CodecInfo.Profile[] PROFILES =
        new CodecInfo.Profile[] {
          // from OMX_VIDEO_AVCPROFILETYPE
          new Profile("ProfileBaseline", 0x01),
          new Profile("ProfileMain", 0x02),
          new Profile("ProfileExtended", 0x04),
          new Profile("ProfileHigh", 0x08),
          new Profile("ProfileHigh10", 0x10),
          new Profile("ProfileHigh422", 0x20),
          new Profile("ProfileHigh444", 0x40)
        };

    private final CodecInfo.Level[] LEVELS =
        new CodecInfo.Level[] {
          // from OMX_VIDEO_AVCLEVELTYPE
          new Level("Level1", 0x01),
          new Level("Level1b", 0x02),
          new Level("Level11", 0x04),
          new Level("Level12", 0x08),
          new Level("Level13", 0x10),
          new Level("Level2", 0x20),
          new Level("Level21", 0x40),
          new Level("Level22", 0x80),
          new Level("Level3", 0x100),
          new Level("Level31", 0x200),
          new Level("Level32", 0x400),
          new Level("Level4", 0x800),
          new Level("Level41", 0x1000),
          new Level("Level42", 0x2000),
          new Level("Level5", 0x4000),
          new Level("Level51", 0x8000)
        };

    public H264CodecInfo(MediaCodecInfo codecInfo) {
      super(codecInfo, MEDIA_CODEC_TYPE_H264);
    }

    @Override
    protected Profile[] getProfileSet() {
      return PROFILES;
    }

    @Override
    protected Level[] getLevelSet() {
      return LEVELS;
    }
  }

  static class H263CodecInfo extends CodecInfo {
    private final CodecInfo.Profile[] PROFILES =
        new CodecInfo.Profile[] {
          // from OMX_VIDEO_H263PROFILETYPE
          new Profile("Baseline", 0x01),
          new Profile("H320Coding", 0x02),
          new Profile("BackwardCompatible", 0x04),
          new Profile("ISWV2", 0x08),
          new Profile("ISWV3", 0x10),
          new Profile("HighCompression", 0x20),
          new Profile("Internet", 0x40),
          new Profile("Interlace", 0x80),
          new Profile("HighLatency", 0x100)
        };

    private final CodecInfo.Level[] LEVELS =
        new CodecInfo.Level[] {
          // from OMX_VIDEO_H263LEVELTYPE
          new Level("Level10", 0x01),
          new Level("Level20", 0x02),
          new Level("Level30", 0x04),
          new Level("Level40", 0x08),
          new Level("Level45", 0x10),
          new Level("Level50", 0x20),
          new Level("Level60", 0x40),
          new Level("Level70", 0x80)
        };

    public H263CodecInfo(MediaCodecInfo codecInfo) {
      super(codecInfo, MEDIA_CODEC_TYPE_H263);
    }

    @Override
    protected Profile[] getProfileSet() {
      return PROFILES;
    }

    @Override
    protected Level[] getLevelSet() {
      return LEVELS;
    }
  }

  static class VP8CodecInfo extends CodecInfo {
    private final Profile[] PROFILES =
        new Profile[] {
          // from OMX_VIDEO_VP8PROFILETYPE
          new Profile("ProfileMain", 0x01)
        };

    private final Level[] LEVELS =
        new Level[] {
          // from OMX_VIDEO_VP8LEVELTYPE
          new Level("Version0", 0x01),
          new Level("Version1", 0x02),
          new Level("Version2", 0x04),
          new Level("Version3", 0x08)
        };

    public VP8CodecInfo(MediaCodecInfo codecInfo) {
      super(codecInfo, MEDIA_CODEC_TYPE_VP8);
    }

    @Override
    protected Profile[] getProfileSet() {
      return PROFILES;
    }

    @Override
    protected Level[] getLevelSet() {
      return LEVELS;
    }
  }
}
Example #19
  /** {@inheritDoc} */
  @Override
  protected int doProcess(Buffer inBuffer, Buffer outBuffer) {
    byte[] inData = (byte[]) inBuffer.getData();
    int inOffset = inBuffer.getOffset();

    if (!VP8PayloadDescriptor.isValid(inData, inOffset)) {
      logger.warn("Invalid RTP/VP8 packet discarded.");
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_FAILED; // XXX: FAILED or OK?
    }

    long inSeq = inBuffer.getSequenceNumber();
    long inRtpTimestamp = inBuffer.getRtpTimeStamp();
    int inPictureId = VP8PayloadDescriptor.getPictureId(inData, inOffset);
    boolean inMarker = (inBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0;
    boolean inIsStartOfFrame = VP8PayloadDescriptor.isStartOfFrame(inData, inOffset);
    int inLength = inBuffer.getLength();
    int inPdSize = VP8PayloadDescriptor.getSize(inData, inOffset);
    int inPayloadLength = inLength - inPdSize;

    if (empty && lastSentSeq != -1 && seqNumComparator.compare(inSeq, lastSentSeq) != 1) {
      if (logger.isInfoEnabled()) logger.info("Discarding old packet (while empty) " + inSeq);
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_OK;
    }

    if (!empty) {
      // if the incoming packet has a different PictureID or timestamp
      // than those of the current frame, then it belongs to a different
      // frame.
      if ((inPictureId != -1 && pictureId != -1 && inPictureId != pictureId)
          || (timestamp != -1 && inRtpTimestamp != -1 && inRtpTimestamp != timestamp)) {
        if (seqNumComparator.compare(inSeq, firstSeq) != 1) // inSeq <= firstSeq
        {
          // the packet belongs to a previous frame. discard it
          if (logger.isInfoEnabled()) logger.info("Discarding old packet " + inSeq);
          outBuffer.setDiscard(true);
          return BUFFER_PROCESSED_OK;
        } else // inSeq > firstSeq (and also presumably inSeq > lastSeq)
        {
          // the packet belongs to a subsequent frame (to the one
          // currently being held). Drop the current frame.

          if (logger.isInfoEnabled())
            logger.info(
                "Discarding saved packets on arrival of"
                    + " a packet for a subsequent frame: "
                    + inSeq);

          // TODO: this would be the place to complain about the
          // not-well-received PictureID by sending a RTCP SLI or NACK.
          reinit();
        }
      }
    }

    // a whole frame in a single packet. avoid the extra copy to
    // this.data and output it immediately.
    if (empty && inMarker && inIsStartOfFrame) {
      byte[] outData = validateByteArraySize(outBuffer, inPayloadLength, false);
      System.arraycopy(inData, inOffset + inPdSize, outData, 0, inPayloadLength);
      outBuffer.setOffset(0);
      outBuffer.setLength(inPayloadLength);
      outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

      if (TRACE) logger.trace("Out PictureID=" + inPictureId);

      lastSentSeq = inSeq;

      return BUFFER_PROCESSED_OK;
    }

    // add to this.data
    Container container = free.poll();
    if (container == null) container = new Container();
    if (container.buf == null || container.buf.length < inPayloadLength)
      container.buf = new byte[inPayloadLength];

    if (data.get(inSeq) != null) {
      if (logger.isInfoEnabled())
        logger.info("(Probable) duplicate packet detected, discarding " + inSeq);
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_OK;
    }

    System.arraycopy(inData, inOffset + inPdSize, container.buf, 0, inPayloadLength);
    container.len = inPayloadLength;
    data.put(inSeq, container);

    // update fields
    frameLength += inPayloadLength;
    if (firstSeq == -1 || (seqNumComparator.compare(firstSeq, inSeq) == 1)) firstSeq = inSeq;
    if (lastSeq == -1 || (seqNumComparator.compare(inSeq, lastSeq) == 1)) lastSeq = inSeq;

    if (empty) {
      // the first received packet for the current frame was just added
      empty = false;
      timestamp = inRtpTimestamp;
      pictureId = inPictureId;
    }

    if (inMarker) haveEnd = true;
    if (inIsStartOfFrame) haveStart = true;

    // check if we have a full frame
    if (frameComplete()) {
      byte[] outData = validateByteArraySize(outBuffer, frameLength, false);
      int ptr = 0;
      Container b;
      for (Map.Entry<Long, Container> entry : data.entrySet()) {
        b = entry.getValue();
        System.arraycopy(b.buf, 0, outData, ptr, b.len);
        ptr += b.len;
      }

      outBuffer.setOffset(0);
      outBuffer.setLength(frameLength);
      outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

      if (TRACE) logger.trace("Out PictureID=" + inPictureId);
      lastSentSeq = lastSeq;

      // prepare for the next frame
      reinit();

      return BUFFER_PROCESSED_OK;
    } else {
      // frame not complete yet
      outBuffer.setDiscard(true);
      return OUTPUT_BUFFER_NOT_FILLED;
    }
  }
Example #20
 /** {@inheritDoc} */
 @Override
 protected void doOpen() throws ResourceUnavailableException {
   if (logger.isInfoEnabled()) logger.info("Opened VP8 depacketizer");
 }
Example #21
  /**
   * Initializes a new <tt>RtxTransformer</tt> with a specific <tt>RtpChannel</tt>.
   *
   * @param channel the <tt>RtpChannel</tt> for the transformer.
   */
  RtxTransformer(RtpChannel channel) {
    super(RTPPacketPredicate.INSTANCE);

    this.channel = channel;
    this.logger = Logger.getLogger(classLogger, channel.getContent().getConference().getLogger());
  }
Example #22
/**
 * A depacketizer from VP8. See <a
 * href="http://tools.ietf.org/html/draft-ietf-payload-vp8-11">the VP8 RTP payload draft</a>.
 *
 * @author Boris Grozev
 * @author George Politis
 */
public class DePacketizer extends AbstractCodec2 {
  /**
   * The <tt>Logger</tt> used by the <tt>DePacketizer</tt> class and its instances for logging
   * output.
   */
  private static final Logger logger = Logger.getLogger(DePacketizer.class);

  /** Whether trace logging is enabled. */
  private static final boolean TRACE = logger.isTraceEnabled();

  /**
   * A <tt>Comparator</tt> implementation for RTP sequence numbers. Compares <tt>a</tt> and
   * <tt>b</tt>, taking into account the wrap at 2^16.
   *
   * <p>IMPORTANT: This is a valid <tt>Comparator</tt> implementation only if used for subsets of
   * [0, 2^16) which don't span more than 2^15 elements.
   *
   * <p>E.g. it works for [0, 2^15-1] and for ([50000, 2^16) u [0, 10000]). It doesn't work for [0,
   * 2^15], for ([0, 2^15-1] u {2^16-1}), or for [0, 2^16).
   *
   * <p>NOTE: An identical implementation for Integers can be found in the class SeqNumComparator.
   * Sequence numbers are 16 bits and unsigned, so an Integer should be sufficient to hold that.
   */
  private static final Comparator<? super Long> seqNumComparator =
      new Comparator<Long>() {
        @Override
        public int compare(Long a, Long b) {
          if (a.equals(b)) return 0;
          else if (a > b) {
            if (a - b < 32768) return 1;
            else return -1;
          } else // a < b
          {
            if (b - a < 32768) return -1;
            else return 1;
          }
        }
      };
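
  /*
   * Illustrative only (not part of the original class; run with -ea for the
   * asserts): the wrap-aware ordering treats 65535 as preceding 0, because 0
   * is one step past the 2^16 wrap, while plain Long ordering would say the
   * opposite.
   */
  @SuppressWarnings("unused")
  private static void demoSeqNumOrder() {
    assert seqNumComparator.compare(65535L, 0L) == -1; // 65535 comes first
    assert seqNumComparator.compare(0L, 65535L) == 1;
    assert seqNumComparator.compare(1L, 2L) == -1; // ordinary case, no wrap
  }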

  /**
   * Stores the RTP payloads (VP8 payload descriptor stripped) from RTP packets belonging to a
   * single VP8 compressed frame.
   */
  private SortedMap<Long, Container> data = new TreeMap<Long, Container>(seqNumComparator);

  /** Stores unused <tt>Container</tt>s. */
  private Queue<Container> free = new ArrayBlockingQueue<Container>(100);

  /**
   * Stores the first (earliest) sequence number stored in <tt>data</tt>, or -1 if <tt>data</tt> is
   * empty.
   */
  private long firstSeq = -1;

  /**
   * Stores the last (latest) sequence number stored in <tt>data</tt>, or -1 if <tt>data</tt> is
   * empty.
   */
  private long lastSeq = -1;

  /**
   * Stores the value of the <tt>PictureID</tt> field for the VP8 compressed frame, parts of which
   * are currently stored in <tt>data</tt>, or -1 if the <tt>PictureID</tt> field is not in use or
   * <tt>data</tt> is empty.
   */
  private int pictureId = -1;

  /**
   * Stores the RTP timestamp of the packets stored in <tt>data</tt>, or -1 if they don't have a
   * timestamp set.
   */
  private long timestamp = -1;

  /** Whether no packets are currently stored in <tt>data</tt>. Equivalent to <tt>data.isEmpty()</tt>. */
  private boolean empty = true;

  /**
   * Whether we have stored in <tt>data</tt> the last RTP packet of the VP8 compressed frame, parts
   * of which are currently stored in <tt>data</tt>.
   */
  private boolean haveEnd = false;

  /**
   * Whether we have stored in <tt>data</tt> the first RTP packet of the VP8 compressed frame, parts
   * of which are currently stored in <tt>data</tt>.
   */
  private boolean haveStart = false;

  /**
   * Stores the sum of the lengths of the data stored in <tt>data</tt>, that is the total length of
   * the VP8 compressed frame to be constructed.
   */
  private int frameLength = 0;

  /** The sequence number of the last RTP packet which was included in the output. */
  private long lastSentSeq = -1;

  /** Initializes a new <tt>DePacketizer</tt> instance. */
  public DePacketizer() {
    super(
        "VP8 RTP DePacketizer",
        VideoFormat.class,
        new VideoFormat[] {new VideoFormat(Constants.VP8)});
    inputFormats = new VideoFormat[] {new VideoFormat(Constants.VP8_RTP)};
  }

  /** {@inheritDoc} */
  @Override
  protected void doClose() {}

  /** {@inheritDoc} */
  @Override
  protected void doOpen() throws ResourceUnavailableException {
    if (logger.isInfoEnabled()) logger.info("Opened VP8 depacketizer");
  }

  /**
   * Re-initializes the fields which store information about the currently held data. Empties
   * <tt>data</tt>.
   */
  private void reinit() {
    firstSeq = lastSeq = timestamp = -1;
    pictureId = -1;
    empty = true;
    haveEnd = haveStart = false;
    frameLength = 0;

    Iterator<Map.Entry<Long, Container>> it = data.entrySet().iterator();
    Map.Entry<Long, Container> e;
    while (it.hasNext()) {
      e = it.next();
      free.offer(e.getValue());
      it.remove();
    }
  }

  /**
   * Checks whether the currently held VP8 compressed frame is complete (i.e. all its packets are
   * stored in <tt>data</tt>).
   *
   * @return <tt>true</tt> if the currently held VP8 compressed frame is complete, <tt>false</tt>
   *     otherwise.
   */
  private boolean frameComplete() {
    return haveStart && haveEnd && !haveMissing();
  }

  /**
   * Checks whether there are packets with sequence numbers between <tt>firstSeq</tt> and
   * <tt>lastSeq</tt> which are *not* stored in <tt>data</tt>.
   *
   * @return <tt>true</tt> if there are packets with sequence numbers between <tt>firstSeq</tt> and
   *     <tt>lastSeq</tt> which are *not* stored in <tt>data</tt>.
   */
  private boolean haveMissing() {
    Set<Long> seqs = data.keySet();
    long s = firstSeq;
    while (s != lastSeq) {
      if (!seqs.contains(s)) return true;
      s = (s + 1) % (1 << 16);
    }
    return false;
  }

  /** {@inheritDoc} */
  @Override
  protected int doProcess(Buffer inBuffer, Buffer outBuffer) {
    byte[] inData = (byte[]) inBuffer.getData();
    int inOffset = inBuffer.getOffset();

    if (!VP8PayloadDescriptor.isValid(inData, inOffset)) {
      logger.warn("Invalid RTP/VP8 packet discarded.");
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_FAILED; // XXX: FAILED or OK?
    }

    long inSeq = inBuffer.getSequenceNumber();
    long inRtpTimestamp = inBuffer.getRtpTimeStamp();
    int inPictureId = VP8PayloadDescriptor.getPictureId(inData, inOffset);
    boolean inMarker = (inBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0;
    boolean inIsStartOfFrame = VP8PayloadDescriptor.isStartOfFrame(inData, inOffset);
    int inLength = inBuffer.getLength();
    int inPdSize = VP8PayloadDescriptor.getSize(inData, inOffset);
    int inPayloadLength = inLength - inPdSize;

    if (empty && lastSentSeq != -1 && seqNumComparator.compare(inSeq, lastSentSeq) != 1) {
      if (logger.isInfoEnabled()) logger.info("Discarding old packet (while empty) " + inSeq);
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_OK;
    }

    if (!empty) {
      // if the incoming packet has a different PictureID or timestamp
      // than those of the current frame, then it belongs to a different
      // frame.
      if ((inPictureId != -1 && pictureId != -1 && inPictureId != pictureId)
          || (timestamp != -1 && inRtpTimestamp != -1 && inRtpTimestamp != timestamp)) {
        if (seqNumComparator.compare(inSeq, firstSeq) != 1) // inSeq <= firstSeq
        {
          // the packet belongs to a previous frame. discard it
          if (logger.isInfoEnabled()) logger.info("Discarding old packet " + inSeq);
          outBuffer.setDiscard(true);
          return BUFFER_PROCESSED_OK;
        } else // inSeq > firstSeq (and also presumably inSeq > lastSeq)
        {
          // the packet belongs to a subsequent frame (to the one
          // currently being held). Drop the current frame.

          if (logger.isInfoEnabled())
            logger.info(
                "Discarding saved packets on arrival of"
                    + " a packet for a subsequent frame: "
                    + inSeq);

          // TODO: this would be the place to complain about the
          // not-well-received PictureID by sending a RTCP SLI or NACK.
          reinit();
        }
      }
    }

    // a whole frame in a single packet. avoid the extra copy to
    // this.data and output it immediately.
    if (empty && inMarker && inIsStartOfFrame) {
      byte[] outData = validateByteArraySize(outBuffer, inPayloadLength, false);
      System.arraycopy(inData, inOffset + inPdSize, outData, 0, inPayloadLength);
      outBuffer.setOffset(0);
      outBuffer.setLength(inPayloadLength);
      outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

      if (TRACE) logger.trace("Out PictureID=" + inPictureId);

      lastSentSeq = inSeq;

      return BUFFER_PROCESSED_OK;
    }

    // add to this.data
    Container container = free.poll();
    if (container == null) container = new Container();
    if (container.buf == null || container.buf.length < inPayloadLength)
      container.buf = new byte[inPayloadLength];

    if (data.get(inSeq) != null) {
      if (logger.isInfoEnabled())
        logger.info("(Probable) duplicate packet detected, discarding " + inSeq);
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_OK;
    }

    System.arraycopy(inData, inOffset + inPdSize, container.buf, 0, inPayloadLength);
    container.len = inPayloadLength;
    data.put(inSeq, container);

    // update fields
    frameLength += inPayloadLength;
    if (firstSeq == -1 || (seqNumComparator.compare(firstSeq, inSeq) == 1)) firstSeq = inSeq;
    if (lastSeq == -1 || (seqNumComparator.compare(inSeq, lastSeq) == 1)) lastSeq = inSeq;

    if (empty) {
      // the first received packet for the current frame was just added
      empty = false;
      timestamp = inRtpTimestamp;
      pictureId = inPictureId;
    }

    if (inMarker) haveEnd = true;
    if (inIsStartOfFrame) haveStart = true;

    // check if we have a full frame
    if (frameComplete()) {
      byte[] outData = validateByteArraySize(outBuffer, frameLength, false);
      int ptr = 0;
      Container b;
      for (Map.Entry<Long, Container> entry : data.entrySet()) {
        b = entry.getValue();
        System.arraycopy(b.buf, 0, outData, ptr, b.len);
        ptr += b.len;
      }

      outBuffer.setOffset(0);
      outBuffer.setLength(frameLength);
      outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

      if (TRACE) logger.trace("Out PictureID=" + inPictureId);
      lastSentSeq = lastSeq;

      // prepare for the next frame
      reinit();

      return BUFFER_PROCESSED_OK;
    } else {
      // frame not complete yet
      outBuffer.setDiscard(true);
      return OUTPUT_BUFFER_NOT_FILLED;
    }
  }

  /**
   * Returns true if the buffer contains a VP8 key frame at offset <tt>offset</tt>.
   *
   * @param buff the byte buffer to check
   * @param off the offset in the byte buffer where the actual data starts
   * @param len the length of the data in the byte buffer
   * @return true if the buffer contains a VP8 key frame at offset <tt>offset</tt>.
   */
  public static boolean isKeyFrame(byte[] buff, int off, int len) {
    if (buff == null || buff.length < off + len || len < RawPacket.FIXED_HEADER_SIZE) {
      return false;
    }

    // Check if this is the start of a VP8 partition in the payload
    // descriptor.
    if (!DePacketizer.VP8PayloadDescriptor.isValid(buff, off)) {
      return false;
    }

    if (!DePacketizer.VP8PayloadDescriptor.isStartOfFrame(buff, off)) {
      return false;
    }

    int szVP8PayloadDescriptor = DePacketizer.VP8PayloadDescriptor.getSize(buff, off);

    return DePacketizer.VP8PayloadHeader.isKeyFrame(buff, off + szVP8PayloadDescriptor);
  }
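
  /*
   * Illustrative only (not part of the original class; the packet bytes are
   * synthetic, run with -ea for the assert): a one-byte descriptor with just
   * the S bit set, followed by a payload header whose P bit is clear, is
   * reported as a key frame. The array is padded to pass the minimum-length
   * check.
   */
  @SuppressWarnings("unused")
  private static void demoIsKeyFrame() {
    byte[] payload = new byte[RawPacket.FIXED_HEADER_SIZE];
    payload[0] = 0x10; // S bit set, X = 0, PartID = 0: start of frame, 1-byte descriptor
    payload[1] = 0x00; // VP8 payload header: P bit clear, i.e. a key frame
    assert isKeyFrame(payload, 0, payload.length);
  }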

  /**
   * A class that represents the VP8 Payload Descriptor structure defined in <a
   * href="http://tools.ietf.org/html/draft-ietf-payload-vp8-10">the VP8 RTP payload draft</a>.
   */
  public static class VP8PayloadDescriptor {
    /** I bit from the X byte of the Payload Descriptor. */
    private static final byte I_BIT = (byte) 0x80;

    /** K bit from the X byte of the Payload Descriptor. */
    private static final byte K_BIT = (byte) 0x10;
    /** L bit from the X byte of the Payload Descriptor. */
    private static final byte L_BIT = (byte) 0x40;

    /** M bit from the I (first PictureID) byte of the Payload Descriptor. */
    private static final byte M_BIT = (byte) 0x80;
    /** Maximum length of a VP8 Payload Descriptor. */
    public static final int MAX_LENGTH = 6;
    /** S bit from the first byte of the Payload Descriptor. */
    private static final byte S_BIT = (byte) 0x10;
    /** T bit from the X byte of the Payload Descriptor. */
    private static final byte T_BIT = (byte) 0x20;

    /** X bit from the first byte of the Payload Descriptor. */
    private static final byte X_BIT = (byte) 0x80;

    /**
     * Gets the temporal layer index (TID), if that's set.
     *
     * @param buf the byte buffer that holds the VP8 packet.
     * @param off the offset in the byte buffer where the VP8 packet starts.
     * @param len the length of the VP8 packet.
     * @return the temporal layer index (TID), if that's set, -1 otherwise.
     */
    public static int getTemporalLayerIndex(byte[] buf, int off, int len) {
      if (buf == null || buf.length < off + len || len < 2) {
        return -1;
      }

      if ((buf[off] & X_BIT) == 0 || (buf[off + 1] & T_BIT) == 0) {
        return -1;
      }

      int sz = getSize(buf, off);
      if (buf.length < off + sz || sz < 1) {
        return -1;
      }

      return (buf[off + sz - 1] & 0xc0) >> 6;
    }
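
    /*
     * Worked example (hypothetical bytes): for a descriptor { 0x90, 0x20, 0x80, ... } the first
     * byte has X=1 (0x80) and S=1 (0x10), and the extension byte has T=1 (0x20), so getSize(...)
     * returns 3 and the T/K byte is the last descriptor byte. Its two most significant bits hold
     * the TID, here (0x80 & 0xc0) >> 6 = 2, which is what getTemporalLayerIndex(...) returns.
     */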

    /**
     * Returns a simple Payload Descriptor, with PartID = 0, the 'start of partition' bit set
     * according to <tt>startOfPartition</tt>, and all other bits set to 0.
     *
     * @param startOfPartition whether the 'start of partition' bit should be set
     * @return a simple Payload Descriptor, with PartID = 0, the 'start of partition' bit set
     *     according to <tt>startOfPartition</tt>, and all other bits set to 0.
     */
    public static byte[] create(boolean startOfPartition) {
      byte[] pd = new byte[1];
      pd[0] = startOfPartition ? (byte) 0x10 : 0;
      return pd;
    }
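
    /*
     * For illustration: create(true) returns the single byte { 0x10 } (only the S bit set, PID
     * 0), while create(false) returns { 0x00 }. Either is a valid one-byte Payload Descriptor
     * with no extensions.
     */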

    /**
     * The size in bytes of the Payload Descriptor at offset <tt>offset</tt> in <tt>input</tt>. The
     * size is between 1 and 6.
     *
     * @param input the byte buffer that holds the VP8 Payload Descriptor
     * @param offset the offset in the byte buffer where the Payload Descriptor starts
     * @return The size in bytes of the Payload Descriptor at offset <tt>offset</tt> in
     *     <tt>input</tt>, or -1 if the input is not a valid VP8 Payload Descriptor. The size is
     *     between 1 and 6.
     */
    public static int getSize(byte[] input, int offset) {
      if (!isValid(input, offset)) return -1;

      if ((input[offset] & X_BIT) == 0) return 1;

      int size = 2;
      if ((input[offset + 1] & I_BIT) != 0) {
        size++;
        if ((input[offset + 2] & M_BIT) != 0) size++;
      }
      if ((input[offset + 1] & L_BIT) != 0) size++;
      if ((input[offset + 1] & (T_BIT | K_BIT)) != 0) size++;

      return size;
    }
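
    /*
     * Worked example (hypothetical bytes): for a descriptor starting { 0x80, 0xc0, 0x12, 0x40 }
     * the first byte has X=1, so the extension byte 0xc0 is read: I=1 adds the PictureID byte
     * 0x12 (its M bit is 0, so it is a single byte) and L=1 adds the TL0PICIDX byte, giving a
     * total size of 4. With X=0 in the first byte the descriptor would be a single byte and
     * getSize(...) would return 1.
     */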

    /**
     * Gets the value of the PictureID field of a VP8 Payload Descriptor.
     *
     * @param input the byte buffer that holds the VP8 Payload Descriptor
     * @param offset the offset in the byte buffer where the Payload Descriptor starts
     * @return the value of the PictureID field of a VP8 Payload Descriptor, or -1 if the field is
     *     not present.
     */
    private static int getPictureId(byte[] input, int offset) {
      if (!isValid(input, offset)) return -1;

      if ((input[offset] & X_BIT) == 0 || (input[offset + 1] & I_BIT) == 0) return -1;

      boolean isLong = (input[offset + 2] & M_BIT) != 0;
      if (isLong) return (input[offset + 2] & 0x7f) << 8 | (input[offset + 3] & 0xff);
      else return input[offset + 2] & 0x7f;
    }
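
    /*
     * For example (hypothetical bytes): with a descriptor { 0x80, 0x80, 0x85, 0xa3, ... } the X
     * and I bits are set and the M bit of the first PictureID byte (0x85) is set, so the
     * PictureID is the 15-bit value (0x85 & 0x7f) << 8 | 0xa3 = 0x05a3 = 1443. With M=0 the
     * PictureID would be the 7-bit value of that single byte.
     */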

    /**
     * Checks whether <tt>input</tt> may contain a VP8 Payload Descriptor at <tt>offset</tt>. Note
     * that only basic <tt>null</tt> and bounds checks are performed; the descriptor contents
     * themselves are not validated.
     *
     * @param input the byte buffer that holds the VP8 Payload Descriptor
     * @param offset the offset in the byte buffer where the Payload Descriptor starts
     * @return <tt>true</tt> if <tt>input</tt> may contain a VP8 Payload Descriptor at
     *     <tt>offset</tt>, <tt>false</tt> otherwise.
     */
    public static boolean isValid(byte[] input, int offset) {
      return input != null && offset >= 0 && input.length > offset;
    }

    /**
     * Checks whether the '<tt>start of partition</tt>' bit is set in the VP8 Payload Descriptor at
     * offset <tt>offset</tt> in <tt>input</tt>.
     *
     * @param input input
     * @param offset offset
     * @return <tt>true</tt> if the '<tt>start of partition</tt>' bit is set, <tt>false</tt>
     *     otherwise.
     */
    public static boolean isStartOfPartition(byte[] input, int offset) {
      return (input[offset] & S_BIT) != 0;
    }

    /**
     * Returns <tt>true</tt> if both the '<tt>start of partition</tt>' bit is set and the
     * <tt>PID</tt> field has value 0 in the VP8 Payload Descriptor at offset <tt>offset</tt> in
     * <tt>input</tt>.
     *
     * @param input the byte buffer that holds the VP8 Payload Descriptor
     * @param offset the offset in the byte buffer where the Payload Descriptor starts
     * @return <tt>true</tt> if both the '<tt>start of partition</tt>' bit is set and the
     *     <tt>PID</tt> field has value 0 in the VP8 Payload Descriptor at offset <tt>offset</tt>
     *     in <tt>input</tt>.
     */
    public static boolean isStartOfFrame(byte[] input, int offset) {
      return isStartOfPartition(input, offset) && getPartitionId(input, offset) == 0;
    }
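
    /*
     * For instance, a descriptor whose first byte is 0x10 has S=1 and PID=0, so it marks the
     * start of a frame; 0x11 (S=1, PID=1) starts a partition but not a frame.
     */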

    /**
     * Returns the value of the <tt>PID</tt> (partition ID) field of the VP8 Payload Descriptor at
     * offset <tt>offset</tt> in <tt>input</tt>.
     *
     * @param input the byte buffer that holds the VP8 Payload Descriptor
     * @param offset the offset in the byte buffer where the Payload Descriptor starts
     * @return the value of the <tt>PID</tt> (partition ID) field of the VP8 Payload Descriptor at
     *     offset <tt>offset</tt> in <tt>input</tt>.
     */
    public static int getPartitionId(byte[] input, int offset) {
      return input[offset] & 0x07;
    }
  }

  /**
   * A class that represents the VP8 Payload Header structure defined in <a
   * href="http://tools.ietf.org/html/draft-ietf-payload-vp8-10">draft-ietf-payload-vp8-10</a>.
   */
  public static class VP8PayloadHeader {
    /** P bit (inverse key frame flag) from the first byte of the Payload Header. */
    private static final byte P_BIT = (byte) 0x01;

    /**
     * Returns <tt>true</tt> if the <tt>P</tt> (inverse key frame flag) field of the VP8 Payload
     * Header at offset <tt>offset</tt> in <tt>input</tt> is 0.
     *
     * @param input the byte buffer that holds the VP8 Payload Header
     * @param offset the offset in the byte buffer where the VP8 Payload Header starts
     * @return <tt>true</tt> if the <tt>P</tt> (inverse key frame flag) field of the VP8 Payload
     *     Header at offset <tt>offset</tt> in <tt>input</tt> is 0, <tt>false</tt> otherwise.
     */
    public static boolean isKeyFrame(byte[] input, int offset) {
      // When set to 0 the current frame is a key frame.  When set to 1
      // the current frame is an interframe. Defined in [RFC6386]

      return (input[offset] & P_BIT) == 0;
    }
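
    /*
     * For example, a Payload Header whose first byte is 0x10 has P=0 and therefore belongs to a
     * key frame, while 0x11 (P=1) belongs to an interframe.
     */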
  }

  /** A simple container for a <tt>byte[]</tt> and an integer. */
  private static class Container {
    /** This <tt>Container</tt>'s data. */
    private byte[] buf;

    /** Length used. */
    private int len = 0;
  }
}
/**
 * Implements <tt>MediaFormatFactory</tt> for the JMF <tt>Format</tt> types.
 *
 * @author Lyubomir Marinov
 */
public class MediaFormatFactoryImpl implements MediaFormatFactory {
  /**
   * The <tt>Logger</tt> used by the <tt>MediaFormatFactoryImpl</tt> class and its instances for
   * logging output.
   */
  private static final Logger logger = Logger.getLogger(MediaFormatFactoryImpl.class);

  /**
   * Creates an unknown <tt>MediaFormat</tt> for the specified <tt>MediaType</tt>.
   *
   * @param type the <tt>MediaType</tt> to create an unknown <tt>MediaFormat</tt> for
   * @return an unknown <tt>MediaFormat</tt> for the specified <tt>MediaType</tt>
   */
  public MediaFormat createUnknownMediaFormat(MediaType type) {
    Format unknown = null;

    /*
     * Create a Format instance matching the requested MediaType: an AudioFormat
     * for MediaType.AUDIO and a VideoFormat for MediaType.VIDEO, both with the
     * "unknown" encoding name.
     */
    if (type.equals(MediaType.AUDIO)) unknown = new AudioFormat("unknown");
    else if (type.equals(MediaType.VIDEO)) unknown = new VideoFormat("unknown");
    return MediaFormatImpl.createInstance(unknown);
  }

  /**
   * Creates a <tt>MediaFormat</tt> for the specified <tt>encoding</tt> with default clock rate and
   * set of format parameters. If <tt>encoding</tt> is known to this <tt>MediaFormatFactory</tt>,
   * returns a <tt>MediaFormat</tt> which is either an <tt>AudioMediaFormat</tt> or a
   * <tt>VideoMediaFormat</tt> instance. Otherwise, returns <tt>null</tt>.
   *
   * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for
   * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt> which is either an
   *     <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance if <tt>encoding</tt> is
   *     known to this <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt>
   * @see MediaFormatFactory#createMediaFormat(String)
   */
  public MediaFormat createMediaFormat(String encoding) {
    return createMediaFormat(encoding, CLOCK_RATE_NOT_SPECIFIED);
  }

  /**
   * Creates a <tt>MediaFormat</tt> for the specified RTP payload type with default clock rate and
   * set of format parameters. If <tt>rtpPayloadType</tt> is known to this
   * <tt>MediaFormatFactory</tt>, returns a <tt>MediaFormat</tt> which is either an
   * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance. Otherwise, returns
   * <tt>null</tt>.
   *
   * @param rtpPayloadType the RTP payload type of the <tt>MediaFormat</tt> to create
   * @return a <tt>MediaFormat</tt> with the specified <tt>rtpPayloadType</tt> which is either an
   *     <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance if
   *     <tt>rtpPayloadType</tt> is known to this <tt>MediaFormatFactory</tt>; otherwise,
   *     <tt>null</tt>
   * @see MediaFormatFactory#createMediaFormat(byte)
   */
  public MediaFormat createMediaFormat(byte rtpPayloadType) {
    /*
     * We know which MediaFormat instances have the specified rtpPayloadType,
     * but we cannot directly return them because they do not reflect the
     * user's configuration with respect to being enabled and disabled.
     */
    for (MediaFormat rtpPayloadTypeMediaFormat : MediaUtils.getMediaFormats(rtpPayloadType)) {
      MediaFormat mediaFormat =
          createMediaFormat(
              rtpPayloadTypeMediaFormat.getEncoding(), rtpPayloadTypeMediaFormat.getClockRate());
      if (mediaFormat != null) return mediaFormat;
    }
    return null;
  }

  /**
   * Creates a <tt>MediaFormat</tt> for the specified <tt>encoding</tt> with the specified
   * <tt>clockRate</tt> and a default set of format parameters. If <tt>encoding</tt> is known to
   * this <tt>MediaFormatFactory</tt>, returns a <tt>MediaFormat</tt> which is either an
   * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance. Otherwise, returns
   * <tt>null</tt>.
   *
   * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for
   * @param clockRate the clock rate in Hz to create a <tt>MediaFormat</tt> for
   * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt> and <tt>clockRate</tt>
   *     which is either an <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance if
   *     <tt>encoding</tt> is known to this <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt>
   * @see MediaFormatFactory#createMediaFormat(String, double)
   */
  public MediaFormat createMediaFormat(String encoding, double clockRate) {
    return createMediaFormat(encoding, clockRate, 1);
  }

  /**
   * Creates a <tt>MediaFormat</tt> for the specified <tt>encoding</tt>, <tt>clockRate</tt> and
   * <tt>channels</tt> and a default set of format parameters. If <tt>encoding</tt> is known to this
   * <tt>MediaFormatFactory</tt>, returns a <tt>MediaFormat</tt> which is either an
   * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance. Otherwise, returns
   * <tt>null</tt>.
   *
   * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for
   * @param clockRate the clock rate in Hz to create a <tt>MediaFormat</tt> for
   * @param channels the number of available channels (1 for mono, 2 for stereo) if it makes sense
   *     for the <tt>MediaFormat</tt> with the specified <tt>encoding</tt>; otherwise, ignored
   * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt>, <tt>clockRate</tt> and
   *     <tt>channels</tt> and a default set of format parameters which is either an
   *     <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance if <tt>encoding</tt> is
   *     known to this <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt>
   * @see MediaFormatFactory#createMediaFormat(String, double, int)
   */
  public MediaFormat createMediaFormat(String encoding, double clockRate, int channels) {
    return createMediaFormat(encoding, clockRate, channels, null);
  }

  private MediaFormat createMediaFormat(
      String encoding, double clockRate, int channels, Map<String, String> fmtps) {
    for (MediaFormat format : getSupportedMediaFormats(encoding, clockRate)) {
      /*
       * The mediaType, encoding and clockRate properties are sure to
       * match because format is the result of the search for encoding and
       * clockRate. We just want to make sure that the channels and the
       * format parameters match.
       */
      if (format.matches(
          format.getMediaType(), format.getEncoding(), format.getClockRate(), channels, fmtps))
        return format;
    }
    return null;
  }
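
  /*
   * A minimal usage sketch (the encoding names and clock rates below are illustrative; whether
   * a format is actually returned depends on the encodings enabled in the current
   * EncodingConfiguration):
   *
   *   MediaFormatFactory factory = new MediaFormatFactoryImpl();
   *   MediaFormat pcmu = factory.createMediaFormat("PCMU", 8000);      // mono audio
   *   MediaFormat opus = factory.createMediaFormat("opus", 48000, 2);  // stereo audio
   *   MediaFormat vp8 = factory.createMediaFormat("VP8", 90000);       // video; channels ignored
   */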

  /**
   * Creates a <tt>MediaFormat</tt> for the specified <tt>encoding</tt>, <tt>clockRate</tt> and set
   * of format parameters. If <tt>encoding</tt> is known to this <tt>MediaFormatFactory</tt>,
   * returns a <tt>MediaFormat</tt> which is either an <tt>AudioMediaFormat</tt> or a
   * <tt>VideoMediaFormat</tt> instance. Otherwise, returns <tt>null</tt>.
   *
   * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for
   * @param clockRate the clock rate in Hz to create a <tt>MediaFormat</tt> for
   * @param formatParams any codec specific parameters which have been received via SIP/SDP or
   *     XMPP/Jingle
   * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt>, <tt>clockRate</tt> and set
   *     of format parameters which is either an <tt>AudioMediaFormat</tt> or a
   *     <tt>VideoMediaFormat</tt> instance if <tt>encoding</tt> is known to this
   *     <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt>
   * @see MediaFormatFactory#createMediaFormat(String, double, Map, Map)
   */
  public MediaFormat createMediaFormat(
      String encoding,
      double clockRate,
      Map<String, String> formatParams,
      Map<String, String> advancedParams) {
    return createMediaFormat(encoding, clockRate, 1, -1, formatParams, advancedParams);
  }

  /**
   * Creates a <tt>MediaFormat</tt> for the specified <tt>encoding</tt>, <tt>clockRate</tt>,
   * <tt>channels</tt> and set of format parameters. If <tt>encoding</tt> is known to this
   * <tt>MediaFormatFactory</tt>, returns a <tt>MediaFormat</tt> which is either an
   * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance. Otherwise, returns
   * <tt>null</tt>.
   *
   * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for
   * @param clockRate the clock rate in Hz to create a <tt>MediaFormat</tt> for
   * @param channels the number of available channels (1 for mono, 2 for stereo) if it makes sense
   *     for the <tt>MediaFormat</tt> with the specified <tt>encoding</tt>; otherwise, ignored
   * @param frameRate the frame rate in number of frames per second to create a <tt>MediaFormat</tt>
   *     for
   * @param formatParams any codec specific parameters which have been received via SIP/SDP or
   *     XMPP/Jingle
   * @param advancedParams any parameters which have been received via SIP/SDP or XMPP/Jingle
   * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt>, <tt>clockRate</tt>,
   *     <tt>channels</tt> and set of format parameters which is either an <tt>AudioMediaFormat</tt>
   *     or a <tt>VideoMediaFormat</tt> instance if <tt>encoding</tt> is known to this
   *     <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt>
   * @see MediaFormatFactory#createMediaFormat(String, double, int, float, Map, Map)
   */
  public MediaFormat createMediaFormat(
      String encoding,
      double clockRate,
      int channels,
      float frameRate,
      Map<String, String> formatParams,
      Map<String, String> advancedParams) {
    MediaFormat mediaFormat = createMediaFormat(encoding, clockRate, channels, formatParams);

    if (mediaFormat == null) return null;

    /*
     * MediaFormatImpl is immutable so if the caller wants to change the
     * format parameters and/or the advanced attributes, we'll have to
     * create a new MediaFormatImpl.
     */
    Map<String, String> formatParameters = null;
    Map<String, String> advancedParameters = null;

    if ((formatParams != null) && !formatParams.isEmpty()) formatParameters = formatParams;
    if ((advancedParams != null) && !advancedParams.isEmpty()) advancedParameters = advancedParams;

    if ((formatParameters != null) || (advancedParameters != null)) {
      switch (mediaFormat.getMediaType()) {
        case AUDIO:
          mediaFormat =
              new AudioMediaFormatImpl(
                  ((AudioMediaFormatImpl) mediaFormat).getFormat(),
                  formatParameters,
                  advancedParameters);
          break;
        case VIDEO:
          VideoMediaFormatImpl videoMediaFormatImpl = (VideoMediaFormatImpl) mediaFormat;

          /*
           * If the format of VideoMediaFormatImpl is a ParameterizedVideoFormat,
           * it's possible for the format parameters of that
           * ParameterizedVideoFormat and of the new VideoMediaFormatImpl (to be
           * created) to be out of sync. While not technically perfect, this
           * should be practically safe for the format parameters which
           * distinguish VideoFormats with the same encoding and clock rate,
           * because mediaFormat has already been created in sync with
           * formatParams in that respect.
           */
          mediaFormat =
              new VideoMediaFormatImpl(
                  videoMediaFormatImpl.getFormat(),
                  videoMediaFormatImpl.getClockRate(),
                  frameRate,
                  formatParameters,
                  advancedParameters);
          break;
        default:
          mediaFormat = null;
      }
    }
    return mediaFormat;
  }

  /**
   * Creates a <tt>MediaFormat</tt> either for the specified <tt>rtpPayloadType</tt> or for the
   * specified <tt>encoding</tt>, <tt>clockRate</tt>, <tt>channels</tt> and set of format
   * parameters. If <tt>encoding</tt> is known to this <tt>MediaFormatFactory</tt>, ignores
   * <tt>rtpPayloadType</tt> and returns a <tt>MediaFormat</tt> which is either an
   * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance. If <tt>rtpPayloadType</tt>
   * is not {@link MediaFormat#RTP_PAYLOAD_TYPE_UNKNOWN} and <tt>encoding</tt> is <tt>null</tt>,
   * uses the encoding associated with <tt>rtpPayloadType</tt>.
   *
   * @param rtpPayloadType the RTP payload type to create a <tt>MediaFormat</tt> for; {@link
   *     MediaFormat#RTP_PAYLOAD_TYPE_UNKNOWN} if <tt>encoding</tt> is not <tt>null</tt>. If
   *     <tt>rtpPayloadType</tt> is not <tt>MediaFormat#RTP_PAYLOAD_TYPE_UNKNOWN</tt> and
   *     <tt>encoding</tt> is not <tt>null</tt>, <tt>rtpPayloadType</tt> is ignored
   * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for;
   *     <tt>null</tt> to use the encoding associated with <tt>rtpPayloadType</tt>
   * @param clockRate the clock rate in Hz to create a <tt>MediaFormat</tt> for
   * @param frameRate the frame rate in number of frames per second to create a <tt>MediaFormat</tt>
   *     for
   * @param channels the number of available channels (1 for mono, 2 for stereo) if it makes sense
   *     for the <tt>MediaFormat</tt> with the specified <tt>encoding</tt>; otherwise, ignored
   * @param formatParams any codec specific parameters which have been received via SIP/SDP or
   *     XMPP/Jingle
   * @param advancedParams any parameters which have been received via SIP/SDP or XMPP/Jingle
   * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt>, <tt>clockRate</tt>,
   *     <tt>channels</tt> and set of format parameters which is either an <tt>AudioMediaFormat</tt>
   *     or a <tt>VideoMediaFormat</tt> instance if <tt>encoding</tt> is known to this
   *     <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt>
   */
  public MediaFormat createMediaFormat(
      byte rtpPayloadType,
      String encoding,
      double clockRate,
      int channels,
      float frameRate,
      Map<String, String> formatParams,
      Map<String, String> advancedParams) {

    /*
     * If rtpPayloadType is specified, use it only to figure out encoding
     * and/or clockRate in case either one of them is unknown.
     */
    if ((MediaFormat.RTP_PAYLOAD_TYPE_UNKNOWN != rtpPayloadType)
        && ((encoding == null) || (CLOCK_RATE_NOT_SPECIFIED == clockRate))) {
      MediaFormat[] rtpPayloadTypeMediaFormats = MediaUtils.getMediaFormats(rtpPayloadType);

      if (rtpPayloadTypeMediaFormats.length > 0) {
        if (encoding == null) encoding = rtpPayloadTypeMediaFormats[0].getEncoding();

        // Assign or check the clock rate.
        if (CLOCK_RATE_NOT_SPECIFIED == clockRate)
          clockRate = rtpPayloadTypeMediaFormats[0].getClockRate();
        else {
          boolean clockRateIsValid = false;

          for (MediaFormat rtpPayloadTypeMediaFormat : rtpPayloadTypeMediaFormats)
            if (rtpPayloadTypeMediaFormat.getEncoding().equals(encoding)
                && (rtpPayloadTypeMediaFormat.getClockRate() == clockRate)) {
              clockRateIsValid = true;
              break;
            }

          if (!clockRateIsValid) return null;
        }
      }
    }

    return createMediaFormat(
        encoding, clockRate, channels, frameRate, formatParams, advancedParams);
  }
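
  /*
   * For illustration: with the static RTP payload type 0 (PCMU/8000 per RFC 3551), a call such
   * as
   *
   *   createMediaFormat((byte) 0, null, CLOCK_RATE_NOT_SPECIFIED, 1, -1, null, null);
   *
   * first resolves encoding = "PCMU" and clockRate = 8000 from the payload type and then
   * delegates to the encoding/clockRate overload. Passing an explicit clock rate that no PCMU
   * format has (e.g. 16000) would instead fail the validity check above and return null.
   */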

  /**
   * Gets the <tt>MediaFormat</tt>s among the specified <tt>mediaFormats</tt> which have the
   * specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>.
   *
   * @param mediaFormats the <tt>MediaFormat</tt>s from which to filter out only the ones which have
   *     the specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>
   * @param encoding the well-known encoding (name) of the <tt>MediaFormat</tt>s to be retrieved
   * @param clockRate the clock rate of the <tt>MediaFormat</tt>s to be retrieved; {@link
   *     #CLOCK_RATE_NOT_SPECIFIED} if any clock rate is acceptable
   * @return a <tt>List</tt> of the <tt>MediaFormat</tt>s among <tt>mediaFormats</tt> which have the
   *     specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>
   */
  private List<MediaFormat> getMatchingMediaFormats(
      MediaFormat[] mediaFormats, String encoding, double clockRate) {
    /*
     * XXX Use String#equalsIgnoreCase(String) because some clients transmit
     * some of the codecs starting with capital letters.
     */

    /*
     * As per RFC 3551, Section 4.5.2, because of a mistake in RFC 1890 and for
     * backward compatibility, G.722 should always be announced as 8000 even
     * though it is wideband. So, if someone is looking for G722/16000,
     * then: Forgive them, for they know not what they do!
     */
    if ("G722".equalsIgnoreCase(encoding) && (16000 == clockRate)) {
      clockRate = 8000;
      if (logger.isInfoEnabled()) logger.info("Suppressing erroneous 16000 announcement for G.722");
    }

    List<MediaFormat> supportedMediaFormats = new ArrayList<MediaFormat>();

    for (MediaFormat mediaFormat : mediaFormats) {
      if (mediaFormat.getEncoding().equalsIgnoreCase(encoding)
          && ((CLOCK_RATE_NOT_SPECIFIED == clockRate)
              || (mediaFormat.getClockRate() == clockRate))) {
        supportedMediaFormats.add(mediaFormat);
      }
    }
    return supportedMediaFormats;
  }
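
  /*
   * Consequently, a query such as getMatchingMediaFormats(audioFormats, "g722", 16000), where
   * audioFormats hypothetically contains a G722/8000 entry, is rewritten to clockRate 8000 and
   * still matches: the G.722 special case above fixes the clock rate, and encodings are
   * compared case-insensitively.
   */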

  /**
   * Gets the <tt>MediaFormat</tt>s supported by this <tt>MediaFormatFactory</tt> and the
   * <tt>MediaService</tt> associated with it and having the specified <tt>encoding</tt> and,
   * optionally, <tt>clockRate</tt>.
   *
   * @param encoding the well-known encoding (name) of the <tt>MediaFormat</tt>s to be retrieved
   * @param clockRate the clock rate of the <tt>MediaFormat</tt>s to be retrieved; {@link
   *     #CLOCK_RATE_NOT_SPECIFIED} if any clock rate is acceptable
   * @return a <tt>List</tt> of the <tt>MediaFormat</tt>s supported by the <tt>MediaService</tt>
   *     associated with this <tt>MediaFormatFactory</tt> and having the specified encoding and,
   *     optionally, clock rate
   */
  private List<MediaFormat> getSupportedMediaFormats(String encoding, double clockRate) {
    EncodingConfiguration encodingConfiguration =
        NeomediaServiceUtils.getMediaServiceImpl().getCurrentEncodingConfiguration();
    List<MediaFormat> supportedMediaFormats =
        getMatchingMediaFormats(
            encodingConfiguration.getAllEncodings(MediaType.AUDIO), encoding, clockRate);

    if (supportedMediaFormats.isEmpty())
      supportedMediaFormats =
          getMatchingMediaFormats(
              encodingConfiguration.getAllEncodings(MediaType.VIDEO), encoding, clockRate);
    return supportedMediaFormats;
  }
}