/**
   * Sets the time in milliseconds of the last activity related to this <tt>Conference</tt> to the
   * current system time.
   */
  public void touch() {
    long now = System.currentTimeMillis();

    synchronized (this) {
      if (getLastActivityTime() < now) lastActivityTime = now;
    }
  }
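  // A hedged usage sketch, not part of the original class: because touch()
  // only ever moves lastActivityTime forward, an expiry check can compare
  // against it without ever observing the value move backwards. The method
  // name and timeout parameter here are hypothetical.
  public boolean isExpired(long timeoutMs) {
    synchronized (this) {
      return System.currentTimeMillis() - getLastActivityTime() > timeoutMs;
    }
  }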
// Example #2
  /**
   * Updates rate statistics for the encodings of the tracks that this receiver is managing. Detects
   * simulcast stream suspension/resuming.
   *
   * @param pkt the {@link RawPacket} that is causing the update of this instance.
   * @param encoding the {@link RTPEncodingImpl} of the {@link RawPacket} that is passed as an
   *     argument.
   */
  void update(RawPacket pkt, RTPEncodingImpl encoding) {
    long nowMs = System.currentTimeMillis();

    // Update the encoding.
    FrameDesc frameDesc = encoding.update(pkt, nowMs);
    if (frameDesc == null /* no frame was changed */
        || !frameDesc.isIndependent() /* frame is dependent */

        // The webrtc engine sends keyframes from the highest stream to the
        // lowest, and no more often than every 300 ms. The first keyframe
        // we observe after having waited that long determines which
        // streams are streaming (i.e. not suspended).
        || nowMs - lastKeyframeMs < MIN_KEY_FRAME_WAIT_MS) {
      return;
    }

    // Media engines may suspend a stream for congestion control; this is
    // the case with the webrtc.org simulcast implementation. That behavior
    // induces a streaming dependency between the encodings of a given
    // track. The code below assumes that the subjective quality array is
    // ordered so that it represents those streaming dependencies.

    lastKeyframeMs = nowMs;
    boolean isActive = false;

    for (int i = rtpEncodings.length - 1; i >= 0; i--) {
      if (!isActive && rtpEncodings[i].requires(encoding.getIndex())) {
        isActive = true;
      }

      rtpEncodings[i].setActive(isActive);
    }
  }
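  // Hedged standalone illustration of the walk above: the first keyframe
  // after the wait arrives on the highest encoding that is streaming, so
  // that layer and every layer below it become active while everything
  // above it is suspended. Plain arrays stand in for RTPEncodingImpl; this
  // is a simplification, not the actual dependency model.
  static boolean[] activeFlagsAfterKeyframe(int encodingCount, int keyframeIndex) {
    boolean[] active = new boolean[encodingCount];
    for (int i = 0; i < encodingCount; i++) {
      active[i] = i <= keyframeIndex;
    }
    return active;
  }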
  private MediaFormat[] getEncodings() {
    if (encodings != null) return encodings;

    MediaFormat[] availableEncodings = encodingConfiguration.getAllEncodings(type);
    int encodingCount = availableEncodings.length;

    if (encodingCount < 1) encodings = MediaUtils.EMPTY_MEDIA_FORMATS;
    else {
      /*
       * The MediaFormats will be displayed by encoding (name) and clock
       * rate, and EncodingConfiguration stores them that way, so this
       * TableModel should display unique encoding/clock-rate pairs.
       */
      HashMap<String, MediaFormat> availableEncodingSet = new HashMap<String, MediaFormat>();

      for (MediaFormat availableEncoding : availableEncodings) {
        availableEncodingSet.put(
            availableEncoding.getEncoding() + "/" + availableEncoding.getClockRateString(),
            availableEncoding);
      }
      availableEncodings = availableEncodingSet.values().toArray(MediaUtils.EMPTY_MEDIA_FORMATS);
      encodingCount = availableEncodings.length;

      encodings = new MediaFormat[encodingCount];
      System.arraycopy(availableEncodings, 0, encodings, 0, encodingCount);
      // Display the encodings in decreasing priority.
      Arrays.sort(
          encodings,
          0,
          encodingCount,
          new Comparator<MediaFormat>() {
            public int compare(MediaFormat format0, MediaFormat format1) {
              int ret =
                  encodingConfiguration.getPriority(format1)
                      - encodingConfiguration.getPriority(format0);

              if (ret == 0) {
                /*
                 * In the cases of equal priorities, display them
                 * sorted by encoding name in increasing order.
                 */
                ret = format0.getEncoding().compareToIgnoreCase(format1.getEncoding());
                if (ret == 0) {
                  /*
                   * In the cases of equal priorities and equal
                   * encoding names, display them sorted by clock
                   * rate in decreasing order.
                   */
                  ret = Double.compare(format1.getClockRate(), format0.getClockRate());
                }
              }
              return ret;
            }
          });
    }
    return encodings;
  }
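  // Hedged Java 8+ rewrite of the anonymous Comparator above, assuming the
  // same EncodingConfiguration/MediaFormat accessors. The intent is an
  // identical ordering (priority descending, then encoding name ascending,
  // then clock rate descending), while also avoiding the int-subtraction
  // idiom, which can overflow for extreme priority values.
  private static Comparator<MediaFormat> displayOrder(final EncodingConfiguration cfg) {
    return Comparator.comparingInt((MediaFormat f) -> cfg.getPriority(f))
        .reversed()
        .thenComparing(MediaFormat::getEncoding, String.CASE_INSENSITIVE_ORDER)
        .thenComparing(Comparator.comparingDouble(MediaFormat::getClockRate).reversed());
  }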
    /**
     * Removes receiver and sender feedback from RTCP packets.
     *
     * @param inPacket the <tt>RTCPCompoundPacket</tt> to filter.
     * @return the filtered <tt>RawPacket</tt>.
     */
    public RawPacket gateway(RTCPCompoundPacket inPacket) {
      if (inPacket == null || inPacket.packets == null || inPacket.packets.length == 0) {
        logger.info("Ignoring empty RTCP packet.");
        return null;
      }

      ArrayList<RTCPPacket> outPackets = new ArrayList<RTCPPacket>(inPacket.packets.length);

      for (RTCPPacket p : inPacket.packets) {
        switch (p.type) {
          case RTCPPacket.RR:
          case RTCPPacket.SR:
          case RTCPPacket.SDES:
            // We generate our own RR/SR/SDES packets. We only want
            // to forward NACKs/PLIs/etc.
            break;
          case RTCPFBPacket.PSFB:
            RTCPFBPacket psfb = (RTCPFBPacket) p;
            switch (psfb.fmt) {
              case RTCPREMBPacket.FMT:
                // We generate our own REMB packets.
                break;
              default:
                // We let through everything else, like NACK
                // packets.
                outPackets.add(psfb);
                break;
            }
            break;
          default:
            // We let through everything else, like BYE and APP
            // packets.
            outPackets.add(p);
            break;
        }
      }

      if (outPackets.size() == 0) {
        return null;
      }

      // We have feedback messages to send. Pack them in a compound
      // RR and send them. TODO Use RFC5506 Reduced-Size RTCP, if the
      // receiver supports it.
      Collection<RTCPRRPacket> rrPackets = makeRTCPRRPackets(System.currentTimeMillis());

      if (rrPackets != null && rrPackets.size() != 0) {
        outPackets.addAll(0, rrPackets);
      } else {
        logger.warn("We might be sending invalid RTCPs.");
      }

      RTCPPacket[] pkts = outPackets.toArray(new RTCPPacket[outPackets.size()]);
      RTCPCompoundPacket outPacket = new RTCPCompoundPacket(pkts);

      return generator.apply(outPacket);
    }
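    // Hedged summary of the filtering above: RR/SR/SDES and REMB are
    // dropped because this instance generates its own, other feedback
    // (e.g. NACKs and PLIs) and BYE/APP packets pass through, and freshly
    // built RRs are prepended so that the compound packet still starts
    // with a receiver report, as RFC 3550 requires.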
// Example #5
      public int write(byte[] buffer, int offset, int length, boolean transform) {
        RawPacket pkt = rawPacketArray[0];
        if (pkt == null) pkt = new RawPacket();
        rawPacketArray[0] = pkt;

        byte[] pktBuf = pkt.getBuffer();
        if (pktBuf == null || pktBuf.length < length) {
          pktBuf = new byte[length];
          pkt.setBuffer(pktBuf);
        }
        System.arraycopy(buffer, offset, pktBuf, 0, length);
        pkt.setOffset(0);
        pkt.setLength(length);

        if (transform) {
          PacketTransformer packetTransformer =
              isControlStream ? rtcpPacketTransformer : rtpPacketTransformer;

          if (packetTransformer != null)
            rawPacketArray = packetTransformer.reverseTransform(rawPacketArray);
        }

        SourceTransferHandler transferHandler;
        PushSourceStream pushSourceStream;

        try {
          if (isControlStream) {
            transferHandler = controlTransferHandler;
            pushSourceStream = getControlInputStream();
          } else {
            transferHandler = dataTransferHandler;
            pushSourceStream = getDataInputStream();
          }
        } catch (IOException ioe) {
          throw new UndeclaredThrowableException(ioe);
        }

        for (int i = 0; i < rawPacketArray.length; i++) {
          RawPacket packet = rawPacketArray[i];

          // keep the first element for reuse
          if (i != 0) rawPacketArray[i] = null;

          if (packet != null) {
            if (isControlStream) pendingControlPacket = packet;
            else pendingDataPacket = packet;

            if (transferHandler != null) {
              transferHandler.transferData(pushSourceStream);
            }
          }
        }

        return length;
      }
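      // Hedged note on write() above: reverseTransform() may fan a single
      // input out into several RawPackets, hence the loop over the whole
      // array; index 0 is deliberately kept in rawPacketArray so that its
      // buffer can be reused on the next call.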
    /**
     * Adds a specific (RTP) SSRC to the list of SSRCs seen/received on this <tt>Channel</tt>.
     * Invoked by the Jitsi Videobridge server, not its clients.
     *
     * @param ssrc the (RTP) SSRC to be added to the list of SSRCs seen/received on this
     *     <tt>Channel</tt>
     * @return <tt>true</tt> if the list of SSRCs seen/received on this <tt>Channel</tt> has been
     *     modified as part of the method call; otherwise, <tt>false</tt>
     */
    public synchronized boolean addSSRC(int ssrc) {
      // contains
      for (int i = 0; i < ssrcs.length; i++) if (ssrcs[i] == ssrc) return false;

      // add
      int[] newSSRCs = new int[ssrcs.length + 1];

      System.arraycopy(ssrcs, 0, newSSRCs, 0, ssrcs.length);
      newSSRCs[ssrcs.length] = ssrc;
      ssrcs = newSSRCs;
      return true;
    }
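    // Hedged standalone equivalent of the copy-on-write append above.
    // Keeping the list as a plain int[] avoids Integer boxing, and since
    // SSRC additions are rare, the O(n) scan and full copy are acceptable.
    static int[] addUnique(int[] ssrcs, int ssrc) {
      for (int existing : ssrcs) if (existing == ssrc) return ssrcs;
      int[] grown = java.util.Arrays.copyOf(ssrcs, ssrcs.length + 1);
      grown[ssrcs.length] = ssrc;
      return grown;
    }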
// Example #7
 /**
  * Implements {@link ActiveSpeakerChangedListener#activeSpeakerChanged(long)}. Notifies this
  * <tt>RecorderRtpImpl</tt> that the audio <tt>ReceiveStream</tt> considered active has changed,
  * and that the new active stream has SSRC <tt>ssrc</tt>.
  *
  * @param ssrc the SSRC of the new active stream.
  */
 @Override
 public void activeSpeakerChanged(long ssrc) {
   if (eventHandler != null) {
     RecorderEvent e = new RecorderEvent();
     e.setAudioSsrc(ssrc);
     // TODO: how do we time this?
     e.setInstant(System.currentTimeMillis());
     e.setType(RecorderEvent.Type.SPEAKER_CHANGED);
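      // Note: MediaType.VIDEO (rather than AUDIO) is presumably
      // intentional here: the speaker-change event drives the composed
      // video recording.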
     e.setMediaType(MediaType.VIDEO);
     eventHandler.handleEvent(e);
   }
 }
// Example #8
  /**
   * Encapsulates {@code pkt} in the RTX format, using {@code rtxSsrc} as its SSRC, and transmits it
   * to {@link #channel} by injecting it in the {@code MediaStream}.
   *
   * @param pkt the packet to transmit.
   * @param rtxSsrc the SSRC for the RTX stream.
   * @param after the {@code TransformEngine} in the chain of {@code TransformEngine}s of the
   *     associated {@code MediaStream} after which the injection of {@code pkt} is to begin
   * @return {@code true} if the packet was successfully retransmitted, {@code false} otherwise.
   */
  private boolean encapsulateInRtxAndTransmit(RawPacket pkt, long rtxSsrc, TransformEngine after) {
    byte[] buf = pkt.getBuffer();
    int len = pkt.getLength();
    int off = pkt.getOffset();

    byte[] newBuf = new byte[len + 2];
    RawPacket rtxPkt = new RawPacket(newBuf, 0, len + 2);

    int osn = pkt.getSequenceNumber();
    int headerLength = pkt.getHeaderLength();
    int payloadLength = pkt.getPayloadLength();

    // Copy the header.
    System.arraycopy(buf, off, newBuf, 0, headerLength);

    // Set the OSN field.
    newBuf[headerLength] = (byte) ((osn >> 8) & 0xff);
    newBuf[headerLength + 1] = (byte) (osn & 0xff);

    // Copy the payload.
    System.arraycopy(buf, off + headerLength, newBuf, headerLength + 2, payloadLength);

    MediaStream mediaStream = channel.getStream();
    if (mediaStream != null) {
      rtxPkt.setSSRC((int) rtxSsrc);
      rtxPkt.setPayloadType(rtxPayloadType);
      // Only call getNextRtxSequenceNumber() when we're sure we're going
      // to transmit a packet, because it consumes a sequence number.
      rtxPkt.setSequenceNumber(getNextRtxSequenceNumber(rtxSsrc));
      try {
        mediaStream.injectPacket(rtxPkt, /* data */ true, after);
      } catch (TransmissionFailedException tfe) {
        logger.warn("Failed to transmit an RTX packet.");
        return false;
      }
    }

    return true;
  }
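  // Hedged standalone sketch of the RTX payload layout built above (RFC
  // 4588): the copied RTP header, a two-byte original sequence number
  // (OSN), then the original payload.
  static byte[] wrapInRtx(byte[] rtp, int headerLength, int osn) {
    byte[] rtx = new byte[rtp.length + 2];
    System.arraycopy(rtp, 0, rtx, 0, headerLength); // RTP header
    rtx[headerLength] = (byte) ((osn >> 8) & 0xff); // OSN, high byte
    rtx[headerLength + 1] = (byte) (osn & 0xff); // OSN, low byte
    System.arraycopy(
        rtp, headerLength, rtx, headerLength + 2, rtp.length - headerLength); // payload
    return rtx;
  }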
    /**
     * Removes a specific (RTP) SSRC from the list of SSRCs seen/received on this <tt>Channel</tt>.
     * Invoked by the Jitsi Videobridge server, not its clients.
     *
     * @param ssrc the (RTP) SSRC to be removed from the list of SSRCs seen/received on this
     *     <tt>Channel</tt>
     * @return <tt>true</tt> if the list of SSRCs seen/received on this <tt>Channel</tt> has been
     *     modified as part of the method call; otherwise, <tt>false</tt>
     */
    public synchronized boolean removeSSRC(int ssrc) {
      if (ssrcs.length == 1) {
        if (ssrcs[0] == ssrc) {
          ssrcs = NO_SSRCS;
          return true;
        } else return false;
      } else {
        for (int i = 0; i < ssrcs.length; i++) {
          if (ssrcs[i] == ssrc) {
            int[] newSSRCs = new int[ssrcs.length - 1];

            if (i != 0) System.arraycopy(ssrcs, 0, newSSRCs, 0, i);
            if (i != newSSRCs.length) {
              System.arraycopy(ssrcs, i + 1, newSSRCs, i, newSSRCs.length - i);
            }
            ssrcs = newSSRCs;
            return true;
          }
        }
        return false;
      }
    }
// Example #10
  static {
    ConfigurationService cfg = LibJitsi.getConfigurationService();
    boolean dropUnencryptedPkts = false;

    if (cfg == null) {
      String s = System.getProperty(DROP_UNENCRYPTED_PKTS_PNAME);

      if (s != null) dropUnencryptedPkts = Boolean.parseBoolean(s);
    } else {
      dropUnencryptedPkts = cfg.getBoolean(DROP_UNENCRYPTED_PKTS_PNAME, dropUnencryptedPkts);
    }
    DROP_UNENCRYPTED_PKTS = dropUnencryptedPkts;
  }
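  // Hedged generalization of the lookup order above (the helper name is
  // hypothetical): prefer the ConfigurationService when libjitsi provides
  // one, otherwise fall back to a plain system property.
  static boolean getBooleanProperty(String pname, boolean defaultValue) {
    ConfigurationService cfg = LibJitsi.getConfigurationService();
    if (cfg != null) return cfg.getBoolean(pname, defaultValue);
    String s = System.getProperty(pname);
    return (s == null) ? defaultValue : Boolean.parseBoolean(s);
  }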
// Example #11
  /**
   * Encapsulates {@code pkt} in the RTX format, using {@code rtxSsrc} as its SSRC, and transmits it
   * to {@link #channel} by injecting it in the {@code MediaStream}.
   *
   * @param pkt the packet to transmit.
   * @param rtxSsrc the SSRC for the RTX stream.
   * @return {@code true} if the packet was successfully retransmitted, {@code false} otherwise.
   */
  private boolean encapsulateInRtxAndTransmit(RawPacket pkt, long rtxSsrc) {
    byte[] buf = pkt.getBuffer();
    int len = pkt.getLength();
    int off = pkt.getOffset();
    byte[] newBuf = buf;
    if (buf.length < len + 2) {
      // FIXME The byte array newly allocated and assigned to newBuf must
      // be made known to pkt eventually.
      newBuf = new byte[len + 2];
    }

    int osn = pkt.getSequenceNumber();
    int headerLength = pkt.getHeaderLength();
    int payloadLength = len - headerLength;
    System.arraycopy(buf, off, newBuf, 0, headerLength);
    // FIXME If newBuf is actually buf, then we will overwrite the first two
    // bytes of the payload below.
    newBuf[headerLength] = (byte) ((osn >> 8) & 0xff);
    newBuf[headerLength + 1] = (byte) (osn & 0xff);
    System.arraycopy(buf, off + headerLength, newBuf, headerLength + 2, payloadLength);
    // FIXME We tried to extend the payload of pkt by two bytes above but
    // we never told pkt that its length has increased by these two bytes.

    MediaStream mediaStream = channel.getStream();
    if (mediaStream != null) {
      pkt.setSSRC((int) rtxSsrc);
      // Only call getNextRtxSequenceNumber() when we're sure we're going
      // to transmit a packet, because it consumes a sequence number.
      pkt.setSequenceNumber(getNextRtxSequenceNumber(rtxSsrc));
      try {
        mediaStream.injectPacket(pkt, /* data */ true, /* after */ null);
      } catch (TransmissionFailedException tfe) {
        logger.warn("Failed to transmit an RTX packet.");
        return false;
      }
    }

    return true;
  }
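  // Hedged sketch addressing the FIXMEs above: always allocate a fresh
  // buffer, hand it back to pkt, and grow the length by the two OSN bytes
  // so that the extended payload becomes visible downstream:
  //
  //   byte[] newBuf = new byte[len + 2];
  //   ... copy the header, OSN and payload as above ...
  //   pkt.setBuffer(newBuf);
  //   pkt.setOffset(0);
  //   pkt.setLength(len + 2);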
// Example #12
      @Override
      public int read(byte[] buffer, int offset, int length) throws IOException {

        RawPacket pendingPacket;
        if (isControlStream) {
          pendingPacket = pendingControlPacket;
        } else {
          pendingPacket = pendingDataPacket;
        }
        int bytesToRead = 0;
        byte[] pendingPacketBuffer = pendingPacket.getBuffer();
        if (pendingPacketBuffer != null) {
          int pendingPacketLength = pendingPacket.getLength();
          bytesToRead = length > pendingPacketLength ? pendingPacketLength : length;
          System.arraycopy(
              pendingPacketBuffer, pendingPacket.getOffset(), buffer, offset, bytesToRead);
        }
        return bytesToRead;
      }
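      // Hedged note on read() above: bytesToRead is clamped to the
      // caller's buffer, so a packet longer than `length` is silently
      // truncated; callers are presumably expected to pass buffers sized
      // for the largest packet they accept.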
// Example #13
  /**
   * Removes the RTX encapsulation from a packet.
   *
   * @param pkt the packet to remove the RTX encapsulation from.
   * @return the original media packet represented by {@code pkt}, or null if we couldn't
   *     reconstruct the original packet.
   */
  private RawPacket deRtx(RawPacket pkt) {
    boolean success = false;

    if (pkt.getPayloadLength() - pkt.getPaddingSize() < 2) {
      // We need at least 2 bytes to read the OSN field.
      if (logger.isDebugEnabled()) {
        logger.debug("Dropping an incoming RTX packet with padding only: " + pkt);
      }
      return null;
    }

    long mediaSsrc = getPrimarySsrc(pkt);
    if (mediaSsrc != -1) {
      if (rtxAssociatedPayloadType != -1) {
        int osn = pkt.getOriginalSequenceNumber();
        // Remove the RTX header by moving the RTP header two bytes
        // right.
        byte[] buf = pkt.getBuffer();
        int off = pkt.getOffset();
        System.arraycopy(buf, off, buf, off + 2, pkt.getHeaderLength());

        pkt.setOffset(off + 2);
        pkt.setLength(pkt.getLength() - 2);

        pkt.setSSRC((int) mediaSsrc);
        pkt.setSequenceNumber(osn);
        pkt.setPayloadType(rtxAssociatedPayloadType);
        success = true;
      } else {
        logger.warn(
            "RTX packet received, but no APT is defined. Packet "
                + "SSRC "
                + pkt.getSSRCAsLong()
                + ", associated media"
                + " SSRC "
                + mediaSsrc);
      }
    }

    // If we failed to handle the RTX packet, drop it.
    return success ? pkt : null;
  }
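  // Hedged companion to deRtx(): reading the OSN directly out of the RTX
  // payload (the two bytes right after the RTP header), which is what
  // getOriginalSequenceNumber() is presumed to do.
  static int readOriginalSequenceNumber(byte[] buf, int off, int headerLength) {
    return ((buf[off + headerLength] & 0xff) << 8) | (buf[off + headerLength + 1] & 0xff);
  }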
// Example #14
  /**
   * Creates a {@code RawPacket} which represents the original packet encapsulated in {@code pkt}
   * using the RTX format.
   *
   * @param pkt the packet from which to extract a media packet.
   * @return the extracted media packet.
   */
  private RawPacket createMediaPacket(RawPacket pkt) {
    RawPacket mediaPacket = null;
    long rtxSsrc = pkt.getSSRC() & 0xffffffffL;

    // We need to know the SSRC paired with rtxSsrc *as seen by the
    // receiver (i.e. this.channel)*. However, we only store SSRCs
    // that endpoints *send* with.
    // We therefore assume that SSRC re-writing has not introduced any
    // new SSRCs, and that the FID mappings known to the senders
    // therefore also apply to receivers.
    RtpChannel sourceChannel = channel.getContent().findChannelByFidSsrc(rtxSsrc);
    if (sourceChannel != null) {
      long mediaSsrc = sourceChannel.getFidPairedSsrc(rtxSsrc);
      if (mediaSsrc != -1) {
        byte apt = sourceChannel.getRtxAssociatedPayloadType();
        if (apt != -1) {
          mediaPacket = new RawPacket(pkt.getBuffer().clone(), pkt.getOffset(), pkt.getLength());

          // Remove the RTX header by moving the RTP header two bytes
          // right.
          byte[] buf = mediaPacket.getBuffer();
          int off = mediaPacket.getOffset();
          System.arraycopy(buf, off, buf, off + 2, mediaPacket.getHeaderLength());

          mediaPacket.setOffset(off + 2);
          mediaPacket.setLength(pkt.getLength() - 2);

          mediaPacket.setSSRC((int) mediaSsrc);
          mediaPacket.setSequenceNumber(pkt.getOriginalSequenceNumber());
          mediaPacket.setPayloadType(apt);
        }
      }
    }

    return mediaPacket;
  }
  /**
   * Makes <tt>RTCPRRPacket</tt>s using information in FMJ.
   *
   * @param time
   * @return A <tt>Collection</tt> of <tt>RTCPRRPacket</tt>s to inject to the <tt>MediaStream</tt>.
   */
  private Collection<RTCPRRPacket> makeRTCPRRPackets(long time) {
    RTCPReportBlock[] reportBlocks = makeRTCPReportBlocks(time);
    if (reportBlocks == null || reportBlocks.length == 0) {
      return null;
    }

    Collection<RTCPRRPacket> rrPackets = new ArrayList<RTCPRRPacket>();

    // We use the stream's local source ID (SSRC) as the SSRC of packet
    // sender.
    long streamSSRC = getLocalSSRC();

    // Since a maximum of 31 reception report blocks will fit in an SR
    // or RR packet, additional RR packets SHOULD be stacked after the
    // initial SR or RR packet as needed to contain the reception
    // reports for all sources heard during the interval since the last
    // report.
    if (reportBlocks.length > MAX_RTCP_REPORT_BLOCKS) {
      for (int offset = 0; offset < reportBlocks.length; offset += MAX_RTCP_REPORT_BLOCKS) {
        RTCPReportBlock[] blocks =
            (reportBlocks.length - offset < MAX_RTCP_REPORT_BLOCKS)
                ? new RTCPReportBlock[reportBlocks.length - offset]
                : MAX_RTCP_REPORT_BLOCKS_ARRAY;

        System.arraycopy(reportBlocks, offset, blocks, 0, blocks.length);

        RTCPRRPacket rr = new RTCPRRPacket((int) streamSSRC, blocks);
        rrPackets.add(rr);
      }
    } else {
      RTCPRRPacket rr = new RTCPRRPacket((int) streamSSRC, reportBlocks);
      rrPackets.add(rr);
    }

    return rrPackets;
  }
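  // Hedged alternative to the splitting loop above (RFC 3550 allows at
  // most 31 report blocks per RR packet): allocating per chunk with
  // copyOfRange sidesteps the shared MAX_RTCP_REPORT_BLOCKS_ARRAY scratch
  // array entirely.
  static List<RTCPReportBlock[]> chunkReportBlocks(RTCPReportBlock[] blocks, int maxPerPacket) {
    List<RTCPReportBlock[]> chunks = new ArrayList<RTCPReportBlock[]>();
    for (int off = 0; off < blocks.length; off += maxPerPacket) {
      int end = Math.min(off + maxPerPacket, blocks.length);
      chunks.add(java.util.Arrays.copyOfRange(blocks, off, end));
    }
    return chunks;
  }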
// Example #16
  private void runOnDtlsTransport(StreamConnector connector) throws IOException {
    DtlsControlImpl dtlsControl = (DtlsControlImpl) getTransportManager().getDtlsControl(this);
    DtlsTransformEngine engine = dtlsControl.getTransformEngine();
    final DtlsPacketTransformer transformer = (DtlsPacketTransformer) engine.getRTPTransformer();

    byte[] receiveBuffer = new byte[SCTP_BUFFER_SIZE];

    if (LOG_SCTP_PACKETS) {
      System.setProperty(
          ConfigurationService.PNAME_SC_HOME_DIR_LOCATION, System.getProperty("java.io.tmpdir"));
      System.setProperty(
          ConfigurationService.PNAME_SC_HOME_DIR_NAME, SctpConnection.class.getName());
    }

    synchronized (this) {
      // FIXME local SCTP port is hardcoded in bridge offer SDP (Jitsi
      // Meet)
      sctpSocket = Sctp.createSocket(5000);
      assocIsUp = false;
      acceptedIncomingConnection = false;
    }

    // Implement output network link for SCTP stack on DTLS transport
    sctpSocket.setLink(
        new NetworkLink() {
          @Override
          public void onConnOut(SctpSocket s, byte[] packet) throws IOException {
            if (LOG_SCTP_PACKETS) {
              LibJitsi.getPacketLoggingService()
                  .logPacket(
                      PacketLoggingService.ProtocolName.ICE4J,
                      new byte[] {0, 0, 0, (byte) debugId},
                      5000,
                      new byte[] {0, 0, 0, (byte) (debugId + 1)},
                      remoteSctpPort,
                      PacketLoggingService.TransportName.UDP,
                      true,
                      packet);
            }

            // Send through DTLS transport
            transformer.sendApplicationData(packet, 0, packet.length);
          }
        });

    if (logger.isDebugEnabled()) {
      logger.debug("Connecting SCTP to port: " + remoteSctpPort + " to " + getEndpoint().getID());
    }

    sctpSocket.setNotificationListener(this);
    sctpSocket.listen();

    // FIXME manage threads
    threadPool.execute(
        new Runnable() {
          @Override
          public void run() {
            SctpSocket sctpSocket = null;
            try {
              // sctpSocket is set to null on close
              sctpSocket = SctpConnection.this.sctpSocket;
              while (sctpSocket != null) {
                if (sctpSocket.accept()) {
                  acceptedIncomingConnection = true;
                  break;
                }
                Thread.sleep(100);
                sctpSocket = SctpConnection.this.sctpSocket;
              }
              if (isReady()) {
                notifySctpConnectionReady();
              }
            } catch (Exception e) {
              logger.error("Error accepting SCTP connection", e);
            }

            if (sctpSocket == null && logger.isInfoEnabled()) {
              logger.info(
                  "SctpConnection " + getID() + " closed" + " before SctpSocket accept()-ed.");
            }
          }
        });

    // From now on, the SCTP connection is considered functional; register
    // the data callback.
    sctpSocket.setDataCallback(this);

    // Setup iceSocket
    DatagramSocket datagramSocket = connector.getDataSocket();
    if (datagramSocket != null) {
      this.iceSocket = new IceUdpSocketWrapper(datagramSocket);
    } else {
      this.iceSocket = new IceTcpSocketWrapper(connector.getDataTCPSocket());
    }

    DatagramPacket rcvPacket = new DatagramPacket(receiveBuffer, 0, receiveBuffer.length);

    // Receive loop, breaks when SCTP socket is closed
    try {
      do {
        iceSocket.receive(rcvPacket);

        RawPacket raw =
            new RawPacket(rcvPacket.getData(), rcvPacket.getOffset(), rcvPacket.getLength());

        raw = transformer.reverseTransform(raw);
        // Check for app data
        if (raw == null) continue;

        if (LOG_SCTP_PACKETS) {
          LibJitsi.getPacketLoggingService()
              .logPacket(
                  PacketLoggingService.ProtocolName.ICE4J,
                  new byte[] {0, 0, 0, (byte) (debugId + 1)},
                  remoteSctpPort,
                  new byte[] {0, 0, 0, (byte) debugId},
                  5000,
                  PacketLoggingService.TransportName.UDP,
                  false,
                  raw.getBuffer(),
                  raw.getOffset(),
                  raw.getLength());
        }

        // Pass network packet to SCTP stack
        sctpSocket.onConnIn(raw.getBuffer(), raw.getOffset(), raw.getLength());
      } while (true);
    } finally {
      // Eventually, close the socket although it should happen from
      // expire().
      synchronized (this) {
        assocIsUp = false;
        acceptedIncomingConnection = false;
        if (sctpSocket != null) {
          sctpSocket.close();
          sctpSocket = null;
        }
      }
    }
  }
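  // Hedged recap of the data path above: outbound SCTP packets leave via
  // NetworkLink.onConnOut() -> transformer.sendApplicationData(), while
  // inbound datagrams arrive via iceSocket.receive() ->
  // transformer.reverseTransform() -> sctpSocket.onConnIn(). The SCTP
  // stack itself never touches the wire; DTLS frames everything.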
// Example #17
  /**
   * Creates the video advanced settings.
   *
   * @return video advanced settings panel.
   */
  private static Component createVideoAdvancedSettings() {
    ResourceManagementService resources = NeomediaActivator.getResources();

    final DeviceConfiguration deviceConfig = mediaService.getDeviceConfiguration();

    TransparentPanel centerPanel = new TransparentPanel(new GridBagLayout());
    centerPanel.setMaximumSize(new Dimension(WIDTH, 150));

    JButton resetDefaultsButton =
        new JButton(resources.getI18NString("impl.media.configform.VIDEO_RESET"));
    JPanel resetButtonPanel = new TransparentPanel(new FlowLayout(FlowLayout.RIGHT));
    resetButtonPanel.add(resetDefaultsButton);

    final JPanel centerAdvancedPanel = new TransparentPanel(new BorderLayout());
    centerAdvancedPanel.add(centerPanel, BorderLayout.NORTH);
    centerAdvancedPanel.add(resetButtonPanel, BorderLayout.SOUTH);

    GridBagConstraints constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.anchor = GridBagConstraints.NORTHWEST;
    constraints.insets = new Insets(5, 5, 0, 0);
    constraints.gridx = 0;
    constraints.weightx = 0;
    constraints.weighty = 0;
    constraints.gridy = 0;

    centerPanel.add(
        new JLabel(resources.getI18NString("impl.media.configform.VIDEO_RESOLUTION")), constraints);
    constraints.gridy = 1;
    constraints.insets = new Insets(0, 0, 0, 0);
    final JCheckBox frameRateCheck =
        new SIPCommCheckBox(resources.getI18NString("impl.media.configform.VIDEO_FRAME_RATE"));
    centerPanel.add(frameRateCheck, constraints);
    constraints.gridy = 2;
    constraints.insets = new Insets(5, 5, 0, 0);
    centerPanel.add(
        new JLabel(resources.getI18NString("impl.media.configform.VIDEO_PACKETS_POLICY")),
        constraints);

    constraints.weightx = 1;
    constraints.gridx = 1;
    constraints.gridy = 0;
    constraints.insets = new Insets(5, 0, 0, 5);
    Object[] resolutionValues = new Object[DeviceConfiguration.SUPPORTED_RESOLUTIONS.length + 1];
    System.arraycopy(
        DeviceConfiguration.SUPPORTED_RESOLUTIONS,
        0,
        resolutionValues,
        1,
        DeviceConfiguration.SUPPORTED_RESOLUTIONS.length);
    final JComboBox sizeCombo = new JComboBox(resolutionValues);
    sizeCombo.setRenderer(new ResolutionCellRenderer());
    sizeCombo.setEditable(false);
    centerPanel.add(sizeCombo, constraints);

    // default value is 20
    final JSpinner frameRate = new JSpinner(new SpinnerNumberModel(20, 5, 30, 1));
    frameRate.addChangeListener(
        new ChangeListener() {
          public void stateChanged(ChangeEvent e) {
            deviceConfig.setFrameRate(
                ((SpinnerNumberModel) frameRate.getModel()).getNumber().intValue());
          }
        });
    constraints.gridy = 1;
    constraints.insets = new Insets(0, 0, 0, 5);
    centerPanel.add(frameRate, constraints);

    frameRateCheck.addActionListener(
        new ActionListener() {
          public void actionPerformed(ActionEvent e) {
            if (frameRateCheck.isSelected()) {
              deviceConfig.setFrameRate(
                  ((SpinnerNumberModel) frameRate.getModel()).getNumber().intValue());
            } else {
              // unlimited framerate
              deviceConfig.setFrameRate(-1);
            }

            frameRate.setEnabled(frameRateCheck.isSelected());
          }
        });

    final JSpinner videoMaxBandwidth =
        new JSpinner(
            new SpinnerNumberModel(deviceConfig.getVideoMaxBandwidth(), 1, Integer.MAX_VALUE, 1));
    videoMaxBandwidth.addChangeListener(
        new ChangeListener() {
          public void stateChanged(ChangeEvent e) {
            deviceConfig.setVideoMaxBandwidth(
                ((SpinnerNumberModel) videoMaxBandwidth.getModel()).getNumber().intValue());
          }
        });
    constraints.gridx = 1;
    constraints.gridy = 2;
    constraints.insets = new Insets(0, 0, 5, 5);
    centerPanel.add(videoMaxBandwidth, constraints);

    resetDefaultsButton.addActionListener(
        new ActionListener() {
          public void actionPerformed(ActionEvent e) {
            // reset to defaults
            sizeCombo.setSelectedIndex(0);
            frameRateCheck.setSelected(false);
            frameRate.setEnabled(false);
            frameRate.setValue(20);
            // unlimited framerate
            deviceConfig.setFrameRate(-1);
            videoMaxBandwidth.setValue(DeviceConfiguration.DEFAULT_VIDEO_MAX_BANDWIDTH);
          }
        });

    // load selected value or auto
    Dimension videoSize = deviceConfig.getVideoSize();

    if ((videoSize.getHeight() != DeviceConfiguration.DEFAULT_VIDEO_HEIGHT)
        && (videoSize.getWidth() != DeviceConfiguration.DEFAULT_VIDEO_WIDTH))
      sizeCombo.setSelectedItem(deviceConfig.getVideoSize());
    else sizeCombo.setSelectedIndex(0);
    sizeCombo.addActionListener(
        new ActionListener() {
          public void actionPerformed(ActionEvent e) {
            Dimension selectedVideoSize = (Dimension) sizeCombo.getSelectedItem();

            if (selectedVideoSize == null) {
              // the auto value, default one
              selectedVideoSize =
                  new Dimension(
                      DeviceConfiguration.DEFAULT_VIDEO_WIDTH,
                      DeviceConfiguration.DEFAULT_VIDEO_HEIGHT);
            }
            deviceConfig.setVideoSize(selectedVideoSize);
          }
        });

    frameRateCheck.setSelected(
        deviceConfig.getFrameRate() != DeviceConfiguration.DEFAULT_VIDEO_FRAMERATE);
    frameRate.setEnabled(frameRateCheck.isSelected());

    if (frameRate.isEnabled()) frameRate.setValue(deviceConfig.getFrameRate());

    return centerAdvancedPanel;
  }
 /**
  * Ctor.
  *
  * @param ssrc
  * @param remoteTime
  * @param rtpTimestamp
  * @param frequencyHz
  */
 ReceivedRemoteClock(int ssrc, long remoteTime, int rtpTimestamp, int frequencyHz) {
   this.ssrc = ssrc;
   this.remoteClock = new RemoteClock(remoteTime, rtpTimestamp);
   this.frequencyHz = frequencyHz;
   this.receivedTime = System.currentTimeMillis();
 }
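 // Hedged helper showing why frequencyHz is stored next to the clock pair:
 // an RTP timestamp delta divided by the media clock rate yields wall-clock
 // milliseconds, e.g. 4800 ticks at 48000 Hz -> 100 ms.
 static long rtpDeltaToMillis(long rtpTimestampDelta, int frequencyHz) {
   return 1000L * rtpTimestampDelta / frequencyHz;
 }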
// Example #19
  /**
   * Initializes a new <tt>SRTPTransformer</tt> instance with a specific (negotiated)
   * <tt>SRTPProtectionProfile</tt> and the keying material specified by a specific
   * <tt>TlsContext</tt>.
   *
   * @param srtpProtectionProfile the (negotiated) <tt>SRTPProtectionProfile</tt> to initialize the
   *     new instance with
   * @param tlsContext the <tt>TlsContext</tt> which represents the keying material
   * @return a new <tt>SRTPTransformer</tt> instance initialized with <tt>srtpProtectionProfile</tt>
   *     and <tt>tlsContext</tt>
   */
  private SinglePacketTransformer initializeSRTPTransformer(
      int srtpProtectionProfile, TlsContext tlsContext) {
    boolean rtcp;

    switch (componentID) {
      case Component.RTCP:
        rtcp = true;
        break;
      case Component.RTP:
        rtcp = false;
        break;
      default:
        throw new IllegalStateException("componentID");
    }

    int cipher_key_length;
    int cipher_salt_length;
    int cipher;
    int auth_function;
    int auth_key_length;
    int RTCP_auth_tag_length, RTP_auth_tag_length;

    switch (srtpProtectionProfile) {
      case SRTPProtectionProfile.SRTP_AES128_CM_HMAC_SHA1_32:
        cipher_key_length = 128 / 8;
        cipher_salt_length = 112 / 8;
        cipher = SRTPPolicy.AESCM_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = 80 / 8;
        RTP_auth_tag_length = 32 / 8;
        break;
      case SRTPProtectionProfile.SRTP_AES128_CM_HMAC_SHA1_80:
        cipher_key_length = 128 / 8;
        cipher_salt_length = 112 / 8;
        cipher = SRTPPolicy.AESCM_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = RTP_auth_tag_length = 80 / 8;
        break;
      case SRTPProtectionProfile.SRTP_NULL_HMAC_SHA1_32:
        cipher_key_length = 0;
        cipher_salt_length = 0;
        cipher = SRTPPolicy.NULL_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = 80 / 8;
        RTP_auth_tag_length = 32 / 8;
        break;
      case SRTPProtectionProfile.SRTP_NULL_HMAC_SHA1_80:
        cipher_key_length = 0;
        cipher_salt_length = 0;
        cipher = SRTPPolicy.NULL_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = RTP_auth_tag_length = 80 / 8;
        break;
      default:
        throw new IllegalArgumentException("srtpProtectionProfile");
    }

    byte[] keyingMaterial =
        tlsContext.exportKeyingMaterial(
            ExporterLabel.dtls_srtp, null, 2 * (cipher_key_length + cipher_salt_length));
    byte[] client_write_SRTP_master_key = new byte[cipher_key_length];
    byte[] server_write_SRTP_master_key = new byte[cipher_key_length];
    byte[] client_write_SRTP_master_salt = new byte[cipher_salt_length];
    byte[] server_write_SRTP_master_salt = new byte[cipher_salt_length];
    byte[][] keyingMaterialValues = {
      client_write_SRTP_master_key,
      server_write_SRTP_master_key,
      client_write_SRTP_master_salt,
      server_write_SRTP_master_salt
    };

    for (int i = 0, keyingMaterialOffset = 0; i < keyingMaterialValues.length; i++) {
      byte[] keyingMaterialValue = keyingMaterialValues[i];

      System.arraycopy(
          keyingMaterial, keyingMaterialOffset, keyingMaterialValue, 0, keyingMaterialValue.length);
      keyingMaterialOffset += keyingMaterialValue.length;
    }

    SRTPPolicy srtcpPolicy =
        new SRTPPolicy(
            cipher,
            cipher_key_length,
            auth_function,
            auth_key_length,
            RTCP_auth_tag_length,
            cipher_salt_length);
    SRTPPolicy srtpPolicy =
        new SRTPPolicy(
            cipher,
            cipher_key_length,
            auth_function,
            auth_key_length,
            RTP_auth_tag_length,
            cipher_salt_length);
    SRTPContextFactory clientSRTPContextFactory =
        new SRTPContextFactory(
            /* sender */ tlsContext instanceof TlsClientContext,
            client_write_SRTP_master_key,
            client_write_SRTP_master_salt,
            srtpPolicy,
            srtcpPolicy);
    SRTPContextFactory serverSRTPContextFactory =
        new SRTPContextFactory(
            /* sender */ tlsContext instanceof TlsServerContext,
            server_write_SRTP_master_key,
            server_write_SRTP_master_salt,
            srtpPolicy,
            srtcpPolicy);
    SRTPContextFactory forwardSRTPContextFactory;
    SRTPContextFactory reverseSRTPContextFactory;

    if (tlsContext instanceof TlsClientContext) {
      forwardSRTPContextFactory = clientSRTPContextFactory;
      reverseSRTPContextFactory = serverSRTPContextFactory;
    } else if (tlsContext instanceof TlsServerContext) {
      forwardSRTPContextFactory = serverSRTPContextFactory;
      reverseSRTPContextFactory = clientSRTPContextFactory;
    } else {
      throw new IllegalArgumentException("tlsContext");
    }

    SinglePacketTransformer srtpTransformer;

    if (rtcp) {
      srtpTransformer = new SRTCPTransformer(forwardSRTPContextFactory, reverseSRTPContextFactory);
    } else {
      srtpTransformer = new SRTPTransformer(forwardSRTPContextFactory, reverseSRTPContextFactory);
    }
    return srtpTransformer;
  }
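  // Hedged recap of the keying-material layout consumed above (RFC 5764
  // section 4.2): the exporter output is partitioned as client key |
  // server key | client salt | server salt, which is exactly the order of
  // keyingMaterialValues.
  static int[] keyingMaterialOffsets(int keyLen, int saltLen) {
    // Start offsets of {clientKey, serverKey, clientSalt, serverSalt}.
    return new int[] {0, keyLen, 2 * keyLen, 2 * keyLen + saltLen};
  }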
  /**
   * Starts the execution of the neomedia bundle in the specified context.
   *
   * @param bundleContext the context in which the neomedia bundle is to start executing
   * @throws Exception if an error occurs while starting the execution of the neomedia bundle in the
   *     specified context
   */
  public void start(BundleContext bundleContext) throws Exception {
    if (logger.isDebugEnabled()) logger.debug("Started.");

    NeomediaActivator.bundleContext = bundleContext;

    // MediaService
    mediaServiceImpl = (MediaServiceImpl) LibJitsi.getMediaService();

    bundleContext.registerService(MediaService.class.getName(), mediaServiceImpl, null);
    if (logger.isDebugEnabled()) logger.debug("Media Service ... [REGISTERED]");

    //        mediaConfiguration = new MediaConfigurationImpl();
    //        bundleContext.registerService(
    //                MediaConfigurationService.class.getName(),
    //                getMediaConfiguration(),
    //                null);
    if (logger.isDebugEnabled()) logger.debug("Media Configuration ... [REGISTERED]");

    ConfigurationService cfg = NeomediaActivator.getConfigurationService();
    Dictionary<String, String> mediaProps = new Hashtable<String, String>();

    mediaProps.put(ConfigurationForm.FORM_TYPE, ConfigurationForm.GENERAL_TYPE);

    // If the audio configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(AUDIO_CONFIG_DISABLED_PROP, false))
    //        {
    //            audioConfigurationForm
    //                = new LazyConfigurationForm(
    //                        AudioConfigurationPanel.class.getName(),
    //                        getClass().getClassLoader(),
    //                        "plugin.mediaconfig.AUDIO_ICON",
    //                        "impl.neomedia.configform.AUDIO",
    //                        3);
    //
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getName(),
    //                    audioConfigurationForm,
    //                    mediaProps);
    //
    //            if (deviceConfigurationPropertyChangeListener == null)
    //            {
    //                // Initializes and registers the changed device configuration
    //                // event to the notification service.
    //                getNotificationService();
    //
    //                deviceConfigurationPropertyChangeListener
    //                    = new AudioDeviceConfigurationListener();
    //                mediaServiceImpl
    //                    .getDeviceConfiguration()
    //                        .addPropertyChangeListener(
    //                                deviceConfigurationPropertyChangeListener);
    //            }
    //        }

    // If the video configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(VIDEO_CONFIG_DISABLED_PROP, false))
    //        {
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getName(),
    //                    new LazyConfigurationForm(
    //                            VideoConfigurationPanel.class.getName(),
    //                            getClass().getClassLoader(),
    //                            "plugin.mediaconfig.VIDEO_ICON",
    //                            "impl.neomedia.configform.VIDEO",
    //                            4),
    //                    mediaProps);
    //        }

    // H.264
    // If the H.264 configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(H264_CONFIG_DISABLED_PROP, false))
    //        {
    //            Dictionary<String, String> h264Props
    //                = new Hashtable<String, String>();
    //
    //            h264Props.put(
    //                    ConfigurationForm.FORM_TYPE,
    //                    ConfigurationForm.ADVANCED_TYPE);
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getName(),
    //                    new LazyConfigurationForm(
    //                            ConfigurationPanel.class.getName(),
    //                            getClass().getClassLoader(),
    //                            "plugin.mediaconfig.VIDEO_ICON",
    //                            "impl.neomedia.configform.H264",
    //                            -1,
    //                            true),
    //                    h264Props);
    //        }

    // ZRTP
    // If the ZRTP configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(ZRTP_CONFIG_DISABLED_PROP, false))
    //        {
    //            Dictionary<String, String> securityProps
    //                = new Hashtable<String, String>();
    //
    //            securityProps.put( ConfigurationForm.FORM_TYPE,
    //                            ConfigurationForm.SECURITY_TYPE);
    //            bundleContext.registerService(
    //                ConfigurationForm.class.getName(),
    //                new LazyConfigurationForm(
    //                    SecurityConfigForm.class.getName(),
    //                    getClass().getClassLoader(),
    //                    "impl.media.security.zrtp.CONF_ICON",
    //                    "impl.media.security.zrtp.TITLE",
    //                    0),
    //                securityProps);
    //        }

    // We use the nist-sdp stack to parse SDP, and we need to set the
    // following property to make sure that it accepts Java-generated
    // IPv6 addresses that contain address scope zones.
    System.setProperty("gov.nist.core.STRIP_ADDR_SCOPES", "true");

    // AudioNotifierService
    AudioNotifierService audioNotifierService = LibJitsi.getAudioNotifierService();

    audioNotifierService.setMute(
        (cfg == null)
            || !cfg.getBoolean("net.java.sip.communicator" + ".impl.sound.isSoundEnabled", true));
    bundleContext.registerService(AudioNotifierService.class.getName(), audioNotifierService, null);

    if (logger.isInfoEnabled()) logger.info("Audio Notifier Service ...[REGISTERED]");

    // Call Recording
    // If the call recording configuration form is disabled don't continue.
    //        if ((cfg == null)
    //                || !cfg.getBoolean(CALL_RECORDING_CONFIG_DISABLED_PROP, false))
    //        {
    //            Dictionary<String, String> callRecordingProps
    //                = new Hashtable<String, String>();
    //
    //            callRecordingProps.put(
    //                    ConfigurationForm.FORM_TYPE,
    //                    ConfigurationForm.ADVANCED_TYPE);
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getName(),
    //                    new LazyConfigurationForm(
    //                            CallRecordingConfigForm.class.getName(),
    //                            getClass().getClassLoader(),
    //                            null,
    //                            "plugin.callrecordingconfig.CALL_RECORDING_CONFIG",
    //                            1100,
    //                            true),
    //                    callRecordingProps);
    //        }
  }
// Example #21
  /**
   * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
   * the <tt>Processor</tt>s that this instance uses to transcode media.
   *
   * @param ev the event to handle.
   */
  public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
      return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
      logger.warn("Event from an orphaned processor, ignoring: " + ev);
      return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
      if (logger.isInfoEnabled()) {
        logger.info(
            "Configured processor for ReceiveStream ssrc="
                + desc.ssrc
                + " ("
                + desc.format
                + ")"
                + " "
                + System.currentTimeMillis());
      }

      boolean audio = desc.format instanceof AudioFormat;

      if (audio) {
        ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
        if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
          logger.error(
              "Failed to set the Processor content "
                  + "descriptor to "
                  + AUDIO_CONTENT_DESCRIPTOR
                  + ". Actual result: "
                  + cd);
          removeReceiveStream(desc, false);
          return;
        }
      }

      for (TrackControl track : processor.getTrackControls()) {
        Format trackFormat = track.getFormat();

        if (audio) {
          final long ssrc = desc.ssrc;
          SilenceEffect silenceEffect;
          if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
            silenceEffect = new SilenceEffect(48000);
          } else {
            // We haven't tested that the RTP timestamps survive
            // the journey through the chain when codecs other than
            // opus are in use, so for the moment we rely on FMJ's
            // timestamps for non-opus formats.
            silenceEffect = new SilenceEffect();
          }

          silenceEffect.setListener(
              new SilenceEffect.Listener() {
                boolean first = true;

                @Override
                public void onSilenceNotInserted(long timestamp) {
                  if (first) {
                    first = false;
                    // send event only
                    audioRecordingStarted(ssrc, timestamp);
                  } else {
                    // change file and send event
                    resetRecording(ssrc, timestamp);
                  }
                }
              });
          desc.silenceEffect = silenceEffect;
          AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
          audioLevelEffect.setAudioLevelListener(
              new SimpleAudioLevelListener() {
                @Override
                public void audioLevelChanged(int level) {
                  activeSpeakerDetector.levelChanged(ssrc, level);
                }
              });

          try {
            // We add an effect, which will insert "silence" in
            // place of lost packets.
            track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
          } catch (UnsupportedPlugInException upie) {
            logger.warn("Failed to insert silence effect: " + upie);
            // But do go on, a recording without extra silence is
            // better than nothing ;)
          }
        } else {
          // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
          if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format);
          else {
            logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
            // we currently only support vp8
            removeReceiveStream(desc, false);
            return;
          }
        }
      }

      processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      desc.dataSource = processor.getDataOutput();

      long ssrc = desc.ssrc;
      boolean audio = desc.format instanceof AudioFormat;
      String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

      // XXX '\' on windows?
      String filename = getNextFilename(path + "/" + ssrc, suffix);
      desc.filename = filename;

      DataSink dataSink;
      if (audio) {
        try {
          dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
        } catch (NoDataSinkException ndse) {
          logger.error("Could not create DataSink: " + ndse);
          removeReceiveStream(desc, false);
          return;
        }

      } else {
        dataSink = new WebmDataSink(filename, desc.dataSource);
      }

      if (logger.isInfoEnabled())
        logger.info(
            "Created DataSink ("
                + dataSink
                + ") for SSRC="
                + ssrc
                + ". Output filename: "
                + filename);
      try {
        dataSink.open();
      } catch (IOException e) {
        logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (!audio) {
        final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
        webmDataSink.setSsrc(ssrc);
        webmDataSink.setEventHandler(eventHandler);
        webmDataSink.setKeyFrameControl(
            new KeyFrameControlAdapter() {
              @Override
              public boolean requestKeyFrame(boolean urgent) {
                return requestFIR(webmDataSink);
              }
            });
      }

      try {
        dataSink.start();
      } catch (IOException e) {
        logger.error(
            "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc);

      desc.dataSink = dataSink;

      processor.start();
    } else if (logger.isDebugEnabled()) {
      logger.debug(
          "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
  }
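  // Hedged recap of the Processor lifecycle driven above: configure() ->
  // ConfigureCompleteEvent (set the content descriptor and the codec
  // chain, then realize()); realize() -> RealizeCompleteEvent (wire a
  // DataSink to getDataOutput(), open() and start() it, then start the
  // processor itself).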
  /** {@inheritDoc} */
  @Override
  public RawPacket report() {
    garbageCollector.cleanup();

    // TODO Compound RTCP packets should not exceed the MTU of the network
    // path.
    //
    // An individual RTP participant should send only one compound RTCP
    // packet per report interval in order for the RTCP bandwidth per
    // participant to be estimated correctly, except when the compound
    // RTCP packet is split for partial encryption.
    //
    // If there are too many sources to fit all the necessary RR packets
    // into one compound RTCP packet without exceeding the maximum
    // transmission unit (MTU) of the network path, then only the subset
    // that will fit into one MTU should be included in each interval. The
    // subsets should be selected round-robin across multiple intervals so
    // that all sources are reported.
    //
    // It is impossible to know in advance what the MTU of the path will be.
    // There are various algorithms for experimenting to find out, but many
    // devices do not properly implement (or deliberately ignore) the
    // necessary standards so it all comes down to trial and error. For that
    // reason, we can just guess 1200 or 1500 bytes per message.
    long time = System.currentTimeMillis();

    Collection<RTCPPacket> packets = new ArrayList<RTCPPacket>();

    // First, we build the RRs.
    Collection<RTCPRRPacket> rrPackets = makeRTCPRRPackets(time);
    if (rrPackets != null && rrPackets.size() != 0) {
      packets.addAll(rrPackets);
    }

    // Next, we build the SRs.
    Collection<RTCPSRPacket> srPackets = makeRTCPSRPackets(time);
    if (srPackets != null && srPackets.size() != 0) {
      packets.addAll(srPackets);
    }

    // Bail out if we have nothing to report.
    if (packets.size() == 0) {
      return null;
    }

    // Next, we build the REMB.
    RTCPREMBPacket rembPacket = makeRTCPREMBPacket();
    if (rembPacket != null) {
      packets.add(rembPacket);
    }

    // Finally, we add an SDES packet.
    RTCPSDESPacket sdesPacket = makeSDESPacket();
    if (sdesPacket != null) {
      packets.add(sdesPacket);
    }

    // Prepare the <tt>RTCPCompoundPacket</tt> to return.
    RTCPPacket rtcpPackets[] = packets.toArray(new RTCPPacket[packets.size()]);

    RTCPCompoundPacket cp = new RTCPCompoundPacket(rtcpPackets);

    // Build the <tt>RTCPCompoundPacket</tt> and return the
    // <tt>RawPacket</tt> to inject to the <tt>MediaStream</tt>.
    return generator.apply(cp);
  }
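  // Hedged note on the packing order above: RFC 3550 section 6.1 requires
  // a compound RTCP packet to begin with an SR or RR, which is why the RR
  // and SR packets are added first and the SDES packet last.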