Example #1
  /**
   * Retransmits a packet to {@link #channel}. If the destination supports the RTX format, the
   * packet will be encapsulated in RTX, otherwise, the packet will be retransmitted as-is.
   *
   * @param pkt the packet to retransmit.
   * @param after the {@code TransformEngine} in the chain of {@code TransformEngine}s of the
   *     associated {@code MediaStream} after which the injection of {@code pkt} is to begin
   * @return {@code true} if the packet was successfully retransmitted, {@code false} otherwise.
   */
  public boolean retransmit(RawPacket pkt, TransformEngine after) {
    boolean destinationSupportsRtx = rtxPayloadType != -1;
    boolean retransmitPlain;

    if (destinationSupportsRtx) {
      long rtxSsrc = getRtxSsrc(pkt);

      if (rtxSsrc == -1) {
        logger.warn(
            "Cannot find SSRC for RTX, retransmitting plain. " + "SSRC=" + pkt.getSSRCAsLong());
        retransmitPlain = true;
      } else {
        retransmitPlain = !encapsulateInRtxAndTransmit(pkt, rtxSsrc, after);
      }
    } else {
      retransmitPlain = true;
    }

    if (retransmitPlain) {
      MediaStream mediaStream = channel.getStream();

      if (mediaStream != null) {
        try {
          mediaStream.injectPacket(pkt, /* data */ true, after);
        } catch (TransmissionFailedException tfe) {
          logger.warn("Failed to retransmit a packet.");
          return false;
        }
      }
    }

    return true;
  }
Example #2
  /**
   * Restarts the recording for a specific SSRC.
   *
   * @param ssrc the SSRC for which to restart recording.
   * @param timestamp the timestamp of the first RTP packet of the new recording.
   */
  private void resetRecording(long ssrc, long timestamp) {
    ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);

    // we only restart audio recordings
    if (receiveStream != null && receiveStream.format instanceof AudioFormat) {
      String newFilename = getNextFilename(path + "/" + ssrc, AUDIO_FILENAME_SUFFIX);

      // flush the buffer contained in the MP3 encoder
      Processor p = receiveStream.processor;
      if (p != null) {
        for (TrackControl tc : p.getTrackControls()) {
          Object o = tc.getControl(FlushableControl.class.getName());
          if (o != null) ((FlushableControl) o).flush();
        }
      }

      if (logger.isInfoEnabled()) {
        logger.info("Restarting recording for SSRC=" + ssrc + ". New filename: " + newFilename);
      }

      receiveStream.dataSink.close();
      receiveStream.dataSink = null;

      // flush the FMJ jitter buffer
      // DataSource ds = receiveStream.receiveStream.getDataSource();
      // if (ds instanceof net.sf.fmj.media.protocol.rtp.DataSource)
      //    ((net.sf.fmj.media.protocol.rtp.DataSource)ds).flush();

      receiveStream.filename = newFilename;
      try {
        receiveStream.dataSink =
            Manager.createDataSink(
                receiveStream.dataSource, new MediaLocator("file:" + newFilename));
      } catch (NoDataSinkException ndse) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ndse);
        removeReceiveStream(receiveStream, false);
        return;
      }

      try {
        receiveStream.dataSink.open();
        receiveStream.dataSink.start();
      } catch (IOException ioe) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ioe);
        removeReceiveStream(receiveStream, false);
        return;
      }

      audioRecordingStarted(ssrc, timestamp);
    }
  }
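The getNextFilename helper used above (and again in Example #8) is not shown on this page. A minimal sketch of what it might look like, assuming it simply probes for the first filename that does not yet exist on disk; the naming scheme and signature are assumptions, not the library's actual code:

  // Hypothetical sketch: returns "<base><suffix>" if unused, otherwise
  // "<base>-1<suffix>", "<base>-2<suffix>", ... for the first name that
  // does not yet exist on disk.
  private String getNextFilename(String base, String suffix) {
    String filename = base + suffix;
    int i = 1;
    while (new java.io.File(filename).exists()) {
      filename = base + "-" + (i++) + suffix;
    }
    return filename;
  }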
Example #3
  private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) {
    if (receiveStream.format instanceof VideoFormat) {
      rtpConnector.packetBuffer.disable(receiveStream.ssrc);
      emptyPacketBuffer(receiveStream.ssrc);
    }

    if (receiveStream.dataSink != null) {
      try {
        receiveStream.dataSink.stop();
      } catch (IOException e) {
        logger.error("Failed to stop DataSink " + e);
      }

      receiveStream.dataSink.close();
    }

    if (receiveStream.processor != null) {
      receiveStream.processor.stop();
      receiveStream.processor.close();
    }

    DataSource dataSource = receiveStream.receiveStream.getDataSource();
    if (dataSource != null) {
      try {
        dataSource.stop();
      } catch (IOException ioe) {
        logger.warn("Failed to stop DataSource");
      }
      dataSource.disconnect();
    }

    synchronized (receiveStreams) {
      receiveStreams.remove(receiveStream);
    }
  }
Example #4
  /**
   * Tries to find an SSRC paired with the SSRC of {@code pkt} in an FID group in one of the
   * channels from {@link #channel}'s {@code Content}. Returns -1 on failure.
   *
   * @param pkt the {@code RawPacket} that holds the RTP packet for which to find a paired SSRC.
   * @return an SSRC paired with the SSRC of {@code pkt} in an FID group, or -1.
   */
  private long getRtxSsrc(RawPacket pkt) {
    StreamRTPManager receiveRTPManager =
        channel.getStream().getRTPTranslator().findStreamRTPManagerByReceiveSSRC(pkt.getSSRC());

    MediaStreamTrackReceiver receiver = null;
    if (receiveRTPManager != null) {
      MediaStream receiveStream = receiveRTPManager.getMediaStream();
      if (receiveStream != null) {
        receiver = receiveStream.getMediaStreamTrackReceiver();
      }
    }

    if (receiver == null) {
      return -1;
    }

    RTPEncoding encoding = receiver.resolveRTPEncoding(pkt);
    if (encoding == null) {
      logger.warn(
          "encoding_not_found"
              + ",stream_hash=" + channel.getStream().hashCode()
              + ",ssrc=" + pkt.getSSRCAsLong());
      return -1;
    }

    return encoding.getRTXSSRC();
  }
Example #5
  /**
   * Removes the RTX encapsulation from a packet.
   *
   * @param pkt the packet to remove the RTX encapsulation from.
   * @return the original media packet represented by {@code pkt}, or null if we couldn't
   *     reconstruct the original packet.
   */
  private RawPacket deRtx(RawPacket pkt) {
    boolean success = false;

    if (pkt.getPayloadLength() - pkt.getPaddingSize() < 2) {
      // We need at least 2 bytes to read the OSN field.
      if (logger.isDebugEnabled()) {
        logger.debug("Dropping an incoming RTX packet with padding only: " + pkt);
      }
      return null;
    }

    long mediaSsrc = getPrimarySsrc(pkt);
    if (mediaSsrc != -1) {
      if (rtxAssociatedPayloadType != -1) {
        int osn = pkt.getOriginalSequenceNumber();
        // Remove the RTX header by moving the RTP header two bytes
        // right.
        byte[] buf = pkt.getBuffer();
        int off = pkt.getOffset();
        System.arraycopy(buf, off, buf, off + 2, pkt.getHeaderLength());

        pkt.setOffset(off + 2);
        pkt.setLength(pkt.getLength() - 2);

        pkt.setSSRC((int) mediaSsrc);
        pkt.setSequenceNumber(osn);
        pkt.setPayloadType(rtxAssociatedPayloadType);
        success = true;
      } else {
        logger.warn(
            "RTX packet received, but no APT is defined. Packet "
                + "SSRC "
                + pkt.getSSRCAsLong()
                + ", associated media"
                + " SSRC "
                + mediaSsrc);
      }
    }

    // If we failed to handle the RTX packet, drop it.
    return success ? pkt : null;
  }
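For reference, RFC 4588 defines the RTX payload as a two-byte original sequence number (OSN) followed by the original media payload, which is why deRtx() requires at least two payload bytes and shifts the RTP header right by two. A hedged sketch of how RawPacket.getOriginalSequenceNumber() might read that field; the body below is an assumption, not the library's implementation:

  // Assumed helper: reads the 16-bit OSN carried in the first two bytes of
  // an RTX packet's payload (network byte order), per RFC 4588.
  public int getOriginalSequenceNumber() {
    byte[] buf = getBuffer();
    int payloadOffset = getOffset() + getHeaderLength();
    return ((buf[payloadOffset] & 0xff) << 8) | (buf[payloadOffset + 1] & 0xff);
  }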
Example #6
  /**
   * Encapsulates {@code pkt} in the RTX format, using {@code rtxSsrc} as its SSRC, and transmits it
   * to {@link #channel} by injecting it in the {@code MediaStream}.
   *
   * @param pkt the packet to transmit.
   * @param rtxSsrc the SSRC for the RTX stream.
   * @param after the {@code TransformEngine} in the chain of {@code TransformEngine}s of the
   *     associated {@code MediaStream} after which the injection of {@code pkt} is to begin
   * @return {@code true} if the packet was successfully retransmitted, {@code false} otherwise.
   */
  private boolean encapsulateInRtxAndTransmit(RawPacket pkt, long rtxSsrc, TransformEngine after) {
    byte[] buf = pkt.getBuffer();
    int len = pkt.getLength();
    int off = pkt.getOffset();

    byte[] newBuf = new byte[len + 2];
    RawPacket rtxPkt = new RawPacket(newBuf, 0, len + 2);

    int osn = pkt.getSequenceNumber();
    int headerLength = pkt.getHeaderLength();
    int payloadLength = pkt.getPayloadLength();

    // Copy the header.
    System.arraycopy(buf, off, newBuf, 0, headerLength);

    // Set the OSN field.
    newBuf[headerLength] = (byte) ((osn >> 8) & 0xff);
    newBuf[headerLength + 1] = (byte) (osn & 0xff);

    // Copy the payload.
    System.arraycopy(buf, off + headerLength, newBuf, headerLength + 2, payloadLength);

    MediaStream mediaStream = channel.getStream();
    if (mediaStream != null) {
      rtxPkt.setSSRC((int) rtxSsrc);
      rtxPkt.setPayloadType(rtxPayloadType);
      // Only call getNextRtxSequenceNumber() when we're sure we're going
      // to transmit a packet, because it consumes a sequence number.
      rtxPkt.setSequenceNumber(getNextRtxSequenceNumber(rtxSsrc));
      try {
        mediaStream.injectPacket(rtxPkt, /* data */ true, after);
      } catch (TransmissionFailedException tfe) {
        logger.warn("Failed to transmit an RTX packet.");
        return false;
      }
    }

    return true;
  }
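getNextRtxSequenceNumber(rtxSsrc) is only named in this example. One plausible shape for it, assuming a per-SSRC wrapping 16-bit counter seeded randomly (names, fields, and seeding below are assumptions):

  // Assumed helper: an independent, wrapping 16-bit sequence number counter
  // per RTX SSRC. Each call consumes a sequence number, which is why the
  // caller above invokes it only once a transmission is certain.
  private final java.util.Map<Long, Integer> rtxSequenceNumbers = new java.util.HashMap<>();

  private synchronized int getNextRtxSequenceNumber(long rtxSsrc) {
    Integer seq = rtxSequenceNumbers.get(rtxSsrc);
    if (seq == null) {
      // Start from a random point, as is customary for RTP sequence numbers.
      seq = new java.util.Random().nextInt(1 << 16);
    }
    rtxSequenceNumbers.put(rtxSsrc, (seq + 1) & 0xffff);
    return seq;
  }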
Example #7
  /** {@inheritDoc} */
  @Override
  protected int doProcess(Buffer inBuffer, Buffer outBuffer) {
    byte[] inData = (byte[]) inBuffer.getData();
    int inOffset = inBuffer.getOffset();

    if (!VP8PayloadDescriptor.isValid(inData, inOffset)) {
      logger.warn("Invalid RTP/VP8 packet discarded.");
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_FAILED; // XXX: FAILED or OK?
    }

    long inSeq = inBuffer.getSequenceNumber();
    long inRtpTimestamp = inBuffer.getRtpTimeStamp();
    int inPictureId = VP8PayloadDescriptor.getPictureId(inData, inOffset);
    boolean inMarker = (inBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0;
    boolean inIsStartOfFrame = VP8PayloadDescriptor.isStartOfFrame(inData, inOffset);
    int inLength = inBuffer.getLength();
    int inPdSize = VP8PayloadDescriptor.getSize(inData, inOffset);
    int inPayloadLength = inLength - inPdSize;

    if (empty && lastSentSeq != -1 && seqNumComparator.compare(inSeq, lastSentSeq) != 1) {
      if (logger.isInfoEnabled()) logger.info("Discarding old packet (while empty) " + inSeq);
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_OK;
    }

    if (!empty) {
      // if the incoming packet has a different PictureID or timestamp
      // than those of the current frame, then it belongs to a different
      // frame.
      if ((inPictureId != -1 && pictureId != -1 && inPictureId != pictureId)
          || (timestamp != -1 && inRtpTimestamp != -1 && inRtpTimestamp != timestamp)) {
        if (seqNumComparator.compare(inSeq, firstSeq) != 1) // inSeq <= firstSeq
        {
          // the packet belongs to a previous frame. discard it
          if (logger.isInfoEnabled()) logger.info("Discarding old packet " + inSeq);
          outBuffer.setDiscard(true);
          return BUFFER_PROCESSED_OK;
        } else // inSeq > firstSeq (and also presumably inSeq > lastSeq)
        {
          // the packet belongs to a subsequent frame (to the one
          // currently being held). Drop the current frame.

          if (logger.isInfoEnabled())
            logger.info(
                "Discarding saved packets on arrival of"
                    + " a packet for a subsequent frame: "
                    + inSeq);

          // TODO: this would be the place to complain about the
          // not-well-received PictureID by sending a RTCP SLI or NACK.
          reinit();
        }
      }
    }

    // a whole frame in a single packet. avoid the extra copy to
    // this.data and output it immediately.
    if (empty && inMarker && inIsStartOfFrame) {
      byte[] outData = validateByteArraySize(outBuffer, inPayloadLength, false);
      System.arraycopy(inData, inOffset + inPdSize, outData, 0, inPayloadLength);
      outBuffer.setOffset(0);
      outBuffer.setLength(inPayloadLength);
      outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

      if (TRACE) logger.trace("Out PictureID=" + inPictureId);

      lastSentSeq = inSeq;

      return BUFFER_PROCESSED_OK;
    }

    // add to this.data
    Container container = free.poll();
    if (container == null) container = new Container();
    if (container.buf == null || container.buf.length < inPayloadLength)
      container.buf = new byte[inPayloadLength];

    if (data.get(inSeq) != null) {
      if (logger.isInfoEnabled())
        logger.info("(Probable) duplicate packet detected, discarding " + inSeq);
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_OK;
    }

    System.arraycopy(inData, inOffset + inPdSize, container.buf, 0, inPayloadLength);
    container.len = inPayloadLength;
    data.put(inSeq, container);

    // update fields
    frameLength += inPayloadLength;
    if (firstSeq == -1 || (seqNumComparator.compare(firstSeq, inSeq) == 1)) firstSeq = inSeq;
    if (lastSeq == -1 || (seqNumComparator.compare(inSeq, lastSeq) == 1)) lastSeq = inSeq;

    if (empty) {
      // the first received packet for the current frame was just added
      empty = false;
      timestamp = inRtpTimestamp;
      pictureId = inPictureId;
    }

    if (inMarker) haveEnd = true;
    if (inIsStartOfFrame) haveStart = true;

    // check if we have a full frame
    if (frameComplete()) {
      byte[] outData = validateByteArraySize(outBuffer, frameLength, false);
      int ptr = 0;
      Container b;
      for (Map.Entry<Long, Container> entry : data.entrySet()) {
        b = entry.getValue();
        System.arraycopy(b.buf, 0, outData, ptr, b.len);
        ptr += b.len;
      }

      outBuffer.setOffset(0);
      outBuffer.setLength(frameLength);
      outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

      if (TRACE) logger.trace("Out PictureID=" + inPictureId);
      lastSentSeq = lastSeq;

      // prepare for the next frame
      reinit();

      return BUFFER_PROCESSED_OK;
    } else {
      // frame not complete yet
      outBuffer.setDiscard(true);
      return OUTPUT_BUFFER_NOT_FILLED;
    }
  }
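frameComplete() is the gate for reassembly above. A minimal sketch of the check it presumably performs (start-of-frame seen, RTP marker seen, and no gaps between firstSeq and lastSeq); the actual implementation in the depacketizer may differ:

  // Assumed check: a frame is complete when we have its first packet
  // (start of frame), its last packet (RTP marker bit), and every sequence
  // number in between is present in the reassembly map.
  private boolean frameComplete() {
    if (!haveStart || !haveEnd) {
      return false;
    }
    // Expected packet count for the range [firstSeq, lastSeq], modulo 2^16.
    long expected = ((lastSeq - firstSeq) & 0xffff) + 1;
    return data.size() == expected;
  }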
Example #8
  /**
   * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
   * the <tt>Processor</tt>s that this instance uses to transcode media.
   *
   * @param ev the event to handle.
   */
  public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
      return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
      logger.warn("Event from an orphaned processor, ignoring: " + ev);
      return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
      if (logger.isInfoEnabled()) {
        logger.info(
            "Configured processor for ReceiveStream ssrc="
                + desc.ssrc
                + " ("
                + desc.format
                + ")"
                + " "
                + System.currentTimeMillis());
      }

      boolean audio = desc.format instanceof AudioFormat;

      if (audio) {
        ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
        if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
          logger.error(
              "Failed to set the Processor content "
                  + "descriptor to "
                  + AUDIO_CONTENT_DESCRIPTOR
                  + ". Actual result: "
                  + cd);
          removeReceiveStream(desc, false);
          return;
        }
      }

      for (TrackControl track : processor.getTrackControls()) {
        Format trackFormat = track.getFormat();

        if (audio) {
          final long ssrc = desc.ssrc;
          SilenceEffect silenceEffect;
          if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
            silenceEffect = new SilenceEffect(48000);
          } else {
            // We haven't tested that the RTP timestamps survive
            // the journey through the chain when codecs other than
            // opus are in use, so for the moment we rely on FMJ's
            // timestamps for non-opus formats.
            silenceEffect = new SilenceEffect();
          }

          silenceEffect.setListener(
              new SilenceEffect.Listener() {
                boolean first = true;

                @Override
                public void onSilenceNotInserted(long timestamp) {
                  if (first) {
                    first = false;
                    // send event only
                    audioRecordingStarted(ssrc, timestamp);
                  } else {
                    // change file and send event
                    resetRecording(ssrc, timestamp);
                  }
                }
              });
          desc.silenceEffect = silenceEffect;
          AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
          audioLevelEffect.setAudioLevelListener(
              new SimpleAudioLevelListener() {
                @Override
                public void audioLevelChanged(int level) {
                  activeSpeakerDetector.levelChanged(ssrc, level);
                }
              });

          try {
            // We add an effect, which will insert "silence" in
            // place of lost packets.
            track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
          } catch (UnsupportedPlugInException upie) {
            logger.warn("Failed to insert silence effect: " + upie);
            // But do go on, a recording without extra silence is
            // better than nothing ;)
          }
        } else {
          // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
          if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format);
          else {
            logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
            // we currently only support vp8
            removeReceiveStream(desc, false);
            return;
          }
        }
      }

      processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      desc.dataSource = processor.getDataOutput();

      long ssrc = desc.ssrc;
      boolean audio = desc.format instanceof AudioFormat;
      String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

      // XXX '\' on windows?
      String filename = getNextFilename(path + "/" + ssrc, suffix);
      desc.filename = filename;

      DataSink dataSink;
      if (audio) {
        try {
          dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
        } catch (NoDataSinkException ndse) {
          logger.error("Could not create DataSink: " + ndse);
          removeReceiveStream(desc, false);
          return;
        }

      } else {
        dataSink = new WebmDataSink(filename, desc.dataSource);
      }

      if (logger.isInfoEnabled())
        logger.info(
            "Created DataSink ("
                + dataSink
                + ") for SSRC="
                + ssrc
                + ". Output filename: "
                + filename);
      try {
        dataSink.open();
      } catch (IOException e) {
        logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (!audio) {
        final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
        webmDataSink.setSsrc(ssrc);
        webmDataSink.setEventHandler(eventHandler);
        webmDataSink.setKeyFrameControl(
            new KeyFrameControlAdapter() {
              @Override
              public boolean requestKeyFrame(boolean urgent) {
                return requestFIR(webmDataSink);
              }
            });
      }

      try {
        dataSink.start();
      } catch (IOException e) {
        logger.error(
            "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc);

      desc.dataSink = dataSink;

      processor.start();
    } else if (logger.isDebugEnabled()) {
      logger.debug(
          "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
  }
Example #9
  /**
   * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
   *
   * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
   */
  @Override
  public void update(ReceiveStreamEvent event) {
    if (event == null) return;
    ReceiveStream receiveStream = event.getReceiveStream();

    if (event instanceof NewReceiveStreamEvent) {
      if (receiveStream == null) {
        logger.warn("NewReceiveStreamEvent: null");
        return;
      }

      final long ssrc = getReceiveStreamSSRC(receiveStream);

      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);

      if (receiveStreamDesc != null) {
        String s = "NewReceiveStreamEvent for an existing SSRC. ";
        if (receiveStream != receiveStreamDesc.receiveStream)
          s += "(but different ReceiveStream object)";
        logger.warn(s);
        return;
      } else receiveStreamDesc = new ReceiveStreamDesc(receiveStream);

      if (logger.isInfoEnabled()) logger.info("New ReceiveStream, ssrc=" + ssrc);

      // Find the format of the ReceiveStream
      DataSource dataSource = receiveStream.getDataSource();
      if (dataSource instanceof PushBufferDataSource) {
        Format format = null;
        PushBufferDataSource pbds = (PushBufferDataSource) dataSource;
        for (PushBufferStream pbs : pbds.getStreams()) {
          if ((format = pbs.getFormat()) != null) break;
        }

        if (format == null) {
          logger.error("Failed to handle new ReceiveStream: " + "Failed to determine format");
          return;
        }

        receiveStreamDesc.format = format;
      } else {
        logger.error("Failed to handle new ReceiveStream: " + "Unsupported DataSource");
        return;
      }

      int rtpClockRate = -1;
      if (receiveStreamDesc.format instanceof AudioFormat)
        rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
      else if (receiveStreamDesc.format instanceof VideoFormat) rtpClockRate = 90000;
      getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

      // create a Processor and configure it
      Processor processor = null;
      try {
        processor = Manager.createProcessor(receiveStream.getDataSource());
      } catch (NoProcessorException npe) {
        logger.error("Failed to create Processor: ", npe);
        return;
      } catch (IOException ioe) {
        logger.error("Failed to create Processor: ", ioe);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Created processor for SSRC=" + ssrc);

      processor.addControllerListener(this);
      receiveStreamDesc.processor = processor;

      final int streamCount;
      synchronized (receiveStreams) {
        receiveStreams.add(receiveStreamDesc);
        streamCount = receiveStreams.size();
      }

      /*
       * XXX TODO IRBABOON
       * This is a terrible hack which works around a failure to realize()
       * some of the Processor-s for audio streams, when multiple streams
       * start nearly simultaneously. The cause of the problem is currently
       * unknown (and synchronizing all FMJ calls in RecorderRtpImpl
       * does not help).
       * XXX TODO NOOBABRI
       */
      if (receiveStreamDesc.format instanceof AudioFormat) {
        final Processor p = processor;
        new Thread() {
          @Override
          public void run() {
            // delay configuring the processors for the different
            // audio streams to decrease the probability that they
            // run together.
            try {
              int ms = 450 * (streamCount - 1);
              logger.warn(
                  "Sleeping for "
                      + ms
                      + "ms before"
                      + " configuring processor for SSRC="
                      + ssrc
                      + " "
                      + System.currentTimeMillis());
              Thread.sleep(ms);
            } catch (Exception e) {
              // Ignore interruption and configure immediately.
            }

            p.configure();
          }
        }.start();
      } else {
        processor.configure();
      }
    } else if (event instanceof TimeoutEvent) {
      if (receiveStream == null) {
        // TODO: we might want to get the list of ReceiveStream-s from
        // rtpManager and compare it to our list, to see if we should
        // remove a stream.
        logger.warn("TimeoutEvent: null.");
        return;
      }

      // FMJ silently creates new ReceiveStream instances, so we have to
      // recognize them by the SSRC.
      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream));
      if (receiveStreamDesc != null) {
        if (logger.isInfoEnabled()) {
          logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc);
        }

        removeReceiveStream(receiveStreamDesc, true);
      }
    } else if (logger.isInfoEnabled()) {
      logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event);
    }
  }
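findReceiveStream(ssrc) and its Processor-based overload are used throughout these examples but not listed here. A hedged sketch of the SSRC variant, assuming it simply scans the synchronized receiveStreams collection for a matching SSRC:

  // Assumed helper: linear scan of the known streams for a matching SSRC.
  // Returns null when no recording is active for that SSRC.
  private ReceiveStreamDesc findReceiveStream(long ssrc) {
    synchronized (receiveStreams) {
      for (ReceiveStreamDesc desc : receiveStreams) {
        if (desc.ssrc == ssrc) {
          return desc;
        }
      }
    }
    return null;
  }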