/**
   * Gets the <tt>MediaFormat</tt>s among the specified <tt>mediaFormats</tt> which have the
   * specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>.
   *
   * @param mediaFormats the <tt>MediaFormat</tt>s from which to select only the ones which have
   *     the specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>
   * @param encoding the well-known encoding (name) of the <tt>MediaFormat</tt>s to be retrieved
   * @param clockRate the clock rate of the <tt>MediaFormat</tt>s to be retrieved; {@link
   *     #CLOCK_RATE_NOT_SPECIFIED} if any clock rate is acceptable
   * @return a <tt>List</tt> of the <tt>MediaFormat</tt>s among <tt>mediaFormats</tt> which have the
   *     specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>
   */
  private List<MediaFormat> getMatchingMediaFormats(
      MediaFormat[] mediaFormats, String encoding, double clockRate) {
    /*
     * XXX Use String#equalsIgnoreCase(String) because some clients transmit
     * certain codec names starting with capital letters.
     */

    /*
     * As per RFC 3551, Section 4.5.2, because of a mistake in RFC 1890 and for
     * backward compatibility, G.722 should always be announced as 8000 even
     * though it is wideband. So, if someone is looking for G722/16000,
     * then: Forgive them, for they know not what they do!
     */
    if ("G722".equalsIgnoreCase(encoding) && (16000 == clockRate)) {
      clockRate = 8000;
      if (logger.isInfoEnabled()) logger.info("Suppressing erroneous 16000 announcement for G.722");
    }

    List<MediaFormat> supportedMediaFormats = new ArrayList<MediaFormat>();

    for (MediaFormat mediaFormat : mediaFormats) {
      if (mediaFormat.getEncoding().equalsIgnoreCase(encoding)
          && ((CLOCK_RATE_NOT_SPECIFIED == clockRate)
              || (mediaFormat.getClockRate() == clockRate))) {
        supportedMediaFormats.add(mediaFormat);
      }
    }
    return supportedMediaFormats;
  }
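
A minimal caller sketch of the filtering above. The availableFormats array and the lowercase
"g722" spelling are illustrative, not part of the original class; MediaFormat and
CLOCK_RATE_NOT_SPECIFIED come from the surrounding code.

    // A request for G722 at 16000 Hz matches the formats announced at
    // 8000 Hz because the method normalizes the clock rate per RFC 3551.
    List<MediaFormat> g722 = getMatchingMediaFormats(availableFormats, "g722", 16000);
    // With CLOCK_RATE_NOT_SPECIFIED, every format with the given encoding
    // matches regardless of clock rate.
    List<MediaFormat> opus =
        getMatchingMediaFormats(availableFormats, "opus", CLOCK_RATE_NOT_SPECIFIED);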
  /**
   * Notifies this {@code SimulcastReceiver} that a specific {@code SimulcastLayer} has detected
   * the start of a new video frame in the RTP stream that it represents. Determines whether any of
   * {@link #simulcastLayers} other than {@code source} have been paused/stopped by the remote peer.
   * The determination is based on counting (video) frames.
   *
   * @param source the {@code SimulcastLayer} which is the source of the event i.e. which has
   *     detected the start of a new video frame in the RTP stream that it represents
   * @param pkt the {@code RawPacket} which was received by {@code source} and possibly influenced
   *     the decision that a new video frame was started in the RTP stream represented by {@code
   *     source}
   * @param layers the set of {@code SimulcastLayer}s managed by this {@code SimulcastReceiver}.
   *     Explicitly provided to the method in order to avoid invocations of {@link
   *     #getSimulcastLayers()} because the latter makes a copy at the time of this writing.
   */
  private void simulcastLayerFrameStarted(
      SimulcastLayer source, RawPacket pkt, SimulcastLayer[] layers) {
    // Allow the value of the constant TIMEOUT_ON_FRAME_COUNT to disable (at
    // compile time) the frame-based approach to the detection of layer
    // drops.
    if (TIMEOUT_ON_FRAME_COUNT <= 1) return;

    // Timeouts in layers caused by source may occur only based on the span
    // (of time or received frames) during which source has received
    // TIMEOUT_ON_FRAME_COUNT number of frames. The current method
    // invocation signals the receipt of 1 frame by source.
    int indexOfLastSourceOccurrenceInHistory = -1;
    int sourceFrameCount = 0;
    int ix = 0;

    for (Iterator<SimulcastLayer> it = simulcastLayerFrameHistory.iterator(); it.hasNext(); ++ix) {
      if (it.next() == source) {
        if (indexOfLastSourceOccurrenceInHistory != -1) {
          // Prune simulcastLayerFrameHistory so that it does not
          // become unnecessarily long.
          it.remove();
        } else if (++sourceFrameCount >= TIMEOUT_ON_FRAME_COUNT - 1) {
          // The span of TIMEOUT_ON_FRAME_COUNT number of frames
          // received by source only is to be examined for the
          // purposes of timeouts. The current method invocation
          // signals the receipt of 1 frame by source, so
          // TIMEOUT_ON_FRAME_COUNT - 1 occurrences of source in
          // simulcastLayerFrameHistory is enough.
          indexOfLastSourceOccurrenceInHistory = ix;
        }
      }
    }

    if (indexOfLastSourceOccurrenceInHistory != -1) {
      // Presumably, if a SimulcastLayer is active, all SimulcastLayers
      // before it (according to SimulcastLayer's order) are active as
      // well. Consequently, timeouts may occur in SimulcastLayers which
      // are after source.
      boolean maybeTimeout = false;

      for (SimulcastLayer layer : layers) {
        if (maybeTimeout) {
          // There's no point in timing layer out if it's timed out
          // already.
          if (layer.isStreaming()) {
            maybeTimeout(source, pkt, layer, indexOfLastSourceOccurrenceInHistory);
          }
        } else if (layer == source) {
          maybeTimeout = true;
        }
      }
    }

    // As previously stated, the current method invocation signals the
    // receipt of 1 frame by source.
    simulcastLayerFrameHistory.add(0, source);
    // TODO Prune simulcastLayerFrameHistory by forgetting the oldest
    // entries so that it does not become too long.
  }
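
A minimal, self-contained sketch of the bookkeeping above, using plain Strings in place of
SimulcastLayers and an assumed TIMEOUT_ON_FRAME_COUNT of 3; all names here are illustrative.
Each frame start prepends its layer to a most-recent-first history, and occurrences of that
layer older than its (TIMEOUT_ON_FRAME_COUNT - 1)-th most recent one are pruned so the list
stays short.

  import java.util.Iterator;
  import java.util.LinkedList;
  import java.util.List;

  public class FrameHistoryPruningSketch {
    static final int TIMEOUT_ON_FRAME_COUNT = 3; // assumed value

    static void frameStarted(List<String> history, String source) {
      int sourceFrameCount = 0;
      boolean pastExaminedSpan = false;

      for (Iterator<String> it = history.iterator(); it.hasNext(); ) {
        if (it.next().equals(source)) {
          if (pastExaminedSpan) {
            // Too old to matter for timeout detection: forget it.
            it.remove();
          } else if (++sourceFrameCount >= TIMEOUT_ON_FRAME_COUNT - 1) {
            pastExaminedSpan = true;
          }
        }
      }
      // The current invocation signals the receipt of 1 frame by source.
      history.add(0, source);
    }

    public static void main(String[] args) {
      List<String> history = new LinkedList<>();

      for (int i = 0; i < 5; i++) frameStarted(history, "low");
      // Only TIMEOUT_ON_FRAME_COUNT - 1 older entries survive, plus the
      // newest one:
      System.out.println(history); // [low, low, low]
    }
  }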
 /**
  * Adds <tt>WebRtcDataStreamListener</tt> to the list of listeners.
  *
  * @param listener the <tt>WebRtcDataStreamListener</tt> to be added to the listeners list.
  */
 public void addChannelListener(WebRtcDataStreamListener listener) {
   if (listener == null) {
     throw new NullPointerException("listener");
   } else {
     synchronized (listeners) {
       if (!listeners.contains(listener)) {
         listeners.add(listener);
       }
     }
   }
 }
  /**
   * Gets the <tt>WebRtcDataStreamListener</tt>s added to this instance.
   *
   * @return the <tt>WebRtcDataStreamListener</tt>s added to this instance or <tt>null</tt> if there
   *     are no <tt>WebRtcDataStreamListener</tt>s added to this instance
   */
  private WebRtcDataStreamListener[] getChannelListeners() {
    WebRtcDataStreamListener[] ls;

    synchronized (listeners) {
      if (listeners.isEmpty()) {
        ls = null;
      } else {
        ls = listeners.toArray(new WebRtcDataStreamListener[listeners.size()]);
      }
    }
    return ls;
  }
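
A hypothetical dispatch helper illustrating why getChannelListeners() snapshots the list (or
returns null): the callbacks can then run outside the "listeners" lock, so a listener may add
or remove listeners from within its callback without deadlocking. The fireChannelOpened and
onChannelOpened names are assumptions for this sketch, not the actual API.

    private void fireChannelOpened(WebRtcDataStream channel) {
      WebRtcDataStreamListener[] ls = getChannelListeners();

      if (ls != null) {
        for (WebRtcDataStreamListener l : ls) {
          // Invoked without holding the "listeners" lock.
          l.onChannelOpened(channel);
        }
      }
    }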
  /**
   * Gets the <tt>MediaFormat</tt>s supported by this <tt>MediaFormatFactory</tt> and the
   * <tt>MediaService</tt> associated with it and having the specified <tt>encoding</tt> and,
   * optionally, <tt>clockRate</tt>.
   *
   * @param encoding the well-known encoding (name) of the <tt>MediaFormat</tt>s to be retrieved
   * @param clockRate the clock rate of the <tt>MediaFormat</tt>s to be retrieved; {@link
   *     #CLOCK_RATE_NOT_SPECIFIED} if any clock rate is acceptable
   * @return a <tt>List</tt> of the <tt>MediaFormat</tt>s supported by the <tt>MediaService</tt>
   *     associated with this <tt>MediaFormatFactory</tt> and having the specified encoding and,
   *     optionally, clock rate
   */
  private List<MediaFormat> getSupportedMediaFormats(String encoding, double clockRate) {
    EncodingConfiguration encodingConfiguration =
        NeomediaServiceUtils.getMediaServiceImpl().getCurrentEncodingConfiguration();
    List<MediaFormat> supportedMediaFormats =
        getMatchingMediaFormats(
            encodingConfiguration.getAllEncodings(MediaType.AUDIO), encoding, clockRate);

    if (supportedMediaFormats.isEmpty())
      supportedMediaFormats =
          getMatchingMediaFormats(
              encodingConfiguration.getAllEncodings(MediaType.VIDEO), encoding, clockRate);
    return supportedMediaFormats;
  }
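
Illustrative lookups (the encodings and clock rates are examples only): the method tries the
audio encodings first and falls back to the video ones, so both calls below resolve through
the same code path.

    // An audio format, matched in the first getMatchingMediaFormats pass.
    List<MediaFormat> opus = getSupportedMediaFormats("opus", 48000);
    // A video format; the audio pass comes back empty, so the video pass
    // supplies the result. Any clock rate is acceptable here.
    List<MediaFormat> vp8 = getSupportedMediaFormats("VP8", CLOCK_RATE_NOT_SPECIFIED);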
 /**
  * Removes <tt>WebRtcDataStreamListener</tt> from the list of listeners.
  *
  * @param listener the <tt>WebRtcDataStreamListener</tt> to be removed from the listeners list.
  */
 public void removeChannelListener(WebRtcDataStreamListener listener) {
   if (listener != null) {
     synchronized (listeners) {
       listeners.remove(listener);
     }
   }
 }
  /**
   * Determines whether {@code effect} has been paused/stopped by the remote peer. The determination
   * is based on counting frames and is triggered by the receipt of (a piece of) a new (video) frame
   * by {@code cause}.
   *
   * @param cause the {@code SimulcastLayer} which has received (a piece of) a new (video) frame and
   *     has thus triggered a check on {@code effect}
   * @param pkt the {@code RawPacket} which was received by {@code cause} and possibly influenced
   *     the decision to trigger a check on {@code effect}
   * @param effect the {@code SimulcastLayer} which is to be checked whether it looks like it has
   *     been paused/stopped by the remote peer
   * @param endIndexInSimulcastLayerFrameHistory the index in {@link
   *     #simulcastLayerFrameHistory} at which the search for a frame received by {@code effect}
   *     ends; if {@code effect} does not occur before this index, it is considered timed out
   */
  private void maybeTimeout(
      SimulcastLayer cause,
      RawPacket pkt,
      SimulcastLayer effect,
      int endIndexInSimulcastLayerFrameHistory) {
    Iterator<SimulcastLayer> it = simulcastLayerFrameHistory.iterator();
    boolean timeout = true;

    for (int ix = 0; it.hasNext() && ix < endIndexInSimulcastLayerFrameHistory; ++ix) {
      if (it.next() == effect) {
        timeout = false;
        break;
      }
    }
    if (timeout) {
      effect.maybeTimeout(pkt);

      if (!effect.isStreaming()) {
        // Since effect has been determined to have been paused/stopped
        // by the remote peer, its possible presence in
        // simulcastLayerFrameHistory is irrelevant now. In other words,
        // remove effect from simulcastLayerFrameHistory.
        while (it.hasNext()) {
          if (it.next() == effect) it.remove();
        }
      }
    }
  }
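
A minimal, self-contained sketch of the check above, again with Strings standing in for
SimulcastLayers; the looksTimedOut helper and the sample history are illustrative. effect is
considered timed out when it does not occur in the most-recent-first history before endIndex,
i.e. it produced no frame during the span in which cause produced its last
TIMEOUT_ON_FRAME_COUNT frames.

  import java.util.Arrays;
  import java.util.List;

  public class TimeoutCheckSketch {
    /** Returns true when {@code effect} has no frame before {@code endIndex}. */
    static boolean looksTimedOut(List<String> history, String effect, int endIndex) {
      for (int ix = 0; ix < endIndex && ix < history.size(); ix++) {
        if (history.get(ix).equals(effect)) {
          return false; // effect produced a frame recently enough
        }
      }
      return true;
    }

    public static void main(String[] args) {
      // Most-recent-first: "high"'s last frame predates the span in which
      // "low" produced its most recent frames.
      List<String> history = Arrays.asList("low", "low", "high", "low");

      System.out.println(looksTimedOut(history, "high", 1)); // true
      System.out.println(looksTimedOut(history, "high", 3)); // false
    }
  }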