Example #1
  /** {@inheritDoc} */
  public long getLocalTime(long ssrc, long rtp0) {
    // don't use getSSRCDesc, because we don't want to create an instance
    SSRCDesc ssrcDesc = ssrcs.get(ssrc);
    if (ssrcDesc == null) {
      return -1;
    }

    // get all required times
    long clockRate; // the clock rate for the RTP clock for the given SSRC
    long rtp1; // some time X in the RTP clock for the given SSRC
    double ntp1; // the same time X in the source's wallclock
    String endpointId;
    synchronized (ssrcDesc) {
      clockRate = ssrcDesc.clockRate;
      rtp1 = ssrcDesc.rtpTime;
      ntp1 = ssrcDesc.ntpTime;
      endpointId = ssrcDesc.endpointId;
    }

    // if something is missing, we can't calculate the time
    if (clockRate == -1 || rtp1 == -1 || ntp1 == -1.0 || endpointId == null) {
      return -1;
    }

    Endpoint endpoint = endpoints.get(endpointId);
    if (endpoint == null) {
      return -1;
    }

    double ntp2; // some time Y in the source's wallclock (same clock as for ntp1)
    long local2; // the same time Y in the local clock
    synchronized (endpoint) {
      ntp2 = endpoint.ntpTime;
      local2 = endpoint.localTime;
    }

    if (ntp2 == -1.0 || local2 == -1) {
      return -1;
    }

    // crunch the numbers. we're looking for 'local0',
    // the local time corresponding to 'rtp0'
    long local0;

    double diff1S = ntp1 - ntp2;
    double diff2S = ((double) TimeUtils.rtpDiff(rtp0, rtp1)) / clockRate;

    long diffMs = Math.round((diff1S + diff2S) * 1000);

    local0 = local2 + diffMs;

    return local0;
  }
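A minimal sketch of the arithmetic above, assuming made-up values (the clock rate, RTP and NTP times below are illustrations, not taken from any real session) and using plain subtraction in place of TimeUtils.rtpDiff:

public class LocalTimeSketch {
  public static void main(String[] args) {
    long clockRate = 16_000;   // hypothetical RTP clock rate, in Hz
    long rtp1 = 160_000;       // RTP time X reported by the sender
    double ntp1 = 1_000.000;   // the same instant X on the sender's wallclock, in seconds
    double ntp2 = 999.500;     // another instant Y on the sender's wallclock, in seconds
    long local2 = 5_000_000;   // the same instant Y on the local clock, in milliseconds
    long rtp0 = 156_800;       // the RTP time we want to map to the local clock

    double diff1S = ntp1 - ntp2;                          // 0.5 s
    double diff2S = ((double) (rtp0 - rtp1)) / clockRate; // -0.2 s
    long local0 = local2 + Math.round((diff1S + diff2S) * 1000);

    System.out.println(local0); // prints 5000300
  }
}

The method itself goes through TimeUtils.rtpDiff rather than plain subtraction, presumably to handle 32-bit RTP timestamp wraparound.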
  /**
   * Returns a <tt>SimulcastLayer</tt> that is the closest match to the target order, or null if
   * simulcast hasn't been configured for this receiver.
   *
   * @param targetOrder the simulcast layer target order.
   * @return a <tt>SimulcastLayer</tt> that is the closest match to the target order, or null.
   */
  public SimulcastLayer getSimulcastLayer(int targetOrder) {
    SimulcastLayer[] layers = getSimulcastLayers();
    if (layers == null || layers.length == 0) {
      return null;
    }

    // Iterate through the simulcast layers that we own and return the one
    // that best matches the targetOrder parameter.
    SimulcastLayer next = layers[0];
    for (int i = 1; i < Math.min(targetOrder + 1, layers.length); i++) {
      if (!layers[i].isStreaming()) {
        break;
      }

      next = layers[i];
    }

    return next;
  }
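A hedged usage sketch; simulcastReceiver is a hypothetical variable, not something defined in this snippet:

SimulcastLayer layer = simulcastReceiver.getSimulcastLayer(2);
if (layer != null) {
  // Forward packets from the selected layer.
}

Because the loop above starts at index 1 and stops at the first non-streaming layer, the base layer is returned even when all of the higher layers have gone quiet.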
    /**
     * Opens a connection to the media source of the associated <tt>DataSource</tt>.
     *
     * @throws IOException if anything goes wrong while opening a connection to the media source of
     *     the associated <tt>DataSource</tt>
     */
    public synchronized void connect() throws IOException {
      javax.media.format.AudioFormat af = (javax.media.format.AudioFormat) getFormat();
      int channels = af.getChannels();
      int channelConfig;

      switch (channels) {
        case Format.NOT_SPECIFIED:
        case 1:
          channelConfig = AudioFormat.CHANNEL_IN_MONO;
          break;
        case 2:
          channelConfig = AudioFormat.CHANNEL_IN_STEREO;
          break;
        default:
          throw new IOException("channels");
      }

      int sampleSizeInBits = af.getSampleSizeInBits();
      int audioFormat;

      switch (sampleSizeInBits) {
        case 8:
          audioFormat = AudioFormat.ENCODING_PCM_8BIT;
          break;
        case 16:
          audioFormat = AudioFormat.ENCODING_PCM_16BIT;
          break;
        default:
          throw new IOException("sampleSizeInBits");
      }

      double sampleRate = af.getSampleRate();

      length =
          (int)
              Math.round(
                  20 /* milliseconds */ * (sampleRate / 1000) * channels * (sampleSizeInBits / 8));

      /*
       * In addition to the thread in which #read(Buffer) is executed, apply
       * the audio thread priority to the thread which will create the
       * AudioRecord.
       */
      setThreadPriority();
      try {
        int minBufferSize =
            AudioRecord.getMinBufferSize((int) sampleRate, channelConfig, audioFormat);

        audioRecord =
            new AudioRecord(
                MediaRecorder.AudioSource.DEFAULT,
                (int) sampleRate,
                channelConfig,
                audioFormat,
                Math.max(length, minBufferSize));

        // Try to configure audio effects, if available.
        configureEffects();
      } catch (IllegalArgumentException iae) {
        throw new IOException(iae);
      }

      setThreadPriority = true;
    }
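A quick check of the 20 ms buffer-length formula in connect(), assuming a 16 kHz, mono, 16-bit PCM capture format (these values are illustrative, not what the DataSource necessarily negotiates):

double sampleRate = 16_000;
int channels = 1;
int sampleSizeInBits = 16;
int length = (int) Math.round(
    20 /* milliseconds */ * (sampleRate / 1000) * channels * (sampleSizeInBits / 8));
// length == 640 bytes: 16 samples/ms * 20 ms * 2 bytes per sample, one channel

The AudioRecord is then created with Math.max(length, minBufferSize), so its internal buffer is never smaller than what the platform requires.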
    /**
     * Inspect an <tt>RTCPCompoundPacket</tt> and build up the state for future estimations.
     *
     * @param pkt the <tt>RTCPCompoundPacket</tt> to inspect.
     */
    public void apply(RTCPCompoundPacket pkt) {
      if (pkt == null || pkt.packets == null || pkt.packets.length == 0) {
        return;
      }

      for (RTCPPacket rtcpPacket : pkt.packets) {
        switch (rtcpPacket.type) {
          case RTCPPacket.SR:
            RTCPSRPacket srPacket = (RTCPSRPacket) rtcpPacket;

            // The media sender SSRC.
            int ssrc = srPacket.ssrc;

            // Convert 64-bit NTP timestamp to Java standard time.
            // Note that java time (milliseconds) by definition has
            // less precision than NTP time (picoseconds), so
            // converting NTP timestamp to java time and back to NTP
            // timestamp loses precision. For example, Tue, Dec 17
            // 2002 09:07:24.810 EST is represented by a single
            // Java-based time value of f22cd1fc8a, but its NTP
            // equivalents are all values ranging from
            // c1a9ae1c.cf5c28f5 to c1a9ae1c.cf9db22c.

            // Use round-off on fractional part to preserve going to
            // lower precision
            long fraction = Math.round(1000D * srPacket.ntptimestamplsw / 0x100000000L);
            /*
             * If the most significant bit (MSB) on the seconds
             * field is set we use a different time base. The
             * following text is a quote from RFC-2030 (SNTP v4):
             *
             * If bit 0 is set, the UTC time is in the range
             * 1968-2036 and UTC time is reckoned from 0h 0m 0s UTC
             * on 1 January 1900. If bit 0 is not set, the time is
             * in the range 2036-2104 and UTC time is reckoned from
             * 6h 28m 16s UTC on 7 February 2036.
             */
            long msb = srPacket.ntptimestampmsw & 0x80000000L;
            long remoteTime =
                (msb == 0)
                    // use base: 7-Feb-2036 @ 06:28:16 UTC
                    ? msb0baseTime + (srPacket.ntptimestampmsw * 1000) + fraction
                    // use base: 1-Jan-1900 @ 00:00:00 UTC
                    : msb1baseTime + (srPacket.ntptimestampmsw * 1000) + fraction;

            // Estimate the clock rate of the sender.
            int frequencyHz = -1;
            if (receivedClocks.containsKey(ssrc)) {
              // Calculate the clock rate.
              ReceivedRemoteClock oldStats = receivedClocks.get(ssrc);
              RemoteClock oldRemoteClock = oldStats.getRemoteClock();
              frequencyHz =
                  Math.round(
                      (float)
                              (((int) srPacket.rtptimestamp - oldRemoteClock.getRtpTimestamp())
                                  & 0xffffffffl)
                          / (remoteTime - oldRemoteClock.getRemoteTime()));
            }

            // Replace whatever was in there before.
            receivedClocks.put(
                ssrc,
                new ReceivedRemoteClock(
                    ssrc, remoteTime, (int) srPacket.rtptimestamp, frequencyHz));
            break;
          case RTCPPacket.SDES:
            // Nothing to do for SDES packets here.
            break;
        }
      }
    }
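A standalone sketch of the 64-bit NTP to Java time conversion used in the SR branch above. The base-time constants are the standard epoch offsets expressed in milliseconds relative to the Java epoch; the fields with these names in the surrounding class are assumed to hold the same values:

final class NtpTimeSketch {
  // 7-Feb-2036 06:28:16 UTC, used when the MSB of the seconds field is 0.
  static final long msb0baseTime = 2085978496000L;
  // 1-Jan-1900 00:00:00 UTC, used when the MSB of the seconds field is 1.
  static final long msb1baseTime = -2208988800000L;

  static long ntpToJavaMillis(long seconds, long fraction) {
    // Round the 32-bit fraction of a second to milliseconds.
    long fractionMs = Math.round(1000D * fraction / 0x100000000L);
    // Bit 0 of the seconds field selects the epoch (see the RFC-2030 quote above).
    long msb = seconds & 0x80000000L;
    return (msb == 0)
        ? msb0baseTime + seconds * 1000 + fractionMs  // time is in 2036-2104
        : msb1baseTime + seconds * 1000 + fractionMs; // time is in 1968-2036
  }
}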