Example #1
1
  /**
   * {@inheritDoc}
   *
   * @param format unused, since this implementation records multiple streams using potentially
   *     different formats.
   * @param dirname the path to the directory into which this <tt>Recorder</tt> will store the
   *     recorded media files.
   */
  @Override
  public void start(String format, String dirname) throws IOException, MediaException {
    if (logger.isInfoEnabled()) logger.info("Starting, format=" + format + " " + hashCode());
    path = dirname;

    MediaService mediaService = LibJitsi.getMediaService();

    /*
     * Note that we use only one RTPConnector for both the RTPTranslator
     * and the RTPManager instances: this.translator will write to its
     * output streams, and this.rtpManager will read from its input streams.
     */
    rtpConnector = new RTPConnectorImpl(redPayloadType, ulpfecPayloadType);

    rtpManager = RTPManager.newInstance();

    /*
     * Add the formats that we know about.
     */
    rtpManager.addFormat(vp8RtpFormat, vp8PayloadType);
    rtpManager.addFormat(opusFormat, opusPayloadType);
    rtpManager.addReceiveStreamListener(this);

    /*
     * Note: When this.rtpManager sends RTCP sender/receiver reports, they
     * will end up being written to its own input stream. This is not
     * expected to cause problems, but might be something to keep an eye on.
     */
    rtpManager.initialize(rtpConnector);

    /*
     * Register a fake call participant.
     * TODO: can we use a more generic MediaStream here?
     */
    streamRTPManager =
        new StreamRTPManager(
            mediaService.createMediaStream(
                new MediaDeviceImpl(new CaptureDeviceInfo(), MediaType.VIDEO)),
            translator);

    streamRTPManager.initialize(rtpConnector);

    rtcpFeedbackSender = translator.getRtcpFeedbackMessageSender();

    translator.addFormat(streamRTPManager, opusFormat, opusPayloadType);

    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, redFormat,
    // redPayloadType);
    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, ulpfecFormat,
    // ulpfecPayloadType);
    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager,
    // mediaFormatImpl.getFormat(), vp8PayloadType);

    started = true;
  }
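A minimal caller sketch for the start() method above. It assumes the enclosing class implements libjitsi's Recorder interface (suggested by the @Override); the recorder variable and the output directory are placeholders:

  // Hypothetical usage: the format argument is ignored by this implementation,
  // so a placeholder (or null) is fine; only the target directory matters.
  void startRecording(Recorder recorder) throws IOException, MediaException {
    recorder.start(null, "/tmp/recorded-media");
  }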
Example #2
0
  /**
   * Handles a specific <tt>IOException</tt> which was thrown during the execution of {@link
   * #runInConnectThread(DTLSProtocol, TlsPeer, DatagramTransport)} while trying to establish a DTLS
   * connection.
   *
   * @param ioe the <tt>IOException</tt> to handle
   * @param msg the human-readable message to log about the specified <tt>ioe</tt>
   * @param i the number of tries remaining after the current one
   * @return <tt>true</tt> if the specified <tt>ioe</tt> was successfully handled; <tt>false</tt>,
   *     otherwise
   */
  private boolean handleRunInConnectThreadException(IOException ioe, String msg, int i) {
    // SrtpControl.start(MediaType) starts its associated TransformEngine.
    // We also use mediaType to signal the normal stop, i.e. we ignore
    // exceptions after the procedure to stop this PacketTransformer has
    // begun.
    if (mediaType == null) return false;

    if (ioe instanceof TlsFatalAlert) {
      TlsFatalAlert tfa = (TlsFatalAlert) ioe;
      short alertDescription = tfa.getAlertDescription();

      if (alertDescription == AlertDescription.unexpected_message) {
        msg += " Received fatal unexpected message.";
        if (i == 0
            || !Thread.currentThread().equals(connectThread)
            || connector == null
            || mediaType == null) {
          msg += " Giving up after " + (CONNECT_TRIES - i) + " retries.";
        } else {
          msg += " Will retry.";
          logger.error(msg, ioe);

          return true;
        }
      } else {
        msg += " Received fatal alert " + alertDescription + ".";
      }
    }

    logger.error(msg, ioe);
    return false;
  }
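A sketch of the retry loop this helper supports (the actual loop lives in runInConnectThread, which is not shown here). CONNECT_TRIES and handleRunInConnectThreadException come from the code above; attemptDtlsHandshake() is a hypothetical placeholder for the real DTLS connect/accept call:

  // Hypothetical retry loop: `i` counts the tries remaining after the current
  // one, matching the parameter documented above.
  for (int i = CONNECT_TRIES - 1; i >= 0; i--) {
    try {
      attemptDtlsHandshake(); // placeholder, not part of the original code
      break;
    } catch (IOException ioe) {
      if (!handleRunInConnectThreadException(ioe, "DTLS handshake failed.", i)) {
        break;
      }
    }
  }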
Example #3
0
  /** Implements notification in order to track socket state. */
  @Override
  public synchronized void onSctpNotification(SctpSocket socket, SctpNotification notification) {
    if (logger.isDebugEnabled()) {
      logger.debug("socket=" + socket + "; notification=" + notification);
    }
    switch (notification.sn_type) {
      case SctpNotification.SCTP_ASSOC_CHANGE:
        SctpNotification.AssociationChange assocChange =
            (SctpNotification.AssociationChange) notification;

        switch (assocChange.state) {
          case SctpNotification.AssociationChange.SCTP_COMM_UP:
            if (!assocIsUp) {
              boolean wasReady = isReady();

              assocIsUp = true;
              if (isReady() && !wasReady) notifySctpConnectionReady();
            }
            break;

          case SctpNotification.AssociationChange.SCTP_COMM_LOST:
          case SctpNotification.AssociationChange.SCTP_SHUTDOWN_COMP:
          case SctpNotification.AssociationChange.SCTP_CANT_STR_ASSOC:
            try {
              closeStream();
            } catch (IOException e) {
              logger.error("Error closing SCTP socket", e);
            }
            break;
        }
        break;
    }
  }
Example #4
0
  private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) {
    if (receiveStream.format instanceof VideoFormat) {
      rtpConnector.packetBuffer.disable(receiveStream.ssrc);
      emptyPacketBuffer(receiveStream.ssrc);
    }

    if (receiveStream.dataSink != null) {
      try {
        receiveStream.dataSink.stop();
      } catch (IOException e) {
        logger.error("Failed to stop DataSink " + e);
      }

      receiveStream.dataSink.close();
    }

    if (receiveStream.processor != null) {
      receiveStream.processor.stop();
      receiveStream.processor.close();
    }

    DataSource dataSource = receiveStream.receiveStream.getDataSource();
    if (dataSource != null) {
      try {
        dataSource.stop();
      } catch (IOException ioe) {
        logger.warn("Failed to stop DataSource");
      }
      dataSource.disconnect();
    }

    synchronized (receiveStreams) {
      receiveStreams.remove(receiveStream);
    }
  }
Example #5
0
  /**
   * Restarts the recording for a specific SSRC.
   *
   * @param ssrc the SSRC for which to restart recording.
   * @param timestamp the timestamp of the first RTP packet of the new recording.
   */
  private void resetRecording(long ssrc, long timestamp) {
    ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);

    // we only restart audio recordings
    if (receiveStream != null && receiveStream.format instanceof AudioFormat) {
      String newFilename = getNextFilename(path + "/" + ssrc, AUDIO_FILENAME_SUFFIX);

      // flush the buffer contained in the MP3 encoder
      String s = "trying to flush ssrc=" + ssrc;
      Processor p = receiveStream.processor;
      if (p != null) {
        s += " p!=null";
        for (TrackControl tc : p.getTrackControls()) {
          Object o = tc.getControl(FlushableControl.class.getName());
          if (o != null) ((FlushableControl) o).flush();
        }
      }

      if (logger.isInfoEnabled()) {
        logger.info("Restarting recording for SSRC=" + ssrc + ". New filename: " + newFilename);
      }

      receiveStream.dataSink.close();
      receiveStream.dataSink = null;

      // flush the FMJ jitter buffer
      // DataSource ds = receiveStream.receiveStream.getDataSource();
      // if (ds instanceof net.sf.fmj.media.protocol.rtp.DataSource)
      //    ((net.sf.fmj.media.protocol.rtp.DataSource)ds).flush();

      receiveStream.filename = newFilename;
      try {
        receiveStream.dataSink =
            Manager.createDataSink(
                receiveStream.dataSource, new MediaLocator("file:" + newFilename));
      } catch (NoDataSinkException ndse) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ndse);
        removeReceiveStream(receiveStream, false);
        // dataSink is still null at this point; bail out to avoid an NPE below.
        return;
      }

      try {
        receiveStream.dataSink.open();
        receiveStream.dataSink.start();
      } catch (IOException ioe) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ioe);
        removeReceiveStream(receiveStream, false);
        // The stream has been removed; don't report a (re)started recording.
        return;
      }

      audioRecordingStarted(ssrc, timestamp);
    }
  }
Example #6
0
  /**
   * Starts the configuration service.
   *
   * @param bundleContext the <tt>BundleContext</tt> as provided by the OSGi framework.
   * @throws Exception if anything goes wrong
   */
  public void start(BundleContext bundleContext) throws Exception {
    FileAccessService fas = ServiceUtils.getService(bundleContext, FileAccessService.class);

    if (fas != null) {
      File usePropFileConfig;
      try {
        usePropFileConfig =
            fas.getPrivatePersistentFile(".usepropfileconfig", FileCategory.PROFILE);
      } catch (Exception ise) {
        // There is somewhat of a chicken-and-egg dependency between
        // FileConfigurationServiceImpl and ConfigurationServiceImpl:
        // FileConfigurationServiceImpl throws IllegalStateException if
        // certain System properties are not set,
        // ConfigurationServiceImpl will make sure that these properties
        // are set but it will do that later.
        // A SecurityException is thrown when the destination
        // is not writable or we do not have access to that folder
        usePropFileConfig = null;
      }

      if (usePropFileConfig != null && usePropFileConfig.exists()) {
        logger.info("Using properties file configuration store.");
        this.cs = LibJitsi.getConfigurationService();
      }
    }

    if (this.cs == null) {
      this.cs = new JdbcConfigService(fas);
    }

    bundleContext.registerService(ConfigurationService.class.getName(), this.cs, null);

    fixPermissions(this.cs);
  }
Example #7
0
 /** Stops this <tt>PacketTransformer</tt>. */
 private synchronized void stop() {
   if (connectThread != null) connectThread = null;
   try {
     // The dtlsTransport and _srtpTransformer SHOULD be closed, of
     // course. The datagramTransport MUST be closed.
     if (dtlsTransport != null) {
       try {
         dtlsTransport.close();
       } catch (IOException ioe) {
         logger.error("Failed to (properly) close " + dtlsTransport.getClass(), ioe);
       }
       dtlsTransport = null;
     }
     if (_srtpTransformer != null) {
       _srtpTransformer.close();
       _srtpTransformer = null;
     }
   } finally {
     try {
       closeDatagramTransport();
     } finally {
       notifyAll();
     }
   }
 }
Example #8
0
  /**
   * Sends acknowledgment for open channel request on given SCTP stream ID.
   *
   * @param sid SCTP stream identifier to be used for sending ack.
   */
  private void sendOpenChannelAck(int sid) throws IOException {
    // Send ACK
    byte[] ack = MSG_CHANNEL_ACK_BYTES;
    int sendAck = sctpSocket.send(ack, true, sid, WEB_RTC_PPID_CTRL);

    if (sendAck != ack.length) {
      logger.error("Failed to send open channel confirmation");
    }
  }
Example #9
0
  /**
   * {@inheritDoc}
   *
   * <p>SCTP input data callback.
   */
  @Override
  public void onSctpPacket(
      byte[] data, int sid, int ssn, int tsn, long ppid, int context, int flags) {
    if (ppid == WEB_RTC_PPID_CTRL) {
      // Channel control PPID
      try {
        onCtrlPacket(data, sid);
      } catch (IOException e) {
        logger.error("IOException when processing ctrl packet", e);
      }
    } else if (ppid == WEB_RTC_PPID_STRING || ppid == WEB_RTC_PPID_BIN) {
      WebRtcDataStream channel;

      synchronized (this) {
        channel = channels.get(sid);
      }

      if (channel == null) {
        logger.error("No channel found for sid: " + sid);
        return;
      }
      if (ppid == WEB_RTC_PPID_STRING) {
        // WebRTC String
        String str;
        String charsetName = "UTF-8";

        try {
          str = new String(data, charsetName);
        } catch (UnsupportedEncodingException uee) {
          logger.error("Unsupported charset encoding/name " + charsetName, uee);
          str = null;
        }
        channel.onStringMsg(str);
      } else {
        // WebRTC Binary
        channel.onBinaryMsg(data);
      }
    } else {
      logger.warn("Got message on unsupported PPID: " + ppid);
    }
  }
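Since the charset here is always UTF-8, the decoding could equivalently use java.nio.charset.StandardCharsets (available since Java 7), which avoids the checked UnsupportedEncodingException entirely. A minimal sketch:

  // Equivalent decoding without the checked exception:
  String str = new String(data, java.nio.charset.StandardCharsets.UTF_8);
  channel.onStringMsg(str);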
Example #10
0
 /**
  * Closes {@link #datagramTransport} if it is non-<tt>null</tt> and logs and swallows any
  * <tt>IOException</tt>.
  */
 private void closeDatagramTransport() {
   if (datagramTransport != null) {
     try {
       datagramTransport.close();
     } catch (IOException ioe) {
       // DatagramTransportImpl has no reason to fail because it is
       // merely an adapter of #connector and this PacketTransformer to
       // the terms of the Bouncy Castle Crypto API.
       logger.error("Failed to (properly) close " + datagramTransport.getClass(), ioe);
     }
     datagramTransport = null;
   }
 }
Example #11
0
  private void emptyPacketBuffer(long ssrc) {
    RawPacket[] pkts = rtpConnector.packetBuffer.emptyBuffer(ssrc);
    RTPConnectorImpl.OutputDataStreamImpl dataStream;

    try {
      dataStream = rtpConnector.getDataOutputStream();
    } catch (IOException ioe) {
      logger.error("Failed to empty packet buffer for SSRC=" + ssrc + ": " + ioe);
      return;
    }
    for (RawPacket pkt : pkts)
      dataStream.write(
          pkt.getBuffer(), pkt.getOffset(), pkt.getLength(), false /* already transformed */);
  }
Example #12
0
  @Override
  public void stop() {
    if (started) {
      if (logger.isInfoEnabled()) logger.info("Stopping " + hashCode());

      // Remove the recorder from the translator (i.e. stop new packets from
      // being written to rtpConnector).
      if (streamRTPManager != null) streamRTPManager.dispose();

      HashSet<ReceiveStreamDesc> streamsToRemove = new HashSet<ReceiveStreamDesc>();
      synchronized (receiveStreams) {
        streamsToRemove.addAll(receiveStreams);
      }

      for (ReceiveStreamDesc r : streamsToRemove) removeReceiveStream(r, false);

      rtpConnector.rtcpPacketTransformer.close();
      rtpConnector.rtpPacketTransformer.close();
      rtpManager.dispose();

      started = false;
    }
  }
Example #13
0
  /**
   * Sends the data contained in a specific byte array as application data through the DTLS
   * connection of this <tt>DtlsPacketTransformer</tt>.
   *
   * @param buf the byte array containing data to send.
   * @param off the offset in <tt>buf</tt> where the data begins.
   * @param len the length of data to send.
   */
  public void sendApplicationData(byte[] buf, int off, int len) {
    DTLSTransport dtlsTransport = this.dtlsTransport;
    Throwable throwable = null;

    if (dtlsTransport != null) {
      try {
        dtlsTransport.send(buf, off, len);
      } catch (IOException ioe) {
        throwable = ioe;
      }
    } else {
      throwable = new NullPointerException("dtlsTransport");
    }
    if (throwable != null) {
      // SrtpControl.start(MediaType) starts its associated
      // TransformEngine. We also use mediaType to signal the normal stop,
      // i.e. we ignore exceptions after the procedure to stop this
      // PacketTransformer has begun.
      if (mediaType != null && !tlsPeerHasRaisedCloseNotifyWarning) {
        logger.error("Failed to send application data over DTLS transport: ", throwable);
      }
    }
  }
Example #14
0
  /**
   * Makes the home folder and the configuration file readable and writable only by the owner.
   *
   * @param cs the <tt>ConfigurationService</tt> instance to check for home folder and configuration
   *     file.
   */
  private static void fixPermissions(ConfigurationService cs) {
    if (!OSUtils.IS_LINUX && !OSUtils.IS_MAC) return;

    try {
      // let's check config file and config folder
      File homeFolder = new File(cs.getScHomeDirLocation(), cs.getScHomeDirName());
      Set<PosixFilePermission> perms =
          new HashSet<PosixFilePermission>() {
            {
              add(PosixFilePermission.OWNER_READ);
              add(PosixFilePermission.OWNER_WRITE);
              add(PosixFilePermission.OWNER_EXECUTE);
            }
          };
      Files.setPosixFilePermissions(Paths.get(homeFolder.getAbsolutePath()), perms);

      String fileName = cs.getConfigurationFilename();
      if (fileName != null) {
        File cf = new File(homeFolder, fileName);
        if (cf.exists()) {
          perms =
              new HashSet<PosixFilePermission>() {
                {
                  add(PosixFilePermission.OWNER_READ);
                  add(PosixFilePermission.OWNER_WRITE);
                }
              };
          Files.setPosixFilePermissions(Paths.get(cf.getAbsolutePath()), perms);
        }
      }
    } catch (Throwable t) {
      logger.error("Error creating c lib instance for fixing file permissions", t);

      if (t instanceof InterruptedException) Thread.currentThread().interrupt();
      else if (t instanceof ThreadDeath) throw (ThreadDeath) t;
    }
  }
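The same owner-only permissions can be written more compactly with PosixFilePermissions.fromString (java.nio.file.attribute.PosixFilePermissions, Java 7+) and File.toPath(). A sketch under the same assumptions as the method above, reusing its homeFolder and cf variables:

  // rwx------ for the home folder, rw------- for the configuration file.
  Files.setPosixFilePermissions(
      homeFolder.toPath(), PosixFilePermissions.fromString("rwx------"));
  if (cf.exists()) {
    Files.setPosixFilePermissions(
        cf.toPath(), PosixFilePermissions.fromString("rw-------"));
  }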
Example #15
0
/**
 * A transport layer for WebRTC data channels: an SCTP connection running on top of the ICE/DTLS
 * layer. Manages WebRTC data channels. See
 * http://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-08 for more info on WebRTC data
 * channels.
 *
 * <p>Control protocol: http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-03 FIXME: handle
 * closing of data channels (SCTP stream reset)
 *
 * @author Pawel Domas
 * @author Lyubomir Marinov
 * @author Boris Grozev
 */
public class SctpConnection extends Channel
    implements SctpDataCallback, SctpSocket.NotificationListener {
  /** Generator used to track debug IDs. */
  private static int debugIdGen = -1;

  /** DTLS transport buffer size. Note: randomly chosen. */
  private static final int DTLS_BUFFER_SIZE = 2048;

  /** Switch used for debugging SCTP traffic purposes. FIXME to be removed */
  private static final boolean LOG_SCTP_PACKETS = false;

  /** The logger */
  private static final Logger logger = Logger.getLogger(SctpConnection.class);

  /**
   * Message type used to acknowledge WebRTC data channel allocation on the SCTP stream ID on
   * which the <tt>MSG_OPEN_CHANNEL</tt> message arrives.
   */
  private static final int MSG_CHANNEL_ACK = 0x2;

  private static final byte[] MSG_CHANNEL_ACK_BYTES = new byte[] {MSG_CHANNEL_ACK};

  /**
   * Message type sent over the control PPID in order to open a new WebRTC data channel on the
   * SCTP stream ID on which this message is sent.
   */
  private static final int MSG_OPEN_CHANNEL = 0x3;

  /** SCTP transport buffer size. */
  private static final int SCTP_BUFFER_SIZE = DTLS_BUFFER_SIZE - 13;

  /** The pool of <tt>Thread</tt>s which run <tt>SctpConnection</tt>s. */
  private static final ExecutorService threadPool =
      ExecutorUtils.newCachedThreadPool(true, SctpConnection.class.getName());

  /** Payload protocol id that identifies binary data in WebRTC data channel. */
  static final int WEB_RTC_PPID_BIN = 53;

  /** Payload protocol id for control data. Used for <tt>WebRtcDataStream</tt> allocation. */
  static final int WEB_RTC_PPID_CTRL = 50;

  /** Payload protocol id that identifies UTF-8 encoded text data in WebRTC data channels. */
  static final int WEB_RTC_PPID_STRING = 51;

  /**
   * The <tt>String</tt> value of the <tt>Protocol</tt> field of the <tt>DATA_CHANNEL_OPEN</tt>
   * message.
   */
  private static final String WEBRTC_DATA_CHANNEL_PROTOCOL = "http://jitsi.org/protocols/colibri";

  private static synchronized int generateDebugId() {
    debugIdGen += 2;
    return debugIdGen;
  }

  /**
   * Indicates whether the SCTP association is ready and has not been ended by a subsequent state
   * change.
   */
  private boolean assocIsUp;

  /** Indicates if we have accepted incoming connection. */
  private boolean acceptedIncomingConnection;

  /** Data channels mapped by SCTP stream identifier (sid). */
  private final Map<Integer, WebRtcDataStream> channels = new HashMap<Integer, WebRtcDataStream>();

  /** Debug ID used to distinguish SCTP sockets in packet logs. */
  private final int debugId;

  /**
   * The <tt>AsyncExecutor</tt> which is to asynchronously dispatch the events fired by this
   * instance in order to prevent possible listeners from blocking this <tt>SctpConnection</tt> in
   * general and {@link #sctpSocket} in particular for too long. The timeout of <tt>15</tt> is
   * chosen to be in accord with the time it takes to expire a <tt>Channel</tt>.
   */
  private final AsyncExecutor<Runnable> eventDispatcher =
      new AsyncExecutor<Runnable>(15, TimeUnit.MILLISECONDS);

  /** Datagram socket for ICE/UDP layer. */
  private IceSocketWrapper iceSocket;

  /**
   * List of <tt>WebRtcDataStreamListener</tt>s that will be notified whenever a new WebRTC data
   * channel is opened.
   */
  private final List<WebRtcDataStreamListener> listeners =
      new ArrayList<WebRtcDataStreamListener>();

  /** Remote SCTP port. */
  private final int remoteSctpPort;

  /** <tt>SctpSocket</tt> used for SCTP transport. */
  private SctpSocket sctpSocket;

  /**
   * Flag that prevents this connection from being started multiple times from {@link
   * #maybeStartStream()}.
   */
  private boolean started;

  /**
   * Initializes a new <tt>SctpConnection</tt> instance.
   *
   * @param id the string identifier of this connection instance
   * @param content the <tt>Content</tt> which is initializing the new instance
   * @param endpoint the <tt>Endpoint</tt> of newly created instance
   * @param remoteSctpPort the SCTP port used by remote peer
   * @param channelBundleId the ID of the channel-bundle this <tt>SctpConnection</tt> is to be a
   *     part of (or <tt>null</tt> if it is not to be a part of a channel-bundle).
   * @throws Exception if an error occurs while initializing the new instance
   */
  public SctpConnection(
      String id, Content content, Endpoint endpoint, int remoteSctpPort, String channelBundleId)
      throws Exception {
    super(content, id, channelBundleId);

    setEndpoint(endpoint.getID());

    this.remoteSctpPort = remoteSctpPort;
    this.debugId = generateDebugId();
  }

  /**
   * Adds <tt>WebRtcDataStreamListener</tt> to the list of listeners.
   *
   * @param listener the <tt>WebRtcDataStreamListener</tt> to be added to the listeners list.
   */
  public void addChannelListener(WebRtcDataStreamListener listener) {
    if (listener == null) {
      throw new NullPointerException("listener");
    } else {
      synchronized (listeners) {
        if (!listeners.contains(listener)) {
          listeners.add(listener);
        }
      }
    }
  }

  /** {@inheritDoc} */
  @Override
  protected void closeStream() throws IOException {
    try {
      synchronized (this) {
        assocIsUp = false;
        acceptedIncomingConnection = false;
        if (sctpSocket != null) {
          sctpSocket.close();
          sctpSocket = null;
        }
      }
    } finally {
      if (iceSocket != null) {
        // It is now the responsibility of the transport manager to
        // close the socket.
        // iceUdpSocket.close();
      }
    }
  }

  /** {@inheritDoc} */
  @Override
  public void expire() {
    try {
      eventDispatcher.shutdown();
    } finally {
      super.expire();
    }
  }

  /**
   * Gets the <tt>WebRtcDataStreamListener</tt>s added to this instance.
   *
   * @return the <tt>WebRtcDataStreamListener</tt>s added to this instance or <tt>null</tt> if there
   *     are no <tt>WebRtcDataStreamListener</tt>s added to this instance
   */
  private WebRtcDataStreamListener[] getChannelListeners() {
    WebRtcDataStreamListener[] ls;

    synchronized (listeners) {
      if (listeners.isEmpty()) {
        ls = null;
      } else {
        ls = listeners.toArray(new WebRtcDataStreamListener[listeners.size()]);
      }
    }
    return ls;
  }

  /**
   * Returns the default <tt>WebRtcDataStream</tt> if it's ready, or <tt>null</tt> otherwise.
   *
   * @return the default <tt>WebRtcDataStream</tt> if it's ready, or <tt>null</tt> otherwise.
   * @throws IOException if an I/O error occurs while opening the default data channel.
   */
  public WebRtcDataStream getDefaultDataStream() throws IOException {
    WebRtcDataStream def;

    synchronized (this) {
      if (sctpSocket == null) {
        def = null;
      } else {
        // Channel that runs on sid 0
        def = channels.get(0);
        if (def == null) {
          def = openChannel(0, 0, 0, 0, "default");
        }
        // Pawel Domas: Must be acknowledged before use
        /*
         * XXX Lyubomir Marinov: We're always sending ordered. According
         * to "WebRTC Data Channel Establishment Protocol", we can start
         * sending messages containing user data after the
         * DATA_CHANNEL_OPEN message has been sent without waiting for
         * the reception of the corresponding DATA_CHANNEL_ACK message.
         */
        //                if (!def.isAcknowledged())
        //                    def = null;
      }
    }
    return def;
  }

  /**
   * Returns <tt>true</tt> if this <tt>SctpConnection</tt> is connected to the remote peer and
   * operational.
   *
   * @return <tt>true</tt> if this <tt>SctpConnection</tt> is connected to the remote peer and
   *     operational
   */
  public boolean isReady() {
    return assocIsUp && acceptedIncomingConnection;
  }

  /** {@inheritDoc} */
  @Override
  protected void maybeStartStream() throws IOException {
    // connector
    final StreamConnector connector = getStreamConnector();

    if (connector == null) return;

    synchronized (this) {
      if (started) return;

      threadPool.execute(
          new Runnable() {
            @Override
            public void run() {
              try {
                Sctp.init();

                runOnDtlsTransport(connector);
              } catch (IOException e) {
                logger.error(e, e);
              } finally {
                try {
                  Sctp.finish();
                } catch (IOException e) {
                  logger.error("Failed to shutdown SCTP stack", e);
                }
              }
            }
          });

      started = true;
    }
  }

  /**
   * Submits {@link #notifyChannelOpenedInEventDispatcher(WebRtcDataStream)} to {@link
   * #eventDispatcher} for asynchronous execution.
   *
   * @param dataChannel the <tt>WebRtcDataStream</tt> which has been opened.
   */
  private void notifyChannelOpened(final WebRtcDataStream dataChannel) {
    if (!isExpired()) {
      eventDispatcher.execute(
          new Runnable() {
            @Override
            public void run() {
              notifyChannelOpenedInEventDispatcher(dataChannel);
            }
          });
    }
  }

  private void notifyChannelOpenedInEventDispatcher(WebRtcDataStream dataChannel) {
    /*
     * When executing asynchronously in eventDispatcher, it is technically
     * possible that this SctpConnection may have expired by now.
     */
    if (!isExpired()) {
      WebRtcDataStreamListener[] ls = getChannelListeners();

      if (ls != null) {
        for (WebRtcDataStreamListener l : ls) {
          l.onChannelOpened(this, dataChannel);
        }
      }
    }
  }

  /**
   * Submits {@link #notifySctpConnectionReadyInEventDispatcher()} to {@link #eventDispatcher} for
   * asynchronous execution.
   */
  private void notifySctpConnectionReady() {
    if (!isExpired()) {
      eventDispatcher.execute(
          new Runnable() {
            @Override
            public void run() {
              notifySctpConnectionReadyInEventDispatcher();
            }
          });
    }
  }

  /**
   * Notifies the <tt>WebRtcDataStreamListener</tt>s added to this instance that this
   * <tt>SctpConnection</tt> is ready i.e. it is connected to the remote peer and operational.
   */
  private void notifySctpConnectionReadyInEventDispatcher() {
    /*
     * When executing asynchronously in eventDispatcher, it is technically
     * possible that this SctpConnection may have expired by now.
     */
    if (!isExpired() && isReady()) {
      WebRtcDataStreamListener[] ls = getChannelListeners();

      if (ls != null) {
        for (WebRtcDataStreamListener l : ls) {
          l.onSctpConnectionReady(this);
        }
      }
    }
  }

  /**
   * Handles control packet.
   *
   * @param data raw packet data that arrived on control PPID.
   * @param sid SCTP stream id on which the data has arrived.
   */
  private synchronized void onCtrlPacket(byte[] data, int sid) throws IOException {
    ByteBuffer buffer = ByteBuffer.wrap(data);
    int messageType = /* 1 byte unsigned integer */ 0xFF & buffer.get();

    if (messageType == MSG_CHANNEL_ACK) {
      if (logger.isDebugEnabled()) {
        logger.debug(getEndpoint().getID() + " ACK received SID: " + sid);
      }
      // Open channel ACK
      WebRtcDataStream channel = channels.get(sid);
      if (channel != null) {
        // The acknowledged check prevents firing multiple notifications
        // if we get more than one ACK (by mistake/bug).
        if (!channel.isAcknowledged()) {
          channel.ackReceived();
          notifyChannelOpened(channel);
        } else {
          logger.warn("Redundant ACK received for SID: " + sid);
        }
      } else {
        logger.error("No channel exists on sid: " + sid);
      }
    } else if (messageType == MSG_OPEN_CHANNEL) {
      int channelType = /* 1 byte unsigned integer */ 0xFF & buffer.get();
      int priority = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
      long reliability = /* 4 bytes unsigned integer */ 0xFFFFFFFFL & buffer.getInt();
      int labelLength = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
      int protocolLength = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
      String label;
      String protocol;

      if (labelLength == 0) {
        label = "";
      } else {
        byte[] labelBytes = new byte[labelLength];

        buffer.get(labelBytes);
        label = new String(labelBytes, "UTF-8");
      }
      if (protocolLength == 0) {
        protocol = "";
      } else {
        byte[] protocolBytes = new byte[protocolLength];

        buffer.get(protocolBytes);
        protocol = new String(protocolBytes, "UTF-8");
      }

      if (logger.isDebugEnabled()) {
        logger.debug(
            "!!! "
                + getEndpoint().getID()
                + " data channel open request on SID: "
                + sid
                + " type: "
                + channelType
                + " prio: "
                + priority
                + " reliab: "
                + reliability
                + " label: "
                + label
                + " proto: "
                + protocol);
      }

      if (channels.containsKey(sid)) {
        logger.error("Channel on sid: " + sid + " already exists");
      }

      WebRtcDataStream newChannel = new WebRtcDataStream(sctpSocket, sid, label, true);
      channels.put(sid, newChannel);

      sendOpenChannelAck(sid);

      notifyChannelOpened(newChannel);
    } else {
      logger.error("Unexpected ctrl msg type: " + messageType);
    }
  }

  /** {@inheritDoc} */
  @Override
  protected void onEndpointChanged(Endpoint oldValue, Endpoint newValue) {
    if (oldValue != null) oldValue.setSctpConnection(null);
    if (newValue != null) newValue.setSctpConnection(this);
  }

  /** Implements notification in order to track socket state. */
  @Override
  public synchronized void onSctpNotification(SctpSocket socket, SctpNotification notification) {
    if (logger.isDebugEnabled()) {
      logger.debug("socket=" + socket + "; notification=" + notification);
    }
    switch (notification.sn_type) {
      case SctpNotification.SCTP_ASSOC_CHANGE:
        SctpNotification.AssociationChange assocChange =
            (SctpNotification.AssociationChange) notification;

        switch (assocChange.state) {
          case SctpNotification.AssociationChange.SCTP_COMM_UP:
            if (!assocIsUp) {
              boolean wasReady = isReady();

              assocIsUp = true;
              if (isReady() && !wasReady) notifySctpConnectionReady();
            }
            break;

          case SctpNotification.AssociationChange.SCTP_COMM_LOST:
          case SctpNotification.AssociationChange.SCTP_SHUTDOWN_COMP:
          case SctpNotification.AssociationChange.SCTP_CANT_STR_ASSOC:
            try {
              closeStream();
            } catch (IOException e) {
              logger.error("Error closing SCTP socket", e);
            }
            break;
        }
        break;
    }
  }

  /**
   * {@inheritDoc}
   *
   * <p>SCTP input data callback.
   */
  @Override
  public void onSctpPacket(
      byte[] data, int sid, int ssn, int tsn, long ppid, int context, int flags) {
    if (ppid == WEB_RTC_PPID_CTRL) {
      // Channel control PPID
      try {
        onCtrlPacket(data, sid);
      } catch (IOException e) {
        logger.error("IOException when processing ctrl packet", e);
      }
    } else if (ppid == WEB_RTC_PPID_STRING || ppid == WEB_RTC_PPID_BIN) {
      WebRtcDataStream channel;

      synchronized (this) {
        channel = channels.get(sid);
      }

      if (channel == null) {
        logger.error("No channel found for sid: " + sid);
        return;
      }
      if (ppid == WEB_RTC_PPID_STRING) {
        // WebRTC String
        String str;
        String charsetName = "UTF-8";

        try {
          str = new String(data, charsetName);
        } catch (UnsupportedEncodingException uee) {
          logger.error("Unsupported charset encoding/name " + charsetName, uee);
          str = null;
        }
        channel.onStringMsg(str);
      } else {
        // WebRTC Binary
        channel.onBinaryMsg(data);
      }
    } else {
      logger.warn("Got message on unsupported PPID: " + ppid);
    }
  }

  /**
   * Opens new WebRTC data channel using specified parameters.
   *
   * @param type channel type as defined in control protocol description. Use 0 for "reliable".
   * @param prio channel priority. The higher the number, the lower the priority.
   * @param reliab Reliability Parameter<br>
   *     This field is ignored if a reliable channel is used. If a partial reliable channel with
   *     limited number of retransmissions is used, this field specifies the number of
   *     retransmissions. If a partial reliable channel with limited lifetime is used, this field
   *     specifies the maximum lifetime in milliseconds. The following table summarizes this:
   *     <pre>
   *     +------------------------------------------------+------------------+
   *     | Channel Type                                   |   Reliability    |
   *     |                                                |    Parameter     |
   *     +------------------------------------------------+------------------+
   *     | DATA_CHANNEL_RELIABLE                          |     Ignored      |
   *     | DATA_CHANNEL_RELIABLE_UNORDERED                |     Ignored      |
   *     | DATA_CHANNEL_PARTIAL_RELIABLE_REXMIT           |  Number of RTX   |
   *     | DATA_CHANNEL_PARTIAL_RELIABLE_REXMIT_UNORDERED |  Number of RTX   |
   *     | DATA_CHANNEL_PARTIAL_RELIABLE_TIMED            |  Lifetime in ms  |
   *     | DATA_CHANNEL_PARTIAL_RELIABLE_TIMED_UNORDERED  |  Lifetime in ms  |
   *     +------------------------------------------------+------------------+
   *     </pre>
   * @param sid SCTP stream id that will be used by new channel (it must not be already used).
   * @param label text label for the channel.
   * @return new instance of <tt>WebRtcDataStream</tt> that represents opened WebRTC data channel.
   * @throws IOException if IO error occurs.
   */
  public synchronized WebRtcDataStream openChannel(
      int type, int prio, long reliab, int sid, String label) throws IOException {
    if (channels.containsKey(sid)) {
      throw new IOException("Channel on sid: " + sid + " already exists");
    }

    // Label Length & Label
    byte[] labelBytes;
    int labelByteLength;

    if (label == null) {
      labelBytes = null;
      labelByteLength = 0;
    } else {
      labelBytes = label.getBytes("UTF-8");
      labelByteLength = labelBytes.length;
      if (labelByteLength > 0xFFFF) labelByteLength = 0xFFFF;
    }

    // Protocol Length & Protocol
    String protocol = WEBRTC_DATA_CHANNEL_PROTOCOL;
    byte[] protocolBytes;
    int protocolByteLength;

    if (protocol == null) {
      protocolBytes = null;
      protocolByteLength = 0;
    } else {
      protocolBytes = protocol.getBytes("UTF-8");
      protocolByteLength = protocolBytes.length;
      if (protocolByteLength > 0xFFFF) protocolByteLength = 0xFFFF;
    }

    ByteBuffer packet = ByteBuffer.allocate(12 + labelByteLength + protocolByteLength);

    // Message open new channel on current sid
    // Message Type
    packet.put((byte) MSG_OPEN_CHANNEL);
    // Channel Type
    packet.put((byte) type);
    // Priority
    packet.putShort((short) prio);
    // Reliability Parameter
    packet.putInt((int) reliab);
    // Label Length
    packet.putShort((short) labelByteLength);
    // Protocol Length
    packet.putShort((short) protocolByteLength);
    // Label
    if (labelByteLength != 0) {
      packet.put(labelBytes, 0, labelByteLength);
    }
    // Protocol
    if (protocolByteLength != 0) {
      packet.put(protocolBytes, 0, protocolByteLength);
    }

    int sentCount = sctpSocket.send(packet.array(), true, sid, WEB_RTC_PPID_CTRL);

    if (sentCount != packet.capacity()) {
      throw new IOException("Failed to open new chanel on sid: " + sid);
    }

    WebRtcDataStream channel = new WebRtcDataStream(sctpSocket, sid, label, false);

    channels.put(sid, channel);

    return channel;
  }

  /**
   * Removes <tt>WebRtcDataStreamListener</tt> from the list of listeners.
   *
   * @param listener the <tt>WebRtcDataStreamListener</tt> to be removed from the listeners list.
   */
  public void removeChannelListener(WebRtcDataStreamListener listener) {
    if (listener != null) {
      synchronized (listeners) {
        listeners.remove(listener);
      }
    }
  }

  private void runOnDtlsTransport(StreamConnector connector) throws IOException {
    DtlsControlImpl dtlsControl = (DtlsControlImpl) getTransportManager().getDtlsControl(this);
    DtlsTransformEngine engine = dtlsControl.getTransformEngine();
    final DtlsPacketTransformer transformer = (DtlsPacketTransformer) engine.getRTPTransformer();

    byte[] receiveBuffer = new byte[SCTP_BUFFER_SIZE];

    if (LOG_SCTP_PACKETS) {
      System.setProperty(
          ConfigurationService.PNAME_SC_HOME_DIR_LOCATION, System.getProperty("java.io.tmpdir"));
      System.setProperty(
          ConfigurationService.PNAME_SC_HOME_DIR_NAME, SctpConnection.class.getName());
    }

    synchronized (this) {
      // FIXME local SCTP port is hardcoded in bridge offer SDP (Jitsi
      // Meet)
      sctpSocket = Sctp.createSocket(5000);
      assocIsUp = false;
      acceptedIncomingConnection = false;
    }

    // Implement output network link for SCTP stack on DTLS transport
    sctpSocket.setLink(
        new NetworkLink() {
          @Override
          public void onConnOut(SctpSocket s, byte[] packet) throws IOException {
            if (LOG_SCTP_PACKETS) {
              LibJitsi.getPacketLoggingService()
                  .logPacket(
                      PacketLoggingService.ProtocolName.ICE4J,
                      new byte[] {0, 0, 0, (byte) debugId},
                      5000,
                      new byte[] {0, 0, 0, (byte) (debugId + 1)},
                      remoteSctpPort,
                      PacketLoggingService.TransportName.UDP,
                      true,
                      packet);
            }

            // Send through DTLS transport
            transformer.sendApplicationData(packet, 0, packet.length);
          }
        });

    if (logger.isDebugEnabled()) {
      logger.debug("Connecting SCTP to port: " + remoteSctpPort + " to " + getEndpoint().getID());
    }

    sctpSocket.setNotificationListener(this);
    sctpSocket.listen();

    // FIXME manage threads
    threadPool.execute(
        new Runnable() {
          @Override
          public void run() {
            SctpSocket sctpSocket = null;
            try {
              // sctpSocket is set to null on close
              sctpSocket = SctpConnection.this.sctpSocket;
              while (sctpSocket != null) {
                if (sctpSocket.accept()) {
                  acceptedIncomingConnection = true;
                  break;
                }
                Thread.sleep(100);
                sctpSocket = SctpConnection.this.sctpSocket;
              }
              if (isReady()) {
                notifySctpConnectionReady();
              }
            } catch (Exception e) {
              logger.error("Error accepting SCTP connection", e);
            }

            if (sctpSocket == null && logger.isInfoEnabled()) {
              logger.info(
                  "SctpConnection " + getID() + " closed" + " before SctpSocket accept()-ed.");
            }
          }
        });

    // Notify that from now on SCTP connection is considered functional
    sctpSocket.setDataCallback(this);

    // Setup iceSocket
    DatagramSocket datagramSocket = connector.getDataSocket();
    if (datagramSocket != null) {
      this.iceSocket = new IceUdpSocketWrapper(datagramSocket);
    } else {
      this.iceSocket = new IceTcpSocketWrapper(connector.getDataTCPSocket());
    }

    DatagramPacket rcvPacket = new DatagramPacket(receiveBuffer, 0, receiveBuffer.length);

    // Receive loop, breaks when SCTP socket is closed
    try {
      do {
        iceSocket.receive(rcvPacket);

        RawPacket raw =
            new RawPacket(rcvPacket.getData(), rcvPacket.getOffset(), rcvPacket.getLength());

        raw = transformer.reverseTransform(raw);
        // Check for app data
        if (raw == null) continue;

        if (LOG_SCTP_PACKETS) {
          LibJitsi.getPacketLoggingService()
              .logPacket(
                  PacketLoggingService.ProtocolName.ICE4J,
                  new byte[] {0, 0, 0, (byte) (debugId + 1)},
                  remoteSctpPort,
                  new byte[] {0, 0, 0, (byte) debugId},
                  5000,
                  PacketLoggingService.TransportName.UDP,
                  false,
                  raw.getBuffer(),
                  raw.getOffset(),
                  raw.getLength());
        }

        // Pass network packet to SCTP stack
        sctpSocket.onConnIn(raw.getBuffer(), raw.getOffset(), raw.getLength());
      } while (true);
    } finally {
      // Eventually, close the socket although it should happen from
      // expire().
      synchronized (this) {
        assocIsUp = false;
        acceptedIncomingConnection = false;
        if (sctpSocket != null) {
          sctpSocket.close();
          sctpSocket = null;
        }
      }
    }
  }

  /**
   * Sends acknowledgment for open channel request on given SCTP stream ID.
   *
   * @param sid SCTP stream identifier to be used for sending ack.
   */
  private void sendOpenChannelAck(int sid) throws IOException {
    // Send ACK
    byte[] ack = MSG_CHANNEL_ACK_BYTES;
    int sendAck = sctpSocket.send(ack, true, sid, WEB_RTC_PPID_CTRL);

    if (sendAck != ack.length) {
      logger.error("Failed to send open channel confirmation");
    }
  }

  /**
   * {@inheritDoc}
   *
   * <p>Creates a <tt>TransportManager</tt> instance suitable for an <tt>SctpConnection</tt> (e.g.
   * with 1 component only).
   */
  protected TransportManager createTransportManager(String xmlNamespace) throws IOException {
    if (IceUdpTransportPacketExtension.NAMESPACE.equals(xmlNamespace)) {
      Content content = getContent();
      return new IceUdpTransportManager(
          content.getConference(), isInitiator(), 1 /* num components */, content.getName());
    } else if (RawUdpTransportPacketExtension.NAMESPACE.equals(xmlNamespace)) {
      // TODO: support RawUdp once RawUdpTransportManager is updated
      // return new RawUdpTransportManager(this);
      throw new IllegalArgumentException("Unsupported Jingle transport " + xmlNamespace);
    } else {
      throw new IllegalArgumentException("Unsupported Jingle transport " + xmlNamespace);
    }
  }
}
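A hypothetical registration sketch for the listener API above. It assumes WebRtcDataStreamListener declares exactly the two callbacks that SctpConnection invokes (onSctpConnectionReady and onChannelOpened), and that sctpConnection is an already-initialized instance:

  sctpConnection.addChannelListener(
      new WebRtcDataStreamListener() {
        @Override
        public void onSctpConnectionReady(SctpConnection source) {
          // The SCTP association is up and the incoming connection was accepted.
        }

        @Override
        public void onChannelOpened(SctpConnection source, WebRtcDataStream channel) {
          // A WebRTC data channel was negotiated on some SCTP stream.
        }
      });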
Example #16
0
  /**
   * Handles control packet.
   *
   * @param data raw packet data that arrived on control PPID.
   * @param sid SCTP stream id on which the data has arrived.
   */
  private synchronized void onCtrlPacket(byte[] data, int sid) throws IOException {
    ByteBuffer buffer = ByteBuffer.wrap(data);
    int messageType = /* 1 byte unsigned integer */ 0xFF & buffer.get();

    if (messageType == MSG_CHANNEL_ACK) {
      if (logger.isDebugEnabled()) {
        logger.debug(getEndpoint().getID() + " ACK received SID: " + sid);
      }
      // Open channel ACK
      WebRtcDataStream channel = channels.get(sid);
      if (channel != null) {
        // The acknowledged check prevents firing multiple notifications
        // if we get more than one ACK (by mistake/bug).
        if (!channel.isAcknowledged()) {
          channel.ackReceived();
          notifyChannelOpened(channel);
        } else {
          logger.warn("Redundant ACK received for SID: " + sid);
        }
      } else {
        logger.error("No channel exists on sid: " + sid);
      }
    } else if (messageType == MSG_OPEN_CHANNEL) {
      int channelType = /* 1 byte unsigned integer */ 0xFF & buffer.get();
      int priority = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
      long reliability = /* 4 bytes unsigned integer */ 0xFFFFFFFFL & buffer.getInt();
      int labelLength = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
      int protocolLength = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
      String label;
      String protocol;

      if (labelLength == 0) {
        label = "";
      } else {
        byte[] labelBytes = new byte[labelLength];

        buffer.get(labelBytes);
        label = new String(labelBytes, "UTF-8");
      }
      if (protocolLength == 0) {
        protocol = "";
      } else {
        byte[] protocolBytes = new byte[protocolLength];

        buffer.get(protocolBytes);
        protocol = new String(protocolBytes, "UTF-8");
      }

      if (logger.isDebugEnabled()) {
        logger.debug(
            "!!! "
                + getEndpoint().getID()
                + " data channel open request on SID: "
                + sid
                + " type: "
                + channelType
                + " prio: "
                + priority
                + " reliab: "
                + reliability
                + " label: "
                + label
                + " proto: "
                + protocol);
      }

      if (channels.containsKey(sid)) {
        logger.error("Channel on sid: " + sid + " already exists");
      }

      WebRtcDataStream newChannel = new WebRtcDataStream(sctpSocket, sid, label, true);
      channels.put(sid, newChannel);

      sendOpenChannelAck(sid);

      notifyChannelOpened(newChannel);
    } else {
      logger.error("Unexpected ctrl msg type: " + messageType);
    }
  }
  /**
   * Starts the execution of the neomedia bundle in the specified context.
   *
   * @param bundleContext the context in which the neomedia bundle is to start executing
   * @throws Exception if an error occurs while starting the execution of the neomedia bundle in the
   *     specified context
   */
  public void start(BundleContext bundleContext) throws Exception {
    if (logger.isDebugEnabled()) logger.debug("Started.");

    NeomediaActivator.bundleContext = bundleContext;

    // MediaService
    mediaServiceImpl = (MediaServiceImpl) LibJitsi.getMediaService();

    bundleContext.registerService(MediaService.class.getName(), mediaServiceImpl, null);
    if (logger.isDebugEnabled()) logger.debug("Media Service ... [REGISTERED]");

    //        mediaConfiguration = new MediaConfigurationImpl();
    //        bundleContext.registerService(
    //                MediaConfigurationService.class.getStatus(),
    //                getMediaConfiguration(),
    //                null);
    if (logger.isDebugEnabled()) logger.debug("Media Configuration ... [REGISTERED]");

    ConfigurationService cfg = NeomediaActivator.getConfigurationService();
    Dictionary<String, String> mediaProps = new Hashtable<String, String>();

    mediaProps.put(ConfigurationForm.FORM_TYPE, ConfigurationForm.GENERAL_TYPE);

    // If the audio configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(AUDIO_CONFIG_DISABLED_PROP, false))
    //        {
    //            audioConfigurationForm
    //                = new LazyConfigurationForm(
    //                        AudioConfigurationPanel.class.getStatus(),
    //                        getClass().getClassLoader(),
    //                        "plugin.mediaconfig.AUDIO_ICON",
    //                        "impl.neomedia.configform.AUDIO",
    //                        3);
    //
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getStatus(),
    //                    audioConfigurationForm,
    //                    mediaProps);
    //
    //            if (deviceConfigurationPropertyChangeListener == null)
    //            {
    //                // Initializes and registers the changed device configuration
    //                // event ot the notification service.
    //                getNotificationService();
    //
    //                deviceConfigurationPropertyChangeListener
    //                    = new AudioDeviceConfigurationListener();
    //                mediaServiceImpl
    //                    .getDeviceConfiguration()
    //                        .addPropertyChangeListener(
    //                                deviceConfigurationPropertyChangeListener);
    //            }
    //        }

    // If the video configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(VIDEO_CONFIG_DISABLED_PROP, false))
    //        {
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getStatus(),
    //                    new LazyConfigurationForm(
    //                            VideoConfigurationPanel.class.getStatus(),
    //                            getClass().getClassLoader(),
    //                            "plugin.mediaconfig.VIDEO_ICON",
    //                            "impl.neomedia.configform.VIDEO",
    //                            4),
    //                    mediaProps);
    //        }

    // H.264
    // If the H.264 configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(H264_CONFIG_DISABLED_PROP, false))
    //        {
    //            Dictionary<String, String> h264Props
    //                = new Hashtable<String, String>();
    //
    //            h264Props.put(
    //                    ConfigurationForm.FORM_TYPE,
    //                    ConfigurationForm.ADVANCED_TYPE);
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getStatus(),
    //                    new LazyConfigurationForm(
    //                            ConfigurationPanel.class.getStatus(),
    //                            getClass().getClassLoader(),
    //                            "plugin.mediaconfig.VIDEO_ICON",
    //                            "impl.neomedia.configform.H264",
    //                            -1,
    //                            true),
    //                    h264Props);
    //        }

    // ZRTP
    // If the ZRTP configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(ZRTP_CONFIG_DISABLED_PROP, false))
    //        {
    //            Dictionary<String, String> securityProps
    //                = new Hashtable<String, String>();
    //
    //            securityProps.put( ConfigurationForm.FORM_TYPE,
    //                            ConfigurationForm.SECURITY_TYPE);
    //            bundleContext.registerService(
    //                ConfigurationForm.class.getStatus(),
    //                new LazyConfigurationForm(
    //                    SecurityConfigForm.class.getStatus(),
    //                    getClass().getClassLoader(),
    //                    "impl.media.security.zrtp.CONF_ICON",
    //                    "impl.media.security.zrtp.TITLE",
    //                    0),
    //                securityProps);
    //        }

    // We use the nist-sdp stack to parse SDP, and we need to set the
    // following property to make sure that it accepts Java-generated
    // IPv6 addresses that contain address scope zones.
    System.setProperty("gov.nist.core.STRIP_ADDR_SCOPES", "true");

    // AudioNotifierService
    AudioNotifierService audioNotifierService = LibJitsi.getAudioNotifierService();

    audioNotifierService.setMute(
        (cfg == null)
            || !cfg.getBoolean("net.java.sip.communicator" + ".impl.sound.isSoundEnabled", true));
    bundleContext.registerService(AudioNotifierService.class.getName(), audioNotifierService, null);

    if (logger.isInfoEnabled()) logger.info("Audio Notifier Service ...[REGISTERED]");

    // Call Recording
    // If the call recording configuration form is disabled don't continue.
    //        if ((cfg == null)
    //                || !cfg.getBoolean(CALL_RECORDING_CONFIG_DISABLED_PROP, false))
    //        {
    //            Dictionary<String, String> callRecordingProps
    //                = new Hashtable<String, String>();
    //
    //            callRecordingProps.put(
    //                    ConfigurationForm.FORM_TYPE,
    //                    ConfigurationForm.ADVANCED_TYPE);
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getStatus(),
    //                    new LazyConfigurationForm(
    //                            CallRecordingConfigForm.class.getStatus(),
    //                            getClass().getClassLoader(),
    //                            null,
    //                            "plugin.callrecordingconfig.CALL_RECORDING_CONFIG",
    //                            1100,
    //                            true),
    //                    callRecordingProps);
    //        }
  }
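For reference, another bundle could look up the MediaService registered above through the standard OSGi API; a sketch assuming an OSGi framework with the typed getServiceReference overload (R4.3+):

  // Hypothetical consumer-side lookup of the service registered by start().
  ServiceReference<MediaService> ref = bundleContext.getServiceReference(MediaService.class);
  MediaService mediaService = (ref == null) ? null : bundleContext.getService(ref);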
Example #18
0
  /** Starts this <tt>PacketTransformer</tt>. */
  private synchronized void start() {
    if (this.datagramTransport != null) {
      if (this.connectThread == null && dtlsTransport == null) {
        logger.warn(
            getClass().getName()
                + " has been started but has failed to establish"
                + " the DTLS connection!");
      }
      return;
    }

    if (rtcpmux && Component.RTCP == componentID) {
      // In the case of rtcp-mux, the RTCP transformer does not create
      // a DTLS session. The SRTP context (_srtpTransformer) will be
      // initialized on demand using initializeSRTCPTransformerFromRtp().
      return;
    }

    AbstractRTPConnector connector = this.connector;

    if (connector == null) throw new NullPointerException("connector");

    DtlsControl.Setup setup = this.setup;
    SecureRandom secureRandom = DtlsControlImpl.createSecureRandom();
    final DTLSProtocol dtlsProtocolObj;
    final TlsPeer tlsPeer;

    if (DtlsControl.Setup.ACTIVE.equals(setup)) {
      dtlsProtocolObj = new DTLSClientProtocol(secureRandom);
      tlsPeer = new TlsClientImpl(this);
    } else {
      dtlsProtocolObj = new DTLSServerProtocol(secureRandom);
      tlsPeer = new TlsServerImpl(this);
    }
    tlsPeerHasRaisedCloseNotifyWarning = false;

    final DatagramTransportImpl datagramTransport = new DatagramTransportImpl(componentID);

    datagramTransport.setConnector(connector);

    Thread connectThread =
        new Thread() {
          @Override
          public void run() {
            try {
              runInConnectThread(dtlsProtocolObj, tlsPeer, datagramTransport);
            } finally {
              if (Thread.currentThread().equals(DtlsPacketTransformer.this.connectThread)) {
                DtlsPacketTransformer.this.connectThread = null;
              }
            }
          }
        };

    connectThread.setDaemon(true);
    connectThread.setName(DtlsPacketTransformer.class.getName() + ".connectThread");

    this.connectThread = connectThread;
    this.datagramTransport = datagramTransport;

    boolean started = false;

    try {
      connectThread.start();
      started = true;
    } finally {
      if (!started) {
        if (connectThread.equals(this.connectThread)) this.connectThread = null;
        if (datagramTransport.equals(this.datagramTransport)) this.datagramTransport = null;
      }
    }

    notifyAll();
  }
Example #19
0
  /** {@inheritDoc} */
  @Override
  public RawPacket reverseTransform(RawPacket pkt) {
    byte[] buf = pkt.getBuffer();
    int off = pkt.getOffset();
    int len = pkt.getLength();

    if (isDtlsRecord(buf, off, len)) {
      if (rtcpmux && Component.RTCP == componentID) {
        // This should never happen.
        logger.warn(
            "Dropping a DTLS record, because it was received on the"
                + " RTCP channel while rtcpmux is in use.");
        return null;
      }

      boolean receive;

      synchronized (this) {
        if (datagramTransport == null) {
          receive = false;
        } else {
          datagramTransport.queueReceive(buf, off, len);
          receive = true;
        }
      }
      if (receive) {
        DTLSTransport dtlsTransport = this.dtlsTransport;

        if (dtlsTransport == null) {
          // The specified pkt looks like a DTLS record and it has
          // been consumed for the purposes of the secure channel
          // represented by this PacketTransformer.
          pkt = null;
        } else {
          try {
            int receiveLimit = dtlsTransport.getReceiveLimit();
            int delta = receiveLimit - len;

            if (delta > 0) {
              pkt.grow(delta);
              buf = pkt.getBuffer();
              off = pkt.getOffset();
              len = pkt.getLength();
            } else if (delta < 0) {
              pkt.shrink(-delta);
              buf = pkt.getBuffer();
              off = pkt.getOffset();
              len = pkt.getLength();
            }

            int received = dtlsTransport.receive(buf, off, len, DTLS_TRANSPORT_RECEIVE_WAITMILLIS);

            if (received <= 0) {
              // No application data was decoded.
              pkt = null;
            } else {
              delta = len - received;
              if (delta > 0) pkt.shrink(delta);
            }
          } catch (IOException ioe) {
            pkt = null;
            // SrtpControl.start(MediaType) starts its associated
            // TransformEngine. We will use that mediaType to signal
            // the normal stop then as well i.e. we will ignore
            // exception after the procedure to stop this
            // PacketTransformer has begun.
            if (mediaType != null && !tlsPeerHasRaisedCloseNotifyWarning) {
              logger.error("Failed to decode a DTLS record!", ioe);
            }
          }
        }
      } else {
        // The specified pkt looks like a DTLS record but it is
        // unexpected in the current state of the secure channel
        // represented by this PacketTransformer. This PacketTransformer
        // has not been started (successfully) or has been closed.
        pkt = null;
      }
    } else if (transformEngine.isSrtpDisabled()) {
      // In pure DTLS mode only DTLS records pass through.
      pkt = null;
    } else {
      // DTLS-SRTP has not been initialized yet or has failed to
      // initialize.
      SinglePacketTransformer srtpTransformer = waitInitializeAndGetSRTPTransformer();

      if (srtpTransformer != null) pkt = srtpTransformer.reverseTransform(pkt);
      else if (DROP_UNENCRYPTED_PKTS) pkt = null;
      // XXX Else, it is our explicit policy to let the received packet
      // pass through and rely on the SrtpListener to notify the user that
      // the session is not secured.
    }
    return pkt;
  }
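
For context on the branching in reverseTransform() above: DTLS records and SRTP packets arrive on the same socket and are told apart by their first byte, following the demultiplexing heuristic of RFC 5764, section 5.1.2. The sketch below only illustrates that heuristic; the transformer itself relies on the stricter isDtlsRecord() shown in a later example, and the class is hypothetical.

final class DtlsSrtpDemuxSketch {
  enum Kind { STUN, DTLS, RTP_OR_RTCP, UNKNOWN }

  /** Classifies a received datagram by the value of its first byte (RFC 5764, section 5.1.2). */
  static Kind classify(byte[] buf, int off, int len) {
    if (len < 1) return Kind.UNKNOWN;

    int b = buf[off] & 0xff;

    if (b < 2) return Kind.STUN;                        // 0..1: STUN
    if (b >= 20 && b <= 63) return Kind.DTLS;           // 20..63: DTLS (TLS content types)
    if (b >= 128 && b <= 191) return Kind.RTP_OR_RTCP;  // 128..191: RTP version 2
    return Kind.UNKNOWN;
  }
}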
Example #20
  /**
   * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
   * the <tt>Processor</tt>s that this instance uses to transcode media.
   *
   * @param ev the event to handle.
   */
  public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
      return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
      logger.warn("Event from an orphaned processor, ignoring: " + ev);
      return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
      if (logger.isInfoEnabled()) {
        logger.info(
            "Configured processor for ReceiveStream ssrc="
                + desc.ssrc
                + " ("
                + desc.format
                + ")"
                + " "
                + System.currentTimeMillis());
      }

      boolean audio = desc.format instanceof AudioFormat;

      if (audio) {
        ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
        if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
          logger.error(
              "Failed to set the Processor content "
                  + "descriptor to "
                  + AUDIO_CONTENT_DESCRIPTOR
                  + ". Actual result: "
                  + cd);
          removeReceiveStream(desc, false);
          return;
        }
      }

      for (TrackControl track : processor.getTrackControls()) {
        Format trackFormat = track.getFormat();

        if (audio) {
          final long ssrc = desc.ssrc;
          SilenceEffect silenceEffect;
          if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
            silenceEffect = new SilenceEffect(48000);
          } else {
            // We haven't tested that the RTP timestamps survive
            // the journey through the chain when codecs other than
            // opus are in use, so for the moment we rely on FMJ's
            // timestamps for non-opus formats.
            silenceEffect = new SilenceEffect();
          }

          silenceEffect.setListener(
              new SilenceEffect.Listener() {
                boolean first = true;

                @Override
                public void onSilenceNotInserted(long timestamp) {
                  if (first) {
                    first = false;
                    // send event only
                    audioRecordingStarted(ssrc, timestamp);
                  } else {
                    // change file and send event
                    resetRecording(ssrc, timestamp);
                  }
                }
              });
          desc.silenceEffect = silenceEffect;
          AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
          audioLevelEffect.setAudioLevelListener(
              new SimpleAudioLevelListener() {
                @Override
                public void audioLevelChanged(int level) {
                  activeSpeakerDetector.levelChanged(ssrc, level);
                }
              });

          try {
            // We add an effect, which will insert "silence" in
            // place of lost packets.
            track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
          } catch (UnsupportedPlugInException upie) {
            logger.warn("Failed to insert silence effect: " + upie);
            // But do go on, a recording without extra silence is
            // better than nothing ;)
          }
        } else {
          // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
          if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format);
          else {
            logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
            // we currently only support vp8
            removeReceiveStream(desc, false);
            return;
          }
        }
      }

      processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      desc.dataSource = processor.getDataOutput();

      long ssrc = desc.ssrc;
      boolean audio = desc.format instanceof AudioFormat;
      String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

      // XXX '\' on windows?
      String filename = getNextFilename(path + "/" + ssrc, suffix);
      desc.filename = filename;

      DataSink dataSink;
      if (audio) {
        try {
          dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
        } catch (NoDataSinkException ndse) {
          logger.error("Could not create DataSink: " + ndse);
          removeReceiveStream(desc, false);
          return;
        }

      } else {
        dataSink = new WebmDataSink(filename, desc.dataSource);
      }

      if (logger.isInfoEnabled())
        logger.info(
            "Created DataSink ("
                + dataSink
                + ") for SSRC="
                + ssrc
                + ". Output filename: "
                + filename);
      try {
        dataSink.open();
      } catch (IOException e) {
        logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (!audio) {
        final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
        webmDataSink.setSsrc(ssrc);
        webmDataSink.setEventHandler(eventHandler);
        webmDataSink.setKeyFrameControl(
            new KeyFrameControlAdapter() {
              @Override
              public boolean requestKeyFrame(boolean urgent) {
                return requestFIR(webmDataSink);
              }
            });
      }

      try {
        dataSink.start();
      } catch (IOException e) {
        logger.error(
            "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc);

      desc.dataSink = dataSink;

      processor.start();
    } else if (logger.isDebugEnabled()) {
      logger.debug(
          "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
  }
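
The method above is driven by the standard JMF Processor life cycle: configure() completes asynchronously with a ConfigureCompleteEvent, realize() with a RealizeCompleteEvent, and only then can the Processor's output DataSource be wired to a DataSink and everything be started. A stripped-down, hedged sketch of that same life cycle follows; the class name, output content type and file name are hypothetical, while the javax.media calls are standard JMF.

import javax.media.*;
import javax.media.protocol.*;

final class ProcessorToFileSketch implements ControllerListener {
  private final Processor processor;

  ProcessorToFileSketch(DataSource source) throws Exception {
    processor = Manager.createProcessor(source);
    processor.addControllerListener(this);
    processor.configure(); // asynchronous; completion is reported via controllerUpdate()
  }

  @Override
  public void controllerUpdate(ControllerEvent ev) {
    try {
      if (ev instanceof ConfigureCompleteEvent) {
        // Pick an output container format, then move on to the REALIZED state.
        processor.setContentDescriptor(new FileTypeDescriptor(FileTypeDescriptor.WAVE));
        processor.realize();
      } else if (ev instanceof RealizeCompleteEvent) {
        // Wire the Processor's output to a file-backed DataSink and start both.
        DataSource output = processor.getDataOutput();
        DataSink sink = Manager.createDataSink(output, new MediaLocator("file:/tmp/out.wav"));

        sink.open();
        sink.start();
        processor.start();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}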
Example #21
/**
 * Implements <tt>BundleActivator</tt> for the neomedia bundle.
 *
 * @author Martin Andre
 * @author Emil Ivov
 * @author Lyubomir Marinov
 * @author Boris Grozev
 */
public class NeomediaActivator implements BundleActivator {

  /**
   * The <tt>Logger</tt> used by the <tt>NeomediaActivator</tt> class and its instances for logging
   * output.
   */
  private final Logger logger = Logger.getLogger(NeomediaActivator.class);

  /** Indicates if the audio configuration form should be disabled, i.e. not visible to the user. */
  private static final String AUDIO_CONFIG_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.AUDIO_CONFIG_DISABLED";

  /** Indicates if the video configuration form should be disabled, i.e. not visible to the user. */
  private static final String VIDEO_CONFIG_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.VIDEO_CONFIG_DISABLED";

  /** Indicates if the H.264 configuration form should be disabled, i.e. not visible to the user. */
  private static final String H264_CONFIG_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.h264config.DISABLED";

  /** Indicates if the ZRTP configuration form should be disabled, i.e. not visible to the user. */
  private static final String ZRTP_CONFIG_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.zrtpconfig.DISABLED";

  /**
   * Indicates if the call recording config form should be disabled, i.e. not visible to the user.
   */
  private static final String CALL_RECORDING_CONFIG_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.callrecordingconfig.DISABLED";

  /**
   * The name of the notification pop-up event displayed when the device configuration has changed.
   */
  private static final String DEVICE_CONFIGURATION_HAS_CHANGED = "DeviceConfigurationChanged";

  /**
   * The context in which the one and only <tt>NeomediaActivator</tt> instance has started
   * executing.
   */
  private static BundleContext bundleContext;

  /**
   * The <tt>ConfigurationService</tt> registered in {@link #bundleContext} and used by the
   * <tt>NeomediaActivator</tt> instance to read and write configuration properties.
   */
  private static ConfigurationService configurationService;

  /**
   * The <tt>FileAccessService</tt> registered in {@link #bundleContext} and used by the
   * <tt>NeomediaActivator</tt> instance to safely access files.
   */
  private static FileAccessService fileAccessService;

  /** The notification service used to pop up messages. */
  private static NotificationService notificationService;

  /**
   * The one and only <tt>MediaServiceImpl</tt> instance registered in {@link #bundleContext} by the
   * <tt>NeomediaActivator</tt> instance.
   */
  private static MediaServiceImpl mediaServiceImpl;

  /**
   * The <tt>ResourceManagementService</tt> registered in {@link #bundleContext} and representing
   * the resources such as internationalized and localized text and images used by the neomedia
   * bundle.
   */
  private static ResourceManagementService resources;

  /**
   * The OSGi <tt>PacketLoggingService</tt> of {@link #mediaServiceImpl} in {@link #bundleContext}
   * and used for debugging.
   */
  private static PacketLoggingService packetLoggingService = null;

  /** A listener to the click on the popup message concerning device configuration changes. */
  private AudioDeviceConfigurationListener deviceConfigurationPropertyChangeListener;

  /** A {@link MediaConfigurationService} instance. */
  //    private static MediaConfigurationImpl mediaConfiguration;

  /** The audio configuration form used to define the capture/notify/playback audio devices. */
  private static ConfigurationForm audioConfigurationForm;

  /**
   * Starts the execution of the neomedia bundle in the specified context.
   *
   * @param bundleContext the context in which the neomedia bundle is to start executing
   * @throws Exception if an error occurs while starting the execution of the neomedia bundle in the
   *     specified context
   */
  public void start(BundleContext bundleContext) throws Exception {
    if (logger.isDebugEnabled()) logger.debug("Started.");

    NeomediaActivator.bundleContext = bundleContext;

    // MediaService
    mediaServiceImpl = (MediaServiceImpl) LibJitsi.getMediaService();

    bundleContext.registerService(MediaService.class.getName(), mediaServiceImpl, null);
    if (logger.isDebugEnabled()) logger.debug("Media Service ... [REGISTERED]");

    //        mediaConfiguration = new MediaConfigurationImpl();
    //        bundleContext.registerService(
    //                MediaConfigurationService.class.getName(),
    //                getMediaConfiguration(),
    //                null);
    if (logger.isDebugEnabled()) logger.debug("Media Configuration ... [REGISTERED]");

    ConfigurationService cfg = NeomediaActivator.getConfigurationService();
    Dictionary<String, String> mediaProps = new Hashtable<String, String>();

    mediaProps.put(ConfigurationForm.FORM_TYPE, ConfigurationForm.GENERAL_TYPE);

    // If the audio configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(AUDIO_CONFIG_DISABLED_PROP, false))
    //        {
    //            audioConfigurationForm
    //                = new LazyConfigurationForm(
    //                        AudioConfigurationPanel.class.getName(),
    //                        getClass().getClassLoader(),
    //                        "plugin.mediaconfig.AUDIO_ICON",
    //                        "impl.neomedia.configform.AUDIO",
    //                        3);
    //
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getName(),
    //                    audioConfigurationForm,
    //                    mediaProps);
    //
    //            if (deviceConfigurationPropertyChangeListener == null)
    //            {
    //                // Initializes and registers the changed device configuration
    //                // event to the notification service.
    //                getNotificationService();
    //
    //                deviceConfigurationPropertyChangeListener
    //                    = new AudioDeviceConfigurationListener();
    //                mediaServiceImpl
    //                    .getDeviceConfiguration()
    //                        .addPropertyChangeListener(
    //                                deviceConfigurationPropertyChangeListener);
    //            }
    //        }

    // If the video configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(VIDEO_CONFIG_DISABLED_PROP, false))
    //        {
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getName(),
    //                    new LazyConfigurationForm(
    //                            VideoConfigurationPanel.class.getName(),
    //                            getClass().getClassLoader(),
    //                            "plugin.mediaconfig.VIDEO_ICON",
    //                            "impl.neomedia.configform.VIDEO",
    //                            4),
    //                    mediaProps);
    //        }

    // H.264
    // If the H.264 configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(H264_CONFIG_DISABLED_PROP, false))
    //        {
    //            Dictionary<String, String> h264Props
    //                = new Hashtable<String, String>();
    //
    //            h264Props.put(
    //                    ConfigurationForm.FORM_TYPE,
    //                    ConfigurationForm.ADVANCED_TYPE);
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getName(),
    //                    new LazyConfigurationForm(
    //                            ConfigurationPanel.class.getName(),
    //                            getClass().getClassLoader(),
    //                            "plugin.mediaconfig.VIDEO_ICON",
    //                            "impl.neomedia.configform.H264",
    //                            -1,
    //                            true),
    //                    h264Props);
    //        }

    // ZRTP
    // If the ZRTP configuration form is disabled don't register it.
    //        if ((cfg == null) || !cfg.getBoolean(ZRTP_CONFIG_DISABLED_PROP, false))
    //        {
    //            Dictionary<String, String> securityProps
    //                = new Hashtable<String, String>();
    //
    //            securityProps.put( ConfigurationForm.FORM_TYPE,
    //                            ConfigurationForm.SECURITY_TYPE);
    //            bundleContext.registerService(
    //                ConfigurationForm.class.getName(),
    //                new LazyConfigurationForm(
    //                    SecurityConfigForm.class.getName(),
    //                    getClass().getClassLoader(),
    //                    "impl.media.security.zrtp.CONF_ICON",
    //                    "impl.media.security.zrtp.TITLE",
    //                    0),
    //                securityProps);
    //        }

    // We use the nist-sdp stack to parse SDP, and we need to set the
    // following property to make sure that it accepts Java-generated
    // IPv6 addresses that contain address scope zones.
    System.setProperty("gov.nist.core.STRIP_ADDR_SCOPES", "true");

    // AudioNotifierService
    AudioNotifierService audioNotifierService = LibJitsi.getAudioNotifierService();

    audioNotifierService.setMute(
        (cfg == null)
            || !cfg.getBoolean("net.java.sip.communicator" + ".impl.sound.isSoundEnabled", true));
    bundleContext.registerService(AudioNotifierService.class.getName(), audioNotifierService, null);

    if (logger.isInfoEnabled()) logger.info("Audio Notifier Service ...[REGISTERED]");

    // Call Recording
    // If the call recording configuration form is disabled don't continue.
    //        if ((cfg == null)
    //                || !cfg.getBoolean(CALL_RECORDING_CONFIG_DISABLED_PROP, false))
    //        {
    //            Dictionary<String, String> callRecordingProps
    //                = new Hashtable<String, String>();
    //
    //            callRecordingProps.put(
    //                    ConfigurationForm.FORM_TYPE,
    //                    ConfigurationForm.ADVANCED_TYPE);
    //            bundleContext.registerService(
    //                    ConfigurationForm.class.getName(),
    //                    new LazyConfigurationForm(
    //                            CallRecordingConfigForm.class.getName(),
    //                            getClass().getClassLoader(),
    //                            null,
    //                            "plugin.callrecordingconfig.CALL_RECORDING_CONFIG",
    //                            1100,
    //                            true),
    //                    callRecordingProps);
    //        }
  }

  /**
   * Stops the execution of the neomedia bundle in the specified context.
   *
   * @param bundleContext the context in which the neomedia bundle is to stop executing
   * @throws Exception if an error occurs while stopping the execution of the neomedia bundle in the
   *     specified context
   */
  public void stop(BundleContext bundleContext) throws Exception {
    try {
      if (deviceConfigurationPropertyChangeListener != null) {
        mediaServiceImpl
            .getDeviceConfiguration()
            .removePropertyChangeListener(deviceConfigurationPropertyChangeListener);
        if (deviceConfigurationPropertyChangeListener != null) {
          deviceConfigurationPropertyChangeListener.managePopupMessageListenerRegistration(false);
          deviceConfigurationPropertyChangeListener = null;
        }
      }
    } finally {
      configurationService = null;
      fileAccessService = null;
      mediaServiceImpl = null;
      resources = null;
    }
  }

  /**
   * Returns a reference to a ConfigurationService implementation currently registered in the bundle
   * context or null if no such implementation was found.
   *
   * @return a currently valid implementation of the ConfigurationService.
   */
  public static ConfigurationService getConfigurationService() {
    if (configurationService == null) {
      configurationService = ServiceUtils.getService(bundleContext, ConfigurationService.class);
    }
    return configurationService;
  }

  /**
   * Returns a reference to a FileAccessService implementation currently registered in the bundle
   * context or null if no such implementation was found.
   *
   * @return a currently valid implementation of the FileAccessService.
   */
  public static FileAccessService getFileAccessService() {
    if (fileAccessService == null) {
      fileAccessService = ServiceUtils.getService(bundleContext, FileAccessService.class);
    }
    return fileAccessService;
  }

  /**
   * Gets the <tt>MediaService</tt> implementation instance registered by the neomedia bundle.
   *
   * @return the <tt>MediaService</tt> implementation instance registered by the neomedia bundle
   */
  public static MediaServiceImpl getMediaServiceImpl() {
    return mediaServiceImpl;
  }

  //    public static MediaConfigurationService getMediaConfiguration()
  //    {
  //        return mediaConfiguration;
  //    }

  /**
   * Gets the <tt>ResourceManagementService</tt> instance which represents the resources such as
   * internationalized and localized text and images used by the neomedia bundle.
   *
   * @return the <tt>ResourceManagementService</tt> instance which represents the resources such as
   *     internationalized and localized text and images used by the neomedia bundle
   */
  public static ResourceManagementService getResources() {
    if (resources == null) {
      resources = ResourceManagementServiceUtils.getService(bundleContext);
    }
    return resources;
  }

  /**
   * Returns a reference to the <tt>PacketLoggingService</tt> implementation currently registered in
   * the bundle context or null if no such implementation was found.
   *
   * @return a reference to a <tt>PacketLoggingService</tt> implementation currently registered in
   *     the bundle context or null if no such implementation was found.
   */
  public static PacketLoggingService getPacketLogging() {
    if (packetLoggingService == null) {
      packetLoggingService = ServiceUtils.getService(bundleContext, PacketLoggingService.class);
    }
    return packetLoggingService;
  }

  /**
   * Returns the <tt>NotificationService</tt> obtained from the bundle context.
   *
   * @return The <tt>NotificationService</tt> obtained from the bundle context.
   */
  public static NotificationService getNotificationService() {
    if (notificationService == null) {
      // Get the notification service implementation
      ServiceReference notifReference =
          bundleContext.getServiceReference(NotificationService.class.getName());

      notificationService = (NotificationService) bundleContext.getService(notifReference);

      if (notificationService != null) {
        // Register a popup message for a device configuration changed
        // notification.
        notificationService.registerDefaultNotificationForEvent(
            DEVICE_CONFIGURATION_HAS_CHANGED,
            net.java.sip.communicator.service.notification.NotificationAction.ACTION_POPUP_MESSAGE,
            "Device onfiguration has changed",
            null);
      }
    }

    return notificationService;
  }

  /** A listener to the click on the popup message concerning device configuration changes. */
  private class AudioDeviceConfigurationListener implements PropertyChangeListener /*,
                   SystrayPopupMessageListener*/ {
    /**
     * A boolean used to verify that this listener registers only once to the popup message
     * notification handler.
     */
    private boolean isRegisteredToPopupMessageListener = false;

    /**
     * Registers or unregisters as a popup message listener to detect when a user clicks on a
     * notification saying that the device configuration has changed.
     *
     * @param enable <tt>true</tt> to register with the popup message notification handler;
     *     <tt>false</tt> to unregister.
     */
    public void managePopupMessageListenerRegistration(boolean enable) {
      Iterator<NotificationHandler> notificationHandlers =
          notificationService
              .getActionHandlers(
                  net.java.sip.communicator.service.notification.NotificationAction
                      .ACTION_POPUP_MESSAGE)
              .iterator();
      NotificationHandler notificationHandler;
      while (notificationHandlers.hasNext()) {
        notificationHandler = notificationHandlers.next();
        if (notificationHandler instanceof PopupMessageNotificationHandler) {
          // Register.
          if (enable) {
            //                        ((PopupMessageNotificationHandler) notificationHandler)
            //                            .addPopupMessageListener(this);
          }
          // Unregister.
          else {
            //                        ((PopupMessageNotificationHandler) notificationHandler)
            //                            .removePopupMessageListener(this);
          }
        }
      }
    }

    /**
     * Function called when an audio device is plugged or unplugged.
     *
     * @param event The property change event which may concern the audio device.
     */
    public void propertyChange(PropertyChangeEvent event) {
      if (DeviceConfiguration.PROP_AUDIO_SYSTEM_DEVICES.equals(event.getPropertyName())) {
        NotificationService notificationService = getNotificationService();

        if (notificationService != null) {
          // Registers only once with the popup message notification
          // handler.
          if (!isRegisteredToPopupMessageListener) {
            isRegisteredToPopupMessageListener = true;
            managePopupMessageListenerRegistration(true);
          }

          // Fires the popup notification.
          ResourceManagementService resources = NeomediaActivator.getResources();
          Map<String, Object> extras = new HashMap<String, Object>();

          extras.put(NotificationData.POPUP_MESSAGE_HANDLER_TAG_EXTRA, this);
          notificationService.fireNotification(
              DEVICE_CONFIGURATION_HAS_CHANGED,
              resources.getI18NString("impl.media.configform" + ".AUDIO_DEVICE_CONFIG_CHANGED"),
              resources.getI18NString(
                  "impl.media.configform" + ".AUDIO_DEVICE_CONFIG_MANAGMENT_CLICK"),
              null,
              extras);
        }
      }
    }

    /**
     * Indicates that the user has clicked on the systray popup message.
     *
     * @param evt the event triggered when user clicks on the systray popup message
     */
    //        public void popupMessageClicked(SystrayPopupMessageEvent evt)
    //        {
    //            // Checks if this event is fired from one click on one of our popup
    //            // message.
    //            if(evt.getTag() == deviceConfigurationPropertyChangeListener)
    //            {
    //                // Get the UI service
    //                ServiceReference uiReference = bundleContext
    //                    .getServiceReference(UIService.class.getName());
    //
    //                UIService uiService = (UIService) bundleContext
    //                    .getService(uiReference);
    //
    //                if(uiService != null)
    //                {
    //                    // Shows the audio configuration window.
    //                    ConfigurationContainer configurationContainer
    //                        = uiService.getConfigurationContainer();
    //                    configurationContainer.setSelected(audioConfigurationForm);
    //                    configurationContainer.setVisible(true);
    //                }
    //            }
    //        }
  }

  public static BundleContext getBundleContext() {
    return bundleContext;
  }
}
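
NeomediaActivator registers the MediaService under its class name, so other bundles obtain it through the plain OSGi service registry. A hedged sketch of such a consumer follows; the consumer class is hypothetical, and the lookup mirrors the getServiceReference/getService pattern used by getNotificationService() above.

import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;

public class MediaServiceConsumerSketch implements BundleActivator {
  @Override
  public void start(BundleContext bundleContext) throws Exception {
    // Look the service up by class name, then cast, as the activator above does.
    ServiceReference ref = bundleContext.getServiceReference(MediaService.class.getName());
    MediaService mediaService = (ref == null) ? null : (MediaService) bundleContext.getService(ref);

    if (mediaService != null) {
      // ... use mediaService, e.g. to create MediaStream instances ...
    }
  }

  @Override
  public void stop(BundleContext bundleContext) throws Exception {
    // Nothing to clean up in this sketch.
  }
}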
Example #22
  /**
   * Listens for incoming datagrams, stores them for reading by the <tt>read</tt> method and
   * notifies the local <tt>transferHandler</tt> that there's data to be read.
   */
  public void run() {
    DatagramPacket p = new DatagramPacket(buffer, 0, PACKET_RECEIVE_BUFFER_LENGTH);

    while (!closed) {
      try {
        // http://code.google.com/p/android/issues/detail?id=24765
        if (OSUtils.IS_ANDROID) p.setLength(PACKET_RECEIVE_BUFFER_LENGTH);

        receivePacket(p);
      } catch (IOException e) {
        ioError = true;
        break;
      }

      /*
       * Do the DatagramPacketFilters accept the received DatagramPacket?
       */
      DatagramPacketFilter[] datagramPacketFilters = getDatagramPacketFilters();
      boolean accept;

      if (!enabled) accept = false;
      else if (datagramPacketFilters == null) accept = true;
      else {
        accept = true;
        for (int i = 0; i < datagramPacketFilters.length; i++) {
          try {
            if (!datagramPacketFilters[i].accept(p)) {
              accept = false;
              break;
            }
          } catch (Throwable t) {
            if (t instanceof ThreadDeath) throw (ThreadDeath) t;
          }
        }
      }

      if (accept) {
        RawPacket pkts[] = createRawPacket(p);

        for (int i = 0; i < pkts.length; i++) {
          RawPacket pkt = pkts[i];

          pkts[i] = null;

          if (pkt != null) {
            if (pkt.isInvalid()) {
              /*
               * Return pkt to the pool because it is invalid and,
               * consequently, will not be made available to
               * reading.
               */
              poolRawPacket(pkt);
            } else {
              RawPacket oldPkt;

              synchronized (pktSyncRoot) {
                oldPkt = this.pkt;
                this.pkt = pkt;
              }
              if (oldPkt != null) {
                /*
                 * Return oldPkt to the pool because it was made
                 * available to reading and it was not read.
                 */
                poolRawPacket(oldPkt);
              }

              if (videoRecorder != null) videoRecorder.recordData(pkt);

              if ((transferHandler != null) && !closed) {
                try {
                  transferHandler.transferData(this);
                } catch (Throwable t) {
                  /*
                   * XXX We cannot allow transferHandler to
                   * kill us.
                   */
                  if (t instanceof ThreadDeath) {
                    throw (ThreadDeath) t;
                  } else {
                    logger.warn("An RTP packet may have not been" + " fully handled.", t);
                  }
                }
              }
            }
          }
        }
        rawPacketArrayPool.offer(pkts);
      }
    }
  }
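
The loop above consults an array of DatagramPacketFilters and drops the received packet as soon as one of them rejects it. Below is a hedged sketch of such a filter; it assumes only that the interface exposes the single boolean accept(DatagramPacket) seen in the loop, and the filter class itself is hypothetical.

import java.net.DatagramPacket;

public class RtpVersionFilterSketch implements DatagramPacketFilter {
  /** Accepts only packets whose first byte carries the RTP/RTCP version 2 bits. */
  @Override
  public boolean accept(DatagramPacket p) {
    byte[] buf = p.getData();
    int off = p.getOffset();
    int len = p.getLength();

    return len >= 1 && (buf[off] & 0xC0) == 0x80; // 0b10xxxxxx, i.e. version == 2
  }
}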
Example #23
  private void runOnDtlsTransport(StreamConnector connector) throws IOException {
    DtlsControlImpl dtlsControl = (DtlsControlImpl) getTransportManager().getDtlsControl(this);
    DtlsTransformEngine engine = dtlsControl.getTransformEngine();
    final DtlsPacketTransformer transformer = (DtlsPacketTransformer) engine.getRTPTransformer();

    byte[] receiveBuffer = new byte[SCTP_BUFFER_SIZE];

    if (LOG_SCTP_PACKETS) {
      System.setProperty(
          ConfigurationService.PNAME_SC_HOME_DIR_LOCATION, System.getProperty("java.io.tmpdir"));
      System.setProperty(
          ConfigurationService.PNAME_SC_HOME_DIR_NAME, SctpConnection.class.getName());
    }

    synchronized (this) {
      // FIXME local SCTP port is hardcoded in bridge offer SDP (Jitsi
      // Meet)
      sctpSocket = Sctp.createSocket(5000);
      assocIsUp = false;
      acceptedIncomingConnection = false;
    }

    // Implement output network link for SCTP stack on DTLS transport
    sctpSocket.setLink(
        new NetworkLink() {
          @Override
          public void onConnOut(SctpSocket s, byte[] packet) throws IOException {
            if (LOG_SCTP_PACKETS) {
              LibJitsi.getPacketLoggingService()
                  .logPacket(
                      PacketLoggingService.ProtocolName.ICE4J,
                      new byte[] {0, 0, 0, (byte) debugId},
                      5000,
                      new byte[] {0, 0, 0, (byte) (debugId + 1)},
                      remoteSctpPort,
                      PacketLoggingService.TransportName.UDP,
                      true,
                      packet);
            }

            // Send through DTLS transport
            transformer.sendApplicationData(packet, 0, packet.length);
          }
        });

    if (logger.isDebugEnabled()) {
      logger.debug("Connecting SCTP to port: " + remoteSctpPort + " to " + getEndpoint().getID());
    }

    sctpSocket.setNotificationListener(this);
    sctpSocket.listen();

    // FIXME manage threads
    threadPool.execute(
        new Runnable() {
          @Override
          public void run() {
            SctpSocket sctpSocket = null;
            try {
              // sctpSocket is set to null on close
              sctpSocket = SctpConnection.this.sctpSocket;
              while (sctpSocket != null) {
                if (sctpSocket.accept()) {
                  acceptedIncomingConnection = true;
                  break;
                }
                Thread.sleep(100);
                sctpSocket = SctpConnection.this.sctpSocket;
              }
              if (isReady()) {
                notifySctpConnectionReady();
              }
            } catch (Exception e) {
              logger.error("Error accepting SCTP connection", e);
            }

            if (sctpSocket == null && logger.isInfoEnabled()) {
              logger.info(
                  "SctpConnection " + getID() + " closed" + " before SctpSocket accept()-ed.");
            }
          }
        });

    // Notify that from now on SCTP connection is considered functional
    sctpSocket.setDataCallback(this);

    // Setup iceSocket
    DatagramSocket datagramSocket = connector.getDataSocket();
    if (datagramSocket != null) {
      this.iceSocket = new IceUdpSocketWrapper(datagramSocket);
    } else {
      this.iceSocket = new IceTcpSocketWrapper(connector.getDataTCPSocket());
    }

    DatagramPacket rcvPacket = new DatagramPacket(receiveBuffer, 0, receiveBuffer.length);

    // Receive loop, breaks when SCTP socket is closed
    try {
      do {
        iceSocket.receive(rcvPacket);

        RawPacket raw =
            new RawPacket(rcvPacket.getData(), rcvPacket.getOffset(), rcvPacket.getLength());

        raw = transformer.reverseTransform(raw);
        // Check for app data
        if (raw == null) continue;

        if (LOG_SCTP_PACKETS) {
          LibJitsi.getPacketLoggingService()
              .logPacket(
                  PacketLoggingService.ProtocolName.ICE4J,
                  new byte[] {0, 0, 0, (byte) (debugId + 1)},
                  remoteSctpPort,
                  new byte[] {0, 0, 0, (byte) debugId},
                  5000,
                  PacketLoggingService.TransportName.UDP,
                  false,
                  raw.getBuffer(),
                  raw.getOffset(),
                  raw.getLength());
        }

        // Pass network packet to SCTP stack
        sctpSocket.onConnIn(raw.getBuffer(), raw.getOffset(), raw.getLength());
      } while (true);
    } finally {
      // Eventually, close the socket although it should happen from
      // expire().
      synchronized (this) {
        assocIsUp = false;
        acceptedIncomingConnection = false;
        if (sctpSocket != null) {
          sctpSocket.close();
          sctpSocket = null;
        }
      }
    }
  }
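
The essential plumbing in runOnDtlsTransport() above runs in two directions: outbound SCTP packets are handed to the DTLS transformer as application data, and inbound DTLS application data (a non-null result of reverseTransform()) is fed back into the SCTP stack. A hedged sketch of just that wiring follows, using only calls that already appear in the example; the helper class and methods are hypothetical.

import java.io.IOException;

final class SctpOverDtlsWiringSketch {
  /** SCTP to network: let the SCTP stack send its packets through the DTLS transport. */
  static void wireOutbound(SctpSocket sctpSocket, final DtlsPacketTransformer transformer)
      throws IOException {
    sctpSocket.setLink(
        new NetworkLink() {
          @Override
          public void onConnOut(SctpSocket s, byte[] packet) throws IOException {
            transformer.sendApplicationData(packet, 0, packet.length);
          }
        });
  }

  /** Network to SCTP: pass decrypted DTLS application data to the SCTP stack. */
  static void handleInbound(SctpSocket sctpSocket, RawPacket appData) throws IOException {
    sctpSocket.onConnIn(appData.getBuffer(), appData.getOffset(), appData.getLength());
  }
}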
Example #24
  /**
   * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
   *
   * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
   */
  @Override
  public void update(ReceiveStreamEvent event) {
    if (event == null) return;
    ReceiveStream receiveStream = event.getReceiveStream();

    if (event instanceof NewReceiveStreamEvent) {
      if (receiveStream == null) {
        logger.warn("NewReceiveStreamEvent: null");
        return;
      }

      final long ssrc = getReceiveStreamSSRC(receiveStream);

      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);

      if (receiveStreamDesc != null) {
        String s = "NewReceiveStreamEvent for an existing SSRC. ";
        if (receiveStream != receiveStreamDesc.receiveStream)
          s += "(but different ReceiveStream object)";
        logger.warn(s);
        return;
      } else receiveStreamDesc = new ReceiveStreamDesc(receiveStream);

      if (logger.isInfoEnabled()) logger.info("New ReceiveStream, ssrc=" + ssrc);

      // Find the format of the ReceiveStream
      DataSource dataSource = receiveStream.getDataSource();
      if (dataSource instanceof PushBufferDataSource) {
        Format format = null;
        PushBufferDataSource pbds = (PushBufferDataSource) dataSource;
        for (PushBufferStream pbs : pbds.getStreams()) {
          if ((format = pbs.getFormat()) != null) break;
        }

        if (format == null) {
          logger.error("Failed to handle new ReceiveStream: " + "Failed to determine format");
          return;
        }

        receiveStreamDesc.format = format;
      } else {
        logger.error("Failed to handle new ReceiveStream: " + "Unsupported DataSource");
        return;
      }

      int rtpClockRate = -1;
      if (receiveStreamDesc.format instanceof AudioFormat)
        rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
      else if (receiveStreamDesc.format instanceof VideoFormat) rtpClockRate = 90000;
      getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

      // create a Processor and configure it
      Processor processor = null;
      try {
        processor = Manager.createProcessor(receiveStream.getDataSource());
      } catch (NoProcessorException npe) {
        logger.error("Failed to create Processor: ", npe);
        return;
      } catch (IOException ioe) {
        logger.error("Failed to create Processor: ", ioe);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Created processor for SSRC=" + ssrc);

      processor.addControllerListener(this);
      receiveStreamDesc.processor = processor;

      final int streamCount;
      synchronized (receiveStreams) {
        receiveStreams.add(receiveStreamDesc);
        streamCount = receiveStreams.size();
      }

      /*
       * XXX TODO IRBABOON
       * This is a terrible hack which works around a failure to realize()
       * some of the Processor-s for audio streams, when multiple streams
       * start nearly simultaneously. The cause of the problem is currently
       * unknown (and synchronizing all FMJ calls in RecorderRtpImpl
       * does not help).
       * XXX TODO NOOBABRI
       */
      if (receiveStreamDesc.format instanceof AudioFormat) {
        final Processor p = processor;
        new Thread() {
          @Override
          public void run() {
            // delay configuring the processors for the different
            // audio streams to decrease the probability that they
            // run together.
            try {
              int ms = 450 * (streamCount - 1);
              logger.warn(
                  "Sleeping for "
                      + ms
                      + "ms before"
                      + " configuring processor for SSRC="
                      + ssrc
                      + " "
                      + System.currentTimeMillis());
              Thread.sleep(ms);
            } catch (Exception e) {
            }

            p.configure();
          }
        }.run();
      } else {
        processor.configure();
      }
    } else if (event instanceof TimeoutEvent) {
      if (receiveStream == null) {
        // TODO: we might want to get the list of ReceiveStream-s from
        // rtpManager and compare it to our list, to see if we should
        // remove a stream.
        logger.warn("TimeoutEvent: null.");
        return;
      }

      // FMJ silently creates new ReceiveStream instances, so we have to
      // recognize them by the SSRC.
      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream));
      if (receiveStreamDesc != null) {
        if (logger.isInfoEnabled()) {
          logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc);
        }

        removeReceiveStream(receiveStreamDesc, true);
      }
    } else if (logger.isInfoEnabled()) {
      logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event);
    }
  }
Example #25
  /**
   * Enables or disables this <tt>RTPConnectorInputStream</tt>. While the stream is disabled, it
   * does not accept any packets.
   *
   * @param enabled <tt>true</tt> to enable, <tt>false</tt> to disable.
   */
  public void setEnabled(boolean enabled) {
    if (logger.isDebugEnabled()) logger.debug("setEnabled: " + enabled);

    this.enabled = enabled;
  }
Example #26
/**
 * Implements {@link PacketTransformer} for DTLS-SRTP. It is capable of working in pure DTLS mode
 * if the appropriate flag was set in <tt>DtlsControlImpl</tt>.
 *
 * @author Lyubomir Marinov
 */
public class DtlsPacketTransformer extends SinglePacketTransformer {
  /**
   * The number of milliseconds to wait before retrying a failed attempt to establish a DTLS
   * connection.
   */
  private static final long CONNECT_RETRY_INTERVAL = 500;

  /**
   * The maximum number of times that {@link #runInConnectThread(DTLSProtocol, TlsPeer,
   * DatagramTransport)} is to retry the invocations of {@link DTLSClientProtocol#connect(TlsClient,
   * DatagramTransport)} and {@link DTLSServerProtocol#accept(TlsServer, DatagramTransport)} in
   * anticipation of a successful connection.
   */
  private static final int CONNECT_TRIES = 3;

  /**
   * The indicator which determines whether unencrypted packets sent or received through
   * <tt>DtlsPacketTransformer</tt> are to be dropped. The default value is <tt>false</tt>.
   *
   * @see #DROP_UNENCRYPTED_PKTS_PNAME
   */
  private static final boolean DROP_UNENCRYPTED_PKTS;

  /**
   * The name of the <tt>ConfigurationService</tt> and/or <tt>System</tt> property which indicates
   * whether unencrypted packets sent or received through <tt>DtlsPacketTransformer</tt> are to be
   * dropped. The default value is <tt>false</tt>.
   */
  private static final String DROP_UNENCRYPTED_PKTS_PNAME =
      DtlsPacketTransformer.class.getName() + ".dropUnencryptedPkts";

  /** The length of the header of a DTLS record. */
  static final int DTLS_RECORD_HEADER_LENGTH = 13;

  /**
   * The number of milliseconds a <tt>DtlsPacketTransformer</tt> is to wait on its {@link
   * #dtlsTransport} in order to receive a packet.
   */
  private static final int DTLS_TRANSPORT_RECEIVE_WAITMILLIS = -1;

  /**
   * The <tt>Logger</tt> used by the <tt>DtlsPacketTransformer</tt> class and its instances to print
   * debug information.
   */
  private static final Logger logger = Logger.getLogger(DtlsPacketTransformer.class);

  static {
    ConfigurationService cfg = LibJitsi.getConfigurationService();
    boolean dropUnencryptedPkts = false;

    if (cfg == null) {
      String s = System.getProperty(DROP_UNENCRYPTED_PKTS_PNAME);

      if (s != null) dropUnencryptedPkts = Boolean.parseBoolean(s);
    } else {
      dropUnencryptedPkts = cfg.getBoolean(DROP_UNENCRYPTED_PKTS_PNAME, dropUnencryptedPkts);
    }
    DROP_UNENCRYPTED_PKTS = dropUnencryptedPkts;
  }

  /**
   * Determines whether a specific array of <tt>byte</tt>s appears to contain a DTLS record.
   *
   * @param buf the array of <tt>byte</tt>s to be analyzed
   * @param off the offset within <tt>buf</tt> at which the analysis is to start
   * @param len the number of bytes within <tt>buf</tt> starting at <tt>off</tt> to be analyzed
   * @return <tt>true</tt> if the specified <tt>buf</tt> appears to contain a DTLS record
   */
  public static boolean isDtlsRecord(byte[] buf, int off, int len) {
    boolean b = false;

    if (len >= DTLS_RECORD_HEADER_LENGTH) {
      short type = TlsUtils.readUint8(buf, off);

      switch (type) {
        case ContentType.alert:
        case ContentType.application_data:
        case ContentType.change_cipher_spec:
        case ContentType.handshake:
          int major = buf[off + 1] & 0xff;
          int minor = buf[off + 2] & 0xff;
          ProtocolVersion version = null;

          if ((major == ProtocolVersion.DTLSv10.getMajorVersion())
              && (minor == ProtocolVersion.DTLSv10.getMinorVersion())) {
            version = ProtocolVersion.DTLSv10;
          }
          if ((version == null)
              && (major == ProtocolVersion.DTLSv12.getMajorVersion())
              && (minor == ProtocolVersion.DTLSv12.getMinorVersion())) {
            version = ProtocolVersion.DTLSv12;
          }
          if (version != null) {
            int length = TlsUtils.readUint16(buf, off + 11);

            if (DTLS_RECORD_HEADER_LENGTH + length <= len) b = true;
          }
          break;
        default:
          // Unless a new ContentType has been defined by the Bouncy
          // Castle Crypto APIs, the specified buf does not represent a
          // DTLS record.
          break;
      }
    }
    return b;
  }

  /** The ID of the component which this instance works for/is associated with. */
  private final int componentID;

  /** The <tt>RTPConnector</tt> which uses this <tt>PacketTransformer</tt>. */
  private AbstractRTPConnector connector;

  /** The background <tt>Thread</tt> which initializes {@link #dtlsTransport}. */
  private Thread connectThread;

  /**
   * The <tt>DatagramTransport</tt> implementation which adapts {@link #connector} and this
   * <tt>PacketTransformer</tt> to the terms of the Bouncy Castle Crypto APIs.
   */
  private DatagramTransportImpl datagramTransport;

  /**
   * The <tt>DTLSTransport</tt> through which the actual packet transformations are being performed
   * by this instance.
   */
  private DTLSTransport dtlsTransport;

  /** The <tt>MediaType</tt> of the stream which this instance works for/is associated with. */
  private MediaType mediaType;

  /**
   * Whether rtcp-mux is in use.
   *
   * <p>If enabled, and this is the transformer for RTCP, it will not establish a DTLS session on
   * its own, but rather wait for the RTP transformer to do so, and reuse it to initialize the SRTP
   * transformer.
   */
  private boolean rtcpmux = false;

  /**
   * The value of the <tt>setup</tt> SDP attribute defined by RFC 4145 &quot;TCP-Based Media
   * Transport in the Session Description Protocol (SDP)&quot; which determines whether this
   * instance acts as a DTLS client or a DTLS server.
   */
  private DtlsControl.Setup setup;

  /** The {@code SRTPTransformer} (to be) used by this instance. */
  private SinglePacketTransformer _srtpTransformer;

  /**
   * The indicator which determines whether the <tt>TlsPeer</tt> employed by this
   * <tt>PacketTransformer</tt> has raised an <tt>AlertDescription.close_notify</tt>
   * <tt>AlertLevel.warning</tt> i.e. the remote DTLS peer has closed the write side of the
   * connection.
   */
  private boolean tlsPeerHasRaisedCloseNotifyWarning;

  /** The <tt>TransformEngine</tt> which has initialized this instance. */
  private final DtlsTransformEngine transformEngine;

  /**
   * Initializes a new <tt>DtlsPacketTransformer</tt> instance.
   *
   * @param transformEngine the <tt>TransformEngine</tt> which is initializing the new instance
   * @param componentID the ID of the component for which the new instance is to work
   */
  public DtlsPacketTransformer(DtlsTransformEngine transformEngine, int componentID) {
    this.transformEngine = transformEngine;
    this.componentID = componentID;
  }

  /** {@inheritDoc} */
  @Override
  public synchronized void close() {
    // SrtpControl.start(MediaType) starts its associated TransformEngine.
    // We will use that mediaType to signal the normal stop then as well
    // i.e. we will call setMediaType(null) first.
    setMediaType(null);
    setConnector(null);
  }

  /**
   * Closes {@link #datagramTransport} if it is non-<tt>null</tt> and logs and swallows any
   * <tt>IOException</tt>.
   */
  private void closeDatagramTransport() {
    if (datagramTransport != null) {
      try {
        datagramTransport.close();
      } catch (IOException ioe) {
        // DatagramTransportImpl has no reason to fail because it is
        // merely an adapter of #connector and this PacketTransformer to
        // the terms of the Bouncy Castle Crypto API.
        logger.error("Failed to (properly) close " + datagramTransport.getClass(), ioe);
      }
      datagramTransport = null;
    }
  }

  /**
   * Determines whether {@link #runInConnectThread(DTLSProtocol, TlsPeer, DatagramTransport)} is to
   * try to establish a DTLS connection.
   *
   * @param i the number of tries remaining after the current one
   * @param datagramTransport the <tt>DatagramTransport</tt> used by the connect attempt in progress
   * @return <tt>true</tt> to try to establish a DTLS connection; otherwise, <tt>false</tt>
   */
  private boolean enterRunInConnectThreadLoop(int i, DatagramTransport datagramTransport) {
    if (i < 0 || i > CONNECT_TRIES) {
      return false;
    } else {
      Thread currentThread = Thread.currentThread();

      synchronized (this) {
        if (i > 0 && i < CONNECT_TRIES - 1) {
          boolean interrupted = false;

          try {
            wait(CONNECT_RETRY_INTERVAL);
          } catch (InterruptedException ie) {
            interrupted = true;
          }
          if (interrupted) currentThread.interrupt();
        }

        return currentThread.equals(this.connectThread)
            && datagramTransport.equals(this.datagramTransport);
      }
    }
  }

  /**
   * Gets the <tt>DtlsControl</tt> implementation associated with this instance.
   *
   * @return the <tt>DtlsControl</tt> implementation associated with this instance
   */
  DtlsControlImpl getDtlsControl() {
    return getTransformEngine().getDtlsControl();
  }

  /**
   * Gets the <tt>TransformEngine</tt> which has initialized this instance.
   *
   * @return the <tt>TransformEngine</tt> which has initialized this instance
   */
  DtlsTransformEngine getTransformEngine() {
    return transformEngine;
  }

  /**
   * Handles a specific <tt>IOException</tt> which was thrown during the execution of {@link
   * #runInConnectThread(DTLSProtocol, TlsPeer, DatagramTransport)} while trying to establish a DTLS
   * connection
   *
   * @param ioe the <tt>IOException</tt> to handle
   * @param msg the human-readable message to log about the specified <tt>ioe</tt>
   * @param i the number of tries remaining after the current one
   * @return <tt>true</tt> if the specified <tt>ioe</tt> was successfully handled; <tt>false</tt>,
   *     otherwise
   */
  private boolean handleRunInConnectThreadException(IOException ioe, String msg, int i) {
    // SrtpControl.start(MediaType) starts its associated TransformEngine.
    // We will use that mediaType to signal the normal stop then as well
    // i.e. we will ignore exception after the procedure to stop this
    // PacketTransformer has begun.
    if (mediaType == null) return false;

    if (ioe instanceof TlsFatalAlert) {
      TlsFatalAlert tfa = (TlsFatalAlert) ioe;
      short alertDescription = tfa.getAlertDescription();

      if (alertDescription == AlertDescription.unexpected_message) {
        msg += " Received fatal unexpected message.";
        if (i == 0
            || !Thread.currentThread().equals(connectThread)
            || connector == null
            || mediaType == null) {
          msg += " Giving up after " + (CONNECT_TRIES - i) + " retries.";
        } else {
          msg += " Will retry.";
          logger.error(msg, ioe);

          return true;
        }
      } else {
        msg += " Received fatal alert " + alertDescription + ".";
      }
    }

    logger.error(msg, ioe);
    return false;
  }

  /**
   * Tries to initialize {@link #_srtpTransformer} by using the <tt>DtlsPacketTransformer</tt> for
   * RTP.
   *
   * @return the (possibly updated) value of {@link #_srtpTransformer}.
   */
  private SinglePacketTransformer initializeSRTCPTransformerFromRtp() {
    DtlsPacketTransformer rtpTransformer =
        (DtlsPacketTransformer) getTransformEngine().getRTPTransformer();

    // Prevent recursion (that is pretty much impossible to ever happen).
    if (rtpTransformer != this) {
      PacketTransformer srtpTransformer = rtpTransformer.waitInitializeAndGetSRTPTransformer();

      if (srtpTransformer != null && srtpTransformer instanceof SRTPTransformer) {
        synchronized (this) {
          if (_srtpTransformer == null) {
            _srtpTransformer = new SRTCPTransformer((SRTPTransformer) srtpTransformer);
            // For the sake of completeness, we notify whenever we
            // assign to _srtpTransformer.
            notifyAll();
          }
        }
      }
    }

    return _srtpTransformer;
  }

  /**
   * Initializes a new <tt>SRTPTransformer</tt> instance with a specific (negotiated)
   * <tt>SRTPProtectionProfile</tt> and the keying material specified by a specific
   * <tt>TlsContext</tt>.
   *
   * @param srtpProtectionProfile the (negotiated) <tt>SRTPProtectionProfile</tt> to initialize the
   *     new instance with
   * @param tlsContext the <tt>TlsContext</tt> which represents the keying material
   * @return a new <tt>SRTPTransformer</tt> instance initialized with <tt>srtpProtectionProfile</tt>
   *     and <tt>tlsContext</tt>
   */
  private SinglePacketTransformer initializeSRTPTransformer(
      int srtpProtectionProfile, TlsContext tlsContext) {
    boolean rtcp;

    switch (componentID) {
      case Component.RTCP:
        rtcp = true;
        break;
      case Component.RTP:
        rtcp = false;
        break;
      default:
        throw new IllegalStateException("componentID");
    }

    int cipher_key_length;
    int cipher_salt_length;
    int cipher;
    int auth_function;
    int auth_key_length;
    int RTCP_auth_tag_length, RTP_auth_tag_length;

    switch (srtpProtectionProfile) {
      case SRTPProtectionProfile.SRTP_AES128_CM_HMAC_SHA1_32:
        cipher_key_length = 128 / 8;
        cipher_salt_length = 112 / 8;
        cipher = SRTPPolicy.AESCM_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = 80 / 8;
        RTP_auth_tag_length = 32 / 8;
        break;
      case SRTPProtectionProfile.SRTP_AES128_CM_HMAC_SHA1_80:
        cipher_key_length = 128 / 8;
        cipher_salt_length = 112 / 8;
        cipher = SRTPPolicy.AESCM_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = RTP_auth_tag_length = 80 / 8;
        break;
      case SRTPProtectionProfile.SRTP_NULL_HMAC_SHA1_32:
        cipher_key_length = 0;
        cipher_salt_length = 0;
        cipher = SRTPPolicy.NULL_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = 80 / 8;
        RTP_auth_tag_length = 32 / 8;
        break;
      case SRTPProtectionProfile.SRTP_NULL_HMAC_SHA1_80:
        cipher_key_length = 0;
        cipher_salt_length = 0;
        cipher = SRTPPolicy.NULL_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = RTP_auth_tag_length = 80 / 8;
        break;
      default:
        throw new IllegalArgumentException("srtpProtectionProfile");
    }

    byte[] keyingMaterial =
        tlsContext.exportKeyingMaterial(
            ExporterLabel.dtls_srtp, null, 2 * (cipher_key_length + cipher_salt_length));
    byte[] client_write_SRTP_master_key = new byte[cipher_key_length];
    byte[] server_write_SRTP_master_key = new byte[cipher_key_length];
    byte[] client_write_SRTP_master_salt = new byte[cipher_salt_length];
    byte[] server_write_SRTP_master_salt = new byte[cipher_salt_length];
    byte[][] keyingMaterialValues = {
      client_write_SRTP_master_key,
      server_write_SRTP_master_key,
      client_write_SRTP_master_salt,
      server_write_SRTP_master_salt
    };
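    // The order above matches the layout of the exported keying material
    // defined by RFC 5764: client write key, server write key, client write
    // salt, server write salt. Copy each component out in that order.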

    for (int i = 0, keyingMaterialOffset = 0; i < keyingMaterialValues.length; i++) {
      byte[] keyingMaterialValue = keyingMaterialValues[i];

      System.arraycopy(
          keyingMaterial, keyingMaterialOffset, keyingMaterialValue, 0, keyingMaterialValue.length);
      keyingMaterialOffset += keyingMaterialValue.length;
    }

    SRTPPolicy srtcpPolicy =
        new SRTPPolicy(
            cipher,
            cipher_key_length,
            auth_function,
            auth_key_length,
            RTCP_auth_tag_length,
            cipher_salt_length);
    SRTPPolicy srtpPolicy =
        new SRTPPolicy(
            cipher,
            cipher_key_length,
            auth_function,
            auth_key_length,
            RTP_auth_tag_length,
            cipher_salt_length);
    SRTPContextFactory clientSRTPContextFactory =
        new SRTPContextFactory(
            /* sender */ tlsContext instanceof TlsClientContext,
            client_write_SRTP_master_key,
            client_write_SRTP_master_salt,
            srtpPolicy,
            srtcpPolicy);
    SRTPContextFactory serverSRTPContextFactory =
        new SRTPContextFactory(
            /* sender */ tlsContext instanceof TlsServerContext,
            server_write_SRTP_master_key,
            server_write_SRTP_master_salt,
            srtpPolicy,
            srtcpPolicy);
    SRTPContextFactory forwardSRTPContextFactory;
    SRTPContextFactory reverseSRTPContextFactory;

    if (tlsContext instanceof TlsClientContext) {
      forwardSRTPContextFactory = clientSRTPContextFactory;
      reverseSRTPContextFactory = serverSRTPContextFactory;
    } else if (tlsContext instanceof TlsServerContext) {
      forwardSRTPContextFactory = serverSRTPContextFactory;
      reverseSRTPContextFactory = clientSRTPContextFactory;
    } else {
      throw new IllegalArgumentException("tlsContext");
    }

    SinglePacketTransformer srtpTransformer;

    if (rtcp) {
      srtpTransformer = new SRTCPTransformer(forwardSRTPContextFactory, reverseSRTPContextFactory);
    } else {
      srtpTransformer = new SRTPTransformer(forwardSRTPContextFactory, reverseSRTPContextFactory);
    }
    return srtpTransformer;
  }

  /**
   * Notifies this instance that the DTLS record layer associated with a specific <tt>TlsPeer</tt>
   * has raised an alert.
   *
   * @param tlsPeer the <tt>TlsPeer</tt> whose associated DTLS record layer has raised an alert
   * @param alertLevel {@link AlertLevel}
   * @param alertDescription {@link AlertDescription}
   * @param message a human-readable message explaining what caused the alert. May be <tt>null</tt>.
   * @param cause the exception that caused the alert to be raised. May be <tt>null</tt>.
   */
  void notifyAlertRaised(
      TlsPeer tlsPeer, short alertLevel, short alertDescription, String message, Exception cause) {
    if (AlertLevel.warning == alertLevel && AlertDescription.close_notify == alertDescription) {
      tlsPeerHasRaisedCloseNotifyWarning = true;
    }
  }

  /** {@inheritDoc} */
  @Override
  public RawPacket reverseTransform(RawPacket pkt) {
    byte[] buf = pkt.getBuffer();
    int off = pkt.getOffset();
    int len = pkt.getLength();

    if (isDtlsRecord(buf, off, len)) {
      if (rtcpmux && Component.RTCP == componentID) {
        // This should never happen.
        logger.warn(
            "Dropping a DTLS record, because it was received on the"
                + " RTCP channel while rtcpmux is in use.");
        return null;
      }

      boolean receive;

      synchronized (this) {
        if (datagramTransport == null) {
          receive = false;
        } else {
          datagramTransport.queueReceive(buf, off, len);
          receive = true;
        }
      }
      if (receive) {
        DTLSTransport dtlsTransport = this.dtlsTransport;

        if (dtlsTransport == null) {
          // The specified pkt looks like a DTLS record and it has
          // been consumed for the purposes of the secure channel
          // represented by this PacketTransformer.
          pkt = null;
        } else {
          try {
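            // Grow or shrink the packet's buffer so that it can accommodate
            // the DTLS transport's receive limit before decoding into it.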
            int receiveLimit = dtlsTransport.getReceiveLimit();
            int delta = receiveLimit - len;

            if (delta > 0) {
              pkt.grow(delta);
              buf = pkt.getBuffer();
              off = pkt.getOffset();
              len = pkt.getLength();
            } else if (delta < 0) {
              pkt.shrink(-delta);
              buf = pkt.getBuffer();
              off = pkt.getOffset();
              len = pkt.getLength();
            }

            int received = dtlsTransport.receive(buf, off, len, DTLS_TRANSPORT_RECEIVE_WAITMILLIS);

            if (received <= 0) {
              // No application data was decoded.
              pkt = null;
            } else {
              delta = len - received;
              if (delta > 0) pkt.shrink(delta);
            }
          } catch (IOException ioe) {
            pkt = null;
            // SrtpControl.start(MediaType) starts its associated
            // TransformEngine. We will use that mediaType to signal
            // the normal stop then as well i.e. we will ignore
            // exception after the procedure to stop this
            // PacketTransformer has begun.
            if (mediaType != null && !tlsPeerHasRaisedCloseNotifyWarning) {
              logger.error("Failed to decode a DTLS record!", ioe);
            }
          }
        }
      } else {
        // The specified pkt looks like a DTLS record but it is
        // unexpected in the current state of the secure channel
        // represented by this PacketTransformer. This PacketTransformer
        // has not been started (successfully) or has been closed.
        pkt = null;
      }
    } else if (transformEngine.isSrtpDisabled()) {
      // In pure DTLS mode only DTLS records pass through.
      pkt = null;
    } else {
      // DTLS-SRTP has not been initialized yet or has failed to
      // initialize.
      SinglePacketTransformer srtpTransformer = waitInitializeAndGetSRTPTransformer();

      if (srtpTransformer != null) pkt = srtpTransformer.reverseTransform(pkt);
      else if (DROP_UNENCRYPTED_PKTS) pkt = null;
      // XXX Else, it is our explicit policy to let the received packet
      // pass through and rely on the SrtpListener to notify the user that
      // the session is not secured.
    }
    return pkt;
  }

  /**
   * Runs in {@link #connectThread} to initialize {@link #dtlsTransport}.
   *
   * @param dtlsProtocol the DTLS protocol instance (client or server) which is to carry out the
   *     handshake
   * @param tlsPeer the <tt>TlsPeer</tt> which represents the local endpoint in the handshake
   * @param datagramTransport the <tt>DatagramTransport</tt> over which the handshake is carried out
   */
  private void runInConnectThread(
      DTLSProtocol dtlsProtocol, TlsPeer tlsPeer, DatagramTransport datagramTransport) {
    DTLSTransport dtlsTransport = null;
    final boolean srtp = !transformEngine.isSrtpDisabled();
    int srtpProtectionProfile = 0;
    TlsContext tlsContext = null;

    // DTLS client
    if (dtlsProtocol instanceof DTLSClientProtocol) {
      DTLSClientProtocol dtlsClientProtocol = (DTLSClientProtocol) dtlsProtocol;
      TlsClientImpl tlsClient = (TlsClientImpl) tlsPeer;

      for (int i = CONNECT_TRIES - 1; i >= 0; i--) {
        if (!enterRunInConnectThreadLoop(i, datagramTransport)) break;
        try {
          dtlsTransport = dtlsClientProtocol.connect(tlsClient, datagramTransport);
          break;
        } catch (IOException ioe) {
          if (!handleRunInConnectThreadException(
              ioe, "Failed to connect this DTLS client to a DTLS" + " server!", i)) {
            break;
          }
        }
      }
      if (dtlsTransport != null && srtp) {
        srtpProtectionProfile = tlsClient.getChosenProtectionProfile();
        tlsContext = tlsClient.getContext();
      }
    }
    // DTLS server
    else if (dtlsProtocol instanceof DTLSServerProtocol) {
      DTLSServerProtocol dtlsServerProtocol = (DTLSServerProtocol) dtlsProtocol;
      TlsServerImpl tlsServer = (TlsServerImpl) tlsPeer;

      for (int i = CONNECT_TRIES - 1; i >= 0; i--) {
        if (!enterRunInConnectThreadLoop(i, datagramTransport)) break;
        try {
          dtlsTransport = dtlsServerProtocol.accept(tlsServer, datagramTransport);
          break;
        } catch (IOException ioe) {
          if (!handleRunInConnectThreadException(
              ioe, "Failed to accept a connection from a DTLS client!", i)) {
            break;
          }
        }
      }
      if (dtlsTransport != null && srtp) {
        srtpProtectionProfile = tlsServer.getChosenProtectionProfile();
        tlsContext = tlsServer.getContext();
      }
    } else {
      // It MUST be either a DTLS client or a DTLS server.
      throw new IllegalStateException("dtlsProtocol");
    }

    SinglePacketTransformer srtpTransformer =
        (dtlsTransport == null || !srtp)
            ? null
            : initializeSRTPTransformer(srtpProtectionProfile, tlsContext);
    boolean closeSRTPTransformer;

    synchronized (this) {
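      // Publish the results only if this thread is still the current
      // connectThread operating on the current datagramTransport; otherwise
      // the srtpTransformer created above is orphaned and is closed below.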
      if (Thread.currentThread().equals(this.connectThread)
          && datagramTransport.equals(this.datagramTransport)) {
        this.dtlsTransport = dtlsTransport;
        _srtpTransformer = srtpTransformer;
        notifyAll();
      }
      closeSRTPTransformer = (_srtpTransformer != srtpTransformer);
    }
    if (closeSRTPTransformer && srtpTransformer != null) srtpTransformer.close();
  }

  /**
   * Sends the data contained in a specific byte array as application data through the DTLS
   * connection of this <tt>DtlsPacketTransformer</tt>.
   *
   * @param buf the byte array containing data to send.
   * @param off the offset in <tt>buf</tt> where the data begins.
   * @param len the length of data to send.
   */
  public void sendApplicationData(byte[] buf, int off, int len) {
    DTLSTransport dtlsTransport = this.dtlsTransport;
    Throwable throwable = null;

    if (dtlsTransport != null) {
      try {
        dtlsTransport.send(buf, off, len);
      } catch (IOException ioe) {
        throwable = ioe;
      }
    } else {
      throwable = new NullPointerException("dtlsTransport");
    }
    if (throwable != null) {
      // SrtpControl.start(MediaType) starts its associated
      // TransformEngine. We will use that mediaType to signal the normal
      // stop then as well i.e. we will ignore exception after the
      // procedure to stop this PacketTransformer has begun.
      if (mediaType != null && !tlsPeerHasRaisedCloseNotifyWarning) {
        logger.error("Failed to send application data over DTLS transport: ", throwable);
      }
    }
  }

  /**
   * Sets the <tt>RTPConnector</tt> which is to use or uses this <tt>PacketTransformer</tt>.
   *
   * @param connector the <tt>RTPConnector</tt> which is to use or uses this
   *     <tt>PacketTransformer</tt>
   */
  void setConnector(AbstractRTPConnector connector) {
    if (this.connector != connector) {
      this.connector = connector;

      DatagramTransportImpl datagramTransport = this.datagramTransport;

      if (datagramTransport != null) datagramTransport.setConnector(connector);
    }
  }

  /**
   * Sets the <tt>MediaType</tt> of the stream which this instance is to work for/be associated
   * with.
   *
   * @param mediaType the <tt>MediaType</tt> of the stream which this instance is to work for/be
   *     associated with
   */
  synchronized void setMediaType(MediaType mediaType) {
    if (this.mediaType != mediaType) {
      MediaType oldValue = this.mediaType;

      this.mediaType = mediaType;

      if (oldValue != null) stop();
      if (this.mediaType != null) start();
    }
  }

  /**
   * Enables/disables rtcp-mux.
   *
   * @param rtcpmux whether to enable or disable.
   */
  void setRtcpmux(boolean rtcpmux) {
    this.rtcpmux = rtcpmux;
  }

  /**
   * Sets the DTLS protocol according to which this <tt>DtlsPacketTransformer</tt> is to act either
   * as a DTLS server or a DTLS client.
   *
   * @param setup the value of the <tt>setup</tt> SDP attribute to set on this instance in order to
   *     determine whether this instance is to act as a DTLS client or a DTLS server
   */
  void setSetup(DtlsControl.Setup setup) {
    if (this.setup != setup) this.setup = setup;
  }

  /** Starts this <tt>PacketTransformer</tt>. */
  private synchronized void start() {
    if (this.datagramTransport != null) {
      if (this.connectThread == null && dtlsTransport == null) {
        logger.warn(
            getClass().getName()
                + " has been started but has failed to establish"
                + " the DTLS connection!");
      }
      return;
    }

    if (rtcpmux && Component.RTCP == componentID) {
      // In the case of rtcp-mux, the RTCP transformer does not create
      // a DTLS session. The SRTP context (_srtpTransformer) will be
      // initialized on demand using initializeSRTCPTransformerFromRtp().
      return;
    }

    AbstractRTPConnector connector = this.connector;

    if (connector == null) throw new NullPointerException("connector");

    DtlsControl.Setup setup = this.setup;
    SecureRandom secureRandom = DtlsControlImpl.createSecureRandom();
    final DTLSProtocol dtlsProtocolObj;
    final TlsPeer tlsPeer;
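
    // setup=ACTIVE means this endpoint initiates the DTLS handshake, i.e. it
    // acts as the DTLS client; otherwise it acts as the DTLS server.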

    if (DtlsControl.Setup.ACTIVE.equals(setup)) {
      dtlsProtocolObj = new DTLSClientProtocol(secureRandom);
      tlsPeer = new TlsClientImpl(this);
    } else {
      dtlsProtocolObj = new DTLSServerProtocol(secureRandom);
      tlsPeer = new TlsServerImpl(this);
    }
    tlsPeerHasRaisedCloseNotifyWarning = false;

    final DatagramTransportImpl datagramTransport = new DatagramTransportImpl(componentID);

    datagramTransport.setConnector(connector);

    Thread connectThread =
        new Thread() {
          @Override
          public void run() {
            try {
              runInConnectThread(dtlsProtocolObj, tlsPeer, datagramTransport);
            } finally {
              if (Thread.currentThread().equals(DtlsPacketTransformer.this.connectThread)) {
                DtlsPacketTransformer.this.connectThread = null;
              }
            }
          }
        };

    connectThread.setDaemon(true);
    connectThread.setName(DtlsPacketTransformer.class.getName() + ".connectThread");

    this.connectThread = connectThread;
    this.datagramTransport = datagramTransport;

    boolean started = false;

    try {
      connectThread.start();
      started = true;
    } finally {
      if (!started) {
        if (connectThread.equals(this.connectThread)) this.connectThread = null;
        if (datagramTransport.equals(this.datagramTransport)) this.datagramTransport = null;
      }
    }

    notifyAll();
  }

  /** Stops this <tt>PacketTransformer</tt>. */
  private synchronized void stop() {
    if (connectThread != null) connectThread = null;
    try {
      // The dtlsTransport and _srtpTransformer SHOULD be closed, of
      // course. The datagramTransport MUST be closed.
      if (dtlsTransport != null) {
        try {
          dtlsTransport.close();
        } catch (IOException ioe) {
          logger.error("Failed to (properly) close " + dtlsTransport.getClass(), ioe);
        }
        dtlsTransport = null;
      }
      if (_srtpTransformer != null) {
        _srtpTransformer.close();
        _srtpTransformer = null;
      }
    } finally {
      try {
        closeDatagramTransport();
      } finally {
        notifyAll();
      }
    }
  }

  /** {@inheritDoc} */
  @Override
  public RawPacket transform(RawPacket pkt) {
    byte[] buf = pkt.getBuffer();
    int off = pkt.getOffset();
    int len = pkt.getLength();

    // If the specified pkt represents a DTLS record, then it should pass
    // through this PacketTransformer (e.g. it has been sent through
    // DatagramTransportImpl).
    if (isDtlsRecord(buf, off, len)) return pkt;

    // SRTP
    if (!transformEngine.isSrtpDisabled()) {
      // DTLS-SRTP has not been initialized yet or has failed to
      // initialize.
      SinglePacketTransformer srtpTransformer = waitInitializeAndGetSRTPTransformer();

      if (srtpTransformer != null) pkt = srtpTransformer.transform(pkt);
      else if (DROP_UNENCRYPTED_PKTS) pkt = null;
      // XXX Else, it is our explicit policy to let the received packet
      // pass through and rely on the SrtpListener to notify the user that
      // the session is not secured.
    }
    // Pure/non-SRTP DTLS
    else {
      // The specified pkt will pass through this PacketTransformer only
      // if it gets transformed into a DTLS record.
      pkt = null;

      sendApplicationData(buf, off, len);
    }
    return pkt;
  }

  /**
   * Gets the {@code SRTPTransformer} used by this instance. If {@link #_srtpTransformer} does not
   * exist (yet) and the state of this instance indicates that its initialization is in progress,
   * then blocks until {@code _srtpTransformer} is initialized and returns it.
   *
   * @return the {@code SRTPTransformer} used by this instance
   */
  private SinglePacketTransformer waitInitializeAndGetSRTPTransformer() {
    SinglePacketTransformer srtpTransformer = _srtpTransformer;

    if (srtpTransformer != null) return srtpTransformer;

    if (rtcpmux && Component.RTCP == componentID) return initializeSRTCPTransformerFromRtp();

    // XXX It is our explicit policy to rely on the SrtpListener to notify
    // the user that the session is not secure. Unfortunately, (1) the
    // SrtpListener is not supported by this DTLS SrtpControl implementation
    // and (2) encrypted packets may arrive soon enough to be let through
    // while _srtpTransformer is still initializing. Consequently, we will
    // block and wait for _srtpTransformer to initialize.
    boolean interrupted = false;

    try {
      synchronized (this) {
        do {
          srtpTransformer = _srtpTransformer;
          if (srtpTransformer != null) break; // _srtpTransformer is initialized

          if (connectThread == null) {
            // Though _srtpTransformer is NOT initialized, there is
            // no point in waiting because there is no one to
            // initialize it.
            break;
          }

          try {
            // It does not really matter (enough) how much we wait
            // here because we wait in a loop.
            long timeout = CONNECT_TRIES * CONNECT_RETRY_INTERVAL;

            wait(timeout);
          } catch (InterruptedException ie) {
            interrupted = true;
          }
        } while (true);
      }
    } finally {
      if (interrupted) Thread.currentThread().interrupt();
    }

    return srtpTransformer;
  }
}
예제 #27
0
/**
 * A <tt>Recorder</tt> implementation which attaches to an <tt>RTPTranslator</tt>.
 *
 * @author Vladimir Marinov
 * @author Boris Grozev
 */
public class RecorderRtpImpl
    implements Recorder, ReceiveStreamListener, ActiveSpeakerChangedListener, ControllerListener {
  /**
   * The <tt>Logger</tt> used by the <tt>RecorderRtpImpl</tt> class and its instances for logging
   * output.
   */
  private static final Logger logger = Logger.getLogger(RecorderRtpImpl.class);

  // Values hard-coded to match Chrome.
  // TODO: allow them to be set dynamically.
  private static final byte redPayloadType = 116;
  private static final byte ulpfecPayloadType = 117;
  private static final byte vp8PayloadType = 100;
  private static final byte opusPayloadType = 111;
  private static final Format redFormat = new VideoFormat(Constants.RED);
  private static final Format ulpfecFormat = new VideoFormat(Constants.ULPFEC);
  private static final Format vp8RtpFormat = new VideoFormat(Constants.VP8_RTP);
  private static final Format vp8Format = new VideoFormat(Constants.VP8);
  private static final Format opusFormat =
      new AudioFormat(Constants.OPUS_RTP, 48000, Format.NOT_SPECIFIED, Format.NOT_SPECIFIED);

  private static final int FMJ_VIDEO_JITTER_BUFFER_MIN_SIZE = 300;

  /** The <tt>ContentDescriptor</tt> to use when saving audio. */
  private static final ContentDescriptor AUDIO_CONTENT_DESCRIPTOR =
      new ContentDescriptor(FileTypeDescriptor.MPEG_AUDIO);

  /** The suffix for audio file names. */
  private static final String AUDIO_FILENAME_SUFFIX = ".mp3";

  /** The suffix for video file names. */
  private static final String VIDEO_FILENAME_SUFFIX = ".webm";

  static {
    Registry.set("video_jitter_buffer_MIN_SIZE", FMJ_VIDEO_JITTER_BUFFER_MIN_SIZE);
  }

  /** The <tt>RTPTranslator</tt> that this recorder is/will be attached to. */
  private RTPTranslatorImpl translator;

  /**
   * The custom <tt>RTPConnector</tt> that this instance uses to read from {@link #translator} and
   * write to {@link #rtpManager}.
   */
  private RTPConnectorImpl rtpConnector;

  /** Path to the directory where the output files will be stored. */
  private String path;

  /** The <tt>RTCPFeedbackMessageSender</tt> that we use to send RTCP FIR messages. */
  private RTCPFeedbackMessageSender rtcpFeedbackSender;

  /**
   * The {@link RTPManager} instance we use to handle the packets coming from
   * <tt>RTPTranslator</tt>.
   */
  private RTPManager rtpManager;

  /**
   * The instance which should be notified when events related to recordings (such as the start or
   * end of a recording) occur.
   */
  private RecorderEventHandlerImpl eventHandler;

  /**
   * Holds the <tt>ReceiveStreams</tt> added to this instance by {@link #rtpManager} and additional
   * information associated with each one (e.g. the <tt>Processor</tt>, if any, used for it).
   */
  private final HashSet<ReceiveStreamDesc> receiveStreams = new HashSet<ReceiveStreamDesc>();

  private final Set<Long> activeVideoSsrcs = new HashSet<Long>();

  /**
   * The <tt>ActiveSpeakerDetector</tt> which will listen to the audio receive streams of this
   * <tt>RecorderRtpImpl</tt> and notify it about changes to the active speaker via calls to {@link
   * #activeSpeakerChanged(long)}.
   */
  private ActiveSpeakerDetector activeSpeakerDetector = null;

  StreamRTPManager streamRTPManager;

  private SynchronizerImpl synchronizer;
  private boolean started = false;

  /**
   * Constructor.
   *
   * @param translator the <tt>RTPTranslator</tt> to which this instance will attach in order to
   *     record media.
   */
  public RecorderRtpImpl(RTPTranslator translator) {
    this.translator = (RTPTranslatorImpl) translator;
    activeSpeakerDetector = new ActiveSpeakerDetectorImpl();
    activeSpeakerDetector.addActiveSpeakerChangedListener(this);
  }

  /** Implements {@link Recorder#addListener(Recorder.Listener)}. */
  @Override
  public void addListener(Listener listener) {}

  /** Implements {@link Recorder#removeListener(Recorder.Listener)}. */
  @Override
  public void removeListener(Listener listener) {}

  /** Implements {@link Recorder#getSupportedFormats()}. */
  @Override
  public List<String> getSupportedFormats() {
    return null;
  }

  /** Implements {@link Recorder#setMute(boolean)}. */
  @Override
  public void setMute(boolean mute) {}

  /**
   * Implements {@link Recorder#getFilename()}. Returns null, since we don't have a (single)
   * associated filename.
   */
  @Override
  public String getFilename() {
    return null;
  }

  /**
   * Sets the instance which should be notified when events related to recordings (such as the start
   * or end of a recording) occur.
   */
  public void setEventHandler(RecorderEventHandler eventHandler) {
    if (this.eventHandler == null
        || (this.eventHandler != eventHandler && this.eventHandler.handler != eventHandler)) {
      if (this.eventHandler == null) this.eventHandler = new RecorderEventHandlerImpl(eventHandler);
      else this.eventHandler.handler = eventHandler;
    }
  }

  /**
   * {@inheritDoc}
   *
   * @param format unused, since this implementation records multiple streams using potentially
   *     different formats.
   * @param dirname the path to the directory into which this <tt>Recorder</tt> will store the
   *     recorded media files.
   */
  @Override
  public void start(String format, String dirname) throws IOException, MediaException {
    if (logger.isInfoEnabled()) logger.info("Starting, format=" + format + " " + hashCode());
    path = dirname;

    MediaService mediaService = LibJitsi.getMediaService();

    /*
     * Note that we use only one RTPConnector for both the RTPTranslator
     * and the RTPManager instances. The this.translator will write to its
     * output streams, and this.rtpManager will read from its input streams.
     */
    rtpConnector = new RTPConnectorImpl(redPayloadType, ulpfecPayloadType);

    rtpManager = RTPManager.newInstance();

    /*
     * Add the formats that we know about.
     */
    rtpManager.addFormat(vp8RtpFormat, vp8PayloadType);
    rtpManager.addFormat(opusFormat, opusPayloadType);
    rtpManager.addReceiveStreamListener(this);

    /*
     * Note: When this.rtpManager sends RTCP sender/receiver reports, they
     * will end up being written to its own input stream. This is not
     * expected to cause problems, but might be something to keep an eye on.
     */
    rtpManager.initialize(rtpConnector);

    /*
     * Register a fake call participant.
     * TODO: can we use a more generic MediaStream here?
     */
    streamRTPManager =
        new StreamRTPManager(
            mediaService.createMediaStream(
                new MediaDeviceImpl(new CaptureDeviceInfo(), MediaType.VIDEO)),
            translator);

    streamRTPManager.initialize(rtpConnector);

    rtcpFeedbackSender = translator.getRtcpFeedbackMessageSender();

    translator.addFormat(streamRTPManager, opusFormat, opusPayloadType);

    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, redFormat,
    // redPayloadType);
    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, ulpfecFormat,
    // ulpfecPayloadType);
    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager,
    // mediaFormatImpl.getFormat(), vp8PayloadType);

    started = true;
  }

  @Override
  public void stop() {
    if (started) {
      if (logger.isInfoEnabled()) logger.info("Stopping " + hashCode());

      // Remove the recorder from the translator (i.e. stop new packets from
      // being written to rtpConnector).
      if (streamRTPManager != null) streamRTPManager.dispose();

      HashSet<ReceiveStreamDesc> streamsToRemove = new HashSet<ReceiveStreamDesc>();
      synchronized (receiveStreams) {
        streamsToRemove.addAll(receiveStreams);
      }

      for (ReceiveStreamDesc r : streamsToRemove) removeReceiveStream(r, false);

      rtpConnector.rtcpPacketTransformer.close();
      rtpConnector.rtpPacketTransformer.close();
      rtpManager.dispose();

      started = false;
    }
  }

  /**
   * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
   *
   * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
   */
  @Override
  public void update(ReceiveStreamEvent event) {
    if (event == null) return;
    ReceiveStream receiveStream = event.getReceiveStream();

    if (event instanceof NewReceiveStreamEvent) {
      if (receiveStream == null) {
        logger.warn("NewReceiveStreamEvent: null");
        return;
      }

      final long ssrc = getReceiveStreamSSRC(receiveStream);

      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);

      if (receiveStreamDesc != null) {
        String s = "NewReceiveStreamEvent for an existing SSRC. ";
        if (receiveStream != receiveStreamDesc.receiveStream)
          s += "(but different ReceiveStream object)";
        logger.warn(s);
        return;
      } else receiveStreamDesc = new ReceiveStreamDesc(receiveStream);

      if (logger.isInfoEnabled()) logger.info("New ReceiveStream, ssrc=" + ssrc);

      // Find the format of the ReceiveStream
      DataSource dataSource = receiveStream.getDataSource();
      if (dataSource instanceof PushBufferDataSource) {
        Format format = null;
        PushBufferDataSource pbds = (PushBufferDataSource) dataSource;
        for (PushBufferStream pbs : pbds.getStreams()) {
          if ((format = pbs.getFormat()) != null) break;
        }

        if (format == null) {
          logger.error("Failed to handle new ReceiveStream: " + "Failed to determine format");
          return;
        }

        receiveStreamDesc.format = format;
      } else {
        logger.error("Failed to handle new ReceiveStream: " + "Unsupported DataSource");
        return;
      }
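
      // Determine the RTP clock rate: audio uses the format's sample rate,
      // while video (VP8 here) uses the standard 90 kHz RTP video clock.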

      int rtpClockRate = -1;
      if (receiveStreamDesc.format instanceof AudioFormat)
        rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
      else if (receiveStreamDesc.format instanceof VideoFormat) rtpClockRate = 90000;
      getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

      // create a Processor and configure it
      Processor processor = null;
      try {
        processor = Manager.createProcessor(receiveStream.getDataSource());
      } catch (NoProcessorException npe) {
        logger.error("Failed to create Processor: ", npe);
        return;
      } catch (IOException ioe) {
        logger.error("Failed to create Processor: ", ioe);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Created processor for SSRC=" + ssrc);

      processor.addControllerListener(this);
      receiveStreamDesc.processor = processor;

      final int streamCount;
      synchronized (receiveStreams) {
        receiveStreams.add(receiveStreamDesc);
        streamCount = receiveStreams.size();
      }

      /*
       * XXX TODO IRBABOON
       * This is a terrible hack which works around a failure to realize()
       * some of the Processor-s for audio streams, when multiple streams
       * start nearly simultaneously. The cause of the problem is currently
       * unknown (and synchronizing all FMJ calls in RecorderRtpImpl
       * does not help).
       * XXX TODO NOOBABRI
       */
      if (receiveStreamDesc.format instanceof AudioFormat) {
        final Processor p = processor;
        new Thread() {
          @Override
          public void run() {
            // delay configuring the processors for the different
            // audio streams to decrease the probability that they
            // run together.
            try {
              int ms = 450 * (streamCount - 1);
              logger.warn(
                  "Sleeping for "
                      + ms
                      + "ms before"
                      + " configuring processor for SSRC="
                      + ssrc
                      + " "
                      + System.currentTimeMillis());
              Thread.sleep(ms);
            } catch (Exception e) {
            }

            p.configure();
          }
        }.start();
      } else {
        processor.configure();
      }
    } else if (event instanceof TimeoutEvent) {
      if (receiveStream == null) {
        // TODO: we might want to get the list of ReceiveStream-s from
        // rtpManager and compare it to our list, to see if we should
        // remove a stream.
        logger.warn("TimeoutEvent: null.");
        return;
      }

      // FMJ silently creates new ReceiveStream instances, so we have to
      // recognize them by the SSRC.
      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream));
      if (receiveStreamDesc != null) {
        if (logger.isInfoEnabled()) {
          logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc);
        }

        removeReceiveStream(receiveStreamDesc, true);
      }
    } else if (logger.isInfoEnabled()) {
      logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event);
    }
  }

  private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) {
    if (receiveStream.format instanceof VideoFormat) {
      rtpConnector.packetBuffer.disable(receiveStream.ssrc);
      emptyPacketBuffer(receiveStream.ssrc);
    }

    if (receiveStream.dataSink != null) {
      try {
        receiveStream.dataSink.stop();
      } catch (IOException e) {
        logger.error("Failed to stop DataSink " + e);
      }

      receiveStream.dataSink.close();
    }

    if (receiveStream.processor != null) {
      receiveStream.processor.stop();
      receiveStream.processor.close();
    }

    DataSource dataSource = receiveStream.receiveStream.getDataSource();
    if (dataSource != null) {
      try {
        dataSource.stop();
      } catch (IOException ioe) {
        logger.warn("Failed to stop DataSource");
      }
      dataSource.disconnect();
    }

    synchronized (receiveStreams) {
      receiveStreams.remove(receiveStream);
    }
  }

  /**
   * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
   * the <tt>Processor</tt>s that this instance uses to transcode media.
   *
   * @param ev the event to handle.
   */
  public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
      return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
      logger.warn("Event from an orphaned processor, ignoring: " + ev);
      return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
      if (logger.isInfoEnabled()) {
        logger.info(
            "Configured processor for ReceiveStream ssrc="
                + desc.ssrc
                + " ("
                + desc.format
                + ")"
                + " "
                + System.currentTimeMillis());
      }

      boolean audio = desc.format instanceof AudioFormat;

      if (audio) {
        ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
        if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
          logger.error(
              "Failed to set the Processor content "
                  + "descriptor to "
                  + AUDIO_CONTENT_DESCRIPTOR
                  + ". Actual result: "
                  + cd);
          removeReceiveStream(desc, false);
          return;
        }
      }

      for (TrackControl track : processor.getTrackControls()) {
        Format trackFormat = track.getFormat();

        if (audio) {
          final long ssrc = desc.ssrc;
          SilenceEffect silenceEffect;
          if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
            silenceEffect = new SilenceEffect(48000);
          } else {
            // We haven't tested that the RTP timestamps survive
            // the journey through the chain when codecs other than
            // opus are in use, so for the moment we rely on FMJ's
            // timestamps for non-opus formats.
            silenceEffect = new SilenceEffect();
          }

          silenceEffect.setListener(
              new SilenceEffect.Listener() {
                boolean first = true;

                @Override
                public void onSilenceNotInserted(long timestamp) {
                  if (first) {
                    first = false;
                    // send event only
                    audioRecordingStarted(ssrc, timestamp);
                  } else {
                    // change file and send event
                    resetRecording(ssrc, timestamp);
                  }
                }
              });
          desc.silenceEffect = silenceEffect;
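          // Feed the audio levels into the active speaker detector, which in
          // turn drives the SPEAKER_CHANGED recorder events.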
          AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
          audioLevelEffect.setAudioLevelListener(
              new SimpleAudioLevelListener() {
                @Override
                public void audioLevelChanged(int level) {
                  activeSpeakerDetector.levelChanged(ssrc, level);
                }
              });

          try {
            // We add an effect, which will insert "silence" in
            // place of lost packets.
            track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
          } catch (UnsupportedPlugInException upie) {
            logger.warn("Failed to insert silence effect: " + upie);
            // But do go on, a recording without extra silence is
            // better than nothing ;)
          }
        } else {
          // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
          if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format);
          else {
            logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
            // we currently only support vp8
            removeReceiveStream(desc, false);
            return;
          }
        }
      }

      processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      desc.dataSource = processor.getDataOutput();

      long ssrc = desc.ssrc;
      boolean audio = desc.format instanceof AudioFormat;
      String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

      // XXX '\' on windows?
      String filename = getNextFilename(path + "/" + ssrc, suffix);
      desc.filename = filename;

      DataSink dataSink;
      if (audio) {
        try {
          dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
        } catch (NoDataSinkException ndse) {
          logger.error("Could not create DataSink: " + ndse);
          removeReceiveStream(desc, false);
          return;
        }

      } else {
        dataSink = new WebmDataSink(filename, desc.dataSource);
      }

      if (logger.isInfoEnabled())
        logger.info(
            "Created DataSink ("
                + dataSink
                + ") for SSRC="
                + ssrc
                + ". Output filename: "
                + filename);
      try {
        dataSink.open();
      } catch (IOException e) {
        logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (!audio) {
        final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
        webmDataSink.setSsrc(ssrc);
        webmDataSink.setEventHandler(eventHandler);
        webmDataSink.setKeyFrameControl(
            new KeyFrameControlAdapter() {
              @Override
              public boolean requestKeyFrame(boolean urgent) {
                return requestFIR(webmDataSink);
              }
            });
      }

      try {
        dataSink.start();
      } catch (IOException e) {
        logger.error(
            "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc);

      desc.dataSink = dataSink;

      processor.start();
    } else if (logger.isDebugEnabled()) {
      logger.debug(
          "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
  }

  /**
   * Restarts the recording for a specific SSRC.
   *
   * @param ssrc the SSRC for which to restart recording.
   * @param timestamp the RTP timestamp of the first RTP packet of the new recording.
   */
  private void resetRecording(long ssrc, long timestamp) {
    ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);

    // we only restart audio recordings
    if (receiveStream != null && receiveStream.format instanceof AudioFormat) {
      String newFilename = getNextFilename(path + "/" + ssrc, AUDIO_FILENAME_SUFFIX);

      // flush the buffer contained in the MP3 encoder
      String s = "trying to flush ssrc=" + ssrc;
      Processor p = receiveStream.processor;
      if (p != null) {
        s += " p!=null";
        for (TrackControl tc : p.getTrackControls()) {
          Object o = tc.getControl(FlushableControl.class.getName());
          if (o != null) ((FlushableControl) o).flush();
        }
      }

      if (logger.isInfoEnabled()) {
        logger.info("Restarting recording for SSRC=" + ssrc + ". New filename: " + newFilename);
      }

      receiveStream.dataSink.close();
      receiveStream.dataSink = null;

      // flush the FMJ jitter buffer
      // DataSource ds = receiveStream.receiveStream.getDataSource();
      // if (ds instanceof net.sf.fmj.media.protocol.rtp.DataSource)
      //    ((net.sf.fmj.media.protocol.rtp.DataSource)ds).flush();

      receiveStream.filename = newFilename;
      try {
        receiveStream.dataSink =
            Manager.createDataSink(
                receiveStream.dataSource, new MediaLocator("file:" + newFilename));
      } catch (NoDataSinkException ndse) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ndse);
        removeReceiveStream(receiveStream, false);
      }

      try {
        receiveStream.dataSink.open();
        receiveStream.dataSink.start();
      } catch (IOException ioe) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ioe);
        removeReceiveStream(receiveStream, false);
      }

      audioRecordingStarted(ssrc, timestamp);
    }
  }

  private void audioRecordingStarted(long ssrc, long timestamp) {
    ReceiveStreamDesc desc = findReceiveStream(ssrc);
    if (desc == null) return;

    RecorderEvent event = new RecorderEvent();
    event.setType(RecorderEvent.Type.RECORDING_STARTED);
    event.setMediaType(MediaType.AUDIO);
    event.setSsrc(ssrc);
    event.setRtpTimestamp(timestamp);
    event.setFilename(desc.filename);

    if (eventHandler != null) eventHandler.handleEvent(event);
  }

  /**
   * Handles a request from a specific <tt>DataSink</tt> to request a keyframe by sending an RTCP
   * feedback FIR message to the media source.
   *
   * @param dataSink the <tt>DataSink</tt> which requests that a keyframe be requested with a FIR
   *     message.
   * @return <tt>true</tt> if a keyframe was successfully requested, <tt>false</tt> otherwise
   */
  private boolean requestFIR(WebmDataSink dataSink) {
    ReceiveStreamDesc desc = findReceiveStream(dataSink);
    if (desc != null && rtcpFeedbackSender != null) {
      return rtcpFeedbackSender.sendFIR((int) desc.ssrc);
    }

    return false;
  }

  /**
   * Returns "prefix"+"suffix" if a file with that name does not exist. Otherwise, returns the
   * first nonexistent filename of the form "prefix-"+i+"suffix", for an integer i. i is bounded by
   * 1000 to prevent hanging, and on failure to find a nonexistent filename the method returns
   * null.
   *
   * @param prefix the part of the filename preceding the optional "-i" and the suffix.
   * @param suffix the suffix (e.g. the file extension, including the dot) of the filename.
   * @return an available filename built from <tt>prefix</tt> and <tt>suffix</tt>, or <tt>null</tt>.
   */
  private String getNextFilename(String prefix, String suffix) {
    if (!new File(prefix + suffix).exists()) return prefix + suffix;

    int i = 1;
    String s;
    do {
      s = prefix + "-" + i + suffix;
      if (!new File(s).exists()) return s;
      i++;
    } while (i < 1000); // don't hang indefinitely...

    return null;
  }

  /**
   * Finds the <tt>ReceiveStreamDesc</tt> with a particular <tt>Processor</tt>
   *
   * @param processor The <tt>Processor</tt> to match.
   * @return the <tt>ReceiveStreamDesc</tt> with a particular <tt>Processor</tt>, or <tt>null</tt>.
   */
  private ReceiveStreamDesc findReceiveStream(Processor processor) {
    if (processor == null) return null;

    synchronized (receiveStreams) {
      for (ReceiveStreamDesc r : receiveStreams) if (processor.equals(r.processor)) return r;
    }

    return null;
  }

  /**
   * Finds the <tt>ReceiveStreamDesc</tt> with a particular <tt>DataSink</tt>
   *
   * @param dataSink The <tt>DataSink</tt> to match.
   * @return the <tt>ReceiveStreamDesc</tt> with a particular <tt>DataSink</tt>, or <tt>null</tt>.
   */
  private ReceiveStreamDesc findReceiveStream(DataSink dataSink) {
    if (dataSink == null) return null;

    synchronized (receiveStreams) {
      for (ReceiveStreamDesc r : receiveStreams) if (dataSink.equals(r.dataSink)) return r;
    }

    return null;
  }

  /**
   * Finds the <tt>ReceiveStreamDesc</tt> with a particular SSRC.
   *
   * @param ssrc The SSRC to match.
   * @return the <tt>ReceiveStreamDesc</tt> with a particular SSRC, or <tt>null</tt>.
   */
  private ReceiveStreamDesc findReceiveStream(long ssrc) {
    synchronized (receiveStreams) {
      for (ReceiveStreamDesc r : receiveStreams) if (ssrc == r.ssrc) return r;
    }

    return null;
  }

  /**
   * Gets the SSRC of a <tt>ReceiveStream</tt> as a (non-negative) <tt>long</tt>.
   *
   * <p>FMJ stores the 32-bit SSRC values in <tt>int</tt>s, and the <tt>ReceiveStream.getSSRC()</tt>
   * implementation(s) do not convert the negative <tt>int</tt> values, which sometimes result from
   * reading a 32-bit field, into the correct unsigned <tt>long</tt> value. So we do the conversion
   * here.
   *
   * @param receiveStream the <tt>ReceiveStream</tt> for which to get the SSRC.
   * @return the SSRC of <tt>receiveStream</tt> as a (non-negative) <tt>long</tt>.
   */
  private long getReceiveStreamSSRC(ReceiveStream receiveStream) {
    return 0xffffffffL & receiveStream.getSSRC();
  }

  /**
   * Implements {@link ActiveSpeakerChangedListener#activeSpeakerChanged(long)}. Notifies this
   * <tt>RecorderRtpImpl</tt> that the audio <tt>ReceiveStream</tt> considered active has changed,
   * and that the new active stream has SSRC <tt>ssrc</tt>.
   *
   * @param ssrc the SSRC of the new active stream.
   */
  @Override
  public void activeSpeakerChanged(long ssrc) {
    if (eventHandler != null) {
      RecorderEvent e = new RecorderEvent();
      e.setAudioSsrc(ssrc);
      // TODO: how do we time this?
      e.setInstant(System.currentTimeMillis());
      e.setType(RecorderEvent.Type.SPEAKER_CHANGED);
      e.setMediaType(MediaType.VIDEO);
      eventHandler.handleEvent(e);
    }
  }

  private void handleRtpPacket(RawPacket pkt) {
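    // On the first VP8 packet seen for a given SSRC, request a keyframe via an
    // RTCP FIR so that the recording of that stream can start decoding
    // immediately.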
    if (pkt != null && pkt.getPayloadType() == vp8PayloadType) {
      int ssrc = pkt.getSSRC();
      if (!activeVideoSsrcs.contains(ssrc & 0xffffffffL)) {
        synchronized (activeVideoSsrcs) {
          if (!activeVideoSsrcs.contains(ssrc & 0xffffffffL)) {
            activeVideoSsrcs.add(ssrc & 0xffffffffL);
            rtcpFeedbackSender.sendFIR(ssrc);
          }
        }
      }
    }
  }

  private void handleRtcpPacket(RawPacket pkt) {
    getSynchronizer().addRTCPPacket(pkt);
    eventHandler.nudge();
  }

  public SynchronizerImpl getSynchronizer() {
    if (synchronizer == null) synchronizer = new SynchronizerImpl();
    return synchronizer;
  }

  public void setSynchronizer(Synchronizer synchronizer) {
    if (synchronizer instanceof SynchronizerImpl) {
      this.synchronizer = (SynchronizerImpl) synchronizer;
    }
  }

  public void connect(Recorder recorder) {
    if (!(recorder instanceof RecorderRtpImpl)) return;

    ((RecorderRtpImpl) recorder).setSynchronizer(getSynchronizer());
  }

  private void emptyPacketBuffer(long ssrc) {
    RawPacket[] pkts = rtpConnector.packetBuffer.emptyBuffer(ssrc);
    RTPConnectorImpl.OutputDataStreamImpl dataStream;

    try {
      dataStream = rtpConnector.getDataOutputStream();
    } catch (IOException ioe) {
      logger.error("Failed to empty packet buffer for SSRC=" + ssrc + ": " + ioe);
      return;
    }
    for (RawPacket pkt : pkts)
      dataStream.write(
          pkt.getBuffer(), pkt.getOffset(), pkt.getLength(), false /* already transformed */);
  }

  /** The <tt>RTPConnector</tt> implementation used by this <tt>RecorderRtpImpl</tt>. */
  private class RTPConnectorImpl implements RTPConnector {
    private PushSourceStreamImpl controlInputStream;
    private OutputDataStreamImpl controlOutputStream;

    private PushSourceStreamImpl dataInputStream;
    private OutputDataStreamImpl dataOutputStream;

    private SourceTransferHandler dataTransferHandler;
    private SourceTransferHandler controlTransferHandler;

    private RawPacket pendingDataPacket = new RawPacket();
    private RawPacket pendingControlPacket = new RawPacket();

    private PacketTransformer rtpPacketTransformer = null;
    private PacketTransformer rtcpPacketTransformer = null;

    /** The PacketBuffer instance which we use as a jitter buffer. */
    private PacketBuffer packetBuffer;

    private RTPConnectorImpl(byte redPT, byte ulpfecPT) {
      packetBuffer = new PacketBuffer();
      // The chain of transformers will be applied in reverse order for
      // incoming packets.
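      // For an incoming packet this means: strip RED, recover missing packets
      // via ULPFEC, split compound RTCP packets, let this recorder peek at the
      // packet via TransformEngineImpl, and finally queue it in the jitter
      // buffer (PacketBuffer).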
      TransformEngine transformEngine =
          new TransformEngineChain(
              new TransformEngine[] {
                packetBuffer,
                new TransformEngineImpl(),
                new CompoundPacketEngine(),
                new FECTransformEngine(ulpfecPT, (byte) -1),
                new REDTransformEngine(redPT, (byte) -1)
              });

      rtpPacketTransformer = transformEngine.getRTPTransformer();
      rtcpPacketTransformer = transformEngine.getRTCPTransformer();
    }

    private RTPConnectorImpl() {}

    @Override
    public void close() {
      try {
        if (dataOutputStream != null) dataOutputStream.close();
        if (controlOutputStream != null) controlOutputStream.close();
      } catch (IOException ioe) {
        throw new UndeclaredThrowableException(ioe);
      }
    }

    @Override
    public PushSourceStream getControlInputStream() throws IOException {
      if (controlInputStream == null) {
        controlInputStream = new PushSourceStreamImpl(true);
      }

      return controlInputStream;
    }

    @Override
    public OutputDataStream getControlOutputStream() throws IOException {
      if (controlOutputStream == null) {
        controlOutputStream = new OutputDataStreamImpl(true);
      }

      return controlOutputStream;
    }

    @Override
    public PushSourceStream getDataInputStream() throws IOException {
      if (dataInputStream == null) {
        dataInputStream = new PushSourceStreamImpl(false);
      }

      return dataInputStream;
    }

    @Override
    public OutputDataStreamImpl getDataOutputStream() throws IOException {
      if (dataOutputStream == null) {
        dataOutputStream = new OutputDataStreamImpl(false);
      }

      return dataOutputStream;
    }

    @Override
    public double getRTCPBandwidthFraction() {
      return -1;
    }

    @Override
    public double getRTCPSenderBandwidthFraction() {
      return -1;
    }

    @Override
    public int getReceiveBufferSize() {
      // TODO Auto-generated method stub
      return 0;
    }

    @Override
    public int getSendBufferSize() {
      // TODO Auto-generated method stub
      return 0;
    }

    @Override
    public void setReceiveBufferSize(int arg0) throws IOException {
      // TODO Auto-generated method stub

    }

    @Override
    public void setSendBufferSize(int arg0) throws IOException {
      // TODO Auto-generated method stub
    }

    private class OutputDataStreamImpl implements OutputDataStream {
      boolean isControlStream;
      private RawPacket[] rawPacketArray = new RawPacket[1];

      public OutputDataStreamImpl(boolean isControlStream) {
        this.isControlStream = isControlStream;
      }

      public int write(byte[] buffer, int offset, int length) {
        return write(buffer, offset, length, true);
      }

      public int write(byte[] buffer, int offset, int length, boolean transform) {
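        // Copy the given data into a reusable RawPacket, optionally run it
        // through the reverse transform chain (see the RTPConnectorImpl
        // constructor), and hand the resulting packet(s) to the RTPManager by
        // triggering the transfer handler of the matching PushSourceStream.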
        RawPacket pkt = rawPacketArray[0];
        if (pkt == null) pkt = new RawPacket();
        rawPacketArray[0] = pkt;

        byte[] pktBuf = pkt.getBuffer();
        if (pktBuf == null || pktBuf.length < length) {
          pktBuf = new byte[length];
          pkt.setBuffer(pktBuf);
        }
        System.arraycopy(buffer, offset, pktBuf, 0, length);
        pkt.setOffset(0);
        pkt.setLength(length);

        if (transform) {
          PacketTransformer packetTransformer =
              isControlStream ? rtcpPacketTransformer : rtpPacketTransformer;

          if (packetTransformer != null)
            rawPacketArray = packetTransformer.reverseTransform(rawPacketArray);
        }

        SourceTransferHandler transferHandler;
        PushSourceStream pushSourceStream;

        try {
          if (isControlStream) {
            transferHandler = controlTransferHandler;
            pushSourceStream = getControlInputStream();
          } else {
            transferHandler = dataTransferHandler;
            pushSourceStream = getDataInputStream();
          }
        } catch (IOException ioe) {
          throw new UndeclaredThrowableException(ioe);
        }

        for (int i = 0; i < rawPacketArray.length; i++) {
          RawPacket packet = rawPacketArray[i];

          // keep the first element for reuse
          if (i != 0) rawPacketArray[i] = null;

          if (packet != null) {
            if (isControlStream) pendingControlPacket = packet;
            else pendingDataPacket = packet;

            if (transferHandler != null) {
              transferHandler.transferData(pushSourceStream);
            }
          }
        }

        return length;
      }

      public void close() throws IOException {}
    }

    /**
     * A dummy implementation of {@link PushSourceStream}.
     *
     * @author Vladimir Marinov
     */
    private class PushSourceStreamImpl implements PushSourceStream {

      private boolean isControlStream = false;

      public PushSourceStreamImpl(boolean isControlStream) {
        this.isControlStream = isControlStream;
      }

      /** Not implemented because there are currently no uses of the underlying functionality. */
      @Override
      public boolean endOfStream() {
        return false;
      }

      /** Not implemented because there are currently no uses of the underlying functionality. */
      @Override
      public ContentDescriptor getContentDescriptor() {
        return null;
      }

      /** Not implemented because there are currently no uses of the underlying functionality. */
      @Override
      public long getContentLength() {
        return 0;
      }

      /** Not implemented because there are currently no uses of the underlying functionality. */
      @Override
      public Object getControl(String arg0) {
        return null;
      }

      /** Not implemented because there are currently no uses of the underlying functionality. */
      @Override
      public Object[] getControls() {
        return null;
      }

      /**
       * Returns the length of the pending packet (if any), so that the caller can allocate a
       * sufficiently large buffer for {@link #read(byte[], int, int)}.
       */
      @Override
      public int getMinimumTransferSize() {
        if (isControlStream) {
          if (pendingControlPacket.getBuffer() != null) {
            return pendingControlPacket.getLength();
          }
        } else {
          if (pendingDataPacket.getBuffer() != null) {
            return pendingDataPacket.getLength();
          }
        }

        return 0;
      }

      @Override
      public int read(byte[] buffer, int offset, int length) throws IOException {

        RawPacket pendingPacket;
        if (isControlStream) {
          pendingPacket = pendingControlPacket;
        } else {
          pendingPacket = pendingDataPacket;
        }
        int bytesToRead = 0;
        byte[] pendingPacketBuffer = pendingPacket.getBuffer();
        if (pendingPacketBuffer != null) {
          int pendingPacketLength = pendingPacket.getLength();
          bytesToRead = length > pendingPacketLength ? pendingPacketLength : length;
          System.arraycopy(
              pendingPacketBuffer, pendingPacket.getOffset(), buffer, offset, bytesToRead);
        }
        return bytesToRead;
      }

      /**
       * {@inheritDoc}
       *
       * <p>We keep the first non-null <tt>SourceTransferHandler</tt> that was set, because we don't
       * want it to be overwritten when we initialize a second <tt>RTPManager</tt> with this
       * <tt>RTPConnector</tt>.
       *
       * <p>See {@link RecorderRtpImpl#start(String, String)}
       */
      @Override
      public void setTransferHandler(SourceTransferHandler transferHandler) {
        if (isControlStream) {
          if (RTPConnectorImpl.this.controlTransferHandler == null) {
            RTPConnectorImpl.this.controlTransferHandler = transferHandler;
          }
        } else {
          if (RTPConnectorImpl.this.dataTransferHandler == null) {
            RTPConnectorImpl.this.dataTransferHandler = transferHandler;
          }
        }
      }
    }

    /**
     * A <tt>TransformEngine</tt> implementation which allows <tt>RecorderRtpImpl</tt> to intercept
     * incoming RTP and RTCP packets.
     */
    private class TransformEngineImpl implements TransformEngine {
      SinglePacketTransformer rtpTransformer =
          new SinglePacketTransformer() {
            @Override
            public RawPacket transform(RawPacket pkt) {
              return pkt;
            }

            @Override
            public RawPacket reverseTransform(RawPacket pkt) {
              RecorderRtpImpl.this.handleRtpPacket(pkt);
              return pkt;
            }

            @Override
            public void close() {}
          };

      SinglePacketTransformer rtcpTransformer =
          new SinglePacketTransformer() {
            @Override
            public RawPacket transform(RawPacket pkt) {
              return pkt;
            }

            @Override
            public RawPacket reverseTransform(RawPacket pkt) {
              RecorderRtpImpl.this.handleRtcpPacket(pkt);
              if (pkt != null && pkt.getRTCPPayloadType() == 203) {
                // An RTCP BYE packet. Remove the receive stream before
                // it gets to FMJ, because we want to, for example,
                // flush the packet buffer before that.

                long ssrc = pkt.getRTCPSSRC() & 0xffffffffL;
                if (logger.isInfoEnabled()) logger.info("RTCP BYE for SSRC=" + ssrc);

                ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);
                if (receiveStream != null) removeReceiveStream(receiveStream, false);
              }

              return pkt;
            }

            @Override
            public void close() {}
          };

      @Override
      public PacketTransformer getRTPTransformer() {
        return rtpTransformer;
      }

      @Override
      public PacketTransformer getRTCPTransformer() {
        return rtcpTransformer;
      }
    }
  }

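  /**
   * A <tt>RecorderEventHandler</tt> wrapper which delays <tt>RECORDING_STARTED</tt> events until
   * the <tt>Synchronizer</tt> is able to map their RTP timestamps to local time.
   */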
  private class RecorderEventHandlerImpl implements RecorderEventHandler {
    private RecorderEventHandler handler;
    private final Set<RecorderEvent> pendingEvents = new HashSet<RecorderEvent>();

    private RecorderEventHandlerImpl(RecorderEventHandler handler) {
      this.handler = handler;
    }

    @Override
    public boolean handleEvent(RecorderEvent ev) {
      if (ev == null) return true;
      if (RecorderEvent.Type.RECORDING_STARTED.equals(ev.getType())) {
        long instant = getSynchronizer().getLocalTime(ev.getSsrc(), ev.getRtpTimestamp());
        if (instant != -1) {
          ev.setInstant(instant);
          return handler.handleEvent(ev);
        } else {
          pendingEvents.add(ev);
          return true;
        }
      }
      return handler.handleEvent(ev);
    }

    private void nudge() {
      for (Iterator<RecorderEvent> iter = pendingEvents.iterator(); iter.hasNext(); ) {
        RecorderEvent ev = iter.next();
        long instant = getSynchronizer().getLocalTime(ev.getSsrc(), ev.getRtpTimestamp());
        if (instant != -1) {
          iter.remove();
          ev.setInstant(instant);
          handler.handleEvent(ev);
        }
      }
    }

    @Override
    public void close() {
      for (RecorderEvent ev : pendingEvents) handler.handleEvent(ev);
    }
  }

  /** Represents a <tt>ReceiveStream</tt> for the purposes of this <tt>RecorderRtpImpl</tt>. */
  private class ReceiveStreamDesc {
    /**
     * The actual <tt>ReceiveStream</tt> which is represented by this <tt>ReceiveStreamDesc</tt>.
     */
    private ReceiveStream receiveStream;

    /** The SSRC of the stream. */
    long ssrc;

    /**
     * The <tt>Processor</tt> used to transcode this receive stream into a format appropriate for
     * saving to a file.
     */
    private Processor processor;

    /** The <tt>DataSink</tt> which saves the <tt>this.dataSource</tt> to a file. */
    private DataSink dataSink;

    /**
     * The <tt>DataSource</tt> for this receive stream which is to be saved using a
     * <tt>DataSink</tt> (i.e. the <tt>DataSource</tt> "after" all needed transcoding is done).
     */
    private DataSource dataSource;

    /** The name of the file into which this stream is being saved. */
    private String filename;

    /** The (original) format of this receive stream. */
    private Format format;

    /** The <tt>SilenceEffect</tt> used for this stream (for audio streams only). */
    private SilenceEffect silenceEffect;

    private ReceiveStreamDesc(ReceiveStream receiveStream) {
      this.receiveStream = receiveStream;
      this.ssrc = getReceiveStreamSSRC(receiveStream);
    }
  }
}
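The RTCP handling in the transformer above keys off packet type 203 (an RTCP BYE) and masks the SSRC with 0xffffffffL to obtain its unsigned 32-bit value. As a rough, self-contained illustration of that header layout, written only against RFC 3550's fixed header and not part of the library, the two fields can be read out of a raw RTCP buffer like this:

// Illustrative sketch only: minimal RTCP header inspection mirroring the checks above.
// Assumes the buffer holds a single, well-formed RTCP packet starting at offset 0.
public final class RtcpHeaderSketch {
  /** The RTCP packet type of a BYE packet, per RFC 3550. */
  public static final int RTCP_BYE = 203;

  /** Returns the packet type, i.e. the second header byte, as an unsigned value. */
  public static int packetType(byte[] buf) {
    return buf[1] & 0xff;
  }

  /** Returns the sender SSRC (header bytes 4..7) as an unsigned 32-bit value. */
  public static long ssrc(byte[] buf) {
    return ((buf[4] & 0xffL) << 24)
        | ((buf[5] & 0xffL) << 16)
        | ((buf[6] & 0xffL) << 8)
        | (buf[7] & 0xffL);
  }

  public static void main(String[] args) {
    // A minimal BYE packet (V=2, one source) with SSRC 42.
    byte[] bye = {(byte) 0x81, (byte) 0xcb, 0x00, 0x01, 0x00, 0x00, 0x00, 0x2a};
    System.out.println(packetType(bye) == RTCP_BYE); // true
    System.out.println(ssrc(bye)); // 42
  }
}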
Example #28
0
/**
 * @author Emil Ivov
 * @author Lyubomir Marinov
 */
public class ConfigurationActivator implements BundleActivator {
  /** The <tt>Logger</tt> used by the <tt>ConfigurationActivator</tt> class for logging output. */
  private static final Logger logger = Logger.getLogger(ConfigurationActivator.class);

  /** The currently registered {@link ConfigurationService} instance. */
  private ConfigurationService cs;

  /**
   * Starts the configuration service.
   *
   * @param bundleContext the <tt>BundleContext</tt> as provided by the OSGi framework.
   * @throws Exception if anything goes wrong
   */
  public void start(BundleContext bundleContext) throws Exception {
    FileAccessService fas = ServiceUtils.getService(bundleContext, FileAccessService.class);

    if (fas != null) {
      File usePropFileConfig;
      try {
        usePropFileConfig =
            fas.getPrivatePersistentFile(".usepropfileconfig", FileCategory.PROFILE);
      } catch (Exception ise) {
        // There is somewhat of a chicken-and-egg dependency between
        // FileConfigurationServiceImpl and ConfigurationServiceImpl:
        // FileConfigurationServiceImpl throws IllegalStateException if
        // certain System properties are not set,
        // ConfigurationServiceImpl will make sure that these properties
        // are set but it will do that later.
        // A SecurityException is thrown when the destination
        // is not writable or we do not have access to that folder
        usePropFileConfig = null;
      }

      if (usePropFileConfig != null && usePropFileConfig.exists()) {
        logger.info("Using properties file configuration store.");
        this.cs = LibJitsi.getConfigurationService();
      }
    }

    if (this.cs == null) {
      this.cs = new JdbcConfigService(fas);
    }

    bundleContext.registerService(ConfigurationService.class.getName(), this.cs, null);

    fixPermissions(this.cs);
  }

  /**
   * Causes the configuration service to store the properties object and unregisters the
   * configuration service.
   *
   * @param bundleContext <tt>BundleContext</tt>
   * @throws Exception if anything goes wrong while storing the properties managed by the
   *     <tt>ConfigurationService</tt> implementation provided by this bundle and while
   *     unregistering the service in question
   */
  public void stop(BundleContext bundleContext) throws Exception {
    this.cs.storeConfiguration();
    this.cs = null;
  }

  /**
   * Makes the home folder and the configuration file readable and writable only by the owner.
   *
   * @param cs the <tt>ConfigurationService</tt> instance to check for home folder and configuration
   *     file.
   */
  private static void fixPermissions(ConfigurationService cs) {
    if (!OSUtils.IS_LINUX && !OSUtils.IS_MAC) return;

    try {
      // let's check config file and config folder
      File homeFolder = new File(cs.getScHomeDirLocation(), cs.getScHomeDirName());
      Set<PosixFilePermission> perms =
          new HashSet<PosixFilePermission>() {
            {
              add(PosixFilePermission.OWNER_READ);
              add(PosixFilePermission.OWNER_WRITE);
              add(PosixFilePermission.OWNER_EXECUTE);
            }
          };
      Files.setPosixFilePermissions(Paths.get(homeFolder.getAbsolutePath()), perms);

      String fileName = cs.getConfigurationFilename();
      if (fileName != null) {
        File cf = new File(homeFolder, fileName);
        if (cf.exists()) {
          perms =
              new HashSet<PosixFilePermission>() {
                {
                  add(PosixFilePermission.OWNER_READ);
                  add(PosixFilePermission.OWNER_WRITE);
                }
              };
          Files.setPosixFilePermissions(Paths.get(cf.getAbsolutePath()), perms);
        }
      }
    } catch (Throwable t) {
      logger.error("Error creating c lib instance for fixing file permissions", t);

      if (t instanceof InterruptedException) Thread.currentThread().interrupt();
      else if (t instanceof ThreadDeath) throw (ThreadDeath) t;
    }
  }
}
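Since the activator above already relies on java.nio.file, the same owner-only permission sets can be expressed more compactly with PosixFilePermissions.fromString. The following is only a sketch of the equivalent calls, not the bundle's actual code; the class and method names are made up for illustration:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermissions;

// Illustrative sketch only; not part of the bundle.
public final class PermissionsSketch {
  /** Restricts the configuration home folder and file to the owner, as fixPermissions does. */
  public static void restrictToOwner(String homeDirPath, String configFileName)
      throws IOException {
    Path home = Paths.get(homeDirPath);
    // rwx for the owner only, matching the folder permissions set above.
    Files.setPosixFilePermissions(home, PosixFilePermissions.fromString("rwx------"));

    if (configFileName != null) {
      Path configFile = home.resolve(configFileName);
      if (Files.exists(configFile)) {
        // rw for the owner only, matching the file permissions set above.
        Files.setPosixFilePermissions(configFile, PosixFilePermissions.fromString("rw-------"));
      }
    }
  }
}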
Example #29
0
/**
 * @author Bing SU ([email protected])
 * @author Lyubomir Marinov
 * @author Boris Grozev
 */
public abstract class RTPConnectorInputStream implements PushSourceStream, Runnable {
  /**
   * The value of the property <tt>controls</tt> of <tt>RTPConnectorInputStream</tt> when there are
   * no controls. Explicitly defined in order to reduce unnecessary allocations.
   */
  private static final Object[] EMPTY_CONTROLS = new Object[0];

  /**
   * The length in bytes of the buffers of <tt>RTPConnectorInputStream</tt> receiving packets from
   * the network.
   */
  public static final int PACKET_RECEIVE_BUFFER_LENGTH = 4 * 1024;

  /**
   * The <tt>Logger</tt> used by the <tt>RTPConnectorInputStream</tt> class and its instances to
   * print debug information.
   */
  private static final Logger logger = Logger.getLogger(RTPConnectorInputStream.class);

  /** Packet receive buffer */
  private final byte[] buffer = new byte[PACKET_RECEIVE_BUFFER_LENGTH];

  /** Whether this stream is closed. Used to control the termination of the worker thread. */
  protected boolean closed;

  public Participant videoRecorder;

  /**
   * The <tt>DatagramPacketFilter</tt>s which allow dropping <tt>DatagramPacket</tt>s before they
   * are converted into <tt>RawPacket</tt>s.
   */
  private DatagramPacketFilter[] datagramPacketFilters;

  /** Whether an I/O exception was caught while reading from the socket. */
  protected boolean ioError = false;

  /**
   * The packet data to be read out of this instance through its {@link #read(byte[], int, int)}
   * method.
   */
  private RawPacket pkt;

  /** The <tt>Object</tt> which synchronizes the access to {@link #pkt}. */
  private final Object pktSyncRoot = new Object();

  /** The adapter of this <tt>PushSourceStream</tt> to the <tt>PushBufferStream</tt> interface. */
  private final PushBufferStream pushBufferStream;

  /**
   * The pool of <tt>RawPacket[]</tt> instances to reduce their allocations and garbage collection.
   * Contains arrays full of <tt>null</tt>.
   */
  private final Queue<RawPacket[]> rawPacketArrayPool = new LinkedBlockingQueue<RawPacket[]>();

  /**
   * The pool of <tt>RawPacket</tt> instances to reduce their allocations and garbage collection.
   */
  private final Queue<RawPacket> rawPacketPool = new LinkedBlockingQueue<RawPacket>();

  /** The Thread receiving packets. */
  protected Thread receiverThread = null;

  /** SourceTransferHandler object which is used to read packets. */
  private SourceTransferHandler transferHandler;

  /**
   * Whether this <tt>RTPConnectorInputStream</tt> is enabled or disabled. While disabled, the
   * stream does not accept any packets.
   */
  private boolean enabled = true;

  /**
   * Initializes a new <tt>RTPConnectorInputStream</tt> which is to receive packet data from a
   * specific UDP socket.
   */
  public RTPConnectorInputStream() {
    // PacketLoggingService
    addDatagramPacketFilter(
        new DatagramPacketFilter() {
          /**
           * Used for debugging. As we don't log every packet, we must count them and decide which
           * to log.
           */
          private long numberOfPackets = 0;

          public boolean accept(DatagramPacket p) {
            numberOfPackets++;
            if (RTPConnectorOutputStream.logPacket(numberOfPackets)) {
              PacketLoggingService packetLogging = LibJitsi.getPacketLoggingService();

              if ((packetLogging != null)
                  && packetLogging.isLoggingEnabled(PacketLoggingService.ProtocolName.RTP))
                doLogPacket(p);
            }

            return true;
          }
        });

    /*
     * Adapt this PushSourceStream to the PushBufferStream interface in
     * order to make it possible to read the Buffer flags of RawPacket.
     */
    pushBufferStream =
        new PushBufferStreamAdapter(this, null) {
          @Override
          protected int doRead(Buffer buffer, byte[] data, int offset, int length)
              throws IOException {
            return RTPConnectorInputStream.this.read(buffer, data, offset, length);
          }
        };
  }

  /** Closes this stream by signaling the worker thread in {@link #run()} to terminate. */
  public synchronized void close() {
    closed = true;
  }

  /**
   * Creates a new <tt>RawPacket</tt> from a specific <tt>DatagramPacket</tt> in order to have this
   * instance receive its packet data through its {@link #read(byte[], int, int)} method. Returns an
   * array of <tt>RawPacket</tt> with the created packet as its first element (and <tt>null</tt> for
   * the other elements).
   *
   * <p>Allows extenders to intercept the packet data and possibly filter and/or modify it.
   *
   * @param datagramPacket the <tt>DatagramPacket</tt> containing the packet data
   * @return an array of <tt>RawPacket</tt> containing the <tt>RawPacket</tt> which contains the
   *     packet data of the specified <tt>DatagramPacket</tt> as its first element.
   */
  protected RawPacket[] createRawPacket(DatagramPacket datagramPacket) {
    RawPacket[] pkts = rawPacketArrayPool.poll();
    if (pkts == null) pkts = new RawPacket[1];

    RawPacket pkt = rawPacketPool.poll();
    if (pkt == null) pkt = new RawPacket();

    pkt.setBuffer(datagramPacket.getData());
    pkt.setFlags(0);
    pkt.setLength(datagramPacket.getLength());
    pkt.setOffset(datagramPacket.getOffset());

    pkts[0] = pkt;
    return pkts;
  }

  /**
   * Provides a dummy implementation of {@link RTPConnectorInputStream#endOfStream()} that always
   * returns <tt>false</tt>.
   *
   * @return <tt>false</tt>, no matter what.
   */
  public boolean endOfStream() {
    return false;
  }

  /**
   * Provides a dummy implementation of {@link RTPConnectorInputStream#getContentDescriptor()} that
   * always returns <tt>null</tt>.
   *
   * @return <tt>null</tt>, no matter what.
   */
  public ContentDescriptor getContentDescriptor() {
    return null;
  }

  /**
   * Provides a dummy implementation of {@link RTPConnectorInputStream#getContentLength()} that
   * always returns <tt>LENGTH_UNKNOWN</tt>.
   *
   * @return <tt>LENGTH_UNKNOWN</tt>, no matter what.
   */
  public long getContentLength() {
    return LENGTH_UNKNOWN;
  }

  /**
   * Returns the <tt>PushBufferStream</tt> adapter of this instance when <tt>controlType</tt> is
   * the fully qualified name of the <tt>PushBufferStream</tt> class; otherwise, returns
   * <tt>null</tt>.
   *
   * @param controlType the fully qualified class name of the requested control.
   * @return the <tt>PushBufferStream</tt> adapter of this instance, or <tt>null</tt>.
   */
  public Object getControl(String controlType) {
    if (PushBufferStream.class.getName().equals(controlType)) return pushBufferStream;
    else return null;
  }

  /**
   * Provides a dummy implementation of {@link RTPConnectorInputStream#getControls()} that always
   * returns <tt>EMPTY_CONTROLS</tt>.
   *
   * @return <tt>EMPTY_CONTROLS</tt>, no matter what.
   */
  public Object[] getControls() {
    return EMPTY_CONTROLS;
  }

  /**
   * Provides a dummy implementation of {@link RTPConnectorInputStream#getMinimumTransferSize()}
   * that always returns <tt>2 * 1024</tt>.
   *
   * @return <tt>2 * 1024</tt>, no matter what.
   */
  public int getMinimumTransferSize() {
    return 2 * 1024; // comfortably larger than a typical 1500-byte MTU, just to be safe.
  }

  /**
   * Pools the specified <tt>RawPacket</tt> in order to avoid future allocations and to reduce the
   * effects of garbage collection.
   *
   * @param pkt the <tt>RawPacket</tt> to be offered to {@link #rawPacketPool}
   */
  private void poolRawPacket(RawPacket pkt) {
    pkt.setBuffer(null);
    pkt.setFlags(0);
    pkt.setLength(0);
    pkt.setOffset(0);
    rawPacketPool.offer(pkt);
  }

  /**
   * Copies the content of the most recently received packet into <tt>buffer</tt>.
   *
   * @param buffer the <tt>byte[]</tt> that we'd like to copy the content of the packet to.
   * @param offset the position where we are supposed to start writing in <tt>buffer</tt>.
   * @param length the number of <tt>byte</tt>s available for writing in <tt>buffer</tt>.
   * @return the number of bytes read
   * @throws IOException if <tt>length</tt> is less than the size of the packet.
   */
  public int read(byte[] buffer, int offset, int length) throws IOException {
    return read(null, buffer, offset, length);
  }

  /**
   * Copies the content of the most recently received packet into <tt>data</tt>.
   *
   * @param buffer an optional <tt>Buffer</tt> instance associated with the specified <tt>data</tt>,
   *     <tt>offset</tt> and <tt>length</tt> and provided to the method in case the implementation
   *     would like to provide additional <tt>Buffer</tt> properties such as <tt>flags</tt>
   * @param data the <tt>byte[]</tt> that we'd like to copy the content of the packet to.
   * @param offset the position where we are supposed to start writing in <tt>data</tt>.
   * @param length the number of <tt>byte</tt>s available for writing in <tt>data</tt>.
   * @return the number of bytes read
   * @throws IOException if <tt>length</tt> is less than the size of the packet.
   */
  protected int read(Buffer buffer, byte[] data, int offset, int length) throws IOException {
    if (data == null) throw new NullPointerException("data");

    if (ioError) return -1;

    RawPacket pkt;

    synchronized (pktSyncRoot) {
      pkt = this.pkt;
      this.pkt = null;
    }

    int pktLength;

    if (pkt == null) {
      pktLength = 0;
    } else {
      // By default, pkt will be returned to the pool after it was read.
      boolean poolPkt = true;

      try {
        pktLength = pkt.getLength();
        if (length < pktLength) {
          /*
           * If pkt is still the latest RawPacket made available to
           * reading, reinstate it for the next invocation of read;
           * otherwise, return it to the pool.
           */
          poolPkt = false;
          throw new IOException("Input buffer not big enough for " + pktLength);
        } else {
          byte[] pktBuffer = pkt.getBuffer();

          if (pktBuffer == null) {
            throw new NullPointerException(
                "pkt.buffer null, pkt.length " + pktLength + ", pkt.offset " + pkt.getOffset());
          } else {
            System.arraycopy(pkt.getBuffer(), pkt.getOffset(), data, offset, pktLength);
            if (buffer != null) buffer.setFlags(pkt.getFlags());
          }
        }
      } finally {
        if (!poolPkt) {
          synchronized (pktSyncRoot) {
            if (this.pkt == null) this.pkt = pkt;
            else poolPkt = true;
          }
        }
        if (poolPkt) {
          // Return pkt to the pool because it was successfully read.
          poolRawPacket(pkt);
        }
      }
    }

    return pktLength;
  }

  /**
   * Log the packet.
   *
   * @param packet packet to log
   */
  protected abstract void doLogPacket(DatagramPacket packet);

  /**
   * Receive packet.
   *
   * @param p packet for receiving
   * @throws IOException if something goes wrong during receiving
   */
  protected abstract void receivePacket(DatagramPacket p) throws IOException;

  /**
   * Listens for incoming datagrams, stores them for reading by the <tt>read</tt> method and
   * notifies the local <tt>transferHandler</tt> that there's data to be read.
   */
  public void run() {
    DatagramPacket p = new DatagramPacket(buffer, 0, PACKET_RECEIVE_BUFFER_LENGTH);

    while (!closed) {
      try {
        // http://code.google.com/p/android/issues/detail?id=24765
        if (OSUtils.IS_ANDROID) p.setLength(PACKET_RECEIVE_BUFFER_LENGTH);

        receivePacket(p);
      } catch (IOException e) {
        ioError = true;
        break;
      }

      /*
       * Do the DatagramPacketFilters accept the received DatagramPacket?
       */
      DatagramPacketFilter[] datagramPacketFilters = getDatagramPacketFilters();
      boolean accept;

      if (!enabled) accept = false;
      else if (datagramPacketFilters == null) accept = true;
      else {
        accept = true;
        for (int i = 0; i < datagramPacketFilters.length; i++) {
          try {
            if (!datagramPacketFilters[i].accept(p)) {
              accept = false;
              break;
            }
          } catch (Throwable t) {
            if (t instanceof ThreadDeath) throw (ThreadDeath) t;
          }
        }
      }

      if (accept) {
        RawPacket[] pkts = createRawPacket(p);

        for (int i = 0; i < pkts.length; i++) {
          RawPacket pkt = pkts[i];

          pkts[i] = null;

          if (pkt != null) {
            if (pkt.isInvalid()) {
              /*
               * Return pkt to the pool because it is invalid and,
               * consequently, will not be made available to
               * reading.
               */
              poolRawPacket(pkt);
            } else {
              RawPacket oldPkt;

              synchronized (pktSyncRoot) {
                oldPkt = this.pkt;
                this.pkt = pkt;
              }
              if (oldPkt != null) {
                /*
                 * Return oldPkt to the pool because it was made
                 * available to reading and it was not read.
                 */
                poolRawPacket(oldPkt);
              }

              if (videoRecorder != null) videoRecorder.recordData(pkt);

              if ((transferHandler != null) && !closed) {
                try {
                  transferHandler.transferData(this);
                } catch (Throwable t) {
                  /*
                   * XXX We cannot allow transferHandler to
                   * kill us.
                   */
                  if (t instanceof ThreadDeath) {
                    throw (ThreadDeath) t;
                  } else {
                    logger.warn("An RTP packet may have not been" + " fully handled.", t);
                  }
                }
              }
            }
          }
        }
        rawPacketArrayPool.offer(pkts);
      }
    }
  }

  /**
   * Sets the <tt>transferHandler</tt> that this connector should be notifying when new data is
   * available for reading.
   *
   * @param transferHandler the <tt>transferHandler</tt> that this connector should be notifying
   *     when new data is available for reading.
   */
  public void setTransferHandler(SourceTransferHandler transferHandler) {
    if (!closed) this.transferHandler = transferHandler;
  }

  /**
   * Changes current thread priority.
   *
   * @param priority the new priority.
   */
  public void setPriority(int priority) {
    // currently no priority is set
    //        if (receiverThread != null)
    //            receiverThread.setPriority(priority);
  }

  /**
   * Gets the <tt>DatagramPacketFilter</tt>s which allow dropping <tt>DatagramPacket</tt>s before
   * they are converted into <tt>RawPacket</tt>s.
   *
   * @return the <tt>DatagramPacketFilter</tt>s which allow dropping <tt>DatagramPacket</tt>s before
   *     they are converted into <tt>RawPacket</tt>s.
   */
  public synchronized DatagramPacketFilter[] getDatagramPacketFilters() {
    return datagramPacketFilters;
  }

  /**
   * Adds a <tt>DatagramPacketFilter</tt> which allows dropping <tt>DatagramPacket</tt>s before they
   * are converted into <tt>RawPacket</tt>s.
   *
   * @param datagramPacketFilter the <tt>DatagramPacketFilter</tt> which allows dropping
   *     <tt>DatagramPacket</tt>s before they are converted into <tt>RawPacket</tt>s
   */
  public synchronized void addDatagramPacketFilter(DatagramPacketFilter datagramPacketFilter) {
    if (datagramPacketFilter == null) throw new NullPointerException("datagramPacketFilter");

    if (datagramPacketFilters == null) {
      datagramPacketFilters = new DatagramPacketFilter[] {datagramPacketFilter};
    } else {
      final int length = datagramPacketFilters.length;

      for (int i = 0; i < length; i++)
        if (datagramPacketFilter.equals(datagramPacketFilters[i])) return;

      DatagramPacketFilter[] newDatagramPacketFilters = new DatagramPacketFilter[length + 1];

      System.arraycopy(datagramPacketFilters, 0, newDatagramPacketFilters, 0, length);
      newDatagramPacketFilters[length] = datagramPacketFilter;
      datagramPacketFilters = newDatagramPacketFilters;
    }
  }

  /**
   * Enables or disables this <tt>RTPConnectorInputStream</tt>. While the stream is disabled, it
   * does not accept any packets.
   *
   * @param enabled <tt>true</tt> to enable, <tt>false</tt> to disable.
   */
  public void setEnabled(boolean enabled) {
    if (logger.isDebugEnabled()) logger.debug("setEnabled: " + enabled);

    this.enabled = enabled;
  }
}
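RTPConnectorInputStream is abstract: subclasses supply the actual transport by implementing receivePacket and doLogPacket, and are expected to start the receiver thread that drives run(). The sketch below is not part of the library; it shows a minimal UDP-backed subclass together with an added DatagramPacketFilter that drops datagrams shorter than the 12-byte fixed RTP header. The class name, socket, and port are made up for illustration.

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;

// Rough sketch of a concrete subclass; the real library ships its own UDP/TCP variants.
public class UdpInputStreamSketch extends RTPConnectorInputStream {
  private final DatagramSocket socket;

  public UdpInputStreamSketch(DatagramSocket socket) {
    this.socket = socket;

    // Start the receive loop implemented by RTPConnectorInputStream.run().
    receiverThread = new Thread(this, "UdpInputStreamSketch");
    receiverThread.setDaemon(true);
    receiverThread.start();
  }

  /** Blocks until a datagram arrives on the underlying socket. */
  @Override
  protected void receivePacket(DatagramPacket p) throws IOException {
    socket.receive(p);
  }

  /** Packet logging is out of scope for this sketch. */
  @Override
  protected void doLogPacket(DatagramPacket packet) {}

  public static void main(String[] args) throws IOException {
    // Port 5004 is arbitrary and only used for the sake of the example.
    UdpInputStreamSketch stream = new UdpInputStreamSketch(new DatagramSocket(5004));

    // Drop datagrams that are too short to carry even the fixed RTP header.
    stream.addDatagramPacketFilter(
        new DatagramPacketFilter() {
          public boolean accept(DatagramPacket p) {
            return p.getLength() >= 12;
          }
        });
  }
}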