/** Implements notification in order to track socket state. */
@Override
public synchronized void onSctpNotification(SctpSocket socket, SctpNotification notification)
{
    if (logger.isDebugEnabled())
    {
        logger.debug("socket=" + socket + "; notification=" + notification);
    }
    switch (notification.sn_type)
    {
    case SctpNotification.SCTP_ASSOC_CHANGE:
        SctpNotification.AssociationChange assocChange
            = (SctpNotification.AssociationChange) notification;

        switch (assocChange.state)
        {
        case SctpNotification.AssociationChange.SCTP_COMM_UP:
            if (!assocIsUp)
            {
                boolean wasReady = isReady();

                assocIsUp = true;
                if (isReady() && !wasReady)
                    notifySctpConnectionReady();
            }
            break;

        case SctpNotification.AssociationChange.SCTP_COMM_LOST:
        case SctpNotification.AssociationChange.SCTP_SHUTDOWN_COMP:
        case SctpNotification.AssociationChange.SCTP_CANT_STR_ASSOC:
            try
            {
                closeStream();
            }
            catch (IOException e)
            {
                logger.error("Error closing SCTP socket", e);
            }
            break;
        }
        break;
    }
}
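/*
 * Note: isReady() is not part of this excerpt. Judging from how it is used above and in
 * runOnDtlsTransport() below, it presumably reports whether the connection is usable by
 * combining the two state flags, e.g. something like
 * "return assocIsUp && acceptedIncomingConnection;", so that notifySctpConnectionReady()
 * fires exactly once, when the second of the two conditions becomes true.
 */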
private void runOnDtlsTransport(StreamConnector connector)
    throws IOException
{
    DtlsControlImpl dtlsControl
        = (DtlsControlImpl) getTransportManager().getDtlsControl(this);
    DtlsTransformEngine engine = dtlsControl.getTransformEngine();
    final DtlsPacketTransformer transformer
        = (DtlsPacketTransformer) engine.getRTPTransformer();

    byte[] receiveBuffer = new byte[SCTP_BUFFER_SIZE];

    if (LOG_SCTP_PACKETS)
    {
        System.setProperty(
            ConfigurationService.PNAME_SC_HOME_DIR_LOCATION,
            System.getProperty("java.io.tmpdir"));
        System.setProperty(
            ConfigurationService.PNAME_SC_HOME_DIR_NAME,
            SctpConnection.class.getName());
    }

    synchronized (this)
    {
        // FIXME local SCTP port is hardcoded in bridge offer SDP (Jitsi Meet)
        sctpSocket = Sctp.createSocket(5000);
        assocIsUp = false;
        acceptedIncomingConnection = false;
    }

    // Implement output network link for SCTP stack on DTLS transport
    sctpSocket.setLink(
        new NetworkLink()
        {
            @Override
            public void onConnOut(SctpSocket s, byte[] packet)
                throws IOException
            {
                if (LOG_SCTP_PACKETS)
                {
                    LibJitsi.getPacketLoggingService().logPacket(
                        PacketLoggingService.ProtocolName.ICE4J,
                        new byte[] { 0, 0, 0, (byte) debugId },
                        5000,
                        new byte[] { 0, 0, 0, (byte) (debugId + 1) },
                        remoteSctpPort,
                        PacketLoggingService.TransportName.UDP,
                        true,
                        packet);
                }

                // Send through DTLS transport
                transformer.sendApplicationData(packet, 0, packet.length);
            }
        });

    if (logger.isDebugEnabled())
    {
        logger.debug(
            "Connecting SCTP to port: " + remoteSctpPort + " to "
                + getEndpoint().getID());
    }

    sctpSocket.setNotificationListener(this);
    sctpSocket.listen();

    // FIXME manage threads
    threadPool.execute(
        new Runnable()
        {
            @Override
            public void run()
            {
                SctpSocket sctpSocket = null;

                try
                {
                    // sctpSocket is set to null on close
                    sctpSocket = SctpConnection.this.sctpSocket;
                    while (sctpSocket != null)
                    {
                        if (sctpSocket.accept())
                        {
                            acceptedIncomingConnection = true;
                            break;
                        }
                        Thread.sleep(100);
                        sctpSocket = SctpConnection.this.sctpSocket;
                    }
                    if (isReady())
                    {
                        notifySctpConnectionReady();
                    }
                }
                catch (Exception e)
                {
                    logger.error("Error accepting SCTP connection", e);
                }

                if (sctpSocket == null && logger.isInfoEnabled())
                {
                    logger.info(
                        "SctpConnection " + getID() + " closed"
                            + " before SctpSocket accept()-ed.");
                }
            }
        });

    // Notify that from now on SCTP connection is considered functional
    sctpSocket.setDataCallback(this);

    // Setup iceSocket
    DatagramSocket datagramSocket = connector.getDataSocket();
    if (datagramSocket != null)
    {
        this.iceSocket = new IceUdpSocketWrapper(datagramSocket);
    }
    else
    {
        this.iceSocket = new IceTcpSocketWrapper(connector.getDataTCPSocket());
    }

    DatagramPacket rcvPacket
        = new DatagramPacket(receiveBuffer, 0, receiveBuffer.length);

    // Receive loop, breaks when SCTP socket is closed
    try
    {
        do
        {
            iceSocket.receive(rcvPacket);

            RawPacket raw
                = new RawPacket(
                    rcvPacket.getData(),
                    rcvPacket.getOffset(),
                    rcvPacket.getLength());

            raw = transformer.reverseTransform(raw);
            // Check for app data
            if (raw == null)
                continue;

            if (LOG_SCTP_PACKETS)
            {
                LibJitsi.getPacketLoggingService().logPacket(
                    PacketLoggingService.ProtocolName.ICE4J,
                    new byte[] { 0, 0, 0, (byte) (debugId + 1) },
                    remoteSctpPort,
                    new byte[] { 0, 0, 0, (byte) debugId },
                    5000,
                    PacketLoggingService.TransportName.UDP,
                    false,
                    raw.getBuffer(), raw.getOffset(), raw.getLength());
            }

            // Pass network packet to SCTP stack
            sctpSocket.onConnIn(raw.getBuffer(), raw.getOffset(), raw.getLength());
        }
        while (true);
    }
    finally
    {
        // Eventually, close the socket although it should happen from
        // expire().
        synchronized (this)
        {
            assocIsUp = false;
            acceptedIncomingConnection = false;
            if (sctpSocket != null)
            {
                sctpSocket.close();
                sctpSocket = null;
            }
        }
    }
}
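/*
 * Summary of the packet flow wired up in runOnDtlsTransport() above:
 *
 *   outgoing: SCTP stack --NetworkLink.onConnOut()--> DtlsPacketTransformer.sendApplicationData()
 *   incoming: iceSocket.receive() --> DtlsPacketTransformer.reverseTransform()
 *                                 --> SctpSocket.onConnIn() --> SCTP stack
 *
 * The accept() loop on the thread pool and the SCTP_COMM_UP notification (see
 * onSctpNotification() above) together drive isReady(); notifySctpConnectionReady() is
 * invoked once both have completed.
 */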
/**
 * Handles control packet.
 *
 * @param data raw packet data that arrived on control PPID.
 * @param sid SCTP stream id on which the data has arrived.
 */
private synchronized void onCtrlPacket(byte[] data, int sid)
    throws IOException
{
    ByteBuffer buffer = ByteBuffer.wrap(data);
    int messageType = /* 1 byte unsigned integer */ 0xFF & buffer.get();

    if (messageType == MSG_CHANNEL_ACK)
    {
        if (logger.isDebugEnabled())
        {
            logger.debug(getEndpoint().getID() + " ACK received SID: " + sid);
        }
        // Open channel ACK
        WebRtcDataStream channel = channels.get(sid);

        if (channel != null)
        {
            // Ack check prevents from firing multiple notifications
            // if we get more than one ACKs (by mistake/bug).
            if (!channel.isAcknowledged())
            {
                channel.ackReceived();
                notifyChannelOpened(channel);
            }
            else
            {
                logger.warn("Redundant ACK received for SID: " + sid);
            }
        }
        else
        {
            logger.error("No channel exists on sid: " + sid);
        }
    }
    else if (messageType == MSG_OPEN_CHANNEL)
    {
        int channelType = /* 1 byte unsigned integer */ 0xFF & buffer.get();
        int priority = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
        long reliability = /* 4 bytes unsigned integer */ 0xFFFFFFFFL & buffer.getInt();
        int labelLength = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
        int protocolLength = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
        String label;
        String protocol;

        if (labelLength == 0)
        {
            label = "";
        }
        else
        {
            byte[] labelBytes = new byte[labelLength];

            buffer.get(labelBytes);
            label = new String(labelBytes, "UTF-8");
        }

        if (protocolLength == 0)
        {
            protocol = "";
        }
        else
        {
            byte[] protocolBytes = new byte[protocolLength];

            buffer.get(protocolBytes);
            protocol = new String(protocolBytes, "UTF-8");
        }

        if (logger.isDebugEnabled())
        {
            logger.debug(
                "!!! " + getEndpoint().getID()
                    + " data channel open request on SID: " + sid
                    + " type: " + channelType + " prio: " + priority
                    + " reliab: " + reliability + " label: " + label
                    + " proto: " + protocol);
        }

        if (channels.containsKey(sid))
        {
            logger.error("Channel on sid: " + sid + " already exists");
        }

        WebRtcDataStream newChannel
            = new WebRtcDataStream(sctpSocket, sid, label, true);

        channels.put(sid, newChannel);

        sendOpenChannelAck(sid);

        notifyChannelOpened(newChannel);
    }
    else
    {
        logger.error("Unexpected ctrl msg type: " + messageType);
    }
}
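/*
 * For reference, a minimal sketch of how a DATA_CHANNEL_OPEN message matching the parser
 * above could be assembled. The field order and widths mirror onCtrlPacket(); the helper
 * name is illustrative and not part of the original class. The corresponding ACK, as
 * parsed above, is a single MSG_CHANNEL_ACK byte.
 */
private static byte[] buildOpenChannelMessageSketch(
        int channelType, int priority, long reliability, String label, String protocol)
    throws IOException
{
    byte[] labelBytes = label.getBytes("UTF-8");
    byte[] protocolBytes = protocol.getBytes("UTF-8");
    // 12-byte fixed header: type(1) + channel type(1) + priority(2) + reliability(4)
    // + label length(2) + protocol length(2), followed by the label and protocol bytes.
    ByteBuffer buffer
        = ByteBuffer.allocate(12 + labelBytes.length + protocolBytes.length);

    buffer.put((byte) MSG_OPEN_CHANNEL);
    buffer.put((byte) channelType);
    buffer.putShort((short) priority);
    buffer.putInt((int) reliability);
    buffer.putShort((short) labelBytes.length);
    buffer.putShort((short) protocolBytes.length);
    buffer.put(labelBytes);
    buffer.put(protocolBytes);

    return buffer.array();
}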
/**
 * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
 * the <tt>Processor</tt>s that this instance uses to transcode media.
 *
 * @param ev the event to handle.
 */
public void controllerUpdate(ControllerEvent ev)
{
    if (ev == null || ev.getSourceController() == null)
    {
        return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null)
    {
        logger.warn("Event from an orphaned processor, ignoring: " + ev);
        return;
    }

    if (ev instanceof ConfigureCompleteEvent)
    {
        if (logger.isInfoEnabled())
        {
            logger.info(
                "Configured processor for ReceiveStream ssrc=" + desc.ssrc
                    + " (" + desc.format + ")" + " " + System.currentTimeMillis());
        }

        boolean audio = desc.format instanceof AudioFormat;

        if (audio)
        {
            ContentDescriptor cd
                = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);

            if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd))
            {
                logger.error(
                    "Failed to set the Processor content descriptor to "
                        + AUDIO_CONTENT_DESCRIPTOR + ". Actual result: " + cd);
                removeReceiveStream(desc, false);
                return;
            }
        }

        for (TrackControl track : processor.getTrackControls())
        {
            Format trackFormat = track.getFormat();

            if (audio)
            {
                final long ssrc = desc.ssrc;
                SilenceEffect silenceEffect;

                if (Constants.OPUS_RTP.equals(desc.format.getEncoding()))
                {
                    silenceEffect = new SilenceEffect(48000);
                }
                else
                {
                    // We haven't tested that the RTP timestamps survive
                    // the journey through the chain when codecs other than
                    // opus are in use, so for the moment we rely on FMJ's
                    // timestamps for non-opus formats.
                    silenceEffect = new SilenceEffect();
                }

                silenceEffect.setListener(
                    new SilenceEffect.Listener()
                    {
                        boolean first = true;

                        @Override
                        public void onSilenceNotInserted(long timestamp)
                        {
                            if (first)
                            {
                                first = false;
                                // send event only
                                audioRecordingStarted(ssrc, timestamp);
                            }
                            else
                            {
                                // change file and send event
                                resetRecording(ssrc, timestamp);
                            }
                        }
                    });
                desc.silenceEffect = silenceEffect;

                AudioLevelEffect audioLevelEffect = new AudioLevelEffect();

                audioLevelEffect.setAudioLevelListener(
                    new SimpleAudioLevelListener()
                    {
                        @Override
                        public void audioLevelChanged(int level)
                        {
                            activeSpeakerDetector.levelChanged(ssrc, level);
                        }
                    });

                try
                {
                    // We add an effect, which will insert "silence" in
                    // place of lost packets.
                    track.setCodecChain(new Codec[] { silenceEffect, audioLevelEffect });
                }
                catch (UnsupportedPlugInException upie)
                {
                    logger.warn("Failed to insert silence effect: " + upie);
                    // But do go on, a recording without extra silence is
                    // better than nothing ;)
                }
            }
            else
            {
                // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
                if (trackFormat.matches(vp8RtpFormat))
                    track.setFormat(vp8Format);
                else
                {
                    logger.error(
                        "Unsupported track format: " + trackFormat
                            + " for ssrc=" + desc.ssrc);
                    // we currently only support vp8
                    removeReceiveStream(desc, false);
                    return;
                }
            }
        }

        processor.realize();
    }
    else if (ev instanceof RealizeCompleteEvent)
    {
        desc.dataSource = processor.getDataOutput();

        long ssrc = desc.ssrc;
        boolean audio = desc.format instanceof AudioFormat;
        String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

        // XXX '\' on windows?
        String filename = getNextFilename(path + "/" + ssrc, suffix);

        desc.filename = filename;

        DataSink dataSink;

        if (audio)
        {
            try
            {
                dataSink
                    = Manager.createDataSink(
                        desc.dataSource,
                        new MediaLocator("file:" + filename));
            }
            catch (NoDataSinkException ndse)
            {
                logger.error("Could not create DataSink: " + ndse);
                removeReceiveStream(desc, false);
                return;
            }
        }
        else
        {
            dataSink = new WebmDataSink(filename, desc.dataSource);
        }

        if (logger.isInfoEnabled())
            logger.info(
                "Created DataSink (" + dataSink + ") for SSRC=" + ssrc
                    + ". Output filename: " + filename);

        try
        {
            dataSink.open();
        }
        catch (IOException e)
        {
            logger.error(
                "Failed to open DataSink (" + dataSink + ") for SSRC=" + ssrc
                    + ": " + e);
            removeReceiveStream(desc, false);
            return;
        }

        if (!audio)
        {
            final WebmDataSink webmDataSink = (WebmDataSink) dataSink;

            webmDataSink.setSsrc(ssrc);
            webmDataSink.setEventHandler(eventHandler);
            webmDataSink.setKeyFrameControl(
                new KeyFrameControlAdapter()
                {
                    @Override
                    public boolean requestKeyFrame(boolean urgent)
                    {
                        return requestFIR(webmDataSink);
                    }
                });
        }

        try
        {
            dataSink.start();
        }
        catch (IOException e)
        {
            logger.error(
                "Failed to start DataSink (" + dataSink + ") for SSRC=" + ssrc
                    + ". " + e);
            removeReceiveStream(desc, false);
            return;
        }

        if (logger.isInfoEnabled())
            logger.info("Started DataSink for SSRC=" + ssrc);

        desc.dataSink = dataSink;

        processor.start();
    }
    else if (logger.isDebugEnabled())
    {
        logger.debug(
            "Unhandled ControllerEvent from the Processor for ssrc="
                + desc.ssrc + ": " + ev);
    }
}
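/*
 * The two branches above follow the standard JMF processor lifecycle: configure() leads to
 * ConfigureCompleteEvent (track formats and the content descriptor are set, then realize()
 * is called), and realize() leads to RealizeCompleteEvent (the data output becomes
 * available, a DataSink is attached, then start() is called). Below is a minimal,
 * self-contained sketch of that generic pattern; the class name is illustrative and not
 * part of the original source, and it relies on the javax.media types already imported
 * by this file.
 */
private static class ProcessorLifecycleSketch implements ControllerListener
{
    @Override
    public void controllerUpdate(ControllerEvent ev)
    {
        Processor processor = (Processor) ev.getSourceController();

        if (ev instanceof ConfigureCompleteEvent)
        {
            // Set track formats and the content descriptor here, then:
            processor.realize();
        }
        else if (ev instanceof RealizeCompleteEvent)
        {
            // processor.getDataOutput() is now available; attach a DataSink, then:
            processor.start();
        }
    }
}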
/**
 * Starts the execution of the neomedia bundle in the specified context.
 *
 * @param bundleContext the context in which the neomedia bundle is to start executing
 * @throws Exception if an error occurs while starting the execution of the neomedia bundle
 * in the specified context
 */
public void start(BundleContext bundleContext)
    throws Exception
{
    if (logger.isDebugEnabled())
        logger.debug("Started.");

    NeomediaActivator.bundleContext = bundleContext;

    // MediaService
    mediaServiceImpl = (MediaServiceImpl) LibJitsi.getMediaService();

    bundleContext.registerService(MediaService.class.getName(), mediaServiceImpl, null);
    if (logger.isDebugEnabled())
        logger.debug("Media Service ... [REGISTERED]");

    // mediaConfiguration = new MediaConfigurationImpl();
    // bundleContext.registerService(
    //         MediaConfigurationService.class.getName(),
    //         getMediaConfiguration(),
    //         null);
    if (logger.isDebugEnabled())
        logger.debug("Media Configuration ... [REGISTERED]");

    ConfigurationService cfg = NeomediaActivator.getConfigurationService();
    Dictionary<String, String> mediaProps = new Hashtable<String, String>();

    mediaProps.put(ConfigurationForm.FORM_TYPE, ConfigurationForm.GENERAL_TYPE);

    // If the audio configuration form is disabled don't register it.
    // if ((cfg == null) || !cfg.getBoolean(AUDIO_CONFIG_DISABLED_PROP, false))
    // {
    //     audioConfigurationForm
    //         = new LazyConfigurationForm(
    //                 AudioConfigurationPanel.class.getName(),
    //                 getClass().getClassLoader(),
    //                 "plugin.mediaconfig.AUDIO_ICON",
    //                 "impl.neomedia.configform.AUDIO",
    //                 3);
    //
    //     bundleContext.registerService(
    //             ConfigurationForm.class.getName(),
    //             audioConfigurationForm,
    //             mediaProps);
    //
    //     if (deviceConfigurationPropertyChangeListener == null)
    //     {
    //         // Initializes and registers the changed device configuration
    //         // event to the notification service.
    //         getNotificationService();
    //
    //         deviceConfigurationPropertyChangeListener
    //             = new AudioDeviceConfigurationListener();
    //         mediaServiceImpl
    //             .getDeviceConfiguration()
    //                 .addPropertyChangeListener(
    //                         deviceConfigurationPropertyChangeListener);
    //     }
    // }

    // If the video configuration form is disabled don't register it.
    // if ((cfg == null) || !cfg.getBoolean(VIDEO_CONFIG_DISABLED_PROP, false))
    // {
    //     bundleContext.registerService(
    //             ConfigurationForm.class.getName(),
    //             new LazyConfigurationForm(
    //                     VideoConfigurationPanel.class.getName(),
    //                     getClass().getClassLoader(),
    //                     "plugin.mediaconfig.VIDEO_ICON",
    //                     "impl.neomedia.configform.VIDEO",
    //                     4),
    //             mediaProps);
    // }

    // H.264
    // If the H.264 configuration form is disabled don't register it.
    // if ((cfg == null) || !cfg.getBoolean(H264_CONFIG_DISABLED_PROP, false))
    // {
    //     Dictionary<String, String> h264Props
    //         = new Hashtable<String, String>();
    //
    //     h264Props.put(
    //             ConfigurationForm.FORM_TYPE,
    //             ConfigurationForm.ADVANCED_TYPE);
    //     bundleContext.registerService(
    //             ConfigurationForm.class.getName(),
    //             new LazyConfigurationForm(
    //                     ConfigurationPanel.class.getName(),
    //                     getClass().getClassLoader(),
    //                     "plugin.mediaconfig.VIDEO_ICON",
    //                     "impl.neomedia.configform.H264",
    //                     -1,
    //                     true),
    //             h264Props);
    // }

    // ZRTP
    // If the ZRTP configuration form is disabled don't register it.
    // if ((cfg == null) || !cfg.getBoolean(ZRTP_CONFIG_DISABLED_PROP, false))
    // {
    //     Dictionary<String, String> securityProps
    //         = new Hashtable<String, String>();
    //
    //     securityProps.put(
    //             ConfigurationForm.FORM_TYPE,
    //             ConfigurationForm.SECURITY_TYPE);
    //     bundleContext.registerService(
    //             ConfigurationForm.class.getName(),
    //             new LazyConfigurationForm(
    //                     SecurityConfigForm.class.getName(),
    //                     getClass().getClassLoader(),
    //                     "impl.media.security.zrtp.CONF_ICON",
    //                     "impl.media.security.zrtp.TITLE",
    //                     0),
    //             securityProps);
    // }

    // We use the nist-sdp stack to parse SDP, and we need to set the following
    // property to make sure that it accepts Java-generated IPv6 addresses that
    // contain address scope zones.
    System.setProperty("gov.nist.core.STRIP_ADDR_SCOPES", "true");

    // AudioNotifierService
    AudioNotifierService audioNotifierService = LibJitsi.getAudioNotifierService();

    audioNotifierService.setMute(
        (cfg == null)
            || !cfg.getBoolean(
                    "net.java.sip.communicator.impl.sound.isSoundEnabled",
                    true));
    bundleContext.registerService(
        AudioNotifierService.class.getName(),
        audioNotifierService,
        null);

    if (logger.isInfoEnabled())
        logger.info("Audio Notifier Service ...[REGISTERED]");

    // Call Recording
    // If the call recording configuration form is disabled don't continue.
    // if ((cfg == null)
    //         || !cfg.getBoolean(CALL_RECORDING_CONFIG_DISABLED_PROP, false))
    // {
    //     Dictionary<String, String> callRecordingProps
    //         = new Hashtable<String, String>();
    //
    //     callRecordingProps.put(
    //             ConfigurationForm.FORM_TYPE,
    //             ConfigurationForm.ADVANCED_TYPE);
    //     bundleContext.registerService(
    //             ConfigurationForm.class.getName(),
    //             new LazyConfigurationForm(
    //                     CallRecordingConfigForm.class.getName(),
    //                     getClass().getClassLoader(),
    //                     null,
    //                     "plugin.callrecordingconfig.CALL_RECORDING_CONFIG",
    //                     1100,
    //                     true),
    //             callRecordingProps);
    // }
}
/**
 * Enables or disables this <tt>RTPConnectorInputStream</tt>. While the stream is disabled, it
 * does not accept any packets.
 *
 * @param enabled <tt>true</tt> to enable, <tt>false</tt> to disable.
 */
public void setEnabled(boolean enabled)
{
    if (logger.isDebugEnabled())
        logger.debug("setEnabled: " + enabled);

    this.enabled = enabled;
}