/**
 * Sets the time in milliseconds of the last activity related to this <tt>Conference</tt> to
 * the current system time.
 */
public void touch()
{
    long now = System.currentTimeMillis();

    synchronized (this)
    {
        if (getLastActivityTime() < now)
            lastActivityTime = now;
    }
}
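// touch() pairs with getLastActivityTime() to support idle-expiry sweeps. A minimal,
// hypothetical sketch of such a sweep (EXPIRE_TIMEOUT_MS and expire() are illustrative
// assumptions, not members shown here):
//
//     if (System.currentTimeMillis() - conference.getLastActivityTime() > EXPIRE_TIMEOUT_MS)
//         conference.expire();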
/**
 * Copies the content of the most recently received packet into <tt>data</tt>.
 *
 * @param buffer an optional <tt>Buffer</tt> instance associated with the specified
 * <tt>data</tt>, <tt>offset</tt> and <tt>length</tt> and provided to the method in case the
 * implementation would like to provide additional <tt>Buffer</tt> properties such as
 * <tt>flags</tt>
 * @param data the <tt>byte[]</tt> that we'd like to copy the content of the packet to.
 * @param offset the position where we are supposed to start writing in <tt>data</tt>.
 * @param length the number of <tt>byte</tt>s available for writing in <tt>data</tt>.
 * @return the number of bytes read, or <tt>-1</tt> if an I/O error has been detected
 * @throws IOException if <tt>length</tt> is less than the size of the packet.
 */
protected int read(Buffer buffer, byte[] data, int offset, int length)
    throws IOException
{
    if (data == null)
        throw new NullPointerException("data");

    if (ioError)
        return -1;

    RawPacket pkt;

    synchronized (pktSyncRoot)
    {
        pkt = this.pkt;
        this.pkt = null;
    }

    int pktLength;

    if (pkt == null)
    {
        pktLength = 0;
    }
    else
    {
        // By default, pkt will be returned to the pool after it has been read.
        boolean poolPkt = true;

        try
        {
            pktLength = pkt.getLength();
            if (length < pktLength)
            {
                /*
                 * If pkt is still the latest RawPacket made available to reading, reinstate
                 * it for the next invocation of read; otherwise, return it to the pool.
                 */
                poolPkt = false;
                throw new IOException("Input buffer not big enough for " + pktLength);
            }
            else
            {
                byte[] pktBuffer = pkt.getBuffer();

                if (pktBuffer == null)
                {
                    throw new NullPointerException(
                        "pkt.buffer null, pkt.length " + pktLength
                            + ", pkt.offset " + pkt.getOffset());
                }
                else
                {
                    System.arraycopy(
                        pkt.getBuffer(), pkt.getOffset(),
                        data, offset,
                        pktLength);
                    if (buffer != null)
                        buffer.setFlags(pkt.getFlags());
                }
            }
        }
        finally
        {
            if (!poolPkt)
            {
                synchronized (pktSyncRoot)
                {
                    if (this.pkt == null)
                        this.pkt = pkt;
                    else
                        poolPkt = true;
                }
            }
            if (poolPkt)
            {
                // Return pkt to the pool because it was successfully read.
                poolRawPacket(pkt);
            }
        }
    }
    return pktLength;
}
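// Callers are expected to size the destination for a whole packet; otherwise read()
// reinstates the packet and throws. A hypothetical polling sketch (the stream reference and
// the 1500-byte MTU sizing are illustrative assumptions):
//
//     byte[] data = new byte[1500];
//     int len = stream.read(null, data, 0, data.length);
//     if (len > 0)
//         process(data, 0, len);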
/**
 * Writes <tt>length</tt> bytes from <tt>buffer</tt> (starting at <tt>offset</tt>) into a
 * reusable <tt>RawPacket</tt>, optionally runs it through the reverse
 * <tt>PacketTransformer</tt>, and hands the result to the associated
 * <tt>SourceTransferHandler</tt> as the pending packet of this stream.
 *
 * @return the number of bytes accepted, i.e. <tt>length</tt>
 */
public int write(byte[] buffer, int offset, int length, boolean transform)
{
    RawPacket pkt = rawPacketArray[0];

    if (pkt == null)
        pkt = new RawPacket();
    rawPacketArray[0] = pkt;

    byte[] pktBuf = pkt.getBuffer();

    if (pktBuf == null || pktBuf.length < length)
    {
        pktBuf = new byte[length];
        pkt.setBuffer(pktBuf);
    }
    System.arraycopy(buffer, offset, pktBuf, 0, length);
    pkt.setOffset(0);
    pkt.setLength(length);

    if (transform)
    {
        PacketTransformer packetTransformer
            = isControlStream ? rtcpPacketTransformer : rtpPacketTransformer;

        if (packetTransformer != null)
            rawPacketArray = packetTransformer.reverseTransform(rawPacketArray);
    }

    SourceTransferHandler transferHandler;
    PushSourceStream pushSourceStream;

    try
    {
        if (isControlStream)
        {
            transferHandler = controlTransferHandler;
            pushSourceStream = getControlInputStream();
        }
        else
        {
            transferHandler = dataTransferHandler;
            pushSourceStream = getDataInputStream();
        }
    }
    catch (IOException ioe)
    {
        throw new UndeclaredThrowableException(ioe);
    }

    for (int i = 0; i < rawPacketArray.length; i++)
    {
        RawPacket packet = rawPacketArray[i];

        // Keep the first element for reuse.
        if (i != 0)
            rawPacketArray[i] = null;

        if (packet != null)
        {
            if (isControlStream)
                pendingControlPacket = packet;
            else
                pendingDataPacket = packet;

            if (transferHandler != null)
                transferHandler.transferData(pushSourceStream);
        }
    }
    return length;
}
/**
 * Implements {@link ActiveSpeakerChangedListener#activeSpeakerChanged(long)}. Notifies this
 * <tt>RecorderRtpImpl</tt> that the audio <tt>ReceiveStream</tt> considered active has
 * changed, and that the new active stream has SSRC <tt>ssrc</tt>.
 *
 * @param ssrc the SSRC of the new active stream.
 */
@Override
public void activeSpeakerChanged(long ssrc)
{
    if (eventHandler != null)
    {
        RecorderEvent e = new RecorderEvent();

        e.setAudioSsrc(ssrc);
        // TODO: how do we time this?
        e.setInstant(System.currentTimeMillis());
        e.setType(RecorderEvent.Type.SPEAKER_CHANGED);
        // Note: the event is tagged VIDEO even though the trigger is audio, presumably
        // because SPEAKER_CHANGED events are consumed when assembling the video side of the
        // recording.
        e.setMediaType(MediaType.VIDEO);
        eventHandler.handleEvent(e);
    }
}
static
{
    ConfigurationService cfg = LibJitsi.getConfigurationService();
    boolean dropUnencryptedPkts = false;

    if (cfg == null)
    {
        String s = System.getProperty(DROP_UNENCRYPTED_PKTS_PNAME);

        if (s != null)
            dropUnencryptedPkts = Boolean.parseBoolean(s);
    }
    else
    {
        dropUnencryptedPkts
            = cfg.getBoolean(DROP_UNENCRYPTED_PKTS_PNAME, dropUnencryptedPkts);
    }
    DROP_UNENCRYPTED_PKTS = dropUnencryptedPkts;
}
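// Because the fallback reads a plain system property, the flag can still be forced when no
// ConfigurationService is available, provided the property is set before this class
// initializes. A minimal sketch (the property key is whatever DROP_UNENCRYPTED_PKTS_PNAME
// resolves to):
//
//     System.setProperty(DROP_UNENCRYPTED_PKTS_PNAME, "true");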
/**
 * Adds a <tt>DatagramPacketFilter</tt> which allows dropping <tt>DatagramPacket</tt>s before
 * they are converted into <tt>RawPacket</tt>s.
 *
 * @param datagramPacketFilter the <tt>DatagramPacketFilter</tt> which allows dropping
 * <tt>DatagramPacket</tt>s before they are converted into <tt>RawPacket</tt>s
 */
public synchronized void addDatagramPacketFilter(DatagramPacketFilter datagramPacketFilter)
{
    if (datagramPacketFilter == null)
        throw new NullPointerException("datagramPacketFilter");

    if (datagramPacketFilters == null)
    {
        datagramPacketFilters = new DatagramPacketFilter[] { datagramPacketFilter };
    }
    else
    {
        final int length = datagramPacketFilters.length;

        for (int i = 0; i < length; i++)
        {
            if (datagramPacketFilter.equals(datagramPacketFilters[i]))
                return;
        }

        DatagramPacketFilter[] newDatagramPacketFilters
            = new DatagramPacketFilter[length + 1];

        System.arraycopy(datagramPacketFilters, 0, newDatagramPacketFilters, 0, length);
        newDatagramPacketFilters[length] = datagramPacketFilter;
        datagramPacketFilters = newDatagramPacketFilters;
    }
}
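// The filter array grows copy-on-write, so readers can iterate it without holding the lock.
// A usage sketch, assuming ice4j's DatagramPacketFilter with its single
// accept(DatagramPacket) method:
//
//     addDatagramPacketFilter(new DatagramPacketFilter()
//     {
//         @Override
//         public boolean accept(DatagramPacket p)
//         {
//             return p.getLength() > 0; // drop empty datagrams before RawPacket conversion
//         }
//     });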
@Override
public int read(byte[] buffer, int offset, int length)
    throws IOException
{
    RawPacket pendingPacket;

    if (isControlStream)
        pendingPacket = pendingControlPacket;
    else
        pendingPacket = pendingDataPacket;

    int bytesToRead = 0;
    byte[] pendingPacketBuffer = pendingPacket.getBuffer();

    if (pendingPacketBuffer != null)
    {
        int pendingPacketLength = pendingPacket.getLength();

        bytesToRead = length > pendingPacketLength ? pendingPacketLength : length;
        System.arraycopy(
            pendingPacketBuffer, pendingPacket.getOffset(),
            buffer, offset,
            bytesToRead);
    }
    return bytesToRead;
}
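// This read() is the consuming end of the write() method above: write() parks the
// (optionally reverse-transformed) packet in pendingControlPacket/pendingDataPacket and
// fires SourceTransferHandler.transferData(), whose handler ultimately calls this read() to
// pull the pending packet out of the PushSourceStream.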
private void runOnDtlsTransport(StreamConnector connector)
    throws IOException
{
    DtlsControlImpl dtlsControl
        = (DtlsControlImpl) getTransportManager().getDtlsControl(this);
    DtlsTransformEngine engine = dtlsControl.getTransformEngine();
    final DtlsPacketTransformer transformer
        = (DtlsPacketTransformer) engine.getRTPTransformer();

    byte[] receiveBuffer = new byte[SCTP_BUFFER_SIZE];

    if (LOG_SCTP_PACKETS)
    {
        System.setProperty(
            ConfigurationService.PNAME_SC_HOME_DIR_LOCATION,
            System.getProperty("java.io.tmpdir"));
        System.setProperty(
            ConfigurationService.PNAME_SC_HOME_DIR_NAME,
            SctpConnection.class.getName());
    }

    synchronized (this)
    {
        // FIXME The local SCTP port is hardcoded in the bridge's offer SDP (Jitsi Meet).
        sctpSocket = Sctp.createSocket(5000);
        assocIsUp = false;
        acceptedIncomingConnection = false;
    }

    // Implement the output network link for the SCTP stack on top of the DTLS transport.
    sctpSocket.setLink(new NetworkLink()
    {
        @Override
        public void onConnOut(SctpSocket s, byte[] packet)
            throws IOException
        {
            if (LOG_SCTP_PACKETS)
            {
                LibJitsi.getPacketLoggingService().logPacket(
                    PacketLoggingService.ProtocolName.ICE4J,
                    new byte[] { 0, 0, 0, (byte) debugId },
                    5000,
                    new byte[] { 0, 0, 0, (byte) (debugId + 1) },
                    remoteSctpPort,
                    PacketLoggingService.TransportName.UDP,
                    true,
                    packet);
            }

            // Send through the DTLS transport.
            transformer.sendApplicationData(packet, 0, packet.length);
        }
    });

    if (logger.isDebugEnabled())
    {
        logger.debug(
            "Connecting SCTP to port: " + remoteSctpPort + " to " + getEndpoint().getID());
    }

    sctpSocket.setNotificationListener(this);
    sctpSocket.listen();

    // FIXME manage threads
    threadPool.execute(new Runnable()
    {
        @Override
        public void run()
        {
            SctpSocket sctpSocket = null;

            try
            {
                // sctpSocket is set to null on close.
                sctpSocket = SctpConnection.this.sctpSocket;
                while (sctpSocket != null)
                {
                    if (sctpSocket.accept())
                    {
                        acceptedIncomingConnection = true;
                        break;
                    }
                    Thread.sleep(100);
                    sctpSocket = SctpConnection.this.sctpSocket;
                }
                if (isReady())
                    notifySctpConnectionReady();
            }
            catch (Exception e)
            {
                logger.error("Error accepting SCTP connection", e);
            }

            if (sctpSocket == null && logger.isInfoEnabled())
            {
                logger.info(
                    "SctpConnection " + getID() + " closed before SctpSocket accept()-ed.");
            }
        }
    });

    // Notify that from now on the SCTP connection is considered functional.
    sctpSocket.setDataCallback(this);

    // Set up iceSocket.
    DatagramSocket datagramSocket = connector.getDataSocket();

    if (datagramSocket != null)
        this.iceSocket = new IceUdpSocketWrapper(datagramSocket);
    else
        this.iceSocket = new IceTcpSocketWrapper(connector.getDataTCPSocket());

    DatagramPacket rcvPacket = new DatagramPacket(receiveBuffer, 0, receiveBuffer.length);

    // Receive loop; breaks when the SCTP socket is closed.
    try
    {
        do
        {
            iceSocket.receive(rcvPacket);

            RawPacket raw
                = new RawPacket(
                    rcvPacket.getData(), rcvPacket.getOffset(), rcvPacket.getLength());

            raw = transformer.reverseTransform(raw);
            // Check for app data.
            if (raw == null)
                continue;

            if (LOG_SCTP_PACKETS)
            {
                LibJitsi.getPacketLoggingService().logPacket(
                    PacketLoggingService.ProtocolName.ICE4J,
                    new byte[] { 0, 0, 0, (byte) (debugId + 1) },
                    remoteSctpPort,
                    new byte[] { 0, 0, 0, (byte) debugId },
                    5000,
                    PacketLoggingService.TransportName.UDP,
                    false,
                    raw.getBuffer(), raw.getOffset(), raw.getLength());
            }

            // Pass the network packet to the SCTP stack.
            sctpSocket.onConnIn(raw.getBuffer(), raw.getOffset(), raw.getLength());
        }
        while (true);
    }
    finally
    {
        // Eventually, close the socket, although it should happen from expire().
        synchronized (this)
        {
            assocIsUp = false;
            acceptedIncomingConnection = false;
            if (sctpSocket != null)
            {
                sctpSocket.close();
                sctpSocket = null;
            }
        }
    }
}
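// Data path recap, as wired above: inbound datagrams from iceSocket are unwrapped by the
// DTLS transformer (reverseTransform) and fed to the SCTP stack via onConnIn(), while
// outbound SCTP chunks surface in NetworkLink.onConnOut() and are wrapped and sent with
// transformer.sendApplicationData(). Both directions use the hardcoded local port 5000
// flagged by the FIXME above.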
/**
 * Initializes a new <tt>SRTPTransformer</tt> instance with a specific (negotiated)
 * <tt>SRTPProtectionProfile</tt> and the keying material specified by a specific
 * <tt>TlsContext</tt>.
 *
 * @param srtpProtectionProfile the (negotiated) <tt>SRTPProtectionProfile</tt> to initialize
 * the new instance with
 * @param tlsContext the <tt>TlsContext</tt> which represents the keying material
 * @return a new <tt>SRTPTransformer</tt> instance initialized with
 * <tt>srtpProtectionProfile</tt> and <tt>tlsContext</tt>
 */
private SinglePacketTransformer initializeSRTPTransformer(
    int srtpProtectionProfile,
    TlsContext tlsContext)
{
    boolean rtcp;

    switch (componentID)
    {
    case Component.RTCP:
        rtcp = true;
        break;
    case Component.RTP:
        rtcp = false;
        break;
    default:
        throw new IllegalStateException("componentID");
    }

    int cipher_key_length;
    int cipher_salt_length;
    int cipher;
    int auth_function;
    int auth_key_length;
    int RTCP_auth_tag_length, RTP_auth_tag_length;

    switch (srtpProtectionProfile)
    {
    case SRTPProtectionProfile.SRTP_AES128_CM_HMAC_SHA1_32:
        cipher_key_length = 128 / 8;
        cipher_salt_length = 112 / 8;
        cipher = SRTPPolicy.AESCM_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = 80 / 8;
        RTP_auth_tag_length = 32 / 8;
        break;
    case SRTPProtectionProfile.SRTP_AES128_CM_HMAC_SHA1_80:
        cipher_key_length = 128 / 8;
        cipher_salt_length = 112 / 8;
        cipher = SRTPPolicy.AESCM_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = RTP_auth_tag_length = 80 / 8;
        break;
    case SRTPProtectionProfile.SRTP_NULL_HMAC_SHA1_32:
        cipher_key_length = 0;
        cipher_salt_length = 0;
        cipher = SRTPPolicy.NULL_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = 80 / 8;
        RTP_auth_tag_length = 32 / 8;
        break;
    case SRTPProtectionProfile.SRTP_NULL_HMAC_SHA1_80:
        cipher_key_length = 0;
        cipher_salt_length = 0;
        cipher = SRTPPolicy.NULL_ENCRYPTION;
        auth_function = SRTPPolicy.HMACSHA1_AUTHENTICATION;
        auth_key_length = 160 / 8;
        RTCP_auth_tag_length = RTP_auth_tag_length = 80 / 8;
        break;
    default:
        throw new IllegalArgumentException("srtpProtectionProfile");
    }

    byte[] keyingMaterial
        = tlsContext.exportKeyingMaterial(
            ExporterLabel.dtls_srtp,
            null,
            2 * (cipher_key_length + cipher_salt_length));
    byte[] client_write_SRTP_master_key = new byte[cipher_key_length];
    byte[] server_write_SRTP_master_key = new byte[cipher_key_length];
    byte[] client_write_SRTP_master_salt = new byte[cipher_salt_length];
    byte[] server_write_SRTP_master_salt = new byte[cipher_salt_length];
    byte[][] keyingMaterialValues
        = {
            client_write_SRTP_master_key,
            server_write_SRTP_master_key,
            client_write_SRTP_master_salt,
            server_write_SRTP_master_salt
        };

    for (int i = 0, keyingMaterialOffset = 0; i < keyingMaterialValues.length; i++)
    {
        byte[] keyingMaterialValue = keyingMaterialValues[i];

        System.arraycopy(
            keyingMaterial, keyingMaterialOffset,
            keyingMaterialValue, 0,
            keyingMaterialValue.length);
        keyingMaterialOffset += keyingMaterialValue.length;
    }

    SRTPPolicy srtcpPolicy
        = new SRTPPolicy(
            cipher, cipher_key_length,
            auth_function, auth_key_length,
            RTCP_auth_tag_length,
            cipher_salt_length);
    SRTPPolicy srtpPolicy
        = new SRTPPolicy(
            cipher, cipher_key_length,
            auth_function, auth_key_length,
            RTP_auth_tag_length,
            cipher_salt_length);
    SRTPContextFactory clientSRTPContextFactory
        = new SRTPContextFactory(
            /* sender */ tlsContext instanceof TlsClientContext,
            client_write_SRTP_master_key,
            client_write_SRTP_master_salt,
            srtpPolicy,
            srtcpPolicy);
    SRTPContextFactory serverSRTPContextFactory
        = new SRTPContextFactory(
            /* sender */ tlsContext instanceof TlsServerContext,
            server_write_SRTP_master_key,
            server_write_SRTP_master_salt,
            srtpPolicy,
            srtcpPolicy);
    SRTPContextFactory forwardSRTPContextFactory;
    SRTPContextFactory reverseSRTPContextFactory;

    if (tlsContext instanceof TlsClientContext)
    {
        forwardSRTPContextFactory = clientSRTPContextFactory;
        reverseSRTPContextFactory = serverSRTPContextFactory;
    }
    else if (tlsContext instanceof TlsServerContext)
    {
        forwardSRTPContextFactory = serverSRTPContextFactory;
        reverseSRTPContextFactory = clientSRTPContextFactory;
    }
    else
    {
        throw new IllegalArgumentException("tlsContext");
    }

    SinglePacketTransformer srtpTransformer;

    if (rtcp)
    {
        srtpTransformer
            = new SRTCPTransformer(forwardSRTPContextFactory, reverseSRTPContextFactory);
    }
    else
    {
        srtpTransformer
            = new SRTPTransformer(forwardSRTPContextFactory, reverseSRTPContextFactory);
    }
    return srtpTransformer;
}
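// Per RFC 5764 (Section 4.2), the DTLS-SRTP exported keying material is laid out as
// client_write_SRTP_master_key | server_write_SRTP_master_key |
// client_write_SRTP_master_salt | server_write_SRTP_master_salt,
// which is exactly the order in which keyingMaterialValues is sliced above. The forward
// (sending) factory is built from the local side's key and salt, and the reverse factory
// from the peer's, hence the client/server swap based on the TlsContext type.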
/**
 * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events
 * from the <tt>Processor</tt>s that this instance uses to transcode media.
 *
 * @param ev the event to handle.
 */
public void controllerUpdate(ControllerEvent ev)
{
    if (ev == null || ev.getSourceController() == null)
        return;

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null)
    {
        logger.warn("Event from an orphaned processor, ignoring: " + ev);
        return;
    }

    if (ev instanceof ConfigureCompleteEvent)
    {
        if (logger.isInfoEnabled())
        {
            logger.info(
                "Configured processor for ReceiveStream ssrc=" + desc.ssrc
                    + " (" + desc.format + ") " + System.currentTimeMillis());
        }

        boolean audio = desc.format instanceof AudioFormat;

        if (audio)
        {
            ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);

            if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd))
            {
                logger.error(
                    "Failed to set the Processor content descriptor to "
                        + AUDIO_CONTENT_DESCRIPTOR + ". Actual result: " + cd);
                removeReceiveStream(desc, false);
                return;
            }
        }

        for (TrackControl track : processor.getTrackControls())
        {
            Format trackFormat = track.getFormat();

            if (audio)
            {
                final long ssrc = desc.ssrc;
                SilenceEffect silenceEffect;

                if (Constants.OPUS_RTP.equals(desc.format.getEncoding()))
                {
                    silenceEffect = new SilenceEffect(48000);
                }
                else
                {
                    // We haven't tested that the RTP timestamps survive the journey through
                    // the chain when codecs other than Opus are in use, so for the moment we
                    // rely on FMJ's timestamps for non-Opus formats.
                    silenceEffect = new SilenceEffect();
                }

                silenceEffect.setListener(new SilenceEffect.Listener()
                {
                    boolean first = true;

                    @Override
                    public void onSilenceNotInserted(long timestamp)
                    {
                        if (first)
                        {
                            first = false;
                            // Send an event only.
                            audioRecordingStarted(ssrc, timestamp);
                        }
                        else
                        {
                            // Change the file and send an event.
                            resetRecording(ssrc, timestamp);
                        }
                    }
                });
                desc.silenceEffect = silenceEffect;

                AudioLevelEffect audioLevelEffect = new AudioLevelEffect();

                audioLevelEffect.setAudioLevelListener(new SimpleAudioLevelListener()
                {
                    @Override
                    public void audioLevelChanged(int level)
                    {
                        activeSpeakerDetector.levelChanged(ssrc, level);
                    }
                });

                try
                {
                    // We add an effect, which will insert "silence" in place of lost
                    // packets.
                    track.setCodecChain(new Codec[] { silenceEffect, audioLevelEffect });
                }
                catch (UnsupportedPlugInException upie)
                {
                    logger.warn("Failed to insert silence effect: " + upie);
                    // But do go on, a recording without extra silence is better than
                    // nothing ;)
                }
            }
            else
            {
                // Transcode vp8/rtp to vp8 (i.e. depacketize vp8).
                if (trackFormat.matches(vp8RtpFormat))
                {
                    track.setFormat(vp8Format);
                }
                else
                {
                    logger.error(
                        "Unsupported track format: " + trackFormat
                            + " for ssrc=" + desc.ssrc);
                    // We currently only support vp8.
                    removeReceiveStream(desc, false);
                    return;
                }
            }
        }

        processor.realize();
    }
    else if (ev instanceof RealizeCompleteEvent)
    {
        desc.dataSource = processor.getDataOutput();

        long ssrc = desc.ssrc;
        boolean audio = desc.format instanceof AudioFormat;
        String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;
        // XXX '\' on windows?
        String filename = getNextFilename(path + "/" + ssrc, suffix);

        desc.filename = filename;

        DataSink dataSink;

        if (audio)
        {
            try
            {
                dataSink
                    = Manager.createDataSink(
                        desc.dataSource, new MediaLocator("file:" + filename));
            }
            catch (NoDataSinkException ndse)
            {
                logger.error("Could not create DataSink: " + ndse);
                removeReceiveStream(desc, false);
                return;
            }
        }
        else
        {
            dataSink = new WebmDataSink(filename, desc.dataSource);
        }

        if (logger.isInfoEnabled())
        {
            logger.info(
                "Created DataSink (" + dataSink + ") for SSRC=" + ssrc
                    + ". Output filename: " + filename);
        }

        try
        {
            dataSink.open();
        }
        catch (IOException e)
        {
            logger.error(
                "Failed to open DataSink (" + dataSink + ") for SSRC=" + ssrc + ": " + e);
            removeReceiveStream(desc, false);
            return;
        }

        if (!audio)
        {
            final WebmDataSink webmDataSink = (WebmDataSink) dataSink;

            webmDataSink.setSsrc(ssrc);
            webmDataSink.setEventHandler(eventHandler);
            webmDataSink.setKeyFrameControl(new KeyFrameControlAdapter()
            {
                @Override
                public boolean requestKeyFrame(boolean urgent)
                {
                    return requestFIR(webmDataSink);
                }
            });
        }

        try
        {
            dataSink.start();
        }
        catch (IOException e)
        {
            logger.error(
                "Failed to start DataSink (" + dataSink + ") for SSRC=" + ssrc + ". " + e);
            removeReceiveStream(desc, false);
            return;
        }

        if (logger.isInfoEnabled())
            logger.info("Started DataSink for SSRC=" + ssrc);

        desc.dataSink = dataSink;

        processor.start();
    }
    else if (logger.isDebugEnabled())
    {
        logger.debug(
            "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc
                + ": " + ev);
    }
}
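// The flow above follows the standard JMF/FMJ Processor lifecycle: configure() leads to a
// ConfigureCompleteEvent (where the content descriptor and codec chain are set), realize()
// leads to a RealizeCompleteEvent (where the output DataSource is wired into a DataSink),
// and only then is processor.start() called.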
/**
 * Starts the execution of the neomedia bundle in the specified context.
 *
 * @param bundleContext the context in which the neomedia bundle is to start executing
 * @throws Exception if an error occurs while starting the execution of the neomedia bundle
 * in the specified context
 */
public void start(BundleContext bundleContext)
    throws Exception
{
    if (logger.isDebugEnabled())
        logger.debug("Started.");

    NeomediaActivator.bundleContext = bundleContext;

    // MediaService
    mediaServiceImpl = (MediaServiceImpl) LibJitsi.getMediaService();

    bundleContext.registerService(MediaService.class.getName(), mediaServiceImpl, null);
    if (logger.isDebugEnabled())
        logger.debug("Media Service ... [REGISTERED]");

    // mediaConfiguration = new MediaConfigurationImpl();
    // bundleContext.registerService(
    //     MediaConfigurationService.class.getName(),
    //     getMediaConfiguration(),
    //     null);
    if (logger.isDebugEnabled())
        logger.debug("Media Configuration ... [REGISTERED]");

    ConfigurationService cfg = NeomediaActivator.getConfigurationService();
    Dictionary<String, String> mediaProps = new Hashtable<String, String>();

    mediaProps.put(ConfigurationForm.FORM_TYPE, ConfigurationForm.GENERAL_TYPE);

    // If the audio configuration form is disabled, don't register it.
    // if ((cfg == null) || !cfg.getBoolean(AUDIO_CONFIG_DISABLED_PROP, false))
    // {
    //     audioConfigurationForm
    //         = new LazyConfigurationForm(
    //             AudioConfigurationPanel.class.getName(),
    //             getClass().getClassLoader(),
    //             "plugin.mediaconfig.AUDIO_ICON",
    //             "impl.neomedia.configform.AUDIO",
    //             3);
    //
    //     bundleContext.registerService(
    //         ConfigurationForm.class.getName(),
    //         audioConfigurationForm,
    //         mediaProps);
    //
    //     if (deviceConfigurationPropertyChangeListener == null)
    //     {
    //         // Initializes and registers the changed device configuration
    //         // event to the notification service.
    //         getNotificationService();
    //
    //         deviceConfigurationPropertyChangeListener
    //             = new AudioDeviceConfigurationListener();
    //         mediaServiceImpl
    //             .getDeviceConfiguration()
    //             .addPropertyChangeListener(
    //                 deviceConfigurationPropertyChangeListener);
    //     }
    // }

    // If the video configuration form is disabled, don't register it.
    // if ((cfg == null) || !cfg.getBoolean(VIDEO_CONFIG_DISABLED_PROP, false))
    // {
    //     bundleContext.registerService(
    //         ConfigurationForm.class.getName(),
    //         new LazyConfigurationForm(
    //             VideoConfigurationPanel.class.getName(),
    //             getClass().getClassLoader(),
    //             "plugin.mediaconfig.VIDEO_ICON",
    //             "impl.neomedia.configform.VIDEO",
    //             4),
    //         mediaProps);
    // }

    // H.264
    // If the H.264 configuration form is disabled, don't register it.
    // if ((cfg == null) || !cfg.getBoolean(H264_CONFIG_DISABLED_PROP, false))
    // {
    //     Dictionary<String, String> h264Props
    //         = new Hashtable<String, String>();
    //
    //     h264Props.put(
    //         ConfigurationForm.FORM_TYPE,
    //         ConfigurationForm.ADVANCED_TYPE);
    //     bundleContext.registerService(
    //         ConfigurationForm.class.getName(),
    //         new LazyConfigurationForm(
    //             ConfigurationPanel.class.getName(),
    //             getClass().getClassLoader(),
    //             "plugin.mediaconfig.VIDEO_ICON",
    //             "impl.neomedia.configform.H264",
    //             -1,
    //             true),
    //         h264Props);
    // }

    // ZRTP
    // If the ZRTP configuration form is disabled, don't register it.
    // if ((cfg == null) || !cfg.getBoolean(ZRTP_CONFIG_DISABLED_PROP, false))
    // {
    //     Dictionary<String, String> securityProps
    //         = new Hashtable<String, String>();
    //
    //     securityProps.put(
    //         ConfigurationForm.FORM_TYPE,
    //         ConfigurationForm.SECURITY_TYPE);
    //     bundleContext.registerService(
    //         ConfigurationForm.class.getName(),
    //         new LazyConfigurationForm(
    //             SecurityConfigForm.class.getName(),
    //             getClass().getClassLoader(),
    //             "impl.media.security.zrtp.CONF_ICON",
    //             "impl.media.security.zrtp.TITLE",
    //             0),
    //         securityProps);
    // }

    // We use the nist-sdp stack to parse SDP, and we need to set the following property to
    // make sure that it accepts Java-generated IPv6 addresses that contain address scope
    // zones.
    System.setProperty("gov.nist.core.STRIP_ADDR_SCOPES", "true");

    // AudioNotifierService
    AudioNotifierService audioNotifierService = LibJitsi.getAudioNotifierService();

    audioNotifierService.setMute(
        (cfg == null)
            || !cfg.getBoolean(
                "net.java.sip.communicator.impl.sound.isSoundEnabled",
                true));
    bundleContext.registerService(
        AudioNotifierService.class.getName(),
        audioNotifierService,
        null);
    if (logger.isInfoEnabled())
        logger.info("Audio Notifier Service ...[REGISTERED]");

    // Call Recording
    // If the call recording configuration form is disabled, don't continue.
    // if ((cfg == null)
    //     || !cfg.getBoolean(CALL_RECORDING_CONFIG_DISABLED_PROP, false))
    // {
    //     Dictionary<String, String> callRecordingProps
    //         = new Hashtable<String, String>();
    //
    //     callRecordingProps.put(
    //         ConfigurationForm.FORM_TYPE,
    //         ConfigurationForm.ADVANCED_TYPE);
    //     bundleContext.registerService(
    //         ConfigurationForm.class.getName(),
    //         new LazyConfigurationForm(
    //             CallRecordingConfigForm.class.getName(),
    //             getClass().getClassLoader(),
    //             null,
    //             "plugin.callrecordingconfig.CALL_RECORDING_CONFIG",
    //             1100,
    //             true),
    //         callRecordingProps);
    // }
}