/**
 * Handles a specific <tt>IOException</tt> which was thrown during the execution of
 * {@link #runInConnectThread(DTLSProtocol, TlsPeer, DatagramTransport)} while trying to
 * establish a DTLS connection.
 *
 * @param ioe the <tt>IOException</tt> to handle
 * @param msg the human-readable message to log about the specified <tt>ioe</tt>
 * @param i the number of tries remaining after the current one
 * @return <tt>true</tt> if the specified <tt>ioe</tt> was successfully handled; <tt>false</tt>,
 * otherwise
 */
private boolean handleRunInConnectThreadException(IOException ioe, String msg, int i) {
  // SrtpControl.start(MediaType) starts its associated TransformEngine. We use mediaType to
  // signal the normal stop as well, i.e. we ignore exceptions after the procedure to stop this
  // PacketTransformer has begun.
  if (mediaType == null) return false;

  if (ioe instanceof TlsFatalAlert) {
    TlsFatalAlert tfa = (TlsFatalAlert) ioe;
    short alertDescription = tfa.getAlertDescription();

    if (alertDescription == AlertDescription.unexpected_message) {
      msg += " Received fatal unexpected message.";
      if (i == 0
          || !Thread.currentThread().equals(connectThread)
          || connector == null
          || mediaType == null) {
        msg += " Giving up after " + (CONNECT_TRIES - i) + " retries.";
      } else {
        msg += " Will retry.";
        logger.error(msg, ioe);
        return true;
      }
    } else {
      msg += " Received fatal alert " + alertDescription + ".";
    }
  }

  logger.error(msg, ioe);
  return false;
}
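// A hypothetical consumer sketch (the real runInConnectThread is not shown in this excerpt)
// illustrating the contract of handleRunInConnectThreadException: a return value of true means
// "already logged, retry the handshake", false means "give up" and let the caller log/abort.
// The use of DTLSClientProtocol.connect here is an assumption for illustration only.
private void connectWithRetriesSketch(
    DTLSClientProtocol clientProtocol, TlsClient client, DatagramTransport transport) {
  for (int i = CONNECT_TRIES - 1; i >= 0; i--) {
    try {
      // Attempt the DTLS handshake; may throw IOException (e.g. TlsFatalAlert).
      dtlsTransport = clientProtocol.connect(client, transport);
      return;
    } catch (IOException ioe) {
      if (!handleRunInConnectThreadException(ioe, "DTLS handshake attempt failed.", i)) {
        break;
      }
    }
  }
}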
/** Implements notification handling in order to track the SCTP socket/association state. */
@Override
public synchronized void onSctpNotification(SctpSocket socket, SctpNotification notification) {
  if (logger.isDebugEnabled()) {
    logger.debug("socket=" + socket + "; notification=" + notification);
  }
  switch (notification.sn_type) {
    case SctpNotification.SCTP_ASSOC_CHANGE:
      SctpNotification.AssociationChange assocChange =
          (SctpNotification.AssociationChange) notification;

      switch (assocChange.state) {
        case SctpNotification.AssociationChange.SCTP_COMM_UP:
          if (!assocIsUp) {
            boolean wasReady = isReady();

            assocIsUp = true;
            if (isReady() && !wasReady) {
              notifySctpConnectionReady();
            }
          }
          break;
        case SctpNotification.AssociationChange.SCTP_COMM_LOST:
        case SctpNotification.AssociationChange.SCTP_SHUTDOWN_COMP:
        case SctpNotification.AssociationChange.SCTP_CANT_STR_ASSOC:
          try {
            closeStream();
          } catch (IOException e) {
            logger.error("Error closing SCTP socket", e);
          }
          break;
      }
      break;
  }
}
/** Stops this <tt>PacketTransformer</tt>. */
private synchronized void stop() {
  if (connectThread != null) connectThread = null;

  try {
    // The dtlsTransport and _srtpTransformer SHOULD be closed, of course. The datagramTransport
    // MUST be closed.
    if (dtlsTransport != null) {
      try {
        dtlsTransport.close();
      } catch (IOException ioe) {
        logger.error("Failed to (properly) close " + dtlsTransport.getClass(), ioe);
      }
      dtlsTransport = null;
    }
    if (_srtpTransformer != null) {
      _srtpTransformer.close();
      _srtpTransformer = null;
    }
  } finally {
    try {
      closeDatagramTransport();
    } finally {
      notifyAll();
    }
  }
}
/**
 * Removes a <tt>ReceiveStreamDesc</tt> from this recorder, stopping and closing its
 * <tt>DataSink</tt>, <tt>Processor</tt> and <tt>DataSource</tt>.
 *
 * @param receiveStream the <tt>ReceiveStreamDesc</tt> to remove.
 * @param emptyJB whether the stream's jitter buffer should be emptied.
 */
private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) {
  if (receiveStream.format instanceof VideoFormat) {
    rtpConnector.packetBuffer.disable(receiveStream.ssrc);
    emptyPacketBuffer(receiveStream.ssrc);
  }

  if (receiveStream.dataSink != null) {
    try {
      receiveStream.dataSink.stop();
    } catch (IOException e) {
      logger.error("Failed to stop DataSink " + e);
    }
    receiveStream.dataSink.close();
  }

  if (receiveStream.processor != null) {
    receiveStream.processor.stop();
    receiveStream.processor.close();
  }

  DataSource dataSource = receiveStream.receiveStream.getDataSource();
  if (dataSource != null) {
    try {
      dataSource.stop();
    } catch (IOException ioe) {
      logger.warn("Failed to stop DataSource");
    }
    dataSource.disconnect();
  }

  synchronized (receiveStreams) {
    receiveStreams.remove(receiveStream);
  }
}
/**
 * Sends an acknowledgement for an open channel request on the given SCTP stream ID.
 *
 * @param sid SCTP stream identifier to be used for sending the ack.
 */
private void sendOpenChannelAck(int sid) throws IOException {
  // Send ACK
  byte[] ack = MSG_CHANNEL_ACK_BYTES;
  int sendAck = sctpSocket.send(ack, true, sid, WEB_RTC_PPID_CTRL);

  if (sendAck != ack.length) {
    logger.error("Failed to send open channel confirmation");
  }
}
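// A minimal sketch (not part of the original class) of how the ACK payload sent above could be
// built, assuming the DCEP wire format from RFC 8832 where DATA_CHANNEL_ACK is the single byte
// 0x02. The authoritative payload is the MSG_CHANNEL_ACK_BYTES constant defined elsewhere.
private static byte[] buildDataChannelAckSketch() {
  return new byte[] {(byte) 0x02 /* DATA_CHANNEL_ACK message type */};
}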
/**
 * {@inheritDoc}
 *
 * <p>SCTP input data callback.
 */
@Override
public void onSctpPacket(
    byte[] data, int sid, int ssn, int tsn, long ppid, int context, int flags) {
  if (ppid == WEB_RTC_PPID_CTRL) {
    // Channel control PPID
    try {
      onCtrlPacket(data, sid);
    } catch (IOException e) {
      logger.error("IOException when processing ctrl packet", e);
    }
  } else if (ppid == WEB_RTC_PPID_STRING || ppid == WEB_RTC_PPID_BIN) {
    WebRtcDataStream channel;
    synchronized (this) {
      channel = channels.get(sid);
    }
    if (channel == null) {
      logger.error("No channel found for sid: " + sid);
      return;
    }

    if (ppid == WEB_RTC_PPID_STRING) {
      // WebRTC String
      String str;
      String charsetName = "UTF-8";

      try {
        str = new String(data, charsetName);
      } catch (UnsupportedEncodingException uee) {
        logger.error("Unsupported charset encoding/name " + charsetName, uee);
        str = null;
      }
      channel.onStringMsg(str);
    } else {
      // WebRTC Binary
      channel.onBinaryMsg(data);
    }
  } else {
    logger.warn("Got message on unsupported PPID: " + ppid);
  }
}
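// For context, a sketch (not from the original class) of the PPID dispatch above, assuming the
// payload protocol identifiers registered for WebRTC data channels in RFC 8831: 50 for DCEP
// control, 51 for UTF-8 strings, 53 for binary. The class's own WEB_RTC_PPID_* constants are
// authoritative; this helper only illustrates the mapping.
private static String describeWebRtcPpidSketch(long ppid) {
  if (ppid == 50) return "DCEP control";
  if (ppid == 51) return "UTF-8 string";
  if (ppid == 53) return "binary";
  return "unsupported (" + ppid + ")";
}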
/**
 * Closes {@link #datagramTransport} if it is non-<tt>null</tt> and logs and swallows any
 * <tt>IOException</tt>.
 */
private void closeDatagramTransport() {
  if (datagramTransport != null) {
    try {
      datagramTransport.close();
    } catch (IOException ioe) {
      // DatagramTransportImpl has no reason to fail because it is merely an adapter of
      // #connector and this PacketTransformer to the terms of the Bouncy Castle Crypto API.
      logger.error("Failed to (properly) close " + datagramTransport.getClass(), ioe);
    }
    datagramTransport = null;
  }
}
/**
 * Empties the packet buffer for a specific SSRC, writing the buffered packets to the output
 * data stream.
 *
 * @param ssrc the SSRC whose buffered packets are to be flushed.
 */
private void emptyPacketBuffer(long ssrc) {
  RawPacket[] pkts = rtpConnector.packetBuffer.emptyBuffer(ssrc);
  RTPConnectorImpl.OutputDataStreamImpl dataStream;

  try {
    dataStream = rtpConnector.getDataOutputStream();
  } catch (IOException ioe) {
    logger.error("Failed to empty packet buffer for SSRC=" + ssrc + ": " + ioe);
    return;
  }
  for (RawPacket pkt : pkts) {
    dataStream.write(
        pkt.getBuffer(), pkt.getOffset(), pkt.getLength(), false /* already transformed */);
  }
}
/**
 * Sends the data contained in a specific byte array as application data through the DTLS
 * connection of this <tt>DtlsPacketTransformer</tt>.
 *
 * @param buf the byte array containing the data to send.
 * @param off the offset in <tt>buf</tt> at which the data begins.
 * @param len the length of the data to send.
 */
public void sendApplicationData(byte[] buf, int off, int len) {
  DTLSTransport dtlsTransport = this.dtlsTransport;
  Throwable throwable = null;

  if (dtlsTransport != null) {
    try {
      dtlsTransport.send(buf, off, len);
    } catch (IOException ioe) {
      throwable = ioe;
    }
  } else {
    throwable = new NullPointerException("dtlsTransport");
  }

  if (throwable != null) {
    // SrtpControl.start(MediaType) starts its associated TransformEngine. We use mediaType to
    // signal the normal stop as well, i.e. we ignore exceptions after the procedure to stop
    // this PacketTransformer has begun.
    if (mediaType != null && !tlsPeerHasRaisedCloseNotifyWarning) {
      logger.error("Failed to send application data over DTLS transport: ", throwable);
    }
  }
}
/**
 * Makes the home folder and the configuration file readable and writable only by the owner.
 *
 * @param cs the <tt>ConfigurationService</tt> instance to check for the home folder and the
 * configuration file.
 */
private static void fixPermissions(ConfigurationService cs) {
  if (!OSUtils.IS_LINUX && !OSUtils.IS_MAC) return;

  try {
    // let's check the config file and config folder
    File homeFolder = new File(cs.getScHomeDirLocation(), cs.getScHomeDirName());
    Set<PosixFilePermission> perms =
        new HashSet<PosixFilePermission>() {
          {
            add(PosixFilePermission.OWNER_READ);
            add(PosixFilePermission.OWNER_WRITE);
            add(PosixFilePermission.OWNER_EXECUTE);
          }
        };
    Files.setPosixFilePermissions(Paths.get(homeFolder.getAbsolutePath()), perms);

    String fileName = cs.getConfigurationFilename();
    if (fileName != null) {
      File cf = new File(homeFolder, fileName);
      if (cf.exists()) {
        perms =
            new HashSet<PosixFilePermission>() {
              {
                add(PosixFilePermission.OWNER_READ);
                add(PosixFilePermission.OWNER_WRITE);
              }
            };
        Files.setPosixFilePermissions(Paths.get(cf.getAbsolutePath()), perms);
      }
    }
  } catch (Throwable t) {
    logger.error("Error fixing permissions of the home folder or configuration file", t);
    if (t instanceof InterruptedException) Thread.currentThread().interrupt();
    else if (t instanceof ThreadDeath) throw (ThreadDeath) t;
  }
}
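// A more compact equivalent sketch (not the original implementation) using
// java.nio.file.attribute.PosixFilePermissions.fromString, which parses an "rwx"-style string
// into the same Set<PosixFilePermission> built above with the anonymous HashSet subclasses.
// The method name and parameters are hypothetical.
private static void restrictToOwnerSketch(File dir, File configFile) throws IOException {
  // Owner-only access to the directory (rwx) and the configuration file (rw).
  Files.setPosixFilePermissions(dir.toPath(), PosixFilePermissions.fromString("rwx------"));
  if (configFile != null && configFile.exists()) {
    Files.setPosixFilePermissions(
        configFile.toPath(), PosixFilePermissions.fromString("rw-------"));
  }
}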
/**
 * Handles a control packet.
 *
 * @param data raw packet data that arrived on the control PPID.
 * @param sid SCTP stream id on which the data has arrived.
 */
private synchronized void onCtrlPacket(byte[] data, int sid) throws IOException {
  ByteBuffer buffer = ByteBuffer.wrap(data);
  int messageType = /* 1 byte unsigned integer */ 0xFF & buffer.get();

  if (messageType == MSG_CHANNEL_ACK) {
    if (logger.isDebugEnabled()) {
      logger.debug(getEndpoint().getID() + " ACK received SID: " + sid);
    }
    // Open channel ACK
    WebRtcDataStream channel = channels.get(sid);
    if (channel != null) {
      // The acknowledged check prevents firing multiple notifications if we get more than one
      // ACK (by mistake/bug).
      if (!channel.isAcknowledged()) {
        channel.ackReceived();
        notifyChannelOpened(channel);
      } else {
        logger.warn("Redundant ACK received for SID: " + sid);
      }
    } else {
      logger.error("No channel exists on sid: " + sid);
    }
  } else if (messageType == MSG_OPEN_CHANNEL) {
    int channelType = /* 1 byte unsigned integer */ 0xFF & buffer.get();
    int priority = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
    long reliability = /* 4 bytes unsigned integer */ 0xFFFFFFFFL & buffer.getInt();
    int labelLength = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
    int protocolLength = /* 2 bytes unsigned integer */ 0xFFFF & buffer.getShort();
    String label;
    String protocol;

    if (labelLength == 0) {
      label = "";
    } else {
      byte[] labelBytes = new byte[labelLength];
      buffer.get(labelBytes);
      label = new String(labelBytes, "UTF-8");
    }
    if (protocolLength == 0) {
      protocol = "";
    } else {
      byte[] protocolBytes = new byte[protocolLength];
      buffer.get(protocolBytes);
      protocol = new String(protocolBytes, "UTF-8");
    }

    if (logger.isDebugEnabled()) {
      logger.debug(
          getEndpoint().getID()
              + " data channel open request on SID: " + sid
              + " type: " + channelType
              + " prio: " + priority
              + " reliab: " + reliability
              + " label: " + label
              + " proto: " + protocol);
    }

    if (channels.containsKey(sid)) {
      logger.error("Channel on sid: " + sid + " already exists");
    }

    WebRtcDataStream newChannel = new WebRtcDataStream(sctpSocket, sid, label, true);
    channels.put(sid, newChannel);

    sendOpenChannelAck(sid);

    notifyChannelOpened(newChannel);
  } else {
    logger.error("Unexpected ctrl msg type: " + messageType);
  }
}
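// A minimal encoder sketch (not part of the original class) for the DATA_CHANNEL_OPEN message
// parsed above, assuming the DCEP layout from RFC 8832: 1-byte message type, 1-byte channel
// type, 16-bit priority, 32-bit reliability parameter, then 16-bit label and protocol lengths
// followed by the label and protocol bytes. MSG_OPEN_CHANNEL is assumed to equal 0x03.
private static byte[] encodeOpenChannelSketch(
    int channelType, int priority, long reliability, String label, String protocol)
    throws IOException {
  byte[] labelBytes = label.getBytes("UTF-8");
  byte[] protocolBytes = protocol.getBytes("UTF-8");
  ByteBuffer out = ByteBuffer.allocate(12 + labelBytes.length + protocolBytes.length);

  out.put((byte) 0x03 /* DATA_CHANNEL_OPEN */);
  out.put((byte) channelType);
  out.putShort((short) priority);
  out.putInt((int) reliability);
  out.putShort((short) labelBytes.length);
  out.putShort((short) protocolBytes.length);
  out.put(labelBytes);
  out.put(protocolBytes);
  return out.array();
}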
/** {@inheritDoc} */
@Override
public RawPacket reverseTransform(RawPacket pkt) {
  byte[] buf = pkt.getBuffer();
  int off = pkt.getOffset();
  int len = pkt.getLength();

  if (isDtlsRecord(buf, off, len)) {
    if (rtcpmux && Component.RTCP == componentID) {
      // This should never happen.
      logger.warn(
          "Dropping a DTLS record, because it was received on the RTCP channel while rtcpmux"
              + " is in use.");
      return null;
    }

    boolean receive;
    synchronized (this) {
      if (datagramTransport == null) {
        receive = false;
      } else {
        datagramTransport.queueReceive(buf, off, len);
        receive = true;
      }
    }
    if (receive) {
      DTLSTransport dtlsTransport = this.dtlsTransport;

      if (dtlsTransport == null) {
        // The specified pkt looks like a DTLS record and it has been consumed for the purposes
        // of the secure channel represented by this PacketTransformer.
        pkt = null;
      } else {
        try {
          int receiveLimit = dtlsTransport.getReceiveLimit();
          int delta = receiveLimit - len;

          if (delta > 0) {
            pkt.grow(delta);
            buf = pkt.getBuffer();
            off = pkt.getOffset();
            len = pkt.getLength();
          } else if (delta < 0) {
            pkt.shrink(-delta);
            buf = pkt.getBuffer();
            off = pkt.getOffset();
            len = pkt.getLength();
          }

          int received = dtlsTransport.receive(buf, off, len, DTLS_TRANSPORT_RECEIVE_WAITMILLIS);

          if (received <= 0) {
            // No application data was decoded.
            pkt = null;
          } else {
            delta = len - received;
            if (delta > 0) pkt.shrink(delta);
          }
        } catch (IOException ioe) {
          pkt = null;
          // SrtpControl.start(MediaType) starts its associated TransformEngine. We use
          // mediaType to signal the normal stop as well, i.e. we ignore exceptions after the
          // procedure to stop this PacketTransformer has begun.
          if (mediaType != null && !tlsPeerHasRaisedCloseNotifyWarning) {
            logger.error("Failed to decode a DTLS record!", ioe);
          }
        }
      }
    } else {
      // The specified pkt looks like a DTLS record but it is unexpected in the current state of
      // the secure channel represented by this PacketTransformer. This PacketTransformer has
      // not been started (successfully) or has been closed.
      pkt = null;
    }
  } else if (transformEngine.isSrtpDisabled()) {
    // In pure DTLS mode only DTLS records pass through.
    pkt = null;
  } else {
    // DTLS-SRTP has not been initialized yet or has failed to initialize.
    SinglePacketTransformer srtpTransformer = waitInitializeAndGetSRTPTransformer();

    if (srtpTransformer != null) pkt = srtpTransformer.reverseTransform(pkt);
    else if (DROP_UNENCRYPTED_PKTS) pkt = null;
    // XXX Else, it is our explicit policy to let the received packet pass through and rely on
    // the SrtpListener to notify the user that the session is not secured.
  }
  return pkt;
}
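// A sketch (not this class's actual isDtlsRecord implementation) of the first-byte
// demultiplexing rule from RFC 5764, Section 5.1.2, which separates DTLS from STUN and
// SRTP/SRTCP sharing one socket: first byte 0..3 is STUN, 20..63 is DTLS, 128..191 is RTP/RTCP.
private static boolean looksLikeDtlsSketch(byte[] buf, int off, int len) {
  if (len < 1) {
    return false;
  }
  int firstByte = 0xFF & buf[off];
  return firstByte >= 20 && firstByte <= 63;
}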
/**
 * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
 * the <tt>Processor</tt>s that this instance uses to transcode media.
 *
 * @param ev the event to handle.
 */
public void controllerUpdate(ControllerEvent ev) {
  if (ev == null || ev.getSourceController() == null) {
    return;
  }

  Processor processor = (Processor) ev.getSourceController();
  ReceiveStreamDesc desc = findReceiveStream(processor);

  if (desc == null) {
    logger.warn("Event from an orphaned processor, ignoring: " + ev);
    return;
  }

  if (ev instanceof ConfigureCompleteEvent) {
    if (logger.isInfoEnabled()) {
      logger.info(
          "Configured processor for ReceiveStream ssrc=" + desc.ssrc
              + " (" + desc.format + ") " + System.currentTimeMillis());
    }

    boolean audio = desc.format instanceof AudioFormat;

    if (audio) {
      ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
      if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
        logger.error(
            "Failed to set the Processor content descriptor to " + AUDIO_CONTENT_DESCRIPTOR
                + ". Actual result: " + cd);
        removeReceiveStream(desc, false);
        return;
      }
    }

    for (TrackControl track : processor.getTrackControls()) {
      Format trackFormat = track.getFormat();

      if (audio) {
        final long ssrc = desc.ssrc;
        SilenceEffect silenceEffect;

        if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
          silenceEffect = new SilenceEffect(48000);
        } else {
          // We haven't tested that the RTP timestamps survive the journey through the chain
          // when codecs other than opus are in use, so for the moment we rely on FMJ's
          // timestamps for non-opus formats.
          silenceEffect = new SilenceEffect();
        }

        silenceEffect.setListener(
            new SilenceEffect.Listener() {
              boolean first = true;

              @Override
              public void onSilenceNotInserted(long timestamp) {
                if (first) {
                  first = false;
                  // send event only
                  audioRecordingStarted(ssrc, timestamp);
                } else {
                  // change file and send event
                  resetRecording(ssrc, timestamp);
                }
              }
            });
        desc.silenceEffect = silenceEffect;

        AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
        audioLevelEffect.setAudioLevelListener(
            new SimpleAudioLevelListener() {
              @Override
              public void audioLevelChanged(int level) {
                activeSpeakerDetector.levelChanged(ssrc, level);
              }
            });

        try {
          // We add an effect, which will insert "silence" in place of lost packets.
          track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
        } catch (UnsupportedPlugInException upie) {
          logger.warn("Failed to insert silence effect: " + upie);
          // But do go on, a recording without extra silence is better than nothing ;)
        }
      } else {
        // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
        if (trackFormat.matches(vp8RtpFormat)) {
          track.setFormat(vp8Format);
        } else {
          logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
          // we currently only support vp8
          removeReceiveStream(desc, false);
          return;
        }
      }
    }

    processor.realize();
  } else if (ev instanceof RealizeCompleteEvent) {
    desc.dataSource = processor.getDataOutput();

    long ssrc = desc.ssrc;
    boolean audio = desc.format instanceof AudioFormat;
    String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

    // XXX '\' on windows?
    String filename = getNextFilename(path + "/" + ssrc, suffix);
    desc.filename = filename;

    DataSink dataSink;
    if (audio) {
      try {
        dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
      } catch (NoDataSinkException ndse) {
        logger.error("Could not create DataSink: " + ndse);
        removeReceiveStream(desc, false);
        return;
      }
    } else {
      dataSink = new WebmDataSink(filename, desc.dataSource);
    }

    if (logger.isInfoEnabled()) {
      logger.info(
          "Created DataSink (" + dataSink + ") for SSRC=" + ssrc
              + ". Output filename: " + filename);
    }
    try {
      dataSink.open();
    } catch (IOException e) {
      logger.error("Failed to open DataSink (" + dataSink + ") for SSRC=" + ssrc + ": " + e);
      removeReceiveStream(desc, false);
      return;
    }

    if (!audio) {
      final WebmDataSink webmDataSink = (WebmDataSink) dataSink;

      webmDataSink.setSsrc(ssrc);
      webmDataSink.setEventHandler(eventHandler);
      webmDataSink.setKeyFrameControl(
          new KeyFrameControlAdapter() {
            @Override
            public boolean requestKeyFrame(boolean urgent) {
              return requestFIR(webmDataSink);
            }
          });
    }

    try {
      dataSink.start();
    } catch (IOException e) {
      logger.error("Failed to start DataSink (" + dataSink + ") for SSRC=" + ssrc + ". " + e);
      removeReceiveStream(desc, false);
      return;
    }

    if (logger.isInfoEnabled()) {
      logger.info("Started DataSink for SSRC=" + ssrc);
    }

    desc.dataSink = dataSink;

    processor.start();
  } else if (logger.isDebugEnabled()) {
    logger.debug(
        "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
  }
}
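// For orientation, a compressed, hypothetical sketch (not part of this class) of the JMF/FMJ
// Processor lifecycle that controllerUpdate above drives asynchronously: configure() leads to
// ConfigureCompleteEvent, realize() to RealizeCompleteEvent, and only then is start() called.
private static void processorLifecycleSketch(Processor processor, ControllerListener listener) {
  processor.addControllerListener(listener); // events arrive via controllerUpdate(...)
  processor.configure(); // -> ConfigureCompleteEvent: set content descriptor and codec chain
  // the listener then calls processor.realize();  -> RealizeCompleteEvent: create the DataSink
  // the listener finally calls processor.start(); -> media starts flowing into the DataSink
}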
/**
 * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
 *
 * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
 */
@Override
public void update(ReceiveStreamEvent event) {
  if (event == null) {
    return;
  }
  ReceiveStream receiveStream = event.getReceiveStream();

  if (event instanceof NewReceiveStreamEvent) {
    if (receiveStream == null) {
      logger.warn("NewReceiveStreamEvent: null");
      return;
    }

    final long ssrc = getReceiveStreamSSRC(receiveStream);
    ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);

    if (receiveStreamDesc != null) {
      String s = "NewReceiveStreamEvent for an existing SSRC. ";
      if (receiveStream != receiveStreamDesc.receiveStream) {
        s += "(but different ReceiveStream object)";
      }
      logger.warn(s);
      return;
    } else {
      receiveStreamDesc = new ReceiveStreamDesc(receiveStream);
    }

    if (logger.isInfoEnabled()) {
      logger.info("New ReceiveStream, ssrc=" + ssrc);
    }

    // Find the format of the ReceiveStream
    DataSource dataSource = receiveStream.getDataSource();
    if (dataSource instanceof PushBufferDataSource) {
      Format format = null;
      PushBufferDataSource pbds = (PushBufferDataSource) dataSource;
      for (PushBufferStream pbs : pbds.getStreams()) {
        if ((format = pbs.getFormat()) != null) {
          break;
        }
      }

      if (format == null) {
        logger.error("Failed to handle new ReceiveStream: Failed to determine format");
        return;
      }

      receiveStreamDesc.format = format;
    } else {
      logger.error("Failed to handle new ReceiveStream: Unsupported DataSource");
      return;
    }

    int rtpClockRate = -1;
    if (receiveStreamDesc.format instanceof AudioFormat) {
      rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
    } else if (receiveStreamDesc.format instanceof VideoFormat) {
      rtpClockRate = 90000;
    }
    getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

    // create a Processor and configure it
    Processor processor = null;
    try {
      processor = Manager.createProcessor(receiveStream.getDataSource());
    } catch (NoProcessorException npe) {
      logger.error("Failed to create Processor: ", npe);
      return;
    } catch (IOException ioe) {
      logger.error("Failed to create Processor: ", ioe);
      return;
    }

    if (logger.isInfoEnabled()) {
      logger.info("Created processor for SSRC=" + ssrc);
    }

    processor.addControllerListener(this);
    receiveStreamDesc.processor = processor;

    final int streamCount;
    synchronized (receiveStreams) {
      receiveStreams.add(receiveStreamDesc);
      streamCount = receiveStreams.size();
    }

    /*
     * XXX TODO IRBABOON
     * This is a terrible hack which works around a failure to realize() some of the
     * Processor-s for audio streams, when multiple streams start nearly simultaneously. The
     * cause of the problem is currently unknown (and synchronizing all FMJ calls in
     * RecorderRtpImpl does not help).
     * XXX TODO NOOBABRI
     */
    if (receiveStreamDesc.format instanceof AudioFormat) {
      final Processor p = processor;
      new Thread() {
        @Override
        public void run() {
          // delay configuring the processors for the different audio streams to decrease the
          // probability that they run together.
          try {
            int ms = 450 * (streamCount - 1);
            logger.warn(
                "Sleeping for " + ms + "ms before configuring processor for SSRC=" + ssrc
                    + " " + System.currentTimeMillis());
            Thread.sleep(ms);
          } catch (Exception e) {
          }
          p.configure();
        }
      }.start();
    } else {
      processor.configure();
    }
  } else if (event instanceof TimeoutEvent) {
    if (receiveStream == null) {
      // TODO: we might want to get the list of ReceiveStream-s from rtpManager and compare it
      // to our list, to see if we should remove a stream.
      logger.warn("TimeoutEvent: null.");
      return;
    }

    // FMJ silently creates new ReceiveStream instances, so we have to recognize them by their
    // SSRC.
    ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream));
    if (receiveStreamDesc != null) {
      if (logger.isInfoEnabled()) {
        logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc);
      }

      removeReceiveStream(receiveStreamDesc, true);
    }
  } else if (logger.isInfoEnabled()) {
    logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event);
  }
}