/**
 * Stops and releases everything associated with <tt>receiveStream</tt> (packet buffer,
 * <tt>DataSink</tt>, <tt>Processor</tt> and <tt>DataSource</tt>) and removes it from the list
 * of active streams.
 *
 * @param receiveStream the stream to remove.
 * @param emptyJB not used by the current implementation.
 */
private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) {
    if (receiveStream.format instanceof VideoFormat) {
        rtpConnector.packetBuffer.disable(receiveStream.ssrc);
        emptyPacketBuffer(receiveStream.ssrc);
    }

    if (receiveStream.dataSink != null) {
        try {
            receiveStream.dataSink.stop();
        } catch (IOException e) {
            logger.error("Failed to stop DataSink: " + e);
        }
        receiveStream.dataSink.close();
    }

    if (receiveStream.processor != null) {
        receiveStream.processor.stop();
        receiveStream.processor.close();
    }

    DataSource dataSource = receiveStream.receiveStream.getDataSource();
    if (dataSource != null) {
        try {
            dataSource.stop();
        } catch (IOException ioe) {
            logger.warn("Failed to stop DataSource");
        }
        dataSource.disconnect();
    }

    synchronized (receiveStreams) {
        receiveStreams.remove(receiveStream);
    }
}
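/**
 * Empties the connector's packet buffer for <tt>ssrc</tt> and writes the drained packets to the
 * output data stream, marking them as already transformed.
 *
 * @param ssrc the SSRC for which to flush buffered packets.
 */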
private void emptyPacketBuffer(long ssrc) {
    RawPacket[] pkts = rtpConnector.packetBuffer.emptyBuffer(ssrc);
    RTPConnectorImpl.OutputDataStreamImpl dataStream;

    try {
        dataStream = rtpConnector.getDataOutputStream();
    } catch (IOException ioe) {
        logger.error("Failed to empty packet buffer for SSRC=" + ssrc + ": " + ioe);
        return;
    }

    for (RawPacket pkt : pkts) {
        dataStream.write(
            pkt.getBuffer(),
            pkt.getOffset(),
            pkt.getLength(),
            false /* already transformed */);
    }
}
/**
 * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
 * the <tt>Processor</tt>s that this instance uses to transcode media: when a <tt>Processor</tt>
 * reports that it has been configured, the output content descriptor and codec chain are set
 * and it is asked to realize; when it reports that it has been realized, a <tt>DataSink</tt>
 * writing to the output file is created and started.
 *
 * @param ev the event to handle.
 */
public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
        return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
        logger.warn("Event from an orphaned processor, ignoring: " + ev);
        return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
        if (logger.isInfoEnabled()) {
            logger.info(
                "Configured processor for ReceiveStream ssrc=" + desc.ssrc
                    + " (" + desc.format + ") " + System.currentTimeMillis());
        }

        boolean audio = desc.format instanceof AudioFormat;

        if (audio) {
            ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
            if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
                logger.error(
                    "Failed to set the Processor content descriptor to "
                        + AUDIO_CONTENT_DESCRIPTOR + ". Actual result: " + cd);
                removeReceiveStream(desc, false);
                return;
            }
        }

        for (TrackControl track : processor.getTrackControls()) {
            Format trackFormat = track.getFormat();

            if (audio) {
                final long ssrc = desc.ssrc;

                SilenceEffect silenceEffect;
                if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
                    silenceEffect = new SilenceEffect(48000);
                } else {
                    // We haven't tested that the RTP timestamps survive the journey
                    // through the chain when codecs other than Opus are in use, so for
                    // the moment we rely on FMJ's timestamps for non-Opus formats.
                    silenceEffect = new SilenceEffect();
                }

                silenceEffect.setListener(
                    new SilenceEffect.Listener() {
                        boolean first = true;

                        @Override
                        public void onSilenceNotInserted(long timestamp) {
                            if (first) {
                                first = false;
                                // send event only
                                audioRecordingStarted(ssrc, timestamp);
                            } else {
                                // change file and send event
                                resetRecording(ssrc, timestamp);
                            }
                        }
                    });
                desc.silenceEffect = silenceEffect;

                AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
                audioLevelEffect.setAudioLevelListener(
                    new SimpleAudioLevelListener() {
                        @Override
                        public void audioLevelChanged(int level) {
                            activeSpeakerDetector.levelChanged(ssrc, level);
                        }
                    });

                try {
                    // Insert an effect which will fill in "silence" in place of lost
                    // packets, and one which reports audio levels.
                    track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
                } catch (UnsupportedPlugInException upie) {
                    logger.warn("Failed to insert silence effect: " + upie);
                    // But do go on, a recording without extra silence is better than
                    // nothing ;)
                }
            } else {
                // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
                if (trackFormat.matches(vp8RtpFormat)) {
                    track.setFormat(vp8Format);
                } else {
                    logger.error(
                        "Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
                    // we currently only support vp8
                    removeReceiveStream(desc, false);
                    return;
                }
            }
        }

        processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
        desc.dataSource = processor.getDataOutput();

        long ssrc = desc.ssrc;
        boolean audio = desc.format instanceof AudioFormat;
        String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

        // XXX '\' on windows?
        String filename = getNextFilename(path + "/" + ssrc, suffix);
        desc.filename = filename;

        DataSink dataSink;
        if (audio) {
            try {
                dataSink = Manager.createDataSink(
                    desc.dataSource, new MediaLocator("file:" + filename));
            } catch (NoDataSinkException ndse) {
                logger.error("Could not create DataSink: " + ndse);
                removeReceiveStream(desc, false);
                return;
            }
        } else {
            dataSink = new WebmDataSink(filename, desc.dataSource);
        }

        if (logger.isInfoEnabled()) {
            logger.info(
                "Created DataSink (" + dataSink + ") for SSRC=" + ssrc
                    + ". Output filename: " + filename);
        }

        try {
            dataSink.open();
        } catch (IOException e) {
            logger.error(
                "Failed to open DataSink (" + dataSink + ") for SSRC=" + ssrc + ": " + e);
            removeReceiveStream(desc, false);
            return;
        }

        if (!audio) {
            final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
            webmDataSink.setSsrc(ssrc);
            webmDataSink.setEventHandler(eventHandler);
            webmDataSink.setKeyFrameControl(
                new KeyFrameControlAdapter() {
                    @Override
                    public boolean requestKeyFrame(boolean urgent) {
                        return requestFIR(webmDataSink);
                    }
                });
        }

        try {
            dataSink.start();
        } catch (IOException e) {
            logger.error(
                "Failed to start DataSink (" + dataSink + ") for SSRC=" + ssrc + ". " + e);
            removeReceiveStream(desc, false);
            return;
        }

        if (logger.isInfoEnabled()) {
            logger.info("Started DataSink for SSRC=" + ssrc);
        }

        desc.dataSink = dataSink;

        processor.start();
    } else if (logger.isDebugEnabled()) {
        logger.debug(
            "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
}
/**
 * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
 *
 * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
 */
@Override
public void update(ReceiveStreamEvent event) {
    if (event == null) {
        return;
    }

    ReceiveStream receiveStream = event.getReceiveStream();

    if (event instanceof NewReceiveStreamEvent) {
        if (receiveStream == null) {
            logger.warn("NewReceiveStreamEvent with a null ReceiveStream, ignoring.");
            return;
        }

        final long ssrc = getReceiveStreamSSRC(receiveStream);

        ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);
        if (receiveStreamDesc != null) {
            String s = "NewReceiveStreamEvent for an existing SSRC. ";
            if (receiveStream != receiveStreamDesc.receiveStream) {
                s += "(but different ReceiveStream object)";
            }
            logger.warn(s);
            return;
        } else {
            receiveStreamDesc = new ReceiveStreamDesc(receiveStream);
        }

        if (logger.isInfoEnabled()) {
            logger.info("New ReceiveStream, ssrc=" + ssrc);
        }

        // Find the format of the ReceiveStream.
        DataSource dataSource = receiveStream.getDataSource();
        if (dataSource instanceof PushBufferDataSource) {
            Format format = null;
            PushBufferDataSource pbds = (PushBufferDataSource) dataSource;
            for (PushBufferStream pbs : pbds.getStreams()) {
                if ((format = pbs.getFormat()) != null) {
                    break;
                }
            }

            if (format == null) {
                logger.error("Failed to handle new ReceiveStream: failed to determine format");
                return;
            }

            receiveStreamDesc.format = format;
        } else {
            logger.error("Failed to handle new ReceiveStream: unsupported DataSource");
            return;
        }

        int rtpClockRate = -1;
        if (receiveStreamDesc.format instanceof AudioFormat) {
            rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
        } else if (receiveStreamDesc.format instanceof VideoFormat) {
            rtpClockRate = 90000;
        }
        getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

        // Create a Processor and configure it.
        Processor processor = null;
        try {
            processor = Manager.createProcessor(receiveStream.getDataSource());
        } catch (NoProcessorException npe) {
            logger.error("Failed to create Processor: ", npe);
            return;
        } catch (IOException ioe) {
            logger.error("Failed to create Processor: ", ioe);
            return;
        }

        if (logger.isInfoEnabled()) {
            logger.info("Created processor for SSRC=" + ssrc);
        }

        processor.addControllerListener(this);
        receiveStreamDesc.processor = processor;

        final int streamCount;
        synchronized (receiveStreams) {
            receiveStreams.add(receiveStreamDesc);
            streamCount = receiveStreams.size();
        }

        /*
         * XXX TODO IRBABOON
         * This is a terrible hack which works around a failure to realize()
         * some of the Processor-s for audio streams, when multiple streams
         * start nearly simultaneously. The cause of the problem is currently
         * unknown (and synchronizing all FMJ calls in RecorderRtpImpl does
         * not help).
         * XXX TODO NOOBABRI
         */
        if (receiveStreamDesc.format instanceof AudioFormat) {
            final Processor p = processor;
            new Thread() {
                @Override
                public void run() {
                    // Delay configuring the processors for the different audio
                    // streams, to decrease the probability that they run
                    // together.
                    try {
                        int ms = 450 * (streamCount - 1);
                        logger.warn(
                            "Sleeping for " + ms + "ms before configuring processor for SSRC="
                                + ssrc + " " + System.currentTimeMillis());
                        Thread.sleep(ms);
                    } catch (Exception e) {
                    }

                    p.configure();
                }
            }.start(); // start the thread, so the sleep does not block the event thread
        } else {
            processor.configure();
        }
    } else if (event instanceof TimeoutEvent) {
        if (receiveStream == null) {
            // TODO: we might want to get the list of ReceiveStream-s from
            // rtpManager and compare it to our list, to see if we should
            // remove a stream.
logger.warn("TimeoutEvent: null."); return; } // FMJ silently creates new ReceiveStream instances, so we have to // recognize them by the SSRC. ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream)); if (receiveStreamDesc != null) { if (logger.isInfoEnabled()) { logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc); } removeReceiveStream(receiveStreamDesc, true); } } else if (event != null && logger.isInfoEnabled()) { logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event); } }