/**
 * Restarts the recording for a specific SSRC.
 *
 * @param ssrc the SSRC for which to restart recording.
 * @param timestamp the timestamp (of the first RTP packet) of the new recording.
 */
private void resetRecording(long ssrc, long timestamp)
{
    ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);

    // we only restart audio recordings
    if (receiveStream != null && receiveStream.format instanceof AudioFormat)
    {
        String newFilename = getNextFilename(path + "/" + ssrc, AUDIO_FILENAME_SUFFIX);

        // flush the buffer contained in the MP3 encoder
        String s = "trying to flush ssrc=" + ssrc;
        Processor p = receiveStream.processor;
        if (p != null)
        {
            s += " p!=null";
            for (TrackControl tc : p.getTrackControls())
            {
                Object o = tc.getControl(FlushableControl.class.getName());
                if (o != null)
                    ((FlushableControl) o).flush();
            }
        }

        if (logger.isInfoEnabled())
        {
            logger.info("Restarting recording for SSRC=" + ssrc
                    + ". New filename: " + newFilename);
        }

        receiveStream.dataSink.close();
        receiveStream.dataSink = null;

        // flush the FMJ jitter buffer
        // DataSource ds = receiveStream.receiveStream.getDataSource();
        // if (ds instanceof net.sf.fmj.media.protocol.rtp.DataSource)
        //     ((net.sf.fmj.media.protocol.rtp.DataSource) ds).flush();

        receiveStream.filename = newFilename;
        try
        {
            receiveStream.dataSink = Manager.createDataSink(
                    receiveStream.dataSource,
                    new MediaLocator("file:" + newFilename));
        }
        catch (NoDataSinkException ndse)
        {
            logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ndse);
            removeReceiveStream(receiveStream, false);
            // the DataSink is null at this point; do not fall through to open() below
            return;
        }

        try
        {
            receiveStream.dataSink.open();
            receiveStream.dataSink.start();
        }
        catch (IOException ioe)
        {
            logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ioe);
            removeReceiveStream(receiveStream, false);
            return;
        }

        audioRecordingStarted(ssrc, timestamp);
    }
}
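/*
 * A minimal sketch of the FlushableControl interface, inferred only from its
 * use in resetRecording() above (the real declaration in this code base may
 * carry more members): a JMF Control whose flush() empties whatever the owning
 * encoder has buffered, so that the old DataSink receives all remaining
 * encoded data before it is closed.
 */
public interface FlushableControl extends javax.media.Control
{
    /** Flushes buffered data to the downstream consumer. */
    void flush();
}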
private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB)
{
    if (receiveStream.format instanceof VideoFormat)
    {
        rtpConnector.packetBuffer.disable(receiveStream.ssrc);
        emptyPacketBuffer(receiveStream.ssrc);
    }

    if (receiveStream.dataSink != null)
    {
        try
        {
            receiveStream.dataSink.stop();
        }
        catch (IOException e)
        {
            logger.error("Failed to stop DataSink " + e);
        }
        receiveStream.dataSink.close();
    }

    if (receiveStream.processor != null)
    {
        receiveStream.processor.stop();
        receiveStream.processor.close();
    }

    DataSource dataSource = receiveStream.receiveStream.getDataSource();
    if (dataSource != null)
    {
        try
        {
            dataSource.stop();
        }
        catch (IOException ioe)
        {
            logger.warn("Failed to stop DataSource");
        }
        dataSource.disconnect();
    }

    synchronized (receiveStreams)
    {
        receiveStreams.remove(receiveStream);
    }
}
/**
 * Starts the desktop capture stream.
 *
 * @see AbstractPullBufferStream#start()
 */
@Override
public void start() throws IOException
{
    super.start();

    if (desktopInteract == null)
    {
        try
        {
            desktopInteract = new DesktopInteractImpl();
        }
        catch (Exception e)
        {
            logger.warn("Cannot create DesktopInteract object!");
        }
    }
}
/**
 * Sets the priority of the calling thread to a specific value.
 *
 * @param threadPriority the priority to be set on the calling thread
 */
public static void setThreadPriority(int threadPriority)
{
    Throwable exception = null;

    try
    {
        Process.setThreadPriority(threadPriority);
    }
    catch (IllegalArgumentException iae)
    {
        exception = iae;
    }
    catch (SecurityException se)
    {
        exception = se;
    }
    if (exception != null)
        logger.warn("Failed to set thread priority.", exception);
}
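/*
 * A minimal usage sketch, assuming the helper above is visible and wraps
 * android.os.Process.setThreadPriority(int). THREAD_PRIORITY_URGENT_AUDIO is a
 * standard android.os.Process constant. Because the priority applies to the
 * *calling* thread, the call must be made from inside the thread whose
 * priority we want to raise.
 */
Thread audioThread = new Thread()
{
    @Override
    public void run()
    {
        // Raise the priority of this (the audio) thread, not of the creator.
        setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
        // ... capture/process audio here ...
    }
};
audioThread.start();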
protected void doInitialize() throws Exception
{
    DSCaptureDevice[] devices = DSManager.getInstance().getCaptureDevices();
    boolean captureDeviceInfoIsAdded = false;

    for (int i = 0, count = (devices == null) ? 0 : devices.length; i < count; i++)
    {
        long pixelFormat = devices[i].getFormat().getPixelFormat();
        int ffmpegPixFmt = (int) DataSource.getFFmpegPixFmt(pixelFormat);
        Format format = null;

        if (ffmpegPixFmt != FFmpeg.PIX_FMT_NONE)
        {
            format = new AVFrameFormat(ffmpegPixFmt, (int) pixelFormat);
        }
        else
        {
            logger.warn("No support for this webcam: " + devices[i].getName()
                    + " (format " + pixelFormat + " not supported)");
            continue;
        }

        if (logger.isInfoEnabled())
        {
            for (DSFormat f : devices[i].getSupportedFormats())
            {
                if (f.getWidth() != 0 && f.getHeight() != 0)
                    logger.info("Webcam available resolution for "
                            + devices[i].getName() + ": "
                            + f.getWidth() + "x" + f.getHeight());
            }
        }

        CaptureDeviceInfo device = new CaptureDeviceInfo(
                devices[i].getName(),
                new MediaLocator(LOCATOR_PROTOCOL + ':' + devices[i].getName()),
                new Format[] { format });

        if (logger.isInfoEnabled())
            logger.info("Found[" + i + "]: " + device.getName());

        CaptureDeviceManager.addDevice(device);
        captureDeviceInfoIsAdded = true;
    }

    if (captureDeviceInfoIsAdded && !MediaServiceImpl.isJmfRegistryDisableLoad())
        CaptureDeviceManager.commit();

    DSManager.dispose();
}
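/*
 * For orientation: DataSource.getFFmpegPixFmt(long), used above, maps a native
 * DirectShow pixel format to the corresponding FFmpeg pixel format, returning
 * PIX_FMT_NONE when there is no counterpart (which is what makes the device be
 * skipped). A hypothetical sketch of that kind of mapping follows; the
 * DSFormat constant names and the exact coverage are assumptions, only
 * FFmpeg.PIX_FMT_NONE is taken from the code above.
 */
static long mapToFFmpegPixFmt(long dsPixelFormat)
{
    if (dsPixelFormat == DSFormat.RGB24)
        return FFmpeg.PIX_FMT_RGB24;   // packed 8:8:8 RGB
    if (dsPixelFormat == DSFormat.YUY2)
        return FFmpeg.PIX_FMT_YUYV422; // packed YUV 4:2:2
    if (dsPixelFormat == DSFormat.NV12)
        return FFmpeg.PIX_FMT_NV12;    // planar Y plus interleaved UV, 4:2:0
    return FFmpeg.PIX_FMT_NONE;        // no FFmpeg counterpart: reject device
}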
/** {@inheritDoc} */
@Override
protected int doProcess(Buffer inBuffer, Buffer outBuffer)
{
    byte[] inData = (byte[]) inBuffer.getData();
    int inOffset = inBuffer.getOffset();

    if (!VP8PayloadDescriptor.isValid(inData, inOffset))
    {
        logger.warn("Invalid RTP/VP8 packet discarded.");
        outBuffer.setDiscard(true);
        return BUFFER_PROCESSED_FAILED; // XXX: FAILED or OK?
    }

    long inSeq = inBuffer.getSequenceNumber();
    long inRtpTimestamp = inBuffer.getRtpTimeStamp();
    int inPictureId = VP8PayloadDescriptor.getPictureId(inData, inOffset);
    boolean inMarker = (inBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0;
    boolean inIsStartOfFrame = VP8PayloadDescriptor.isStartOfFrame(inData, inOffset);
    int inLength = inBuffer.getLength();
    int inPdSize = VP8PayloadDescriptor.getSize(inData, inOffset);
    int inPayloadLength = inLength - inPdSize;

    if (empty && lastSentSeq != -1 && seqNumComparator.compare(inSeq, lastSentSeq) != 1)
    {
        if (logger.isInfoEnabled())
            logger.info("Discarding old packet (while empty) " + inSeq);
        outBuffer.setDiscard(true);
        return BUFFER_PROCESSED_OK;
    }

    if (!empty)
    {
        // if the incoming packet has a different PictureID or timestamp
        // than those of the current frame, then it belongs to a different
        // frame.
        if ((inPictureId != -1 && pictureId != -1 && inPictureId != pictureId)
                || (timestamp != -1 && inRtpTimestamp != -1 && inRtpTimestamp != timestamp))
        {
            if (seqNumComparator.compare(inSeq, firstSeq) != 1) // inSeq <= firstSeq
            {
                // the packet belongs to a previous frame. discard it
                if (logger.isInfoEnabled())
                    logger.info("Discarding old packet " + inSeq);
                outBuffer.setDiscard(true);
                return BUFFER_PROCESSED_OK;
            }
            else // inSeq > firstSeq (and also presumably inSeq > lastSeq)
            {
                // the packet belongs to a subsequent frame (to the one
                // currently being held). Drop the current frame.
                if (logger.isInfoEnabled())
                    logger.info("Discarding saved packets on arrival of"
                            + " a packet for a subsequent frame: " + inSeq);

                // TODO: this would be the place to complain about the
                // not-well-received PictureID by sending a RTCP SLI or NACK.
                reinit();
            }
        }
    }

    // a whole frame in a single packet. avoid the extra copy to
    // this.data and output it immediately.
    if (empty && inMarker && inIsStartOfFrame)
    {
        byte[] outData = validateByteArraySize(outBuffer, inPayloadLength, false);
        System.arraycopy(inData, inOffset + inPdSize, outData, 0, inPayloadLength);
        outBuffer.setOffset(0);
        outBuffer.setLength(inPayloadLength);
        outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

        if (TRACE)
            logger.trace("Out PictureID=" + inPictureId);

        lastSentSeq = inSeq;
        return BUFFER_PROCESSED_OK;
    }

    // check for a duplicate before taking a Container from the pool, so
    // that the discard path does not leak a pooled Container
    if (data.get(inSeq) != null)
    {
        if (logger.isInfoEnabled())
            logger.info("(Probable) duplicate packet detected, discarding " + inSeq);
        outBuffer.setDiscard(true);
        return BUFFER_PROCESSED_OK;
    }

    // add to this.data
    Container container = free.poll();
    if (container == null)
        container = new Container();
    if (container.buf == null || container.buf.length < inPayloadLength)
        container.buf = new byte[inPayloadLength];

    System.arraycopy(inData, inOffset + inPdSize, container.buf, 0, inPayloadLength);
    container.len = inPayloadLength;
    data.put(inSeq, container);

    // update fields
    frameLength += inPayloadLength;
    if (firstSeq == -1 || seqNumComparator.compare(firstSeq, inSeq) == 1)
        firstSeq = inSeq;
    if (lastSeq == -1 || seqNumComparator.compare(inSeq, lastSeq) == 1)
        lastSeq = inSeq;

    if (empty)
    {
        // the first received packet for the current frame was just added
        empty = false;
        timestamp = inRtpTimestamp;
        pictureId = inPictureId;
    }

    if (inMarker)
        haveEnd = true;
    if (inIsStartOfFrame)
        haveStart = true;

    // check if we have a full frame
    if (frameComplete())
    {
        byte[] outData = validateByteArraySize(outBuffer, frameLength, false);
        int ptr = 0;
        Container b;
        for (Map.Entry<Long, Container> entry : data.entrySet())
        {
            b = entry.getValue();
            System.arraycopy(b.buf, 0, outData, ptr, b.len);
            ptr += b.len;
        }

        outBuffer.setOffset(0);
        outBuffer.setLength(frameLength);
        outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

        if (TRACE)
            logger.trace("Out PictureID=" + inPictureId);

        lastSentSeq = lastSeq;

        // prepare for the next frame
        reinit();

        return BUFFER_PROCESSED_OK;
    }
    else
    {
        // frame not complete yet
        outBuffer.setDiscard(true);
        return OUTPUT_BUFFER_NOT_FILLED;
    }
}
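/*
 * For reference, the start-of-frame test used above follows from the first
 * octet of the VP8 payload descriptor (RFC 7741):
 *
 *      0 1 2 3 4 5 6 7
 *     +-+-+-+-+-+-+-+-+
 *     |X|R|N|S|R| PID |
 *
 * A packet starts a frame when the S (start of partition) bit is set and the
 * partition index (PID) is zero. A minimal sketch of such a check; the actual
 * VP8PayloadDescriptor.isStartOfFrame may differ in detail:
 */
public static boolean isStartOfFrame(byte[] buf, int off)
{
    byte first = buf[off];
    return (first & 0x10) != 0   // S bit: first packet of a partition
        && (first & 0x07) == 0;  // PID == 0: the frame's first partition
}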
/**
 * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events
 * from the <tt>Processor</tt>s that this instance uses to transcode media.
 *
 * @param ev the event to handle.
 */
public void controllerUpdate(ControllerEvent ev)
{
    if (ev == null || ev.getSourceController() == null)
    {
        return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null)
    {
        logger.warn("Event from an orphaned processor, ignoring: " + ev);
        return;
    }

    if (ev instanceof ConfigureCompleteEvent)
    {
        if (logger.isInfoEnabled())
        {
            logger.info("Configured processor for ReceiveStream ssrc=" + desc.ssrc
                    + " (" + desc.format + ") " + System.currentTimeMillis());
        }

        boolean audio = desc.format instanceof AudioFormat;

        if (audio)
        {
            ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
            if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd))
            {
                logger.error("Failed to set the Processor content descriptor to "
                        + AUDIO_CONTENT_DESCRIPTOR + ". Actual result: " + cd);
                removeReceiveStream(desc, false);
                return;
            }
        }

        for (TrackControl track : processor.getTrackControls())
        {
            Format trackFormat = track.getFormat();

            if (audio)
            {
                final long ssrc = desc.ssrc;
                SilenceEffect silenceEffect;
                if (Constants.OPUS_RTP.equals(desc.format.getEncoding()))
                {
                    silenceEffect = new SilenceEffect(48000);
                }
                else
                {
                    // We haven't tested that the RTP timestamps survive
                    // the journey through the chain when codecs other than
                    // opus are in use, so for the moment we rely on FMJ's
                    // timestamps for non-opus formats.
                    silenceEffect = new SilenceEffect();
                }

                silenceEffect.setListener(new SilenceEffect.Listener()
                {
                    boolean first = true;

                    @Override
                    public void onSilenceNotInserted(long timestamp)
                    {
                        if (first)
                        {
                            first = false;
                            // send event only
                            audioRecordingStarted(ssrc, timestamp);
                        }
                        else
                        {
                            // change file and send event
                            resetRecording(ssrc, timestamp);
                        }
                    }
                });
                desc.silenceEffect = silenceEffect;

                AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
                audioLevelEffect.setAudioLevelListener(new SimpleAudioLevelListener()
                {
                    @Override
                    public void audioLevelChanged(int level)
                    {
                        activeSpeakerDetector.levelChanged(ssrc, level);
                    }
                });

                try
                {
                    // We add an effect, which will insert "silence" in
                    // place of lost packets.
                    track.setCodecChain(new Codec[] { silenceEffect, audioLevelEffect });
                }
                catch (UnsupportedPlugInException upie)
                {
                    logger.warn("Failed to insert silence effect: " + upie);
                    // But do go on, a recording without extra silence is
                    // better than nothing ;)
                }
            }
            else
            {
                // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
                if (trackFormat.matches(vp8RtpFormat))
                    track.setFormat(vp8Format);
                else
                {
                    logger.error("Unsupported track format: " + trackFormat
                            + " for ssrc=" + desc.ssrc);
                    // we currently only support vp8
                    removeReceiveStream(desc, false);
                    return;
                }
            }
        }

        processor.realize();
    }
    else if (ev instanceof RealizeCompleteEvent)
    {
        desc.dataSource = processor.getDataOutput();

        long ssrc = desc.ssrc;
        boolean audio = desc.format instanceof AudioFormat;
        String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

        // XXX '\' on windows?
        String filename = getNextFilename(path + "/" + ssrc, suffix);
        desc.filename = filename;

        DataSink dataSink;
        if (audio)
        {
            try
            {
                dataSink = Manager.createDataSink(desc.dataSource,
                        new MediaLocator("file:" + filename));
            }
            catch (NoDataSinkException ndse)
            {
                logger.error("Could not create DataSink: " + ndse);
                removeReceiveStream(desc, false);
                return;
            }
        }
        else
        {
            dataSink = new WebmDataSink(filename, desc.dataSource);
        }

        if (logger.isInfoEnabled())
            logger.info("Created DataSink (" + dataSink + ") for SSRC=" + ssrc
                    + ". Output filename: " + filename);

        try
        {
            dataSink.open();
        }
        catch (IOException e)
        {
            logger.error("Failed to open DataSink (" + dataSink + ") for"
                    + " SSRC=" + ssrc + ": " + e);
            removeReceiveStream(desc, false);
            return;
        }

        if (!audio)
        {
            final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
            webmDataSink.setSsrc(ssrc);
            webmDataSink.setEventHandler(eventHandler);
            webmDataSink.setKeyFrameControl(new KeyFrameControlAdapter()
            {
                @Override
                public boolean requestKeyFrame(boolean urgent)
                {
                    return requestFIR(webmDataSink);
                }
            });
        }

        try
        {
            dataSink.start();
        }
        catch (IOException e)
        {
            logger.error("Failed to start DataSink (" + dataSink + ") for"
                    + " SSRC=" + ssrc + ". " + e);
            removeReceiveStream(desc, false);
            return;
        }

        if (logger.isInfoEnabled())
            logger.info("Started DataSink for SSRC=" + ssrc);

        desc.dataSink = dataSink;

        processor.start();
    }
    else if (logger.isDebugEnabled())
    {
        logger.debug("Unhandled ControllerEvent from the Processor for ssrc="
                + desc.ssrc + ": " + ev);
    }
}
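/*
 * getNextFilename(base, suffix), used above and in resetRecording(), is
 * assumed to pick the first file name that does not yet exist on disk, so that
 * a restarted recording never overwrites an earlier one. A minimal sketch of
 * that behavior; the real helper may use a different numbering scheme or bound:
 */
private String getNextFilename(String base, String suffix)
{
    String filename = base + suffix;
    int i = 1;
    while (new java.io.File(filename).exists())
    {
        if (i > 1000) // safety bound: give up rather than loop forever
            return null;
        filename = base + "-" + (i++) + suffix;
    }
    return filename;
}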
/**
 * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
 *
 * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
 */
@Override
public void update(ReceiveStreamEvent event)
{
    if (event == null)
        return;
    ReceiveStream receiveStream = event.getReceiveStream();

    if (event instanceof NewReceiveStreamEvent)
    {
        if (receiveStream == null)
        {
            logger.warn("NewReceiveStreamEvent: null");
            return;
        }

        final long ssrc = getReceiveStreamSSRC(receiveStream);

        ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);

        if (receiveStreamDesc != null)
        {
            String s = "NewReceiveStreamEvent for an existing SSRC. ";
            if (receiveStream != receiveStreamDesc.receiveStream)
                s += "(but different ReceiveStream object)";
            logger.warn(s);
            return;
        }
        else
            receiveStreamDesc = new ReceiveStreamDesc(receiveStream);

        if (logger.isInfoEnabled())
            logger.info("New ReceiveStream, ssrc=" + ssrc);

        // Find the format of the ReceiveStream
        DataSource dataSource = receiveStream.getDataSource();
        if (dataSource instanceof PushBufferDataSource)
        {
            Format format = null;
            PushBufferDataSource pbds = (PushBufferDataSource) dataSource;
            for (PushBufferStream pbs : pbds.getStreams())
            {
                if ((format = pbs.getFormat()) != null)
                    break;
            }

            if (format == null)
            {
                logger.error("Failed to handle new ReceiveStream: "
                        + "Failed to determine format");
                return;
            }

            receiveStreamDesc.format = format;
        }
        else
        {
            logger.error("Failed to handle new ReceiveStream: Unsupported DataSource");
            return;
        }

        int rtpClockRate = -1;
        if (receiveStreamDesc.format instanceof AudioFormat)
            rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
        else if (receiveStreamDesc.format instanceof VideoFormat)
            rtpClockRate = 90000;
        getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

        // create a Processor and configure it
        Processor processor = null;
        try
        {
            processor = Manager.createProcessor(receiveStream.getDataSource());
        }
        catch (NoProcessorException npe)
        {
            logger.error("Failed to create Processor: ", npe);
            return;
        }
        catch (IOException ioe)
        {
            logger.error("Failed to create Processor: ", ioe);
            return;
        }

        if (logger.isInfoEnabled())
            logger.info("Created processor for SSRC=" + ssrc);

        processor.addControllerListener(this);
        receiveStreamDesc.processor = processor;

        final int streamCount;
        synchronized (receiveStreams)
        {
            receiveStreams.add(receiveStreamDesc);
            streamCount = receiveStreams.size();
        }

        /*
         * XXX TODO IRBABOON
         * This is a terrible hack which works around a failure to realize()
         * some of the Processor-s for audio streams, when multiple streams
         * start nearly simultaneously. The cause of the problem is currently
         * unknown (and synchronizing all FMJ calls in RecorderRtpImpl
         * does not help).
         * XXX TODO NOOBABRI
         */
        if (receiveStreamDesc.format instanceof AudioFormat)
        {
            final Processor p = processor;
            new Thread()
            {
                @Override
                public void run()
                {
                    // delay configuring the processors for the different
                    // audio streams to decrease the probability that they
                    // run together.
                    try
                    {
                        int ms = 450 * (streamCount - 1);
                        logger.warn("Sleeping for " + ms + "ms before"
                                + " configuring processor for SSRC=" + ssrc
                                + " " + System.currentTimeMillis());
                        Thread.sleep(ms);
                    }
                    catch (Exception e)
                    {
                        // interrupted; configure immediately
                    }

                    p.configure();
                }
            }.start(); // start(), not run(): the staggered delay must not
                       // block the thread delivering ReceiveStreamEvents.
        }
        else
        {
            processor.configure();
        }
    }
    else if (event instanceof TimeoutEvent)
    {
        if (receiveStream == null)
        {
            // TODO: we might want to get the list of ReceiveStream-s from
            // rtpManager and compare it to our list, to see if we should
            // remove a stream.
            logger.warn("TimeoutEvent: null.");
            return;
        }

        // FMJ silently creates new ReceiveStream instances, so we have to
        // recognize them by the SSRC.
        ReceiveStreamDesc receiveStreamDesc
                = findReceiveStream(getReceiveStreamSSRC(receiveStream));
        if (receiveStreamDesc != null)
        {
            if (logger.isInfoEnabled())
            {
                logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc);
            }

            removeReceiveStream(receiveStreamDesc, true);
        }
    }
    else if (logger.isInfoEnabled())
    {
        logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName()
                + "): " + event);
    }
}
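/*
 * getReceiveStreamSSRC(ReceiveStream), used above, is assumed to normalize the
 * SSRC: javax.media.rtp.ReceiveStream.getSSRC() returns the 32-bit SSRC
 * sign-extended into a signed long, while this class keys streams by the
 * unsigned value. A sketch of that normalization:
 */
private static long getReceiveStreamSSRC(ReceiveStream receiveStream)
{
    // Mask to the low 32 bits to obtain the unsigned SSRC.
    return receiveStream.getSSRC() & 0xFFFFFFFFL;
}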
/**
 * Listens for incoming datagrams, stores them for reading by the <tt>read</tt> method and
 * notifies the local <tt>transferHandler</tt> that there's data to be read.
 */
public void run()
{
    DatagramPacket p = new DatagramPacket(buffer, 0, PACKET_RECEIVE_BUFFER_LENGTH);

    while (!closed)
    {
        try
        {
            // http://code.google.com/p/android/issues/detail?id=24765
            if (OSUtils.IS_ANDROID)
                p.setLength(PACKET_RECEIVE_BUFFER_LENGTH);

            receivePacket(p);
        }
        catch (IOException e)
        {
            ioError = true;
            break;
        }

        /*
         * Do the DatagramPacketFilters accept the received DatagramPacket?
         */
        DatagramPacketFilter[] datagramPacketFilters = getDatagramPacketFilters();
        boolean accept;

        if (!enabled)
            accept = false;
        else if (datagramPacketFilters == null)
            accept = true;
        else
        {
            accept = true;
            for (int i = 0; i < datagramPacketFilters.length; i++)
            {
                try
                {
                    if (!datagramPacketFilters[i].accept(p))
                    {
                        accept = false;
                        break;
                    }
                }
                catch (Throwable t)
                {
                    // A misbehaving filter must not kill this receive
                    // thread; only ThreadDeath is allowed to propagate.
                    if (t instanceof ThreadDeath)
                        throw (ThreadDeath) t;
                }
            }
        }

        if (accept)
        {
            RawPacket[] pkts = createRawPacket(p);

            for (int i = 0; i < pkts.length; i++)
            {
                RawPacket pkt = pkts[i];

                pkts[i] = null;

                if (pkt != null)
                {
                    if (pkt.isInvalid())
                    {
                        /*
                         * Return pkt to the pool because it is invalid and,
                         * consequently, will not be made available to
                         * reading.
                         */
                        poolRawPacket(pkt);
                    }
                    else
                    {
                        RawPacket oldPkt;

                        synchronized (pktSyncRoot)
                        {
                            oldPkt = this.pkt;
                            this.pkt = pkt;
                        }
                        if (oldPkt != null)
                        {
                            /*
                             * Return oldPkt to the pool because it was made
                             * available to reading and it was not read.
                             */
                            poolRawPacket(oldPkt);
                        }

                        if (videoRecorder != null)
                            videoRecorder.recordData(pkt);

                        if ((transferHandler != null) && !closed)
                        {
                            try
                            {
                                transferHandler.transferData(this);
                            }
                            catch (Throwable t)
                            {
                                /*
                                 * XXX We cannot allow transferHandler to
                                 * kill us.
                                 */
                                if (t instanceof ThreadDeath)
                                {
                                    throw (ThreadDeath) t;
                                }
                                else
                                {
                                    logger.warn("An RTP packet may have not been"
                                            + " fully handled.", t);
                                }
                            }
                        }
                    }
                }
            }
            rawPacketArrayPool.offer(pkts);
        }
    }
}
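/*
 * A minimal sketch of a DatagramPacketFilter that the loop above could
 * consult: accept only datagrams that plausibly carry RTP (version 2, and a
 * second byte outside the common RTCP packet-type range 200..211, in the
 * spirit of RFC 5761's RTP/RTCP demultiplexing rule). This filter is an
 * illustration, not one shipped with the code above; only the
 * DatagramPacketFilter.accept(DatagramPacket) contract is taken from its use
 * in run().
 */
public class RtpOnlyFilter implements DatagramPacketFilter
{
    @Override
    public boolean accept(DatagramPacket p)
    {
        byte[] buf = p.getData();
        int off = p.getOffset();
        int len = p.getLength();

        if (len < 12)
            return false; // shorter than a fixed RTP header
        if (((buf[off] & 0xC0) >>> 6) != 2)
            return false; // the RTP/RTCP version field must be 2

        // For RTCP the second octet is the packet type (200=SR, 201=RR, ...);
        // for RTP it is the marker bit plus the 7-bit payload type.
        int secondByte = buf[off + 1] & 0xFF;
        return secondByte < 200 || secondByte > 211;
    }
}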