/**
 * Gets the <tt>MediaFormat</tt>s among the specified <tt>mediaFormats</tt> which have the
 * specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>.
 *
 * @param mediaFormats the <tt>MediaFormat</tt>s from which to filter out only the ones which have
 *     the specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>
 * @param encoding the well-known encoding (name) of the <tt>MediaFormat</tt>s to be retrieved
 * @param clockRate the clock rate of the <tt>MediaFormat</tt>s to be retrieved; {@link
 *     #CLOCK_RATE_NOT_SPECIFIED} if any clock rate is acceptable
 * @return a <tt>List</tt> of the <tt>MediaFormat</tt>s among <tt>mediaFormats</tt> which have the
 *     specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>
 */
private List<MediaFormat> getMatchingMediaFormats(
    MediaFormat[] mediaFormats, String encoding, double clockRate) {
  /*
   * XXX Use String#equalsIgnoreCase(String) because some clients transmit
   * some of the codecs starting with capital letters.
   */

  /*
   * As per RFC 3551, Section 4.5.2, because of a mistake in RFC 1890 and for
   * backward compatibility, G.722 should always be announced as 8000 even
   * though it is wideband. So, if someone is looking for G722/16000,
   * then: Forgive them, for they know not what they do!
   */
  if ("G722".equalsIgnoreCase(encoding) && (16000 == clockRate)) {
    clockRate = 8000;
    if (logger.isInfoEnabled()) logger.info("Suppressing erroneous 16000 announcement for G.722");
  }

  List<MediaFormat> supportedMediaFormats = new ArrayList<MediaFormat>();
  for (MediaFormat mediaFormat : mediaFormats) {
    if (mediaFormat.getEncoding().equalsIgnoreCase(encoding)
        && ((CLOCK_RATE_NOT_SPECIFIED == clockRate)
            || (mediaFormat.getClockRate() == clockRate))) {
      supportedMediaFormats.add(mediaFormat);
    }
  }
  return supportedMediaFormats;
}
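// A self-contained sketch of the matching rules implemented above, with plain strings and
// doubles standing in for the real MediaFormat interface (a simplification made purely for
// illustration). It shows the case-insensitive encoding comparison and the G.722 clock-rate
// normalization required by RFC 3551, Section 4.5.2.
final class FormatMatchSketch {
  static final double CLOCK_RATE_NOT_SPECIFIED = -1;

  static boolean matches(
      String fmtEncoding, double fmtClockRate, String encoding, double clockRate) {
    // mirror the G722/16000 -> G722/8000 normalization from getMatchingMediaFormats
    if ("G722".equalsIgnoreCase(encoding) && clockRate == 16000) clockRate = 8000;
    return fmtEncoding.equalsIgnoreCase(encoding)
        && (clockRate == CLOCK_RATE_NOT_SPECIFIED || fmtClockRate == clockRate);
  }

  public static void main(String[] args) {
    System.out.println(matches("g722", 8000, "G722", 16000)); // true: 16000 normalized to 8000
    System.out.println(matches("g722", 8000, "G722", 8000)); // true: exact match
    System.out.println(matches("opus", 48000, "G722", 8000)); // false: different encoding
  }
}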
/**
 * Restarts the recording for a specific SSRC.
 *
 * @param ssrc the SSRC for which to restart recording.
 * @param timestamp the timestamp (of the first RTP packet of the new recording).
 */
private void resetRecording(long ssrc, long timestamp) {
  ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);

  // we only restart audio recordings
  if (receiveStream != null && receiveStream.format instanceof AudioFormat) {
    String newFilename = getNextFilename(path + "/" + ssrc, AUDIO_FILENAME_SUFFIX);

    // flush the buffer contained in the MP3 encoder
    String s = "trying to flush ssrc=" + ssrc;
    Processor p = receiveStream.processor;
    if (p != null) {
      s += " p!=null";
      for (TrackControl tc : p.getTrackControls()) {
        Object o = tc.getControl(FlushableControl.class.getName());
        if (o != null) ((FlushableControl) o).flush();
      }
    }

    if (logger.isInfoEnabled()) {
      logger.info("Restarting recording for SSRC=" + ssrc + ". New filename: " + newFilename);
    }

    receiveStream.dataSink.close();
    receiveStream.dataSink = null;

    // flush the FMJ jitter buffer
    // DataSource ds = receiveStream.receiveStream.getDataSource();
    // if (ds instanceof net.sf.fmj.media.protocol.rtp.DataSource)
    //   ((net.sf.fmj.media.protocol.rtp.DataSource) ds).flush();

    receiveStream.filename = newFilename;
    try {
      receiveStream.dataSink =
          Manager.createDataSink(
              receiveStream.dataSource, new MediaLocator("file:" + newFilename));
    } catch (NoDataSinkException ndse) {
      logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ndse);
      removeReceiveStream(receiveStream, false);
      // without a DataSink there is nothing to open and start below
      return;
    }

    try {
      receiveStream.dataSink.open();
      receiveStream.dataSink.start();
    } catch (IOException ioe) {
      logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ioe);
      removeReceiveStream(receiveStream, false);
      return;
    }

    audioRecordingStarted(ssrc, timestamp);
  }
}
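// The getNextFilename helper used above is not shown in this section. A minimal, hypothetical
// sketch of the rotation pattern its call site implies (return the first "prefix[-N]suffix"
// that does not yet exist on disk) follows; the real helper's exact semantics are an
// assumption here, so the sketch deliberately uses a distinct name.
import java.io.File;

final class FilenameRotationSketch {
  static String getNextFilenameSketch(String prefix, String suffix) {
    // e.g. "/path/12345.mp3", then "/path/12345-1.mp3", "/path/12345-2.mp3", ...
    String name = prefix + suffix;
    for (int i = 1; new File(name).exists(); i++) {
      name = prefix + "-" + i + suffix;
    }
    return name;
  }
}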
protected void doInitialize() throws Exception {
  DSCaptureDevice[] devices = DSManager.getInstance().getCaptureDevices();
  boolean captureDeviceInfoIsAdded = false;

  for (int i = 0, count = (devices == null) ? 0 : devices.length; i < count; i++) {
    long pixelFormat = devices[i].getFormat().getPixelFormat();
    int ffmpegPixFmt = (int) DataSource.getFFmpegPixFmt(pixelFormat);
    Format format = null;

    if (ffmpegPixFmt != FFmpeg.PIX_FMT_NONE) {
      format = new AVFrameFormat(ffmpegPixFmt, (int) pixelFormat);
    } else {
      logger.warn(
          "No support for this webcam: "
              + devices[i].getName()
              + " (format "
              + pixelFormat
              + " not supported)");
      continue;
    }

    if (logger.isInfoEnabled()) {
      for (DSFormat f : devices[i].getSupportedFormats()) {
        if (f.getWidth() != 0 && f.getHeight() != 0)
          logger.info(
              "Webcam available resolution for "
                  + devices[i].getName()
                  + ": "
                  + f.getWidth()
                  + "x"
                  + f.getHeight());
      }
    }

    CaptureDeviceInfo device =
        new CaptureDeviceInfo(
            devices[i].getName(),
            new MediaLocator(LOCATOR_PROTOCOL + ':' + devices[i].getName()),
            new Format[] {format});

    if (logger.isInfoEnabled()) logger.info("Found[" + i + "]: " + device.getName());

    CaptureDeviceManager.addDevice(device);
    captureDeviceInfoIsAdded = true;
  }

  if (captureDeviceInfoIsAdded && !MediaServiceImpl.isJmfRegistryDisableLoad())
    CaptureDeviceManager.commit();

  DSManager.dispose();
}
// methods start, stop, forward, rewind, insert-time, position moved as static to h4JmfPlugin
// TODO: build in a small delay for forward/rewind??? sometimes the player does not respond
// well?? why??
// EBComponent interface
public void handleMessage(EBMessage message) {
  logger.info(message.toString());
  if (message instanceof PropertiesChanged) {
    // propertiesChanged();
  }
  // AWT-EventQueue-0: INFO: EditorExiting[source=null]
  else if (message instanceof org.gjt.sp.jedit.msg.PluginUpdate) {
    Object what = ((PluginUpdate) message).getWhat();
    if (what.equals(PluginUpdate.DEACTIVATED)) {
      // if(h4JmfPlugin.playMP3!=null) h4JmfPlugin.playMP3.close();
      // h4JmfPlugin.playMP3=null;
      logger.info("close done in h4JmfPlugin");
    }
  } // PluginUpdate
} // handleMessage
public h4Jmf(View view, String position) { super(); count++; logger.info("args constructor " + count); this.setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); // logger.info("view="+view+" position="+position); this.view = view; // position.equals(DockableWindowManager.FLOATING); // cnsl=new Console(100); h4JmfPlugin.cnsl.setPreferredSize(new Dimension(500, 200)); JScrollPane jscrllpn = new JScrollPane(h4JmfPlugin.cnsl); // jscrllpn.setPreferredSize(new Dimension(500,200)); // add(BorderLayout.SOUTH,jscrllpn); add(jscrllpn); if (h4JmfPlugin.playMP3 != null) { java.awt.Component cpc = h4JmfPlugin.playMP3.getControlPanelComponent(); // logger.info("cpc="+cpc); if (cpc != null) // after panel has been closed, new instance !! { cpc.setPreferredSize(new Dimension(500, 50)); this.add(cpc); } } this.setPreferredSize(new Dimension(500, 300)); // cnsl.append("begin"); // cnsl.append("SettingsDirectory="+jEdit.getSettingsDirectory()); h4JmfPlugin.cnsl.append("args constructor " + count); // EditBus.addToBus(this);//moved to notify /** * *********************** //check a few jmf-classes begin done in h4JmfPlugin Class tst=null; * try { tst=Class.forName("com.sun.media.codec.audio.mp3.JavaDecoder"); jmf_ok=true; * cnsl.append("jmf Java Media Framework seems to be installed."); } catch(Exception excptn) { * cnsl.append("trying presence of jmf"); cnsl.append(excptn); cnsl.append("jmf Java Media * Framework does not seem to be installed!"); cnsl.append("see README for more info"); } * //check a few jmf-classes end ** */ if (!h4JmfPlugin.jmf_ok) { h4JmfPlugin.cnsl.append("JMF Java Media Framework does not seem to be installed!"); h4JmfPlugin.cnsl.append("see README for more info"); } logger.info("constructor end"); } // constructor
@Override public Format[] getSupportedOutputFormats(Format input) { if (input == null) return outputFormats; else { if (!(input instanceof AudioFormat)) { logger.warning( this.getClass().getSimpleName() + ".getSupportedOutputFormats: input format does not match, returning format array of {null} for " + input); // this can cause an NPE in JMF if it ever // happens. return new Format[] {null}; } final AudioFormat inputCast = (AudioFormat) input; if (!inputCast.getEncoding().equals(AudioFormat.ALAW) || (inputCast.getSampleSizeInBits() != 8 && inputCast.getSampleSizeInBits() != Format.NOT_SPECIFIED) || (inputCast.getChannels() != 1 && inputCast.getChannels() != Format.NOT_SPECIFIED) || (inputCast.getFrameSizeInBits() != 8 && inputCast.getFrameSizeInBits() != Format.NOT_SPECIFIED)) { logger.warning( this.getClass().getSimpleName() + ".getSupportedOutputFormats: input format does not match, returning format array of {null} for " + input); // this can cause an NPE in JMF if it ever // happens. return new Format[] {null}; } final AudioFormat result = new AudioFormat( BonusAudioFormatEncodings.ALAW_RTP, inputCast.getSampleRate(), 8, 1, inputCast.getEndian(), inputCast.getSigned(), 8, inputCast.getFrameRate(), inputCast.getDataType()); return new Format[] {result}; } }
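// A short usage sketch (assuming JMF/FMJ on the classpath; the codec instance is whatever
// class encloses the method above): an 8-bit, mono A-law AudioFormat yields the single
// matching ALAW/rtp output format, while any other input yields {null}, which JMF treats as
// "no supported output".
import javax.media.Codec;
import javax.media.Format;
import javax.media.format.AudioFormat;

final class AlawOutputsSketch {
  static void print(Codec alawEncoder) {
    // endian, signed and frame size default to NOT_SPECIFIED, so the checks above pass
    AudioFormat alaw8kMono = new AudioFormat(AudioFormat.ALAW, 8000, 8, 1);
    for (Format f : alawEncoder.getSupportedOutputFormats(alaw8kMono)) {
      System.out.println(f); // expected: one ALAW/rtp AudioFormat
    }
  }
}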
private InputStream getInputStream( String urlStr, Format outputFormat, ContentDescriptor outputContentDescriptor) throws Exception { final ProcessorModel processorModel = new ProcessorModel( new MediaLocator(urlStr), outputFormat == null ? null : new Format[] {outputFormat}, outputContentDescriptor); final Processor processor = Manager.createRealizedProcessor(processorModel); final DataSource ds = processor.getDataOutput(); final DataSink[] streamDataSinkHolder = new DataSink[] {null}; // connect the data output of the processor to a StreamDataSink, which // will make the data available to PipedInputStream, which we return. final PipedInputStream in = new PipedInputStream() { // override close to clean up everything when the media has been // served. @Override public void close() throws IOException { super.close(); logger.fine("Closed input stream"); logger.fine("Stopping processor"); processor.stop(); logger.fine("Closing processor"); processor.close(); logger.fine("Deallocating processor"); processor.deallocate(); if (streamDataSinkHolder[0] != null) { logger.fine("Closing StreamDataSink"); streamDataSinkHolder[0].close(); } } }; final PipedOutputStream out = new PipedOutputStream(in); final DataSink streamDataSink = new StreamDataSink(out); streamDataSinkHolder[0] = streamDataSink; streamDataSink.setSource(ds); streamDataSink.open(); streamDataSink.start(); logger.info("Starting processor"); processor.start(); // TODO: if there is an error, make sure we clean up. // for example, if the client breaks the connection. // we need a controller listener to listen for errors. return in; }
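// A usage sketch for getInputStream, assumed to live in the same class (file paths are
// placeholders and the standard java.io imports are assumed): transcode a local file to MPEG
// audio and copy the piped stream to disk. Closing the returned stream is what stops, closes
// and deallocates the Processor (see the close() override above), so try-with-resources
// doubles as cleanup.
void transcodeToFileSketch() throws Exception {
  ContentDescriptor cd = new ContentDescriptor(FileTypeDescriptor.MPEG_AUDIO);
  try (InputStream in = getInputStream("file:/tmp/input.wav", null, cd);
      OutputStream out = new FileOutputStream("/tmp/output.mp3")) {
    byte[] buf = new byte[4096];
    int n;
    while ((n = in.read(buf)) != -1) {
      out.write(buf, 0, n);
    }
  }
}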
/** * Sets the priority of the calling thread to a specific value. * * @param threadPriority the priority to be set on the calling thread */ public static void setThreadPriority(int threadPriority) { Throwable exception = null; try { Process.setThreadPriority(threadPriority); } catch (IllegalArgumentException iae) { exception = iae; } catch (SecurityException se) { exception = se; } if (exception != null) logger.warn("Failed to set thread priority.", exception); }
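// Typical call site on Android (a sketch; whether this particular priority is appropriate
// depends on the caller): raise the current audio thread's priority using a constant from
// android.os.Process.
setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);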
/** Configures echo cancellation and noise suppression effects. */ @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void configureEffects() { if (!AndroidUtils.hasAPI(16)) return; AudioSystem audioSystem = AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_AUDIORECORD); // Creates echo canceler if available if (AcousticEchoCanceler.isAvailable()) { AcousticEchoCanceler echoCanceller = AcousticEchoCanceler.create(audioRecord.getAudioSessionId()); if (echoCanceller != null) { echoCanceller.setEnableStatusListener(this); echoCanceller.setEnabled(audioSystem.isEchoCancel()); logger.info("Echo cancellation: " + echoCanceller.getEnabled()); } } // Automatic gain control if (AutomaticGainControl.isAvailable()) { AutomaticGainControl agc = AutomaticGainControl.create(audioRecord.getAudioSessionId()); if (agc != null) { agc.setEnableStatusListener(this); agc.setEnabled(audioSystem.isAutomaticGainControl()); logger.info("Auto gain control: " + agc.getEnabled()); } } // Creates noise suppressor if available if (NoiseSuppressor.isAvailable()) { NoiseSuppressor noiseSuppressor = NoiseSuppressor.create(audioRecord.getAudioSessionId()); if (noiseSuppressor != null) { noiseSuppressor.setEnableStatusListener(this); noiseSuppressor.setEnabled(audioSystem.isDenoise()); logger.info("Noise suppressor: " + noiseSuppressor.getEnabled()); } } }
private void emptyPacketBuffer(long ssrc) { RawPacket[] pkts = rtpConnector.packetBuffer.emptyBuffer(ssrc); RTPConnectorImpl.OutputDataStreamImpl dataStream; try { dataStream = rtpConnector.getDataOutputStream(); } catch (IOException ioe) { logger.error("Failed to empty packet buffer for SSRC=" + ssrc + ": " + ioe); return; } for (RawPacket pkt : pkts) dataStream.write( pkt.getBuffer(), pkt.getOffset(), pkt.getLength(), false /* already transformed */); }
public void player_begin(URL url) { if (h4JmfPlugin.playMP3 != null) { logger.severe("playMP3!=null"); h4JmfPlugin.cnsl.append("playMP3!=null"); return; } if (url == null) { h4JmfPlugin.cnsl.append("url==null"); return; } MediaLocator mediaLocator = new MediaLocator(url); try { // final JPanel jpnl_this=this; DataSource ds = Manager.createDataSource(mediaLocator); // cnsl.append("ds="+ds); h4JmfPlugin.playMP3 = Manager.createPlayer(ds); /** * ********************************************************** ControllerListener moved to * outer class addControllerListener done in h4JmfPlugin ********************************** */ h4JmfPlugin.player_begin(); } catch (Exception e) { logger.severe(e.getMessage()); h4JmfPlugin.cnsl.append(e); return; } // h4JmfPlugin.playMP3.realize(); // logger.info("after realize()"); // but possible [JMF thread: com.sun.media.PlaybackEngine@1ac13d7[ // com.sun.media.PlaybackEngine@1ac13d7 ] ( realizeThread)] [error] PlaybackEngine@1ac13d7 ] ( // realizeThread): Unable to handle format: mpeglayer3, 16000.0 Hz, 16-bit, Mono, // LittleEndian, Signed, 2000.0 frame rate, FrameSize=16384 bits // running tshvr under hedwig :11:17:08 PM [JMF thread: // com.sun.media.content.unknown.Handler@8c7be5 ( prefetchThread)] [error] Handler@8c7be5 ( // prefetchThread): Error: Unable to prefetch com.sun.media.PlaybackEngine@6d3b92 } // player_begin
/** * Create preview component. * * @param type type * @param comboBox the options. * @param prefSize the preferred size * @return the component. */ private static Component createPreview(int type, final JComboBox comboBox, Dimension prefSize) { JComponent preview = null; if (type == DeviceConfigurationComboBoxModel.AUDIO) { Object selectedItem = comboBox.getSelectedItem(); if (selectedItem instanceof AudioSystem) { AudioSystem audioSystem = (AudioSystem) selectedItem; if (!NoneAudioSystem.LOCATOR_PROTOCOL.equalsIgnoreCase(audioSystem.getLocatorProtocol())) { preview = new TransparentPanel(new GridBagLayout()); createAudioSystemControls(audioSystem, preview); } } } else if (type == DeviceConfigurationComboBoxModel.VIDEO) { JLabel noPreview = new JLabel( NeomediaActivator.getResources().getI18NString("impl.media.configform.NO_PREVIEW")); noPreview.setHorizontalAlignment(SwingConstants.CENTER); noPreview.setVerticalAlignment(SwingConstants.CENTER); preview = createVideoContainer(noPreview); preview.setPreferredSize(prefSize); Object selectedItem = comboBox.getSelectedItem(); CaptureDeviceInfo device = null; if (selectedItem instanceof DeviceConfigurationComboBoxModel.CaptureDevice) device = ((DeviceConfigurationComboBoxModel.CaptureDevice) selectedItem).info; Exception exception; try { createVideoPreview(device, preview); exception = null; } catch (IOException ex) { exception = ex; } catch (MediaException ex) { exception = ex; } if (exception != null) { logger.error("Failed to create preview for device " + device, exception); device = null; } } return preview; }
private void fillBuffer() { if (buffer == null) { buffer = new Buffer(); buffer.setFormat(track.getFormat()); } do { if (buffer.isEOM()) return; if (buffer.getLength() > 0) return; // still have data in buffer // TODO: any fields to set? track.readFrame(buffer); logger.fine("Read buffer from track: " + buffer.getLength()); } while (buffer.isDiscard()); }
// @Override @Override public void open() throws ResourceUnavailableException { try { // source.connect(); // TODO: assume source is already connected source.start(); // TODO: stop/disconnect on stop/close. final PullSourceStream[] streams = source.getStreams(); tracks = new PullSourceStreamTrack[streams.length]; for (int i = 0; i < streams.length; ++i) { tracks[i] = new VideoTrack(streams[i]); } } catch (IOException e) { logger.log(Level.WARNING, "" + e, e); throw new ResourceUnavailableException("" + e); } super.open(); }
public void chooseFile() {
  if (!h4JmfPlugin.jmf_ok) return;
  if (h4JmfPlugin.playMP3 != null) {
    h4JmfPlugin.cnsl.append("playMP3!=null");
    return;
  }
  String tmpdir = System.getProperty("java.io.tmpdir");
  String[] paths =
      GUIUtilities.showVFSFileDialog(
          view, tmpdir + File.separator, JFileChooser.OPEN_DIALOG, false);
  // if(paths!=null && !paths[0].equals(filename))
  if (paths != null) {
    String filename = paths[0];
    h4JmfPlugin.cnsl.append("filename=" + filename);
    try {
      // File.toURI().toURL() produces a well-formed file: URL even when the path
      // contains spaces or a Windows drive letter.
      URL url = new File(filename).toURI().toURL();
      player_begin(url);
    } catch (Exception e) {
      logger.severe(e.getMessage());
      h4JmfPlugin.cnsl.append(e);
    }
  }
} // chooseFile
/** * The stream used by JMF for our image streaming. * * @author Sebastien Vincent * @author Lyubomir Marinov * @author Damian Minkov */ public class ImageStream extends AbstractVideoPullBufferStream<DataSource> { /** * The <tt>Logger</tt> used by the <tt>ImageStream</tt> class and its instances for logging * output. */ private static final Logger logger = Logger.getLogger(ImageStream.class); /** * The pool of <tt>ByteBuffer</tt>s this instances is using to optimize the allocations and * deallocations of <tt>ByteBuffer</tt>s. */ private final ByteBufferPool byteBufferPool = new ByteBufferPool(); /** Desktop interaction (screen capture, key press, ...). */ private DesktopInteract desktopInteract = null; /** Index of display that we will capture from. */ private int displayIndex = -1; /** Sequence number. */ private long seqNo = 0; /** X origin. */ private int x = 0; /** Y origin. */ private int y = 0; /** * Initializes a new <tt>ImageStream</tt> instance which is to have a specific * <tt>FormatControl</tt> * * @param dataSource the <tt>DataSource</tt> which is creating the new instance so that it becomes * one of its <tt>streams</tt> * @param formatControl the <tt>FormatControl</tt> of the new instance which is to specify the * format in which it is to provide its media data */ ImageStream(DataSource dataSource, FormatControl formatControl) { super(dataSource, formatControl); } /** * Blocks and reads into a <tt>Buffer</tt> from this <tt>PullBufferStream</tt>. * * @param buffer the <tt>Buffer</tt> this <tt>PullBufferStream</tt> is to read into * @throws IOException if an I/O error occurs while this <tt>PullBufferStream</tt> reads into the * specified <tt>Buffer</tt> * @see AbstractVideoPullBufferStream#doRead(Buffer) */ @Override protected void doRead(Buffer buffer) throws IOException { /* * Determine the Format in which we're expected to output. We cannot * rely on the Format always being specified in the Buffer because it is * not its responsibility, the DataSource of this ImageStream knows the * output Format. */ Format format = buffer.getFormat(); if (format == null) { format = getFormat(); if (format != null) buffer.setFormat(format); } if (format instanceof AVFrameFormat) { Object o = buffer.getData(); AVFrame frame; if (o instanceof AVFrame) frame = (AVFrame) o; else { frame = new AVFrame(); buffer.setData(frame); } AVFrameFormat avFrameFormat = (AVFrameFormat) format; Dimension size = avFrameFormat.getSize(); ByteBuffer data = readScreenNative(size); if (data != null) { if (frame.avpicture_fill(data, avFrameFormat) < 0) { data.free(); throw new IOException("avpicture_fill"); } } else { /* * This can happen when we disconnect a monitor from computer * before or during grabbing. */ throw new IOException("Failed to grab screen."); } } else { byte[] bytes = (byte[]) buffer.getData(); Dimension size = ((VideoFormat) format).getSize(); bytes = readScreen(bytes, size); buffer.setData(bytes); buffer.setOffset(0); buffer.setLength(bytes.length); } buffer.setHeader(null); buffer.setTimeStamp(System.nanoTime()); buffer.setSequenceNumber(seqNo); buffer.setFlags(Buffer.FLAG_SYSTEM_TIME | Buffer.FLAG_LIVE_DATA); seqNo++; } /** * Read screen. * * @param output output buffer for screen bytes * @param dim dimension of the screen * @return raw bytes, it could be equal to output or not. Take care in the caller to check if * output is the returned value. 
*/ public byte[] readScreen(byte[] output, Dimension dim) { VideoFormat format = (VideoFormat) getFormat(); Dimension formatSize = format.getSize(); int width = formatSize.width; int height = formatSize.height; BufferedImage scaledScreen = null; BufferedImage screen = null; byte data[] = null; int size = width * height * 4; // If output is not large enough, enlarge it. if ((output == null) || (output.length < size)) output = new byte[size]; /* get desktop screen via native grabber if available */ if (desktopInteract.captureScreen(displayIndex, x, y, dim.width, dim.height, output)) { return output; } System.out.println("failed to grab with native! " + output.length); /* OK native grabber failed or is not available, * try with AWT Robot and convert it to the right format * * Note that it is very memory consuming since memory are allocated * to capture screen (via Robot) and then for converting to raw bytes * Moreover support for multiple display has not yet been investigated * * Normally not of our supported platform (Windows (x86, x64), * Linux (x86, x86-64), Mac OS X (i386, x86-64, ppc) and * FreeBSD (x86, x86-64) should go here. */ screen = desktopInteract.captureScreen(); if (screen != null) { /* convert to ARGB BufferedImage */ scaledScreen = ImgStreamingUtils.getScaledImage(screen, width, height, BufferedImage.TYPE_INT_ARGB); /* get raw bytes */ data = ImgStreamingUtils.getImageBytes(scaledScreen, output); } screen = null; scaledScreen = null; return data; } /** * Read screen and store result in native buffer. * * @param dim dimension of the video * @return true if success, false otherwise */ private ByteBuffer readScreenNative(Dimension dim) { int size = dim.width * dim.height * 4 + FFmpeg.FF_INPUT_BUFFER_PADDING_SIZE; ByteBuffer data = byteBufferPool.getBuffer(size); data.setLength(size); /* get desktop screen via native grabber */ boolean b; try { b = desktopInteract.captureScreen( displayIndex, x, y, dim.width, dim.height, data.getPtr(), data.getLength()); } catch (Throwable t) { if (t instanceof ThreadDeath) { throw (ThreadDeath) t; } else { b = false; // logger.error("Failed to grab screen!", t); } } if (!b) { data.free(); data = null; } return data; } /** * Sets the index of the display to be used by this <tt>ImageStream</tt>. * * @param displayIndex the index of the display to be used by this <tt>ImageStream</tt> */ public void setDisplayIndex(int displayIndex) { this.displayIndex = displayIndex; } /** * Sets the origin to be captured by this <tt>ImageStream</tt>. * * @param x the x coordinate of the origin to be set on this instance * @param y the y coordinate of the origin to be set on this instance */ public void setOrigin(int x, int y) { this.x = x; this.y = y; } /** * Start desktop capture stream. * * @see AbstractPullBufferStream#start() */ @Override public void start() throws IOException { super.start(); if (desktopInteract == null) { try { desktopInteract = new DesktopInteractImpl(); } catch (Exception e) { logger.warn("Cannot create DesktopInteract object!"); } } } /** * Stop desktop capture stream. * * @see AbstractPullBufferStream#stop() */ @Override public void stop() throws IOException { try { if (logger.isInfoEnabled()) logger.info("Stop stream"); } finally { super.stop(); byteBufferPool.drain(); } } }
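// A self-contained illustration of the AWT fallback path described in readScreen above:
// capture with java.awt.Robot, scale to the requested size, and unpack 4-byte ARGB pixels.
// ImgStreamingUtils is not shown in this section, so this sketch reproduces the idea with
// standard AWT calls only; the 640x480 target size is an arbitrary example.
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;

final class RobotCaptureSketch {
  public static void main(String[] args) throws Exception {
    Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
    BufferedImage screen = new Robot().createScreenCapture(new Rectangle(screenSize));

    int width = 640, height = 480; // the format size requested from the stream
    BufferedImage scaled = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
    Graphics2D g = scaled.createGraphics();
    g.drawImage(screen, 0, 0, width, height, null);
    g.dispose();

    // 4 bytes per pixel, matching the width * height * 4 size used in readScreen
    int[] argb = scaled.getRGB(0, 0, width, height, null, 0, width);
    byte[] bytes = new byte[width * height * 4];
    for (int i = 0; i < argb.length; i++) {
      bytes[4 * i] = (byte) (argb[i] >>> 24); // A
      bytes[4 * i + 1] = (byte) (argb[i] >>> 16); // R
      bytes[4 * i + 2] = (byte) (argb[i] >>> 8); // G
      bytes[4 * i + 3] = (byte) argb[i]; // B
    }
    System.out.println("captured " + bytes.length + " bytes");
  }
}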
/** * A depacketizer from VP8. See {@link "http://tools.ietf.org/html/draft-ietf-payload-vp8-11"} * * @author Boris Grozev * @author George Politis */ public class DePacketizer extends AbstractCodec2 { /** * The <tt>Logger</tt> used by the <tt>DePacketizer</tt> class and its instances for logging * output. */ private static final Logger logger = Logger.getLogger(DePacketizer.class); /** Whether trace logging is enabled. */ private static final boolean TRACE = logger.isTraceEnabled(); /** * A <tt>Comparator</tt> implementation for RTP sequence numbers. Compares <tt>a</tt> and * <tt>b</tt>, taking into account the wrap at 2^16. * * <p>IMPORTANT: This is a valid <tt>Comparator</tt> implementation only if used for subsets of * [0, 2^16) which don't span more than 2^15 elements. * * <p>E.g. it works for: [0, 2^15-1] and ([50000, 2^16) u [0, 10000]) Doesn't work for: [0, 2^15] * and ([0, 2^15-1] u {2^16-1}) and [0, 2^16) * * <p>NOTE: An identical implementation for Integers can be found in the class SeqNumComparator. * Sequence numbers are 16 bits and unsigned, so an Integer should be sufficient to hold that. */ private static final Comparator<? super Long> seqNumComparator = new Comparator<Long>() { @Override public int compare(Long a, Long b) { if (a.equals(b)) return 0; else if (a > b) { if (a - b < 32768) return 1; else return -1; } else // a < b { if (b - a < 32768) return -1; else return 1; } } }; /** * Stores the RTP payloads (VP8 payload descriptor stripped) from RTP packets belonging to a * single VP8 compressed frame. */ private SortedMap<Long, Container> data = new TreeMap<Long, Container>(seqNumComparator); /** Stores unused <tt>Container</tt>'s. */ private Queue<Container> free = new ArrayBlockingQueue<Container>(100); /** * Stores the first (earliest) sequence number stored in <tt>data</tt>, or -1 if <tt>data</tt> is * empty. */ private long firstSeq = -1; /** * Stores the last (latest) sequence number stored in <tt>data</tt>, or -1 if <tt>data</tt> is * empty. */ private long lastSeq = -1; /** * Stores the value of the <tt>PictureID</tt> field for the VP8 compressed frame, parts of which * are currently stored in <tt>data</tt>, or -1 if the <tt>PictureID</tt> field is not in use or * <tt>data</tt> is empty. */ private int pictureId = -1; /** * Stores the RTP timestamp of the packets stored in <tt>data</tt>, or -1 if they don't have a * timestamp set. */ private long timestamp = -1; /** Whether we have stored any packets in <tt>data</tt>. Equivalent to <tt>data.isEmpty()</tt>. */ private boolean empty = true; /** * Whether we have stored in <tt>data</tt> the last RTP packet of the VP8 compressed frame, parts * of which are currently stored in <tt>data</tt>. */ private boolean haveEnd = false; /** * Whether we have stored in <tt>data</tt> the first RTP packet of the VP8 compressed frame, parts * of which are currently stored in <tt>data</tt>. */ private boolean haveStart = false; /** * Stores the sum of the lengths of the data stored in <tt>data</tt>, that is the total length of * the VP8 compressed frame to be constructed. */ private int frameLength = 0; /** The sequence number of the last RTP packet, which was included in the output. */ private long lastSentSeq = -1; /** Initializes a new <tt>JNIEncoder</tt> instance. 
*/ public DePacketizer() { super( "VP8 RTP DePacketizer", VideoFormat.class, new VideoFormat[] {new VideoFormat(Constants.VP8)}); inputFormats = new VideoFormat[] {new VideoFormat(Constants.VP8_RTP)}; } /** {@inheritDoc} */ @Override protected void doClose() {} /** {@inheritDoc} */ @Override protected void doOpen() throws ResourceUnavailableException { if (logger.isInfoEnabled()) logger.info("Opened VP8 depacketizer"); } /** * Re-initializes the fields which store information about the currently held data. Empties * <tt>data</tt>. */ private void reinit() { firstSeq = lastSeq = timestamp = -1; pictureId = -1; empty = true; haveEnd = haveStart = false; frameLength = 0; Iterator<Map.Entry<Long, Container>> it = data.entrySet().iterator(); Map.Entry<Long, Container> e; while (it.hasNext()) { e = it.next(); free.offer(e.getValue()); it.remove(); } } /** * Checks whether the currently held VP8 compressed frame is complete (e.g all its packets are * stored in <tt>data</tt>). * * @return <tt>true</tt> if the currently help VP8 compressed frame is complete, <tt>false</tt> * otherwise. */ private boolean frameComplete() { return haveStart && haveEnd && !haveMissing(); } /** * Checks whether there are packets with sequence numbers between <tt>firstSeq</tt> and * <tt>lastSeq</tt> which are *not* stored in <tt>data</tt>. * * @return <tt>true</tt> if there are packets with sequence numbers between <tt>firstSeq</tt> and * <tt>lastSeq</tt> which are *not* stored in <tt>data</tt>. */ private boolean haveMissing() { Set<Long> seqs = data.keySet(); long s = firstSeq; while (s != lastSeq) { if (!seqs.contains(s)) return true; s = (s + 1) % (1 << 16); } return false; } /** {@inheritDoc} */ @Override protected int doProcess(Buffer inBuffer, Buffer outBuffer) { byte[] inData = (byte[]) inBuffer.getData(); int inOffset = inBuffer.getOffset(); if (!VP8PayloadDescriptor.isValid(inData, inOffset)) { logger.warn("Invalid RTP/VP8 packet discarded."); outBuffer.setDiscard(true); return BUFFER_PROCESSED_FAILED; // XXX: FAILED or OK? } long inSeq = inBuffer.getSequenceNumber(); long inRtpTimestamp = inBuffer.getRtpTimeStamp(); int inPictureId = VP8PayloadDescriptor.getPictureId(inData, inOffset); boolean inMarker = (inBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0; boolean inIsStartOfFrame = VP8PayloadDescriptor.isStartOfFrame(inData, inOffset); int inLength = inBuffer.getLength(); int inPdSize = VP8PayloadDescriptor.getSize(inData, inOffset); int inPayloadLength = inLength - inPdSize; if (empty && lastSentSeq != -1 && seqNumComparator.compare(inSeq, lastSentSeq) != 1) { if (logger.isInfoEnabled()) logger.info("Discarding old packet (while empty) " + inSeq); outBuffer.setDiscard(true); return BUFFER_PROCESSED_OK; } if (!empty) { // if the incoming packet has a different PictureID or timestamp // than those of the current frame, then it belongs to a different // frame. if ((inPictureId != -1 && pictureId != -1 && inPictureId != pictureId) | (timestamp != -1 && inRtpTimestamp != -1 && inRtpTimestamp != timestamp)) { if (seqNumComparator.compare(inSeq, firstSeq) != 1) // inSeq <= firstSeq { // the packet belongs to a previous frame. discard it if (logger.isInfoEnabled()) logger.info("Discarding old packet " + inSeq); outBuffer.setDiscard(true); return BUFFER_PROCESSED_OK; } else // inSeq > firstSeq (and also presumably isSeq > lastSeq) { // the packet belongs to a subsequent frame (to the one // currently being held). Drop the current frame. 
if (logger.isInfoEnabled()) logger.info( "Discarding saved packets on arrival of" + " a packet for a subsequent frame: " + inSeq); // TODO: this would be the place to complain about the // not-well-received PictureID by sending a RTCP SLI or NACK. reinit(); } } } // a whole frame in a single packet. avoid the extra copy to // this.data and output it immediately. if (empty && inMarker && inIsStartOfFrame) { byte[] outData = validateByteArraySize(outBuffer, inPayloadLength, false); System.arraycopy(inData, inOffset + inPdSize, outData, 0, inPayloadLength); outBuffer.setOffset(0); outBuffer.setLength(inPayloadLength); outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp()); if (TRACE) logger.trace("Out PictureID=" + inPictureId); lastSentSeq = inSeq; return BUFFER_PROCESSED_OK; } // add to this.data Container container = free.poll(); if (container == null) container = new Container(); if (container.buf == null || container.buf.length < inPayloadLength) container.buf = new byte[inPayloadLength]; if (data.get(inSeq) != null) { if (logger.isInfoEnabled()) logger.info("(Probable) duplicate packet detected, discarding " + inSeq); outBuffer.setDiscard(true); return BUFFER_PROCESSED_OK; } System.arraycopy(inData, inOffset + inPdSize, container.buf, 0, inPayloadLength); container.len = inPayloadLength; data.put(inSeq, container); // update fields frameLength += inPayloadLength; if (firstSeq == -1 || (seqNumComparator.compare(firstSeq, inSeq) == 1)) firstSeq = inSeq; if (lastSeq == -1 || (seqNumComparator.compare(inSeq, lastSeq) == 1)) lastSeq = inSeq; if (empty) { // the first received packet for the current frame was just added empty = false; timestamp = inRtpTimestamp; pictureId = inPictureId; } if (inMarker) haveEnd = true; if (inIsStartOfFrame) haveStart = true; // check if we have a full frame if (frameComplete()) { byte[] outData = validateByteArraySize(outBuffer, frameLength, false); int ptr = 0; Container b; for (Map.Entry<Long, Container> entry : data.entrySet()) { b = entry.getValue(); System.arraycopy(b.buf, 0, outData, ptr, b.len); ptr += b.len; } outBuffer.setOffset(0); outBuffer.setLength(frameLength); outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp()); if (TRACE) logger.trace("Out PictureID=" + inPictureId); lastSentSeq = lastSeq; // prepare for the next frame reinit(); return BUFFER_PROCESSED_OK; } else { // frame not complete yet outBuffer.setDiscard(true); return OUTPUT_BUFFER_NOT_FILLED; } } /** * Returns true if the buffer contains a VP8 key frame at offset <tt>offset</tt>. * * @param buff the byte buffer to check * @param off the offset in the byte buffer where the actual data starts * @param len the length of the data in the byte buffer * @return true if the buffer contains a VP8 key frame at offset <tt>offset</tt>. */ public static boolean isKeyFrame(byte[] buff, int off, int len) { if (buff == null || buff.length < off + len || len < RawPacket.FIXED_HEADER_SIZE) { return false; } // Check if this is the start of a VP8 partition in the payload // descriptor. 
if (!DePacketizer.VP8PayloadDescriptor.isValid(buff, off)) { return false; } if (!DePacketizer.VP8PayloadDescriptor.isStartOfFrame(buff, off)) { return false; } int szVP8PayloadDescriptor = DePacketizer.VP8PayloadDescriptor.getSize(buff, off); return DePacketizer.VP8PayloadHeader.isKeyFrame(buff, off + szVP8PayloadDescriptor); } /** * A class that represents the VP8 Payload Descriptor structure defined in {@link * "http://tools.ietf.org/html/draft-ietf-payload-vp8-10"} */ public static class VP8PayloadDescriptor { /** I bit from the X byte of the Payload Descriptor. */ private static final byte I_BIT = (byte) 0x80; /** K bit from the X byte of the Payload Descriptor. */ private static final byte K_BIT = (byte) 0x10; /** L bit from the X byte of the Payload Descriptor. */ private static final byte L_BIT = (byte) 0x40; /** I bit from the I byte of the Payload Descriptor. */ private static final byte M_BIT = (byte) 0x80; /** Maximum length of a VP8 Payload Descriptor. */ public static final int MAX_LENGTH = 6; /** S bit from the first byte of the Payload Descriptor. */ private static final byte S_BIT = (byte) 0x10; /** T bit from the X byte of the Payload Descriptor. */ private static final byte T_BIT = (byte) 0x20; /** X bit from the first byte of the Payload Descriptor. */ private static final byte X_BIT = (byte) 0x80; /** * Gets the temporal layer index (TID), if that's set. * * @param buf the byte buffer that holds the VP8 packet. * @param off the offset in the byte buffer where the VP8 packet starts. * @param len the length of the VP8 packet. * @return the temporal layer index (TID), if that's set, -1 otherwise. */ public static int getTemporalLayerIndex(byte[] buf, int off, int len) { if (buf == null || buf.length < off + len || len < 2) { return -1; } if ((buf[off] & X_BIT) == 0 || (buf[off + 1] & T_BIT) == 0) { return -1; } int sz = getSize(buf, off); if (buf.length < off + sz || sz < 1) { return -1; } return (buf[off + sz - 1] & 0xc0) >> 6; } /** * Returns a simple Payload Descriptor, with PartID = 0, the 'start of partition' bit set * according to <tt>startOfPartition</tt>, and all other bits set to 0. * * @param startOfPartition whether to 'start of partition' bit should be set * @return a simple Payload Descriptor, with PartID = 0, the 'start of partition' bit set * according to <tt>startOfPartition</tt>, and all other bits set to 0. */ public static byte[] create(boolean startOfPartition) { byte[] pd = new byte[1]; pd[0] = startOfPartition ? (byte) 0x10 : 0; return pd; } /** * The size in bytes of the Payload Descriptor at offset <tt>offset</tt> in <tt>input</tt>. The * size is between 1 and 6. * * @param input input * @param offset offset * @return The size in bytes of the Payload Descriptor at offset <tt>offset</tt> in * <tt>input</tt>, or -1 if the input is not a valid VP8 Payload Descriptor. The size is * between 1 and 6. */ public static int getSize(byte[] input, int offset) { if (!isValid(input, offset)) return -1; if ((input[offset] & X_BIT) == 0) return 1; int size = 2; if ((input[offset + 1] & I_BIT) != 0) { size++; if ((input[offset + 2] & M_BIT) != 0) size++; } if ((input[offset + 1] & L_BIT) != 0) size++; if ((input[offset + 1] & (T_BIT | K_BIT)) != 0) size++; return size; } /** * Gets the value of the PictureID field of a VP8 Payload Descriptor. * * @param input * @param offset * @return the value of the PictureID field of a VP8 Payload Descriptor, or -1 if the fields is * not present. 
*/ private static int getPictureId(byte[] input, int offset) { if (!isValid(input, offset)) return -1; if ((input[offset] & X_BIT) == 0 || (input[offset + 1] & I_BIT) == 0) return -1; boolean isLong = (input[offset + 2] & M_BIT) != 0; if (isLong) return (input[offset + 2] & 0x7f) << 8 | (input[offset + 3] & 0xff); else return input[offset + 2] & 0x7f; } public static boolean isValid(byte[] input, int offset) { return true; } /** * Checks whether the '<tt>start of partition</tt>' bit is set in the VP8 Payload Descriptor at * offset <tt>offset</tt> in <tt>input</tt>. * * @param input input * @param offset offset * @return <tt>true</tt> if the '<tt>start of partition</tt>' bit is set, <tt>false</tt> * otherwise. */ public static boolean isStartOfPartition(byte[] input, int offset) { return (input[offset] & S_BIT) != 0; } /** * Returns <tt>true</tt> if both the '<tt>start of partition</tt>' bit is set and the * <tt>PID</tt> fields has value 0 in the VP8 Payload Descriptor at offset <tt>offset</tt> in * <tt>input</tt>. * * @param input * @param offset * @return <tt>true</tt> if both the '<tt>start of partition</tt>' bit is set and the * <tt>PID</tt> fields has value 0 in the VP8 Payload Descriptor at offset <tt>offset</tt> * in <tt>input</tt>. */ public static boolean isStartOfFrame(byte[] input, int offset) { return isStartOfPartition(input, offset) && getPartitionId(input, offset) == 0; } /** * Returns the value of the <tt>PID</tt> (partition ID) field of the VP8 Payload Descriptor at * offset <tt>offset</tt> in <tt>input</tt>. * * @param input * @param offset * @return the value of the <tt>PID</tt> (partition ID) field of the VP8 Payload Descriptor at * offset <tt>offset</tt> in <tt>input</tt>. */ public static int getPartitionId(byte[] input, int offset) { return input[offset] & 0x07; } } /** * A class that represents the VP8 Payload Header structure defined in {@link * "http://tools.ietf.org/html/draft-ietf-payload-vp8-10"} */ public static class VP8PayloadHeader { /** S bit of the Payload Descriptor. */ private static final byte S_BIT = (byte) 0x01; /** * Returns true if the <tt>P</tt> (inverse key frame flag) field of the VP8 Payload Header at * offset <tt>offset</tt> in <tt>input</tt> is 0. * * @return true if the <tt>P</tt> (inverse key frame flag) field of the VP8 Payload Header at * offset <tt>offset</tt> in <tt>input</tt> is 0, false otherwise. */ public static boolean isKeyFrame(byte[] input, int offset) { // When set to 0 the current frame is a key frame. When set to 1 // the current frame is an interframe. Defined in [RFC6386] return (input[offset] & S_BIT) == 0; } } /** A simple container for a <tt>byte[]</tt> and an integer. */ private static class Container { /** This <tt>Container</tt>'s data. */ private byte[] buf; /** Length used. */ private int len = 0; } }
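// A quick, self-contained check of seqNumComparator's wrap-around arithmetic (the same logic
// as above, extracted for illustration): 65535 compares as *less than* 0 because the forward
// distance from 65535 to 0 modulo 2^16 is below 2^15, while pairs exactly 2^15 apart fall
// outside the comparator's documented valid range.
final class SeqNumWrapSketch {
  static int compare(long a, long b) {
    if (a == b) return 0;
    if (a > b) return (a - b < 32768) ? 1 : -1;
    return (b - a < 32768) ? -1 : 1;
  }

  public static void main(String[] args) {
    System.out.println(compare(10, 20)); // -1: plain ordering, no wrap involved
    System.out.println(compare(65535, 0)); // -1: 0 is "after" 65535 across the wrap
    System.out.println(compare(0, 65535)); //  1: symmetric case
    System.out.println(compare(0, 32768)); //  1: spans exactly 2^15, outside the valid range
  }
}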
/** * A <tt>Recorder</tt> implementation which attaches to an <tt>RTPTranslator</tt>. * * @author Vladimir Marinov * @author Boris Grozev */ public class RecorderRtpImpl implements Recorder, ReceiveStreamListener, ActiveSpeakerChangedListener, ControllerListener { /** * The <tt>Logger</tt> used by the <tt>RecorderRtpImpl</tt> class and its instances for logging * output. */ private static final Logger logger = Logger.getLogger(RecorderRtpImpl.class); // values hard-coded to match chrome // TODO: allow to set them dynamically private static final byte redPayloadType = 116; private static final byte ulpfecPayloadType = 117; private static final byte vp8PayloadType = 100; private static final byte opusPayloadType = 111; private static final Format redFormat = new VideoFormat(Constants.RED); private static final Format ulpfecFormat = new VideoFormat(Constants.ULPFEC); private static final Format vp8RtpFormat = new VideoFormat(Constants.VP8_RTP); private static final Format vp8Format = new VideoFormat(Constants.VP8); private static final Format opusFormat = new AudioFormat(Constants.OPUS_RTP, 48000, Format.NOT_SPECIFIED, Format.NOT_SPECIFIED); private static final int FMJ_VIDEO_JITTER_BUFFER_MIN_SIZE = 300; /** The <tt>ContentDescriptor</tt> to use when saving audio. */ private static final ContentDescriptor AUDIO_CONTENT_DESCRIPTOR = new ContentDescriptor(FileTypeDescriptor.MPEG_AUDIO); /** The suffix for audio file names. */ private static final String AUDIO_FILENAME_SUFFIX = ".mp3"; /** The suffix for video file names. */ private static final String VIDEO_FILENAME_SUFFIX = ".webm"; static { Registry.set("video_jitter_buffer_MIN_SIZE", FMJ_VIDEO_JITTER_BUFFER_MIN_SIZE); } /** The <tt>RTPTranslator</tt> that this recorder is/will be attached to. */ private RTPTranslatorImpl translator; /** * The custom <tt>RTPConnector</tt> that this instance uses to read from {@link #translator} and * write to {@link #rtpManager}. */ private RTPConnectorImpl rtpConnector; /** Path to the directory where the output files will be stored. */ private String path; /** The <tt>RTCPFeedbackMessageSender</tt> that we use to send RTCP FIR messages. */ private RTCPFeedbackMessageSender rtcpFeedbackSender; /** * The {@link RTPManager} instance we use to handle the packets coming from * <tt>RTPTranslator</tt>. */ private RTPManager rtpManager; /** * The instance which should be notified when events related to recordings (such as the start or * end of a recording) occur. */ private RecorderEventHandlerImpl eventHandler; /** * Holds the <tt>ReceiveStreams</tt> added to this instance by {@link #rtpManager} and additional * information associated with each one (e.g. the <tt>Processor</tt>, if any, used for it). */ private final HashSet<ReceiveStreamDesc> receiveStreams = new HashSet<ReceiveStreamDesc>(); private final Set<Long> activeVideoSsrcs = new HashSet<Long>(); /** * The <tt>ActiveSpeakerDetector</tt> which will listen to the audio receive streams of this * <tt>RecorderRtpImpl</tt> and notify it about changes to the active speaker via calls to {@link * #activeSpeakerChanged(long)} */ private ActiveSpeakerDetector activeSpeakerDetector = null; StreamRTPManager streamRTPManager; private SynchronizerImpl synchronizer; private boolean started = false; /** * Constructor. * * @param translator the <tt>RTPTranslator</tt> to which this instance will attach in order to * record media. 
*/ public RecorderRtpImpl(RTPTranslator translator) { this.translator = (RTPTranslatorImpl) translator; activeSpeakerDetector = new ActiveSpeakerDetectorImpl(); activeSpeakerDetector.addActiveSpeakerChangedListener(this); } /** Implements {@link Recorder#addListener(Recorder.Listener)}. */ @Override public void addListener(Listener listener) {} /** Implements {@link Recorder#removeListener(Recorder.Listener)}. */ @Override public void removeListener(Listener listener) {} /** Implements {@link Recorder#getSupportedFormats()}. */ @Override public List<String> getSupportedFormats() { return null; } /** Implements {@link Recorder#setMute(boolean)}. */ @Override public void setMute(boolean mute) {} /** * Implements {@link Recorder#getFilename()}. Returns null, since we don't have a (single) * associated filename. */ @Override public String getFilename() { return null; } /** * Sets the instance which should be notified when events related to recordings (such as the start * or end of a recording) occur. */ public void setEventHandler(RecorderEventHandler eventHandler) { if (this.eventHandler == null || (this.eventHandler != eventHandler && this.eventHandler.handler != eventHandler)) { if (this.eventHandler == null) this.eventHandler = new RecorderEventHandlerImpl(eventHandler); else this.eventHandler.handler = eventHandler; } } /** * {@inheritDoc} * * @param format unused, since this implementation records multiple streams using potentially * different formats. * @param dirname the path to the directory into which this <tt>Recorder</tt> will store the * recorded media files. */ @Override public void start(String format, String dirname) throws IOException, MediaException { if (logger.isInfoEnabled()) logger.info("Starting, format=" + format + " " + hashCode()); path = dirname; MediaService mediaService = LibJitsi.getMediaService(); /* * Note that we use only one RTPConnector for both the RTPTranslator * and the RTPManager instances. The this.translator will write to its * output streams, and this.rtpManager will read from its input streams. */ rtpConnector = new RTPConnectorImpl(redPayloadType, ulpfecPayloadType); rtpManager = RTPManager.newInstance(); /* * Add the formats that we know about. */ rtpManager.addFormat(vp8RtpFormat, vp8PayloadType); rtpManager.addFormat(opusFormat, opusPayloadType); rtpManager.addReceiveStreamListener(this); /* * Note: When this.rtpManager sends RTCP sender/receiver reports, they * will end up being written to its own input stream. This is not * expected to cause problems, but might be something to keep an eye on. */ rtpManager.initialize(rtpConnector); /* * Register a fake call participant. * TODO: can we use a more generic MediaStream here? */ streamRTPManager = new StreamRTPManager( mediaService.createMediaStream( new MediaDeviceImpl(new CaptureDeviceInfo(), MediaType.VIDEO)), translator); streamRTPManager.initialize(rtpConnector); rtcpFeedbackSender = translator.getRtcpFeedbackMessageSender(); translator.addFormat(streamRTPManager, opusFormat, opusPayloadType); // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, redFormat, // redPayloadType); // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, ulpfecFormat, // ulpfecPayloadType); // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, // mediaFormatImpl.getFormat(), vp8PayloadType); started = true; } @Override public void stop() { if (started) { if (logger.isInfoEnabled()) logger.info("Stopping " + hashCode()); // remove the recorder from the translator (e.g. 
stop new packets from being written to rtpConnector). if (streamRTPManager != null) streamRTPManager.dispose(); HashSet<ReceiveStreamDesc> streamsToRemove = new HashSet<ReceiveStreamDesc>(); synchronized (receiveStreams) { streamsToRemove.addAll(receiveStreams); } for (ReceiveStreamDesc r : streamsToRemove) removeReceiveStream(r, false); rtpConnector.rtcpPacketTransformer.close(); rtpConnector.rtpPacketTransformer.close(); rtpManager.dispose(); started = false; } } /** * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}. * * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s. */ @Override public void update(ReceiveStreamEvent event) { if (event == null) return; ReceiveStream receiveStream = event.getReceiveStream(); if (event instanceof NewReceiveStreamEvent) { if (receiveStream == null) { logger.warn("NewReceiveStreamEvent with a null ReceiveStream, ignoring."); return; } final long ssrc = getReceiveStreamSSRC(receiveStream); ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc); if (receiveStreamDesc != null) { String s = "NewReceiveStreamEvent for an existing SSRC. "; if (receiveStream != receiveStreamDesc.receiveStream) s += "(but different ReceiveStream object)"; logger.warn(s); return; } else receiveStreamDesc = new ReceiveStreamDesc(receiveStream); if (logger.isInfoEnabled()) logger.info("New ReceiveStream, ssrc=" + ssrc); // Find the format of the ReceiveStream DataSource dataSource = receiveStream.getDataSource(); if (dataSource instanceof PushBufferDataSource) { Format format = null; PushBufferDataSource pbds = (PushBufferDataSource) dataSource; for (PushBufferStream pbs : pbds.getStreams()) { if ((format = pbs.getFormat()) != null) break; } if (format == null) { logger.error("Failed to handle new ReceiveStream: " + "Failed to determine format"); return; } receiveStreamDesc.format = format; } else { logger.error("Failed to handle new ReceiveStream: " + "Unsupported DataSource"); return; } int rtpClockRate = -1; if (receiveStreamDesc.format instanceof AudioFormat) rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate(); else if (receiveStreamDesc.format instanceof VideoFormat) rtpClockRate = 90000; getSynchronizer().setRtpClockRate(ssrc, rtpClockRate); // create a Processor and configure it Processor processor = null; try { processor = Manager.createProcessor(receiveStream.getDataSource()); } catch (NoProcessorException npe) { logger.error("Failed to create Processor: ", npe); return; } catch (IOException ioe) { logger.error("Failed to create Processor: ", ioe); return; } if (logger.isInfoEnabled()) logger.info("Created processor for SSRC=" + ssrc); processor.addControllerListener(this); receiveStreamDesc.processor = processor; final int streamCount; synchronized (receiveStreams) { receiveStreams.add(receiveStreamDesc); streamCount = receiveStreams.size(); } /* * XXX TODO IRBABOON * This is a terrible hack which works around a failure to realize() * some of the Processor-s for audio streams, when multiple streams * start nearly simultaneously. The cause of the problem is currently * unknown (and synchronizing all FMJ calls in RecorderRtpImpl * does not help). * XXX TODO NOOBABRI */ if (receiveStreamDesc.format instanceof AudioFormat) { final Processor p = processor; new Thread() { @Override public void run() { // delay configuring the processors for the different // audio streams to decrease the probability that they // run together.
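/* * For example: if three audio streams are added in quick succession, the * first processor is configured with no delay (450 * 0 ms), the second * after 450 ms and the third after 900 ms. */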
try { int ms = 450 * (streamCount - 1); logger.warn( "Sleeping for " + ms + "ms before" + " configuring processor for SSRC=" + ssrc + " " + System.currentTimeMillis()); Thread.sleep(ms); } catch (Exception e) { } p.configure(); } }.start(); } else { processor.configure(); } } else if (event instanceof TimeoutEvent) { if (receiveStream == null) { // TODO: we might want to get the list of ReceiveStream-s from // rtpManager and compare it to our list, to see if we should // remove a stream. logger.warn("TimeoutEvent: null."); return; } // FMJ silently creates new ReceiveStream instances, so we have to // recognize them by the SSRC. ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream)); if (receiveStreamDesc != null) { if (logger.isInfoEnabled()) { logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc); } removeReceiveStream(receiveStreamDesc, true); } } else if (logger.isInfoEnabled()) { logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event); } } private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) { if (receiveStream.format instanceof VideoFormat) { rtpConnector.packetBuffer.disable(receiveStream.ssrc); emptyPacketBuffer(receiveStream.ssrc); } if (receiveStream.dataSink != null) { try { receiveStream.dataSink.stop(); } catch (IOException e) { logger.error("Failed to stop DataSink: " + e); } receiveStream.dataSink.close(); } if (receiveStream.processor != null) { receiveStream.processor.stop(); receiveStream.processor.close(); } DataSource dataSource = receiveStream.receiveStream.getDataSource(); if (dataSource != null) { try { dataSource.stop(); } catch (IOException ioe) { logger.warn("Failed to stop DataSource"); } dataSource.disconnect(); } synchronized (receiveStreams) { receiveStreams.remove(receiveStream); } } /** * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from * the <tt>Processor</tt>s that this instance uses to transcode media. * * @param ev the event to handle. */ public void controllerUpdate(ControllerEvent ev) { if (ev == null || ev.getSourceController() == null) { return; } Processor processor = (Processor) ev.getSourceController(); ReceiveStreamDesc desc = findReceiveStream(processor); if (desc == null) { logger.warn("Event from an orphaned processor, ignoring: " + ev); return; } if (ev instanceof ConfigureCompleteEvent) { if (logger.isInfoEnabled()) { logger.info( "Configured processor for ReceiveStream ssrc=" + desc.ssrc + " (" + desc.format + ")" + " " + System.currentTimeMillis()); } boolean audio = desc.format instanceof AudioFormat; if (audio) { ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR); if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) { logger.error( "Failed to set the Processor content " + "descriptor to " + AUDIO_CONTENT_DESCRIPTOR + ". Actual result: " + cd); removeReceiveStream(desc, false); return; } } for (TrackControl track : processor.getTrackControls()) { Format trackFormat = track.getFormat(); if (audio) { final long ssrc = desc.ssrc; SilenceEffect silenceEffect; if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) { silenceEffect = new SilenceEffect(48000); } else { // We haven't tested that the RTP timestamps survive // the journey through the chain when codecs other than // opus are in use, so for the moment we rely on FMJ's // timestamps for non-opus formats.
silenceEffect = new SilenceEffect(); } silenceEffect.setListener( new SilenceEffect.Listener() { boolean first = true; @Override public void onSilenceNotInserted(long timestamp) { if (first) { first = false; // send event only audioRecordingStarted(ssrc, timestamp); } else { // change file and send event resetRecording(ssrc, timestamp); } } }); desc.silenceEffect = silenceEffect; AudioLevelEffect audioLevelEffect = new AudioLevelEffect(); audioLevelEffect.setAudioLevelListener( new SimpleAudioLevelListener() { @Override public void audioLevelChanged(int level) { activeSpeakerDetector.levelChanged(ssrc, level); } }); try { // We add an effect, which will insert "silence" in // place of lost packets. track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect}); } catch (UnsupportedPlugInException upie) { logger.warn("Failed to insert silence effect: " + upie); // But do go on, a recording without extra silence is // better than nothing ;) } } else { // transcode vp8/rtp to vp8 (i.e. depacketize vp8) if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format); else { logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc); // we currently only support vp8 removeReceiveStream(desc, false); return; } } } processor.realize(); } else if (ev instanceof RealizeCompleteEvent) { desc.dataSource = processor.getDataOutput(); long ssrc = desc.ssrc; boolean audio = desc.format instanceof AudioFormat; String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX; // XXX '\' on windows? String filename = getNextFilename(path + "/" + ssrc, suffix); desc.filename = filename; DataSink dataSink; if (audio) { try { dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename)); } catch (NoDataSinkException ndse) { logger.error("Could not create DataSink: " + ndse); removeReceiveStream(desc, false); return; } } else { dataSink = new WebmDataSink(filename, desc.dataSource); } if (logger.isInfoEnabled()) logger.info( "Created DataSink (" + dataSink + ") for SSRC=" + ssrc + ". Output filename: " + filename); try { dataSink.open(); } catch (IOException e) { logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e); removeReceiveStream(desc, false); return; } if (!audio) { final WebmDataSink webmDataSink = (WebmDataSink) dataSink; webmDataSink.setSsrc(ssrc); webmDataSink.setEventHandler(eventHandler); webmDataSink.setKeyFrameControl( new KeyFrameControlAdapter() { @Override public boolean requestKeyFrame(boolean urgent) { return requestFIR(webmDataSink); } }); } try { dataSink.start(); } catch (IOException e) { logger.error( "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e); removeReceiveStream(desc, false); return; } if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc); desc.dataSink = dataSink; processor.start(); } else if (logger.isDebugEnabled()) { logger.debug( "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev); } } /** * Restarts the recording for a specific SSRC. * * @param ssrc the SSRC for which to restart recording. * @param timestamp the RTP timestamp of the first RTP packet of the new recording.
*/ private void resetRecording(long ssrc, long timestamp) { ReceiveStreamDesc receiveStream = findReceiveStream(ssrc); // we only restart audio recordings if (receiveStream != null && receiveStream.format instanceof AudioFormat) { String newFilename = getNextFilename(path + "/" + ssrc, AUDIO_FILENAME_SUFFIX); // flush the buffer contained in the MP3 encoder String s = "trying to flush ssrc=" + ssrc; Processor p = receiveStream.processor; if (p != null) { s += " p!=null"; for (TrackControl tc : p.getTrackControls()) { Object o = tc.getControl(FlushableControl.class.getName()); if (o != null) ((FlushableControl) o).flush(); } } if (logger.isInfoEnabled()) { logger.info("Restarting recording for SSRC=" + ssrc + ". New filename: " + newFilename); } receiveStream.dataSink.close(); receiveStream.dataSink = null; // flush the FMJ jitter buffer // DataSource ds = receiveStream.receiveStream.getDataSource(); // if (ds instanceof net.sf.fmj.media.protocol.rtp.DataSource) // ((net.sf.fmj.media.protocol.rtp.DataSource)ds).flush(); receiveStream.filename = newFilename; try { receiveStream.dataSink = Manager.createDataSink( receiveStream.dataSource, new MediaLocator("file:" + newFilename)); } catch (NoDataSinkException ndse) { logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ndse); removeReceiveStream(receiveStream, false); return; } try { receiveStream.dataSink.open(); receiveStream.dataSink.start(); } catch (IOException ioe) { logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ioe); removeReceiveStream(receiveStream, false); return; } audioRecordingStarted(ssrc, timestamp); } } private void audioRecordingStarted(long ssrc, long timestamp) { ReceiveStreamDesc desc = findReceiveStream(ssrc); if (desc == null) return; RecorderEvent event = new RecorderEvent(); event.setType(RecorderEvent.Type.RECORDING_STARTED); event.setMediaType(MediaType.AUDIO); event.setSsrc(ssrc); event.setRtpTimestamp(timestamp); event.setFilename(desc.filename); if (eventHandler != null) eventHandler.handleEvent(event); } /** * Handles a request from a specific <tt>DataSink</tt> to request a keyframe by sending an RTCP * feedback FIR message to the media source. * * @param dataSink the <tt>DataSink</tt> which requests that a keyframe be requested with a FIR * message. * @return <tt>true</tt> if a keyframe was successfully requested, <tt>false</tt> otherwise */ private boolean requestFIR(WebmDataSink dataSink) { ReceiveStreamDesc desc = findReceiveStream(dataSink); if (desc != null && rtcpFeedbackSender != null) { return rtcpFeedbackSender.sendFIR((int) desc.ssrc); } return false; } /** * Returns "prefix"+"suffix" if a file with this name does not exist. Otherwise, returns the * first nonexistent filename of the form "prefix-"+i+"suffix", for an integer i. i is bounded by * 1000 to prevent hanging, and on failure to find a nonexistent filename the method will return * null. * * @param prefix the filename prefix (including any path). * @param suffix the filename suffix (extension). * @return a filename which does not (yet) exist, or <tt>null</tt> if none could be found. */ private String getNextFilename(String prefix, String suffix) { if (!new File(prefix + suffix).exists()) return prefix + suffix; int i = 1; String s; do { s = prefix + "-" + i + suffix; if (!new File(s).exists()) return s; i++; } while (i < 1000); // don't hang indefinitely... return null; } /** * Finds the <tt>ReceiveStreamDesc</tt> with a particular <tt>Processor</tt>. * * @param processor The <tt>Processor</tt> to match. * @return the <tt>ReceiveStreamDesc</tt> with a particular <tt>Processor</tt>, or <tt>null</tt>.
*/ private ReceiveStreamDesc findReceiveStream(Processor processor) { if (processor == null) return null; synchronized (receiveStreams) { for (ReceiveStreamDesc r : receiveStreams) if (processor.equals(r.processor)) return r; } return null; } /** * Finds the <tt>ReceiveStreamDesc</tt> with a particular <tt>DataSink</tt>. * * @param dataSink The <tt>DataSink</tt> to match. * @return the <tt>ReceiveStreamDesc</tt> with a particular <tt>DataSink</tt>, or <tt>null</tt>. */ private ReceiveStreamDesc findReceiveStream(DataSink dataSink) { if (dataSink == null) return null; synchronized (receiveStreams) { for (ReceiveStreamDesc r : receiveStreams) if (dataSink.equals(r.dataSink)) return r; } return null; } /** * Finds the <tt>ReceiveStreamDesc</tt> with a particular SSRC. * * @param ssrc The SSRC to match. * @return the <tt>ReceiveStreamDesc</tt> with a particular SSRC, or <tt>null</tt>. */ private ReceiveStreamDesc findReceiveStream(long ssrc) { synchronized (receiveStreams) { for (ReceiveStreamDesc r : receiveStreams) if (ssrc == r.ssrc) return r; } return null; } /** * Gets the SSRC of a <tt>ReceiveStream</tt> as a (non-negative) <tt>long</tt>. * * <p>FMJ stores the 32-bit SSRC values in <tt>int</tt>s, and the <tt>ReceiveStream.getSSRC()</tt> * implementation(s) don't take care of converting the negative <tt>int</tt> values sometimes * resulting from reading of a 32-bit field into the correct unsigned <tt>long</tt> value. So do * the conversion here. * * @param receiveStream the <tt>ReceiveStream</tt> for which to get the SSRC. * @return the SSRC of <tt>receiveStream</tt> as a (non-negative) <tt>long</tt>. */ private long getReceiveStreamSSRC(ReceiveStream receiveStream) { return 0xffffffffL & receiveStream.getSSRC(); } /** * Implements {@link ActiveSpeakerChangedListener#activeSpeakerChanged(long)}. Notifies this * <tt>RecorderRtpImpl</tt> that the audio <tt>ReceiveStream</tt> considered active has changed, * and that the new active stream has SSRC <tt>ssrc</tt>. * * @param ssrc the SSRC of the new active stream. */ @Override public void activeSpeakerChanged(long ssrc) { if (eventHandler != null) { RecorderEvent e = new RecorderEvent(); e.setAudioSsrc(ssrc); // TODO: how do we time this?
e.setInstant(System.currentTimeMillis()); e.setType(RecorderEvent.Type.SPEAKER_CHANGED); e.setMediaType(MediaType.VIDEO); eventHandler.handleEvent(e); } } private void handleRtpPacket(RawPacket pkt) { if (pkt != null && pkt.getPayloadType() == vp8PayloadType) { long ssrc = pkt.getSSRC() & 0xffffffffL; synchronized (activeVideoSsrcs) { if (!activeVideoSsrcs.contains(ssrc)) { activeVideoSsrcs.add(ssrc); rtcpFeedbackSender.sendFIR(pkt.getSSRC()); } } } } private void handleRtcpPacket(RawPacket pkt) { getSynchronizer().addRTCPPacket(pkt); if (eventHandler != null) eventHandler.nudge(); } public SynchronizerImpl getSynchronizer() { if (synchronizer == null) synchronizer = new SynchronizerImpl(); return synchronizer; } public void setSynchronizer(Synchronizer synchronizer) { if (synchronizer instanceof SynchronizerImpl) { this.synchronizer = (SynchronizerImpl) synchronizer; } } public void connect(Recorder recorder) { if (!(recorder instanceof RecorderRtpImpl)) return; ((RecorderRtpImpl) recorder).setSynchronizer(getSynchronizer()); } private void emptyPacketBuffer(long ssrc) { RawPacket[] pkts = rtpConnector.packetBuffer.emptyBuffer(ssrc); RTPConnectorImpl.OutputDataStreamImpl dataStream; try { dataStream = rtpConnector.getDataOutputStream(); } catch (IOException ioe) { logger.error("Failed to empty packet buffer for SSRC=" + ssrc + ": " + ioe); return; } for (RawPacket pkt : pkts) dataStream.write( pkt.getBuffer(), pkt.getOffset(), pkt.getLength(), false /* already transformed */); } /** The <tt>RTPConnector</tt> implementation used by this <tt>RecorderRtpImpl</tt>. */ private class RTPConnectorImpl implements RTPConnector { private PushSourceStreamImpl controlInputStream; private OutputDataStreamImpl controlOutputStream; private PushSourceStreamImpl dataInputStream; private OutputDataStreamImpl dataOutputStream; private SourceTransferHandler dataTransferHandler; private SourceTransferHandler controlTransferHandler; private RawPacket pendingDataPacket = new RawPacket(); private RawPacket pendingControlPacket = new RawPacket(); private PacketTransformer rtpPacketTransformer = null; private PacketTransformer rtcpPacketTransformer = null; /** The PacketBuffer instance which we use as a jitter buffer. */ private PacketBuffer packetBuffer; private RTPConnectorImpl(byte redPT, byte ulpfecPT) { packetBuffer = new PacketBuffer(); // The chain of transformers will be applied in reverse order for incoming packets.
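// That is, an incoming packet passes through the REDTransformEngine first, // then the FECTransformEngine, then the CompoundPacketEngine, then // TransformEngineImpl (which hands it to handleRtpPacket/handleRtcpPacket), // and finally through the PacketBuffer.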
TransformEngine transformEngine = new TransformEngineChain( new TransformEngine[] { packetBuffer, new TransformEngineImpl(), new CompoundPacketEngine(), new FECTransformEngine(ulpfecPT, (byte) -1), new REDTransformEngine(redPT, (byte) -1) }); rtpPacketTransformer = transformEngine.getRTPTransformer(); rtcpPacketTransformer = transformEngine.getRTCPTransformer(); } private RTPConnectorImpl() {} @Override public void close() { try { if (dataOutputStream != null) dataOutputStream.close(); if (controlOutputStream != null) controlOutputStream.close(); } catch (IOException ioe) { throw new UndeclaredThrowableException(ioe); } } @Override public PushSourceStream getControlInputStream() throws IOException { if (controlInputStream == null) { controlInputStream = new PushSourceStreamImpl(true); } return controlInputStream; } @Override public OutputDataStream getControlOutputStream() throws IOException { if (controlOutputStream == null) { controlOutputStream = new OutputDataStreamImpl(true); } return controlOutputStream; } @Override public PushSourceStream getDataInputStream() throws IOException { if (dataInputStream == null) { dataInputStream = new PushSourceStreamImpl(false); } return dataInputStream; } @Override public OutputDataStreamImpl getDataOutputStream() throws IOException { if (dataOutputStream == null) { dataOutputStream = new OutputDataStreamImpl(false); } return dataOutputStream; } @Override public double getRTCPBandwidthFraction() { return -1; } @Override public double getRTCPSenderBandwidthFraction() { return -1; } @Override public int getReceiveBufferSize() { // TODO Auto-generated method stub return 0; } @Override public int getSendBufferSize() { // TODO Auto-generated method stub return 0; } @Override public void setReceiveBufferSize(int arg0) throws IOException { // TODO Auto-generated method stub } @Override public void setSendBufferSize(int arg0) throws IOException { // TODO Auto-generated method stub } private class OutputDataStreamImpl implements OutputDataStream { boolean isControlStream; private RawPacket[] rawPacketArray = new RawPacket[1]; public OutputDataStreamImpl(boolean isControlStream) { this.isControlStream = isControlStream; } public int write(byte[] buffer, int offset, int length) { return write(buffer, offset, length, true); } public int write(byte[] buffer, int offset, int length, boolean transform) { RawPacket pkt = rawPacketArray[0]; if (pkt == null) pkt = new RawPacket(); rawPacketArray[0] = pkt; byte[] pktBuf = pkt.getBuffer(); if (pktBuf == null || pktBuf.length < length) { pktBuf = new byte[length]; pkt.setBuffer(pktBuf); } System.arraycopy(buffer, offset, pktBuf, 0, length); pkt.setOffset(0); pkt.setLength(length); if (transform) { PacketTransformer packetTransformer = isControlStream ? 
rtcpPacketTransformer : rtpPacketTransformer; if (packetTransformer != null) rawPacketArray = packetTransformer.reverseTransform(rawPacketArray); } SourceTransferHandler transferHandler; PushSourceStream pushSourceStream; try { if (isControlStream) { transferHandler = controlTransferHandler; pushSourceStream = getControlInputStream(); } else { transferHandler = dataTransferHandler; pushSourceStream = getDataInputStream(); } } catch (IOException ioe) { throw new UndeclaredThrowableException(ioe); } for (int i = 0; i < rawPacketArray.length; i++) { RawPacket packet = rawPacketArray[i]; // keep the first element for reuse if (i != 0) rawPacketArray[i] = null; if (packet != null) { if (isControlStream) pendingControlPacket = packet; else pendingDataPacket = packet; if (transferHandler != null) { transferHandler.transferData(pushSourceStream); } } } return length; } public void close() throws IOException {} } /** * A dummy implementation of {@link PushSourceStream}. * * @author Vladimir Marinov */ private class PushSourceStreamImpl implements PushSourceStream { private boolean isControlStream = false; public PushSourceStreamImpl(boolean isControlStream) { this.isControlStream = isControlStream; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public boolean endOfStream() { return false; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public ContentDescriptor getContentDescriptor() { return null; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public long getContentLength() { return 0; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public Object getControl(String arg0) { return null; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public Object[] getControls() { return null; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public int getMinimumTransferSize() { if (isControlStream) { if (pendingControlPacket.getBuffer() != null) { return pendingControlPacket.getLength(); } } else { if (pendingDataPacket.getBuffer() != null) { return pendingDataPacket.getLength(); } } return 0; } @Override public int read(byte[] buffer, int offset, int length) throws IOException { RawPacket pendingPacket; if (isControlStream) { pendingPacket = pendingControlPacket; } else { pendingPacket = pendingDataPacket; } int bytesToRead = 0; byte[] pendingPacketBuffer = pendingPacket.getBuffer(); if (pendingPacketBuffer != null) { int pendingPacketLength = pendingPacket.getLength(); bytesToRead = length > pendingPacketLength ? pendingPacketLength : length; System.arraycopy( pendingPacketBuffer, pendingPacket.getOffset(), buffer, offset, bytesToRead); } return bytesToRead; } /** * {@inheritDoc} * * <p>We keep the first non-null <tt>SourceTransferHandler</tt> that was set, because we don't * want it to be overwritten when we initialize a second <tt>RTPManager</tt> with this * <tt>RTPConnector</tt>. 
* * <p>See {@link RecorderRtpImpl#start(String, String)} */ @Override public void setTransferHandler(SourceTransferHandler transferHandler) { if (isControlStream) { if (RTPConnectorImpl.this.controlTransferHandler == null) { RTPConnectorImpl.this.controlTransferHandler = transferHandler; } } else { if (RTPConnectorImpl.this.dataTransferHandler == null) { RTPConnectorImpl.this.dataTransferHandler = transferHandler; } } } } /** * A transform engine implementation which allows <tt>RecorderRtpImpl</tt> to intercept incoming * RTP and RTCP packets. */ private class TransformEngineImpl implements TransformEngine { SinglePacketTransformer rtpTransformer = new SinglePacketTransformer() { @Override public RawPacket transform(RawPacket pkt) { return pkt; } @Override public RawPacket reverseTransform(RawPacket pkt) { RecorderRtpImpl.this.handleRtpPacket(pkt); return pkt; } @Override public void close() {} }; SinglePacketTransformer rtcpTransformer = new SinglePacketTransformer() { @Override public RawPacket transform(RawPacket pkt) { return pkt; } @Override public RawPacket reverseTransform(RawPacket pkt) { RecorderRtpImpl.this.handleRtcpPacket(pkt); if (pkt != null && pkt.getRTCPPayloadType() == 203) { // An RTCP BYE packet. Remove the receive stream before // it gets to FMJ, because we want to, for example, // flush the packet buffer before that. long ssrc = pkt.getRTCPSSRC() & 0xffffffffL; if (logger.isInfoEnabled()) logger.info("RTCP BYE for SSRC=" + ssrc); ReceiveStreamDesc receiveStream = findReceiveStream(ssrc); if (receiveStream != null) removeReceiveStream(receiveStream, false); } return pkt; } @Override public void close() {} }; @Override public PacketTransformer getRTPTransformer() { return rtpTransformer; } @Override public PacketTransformer getRTCPTransformer() { return rtcpTransformer; } } } private class RecorderEventHandlerImpl implements RecorderEventHandler { private RecorderEventHandler handler; private final Set<RecorderEvent> pendingEvents = new HashSet<RecorderEvent>(); private RecorderEventHandlerImpl(RecorderEventHandler handler) { this.handler = handler; } @Override public boolean handleEvent(RecorderEvent ev) { if (ev == null) return true; if (RecorderEvent.Type.RECORDING_STARTED.equals(ev.getType())) { long instant = getSynchronizer().getLocalTime(ev.getSsrc(), ev.getRtpTimestamp()); if (instant != -1) { ev.setInstant(instant); return handler.handleEvent(ev); } else { pendingEvents.add(ev); return true; } } return handler.handleEvent(ev); } private void nudge() { for (Iterator<RecorderEvent> iter = pendingEvents.iterator(); iter.hasNext(); ) { RecorderEvent ev = iter.next(); long instant = getSynchronizer().getLocalTime(ev.getSsrc(), ev.getRtpTimestamp()); if (instant != -1) { iter.remove(); ev.setInstant(instant); handler.handleEvent(ev); } } } @Override public void close() { for (RecorderEvent ev : pendingEvents) handler.handleEvent(ev); } } /** Represents a <tt>ReceiveStream</tt> for the purposes of this <tt>RecorderRtpImpl</tt>. */ private class ReceiveStreamDesc { /** * The actual <tt>ReceiveStream</tt> which is represented by this <tt>ReceiveStreamDesc</tt>. */ private ReceiveStream receiveStream; /** The SSRC of the stream. */ long ssrc; /** * The <tt>Processor</tt> used to transcode this receive stream into a format appropriate for * saving to a file. */ private Processor processor; /** The <tt>DataSink</tt> which saves <tt>this.dataSource</tt> to a file.
*/ private DataSink dataSink; /** * The <tt>DataSource</tt> for this receive stream which is to be saved using a * <tt>DataSink</tt> (i.e. the <tt>DataSource</tt> "after" all needed transcoding is done). */ private DataSource dataSource; /** The name of the file into which this stream is being saved. */ private String filename; /** The (original) format of this receive stream. */ private Format format; /** The <tt>SilenceEffect</tt> used for this stream (for audio streams only). */ private SilenceEffect silenceEffect; private ReceiveStreamDesc(ReceiveStream receiveStream) { this.receiveStream = receiveStream; this.ssrc = getReceiveStreamSSRC(receiveStream); } } }
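/* * Usage sketch (illustrative only; the RTPTranslator instance, the event * handler and the output directory are assumed to exist in the calling code): * * Recorder recorder = new RecorderRtpImpl(translator); * recorder.setEventHandler(eventHandler); // optional * recorder.start(null, "/tmp/recordings"); // the format argument is ignored * // ... media flowing through the translator is written to disk ... * recorder.stop(); */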
@Override public Response serve(String uri, String method, Properties header, Properties parms) { if (!uri.equals("/mediaserver")) { return super.serve(uri, method, header, parms); // this way we can also serve up normal files and content } logger.fine(method + " '" + uri + "' "); Enumeration<?> e = header.propertyNames(); while (e.hasMoreElements()) { String value = (String) e.nextElement(); logger.fine(" HDR: '" + value + "' = '" + header.getProperty(value) + "'"); } e = parms.propertyNames(); while (e.hasMoreElements()) { String value = (String) e.nextElement(); logger.fine(" PRM: '" + value + "' = '" + parms.getProperty(value) + "'"); } // TODO: check the actual path... final String mediaPath = parms.getProperty("media"); final String outputFormatStr = parms.getProperty("format"); final String mimeType = parms.getProperty("mime"); logger.info("requested media: " + mediaPath); logger.info("requested mime type: " + mimeType); if (mediaPath == null) return new Response(HTTP_FORBIDDEN, "text/plain", "media parameter not specified"); if (mimeType == null) return new Response(HTTP_FORBIDDEN, "text/plain", "mime parameter not specified"); // TODO: if we aren't performing any transcoding, just serve the file up directly. // TODO: capture sources need to be treated as singletons, with some kind of // broadcasting/cloning to ensure that multiple connections can be made. final String serverSideUrlStr = mediaPath; // URLUtils.createUrlStr(new File(mediaPath)); // TODO: enforce that we can't just serve up anything anywhere final ContentDescriptor outputContentDescriptor = new FileTypeDescriptor(ContentDescriptor.mimeTypeToPackageName(mimeType)); final Format outputFormat; if (outputFormatStr == null) { outputFormat = null; } else { try { outputFormat = FormatArgUtils.parse(outputFormatStr); } catch (ParseException e1) { logger.log(Level.WARNING, "" + e1, e1); return new Response(HTTP_FORBIDDEN, "text/plain", "" + e1); } } logger.info("serverSideUrlStr: " + serverSideUrlStr); logger.info("outputContentDescriptor: " + outputContentDescriptor); logger.info("outputFormat: " + outputFormat); final InputStream is; try { is = getInputStream(serverSideUrlStr, outputFormat, outputContentDescriptor); } catch (Exception e1) { return new Response(HTTP_FORBIDDEN, "text/plain", "" + e1); } final String responseMimeType; // workaround for the problem that the multipart/x-mixed-replace boundary is not // stored anywhere. This assumes that if we are serving multipart/x-mixed-replace // data, MultipartMixedReplaceMux is being used. if (mimeType.equals("multipart/x-mixed-replace")) responseMimeType = mimeType + ";boundary=" + MultipartMixedReplaceMux.BOUNDARY; else responseMimeType = mimeType; logger.info("Response mime type: " + responseMimeType); return new Response(HTTP_OK, responseMimeType, is); }
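/* * Example request handled by serve() above ("media", "format" and "mime" are * the parameter names read from parms; the path is purely illustrative): * * GET /mediaserver?media=/path/to/input.avi&mime=multipart/x-mixed-replace * * Requests missing the "media" or "mime" parameter are rejected with * HTTP_FORBIDDEN, and any other URI is delegated to super.serve(). */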
/** * @author Lyubomir Marinov * @author Damian Minkov * @author Yana Stamcheva */ public class MediaConfiguration { /** The <tt>Logger</tt> used by the <tt>MediaConfiguration</tt> class for logging output. */ private static final Logger logger = Logger.getLogger(MediaConfiguration.class); /** The <tt>MediaService</tt> implementation used by <tt>MediaConfiguration</tt>. */ private static final MediaServiceImpl mediaService = NeomediaActivator.getMediaServiceImpl(); /** The preferred width of all panels. */ private static final int WIDTH = 350; /** * Indicates if the Devices settings configuration tab should be disabled, i.e. not visible to the * user. */ private static final String DEVICES_DISABLED_PROP = "net.java.sip.communicator.impl.neomedia.devicesconfig.DISABLED"; /** * Indicates if the Audio/Video encodings configuration tab should be disabled, i.e. not visible * to the user. */ private static final String ENCODINGS_DISABLED_PROP = "net.java.sip.communicator.impl.neomedia.encodingsconfig.DISABLED"; /** * Indicates if the Video/More Settings configuration tab should be disabled, i.e. not visible to * the user. */ private static final String VIDEO_MORE_SETTINGS_DISABLED_PROP = "net.java.sip.communicator.impl.neomedia.videomoresettingsconfig.DISABLED"; /** * Returns the audio configuration panel. * * @return the audio configuration panel */ public static Component createAudioConfigPanel() { return createControls(DeviceConfigurationComboBoxModel.AUDIO); } /** * Returns the video configuration panel. * * @return the video configuration panel */ public static Component createVideoConfigPanel() { return createControls(DeviceConfigurationComboBoxModel.VIDEO); } private static void createAudioPreview( final AudioSystem audioSystem, final JComboBox comboBox, final SoundLevelIndicator soundLevelIndicator) { final ActionListener captureComboActionListener = new ActionListener() { private final SimpleAudioLevelListener audioLevelListener = new SimpleAudioLevelListener() { public void audioLevelChanged(int level) { soundLevelIndicator.updateSoundLevel(level); } }; private AudioMediaDeviceSession deviceSession; private final BufferTransferHandler transferHandler = new BufferTransferHandler() { public void transferData(PushBufferStream stream) { try { stream.read(transferHandlerBuffer); } catch (IOException ioe) { } } }; private final Buffer transferHandlerBuffer = new Buffer(); public void actionPerformed(ActionEvent event) { setDeviceSession(null); CaptureDeviceInfo cdi; if (comboBox == null) { cdi = soundLevelIndicator.isShowing() ? audioSystem.getCaptureDevice() : null; } else { Object selectedItem = soundLevelIndicator.isShowing() ? comboBox.getSelectedItem() : null; cdi = (selectedItem instanceof DeviceConfigurationComboBoxModel.CaptureDevice) ? 
((DeviceConfigurationComboBoxModel.CaptureDevice) selectedItem).info : null; } if (cdi != null) { for (MediaDevice md : mediaService.getDevices(MediaType.AUDIO, MediaUseCase.ANY)) { if (md instanceof AudioMediaDeviceImpl) { AudioMediaDeviceImpl amd = (AudioMediaDeviceImpl) md; if (cdi.equals(amd.getCaptureDeviceInfo())) { try { MediaDeviceSession deviceSession = amd.createSession(); boolean setDeviceSession = false; try { if (deviceSession instanceof AudioMediaDeviceSession) { setDeviceSession((AudioMediaDeviceSession) deviceSession); setDeviceSession = true; } } finally { if (!setDeviceSession) deviceSession.close(); } } catch (Throwable t) { if (t instanceof ThreadDeath) throw (ThreadDeath) t; } break; } } } } } private void setDeviceSession(AudioMediaDeviceSession deviceSession) { if (this.deviceSession == deviceSession) return; if (this.deviceSession != null) { try { this.deviceSession.close(); } finally { this.deviceSession.setLocalUserAudioLevelListener(null); soundLevelIndicator.resetSoundLevel(); } } this.deviceSession = deviceSession; if (this.deviceSession != null) { this.deviceSession.setContentDescriptor(new ContentDescriptor(ContentDescriptor.RAW)); this.deviceSession.setLocalUserAudioLevelListener(audioLevelListener); this.deviceSession.start(MediaDirection.SENDONLY); try { DataSource dataSource = this.deviceSession.getOutputDataSource(); dataSource.connect(); PushBufferStream[] streams = ((PushBufferDataSource) dataSource).getStreams(); for (PushBufferStream stream : streams) stream.setTransferHandler(transferHandler); dataSource.start(); } catch (Throwable t) { if (t instanceof ThreadDeath) throw (ThreadDeath) t; else setDeviceSession(null); } } } }; if (comboBox != null) comboBox.addActionListener(captureComboActionListener); soundLevelIndicator.addHierarchyListener( new HierarchyListener() { public void hierarchyChanged(HierarchyEvent event) { if ((event.getChangeFlags() & HierarchyEvent.SHOWING_CHANGED) != 0) { SwingUtilities.invokeLater( new Runnable() { public void run() { captureComboActionListener.actionPerformed(null); } }); } } }); } /** * Creates the UI controls which are to control the details of a specific <tt>AudioSystem</tt>. 
* * @param audioSystem the <tt>AudioSystem</tt> whose details are to be controlled by the created * UI controls * @param container the <tt>JComponent</tt> to which the UI controls controlling the details of * the specified <tt>audioSystem</tt> are to be added */ public static void createAudioSystemControls(AudioSystem audioSystem, JComponent container) { GridBagConstraints constraints = new GridBagConstraints(); constraints.anchor = GridBagConstraints.NORTHWEST; constraints.fill = GridBagConstraints.HORIZONTAL; constraints.weighty = 0; int audioSystemFeatures = audioSystem.getFeatures(); boolean featureNotifyAndPlaybackDevices = ((audioSystemFeatures & AudioSystem.FEATURE_NOTIFY_AND_PLAYBACK_DEVICES) != 0); constraints.gridx = 0; constraints.insets = new Insets(3, 0, 3, 3); constraints.weightx = 0; constraints.gridy = 0; container.add( new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_CAPTURE)), constraints); if (featureNotifyAndPlaybackDevices) { constraints.gridy = 2; container.add( new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK)), constraints); constraints.gridy = 3; container.add( new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_NOTIFY)), constraints); } constraints.gridx = 1; constraints.insets = new Insets(3, 3, 3, 0); constraints.weightx = 1; JComboBox captureCombo = null; if (featureNotifyAndPlaybackDevices) { captureCombo = new JComboBox(); captureCombo.setEditable(false); captureCombo.setModel( new DeviceConfigurationComboBoxModel( captureCombo, mediaService.getDeviceConfiguration(), DeviceConfigurationComboBoxModel.AUDIO_CAPTURE)); constraints.gridy = 0; container.add(captureCombo, constraints); } int anchor = constraints.anchor; SoundLevelIndicator capturePreview = new SoundLevelIndicator( SimpleAudioLevelListener.MIN_LEVEL, SimpleAudioLevelListener.MAX_LEVEL); constraints.anchor = GridBagConstraints.CENTER; constraints.gridy = (captureCombo == null) ? 0 : 1; container.add(capturePreview, constraints); constraints.anchor = anchor; constraints.gridy = GridBagConstraints.RELATIVE; if (featureNotifyAndPlaybackDevices) { JComboBox playbackCombo = new JComboBox(); playbackCombo.setEditable(false); playbackCombo.setModel( new DeviceConfigurationComboBoxModel( playbackCombo, mediaService.getDeviceConfiguration(), DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK)); container.add(playbackCombo, constraints); JComboBox notifyCombo = new JComboBox(); notifyCombo.setEditable(false); notifyCombo.setModel( new DeviceConfigurationComboBoxModel( notifyCombo, mediaService.getDeviceConfiguration(), DeviceConfigurationComboBoxModel.AUDIO_NOTIFY)); container.add(notifyCombo, constraints); } if ((AudioSystem.FEATURE_ECHO_CANCELLATION & audioSystemFeatures) != 0) { final SIPCommCheckBox echoCancelCheckBox = new SIPCommCheckBox( NeomediaActivator.getResources().getI18NString("impl.media.configform.ECHOCANCEL")); /* * Set the selected value first and only then add the listener, in order * to avoid saving the value when it is merely being shown to the user * without modification.
*/ echoCancelCheckBox.setSelected(mediaService.getDeviceConfiguration().isEchoCancel()); echoCancelCheckBox.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { mediaService.getDeviceConfiguration().setEchoCancel(echoCancelCheckBox.isSelected()); } }); container.add(echoCancelCheckBox, constraints); } if ((AudioSystem.FEATURE_DENOISE & audioSystemFeatures) != 0) { final SIPCommCheckBox denoiseCheckBox = new SIPCommCheckBox( NeomediaActivator.getResources().getI18NString("impl.media.configform.DENOISE")); /* * Set the selected value first and only then add the listener, in order * to avoid saving the value when it is merely being shown to the user * without modification. */ denoiseCheckBox.setSelected(mediaService.getDeviceConfiguration().isDenoise()); denoiseCheckBox.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { mediaService.getDeviceConfiguration().setDenoise(denoiseCheckBox.isSelected()); } }); container.add(denoiseCheckBox, constraints); } createAudioPreview(audioSystem, captureCombo, capturePreview); } /** * Creates basic controls for a type (AUDIO or VIDEO). * * @param type the type. * @return the built Component. */ public static Component createBasicControls(final int type) { final JComboBox deviceComboBox = new JComboBox(); deviceComboBox.setEditable(false); deviceComboBox.setModel( new DeviceConfigurationComboBoxModel( deviceComboBox, mediaService.getDeviceConfiguration(), type)); JLabel deviceLabel = new JLabel(getLabelText(type)); deviceLabel.setDisplayedMnemonic(getDisplayedMnemonic(type)); deviceLabel.setLabelFor(deviceComboBox); final Container devicePanel = new TransparentPanel(new FlowLayout(FlowLayout.CENTER)); devicePanel.setMaximumSize(new Dimension(WIDTH, 25)); devicePanel.add(deviceLabel); devicePanel.add(deviceComboBox); final JPanel deviceAndPreviewPanel = new TransparentPanel(new BorderLayout()); int preferredDeviceAndPreviewPanelHeight; switch (type) { case DeviceConfigurationComboBoxModel.AUDIO: preferredDeviceAndPreviewPanelHeight = 225; break; case DeviceConfigurationComboBoxModel.VIDEO: preferredDeviceAndPreviewPanelHeight = 305; break; default: preferredDeviceAndPreviewPanelHeight = 0; break; } if (preferredDeviceAndPreviewPanelHeight > 0) deviceAndPreviewPanel.setPreferredSize( new Dimension(WIDTH, preferredDeviceAndPreviewPanelHeight)); deviceAndPreviewPanel.add(devicePanel, BorderLayout.NORTH); final ActionListener deviceComboBoxActionListener = new ActionListener() { public void actionPerformed(ActionEvent event) { boolean revalidateAndRepaint = false; for (int i = deviceAndPreviewPanel.getComponentCount() - 1; i >= 0; i--) { Component c = deviceAndPreviewPanel.getComponent(i); if (c != devicePanel) { deviceAndPreviewPanel.remove(i); revalidateAndRepaint = true; } } Component preview = null; if ((deviceComboBox.getSelectedItem() != null) && deviceComboBox.isShowing()) { preview = createPreview(type, deviceComboBox, deviceAndPreviewPanel.getPreferredSize()); } if (preview != null) { deviceAndPreviewPanel.add(preview, BorderLayout.CENTER); revalidateAndRepaint = true; } if (revalidateAndRepaint) { deviceAndPreviewPanel.revalidate(); deviceAndPreviewPanel.repaint(); } } }; deviceComboBox.addActionListener(deviceComboBoxActionListener); /* * We have to initialize the controls to reflect the configuration * at the time of creating this instance.
Additionally, because the * video preview will stop when it and its associated controls * become unnecessary, we have to restart it when the mentioned * controls become necessary again. We'll address the two goals * described by pretending there's a selection in the video combo * box when the combo box in question becomes displayable. */ deviceComboBox.addHierarchyListener( new HierarchyListener() { public void hierarchyChanged(HierarchyEvent event) { if ((event.getChangeFlags() & HierarchyEvent.SHOWING_CHANGED) != 0) { SwingUtilities.invokeLater( new Runnable() { public void run() { deviceComboBoxActionListener.actionPerformed(null); } }); } } }); return deviceAndPreviewPanel; } /** * Creates all the controls (including encoding) for a type(AUDIO or VIDEO) * * @param type the type. * @return the build Component. */ private static Component createControls(int type) { ConfigurationService cfg = NeomediaActivator.getConfigurationService(); SIPCommTabbedPane container = new SIPCommTabbedPane(); ResourceManagementService res = NeomediaActivator.getResources(); if ((cfg == null) || !cfg.getBoolean(DEVICES_DISABLED_PROP, false)) { container.insertTab( res.getI18NString("impl.media.configform.DEVICES"), null, createBasicControls(type), null, 0); } if ((cfg == null) || !cfg.getBoolean(ENCODINGS_DISABLED_PROP, false)) { container.insertTab( res.getI18NString("impl.media.configform.ENCODINGS"), null, new PriorityTable( new EncodingConfigurationTableModel(mediaService.getEncodingConfiguration(), type), 100), null, 1); } if ((type == DeviceConfigurationComboBoxModel.VIDEO) && ((cfg == null) || !cfg.getBoolean(VIDEO_MORE_SETTINGS_DISABLED_PROP, false))) { container.insertTab( res.getI18NString("impl.media.configform.VIDEO_MORE_SETTINGS"), null, createVideoAdvancedSettings(), null, 2); } return container; } /** * Creates preview for the (video) device in the video container. * * @param device the device * @param videoContainer the video container * @throws IOException a problem accessing the device * @throws MediaException a problem getting preview */ private static void createVideoPreview(CaptureDeviceInfo device, JComponent videoContainer) throws IOException, MediaException { videoContainer.removeAll(); videoContainer.revalidate(); videoContainer.repaint(); if (device == null) return; for (MediaDevice mediaDevice : mediaService.getDevices(MediaType.VIDEO, MediaUseCase.ANY)) { if (((MediaDeviceImpl) mediaDevice).getCaptureDeviceInfo().equals(device)) { Dimension videoContainerSize = videoContainer.getPreferredSize(); Component preview = (Component) mediaService.getVideoPreviewComponent( mediaDevice, videoContainerSize.width, videoContainerSize.height); if (preview != null) videoContainer.add(preview); break; } } } /** * Create preview component. * * @param type type * @param comboBox the options. * @param prefSize the preferred size * @return the component. 
*/ private static Component createPreview(int type, final JComboBox comboBox, Dimension prefSize) { JComponent preview = null; if (type == DeviceConfigurationComboBoxModel.AUDIO) { Object selectedItem = comboBox.getSelectedItem(); if (selectedItem instanceof AudioSystem) { AudioSystem audioSystem = (AudioSystem) selectedItem; if (!NoneAudioSystem.LOCATOR_PROTOCOL.equalsIgnoreCase(audioSystem.getLocatorProtocol())) { preview = new TransparentPanel(new GridBagLayout()); createAudioSystemControls(audioSystem, preview); } } } else if (type == DeviceConfigurationComboBoxModel.VIDEO) { JLabel noPreview = new JLabel( NeomediaActivator.getResources().getI18NString("impl.media.configform.NO_PREVIEW")); noPreview.setHorizontalAlignment(SwingConstants.CENTER); noPreview.setVerticalAlignment(SwingConstants.CENTER); preview = createVideoContainer(noPreview); preview.setPreferredSize(prefSize); Object selectedItem = comboBox.getSelectedItem(); CaptureDeviceInfo device = null; if (selectedItem instanceof DeviceConfigurationComboBoxModel.CaptureDevice) device = ((DeviceConfigurationComboBoxModel.CaptureDevice) selectedItem).info; Exception exception; try { createVideoPreview(device, preview); exception = null; } catch (IOException ex) { exception = ex; } catch (MediaException ex) { exception = ex; } if (exception != null) { logger.error("Failed to create preview for device " + device, exception); device = null; } } return preview; } /** * Creates the video container. * * @param noVideoComponent the container component. * @return the video container. */ private static JComponent createVideoContainer(Component noVideoComponent) { return new VideoContainer(noVideoComponent, false); } /** * The mnemonic for a type. * * @param type audio or video type. * @return the mnemonic. */ private static char getDisplayedMnemonic(int type) { switch (type) { case DeviceConfigurationComboBoxModel.AUDIO: return NeomediaActivator.getResources().getI18nMnemonic("impl.media.configform.AUDIO"); case DeviceConfigurationComboBoxModel.VIDEO: return NeomediaActivator.getResources().getI18nMnemonic("impl.media.configform.VIDEO"); default: throw new IllegalArgumentException("type"); } } /** * A label for a type. * * @param type the type. * @return the label. */ private static String getLabelText(int type) { switch (type) { case DeviceConfigurationComboBoxModel.AUDIO: return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO"); case DeviceConfigurationComboBoxModel.AUDIO_CAPTURE: return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO_IN"); case DeviceConfigurationComboBoxModel.AUDIO_NOTIFY: return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO_NOTIFY"); case DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK: return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO_OUT"); case DeviceConfigurationComboBoxModel.VIDEO: return NeomediaActivator.getResources().getI18NString("impl.media.configform.VIDEO"); default: throw new IllegalArgumentException("type"); } } /** * Creates the video advanced settings. * * @return video advanced settings panel. 
*/ private static Component createVideoAdvancedSettings() { ResourceManagementService resources = NeomediaActivator.getResources(); final DeviceConfiguration deviceConfig = mediaService.getDeviceConfiguration(); TransparentPanel centerPanel = new TransparentPanel(new GridBagLayout()); centerPanel.setMaximumSize(new Dimension(WIDTH, 150)); JButton resetDefaultsButton = new JButton(resources.getI18NString("impl.media.configform.VIDEO_RESET")); JPanel resetButtonPanel = new TransparentPanel(new FlowLayout(FlowLayout.RIGHT)); resetButtonPanel.add(resetDefaultsButton); final JPanel centerAdvancedPanel = new TransparentPanel(new BorderLayout()); centerAdvancedPanel.add(centerPanel, BorderLayout.NORTH); centerAdvancedPanel.add(resetButtonPanel, BorderLayout.SOUTH); GridBagConstraints constraints = new GridBagConstraints(); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.anchor = GridBagConstraints.NORTHWEST; constraints.insets = new Insets(5, 5, 0, 0); constraints.gridx = 0; constraints.weightx = 0; constraints.weighty = 0; constraints.gridy = 0; centerPanel.add( new JLabel(resources.getI18NString("impl.media.configform.VIDEO_RESOLUTION")), constraints); constraints.gridy = 1; constraints.insets = new Insets(0, 0, 0, 0); final JCheckBox frameRateCheck = new SIPCommCheckBox(resources.getI18NString("impl.media.configform.VIDEO_FRAME_RATE")); centerPanel.add(frameRateCheck, constraints); constraints.gridy = 2; constraints.insets = new Insets(5, 5, 0, 0); centerPanel.add( new JLabel(resources.getI18NString("impl.media.configform.VIDEO_PACKETS_POLICY")), constraints); constraints.weightx = 1; constraints.gridx = 1; constraints.gridy = 0; constraints.insets = new Insets(5, 0, 0, 5); Object[] resolutionValues = new Object[DeviceConfiguration.SUPPORTED_RESOLUTIONS.length + 1]; System.arraycopy( DeviceConfiguration.SUPPORTED_RESOLUTIONS, 0, resolutionValues, 1, DeviceConfiguration.SUPPORTED_RESOLUTIONS.length); final JComboBox sizeCombo = new JComboBox(resolutionValues); sizeCombo.setRenderer(new ResolutionCellRenderer()); sizeCombo.setEditable(false); centerPanel.add(sizeCombo, constraints); // default value is 20 final JSpinner frameRate = new JSpinner(new SpinnerNumberModel(20, 5, 30, 1)); frameRate.addChangeListener( new ChangeListener() { public void stateChanged(ChangeEvent e) { deviceConfig.setFrameRate( ((SpinnerNumberModel) frameRate.getModel()).getNumber().intValue()); } }); constraints.gridy = 1; constraints.insets = new Insets(0, 0, 0, 5); centerPanel.add(frameRate, constraints); frameRateCheck.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { if (frameRateCheck.isSelected()) { deviceConfig.setFrameRate( ((SpinnerNumberModel) frameRate.getModel()).getNumber().intValue()); } else // unlimited framerate deviceConfig.setFrameRate(-1); frameRate.setEnabled(frameRateCheck.isSelected()); } }); final JSpinner videoMaxBandwidth = new JSpinner( new SpinnerNumberModel(deviceConfig.getVideoMaxBandwidth(), 1, Integer.MAX_VALUE, 1)); videoMaxBandwidth.addChangeListener( new ChangeListener() { public void stateChanged(ChangeEvent e) { deviceConfig.setVideoMaxBandwidth( ((SpinnerNumberModel) videoMaxBandwidth.getModel()).getNumber().intValue()); } }); constraints.gridx = 1; constraints.gridy = 2; constraints.insets = new Insets(0, 0, 5, 5); centerPanel.add(videoMaxBandwidth, constraints); resetDefaultsButton.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { // reset to defaults sizeCombo.setSelectedIndex(0); 
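// Index 0 of resolutionValues was left null above and is rendered as "Auto" // by ResolutionCellRenderer, so selecting it restores the default resolution.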
frameRateCheck.setSelected(false); frameRate.setEnabled(false); frameRate.setValue(20); // unlimited framerate deviceConfig.setFrameRate(-1); videoMaxBandwidth.setValue(DeviceConfiguration.DEFAULT_VIDEO_MAX_BANDWIDTH); } }); // load selected value or auto Dimension videoSize = deviceConfig.getVideoSize(); if ((videoSize.getHeight() != DeviceConfiguration.DEFAULT_VIDEO_HEIGHT) && (videoSize.getWidth() != DeviceConfiguration.DEFAULT_VIDEO_WIDTH)) sizeCombo.setSelectedItem(deviceConfig.getVideoSize()); else sizeCombo.setSelectedIndex(0); sizeCombo.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { Dimension selectedVideoSize = (Dimension) sizeCombo.getSelectedItem(); if (selectedVideoSize == null) { // the auto value, default one selectedVideoSize = new Dimension( DeviceConfiguration.DEFAULT_VIDEO_WIDTH, DeviceConfiguration.DEFAULT_VIDEO_HEIGHT); } deviceConfig.setVideoSize(selectedVideoSize); } }); frameRateCheck.setSelected( deviceConfig.getFrameRate() != DeviceConfiguration.DEFAULT_VIDEO_FRAMERATE); frameRate.setEnabled(frameRateCheck.isSelected()); if (frameRate.isEnabled()) frameRate.setValue(deviceConfig.getFrameRate()); return centerAdvancedPanel; } /** Renders the available resolutions in the combo box. */ private static class ResolutionCellRenderer extends DefaultListCellRenderer { /** * The serialization version number of the <tt>ResolutionCellRenderer</tt> class. Defined to the * value of <tt>0</tt> because the <tt>ResolutionCellRenderer</tt> instances do not have state * of their own. */ private static final long serialVersionUID = 0L; /** * Sets readable text describing the resolution; if the selected value is null, the text * "Auto" is displayed. * * @param list the <tt>JList</tt> being painted. * @param value the value to render. * @param index the cell index. * @param isSelected whether the cell is selected. * @param cellHasFocus whether the cell has focus. * @return this <tt>Component</tt>. */ @Override public Component getListCellRendererComponent( JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) { // call super to set backgrounds and fonts super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); // now just change the text if (value == null) setText("Auto"); else if (value instanceof Dimension) { Dimension d = (Dimension) value; setText(((int) d.getWidth()) + "x" + ((int) d.getHeight())); } return this; } } }
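/* * Usage sketch (illustrative; assumes the neomedia bundle is active so that * NeomediaActivator and the MediaService implementation are available): * * JDialog dialog = new JDialog(); * dialog.add(MediaConfiguration.createAudioConfigPanel()); * dialog.pack(); * dialog.setVisible(true); */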
/** * Discovers and registers DirectShow video capture devices with JMF. * * @author Sebastien Vincent */ public class DirectShowSystem extends DeviceSystem { /** * The <tt>Logger</tt> used by the <tt>DirectShowSystem</tt> class and its instances for logging * output. */ private static final Logger logger = Logger.getLogger(DirectShowSystem.class); /** The protocol of the <tt>MediaLocator</tt>s identifying DirectShow capture devices. */ private static final String LOCATOR_PROTOCOL = LOCATOR_PROTOCOL_DIRECTSHOW; /** * Constructor. Discover and register DirectShow capture devices with JMF. * * @throws Exception if anything goes wrong while discovering and registering DirectShow capture * devices with JMF */ public DirectShowSystem() throws Exception { super(MediaType.VIDEO, LOCATOR_PROTOCOL); } protected void doInitialize() throws Exception { DSCaptureDevice[] devices = DSManager.getInstance().getCaptureDevices(); boolean captureDeviceInfoIsAdded = false; for (int i = 0, count = (devices == null) ? 0 : devices.length; i < count; i++) { long pixelFormat = devices[i].getFormat().getPixelFormat(); int ffmpegPixFmt = (int) DataSource.getFFmpegPixFmt(pixelFormat); Format format = null; if (ffmpegPixFmt != FFmpeg.PIX_FMT_NONE) { format = new AVFrameFormat(ffmpegPixFmt, (int) pixelFormat); } else { logger.warn( "No support for this webcam: " + devices[i].getName() + " (format " + pixelFormat + " not supported)"); continue; } if (logger.isInfoEnabled()) { for (DSFormat f : devices[i].getSupportedFormats()) { if (f.getWidth() != 0 && f.getHeight() != 0) logger.info( "Webcam available resolution for " + devices[i].getName() + ": " + f.getWidth() + "x" + f.getHeight()); } } CaptureDeviceInfo device = new CaptureDeviceInfo( devices[i].getName(), new MediaLocator(LOCATOR_PROTOCOL + ':' + devices[i].getName()), new Format[] {format}); if (logger.isInfoEnabled()) logger.info("Found[" + i + "]: " + device.getName()); CaptureDeviceManager.addDevice(device); captureDeviceInfoIsAdded = true; } if (captureDeviceInfoIsAdded && !MediaServiceImpl.isJmfRegistryDisableLoad()) CaptureDeviceManager.commit(); DSManager.dispose(); } }
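/* * After doInitialize() has run, the devices registered above can be queried * back through the standard JMF registry (a sketch; filtering by AVFrameFormat * matches the formats registered for each device): * * java.util.Vector<?> devices = CaptureDeviceManager.getDeviceList(new AVFrameFormat()); * for (Object o : devices) * System.out.println(((CaptureDeviceInfo) o).getLocator()); */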