/** * A depacketizer from VP8. See {@link "http://tools.ietf.org/html/draft-ietf-payload-vp8-11"} * * @author Boris Grozev * @author George Politis */ public class DePacketizer extends AbstractCodec2 { /** * The <tt>Logger</tt> used by the <tt>DePacketizer</tt> class and its instances for logging * output. */ private static final Logger logger = Logger.getLogger(DePacketizer.class); /** Whether trace logging is enabled. */ private static final boolean TRACE = logger.isTraceEnabled(); /** * A <tt>Comparator</tt> implementation for RTP sequence numbers. Compares <tt>a</tt> and * <tt>b</tt>, taking into account the wrap at 2^16. * * <p>IMPORTANT: This is a valid <tt>Comparator</tt> implementation only if used for subsets of * [0, 2^16) which don't span more than 2^15 elements. * * <p>E.g. it works for: [0, 2^15-1] and ([50000, 2^16) u [0, 10000]). Doesn't work for: [0, 2^15] * and ([0, 2^15-1] u {2^16-1}) and [0, 2^16) * * <p>NOTE: An identical implementation for Integers can be found in the class SeqNumComparator. * Sequence numbers are 16 bits and unsigned, so an Integer should be sufficient to hold that. */ private static final Comparator<? super Long> seqNumComparator = new Comparator<Long>() { @Override public int compare(Long a, Long b) { if (a.equals(b)) return 0; else if (a > b) { if (a - b < 32768) return 1; else return -1; } else // a < b { if (b - a < 32768) return -1; else return 1; } } }; /** * Stores the RTP payloads (VP8 payload descriptor stripped) from RTP packets belonging to a * single VP8 compressed frame. */ private SortedMap<Long, Container> data = new TreeMap<Long, Container>(seqNumComparator); /** Stores unused <tt>Container</tt>s. */ private Queue<Container> free = new ArrayBlockingQueue<Container>(100); /** * Stores the first (earliest) sequence number stored in <tt>data</tt>, or -1 if <tt>data</tt> is * empty. */ private long firstSeq = -1; /** * Stores the last (latest) sequence number stored in <tt>data</tt>, or -1 if <tt>data</tt> is * empty. */ private long lastSeq = -1; /** * Stores the value of the <tt>PictureID</tt> field for the VP8 compressed frame, parts of which * are currently stored in <tt>data</tt>, or -1 if the <tt>PictureID</tt> field is not in use or * <tt>data</tt> is empty. */ private int pictureId = -1; /** * Stores the RTP timestamp of the packets stored in <tt>data</tt>, or -1 if they don't have a * timestamp set. */ private long timestamp = -1; /** Whether <tt>data</tt> contains no packets. Equivalent to <tt>data.isEmpty()</tt>. */ private boolean empty = true; /** * Whether we have stored in <tt>data</tt> the last RTP packet of the VP8 compressed frame, parts * of which are currently stored in <tt>data</tt>. */ private boolean haveEnd = false; /** * Whether we have stored in <tt>data</tt> the first RTP packet of the VP8 compressed frame, parts * of which are currently stored in <tt>data</tt>. */ private boolean haveStart = false; /** * Stores the sum of the lengths of the data stored in <tt>data</tt>, that is the total length of * the VP8 compressed frame to be constructed. */ private int frameLength = 0; /** The sequence number of the last RTP packet which was included in the output. */ private long lastSentSeq = -1; /** Initializes a new <tt>DePacketizer</tt> instance.
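* * <p>Illustrative usage sketch (hypothetical; in practice JMF's processing chain instantiates and drives the codec, and <tt>getNextRtpPayloadBuffer()</tt> stands in for whatever supplies RTP payloads with the VP8 payload descriptor included): <pre>{@code DePacketizer depacketizer = new DePacketizer(); depacketizer.open(); Buffer in = getNextRtpPayloadBuffer(); Buffer out = new Buffer(); int status = depacketizer.process(in, out); /* OUTPUT_BUFFER_NOT_FILLED until a complete frame has been reassembled, then BUFFER_PROCESSED_OK */ }</pre>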
*/ public DePacketizer() { super( "VP8 RTP DePacketizer", VideoFormat.class, new VideoFormat[] {new VideoFormat(Constants.VP8)}); inputFormats = new VideoFormat[] {new VideoFormat(Constants.VP8_RTP)}; } /** {@inheritDoc} */ @Override protected void doClose() {} /** {@inheritDoc} */ @Override protected void doOpen() throws ResourceUnavailableException { if (logger.isInfoEnabled()) logger.info("Opened VP8 depacketizer"); } /** * Re-initializes the fields which store information about the currently held data. Empties * <tt>data</tt>. */ private void reinit() { firstSeq = lastSeq = timestamp = -1; pictureId = -1; empty = true; haveEnd = haveStart = false; frameLength = 0; Iterator<Map.Entry<Long, Container>> it = data.entrySet().iterator(); Map.Entry<Long, Container> e; while (it.hasNext()) { e = it.next(); free.offer(e.getValue()); it.remove(); } } /** * Checks whether the currently held VP8 compressed frame is complete (i.e. all its packets are * stored in <tt>data</tt>). * * @return <tt>true</tt> if the currently held VP8 compressed frame is complete, <tt>false</tt> * otherwise. */ private boolean frameComplete() { return haveStart && haveEnd && !haveMissing(); } /** * Checks whether there are packets with sequence numbers between <tt>firstSeq</tt> and * <tt>lastSeq</tt> which are *not* stored in <tt>data</tt>. * * @return <tt>true</tt> if there are packets with sequence numbers between <tt>firstSeq</tt> and * <tt>lastSeq</tt> which are *not* stored in <tt>data</tt>. */ private boolean haveMissing() { Set<Long> seqs = data.keySet(); long s = firstSeq; while (s != lastSeq) { if (!seqs.contains(s)) return true; s = (s + 1) % (1 << 16); } return false; } /** {@inheritDoc} */ @Override protected int doProcess(Buffer inBuffer, Buffer outBuffer) { byte[] inData = (byte[]) inBuffer.getData(); int inOffset = inBuffer.getOffset(); if (!VP8PayloadDescriptor.isValid(inData, inOffset)) { logger.warn("Invalid RTP/VP8 packet discarded."); outBuffer.setDiscard(true); return BUFFER_PROCESSED_FAILED; // XXX: FAILED or OK? } long inSeq = inBuffer.getSequenceNumber(); long inRtpTimestamp = inBuffer.getRtpTimeStamp(); int inPictureId = VP8PayloadDescriptor.getPictureId(inData, inOffset); boolean inMarker = (inBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0; boolean inIsStartOfFrame = VP8PayloadDescriptor.isStartOfFrame(inData, inOffset); int inLength = inBuffer.getLength(); int inPdSize = VP8PayloadDescriptor.getSize(inData, inOffset); int inPayloadLength = inLength - inPdSize; if (empty && lastSentSeq != -1 && seqNumComparator.compare(inSeq, lastSentSeq) != 1) { if (logger.isInfoEnabled()) logger.info("Discarding old packet (while empty) " + inSeq); outBuffer.setDiscard(true); return BUFFER_PROCESSED_OK; } if (!empty) { // if the incoming packet has a different PictureID or timestamp // than those of the current frame, then it belongs to a different // frame. if ((inPictureId != -1 && pictureId != -1 && inPictureId != pictureId) || (timestamp != -1 && inRtpTimestamp != -1 && inRtpTimestamp != timestamp)) { if (seqNumComparator.compare(inSeq, firstSeq) != 1) // inSeq <= firstSeq { // the packet belongs to a previous frame. discard it if (logger.isInfoEnabled()) logger.info("Discarding old packet " + inSeq); outBuffer.setDiscard(true); return BUFFER_PROCESSED_OK; } else // inSeq > firstSeq (and also presumably inSeq > lastSeq) { // the packet belongs to a subsequent frame (to the one // currently being held). Drop the current frame.
if (logger.isInfoEnabled()) logger.info( "Discarding saved packets on arrival of" + " a packet for a subsequent frame: " + inSeq); // TODO: this would be the place to complain about the // not-well-received PictureID by sending a RTCP SLI or NACK. reinit(); } } } // a whole frame in a single packet. avoid the extra copy to // this.data and output it immediately. if (empty && inMarker && inIsStartOfFrame) { byte[] outData = validateByteArraySize(outBuffer, inPayloadLength, false); System.arraycopy(inData, inOffset + inPdSize, outData, 0, inPayloadLength); outBuffer.setOffset(0); outBuffer.setLength(inPayloadLength); outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp()); if (TRACE) logger.trace("Out PictureID=" + inPictureId); lastSentSeq = inSeq; return BUFFER_PROCESSED_OK; } // add to this.data // check for duplicates before taking a Container from the pool, so that we don't leak one if (data.get(inSeq) != null) { if (logger.isInfoEnabled()) logger.info("(Probable) duplicate packet detected, discarding " + inSeq); outBuffer.setDiscard(true); return BUFFER_PROCESSED_OK; } Container container = free.poll(); if (container == null) container = new Container(); if (container.buf == null || container.buf.length < inPayloadLength) container.buf = new byte[inPayloadLength]; System.arraycopy(inData, inOffset + inPdSize, container.buf, 0, inPayloadLength); container.len = inPayloadLength; data.put(inSeq, container); // update fields frameLength += inPayloadLength; if (firstSeq == -1 || (seqNumComparator.compare(firstSeq, inSeq) == 1)) firstSeq = inSeq; if (lastSeq == -1 || (seqNumComparator.compare(inSeq, lastSeq) == 1)) lastSeq = inSeq; if (empty) { // the first received packet for the current frame was just added empty = false; timestamp = inRtpTimestamp; pictureId = inPictureId; } if (inMarker) haveEnd = true; if (inIsStartOfFrame) haveStart = true; // check if we have a full frame if (frameComplete()) { byte[] outData = validateByteArraySize(outBuffer, frameLength, false); int ptr = 0; Container b; for (Map.Entry<Long, Container> entry : data.entrySet()) { b = entry.getValue(); System.arraycopy(b.buf, 0, outData, ptr, b.len); ptr += b.len; } outBuffer.setOffset(0); outBuffer.setLength(frameLength); outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp()); if (TRACE) logger.trace("Out PictureID=" + inPictureId); lastSentSeq = lastSeq; // prepare for the next frame reinit(); return BUFFER_PROCESSED_OK; } else { // frame not complete yet outBuffer.setDiscard(true); return OUTPUT_BUFFER_NOT_FILLED; } } /** * Returns true if the buffer contains a VP8 key frame at offset <tt>off</tt>. * * @param buff the byte buffer to check * @param off the offset in the byte buffer where the actual data starts * @param len the length of the data in the byte buffer * @return true if the buffer contains a VP8 key frame at offset <tt>off</tt>. */ public static boolean isKeyFrame(byte[] buff, int off, int len) { if (buff == null || buff.length < off + len || len < RawPacket.FIXED_HEADER_SIZE) { return false; } // Check if this is the start of a VP8 partition in the payload // descriptor.
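// A packet can start a key frame only if it starts a VP8 frame in the first place: the S bit // must be set and the PID must be 0 in the payload descriptor. The P bit (inverse key frame // flag) is then read from the first byte of the payload header which follows the descriptor.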
if (!DePacketizer.VP8PayloadDescriptor.isValid(buff, off)) { return false; } if (!DePacketizer.VP8PayloadDescriptor.isStartOfFrame(buff, off)) { return false; } int szVP8PayloadDescriptor = DePacketizer.VP8PayloadDescriptor.getSize(buff, off); return DePacketizer.VP8PayloadHeader.isKeyFrame(buff, off + szVP8PayloadDescriptor); } /** * A class that represents the VP8 Payload Descriptor structure defined in {@link * "http://tools.ietf.org/html/draft-ietf-payload-vp8-10"} */ public static class VP8PayloadDescriptor { /** I bit from the X byte of the Payload Descriptor. */ private static final byte I_BIT = (byte) 0x80; /** K bit from the X byte of the Payload Descriptor. */ private static final byte K_BIT = (byte) 0x10; /** L bit from the X byte of the Payload Descriptor. */ private static final byte L_BIT = (byte) 0x40; /** M bit from the I byte of the Payload Descriptor. */ private static final byte M_BIT = (byte) 0x80; /** Maximum length of a VP8 Payload Descriptor. */ public static final int MAX_LENGTH = 6; /** S bit from the first byte of the Payload Descriptor. */ private static final byte S_BIT = (byte) 0x10; /** T bit from the X byte of the Payload Descriptor. */ private static final byte T_BIT = (byte) 0x20; /** X bit from the first byte of the Payload Descriptor. */ private static final byte X_BIT = (byte) 0x80; /** * Gets the temporal layer index (TID), if that's set. * * @param buf the byte buffer that holds the VP8 packet. * @param off the offset in the byte buffer where the VP8 packet starts. * @param len the length of the VP8 packet. * @return the temporal layer index (TID), if that's set, -1 otherwise. */ public static int getTemporalLayerIndex(byte[] buf, int off, int len) { if (buf == null || buf.length < off + len || len < 2) { return -1; } if ((buf[off] & X_BIT) == 0 || (buf[off + 1] & T_BIT) == 0) { return -1; } int sz = getSize(buf, off); if (buf.length < off + sz || sz < 1) { return -1; } return (buf[off + sz - 1] & 0xc0) >> 6; } /** * Returns a simple Payload Descriptor, with PartID = 0, the 'start of partition' bit set * according to <tt>startOfPartition</tt>, and all other bits set to 0. * * @param startOfPartition whether the 'start of partition' bit should be set * @return a simple Payload Descriptor, with PartID = 0, the 'start of partition' bit set * according to <tt>startOfPartition</tt>, and all other bits set to 0. */ public static byte[] create(boolean startOfPartition) { byte[] pd = new byte[1]; pd[0] = startOfPartition ? (byte) 0x10 : 0; return pd; } /** * The size in bytes of the Payload Descriptor at offset <tt>offset</tt> in <tt>input</tt>. The * size is between 1 and 6. * * @param input input * @param offset offset * @return The size in bytes of the Payload Descriptor at offset <tt>offset</tt> in * <tt>input</tt>, or -1 if the input is not a valid VP8 Payload Descriptor. The size is * between 1 and 6. */ public static int getSize(byte[] input, int offset) { if (!isValid(input, offset)) return -1; if ((input[offset] & X_BIT) == 0) return 1; int size = 2; if ((input[offset + 1] & I_BIT) != 0) { size++; if ((input[offset + 2] & M_BIT) != 0) size++; } if ((input[offset + 1] & L_BIT) != 0) size++; if ((input[offset + 1] & (T_BIT | K_BIT)) != 0) size++; return size; } /** * Gets the value of the PictureID field of a VP8 Payload Descriptor. * * @param input input * @param offset offset * @return the value of the PictureID field of a VP8 Payload Descriptor, or -1 if the field is * not present.
*/ private static int getPictureId(byte[] input, int offset) { if (!isValid(input, offset)) return -1; if ((input[offset] & X_BIT) == 0 || (input[offset + 1] & I_BIT) == 0) return -1; boolean isLong = (input[offset + 2] & M_BIT) != 0; if (isLong) return (input[offset + 2] & 0x7f) << 8 | (input[offset + 3] & 0xff); else return input[offset + 2] & 0x7f; } /** * Checks whether the arguments could hold a valid VP8 Payload Descriptor. Note: the current * implementation is a placeholder which accepts any input. * * @param input input * @param offset offset * @return <tt>true</tt> */ public static boolean isValid(byte[] input, int offset) { return true; } /** * Checks whether the '<tt>start of partition</tt>' bit is set in the VP8 Payload Descriptor at * offset <tt>offset</tt> in <tt>input</tt>. * * @param input input * @param offset offset * @return <tt>true</tt> if the '<tt>start of partition</tt>' bit is set, <tt>false</tt> * otherwise. */ public static boolean isStartOfPartition(byte[] input, int offset) { return (input[offset] & S_BIT) != 0; } /** * Returns <tt>true</tt> if both the '<tt>start of partition</tt>' bit is set and the * <tt>PID</tt> field has value 0 in the VP8 Payload Descriptor at offset <tt>offset</tt> in * <tt>input</tt>. * * @param input input * @param offset offset * @return <tt>true</tt> if both the '<tt>start of partition</tt>' bit is set and the * <tt>PID</tt> field has value 0 in the VP8 Payload Descriptor at offset <tt>offset</tt> * in <tt>input</tt>. */ public static boolean isStartOfFrame(byte[] input, int offset) { return isStartOfPartition(input, offset) && getPartitionId(input, offset) == 0; } /** * Returns the value of the <tt>PID</tt> (partition ID) field of the VP8 Payload Descriptor at * offset <tt>offset</tt> in <tt>input</tt>. * * @param input input * @param offset offset * @return the value of the <tt>PID</tt> (partition ID) field of the VP8 Payload Descriptor at * offset <tt>offset</tt> in <tt>input</tt>. */ public static int getPartitionId(byte[] input, int offset) { return input[offset] & 0x07; } } /** * A class that represents the VP8 Payload Header structure defined in {@link * "http://tools.ietf.org/html/draft-ietf-payload-vp8-10"} */ public static class VP8PayloadHeader { /** P bit (the inverse key frame flag) from the first byte of the Payload Header. */ private static final byte P_BIT = (byte) 0x01; /** * Returns true if the <tt>P</tt> (inverse key frame flag) field of the VP8 Payload Header at * offset <tt>offset</tt> in <tt>input</tt> is 0. * * @return true if the <tt>P</tt> (inverse key frame flag) field of the VP8 Payload Header at * offset <tt>offset</tt> in <tt>input</tt> is 0, false otherwise. */ public static boolean isKeyFrame(byte[] input, int offset) { // When set to 0 the current frame is a key frame. When set to 1 // the current frame is an interframe. Defined in [RFC6386] return (input[offset] & P_BIT) == 0; } } /** A simple container for a <tt>byte[]</tt> and an integer. */ private static class Container { /** This <tt>Container</tt>'s data. */ private byte[] buf; /** Length used. */ private int len = 0; } }
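/*
 * Illustrative sketch (not part of the original sources): a worked example of the payload
 * descriptor helpers above on a hand-built packet. The byte values and the class name are
 * hypothetical.
 */
class VP8PayloadDescriptorExample {
    public static void main(String[] args) {
        // Descriptor byte 0: 0x90 = X and S bits set, PID = 0, i.e. start of frame.
        // Descriptor byte 1 (the X byte): 0x80 = I bit set, so a PictureID follows.
        // Descriptor bytes 2-3: 0xFF 0xEE; the M bit is set, so the PictureID is
        // 15 bits long: (0x7F << 8) | 0xEE = 32750.
        // Byte 4 is the first payload header byte: 0x00, P bit clear = key frame.
        // The packet is padded to 12 bytes so that isKeyFrame's minimum-length check passes.
        byte[] pkt = {(byte) 0x90, (byte) 0x80, (byte) 0xFF, (byte) 0xEE, 0, 0, 0, 0, 0, 0, 0, 0};
        System.out.println(DePacketizer.VP8PayloadDescriptor.getSize(pkt, 0)); // 4
        System.out.println(DePacketizer.VP8PayloadDescriptor.isStartOfFrame(pkt, 0)); // true
        System.out.println(DePacketizer.VP8PayloadDescriptor.getTemporalLayerIndex(pkt, 0, 12)); // -1 (T bit not set)
        System.out.println(DePacketizer.isKeyFrame(pkt, 0, 12)); // true
    }
}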
/** * The stream used by JMF for our image streaming. * * @author Sebastien Vincent * @author Lyubomir Marinov * @author Damian Minkov */ public class ImageStream extends AbstractVideoPullBufferStream<DataSource> { /** * The <tt>Logger</tt> used by the <tt>ImageStream</tt> class and its instances for logging * output. */ private static final Logger logger = Logger.getLogger(ImageStream.class); /** * The pool of <tt>ByteBuffer</tt>s this instance is using to optimize the allocations and * deallocations of <tt>ByteBuffer</tt>s. */ private final ByteBufferPool byteBufferPool = new ByteBufferPool(); /** Desktop interaction (screen capture, key press, ...). */ private DesktopInteract desktopInteract = null; /** Index of display that we will capture from. */ private int displayIndex = -1; /** Sequence number. */ private long seqNo = 0; /** X origin. */ private int x = 0; /** Y origin. */ private int y = 0; /** * Initializes a new <tt>ImageStream</tt> instance which is to have a specific * <tt>FormatControl</tt>. * * @param dataSource the <tt>DataSource</tt> which is creating the new instance so that it becomes * one of its <tt>streams</tt> * @param formatControl the <tt>FormatControl</tt> of the new instance which is to specify the * format in which it is to provide its media data */ ImageStream(DataSource dataSource, FormatControl formatControl) { super(dataSource, formatControl); } /** * Blocks and reads into a <tt>Buffer</tt> from this <tt>PullBufferStream</tt>. * * @param buffer the <tt>Buffer</tt> this <tt>PullBufferStream</tt> is to read into * @throws IOException if an I/O error occurs while this <tt>PullBufferStream</tt> reads into the * specified <tt>Buffer</tt> * @see AbstractVideoPullBufferStream#doRead(Buffer) */ @Override protected void doRead(Buffer buffer) throws IOException { /* * Determine the Format in which we're expected to output. We cannot * rely on the Format always being specified in the Buffer because it is * not its responsibility; the DataSource of this ImageStream knows the * output Format. */ Format format = buffer.getFormat(); if (format == null) { format = getFormat(); if (format != null) buffer.setFormat(format); } if (format instanceof AVFrameFormat) { Object o = buffer.getData(); AVFrame frame; if (o instanceof AVFrame) frame = (AVFrame) o; else { frame = new AVFrame(); buffer.setData(frame); } AVFrameFormat avFrameFormat = (AVFrameFormat) format; Dimension size = avFrameFormat.getSize(); ByteBuffer data = readScreenNative(size); if (data != null) { if (frame.avpicture_fill(data, avFrameFormat) < 0) { data.free(); throw new IOException("avpicture_fill"); } } else { /* * This can happen when we disconnect a monitor from the computer * before or during grabbing. */ throw new IOException("Failed to grab screen."); } } else { byte[] bytes = (byte[]) buffer.getData(); Dimension size = ((VideoFormat) format).getSize(); bytes = readScreen(bytes, size); buffer.setData(bytes); buffer.setOffset(0); buffer.setLength(bytes.length); } buffer.setHeader(null); buffer.setTimeStamp(System.nanoTime()); buffer.setSequenceNumber(seqNo); buffer.setFlags(Buffer.FLAG_SYSTEM_TIME | Buffer.FLAG_LIVE_DATA); seqNo++; } /** * Read screen. * * @param output output buffer for screen bytes * @param dim dimension of the screen * @return the raw bytes; this may or may not be the same array as <tt>output</tt>, so the * caller must use the returned value.
*/ public byte[] readScreen(byte[] output, Dimension dim) { VideoFormat format = (VideoFormat) getFormat(); Dimension formatSize = format.getSize(); int width = formatSize.width; int height = formatSize.height; BufferedImage scaledScreen = null; BufferedImage screen = null; byte[] data = null; int size = width * height * 4; // If output is not large enough, enlarge it. if ((output == null) || (output.length < size)) output = new byte[size]; /* get desktop screen via native grabber if available */ if (desktopInteract.captureScreen(displayIndex, x, y, dim.width, dim.height, output)) { return output; } logger.warn("Failed to grab screen with the native grabber! " + output.length); /* OK, the native grabber failed or is not available; try with AWT Robot * and convert to the right format. * * Note that this is very memory-consuming, since memory is allocated * to capture the screen (via Robot) and then again to convert it to raw bytes. * Moreover, support for multiple displays has not yet been investigated. * * Normally none of our supported platforms (Windows (x86, x64), * Linux (x86, x86-64), Mac OS X (i386, x86-64, ppc) and * FreeBSD (x86, x86-64)) should take this path. */ screen = desktopInteract.captureScreen(); if (screen != null) { /* convert to ARGB BufferedImage */ scaledScreen = ImgStreamingUtils.getScaledImage(screen, width, height, BufferedImage.TYPE_INT_ARGB); /* get raw bytes */ data = ImgStreamingUtils.getImageBytes(scaledScreen, output); } screen = null; scaledScreen = null; return data; } /** * Read screen and store result in native buffer. * * @param dim dimension of the video * @return a <tt>ByteBuffer</tt> holding the captured screen, or <tt>null</tt> on failure */ private ByteBuffer readScreenNative(Dimension dim) { int size = dim.width * dim.height * 4 + FFmpeg.FF_INPUT_BUFFER_PADDING_SIZE; ByteBuffer data = byteBufferPool.getBuffer(size); data.setLength(size); /* get desktop screen via native grabber */ boolean b; try { b = desktopInteract.captureScreen( displayIndex, x, y, dim.width, dim.height, data.getPtr(), data.getLength()); } catch (Throwable t) { if (t instanceof ThreadDeath) { throw (ThreadDeath) t; } else { b = false; // logger.error("Failed to grab screen!", t); } } if (!b) { data.free(); data = null; } return data; } /** * Sets the index of the display to be used by this <tt>ImageStream</tt>. * * @param displayIndex the index of the display to be used by this <tt>ImageStream</tt> */ public void setDisplayIndex(int displayIndex) { this.displayIndex = displayIndex; } /** * Sets the origin to be captured by this <tt>ImageStream</tt>. * * @param x the x coordinate of the origin to be set on this instance * @param y the y coordinate of the origin to be set on this instance */ public void setOrigin(int x, int y) { this.x = x; this.y = y; } /** * Start desktop capture stream. * * @see AbstractPullBufferStream#start() */ @Override public void start() throws IOException { super.start(); if (desktopInteract == null) { try { desktopInteract = new DesktopInteractImpl(); } catch (Exception e) { logger.warn("Cannot create DesktopInteract object!", e); } } } /** * Stop desktop capture stream. * * @see AbstractPullBufferStream#stop() */ @Override public void stop() throws IOException { try { if (logger.isInfoEnabled()) logger.info("Stop stream"); } finally { super.stop(); byteBufferPool.drain(); } } }
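/*
 * Illustrative sketch (hypothetical helper, not part of the original file): the byte[] capture
 * path above sizes its buffer as width * height * 4, one byte per ARGB channel, and readScreen
 * may reallocate, so callers must always use the returned array rather than the one passed in.
 */
class ScreenBufferExample {
    /** Minimum output buffer size readScreen needs for a width x height 32-bit ARGB frame. */
    static int argbBufferSize(int width, int height) {
        return width * height * 4; // 4 bytes per pixel: alpha, red, green, blue
    }

    public static void main(String[] args) {
        System.out.println(argbBufferSize(1920, 1080)); // 8294400 bytes, roughly 7.9 MiB per frame
    }
}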
/** * @author Lyubomir Marinov * @author Damian Minkov * @author Yana Stamcheva */ public class MediaConfiguration { /** The <tt>Logger</tt> used by the <tt>MediaConfiguration</tt> class for logging output. */ private static final Logger logger = Logger.getLogger(MediaConfiguration.class); /** The <tt>MediaService</tt> implementation used by <tt>MediaConfiguration</tt>. */ private static final MediaServiceImpl mediaService = NeomediaActivator.getMediaServiceImpl(); /** The preferred width of all panels. */ private static final int WIDTH = 350; /** * Indicates if the Devices settings configuration tab should be disabled, i.e. not visible to the * user. */ private static final String DEVICES_DISABLED_PROP = "net.java.sip.communicator.impl.neomedia.devicesconfig.DISABLED"; /** * Indicates if the Audio/Video encodings configuration tab should be disabled, i.e. not visible * to the user. */ private static final String ENCODINGS_DISABLED_PROP = "net.java.sip.communicator.impl.neomedia.encodingsconfig.DISABLED"; /** * Indicates if the Video/More Settings configuration tab should be disabled, i.e. not visible to * the user. */ private static final String VIDEO_MORE_SETTINGS_DISABLED_PROP = "net.java.sip.communicator.impl.neomedia.videomoresettingsconfig.DISABLED"; /** * Returns the audio configuration panel. * * @return the audio configuration panel */ public static Component createAudioConfigPanel() { return createControls(DeviceConfigurationComboBoxModel.AUDIO); } /** * Returns the video configuration panel. * * @return the video configuration panel */ public static Component createVideoConfigPanel() { return createControls(DeviceConfigurationComboBoxModel.VIDEO); } private static void createAudioPreview( final AudioSystem audioSystem, final JComboBox comboBox, final SoundLevelIndicator soundLevelIndicator) { final ActionListener captureComboActionListener = new ActionListener() { private final SimpleAudioLevelListener audioLevelListener = new SimpleAudioLevelListener() { public void audioLevelChanged(int level) { soundLevelIndicator.updateSoundLevel(level); } }; private AudioMediaDeviceSession deviceSession; private final BufferTransferHandler transferHandler = new BufferTransferHandler() { public void transferData(PushBufferStream stream) { try { stream.read(transferHandlerBuffer); } catch (IOException ioe) { /* The preview simply drops data it fails to read. */ } } }; private final Buffer transferHandlerBuffer = new Buffer(); public void actionPerformed(ActionEvent event) { setDeviceSession(null); CaptureDeviceInfo cdi; if (comboBox == null) { cdi = soundLevelIndicator.isShowing() ? audioSystem.getCaptureDevice() : null; } else { Object selectedItem = soundLevelIndicator.isShowing() ? comboBox.getSelectedItem() : null; cdi = (selectedItem instanceof DeviceConfigurationComboBoxModel.CaptureDevice) ?
((DeviceConfigurationComboBoxModel.CaptureDevice) selectedItem).info : null; } if (cdi != null) { for (MediaDevice md : mediaService.getDevices(MediaType.AUDIO, MediaUseCase.ANY)) { if (md instanceof AudioMediaDeviceImpl) { AudioMediaDeviceImpl amd = (AudioMediaDeviceImpl) md; if (cdi.equals(amd.getCaptureDeviceInfo())) { try { MediaDeviceSession deviceSession = amd.createSession(); boolean setDeviceSession = false; try { if (deviceSession instanceof AudioMediaDeviceSession) { setDeviceSession((AudioMediaDeviceSession) deviceSession); setDeviceSession = true; } } finally { if (!setDeviceSession) deviceSession.close(); } } catch (Throwable t) { if (t instanceof ThreadDeath) throw (ThreadDeath) t; } break; } } } } } private void setDeviceSession(AudioMediaDeviceSession deviceSession) { if (this.deviceSession == deviceSession) return; if (this.deviceSession != null) { try { this.deviceSession.close(); } finally { this.deviceSession.setLocalUserAudioLevelListener(null); soundLevelIndicator.resetSoundLevel(); } } this.deviceSession = deviceSession; if (this.deviceSession != null) { this.deviceSession.setContentDescriptor(new ContentDescriptor(ContentDescriptor.RAW)); this.deviceSession.setLocalUserAudioLevelListener(audioLevelListener); this.deviceSession.start(MediaDirection.SENDONLY); try { DataSource dataSource = this.deviceSession.getOutputDataSource(); dataSource.connect(); PushBufferStream[] streams = ((PushBufferDataSource) dataSource).getStreams(); for (PushBufferStream stream : streams) stream.setTransferHandler(transferHandler); dataSource.start(); } catch (Throwable t) { if (t instanceof ThreadDeath) throw (ThreadDeath) t; else setDeviceSession(null); } } } }; if (comboBox != null) comboBox.addActionListener(captureComboActionListener); soundLevelIndicator.addHierarchyListener( new HierarchyListener() { public void hierarchyChanged(HierarchyEvent event) { if ((event.getChangeFlags() & HierarchyEvent.SHOWING_CHANGED) != 0) { SwingUtilities.invokeLater( new Runnable() { public void run() { captureComboActionListener.actionPerformed(null); } }); } } }); } /** * Creates the UI controls which are to control the details of a specific <tt>AudioSystem</tt>. 
* * @param audioSystem the <tt>AudioSystem</tt> for which the UI controls to control its details * are to be created * @param container the <tt>JComponent</tt> into which the UI controls which are to control the * details of the specified <tt>audioSystem</tt> are to be added */ public static void createAudioSystemControls(AudioSystem audioSystem, JComponent container) { GridBagConstraints constraints = new GridBagConstraints(); constraints.anchor = GridBagConstraints.NORTHWEST; constraints.fill = GridBagConstraints.HORIZONTAL; constraints.weighty = 0; int audioSystemFeatures = audioSystem.getFeatures(); boolean featureNotifyAndPlaybackDevices = ((audioSystemFeatures & AudioSystem.FEATURE_NOTIFY_AND_PLAYBACK_DEVICES) != 0); constraints.gridx = 0; constraints.insets = new Insets(3, 0, 3, 3); constraints.weightx = 0; constraints.gridy = 0; container.add( new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_CAPTURE)), constraints); if (featureNotifyAndPlaybackDevices) { constraints.gridy = 2; container.add( new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK)), constraints); constraints.gridy = 3; container.add( new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_NOTIFY)), constraints); } constraints.gridx = 1; constraints.insets = new Insets(3, 3, 3, 0); constraints.weightx = 1; JComboBox captureCombo = null; if (featureNotifyAndPlaybackDevices) { captureCombo = new JComboBox(); captureCombo.setEditable(false); captureCombo.setModel( new DeviceConfigurationComboBoxModel( captureCombo, mediaService.getDeviceConfiguration(), DeviceConfigurationComboBoxModel.AUDIO_CAPTURE)); constraints.gridy = 0; container.add(captureCombo, constraints); } int anchor = constraints.anchor; SoundLevelIndicator capturePreview = new SoundLevelIndicator( SimpleAudioLevelListener.MIN_LEVEL, SimpleAudioLevelListener.MAX_LEVEL); constraints.anchor = GridBagConstraints.CENTER; constraints.gridy = (captureCombo == null) ? 0 : 1; container.add(capturePreview, constraints); constraints.anchor = anchor; constraints.gridy = GridBagConstraints.RELATIVE; if (featureNotifyAndPlaybackDevices) { JComboBox playbackCombo = new JComboBox(); playbackCombo.setEditable(false); playbackCombo.setModel( new DeviceConfigurationComboBoxModel( playbackCombo, mediaService.getDeviceConfiguration(), DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK)); container.add(playbackCombo, constraints); JComboBox notifyCombo = new JComboBox(); notifyCombo.setEditable(false); notifyCombo.setModel( new DeviceConfigurationComboBoxModel( notifyCombo, mediaService.getDeviceConfiguration(), DeviceConfigurationComboBoxModel.AUDIO_NOTIFY)); container.add(notifyCombo, constraints); } if ((AudioSystem.FEATURE_ECHO_CANCELLATION & audioSystemFeatures) != 0) { final SIPCommCheckBox echoCancelCheckBox = new SIPCommCheckBox( NeomediaActivator.getResources().getI18NString("impl.media.configform.ECHOCANCEL")); /* * First set the selected value, then add the listener, so that the * value is not saved when the default is merely being shown to the * user without modification.
*/ echoCancelCheckBox.setSelected(mediaService.getDeviceConfiguration().isEchoCancel()); echoCancelCheckBox.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { mediaService.getDeviceConfiguration().setEchoCancel(echoCancelCheckBox.isSelected()); } }); container.add(echoCancelCheckBox, constraints); } if ((AudioSystem.FEATURE_DENOISE & audioSystemFeatures) != 0) { final SIPCommCheckBox denoiseCheckBox = new SIPCommCheckBox( NeomediaActivator.getResources().getI18NString("impl.media.configform.DENOISE")); /* * First set the selected value, then add the listener, so that the * value is not saved when the default is merely being shown to the * user without modification. */ denoiseCheckBox.setSelected(mediaService.getDeviceConfiguration().isDenoise()); denoiseCheckBox.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { mediaService.getDeviceConfiguration().setDenoise(denoiseCheckBox.isSelected()); } }); container.add(denoiseCheckBox, constraints); } createAudioPreview(audioSystem, captureCombo, capturePreview); } /** * Creates basic controls for a type (AUDIO or VIDEO). * * @param type the type. * @return the built <tt>Component</tt>. */ public static Component createBasicControls(final int type) { final JComboBox deviceComboBox = new JComboBox(); deviceComboBox.setEditable(false); deviceComboBox.setModel( new DeviceConfigurationComboBoxModel( deviceComboBox, mediaService.getDeviceConfiguration(), type)); JLabel deviceLabel = new JLabel(getLabelText(type)); deviceLabel.setDisplayedMnemonic(getDisplayedMnemonic(type)); deviceLabel.setLabelFor(deviceComboBox); final Container devicePanel = new TransparentPanel(new FlowLayout(FlowLayout.CENTER)); devicePanel.setMaximumSize(new Dimension(WIDTH, 25)); devicePanel.add(deviceLabel); devicePanel.add(deviceComboBox); final JPanel deviceAndPreviewPanel = new TransparentPanel(new BorderLayout()); int preferredDeviceAndPreviewPanelHeight; switch (type) { case DeviceConfigurationComboBoxModel.AUDIO: preferredDeviceAndPreviewPanelHeight = 225; break; case DeviceConfigurationComboBoxModel.VIDEO: preferredDeviceAndPreviewPanelHeight = 305; break; default: preferredDeviceAndPreviewPanelHeight = 0; break; } if (preferredDeviceAndPreviewPanelHeight > 0) deviceAndPreviewPanel.setPreferredSize( new Dimension(WIDTH, preferredDeviceAndPreviewPanelHeight)); deviceAndPreviewPanel.add(devicePanel, BorderLayout.NORTH); final ActionListener deviceComboBoxActionListener = new ActionListener() { public void actionPerformed(ActionEvent event) { boolean revalidateAndRepaint = false; for (int i = deviceAndPreviewPanel.getComponentCount() - 1; i >= 0; i--) { Component c = deviceAndPreviewPanel.getComponent(i); if (c != devicePanel) { deviceAndPreviewPanel.remove(i); revalidateAndRepaint = true; } } Component preview = null; if ((deviceComboBox.getSelectedItem() != null) && deviceComboBox.isShowing()) { preview = createPreview(type, deviceComboBox, deviceAndPreviewPanel.getPreferredSize()); } if (preview != null) { deviceAndPreviewPanel.add(preview, BorderLayout.CENTER); revalidateAndRepaint = true; } if (revalidateAndRepaint) { deviceAndPreviewPanel.revalidate(); deviceAndPreviewPanel.repaint(); } } }; deviceComboBox.addActionListener(deviceComboBoxActionListener); /* * We have to initialize the controls to reflect the configuration * at the time of creating this instance.
Additionally, because the * video preview will stop when it and its associated controls * become unnecessary, we have to restart it when the mentioned * controls become necessary again. We'll address the two goals * described by pretending there's a selection in the video combo * box when the combo box in question becomes displayable. */ deviceComboBox.addHierarchyListener( new HierarchyListener() { public void hierarchyChanged(HierarchyEvent event) { if ((event.getChangeFlags() & HierarchyEvent.SHOWING_CHANGED) != 0) { SwingUtilities.invokeLater( new Runnable() { public void run() { deviceComboBoxActionListener.actionPerformed(null); } }); } } }); return deviceAndPreviewPanel; } /** * Creates all the controls (including encoding) for a type (AUDIO or VIDEO). * * @param type the type. * @return the built <tt>Component</tt>. */ private static Component createControls(int type) { ConfigurationService cfg = NeomediaActivator.getConfigurationService(); SIPCommTabbedPane container = new SIPCommTabbedPane(); ResourceManagementService res = NeomediaActivator.getResources(); if ((cfg == null) || !cfg.getBoolean(DEVICES_DISABLED_PROP, false)) { container.insertTab( res.getI18NString("impl.media.configform.DEVICES"), null, createBasicControls(type), null, 0); } if ((cfg == null) || !cfg.getBoolean(ENCODINGS_DISABLED_PROP, false)) { container.insertTab( res.getI18NString("impl.media.configform.ENCODINGS"), null, new PriorityTable( new EncodingConfigurationTableModel(mediaService.getEncodingConfiguration(), type), 100), null, 1); } if ((type == DeviceConfigurationComboBoxModel.VIDEO) && ((cfg == null) || !cfg.getBoolean(VIDEO_MORE_SETTINGS_DISABLED_PROP, false))) { container.insertTab( res.getI18NString("impl.media.configform.VIDEO_MORE_SETTINGS"), null, createVideoAdvancedSettings(), null, 2); } return container; } /** * Creates preview for the (video) device in the video container. * * @param device the device * @param videoContainer the video container * @throws IOException if a problem occurs while accessing the device * @throws MediaException if a problem occurs while getting the preview */ private static void createVideoPreview(CaptureDeviceInfo device, JComponent videoContainer) throws IOException, MediaException { videoContainer.removeAll(); videoContainer.revalidate(); videoContainer.repaint(); if (device == null) return; for (MediaDevice mediaDevice : mediaService.getDevices(MediaType.VIDEO, MediaUseCase.ANY)) { if (((MediaDeviceImpl) mediaDevice).getCaptureDeviceInfo().equals(device)) { Dimension videoContainerSize = videoContainer.getPreferredSize(); Component preview = (Component) mediaService.getVideoPreviewComponent( mediaDevice, videoContainerSize.width, videoContainerSize.height); if (preview != null) videoContainer.add(preview); break; } } } /** * Create preview component. * * @param type type * @param comboBox the options. * @param prefSize the preferred size * @return the component.
*/ private static Component createPreview(int type, final JComboBox comboBox, Dimension prefSize) { JComponent preview = null; if (type == DeviceConfigurationComboBoxModel.AUDIO) { Object selectedItem = comboBox.getSelectedItem(); if (selectedItem instanceof AudioSystem) { AudioSystem audioSystem = (AudioSystem) selectedItem; if (!NoneAudioSystem.LOCATOR_PROTOCOL.equalsIgnoreCase(audioSystem.getLocatorProtocol())) { preview = new TransparentPanel(new GridBagLayout()); createAudioSystemControls(audioSystem, preview); } } } else if (type == DeviceConfigurationComboBoxModel.VIDEO) { JLabel noPreview = new JLabel( NeomediaActivator.getResources().getI18NString("impl.media.configform.NO_PREVIEW")); noPreview.setHorizontalAlignment(SwingConstants.CENTER); noPreview.setVerticalAlignment(SwingConstants.CENTER); preview = createVideoContainer(noPreview); preview.setPreferredSize(prefSize); Object selectedItem = comboBox.getSelectedItem(); CaptureDeviceInfo device = null; if (selectedItem instanceof DeviceConfigurationComboBoxModel.CaptureDevice) device = ((DeviceConfigurationComboBoxModel.CaptureDevice) selectedItem).info; Exception exception; try { createVideoPreview(device, preview); exception = null; } catch (IOException ex) { exception = ex; } catch (MediaException ex) { exception = ex; } if (exception != null) { logger.error("Failed to create preview for device " + device, exception); device = null; } } return preview; } /** * Creates the video container. * * @param noVideoComponent the container component. * @return the video container. */ private static JComponent createVideoContainer(Component noVideoComponent) { return new VideoContainer(noVideoComponent, false); } /** * The mnemonic for a type. * * @param type audio or video type. * @return the mnemonic. */ private static char getDisplayedMnemonic(int type) { switch (type) { case DeviceConfigurationComboBoxModel.AUDIO: return NeomediaActivator.getResources().getI18nMnemonic("impl.media.configform.AUDIO"); case DeviceConfigurationComboBoxModel.VIDEO: return NeomediaActivator.getResources().getI18nMnemonic("impl.media.configform.VIDEO"); default: throw new IllegalArgumentException("type"); } } /** * A label for a type. * * @param type the type. * @return the label. */ private static String getLabelText(int type) { switch (type) { case DeviceConfigurationComboBoxModel.AUDIO: return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO"); case DeviceConfigurationComboBoxModel.AUDIO_CAPTURE: return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO_IN"); case DeviceConfigurationComboBoxModel.AUDIO_NOTIFY: return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO_NOTIFY"); case DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK: return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO_OUT"); case DeviceConfigurationComboBoxModel.VIDEO: return NeomediaActivator.getResources().getI18NString("impl.media.configform.VIDEO"); default: throw new IllegalArgumentException("type"); } } /** * Creates the video advanced settings. * * @return video advanced settings panel. 
*/ private static Component createVideoAdvancedSettings() { ResourceManagementService resources = NeomediaActivator.getResources(); final DeviceConfiguration deviceConfig = mediaService.getDeviceConfiguration(); TransparentPanel centerPanel = new TransparentPanel(new GridBagLayout()); centerPanel.setMaximumSize(new Dimension(WIDTH, 150)); JButton resetDefaultsButton = new JButton(resources.getI18NString("impl.media.configform.VIDEO_RESET")); JPanel resetButtonPanel = new TransparentPanel(new FlowLayout(FlowLayout.RIGHT)); resetButtonPanel.add(resetDefaultsButton); final JPanel centerAdvancedPanel = new TransparentPanel(new BorderLayout()); centerAdvancedPanel.add(centerPanel, BorderLayout.NORTH); centerAdvancedPanel.add(resetButtonPanel, BorderLayout.SOUTH); GridBagConstraints constraints = new GridBagConstraints(); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.anchor = GridBagConstraints.NORTHWEST; constraints.insets = new Insets(5, 5, 0, 0); constraints.gridx = 0; constraints.weightx = 0; constraints.weighty = 0; constraints.gridy = 0; centerPanel.add( new JLabel(resources.getI18NString("impl.media.configform.VIDEO_RESOLUTION")), constraints); constraints.gridy = 1; constraints.insets = new Insets(0, 0, 0, 0); final JCheckBox frameRateCheck = new SIPCommCheckBox(resources.getI18NString("impl.media.configform.VIDEO_FRAME_RATE")); centerPanel.add(frameRateCheck, constraints); constraints.gridy = 2; constraints.insets = new Insets(5, 5, 0, 0); centerPanel.add( new JLabel(resources.getI18NString("impl.media.configform.VIDEO_PACKETS_POLICY")), constraints); constraints.weightx = 1; constraints.gridx = 1; constraints.gridy = 0; constraints.insets = new Insets(5, 0, 0, 5); Object[] resolutionValues = new Object[DeviceConfiguration.SUPPORTED_RESOLUTIONS.length + 1]; System.arraycopy( DeviceConfiguration.SUPPORTED_RESOLUTIONS, 0, resolutionValues, 1, DeviceConfiguration.SUPPORTED_RESOLUTIONS.length); final JComboBox sizeCombo = new JComboBox(resolutionValues); sizeCombo.setRenderer(new ResolutionCellRenderer()); sizeCombo.setEditable(false); centerPanel.add(sizeCombo, constraints); // default value is 20 final JSpinner frameRate = new JSpinner(new SpinnerNumberModel(20, 5, 30, 1)); frameRate.addChangeListener( new ChangeListener() { public void stateChanged(ChangeEvent e) { deviceConfig.setFrameRate( ((SpinnerNumberModel) frameRate.getModel()).getNumber().intValue()); } }); constraints.gridy = 1; constraints.insets = new Insets(0, 0, 0, 5); centerPanel.add(frameRate, constraints); frameRateCheck.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { if (frameRateCheck.isSelected()) { deviceConfig.setFrameRate( ((SpinnerNumberModel) frameRate.getModel()).getNumber().intValue()); } else // unlimited framerate deviceConfig.setFrameRate(-1); frameRate.setEnabled(frameRateCheck.isSelected()); } }); final JSpinner videoMaxBandwidth = new JSpinner( new SpinnerNumberModel(deviceConfig.getVideoMaxBandwidth(), 1, Integer.MAX_VALUE, 1)); videoMaxBandwidth.addChangeListener( new ChangeListener() { public void stateChanged(ChangeEvent e) { deviceConfig.setVideoMaxBandwidth( ((SpinnerNumberModel) videoMaxBandwidth.getModel()).getNumber().intValue()); } }); constraints.gridx = 1; constraints.gridy = 2; constraints.insets = new Insets(0, 0, 5, 5); centerPanel.add(videoMaxBandwidth, constraints); resetDefaultsButton.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { // reset to defaults sizeCombo.setSelectedIndex(0); 
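// Index 0 is the null entry of resolutionValues, which ResolutionCellRenderer displays as "Auto".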
frameRateCheck.setSelected(false); frameRate.setEnabled(false); frameRate.setValue(20); // unlimited framerate deviceConfig.setFrameRate(-1); videoMaxBandwidth.setValue(DeviceConfiguration.DEFAULT_VIDEO_MAX_BANDWIDTH); } }); // load selected value or auto Dimension videoSize = deviceConfig.getVideoSize(); if ((videoSize.getHeight() != DeviceConfiguration.DEFAULT_VIDEO_HEIGHT) && (videoSize.getWidth() != DeviceConfiguration.DEFAULT_VIDEO_WIDTH)) sizeCombo.setSelectedItem(deviceConfig.getVideoSize()); else sizeCombo.setSelectedIndex(0); sizeCombo.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { Dimension selectedVideoSize = (Dimension) sizeCombo.getSelectedItem(); if (selectedVideoSize == null) { // the auto value, default one selectedVideoSize = new Dimension( DeviceConfiguration.DEFAULT_VIDEO_WIDTH, DeviceConfiguration.DEFAULT_VIDEO_HEIGHT); } deviceConfig.setVideoSize(selectedVideoSize); } }); frameRateCheck.setSelected( deviceConfig.getFrameRate() != DeviceConfiguration.DEFAULT_VIDEO_FRAMERATE); frameRate.setEnabled(frameRateCheck.isSelected()); if (frameRate.isEnabled()) frameRate.setValue(deviceConfig.getFrameRate()); return centerAdvancedPanel; } /** Renders the available resolutions in the combo box. */ private static class ResolutionCellRenderer extends DefaultListCellRenderer { /** * The serialization version number of the <tt>ResolutionCellRenderer</tt> class. Defined to the * value of <tt>0</tt> because the <tt>ResolutionCellRenderer</tt> instances do not have state * of their own. */ private static final long serialVersionUID = 0L; /** * Sets readable text describing the resolution; if the selected value is null, the text * "Auto" is used. * * @param list * @param value * @param index * @param isSelected * @param cellHasFocus * @return Component */ @Override public Component getListCellRendererComponent( JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) { // call super to set backgrounds and fonts super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); // now just change the text if (value == null) setText("Auto"); else if (value instanceof Dimension) { Dimension d = (Dimension) value; setText(((int) d.getWidth()) + "x" + ((int) d.getHeight())); } return this; } } }
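/*
 * Illustrative usage sketch (hypothetical, not part of the original sources): the panels
 * returned by MediaConfiguration are ordinary Swing components, so a host application can embed
 * them directly. Assumes the libjitsi/OSGi context required by NeomediaActivator is already up.
 */
class MediaConfigurationDemo {
    public static void main(String[] args) {
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                javax.swing.JTabbedPane tabs = new javax.swing.JTabbedPane();
                tabs.addTab("Audio", MediaConfiguration.createAudioConfigPanel());
                tabs.addTab("Video", MediaConfiguration.createVideoConfigPanel());
                javax.swing.JFrame frame = new javax.swing.JFrame("Media settings");
                frame.setDefaultCloseOperation(javax.swing.JFrame.DISPOSE_ON_CLOSE);
                frame.add(tabs);
                frame.pack();
                frame.setVisible(true);
            }
        });
    }
}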
/** * Discovers and registers DirectShow video capture devices with JMF. * * @author Sebastien Vincent */ public class DirectShowSystem extends DeviceSystem { /** * The <tt>Logger</tt> used by the <tt>DirectShowSystem</tt> class and its instances for logging * output. */ private static final Logger logger = Logger.getLogger(DirectShowSystem.class); /** The protocol of the <tt>MediaLocator</tt>s identifying DirectShow capture devices. */ private static final String LOCATOR_PROTOCOL = LOCATOR_PROTOCOL_DIRECTSHOW; /** * Constructor. Discovers and registers DirectShow capture devices with JMF. * * @throws Exception if anything goes wrong while discovering and registering DirectShow capture * devices with JMF */ public DirectShowSystem() throws Exception { super(MediaType.VIDEO, LOCATOR_PROTOCOL); } /** {@inheritDoc} */ @Override protected void doInitialize() throws Exception { DSCaptureDevice[] devices = DSManager.getInstance().getCaptureDevices(); boolean captureDeviceInfoIsAdded = false; for (int i = 0, count = (devices == null) ? 0 : devices.length; i < count; i++) { long pixelFormat = devices[i].getFormat().getPixelFormat(); int ffmpegPixFmt = (int) DataSource.getFFmpegPixFmt(pixelFormat); Format format = null; if (ffmpegPixFmt != FFmpeg.PIX_FMT_NONE) { format = new AVFrameFormat(ffmpegPixFmt, (int) pixelFormat); } else { logger.warn( "No support for this webcam: " + devices[i].getName() + " (format " + pixelFormat + " not supported)"); continue; } if (logger.isInfoEnabled()) { for (DSFormat f : devices[i].getSupportedFormats()) { if (f.getWidth() != 0 && f.getHeight() != 0) logger.info( "Webcam available resolution for " + devices[i].getName() + ":" + f.getWidth() + "x" + f.getHeight()); } } CaptureDeviceInfo device = new CaptureDeviceInfo( devices[i].getName(), new MediaLocator(LOCATOR_PROTOCOL + ':' + devices[i].getName()), new Format[] {format}); if (logger.isInfoEnabled()) logger.info("Found[" + i + "]: " + device.getName()); CaptureDeviceManager.addDevice(device); captureDeviceInfoIsAdded = true; } if (captureDeviceInfoIsAdded && !MediaServiceImpl.isJmfRegistryDisableLoad()) CaptureDeviceManager.commit(); DSManager.dispose(); } }
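/*
 * Illustrative sketch (hypothetical, not part of the original sources): once doInitialize() has
 * run, every discovered webcam is registered under a locator of the form
 * "directshow:<device name>" and can be enumerated through JMF's CaptureDeviceManager.
 */
class ListDirectShowDevicesExample {
    public static void main(String[] args) {
        java.util.Vector<javax.media.CaptureDeviceInfo> devices =
            javax.media.CaptureDeviceManager.getDeviceList(null);
        for (javax.media.CaptureDeviceInfo device : devices) {
            if ("directshow".equals(device.getLocator().getProtocol()))
                System.out.println(device.getName() + " -> " + device.getLocator());
        }
    }
}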
/** * A <tt>Recorder</tt> implementation which attaches to an <tt>RTPTranslator</tt>. * * @author Vladimir Marinov * @author Boris Grozev */ public class RecorderRtpImpl implements Recorder, ReceiveStreamListener, ActiveSpeakerChangedListener, ControllerListener { /** * The <tt>Logger</tt> used by the <tt>RecorderRtpImpl</tt> class and its instances for logging * output. */ private static final Logger logger = Logger.getLogger(RecorderRtpImpl.class); // Values hard-coded to match Chrome. // TODO: allow them to be set dynamically private static final byte redPayloadType = 116; private static final byte ulpfecPayloadType = 117; private static final byte vp8PayloadType = 100; private static final byte opusPayloadType = 111; private static final Format redFormat = new VideoFormat(Constants.RED); private static final Format ulpfecFormat = new VideoFormat(Constants.ULPFEC); private static final Format vp8RtpFormat = new VideoFormat(Constants.VP8_RTP); private static final Format vp8Format = new VideoFormat(Constants.VP8); private static final Format opusFormat = new AudioFormat(Constants.OPUS_RTP, 48000, Format.NOT_SPECIFIED, Format.NOT_SPECIFIED); private static final int FMJ_VIDEO_JITTER_BUFFER_MIN_SIZE = 300; /** The <tt>ContentDescriptor</tt> to use when saving audio. */ private static final ContentDescriptor AUDIO_CONTENT_DESCRIPTOR = new ContentDescriptor(FileTypeDescriptor.MPEG_AUDIO); /** The suffix for audio file names. */ private static final String AUDIO_FILENAME_SUFFIX = ".mp3"; /** The suffix for video file names. */ private static final String VIDEO_FILENAME_SUFFIX = ".webm"; static { Registry.set("video_jitter_buffer_MIN_SIZE", FMJ_VIDEO_JITTER_BUFFER_MIN_SIZE); } /** The <tt>RTPTranslator</tt> that this recorder is/will be attached to. */ private RTPTranslatorImpl translator; /** * The custom <tt>RTPConnector</tt> that this instance uses to read from {@link #translator} and * write to {@link #rtpManager}. */ private RTPConnectorImpl rtpConnector; /** Path to the directory where the output files will be stored. */ private String path; /** The <tt>RTCPFeedbackMessageSender</tt> that we use to send RTCP FIR messages. */ private RTCPFeedbackMessageSender rtcpFeedbackSender; /** * The {@link RTPManager} instance we use to handle the packets coming from * <tt>RTPTranslator</tt>. */ private RTPManager rtpManager; /** * The instance which should be notified when events related to recordings (such as the start or * end of a recording) occur. */ private RecorderEventHandlerImpl eventHandler; /** * Holds the <tt>ReceiveStreams</tt> added to this instance by {@link #rtpManager} and additional * information associated with each one (e.g. the <tt>Processor</tt>, if any, used for it). */ private final HashSet<ReceiveStreamDesc> receiveStreams = new HashSet<ReceiveStreamDesc>(); private final Set<Long> activeVideoSsrcs = new HashSet<Long>(); /** * The <tt>ActiveSpeakerDetector</tt> which will listen to the audio receive streams of this * <tt>RecorderRtpImpl</tt> and notify it about changes to the active speaker via calls to {@link * #activeSpeakerChanged(long)} */ private ActiveSpeakerDetector activeSpeakerDetector = null; StreamRTPManager streamRTPManager; private SynchronizerImpl synchronizer; private boolean started = false; /** * Constructor. * * @param translator the <tt>RTPTranslator</tt> to which this instance will attach in order to * record media.
*/ public RecorderRtpImpl(RTPTranslator translator) { this.translator = (RTPTranslatorImpl) translator; activeSpeakerDetector = new ActiveSpeakerDetectorImpl(); activeSpeakerDetector.addActiveSpeakerChangedListener(this); } /** Implements {@link Recorder#addListener(Recorder.Listener)}. */ @Override public void addListener(Listener listener) {} /** Implements {@link Recorder#removeListener(Recorder.Listener)}. */ @Override public void removeListener(Listener listener) {} /** Implements {@link Recorder#getSupportedFormats()}. */ @Override public List<String> getSupportedFormats() { return null; } /** Implements {@link Recorder#setMute(boolean)}. */ @Override public void setMute(boolean mute) {} /** * Implements {@link Recorder#getFilename()}. Returns null, since we don't have a (single) * associated filename. */ @Override public String getFilename() { return null; } /** * Sets the instance which should be notified when events related to recordings (such as the start * or end of a recording) occur. */ public void setEventHandler(RecorderEventHandler eventHandler) { if (this.eventHandler == null || (this.eventHandler != eventHandler && this.eventHandler.handler != eventHandler)) { if (this.eventHandler == null) this.eventHandler = new RecorderEventHandlerImpl(eventHandler); else this.eventHandler.handler = eventHandler; } } /** * {@inheritDoc} * * @param format unused, since this implementation records multiple streams using potentially * different formats. * @param dirname the path to the directory into which this <tt>Recorder</tt> will store the * recorded media files. */ @Override public void start(String format, String dirname) throws IOException, MediaException { if (logger.isInfoEnabled()) logger.info("Starting, format=" + format + " " + hashCode()); path = dirname; MediaService mediaService = LibJitsi.getMediaService(); /* * Note that we use only one RTPConnector for both the RTPTranslator * and the RTPManager instances. The this.translator will write to its * output streams, and this.rtpManager will read from its input streams. */ rtpConnector = new RTPConnectorImpl(redPayloadType, ulpfecPayloadType); rtpManager = RTPManager.newInstance(); /* * Add the formats that we know about. */ rtpManager.addFormat(vp8RtpFormat, vp8PayloadType); rtpManager.addFormat(opusFormat, opusPayloadType); rtpManager.addReceiveStreamListener(this); /* * Note: When this.rtpManager sends RTCP sender/receiver reports, they * will end up being written to its own input stream. This is not * expected to cause problems, but might be something to keep an eye on. */ rtpManager.initialize(rtpConnector); /* * Register a fake call participant. * TODO: can we use a more generic MediaStream here? */ streamRTPManager = new StreamRTPManager( mediaService.createMediaStream( new MediaDeviceImpl(new CaptureDeviceInfo(), MediaType.VIDEO)), translator); streamRTPManager.initialize(rtpConnector); rtcpFeedbackSender = translator.getRtcpFeedbackMessageSender(); translator.addFormat(streamRTPManager, opusFormat, opusPayloadType); // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, redFormat, // redPayloadType); // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, ulpfecFormat, // ulpfecPayloadType); // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, // mediaFormatImpl.getFormat(), vp8PayloadType); started = true; } @Override public void stop() { if (started) { if (logger.isInfoEnabled()) logger.info("Stopping " + hashCode()); // remove the recorder from the translator (e.g. 
stop new packets from // being written to rtpConnector) if (streamRTPManager != null) streamRTPManager.dispose(); HashSet<ReceiveStreamDesc> streamsToRemove = new HashSet<ReceiveStreamDesc>(); synchronized (receiveStreams) { streamsToRemove.addAll(receiveStreams); } for (ReceiveStreamDesc r : streamsToRemove) removeReceiveStream(r, false); rtpConnector.rtcpPacketTransformer.close(); rtpConnector.rtpPacketTransformer.close(); rtpManager.dispose(); started = false; } } /** * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}. * * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s. */ @Override public void update(ReceiveStreamEvent event) { if (event == null) return; ReceiveStream receiveStream = event.getReceiveStream(); if (event instanceof NewReceiveStreamEvent) { if (receiveStream == null) { logger.warn("NewReceiveStreamEvent: null"); return; } final long ssrc = getReceiveStreamSSRC(receiveStream); ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc); if (receiveStreamDesc != null) { String s = "NewReceiveStreamEvent for an existing SSRC. "; if (receiveStream != receiveStreamDesc.receiveStream) s += "(but different ReceiveStream object)"; logger.warn(s); return; } else receiveStreamDesc = new ReceiveStreamDesc(receiveStream); if (logger.isInfoEnabled()) logger.info("New ReceiveStream, ssrc=" + ssrc); // Find the format of the ReceiveStream DataSource dataSource = receiveStream.getDataSource(); if (dataSource instanceof PushBufferDataSource) { Format format = null; PushBufferDataSource pbds = (PushBufferDataSource) dataSource; for (PushBufferStream pbs : pbds.getStreams()) { if ((format = pbs.getFormat()) != null) break; } if (format == null) { logger.error("Failed to handle new ReceiveStream: " + "Failed to determine format"); return; } receiveStreamDesc.format = format; } else { logger.error("Failed to handle new ReceiveStream: " + "Unsupported DataSource"); return; } int rtpClockRate = -1; if (receiveStreamDesc.format instanceof AudioFormat) rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate(); else if (receiveStreamDesc.format instanceof VideoFormat) rtpClockRate = 90000; getSynchronizer().setRtpClockRate(ssrc, rtpClockRate); // create a Processor and configure it Processor processor = null; try { processor = Manager.createProcessor(receiveStream.getDataSource()); } catch (NoProcessorException npe) { logger.error("Failed to create Processor: ", npe); return; } catch (IOException ioe) { logger.error("Failed to create Processor: ", ioe); return; } if (logger.isInfoEnabled()) logger.info("Created processor for SSRC=" + ssrc); processor.addControllerListener(this); receiveStreamDesc.processor = processor; final int streamCount; synchronized (receiveStreams) { receiveStreams.add(receiveStreamDesc); streamCount = receiveStreams.size(); } /* * XXX TODO IRBABOON * This is a terrible hack which works around a failure to realize() * some of the Processor-s for audio streams, when multiple streams * start nearly simultaneously. The cause of the problem is currently * unknown (and synchronizing all FMJ calls in RecorderRtpImpl * does not help). * XXX TODO NOOBABRI */ if (receiveStreamDesc.format instanceof AudioFormat) { final Processor p = processor; new Thread() { @Override public void run() { // delay configuring the processors for the different // audio streams to decrease the probability that they // run together.
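// streamCount was captured while holding the receiveStreams lock, so each successive // audio stream sleeps 450 ms longer than the previous one before configure() is called.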
try { int ms = 450 * (streamCount - 1); logger.warn( "Sleeping for " + ms + "ms before" + " configuring processor for SSRC=" + ssrc + " " + System.currentTimeMillis()); Thread.sleep(ms); } catch (Exception e) { /* ignore interruption and proceed to configure() */ } p.configure(); } }.start(); /* start(), not run(): the delay must not block the thread delivering ReceiveStreamEvents */ } else { processor.configure(); } } else if (event instanceof TimeoutEvent) { if (receiveStream == null) { // TODO: we might want to get the list of ReceiveStream-s from // rtpManager and compare it to our list, to see if we should // remove a stream. logger.warn("TimeoutEvent: null."); return; } // FMJ silently creates new ReceiveStream instances, so we have to // recognize them by the SSRC. ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream)); if (receiveStreamDesc != null) { if (logger.isInfoEnabled()) { logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc); } removeReceiveStream(receiveStreamDesc, true); } } else if (logger.isInfoEnabled()) { logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event); } } private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) { if (receiveStream.format instanceof VideoFormat) { rtpConnector.packetBuffer.disable(receiveStream.ssrc); emptyPacketBuffer(receiveStream.ssrc); } if (receiveStream.dataSink != null) { try { receiveStream.dataSink.stop(); } catch (IOException e) { logger.error("Failed to stop DataSink " + e); } receiveStream.dataSink.close(); } if (receiveStream.processor != null) { receiveStream.processor.stop(); receiveStream.processor.close(); } DataSource dataSource = receiveStream.receiveStream.getDataSource(); if (dataSource != null) { try { dataSource.stop(); } catch (IOException ioe) { logger.warn("Failed to stop DataSource"); } dataSource.disconnect(); } synchronized (receiveStreams) { receiveStreams.remove(receiveStream); } } /** * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from * the <tt>Processor</tt>s that this instance uses to transcode media. * * @param ev the event to handle. */ public void controllerUpdate(ControllerEvent ev) { if (ev == null || ev.getSourceController() == null) { return; } Processor processor = (Processor) ev.getSourceController(); ReceiveStreamDesc desc = findReceiveStream(processor); if (desc == null) { logger.warn("Event from an orphaned processor, ignoring: " + ev); return; } if (ev instanceof ConfigureCompleteEvent) { if (logger.isInfoEnabled()) { logger.info( "Configured processor for ReceiveStream ssrc=" + desc.ssrc + " (" + desc.format + ")" + " " + System.currentTimeMillis()); } boolean audio = desc.format instanceof AudioFormat; if (audio) { ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR); if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) { logger.error( "Failed to set the Processor content " + "descriptor to " + AUDIO_CONTENT_DESCRIPTOR + ". Actual result: " + cd); removeReceiveStream(desc, false); return; } } for (TrackControl track : processor.getTrackControls()) { Format trackFormat = track.getFormat(); if (audio) { final long ssrc = desc.ssrc; SilenceEffect silenceEffect; if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) { silenceEffect = new SilenceEffect(48000); } else { // We haven't tested that the RTP timestamps survive // the journey through the chain when codecs other than // opus are in use, so for the moment we rely on FMJ's // timestamps for non-opus formats.
silenceEffect = new SilenceEffect(); } silenceEffect.setListener( new SilenceEffect.Listener() { boolean first = true; @Override public void onSilenceNotInserted(long timestamp) { if (first) { first = false; // send event only audioRecordingStarted(ssrc, timestamp); } else { // change file and send event resetRecording(ssrc, timestamp); } } }); desc.silenceEffect = silenceEffect; AudioLevelEffect audioLevelEffect = new AudioLevelEffect(); audioLevelEffect.setAudioLevelListener( new SimpleAudioLevelListener() { @Override public void audioLevelChanged(int level) { activeSpeakerDetector.levelChanged(ssrc, level); } }); try { // We add an effect, which will insert "silence" in // place of lost packets. track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect}); } catch (UnsupportedPlugInException upie) { logger.warn("Failed to insert silence effect: " + upie); // But do go on, a recording without extra silence is // better than nothing ;) } } else { // transcode vp8/rtp to vp8 (i.e. depacketize vp8) if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format); else { logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc); // we currently only support vp8 removeReceiveStream(desc, false); return; } } } processor.realize(); } else if (ev instanceof RealizeCompleteEvent) { desc.dataSource = processor.getDataOutput(); long ssrc = desc.ssrc; boolean audio = desc.format instanceof AudioFormat; String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX; // XXX '\' on windows? String filename = getNextFilename(path + "/" + ssrc, suffix); desc.filename = filename; DataSink dataSink; if (audio) { try { dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename)); } catch (NoDataSinkException ndse) { logger.error("Could not create DataSink: " + ndse); removeReceiveStream(desc, false); return; } } else { dataSink = new WebmDataSink(filename, desc.dataSource); } if (logger.isInfoEnabled()) logger.info( "Created DataSink (" + dataSink + ") for SSRC=" + ssrc + ". Output filename: " + filename); try { dataSink.open(); } catch (IOException e) { logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e); removeReceiveStream(desc, false); return; } if (!audio) { final WebmDataSink webmDataSink = (WebmDataSink) dataSink; webmDataSink.setSsrc(ssrc); webmDataSink.setEventHandler(eventHandler); webmDataSink.setKeyFrameControl( new KeyFrameControlAdapter() { @Override public boolean requestKeyFrame(boolean urgent) { return requestFIR(webmDataSink); } }); } try { dataSink.start(); } catch (IOException e) { logger.error( "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e); removeReceiveStream(desc, false); return; } if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc); desc.dataSink = dataSink; processor.start(); } else if (logger.isDebugEnabled()) { logger.debug( "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev); } } /** * Restarts the recording for a specific SSRC. * * @param ssrc the SSRC for which to restart recording. * @param timestamp the RTP timestamp at which the new recording begins (i.e. the timestamp of the * first RTP packet of the new recording).
*/ private void resetRecording(long ssrc, long timestamp) { ReceiveStreamDesc receiveStream = findReceiveStream(ssrc); // we only restart audio recordings if (receiveStream != null && receiveStream.format instanceof AudioFormat) { String newFilename = getNextFilename(path + "/" + ssrc, AUDIO_FILENAME_SUFFIX); // flush the buffer contained in the MP3 encoder String s = "trying to flush ssrc=" + ssrc; Processor p = receiveStream.processor; if (p != null) { s += " p!=null"; for (TrackControl tc : p.getTrackControls()) { Object o = tc.getControl(FlushableControl.class.getName()); if (o != null) ((FlushableControl) o).flush(); } } if (logger.isDebugEnabled()) logger.debug(s); if (logger.isInfoEnabled()) { logger.info("Restarting recording for SSRC=" + ssrc + ". New filename: " + newFilename); } receiveStream.dataSink.close(); receiveStream.dataSink = null; // flush the FMJ jitter buffer // DataSource ds = receiveStream.receiveStream.getDataSource(); // if (ds instanceof net.sf.fmj.media.protocol.rtp.DataSource) // ((net.sf.fmj.media.protocol.rtp.DataSource)ds).flush(); receiveStream.filename = newFilename; try { receiveStream.dataSink = Manager.createDataSink( receiveStream.dataSource, new MediaLocator("file:" + newFilename)); } catch (NoDataSinkException ndse) { logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ndse); removeReceiveStream(receiveStream, false); } try { receiveStream.dataSink.open(); receiveStream.dataSink.start(); } catch (IOException ioe) { logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ioe); removeReceiveStream(receiveStream, false); } audioRecordingStarted(ssrc, timestamp); } } private void audioRecordingStarted(long ssrc, long timestamp) { ReceiveStreamDesc desc = findReceiveStream(ssrc); if (desc == null) return; RecorderEvent event = new RecorderEvent(); event.setType(RecorderEvent.Type.RECORDING_STARTED); event.setMediaType(MediaType.AUDIO); event.setSsrc(ssrc); event.setRtpTimestamp(timestamp); event.setFilename(desc.filename); if (eventHandler != null) eventHandler.handleEvent(event); } /** * Handles a request from a specific <tt>DataSink</tt> for a keyframe, by sending an RTCP * feedback FIR message to the media source. * * @param dataSink the <tt>DataSink</tt> which requests that a keyframe be requested with a FIR * message. * @return <tt>true</tt> if a keyframe was successfully requested, <tt>false</tt> otherwise */ private boolean requestFIR(WebmDataSink dataSink) { ReceiveStreamDesc desc = findReceiveStream(dataSink); if (desc != null && rtcpFeedbackSender != null) { return rtcpFeedbackSender.sendFIR((int) desc.ssrc); } return false; } /** * Returns "prefix"+"suffix" if a file with this name does not exist. Otherwise, returns the * first nonexistent filename of the form "prefix-"+i+"suffix", for an integer i. i is bounded by * 1000 to prevent hanging, and on failure to find a nonexistent filename the method will return * null. * * @param prefix the path and name prefix of the desired file * @param suffix the filename suffix (extension) * @return the first available filename, or null if none was found within the bound */ private String getNextFilename(String prefix, String suffix) { if (!new File(prefix + suffix).exists()) return prefix + suffix; int i = 1; String s; do { s = prefix + "-" + i + suffix; if (!new File(s).exists()) return s; i++; } while (i < 1000); // don't hang indefinitely... return null; } /** * Finds the <tt>ReceiveStreamDesc</tt> with a particular <tt>Processor</tt>. * * @param processor The <tt>Processor</tt> to match. * @return the <tt>ReceiveStreamDesc</tt> with a particular <tt>Processor</tt>, or <tt>null</tt>.
*/ private ReceiveStreamDesc findReceiveStream(Processor processor) { if (processor == null) return null; synchronized (receiveStreams) { for (ReceiveStreamDesc r : receiveStreams) if (processor.equals(r.processor)) return r; } return null; } /** * Finds the <tt>ReceiveStreamDesc</tt> with a particular <tt>DataSink</tt>. * * @param dataSink The <tt>DataSink</tt> to match. * @return the <tt>ReceiveStreamDesc</tt> with a particular <tt>DataSink</tt>, or <tt>null</tt>. */ private ReceiveStreamDesc findReceiveStream(DataSink dataSink) { if (dataSink == null) return null; synchronized (receiveStreams) { for (ReceiveStreamDesc r : receiveStreams) if (dataSink.equals(r.dataSink)) return r; } return null; } /** * Finds the <tt>ReceiveStreamDesc</tt> with a particular SSRC. * * @param ssrc The SSRC to match. * @return the <tt>ReceiveStreamDesc</tt> with a particular SSRC, or <tt>null</tt>. */ private ReceiveStreamDesc findReceiveStream(long ssrc) { synchronized (receiveStreams) { for (ReceiveStreamDesc r : receiveStreams) if (ssrc == r.ssrc) return r; } return null; } /** * Gets the SSRC of a <tt>ReceiveStream</tt> as a (non-negative) <tt>long</tt>. * * <p>FMJ stores the 32-bit SSRC values in <tt>int</tt>s, and the <tt>ReceiveStream.getSSRC()</tt> * implementation(s) don't take care of converting the negative <tt>int</tt> values sometimes * resulting from reading a 32-bit field into the correct unsigned <tt>long</tt> value. So do * the conversion here. * * @param receiveStream the <tt>ReceiveStream</tt> for which to get the SSRC. * @return the SSRC of <tt>receiveStream</tt> as a (non-negative) <tt>long</tt>. */ private long getReceiveStreamSSRC(ReceiveStream receiveStream) { return 0xffffffffL & receiveStream.getSSRC(); } /** * Implements {@link ActiveSpeakerChangedListener#activeSpeakerChanged(long)}. Notifies this * <tt>RecorderRtpImpl</tt> that the audio <tt>ReceiveStream</tt> considered active has changed, * and that the new active stream has SSRC <tt>ssrc</tt>. * * @param ssrc the SSRC of the new active stream. */ @Override public void activeSpeakerChanged(long ssrc) { if (eventHandler != null) { RecorderEvent e = new RecorderEvent(); e.setAudioSsrc(ssrc); // TODO: how do we time this?
e.setInstant(System.currentTimeMillis()); e.setType(RecorderEvent.Type.SPEAKER_CHANGED); e.setMediaType(MediaType.VIDEO); eventHandler.handleEvent(e); } } private void handleRtpPacket(RawPacket pkt) { if (pkt != null && pkt.getPayloadType() == vp8PayloadType) { int ssrc = pkt.getSSRC(); if (!activeVideoSsrcs.contains(ssrc & 0xffffffffL)) { synchronized (activeVideoSsrcs) { if (!activeVideoSsrcs.contains(ssrc & 0xffffffffL)) { activeVideoSsrcs.add(ssrc & 0xffffffffL); rtcpFeedbackSender.sendFIR(ssrc); } } } } } private void handleRtcpPacket(RawPacket pkt) { getSynchronizer().addRTCPPacket(pkt); if (eventHandler != null) eventHandler.nudge(); /* guard: the event handler may not have been set */ } public SynchronizerImpl getSynchronizer() { if (synchronizer == null) synchronizer = new SynchronizerImpl(); return synchronizer; } public void setSynchronizer(Synchronizer synchronizer) { if (synchronizer instanceof SynchronizerImpl) { this.synchronizer = (SynchronizerImpl) synchronizer; } } public void connect(Recorder recorder) { if (!(recorder instanceof RecorderRtpImpl)) return; ((RecorderRtpImpl) recorder).setSynchronizer(getSynchronizer()); } private void emptyPacketBuffer(long ssrc) { RawPacket[] pkts = rtpConnector.packetBuffer.emptyBuffer(ssrc); RTPConnectorImpl.OutputDataStreamImpl dataStream; try { dataStream = rtpConnector.getDataOutputStream(); } catch (IOException ioe) { logger.error("Failed to empty packet buffer for SSRC=" + ssrc + ": " + ioe); return; } for (RawPacket pkt : pkts) dataStream.write( pkt.getBuffer(), pkt.getOffset(), pkt.getLength(), false /* already transformed */); } /** The <tt>RTPConnector</tt> implementation used by this <tt>RecorderRtpImpl</tt>. */ private class RTPConnectorImpl implements RTPConnector { private PushSourceStreamImpl controlInputStream; private OutputDataStreamImpl controlOutputStream; private PushSourceStreamImpl dataInputStream; private OutputDataStreamImpl dataOutputStream; private SourceTransferHandler dataTransferHandler; private SourceTransferHandler controlTransferHandler; private RawPacket pendingDataPacket = new RawPacket(); private RawPacket pendingControlPacket = new RawPacket(); private PacketTransformer rtpPacketTransformer = null; private PacketTransformer rtcpPacketTransformer = null; /** The PacketBuffer instance which we use as a jitter buffer. */ private PacketBuffer packetBuffer; private RTPConnectorImpl(byte redPT, byte ulpfecPT) { packetBuffer = new PacketBuffer(); // The chain of transformers will be applied in reverse order for // incoming packets.
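// For an incoming packet this means: RED decapsulation first, then FEC recovery, then compound-RTCP splitting, then this recorder's interception hooks (TransformEngineImpl), and the jitter buffer (packetBuffer) last.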
TransformEngine transformEngine = new TransformEngineChain( new TransformEngine[] { packetBuffer, new TransformEngineImpl(), new CompoundPacketEngine(), new FECTransformEngine(ulpfecPT, (byte) -1), new REDTransformEngine(redPT, (byte) -1) }); rtpPacketTransformer = transformEngine.getRTPTransformer(); rtcpPacketTransformer = transformEngine.getRTCPTransformer(); } private RTPConnectorImpl() {} @Override public void close() { try { if (dataOutputStream != null) dataOutputStream.close(); if (controlOutputStream != null) controlOutputStream.close(); } catch (IOException ioe) { throw new UndeclaredThrowableException(ioe); } } @Override public PushSourceStream getControlInputStream() throws IOException { if (controlInputStream == null) { controlInputStream = new PushSourceStreamImpl(true); } return controlInputStream; } @Override public OutputDataStream getControlOutputStream() throws IOException { if (controlOutputStream == null) { controlOutputStream = new OutputDataStreamImpl(true); } return controlOutputStream; } @Override public PushSourceStream getDataInputStream() throws IOException { if (dataInputStream == null) { dataInputStream = new PushSourceStreamImpl(false); } return dataInputStream; } @Override public OutputDataStreamImpl getDataOutputStream() throws IOException { if (dataOutputStream == null) { dataOutputStream = new OutputDataStreamImpl(false); } return dataOutputStream; } @Override public double getRTCPBandwidthFraction() { return -1; } @Override public double getRTCPSenderBandwidthFraction() { return -1; } @Override public int getReceiveBufferSize() { // TODO Auto-generated method stub return 0; } @Override public int getSendBufferSize() { // TODO Auto-generated method stub return 0; } @Override public void setReceiveBufferSize(int arg0) throws IOException { // TODO Auto-generated method stub } @Override public void setSendBufferSize(int arg0) throws IOException { // TODO Auto-generated method stub } private class OutputDataStreamImpl implements OutputDataStream { boolean isControlStream; private RawPacket[] rawPacketArray = new RawPacket[1]; public OutputDataStreamImpl(boolean isControlStream) { this.isControlStream = isControlStream; } public int write(byte[] buffer, int offset, int length) { return write(buffer, offset, length, true); } public int write(byte[] buffer, int offset, int length, boolean transform) { RawPacket pkt = rawPacketArray[0]; if (pkt == null) pkt = new RawPacket(); rawPacketArray[0] = pkt; byte[] pktBuf = pkt.getBuffer(); if (pktBuf == null || pktBuf.length < length) { pktBuf = new byte[length]; pkt.setBuffer(pktBuf); } System.arraycopy(buffer, offset, pktBuf, 0, length); pkt.setOffset(0); pkt.setLength(length); if (transform) { PacketTransformer packetTransformer = isControlStream ? 
rtcpPacketTransformer : rtpPacketTransformer; if (packetTransformer != null) rawPacketArray = packetTransformer.reverseTransform(rawPacketArray); } SourceTransferHandler transferHandler; PushSourceStream pushSourceStream; try { if (isControlStream) { transferHandler = controlTransferHandler; pushSourceStream = getControlInputStream(); } else { transferHandler = dataTransferHandler; pushSourceStream = getDataInputStream(); } } catch (IOException ioe) { throw new UndeclaredThrowableException(ioe); } for (int i = 0; i < rawPacketArray.length; i++) { RawPacket packet = rawPacketArray[i]; // keep the first element for reuse if (i != 0) rawPacketArray[i] = null; if (packet != null) { if (isControlStream) pendingControlPacket = packet; else pendingDataPacket = packet; if (transferHandler != null) { transferHandler.transferData(pushSourceStream); } } } return length; } public void close() throws IOException {} } /** * A dummy implementation of {@link PushSourceStream}. * * @author Vladimir Marinov */ private class PushSourceStreamImpl implements PushSourceStream { private boolean isControlStream = false; public PushSourceStreamImpl(boolean isControlStream) { this.isControlStream = isControlStream; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public boolean endOfStream() { return false; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public ContentDescriptor getContentDescriptor() { return null; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public long getContentLength() { return 0; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public Object getControl(String arg0) { return null; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public Object[] getControls() { return null; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public int getMinimumTransferSize() { if (isControlStream) { if (pendingControlPacket.getBuffer() != null) { return pendingControlPacket.getLength(); } } else { if (pendingDataPacket.getBuffer() != null) { return pendingDataPacket.getLength(); } } return 0; } @Override public int read(byte[] buffer, int offset, int length) throws IOException { RawPacket pendingPacket; if (isControlStream) { pendingPacket = pendingControlPacket; } else { pendingPacket = pendingDataPacket; } int bytesToRead = 0; byte[] pendingPacketBuffer = pendingPacket.getBuffer(); if (pendingPacketBuffer != null) { int pendingPacketLength = pendingPacket.getLength(); bytesToRead = length > pendingPacketLength ? pendingPacketLength : length; System.arraycopy( pendingPacketBuffer, pendingPacket.getOffset(), buffer, offset, bytesToRead); } return bytesToRead; } /** * {@inheritDoc} * * <p>We keep the first non-null <tt>SourceTransferHandler</tt> that was set, because we don't * want it to be overwritten when we initialize a second <tt>RTPManager</tt> with this * <tt>RTPConnector</tt>. 
* * <p>See {@link RecorderRtpImpl#start(String, String)} */ @Override public void setTransferHandler(SourceTransferHandler transferHandler) { if (isControlStream) { if (RTPConnectorImpl.this.controlTransferHandler == null) { RTPConnectorImpl.this.controlTransferHandler = transferHandler; } } else { if (RTPConnectorImpl.this.dataTransferHandler == null) { RTPConnectorImpl.this.dataTransferHandler = transferHandler; } } } } /** * A transform engine implementation which allows <tt>RecorderRtpImpl</tt> to intercept incoming * RTP and RTCP packets. */ private class TransformEngineImpl implements TransformEngine { SinglePacketTransformer rtpTransformer = new SinglePacketTransformer() { @Override public RawPacket transform(RawPacket pkt) { return pkt; } @Override public RawPacket reverseTransform(RawPacket pkt) { RecorderRtpImpl.this.handleRtpPacket(pkt); return pkt; } @Override public void close() {} }; SinglePacketTransformer rtcpTransformer = new SinglePacketTransformer() { @Override public RawPacket transform(RawPacket pkt) { return pkt; } @Override public RawPacket reverseTransform(RawPacket pkt) { RecorderRtpImpl.this.handleRtcpPacket(pkt); if (pkt != null && pkt.getRTCPPayloadType() == 203) { // An RTCP BYE packet. Remove the receive stream before // it gets to FMJ, because we want to, for example, // flush the packet buffer before that. long ssrc = pkt.getRTCPSSRC() & 0xffffffffL; if (logger.isInfoEnabled()) logger.info("RTCP BYE for SSRC=" + ssrc); ReceiveStreamDesc receiveStream = findReceiveStream(ssrc); if (receiveStream != null) removeReceiveStream(receiveStream, false); } return pkt; } @Override public void close() {} }; @Override public PacketTransformer getRTPTransformer() { return rtpTransformer; } @Override public PacketTransformer getRTCPTransformer() { return rtcpTransformer; } } } private class RecorderEventHandlerImpl implements RecorderEventHandler { private RecorderEventHandler handler; private final Set<RecorderEvent> pendingEvents = new HashSet<RecorderEvent>(); private RecorderEventHandlerImpl(RecorderEventHandler handler) { this.handler = handler; } @Override public boolean handleEvent(RecorderEvent ev) { if (ev == null) return true; if (RecorderEvent.Type.RECORDING_STARTED.equals(ev.getType())) { long instant = getSynchronizer().getLocalTime(ev.getSsrc(), ev.getRtpTimestamp()); if (instant != -1) { ev.setInstant(instant); return handler.handleEvent(ev); } else { pendingEvents.add(ev); return true; } } return handler.handleEvent(ev); } private void nudge() { for (Iterator<RecorderEvent> iter = pendingEvents.iterator(); iter.hasNext(); ) { RecorderEvent ev = iter.next(); long instant = getSynchronizer().getLocalTime(ev.getSsrc(), ev.getRtpTimestamp()); if (instant != -1) { iter.remove(); ev.setInstant(instant); handler.handleEvent(ev); } } } @Override public void close() { for (RecorderEvent ev : pendingEvents) handler.handleEvent(ev); } } /** Represents a <tt>ReceiveStream</tt> for the purposes of this <tt>RecorderRtpImpl</tt>. */ private class ReceiveStreamDesc { /** * The actual <tt>ReceiveStream</tt> which is represented by this <tt>ReceiveStreamDesc</tt>. */ private ReceiveStream receiveStream; /** The SSRC of the stream. */ long ssrc; /** * The <tt>Processor</tt> used to transcode this receive stream into a format appropriate for * saving to a file. */ private Processor processor; /** The <tt>DataSink</tt> which saves <tt>this.dataSource</tt> to a file.
*/ private DataSink dataSink; /** * The <tt>DataSource</tt> for this receive stream which is to be saved using a * <tt>DataSink</tt> (i.e. the <tt>DataSource</tt> "after" all needed transcoding is done). */ private DataSource dataSource; /** The name of the file into which this stream is being saved. */ private String filename; /** The (original) format of this receive stream. */ private Format format; /** The <tt>SilenceEffect</tt> used for this stream (for audio streams only). */ private SilenceEffect silenceEffect; private ReceiveStreamDesc(ReceiveStream receiveStream) { this.receiveStream = receiveStream; this.ssrc = getReceiveStreamSSRC(receiveStream); } } }
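/*
 * A minimal usage sketch for RecorderRtpImpl (illustrative only; the way the
 * RTPTranslator is obtained, the event handler and the output directory are
 * assumptions, not part of these sources):
 *
 *   RTPTranslator translator = LibJitsi.getMediaService().createRTPTranslator();
 *   RecorderRtpImpl recorder = new RecorderRtpImpl(translator);
 *   recorder.setEventHandler(myEventHandler); // notified of RECORDING_STARTED etc.
 *   recorder.start(null, "/tmp/recordings");  // the format argument is unused here
 *   // ... media flowing through the translator is written to per-SSRC files ...
 *   recorder.stop();
 *
 * Recorders can also share timing: audioRecorder.connect(videoRecorder) pushes
 * audioRecorder's Synchronizer into videoRecorder.
 */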
/** * Implements an audio <tt>CaptureDevice</tt> using {@link AudioRecord}. * * @author Lyubomir Marinov */ public class DataSource extends AbstractPullBufferCaptureDevice { /** * The <tt>Logger</tt> used by the <tt>DataSource</tt> class and its instances for logging output. */ private static final Logger logger = Logger.getLogger(DataSource.class); /** * The priority to be set to the thread executing the {@link AudioRecordStream#read(Buffer)} * method of a given <tt>AudioRecordStream</tt>. */ private static final int THREAD_PRIORITY = Process.THREAD_PRIORITY_URGENT_AUDIO; /** Initializes a new <tt>DataSource</tt> instance. */ public DataSource() {} /** * Initializes a new <tt>DataSource</tt> from a specific <tt>MediaLocator</tt>. * * @param locator the <tt>MediaLocator</tt> to create the new instance from */ public DataSource(MediaLocator locator) { super(locator); } /** * Creates a new <tt>PullBufferStream</tt> which is to be at a specific zero-based index in the * list of streams of this <tt>PullBufferDataSource</tt>. The <tt>Format</tt>-related information * of the new instance is to be abstracted by a specific <tt>FormatControl</tt>. * * @param streamIndex the zero-based index of the <tt>PullBufferStream</tt> in the list of streams * of this <tt>PullBufferDataSource</tt> * @param formatControl the <tt>FormatControl</tt> which is to abstract the * <tt>Format</tt>-related information of the new instance * @return a new <tt>PullBufferStream</tt> which is to be at the specified <tt>streamIndex</tt> in * the list of streams of this <tt>PullBufferDataSource</tt> and which has its * <tt>Format</tt>-related information abstracted by the specified <tt>formatControl</tt> * @see AbstractPullBufferCaptureDevice#createStream(int, FormatControl) */ protected AbstractPullBufferStream createStream(int streamIndex, FormatControl formatControl) { return new AudioRecordStream(this, formatControl); } /** * Opens a connection to the media source specified by the <tt>MediaLocator</tt> of this * <tt>DataSource</tt>. * * @throws IOException if anything goes wrong while opening the connection to the media source * specified by the <tt>MediaLocator</tt> of this <tt>DataSource</tt> * @see AbstractPullBufferCaptureDevice#doConnect() */ @Override protected void doConnect() throws IOException { super.doConnect(); /* * XXX The AudioRecordStream will connect upon start in order to be able * to respect requests to set its format. */ } /** * Closes the connection to the media source specified by the <tt>MediaLocator</tt> of this * <tt>DataSource</tt>. * * @see AbstractPullBufferCaptureDevice#doDisconnect() */ @Override protected void doDisconnect() { synchronized (getStreamSyncRoot()) { Object[] streams = streams(); if (streams != null) for (Object stream : streams) ((AudioRecordStream) stream).disconnect(); } super.doDisconnect(); } /** Sets the priority of the calling thread to {@link #THREAD_PRIORITY}. */ public static void setThreadPriority() { setThreadPriority(THREAD_PRIORITY); } /** * Sets the priority of the calling thread to a specific value. 
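* <p>For example, a capture thread may call {@code DataSource.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO)} (this class's own default); failures are logged as warnings and otherwise ignored.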
* * @param threadPriority the priority to be set on the calling thread */ public static void setThreadPriority(int threadPriority) { Throwable exception = null; try { Process.setThreadPriority(threadPriority); } catch (IllegalArgumentException iae) { exception = iae; } catch (SecurityException se) { exception = se; } if (exception != null) logger.warn("Failed to set thread priority.", exception); } /** * Attempts to set the <tt>Format</tt> to be reported by the <tt>FormatControl</tt> of a * <tt>PullBufferStream</tt> at a specific zero-based index in the list of streams of this * <tt>PullBufferDataSource</tt>. The <tt>PullBufferStream</tt> does not exist at the time of the * attempt to set its <tt>Format</tt>. Overrides the default behavior, which is to not attempt to * set the specified <tt>Format</tt>, in order to enable setting the <tt>Format</tt> prior to the * creation of the <tt>PullBufferStream</tt>. * * @param streamIndex the zero-based index of the <tt>PullBufferStream</tt> the <tt>Format</tt> of * which is to be set * @param oldValue the last-known <tt>Format</tt> for the <tt>PullBufferStream</tt> at the * specified <tt>streamIndex</tt> * @param newValue the <tt>Format</tt> which is to be set * @return the <tt>Format</tt> to be reported by the <tt>FormatControl</tt> of the * <tt>PullBufferStream</tt> at the specified <tt>streamIndex</tt> in the list of streams of * this <tt>PullBufferDataSource</tt>, or <tt>null</tt> if the attempt to set the <tt>Format</tt> * did not succeed and any last-known <tt>Format</tt> is to be left in effect * @see AbstractPullBufferCaptureDevice#setFormat(int, Format, Format) */ @Override protected Format setFormat(int streamIndex, Format oldValue, Format newValue) { /* * Accept format specifications prior to the initialization of * AudioRecordStream. Afterwards, AudioRecordStream will decide whether * to accept further format specifications. */ return newValue; } /** Implements an audio <tt>PullBufferStream</tt> using {@link AudioRecord}. */ private static class AudioRecordStream extends AbstractPullBufferStream<DataSource> implements AudioEffect.OnEnableStatusChangeListener { /** The <tt>android.media.AudioRecord</tt> which does the actual capturing of audio. */ private AudioRecord audioRecord; /** The <tt>GainControl</tt> through which the volume/gain of captured media is controlled. */ private final GainControl gainControl; /** * The length in bytes of the media data read into a <tt>Buffer</tt> via a call to {@link * #read(Buffer)}. */ private int length; /** * The indicator which determines whether this <tt>AudioRecordStream</tt> is to set the priority * of the thread in which its {@link #read(Buffer)} method is executed. */ private boolean setThreadPriority = true; /** * Initializes a new <tt>AudioRecordStream</tt> instance which is to have its * <tt>Format</tt>-related information abstracted by a specific <tt>FormatControl</tt>. * * @param dataSource the <tt>DataSource</tt> which is creating the new instance so that it * becomes one of its <tt>streams</tt> * @param formatControl the <tt>FormatControl</tt> which is to abstract the * <tt>Format</tt>-related information of the new instance */ public AudioRecordStream(DataSource dataSource, FormatControl formatControl) { super(dataSource, formatControl); MediaServiceImpl mediaServiceImpl = NeomediaActivator.getMediaServiceImpl(); gainControl = (mediaServiceImpl == null) ?
null : (GainControl) mediaServiceImpl.getInputVolumeControl(); } /** * Opens a connection to the media source of the associated <tt>DataSource</tt>. * * @throws IOException if anything goes wrong while opening a connection to the media source of * the associated <tt>DataSource</tt> */ public synchronized void connect() throws IOException { javax.media.format.AudioFormat af = (javax.media.format.AudioFormat) getFormat(); int channels = af.getChannels(); int channelConfig; switch (channels) { case Format.NOT_SPECIFIED: case 1: channelConfig = AudioFormat.CHANNEL_IN_MONO; channels = 1; /* normalize NOT_SPECIFIED so the length computation below stays positive */ break; case 2: channelConfig = AudioFormat.CHANNEL_IN_STEREO; break; default: throw new IOException("channels"); } int sampleSizeInBits = af.getSampleSizeInBits(); int audioFormat; switch (sampleSizeInBits) { case 8: audioFormat = AudioFormat.ENCODING_PCM_8BIT; break; case 16: audioFormat = AudioFormat.ENCODING_PCM_16BIT; break; default: throw new IOException("sampleSizeInBits"); } double sampleRate = af.getSampleRate(); /* 20 ms worth of samples; e.g. 16-bit mono at 48000 Hz: 20 * 48 * 1 * 2 = 1920 bytes per read */ length = (int) Math.round( 20 /* milliseconds */ * (sampleRate / 1000) * channels * (sampleSizeInBits / 8)); /* * In addition to the thread in which #read(Buffer) is executed, raise * the priority of the thread which will create the AudioRecord. */ setThreadPriority(); try { int minBufferSize = AudioRecord.getMinBufferSize((int) sampleRate, channelConfig, audioFormat); audioRecord = new AudioRecord( MediaRecorder.AudioSource.DEFAULT, (int) sampleRate, channelConfig, audioFormat, Math.max(length, minBufferSize)); // try to configure audio effects if available configureEffects(); } catch (IllegalArgumentException iae) { IOException ioe = new IOException(); ioe.initCause(iae); throw ioe; } setThreadPriority = true; } /** Configures echo cancellation, automatic gain control and noise suppression effects. */ @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void configureEffects() { if (!AndroidUtils.hasAPI(16)) return; AudioSystem audioSystem = AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_AUDIORECORD); // Creates echo canceler if available if (AcousticEchoCanceler.isAvailable()) { AcousticEchoCanceler echoCanceller = AcousticEchoCanceler.create(audioRecord.getAudioSessionId()); if (echoCanceller != null) { echoCanceller.setEnableStatusListener(this); echoCanceller.setEnabled(audioSystem.isEchoCancel()); logger.info("Echo cancellation: " + echoCanceller.getEnabled()); } } // Automatic gain control if (AutomaticGainControl.isAvailable()) { AutomaticGainControl agc = AutomaticGainControl.create(audioRecord.getAudioSessionId()); if (agc != null) { agc.setEnableStatusListener(this); agc.setEnabled(audioSystem.isAutomaticGainControl()); logger.info("Auto gain control: " + agc.getEnabled()); } } // Creates noise suppressor if available if (NoiseSuppressor.isAvailable()) { NoiseSuppressor noiseSuppressor = NoiseSuppressor.create(audioRecord.getAudioSessionId()); if (noiseSuppressor != null) { noiseSuppressor.setEnableStatusListener(this); noiseSuppressor.setEnabled(audioSystem.isDenoise()); logger.info("Noise suppressor: " + noiseSuppressor.getEnabled()); } } } /** Closes the connection to the media source of the associated <tt>DataSource</tt>. */ public synchronized void disconnect() { if (audioRecord != null) { audioRecord.release(); audioRecord = null; setThreadPriority = true; } } /** * Attempts to set the <tt>Format</tt> of this <tt>AbstractBufferStream</tt>.
* * @param format the <tt>Format</tt> to be set as the format of this * <tt>AbstractBufferStream</tt> * @return the <tt>Format</tt> of this <tt>AbstractBufferStream</tt> or <tt>null</tt> if the * attempt to set the <tt>Format</tt> did not succeed and any last-known <tt>Format</tt> is * to be left in effect * @see AbstractPullBufferStream#doSetFormat(Format) */ @Override protected synchronized Format doSetFormat(Format format) { return (audioRecord == null) ? format : null; } /** * Reads media data from this <tt>PullBufferStream</tt> into a specific <tt>Buffer</tt> with * blocking. * * @param buffer the <tt>Buffer</tt> in which media data is to be read from this * <tt>PullBufferStream</tt> * @throws IOException if anything goes wrong while reading media data from this * <tt>PullBufferStream</tt> into the specified <tt>buffer</tt> * @see javax.media.protocol.PullBufferStream#read(javax.media.Buffer) */ public void read(Buffer buffer) throws IOException { if (setThreadPriority) { setThreadPriority = false; setThreadPriority(); } Object data = buffer.getData(); int length = this.length; if (data instanceof byte[]) { if (((byte[]) data).length < length) data = null; } else data = null; if (data == null) { data = new byte[length]; buffer.setData(data); } int toRead = length; byte[] bytes = (byte[]) data; int offset = 0; buffer.setLength(0); while (toRead > 0) { int read; synchronized (this) { if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) read = audioRecord.read(bytes, offset, toRead); else break; } if (read < 0) { throw new IOException( AudioRecord.class.getName() + "#read(byte[], int, int) returned " + read); } else { buffer.setLength(buffer.getLength() + read); offset += read; toRead -= read; } } buffer.setOffset(0); // Apply software gain. if (gainControl != null) { BasicVolumeControl.applyGain(gainControl, bytes, buffer.getOffset(), buffer.getLength()); } } /** * Starts the transfer of media data from this <tt>AbstractBufferStream</tt>. * * @throws IOException if anything goes wrong while starting the transfer of media data from * this <tt>AbstractBufferStream</tt> * @see AbstractBufferStream#start() */ @Override public void start() throws IOException { /* * Connect upon start because the connect has been delayed to allow * this AudioRecordStream to respect requests to set its format. */ synchronized (this) { if (audioRecord == null) connect(); } super.start(); synchronized (this) { if (audioRecord != null) { setThreadPriority = true; audioRecord.startRecording(); } } } /** * Stops the transfer of media data from this <tt>AbstractBufferStream</tt>. * * @throws IOException if anything goes wrong while stopping the transfer of media data from * this <tt>AbstractBufferStream</tt> * @see AbstractBufferStream#stop() */ @Override public void stop() throws IOException { synchronized (this) { if (audioRecord != null) { audioRecord.stop(); setThreadPriority = true; } } super.stop(); } @Override public void onEnableStatusChange(AudioEffect effect, boolean enabled) { logger.info(effect.getDescriptor() + " : " + enabled); } } }
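/*
 * A minimal capture sketch (illustrative only; the "audiorecord:" locator string
 * is an assumption based on AudioSystem.LOCATOR_PROTOCOL_AUDIORECORD):
 *
 *   DataSource dataSource = new DataSource(new MediaLocator("audiorecord:"));
 *   dataSource.connect();
 *   dataSource.start();                  // AudioRecordStream connects and starts recording
 *   PullBufferStream stream = dataSource.getStreams()[0];
 *   Buffer buffer = new Buffer();
 *   stream.read(buffer);                 // blocks until ~20 ms of audio is available
 *   // ... process buffer.getData() ...
 *   dataSource.stop();
 *   dataSource.disconnect();
 */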
/** * Implements <tt>MediaFormatFactory</tt> for the JMF <tt>Format</tt> types. * * @author Lyubomir Marinov */ public class MediaFormatFactoryImpl implements MediaFormatFactory { /** * The <tt>Logger</tt> used by the <tt>MediaFormatFactoryImpl</tt> class and its instances for * logging output. */ private static final Logger logger = Logger.getLogger(MediaFormatFactoryImpl.class); /** * Creates an unknown <tt>MediaFormat</tt>. * * @param type <tt>MediaType</tt> * @return unknown <tt>MediaFormat</tt> */ public MediaFormat createUnknownMediaFormat(MediaType type) { Format unknown = null; /* * FIXME Why is a VideoFormat instance created for MediaType.AUDIO and * an AudioFormat instance for MediaType.VIDEO? */ if (type.equals(MediaType.AUDIO)) unknown = new VideoFormat("unknown"); else if (type.equals(MediaType.VIDEO)) unknown = new AudioFormat("unknown"); return MediaFormatImpl.createInstance(unknown); } /** * Creates a <tt>MediaFormat</tt> for the specified <tt>encoding</tt> with default clock rate and * set of format parameters. If <tt>encoding</tt> is known to this <tt>MediaFormatFactory</tt>, * returns a <tt>MediaFormat</tt> which is either an <tt>AudioMediaFormat</tt> or a * <tt>VideoMediaFormat</tt> instance. Otherwise, returns <tt>null</tt>. * * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt> which is either an * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance if <tt>encoding</tt> is * known to this <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt> * @see MediaFormatFactory#createMediaFormat(String) */ public MediaFormat createMediaFormat(String encoding) { return createMediaFormat(encoding, CLOCK_RATE_NOT_SPECIFIED); } /** * Creates a <tt>MediaFormat</tt> for the specified RTP payload type with default clock rate and * set of format parameters. If <tt>rtpPayloadType</tt> is known to this * <tt>MediaFormatFactory</tt>, returns a <tt>MediaFormat</tt> which is either an * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance. Otherwise, returns * <tt>null</tt>. * * @param rtpPayloadType the RTP payload type of the <tt>MediaFormat</tt> to create * @return a <tt>MediaFormat</tt> with the specified <tt>rtpPayloadType</tt> which is either an * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance if * <tt>rtpPayloadType</tt> is known to this <tt>MediaFormatFactory</tt>; otherwise, * <tt>null</tt> * @see MediaFormatFactory#createMediaFormat(byte) */ public MediaFormat createMediaFormat(byte rtpPayloadType) { /* * We know which are the MediaFormat instances with the specified * rtpPayloadType but we cannot directly return them because they do not * reflect the user's configuration with respect to being enabled and * disabled. */ for (MediaFormat rtpPayloadTypeMediaFormat : MediaUtils.getMediaFormats(rtpPayloadType)) { MediaFormat mediaFormat = createMediaFormat( rtpPayloadTypeMediaFormat.getEncoding(), rtpPayloadTypeMediaFormat.getClockRate()); if (mediaFormat != null) return mediaFormat; } return null; } /** * Creates a <tt>MediaFormat</tt> for the specified <tt>encoding</tt> with the specified * <tt>clockRate</tt> and a default set of format parameters. If <tt>encoding</tt> is known to * this <tt>MediaFormatFactory</tt>, returns a <tt>MediaFormat</tt> which is either an * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance. Otherwise, returns * <tt>null</tt>. 
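* * <p>Illustrative examples, assuming the encodings are enabled in the current configuration: {@code createMediaFormat("PCMU", 8000)} resolves to an <tt>AudioMediaFormat</tt>, {@code createMediaFormat("VP8", 90000)} to a <tt>VideoMediaFormat</tt>, and an unknown encoding to <tt>null</tt>.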
* * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for * @param clockRate the clock rate in Hz to create a <tt>MediaFormat</tt> for * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt> and <tt>clockRate</tt> * which is either an <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance if * <tt>encoding</tt> is known to this <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt> * @see MediaFormatFactory#createMediaFormat(String, double) */ public MediaFormat createMediaFormat(String encoding, double clockRate) { return createMediaFormat(encoding, clockRate, 1); } /** * Creates a <tt>MediaFormat</tt> for the specified <tt>encoding</tt>, <tt>clockRate</tt> and * <tt>channels</tt> and a default set of format parameters. If <tt>encoding</tt> is known to this * <tt>MediaFormatFactory</tt>, returns a <tt>MediaFormat</tt> which is either an * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance. Otherwise, returns * <tt>null</tt>. * * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for * @param clockRate the clock rate in Hz to create a <tt>MediaFormat</tt> for * @param channels the number of available channels (1 for mono, 2 for stereo) if it makes sense * for the <tt>MediaFormat</tt> with the specified <tt>encoding</tt>; otherwise, ignored * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt>, <tt>clockRate</tt> and * <tt>channels</tt> and a default set of format parameters which is either an * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance if <tt>encoding</tt> is * known to this <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt> * @see MediaFormatFactory#createMediaFormat(String, double, int) */ public MediaFormat createMediaFormat(String encoding, double clockRate, int channels) { return createMediaFormat(encoding, clockRate, channels, null); } private MediaFormat createMediaFormat( String encoding, double clockRate, int channels, Map<String, String> fmtps) { for (MediaFormat format : getSupportedMediaFormats(encoding, clockRate)) { /* * The mediaType, encoding and clockRate properties are sure to * match because format is the result of the search for encoding and * clockRate. We just want to make sure that the channels and the * format parameters match. */ if (format.matches( format.getMediaType(), format.getEncoding(), format.getClockRate(), channels, fmtps)) return format; } return null; } /** * Creates a <tt>MediaFormat</tt> for the specified <tt>encoding</tt>, <tt>clockRate</tt> and set * of format parameters. If <tt>encoding</tt> is known to this <tt>MediaFormatFactory</tt>, * returns a <tt>MediaFormat</tt> which is either an <tt>AudioMediaFormat</tt> or a * <tt>VideoMediaFormat</tt> instance. Otherwise, returns <tt>null</tt>. 
* * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for * @param clockRate the clock rate in Hz to create a <tt>MediaFormat</tt> for * @param formatParams any codec specific parameters which have been received via SIP/SDP or * XMPP/Jingle * @param advancedParams any parameters which have been received via SIP/SDP or XMPP/Jingle * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt>, <tt>clockRate</tt> and set * of format parameters which is either an <tt>AudioMediaFormat</tt> or a * <tt>VideoMediaFormat</tt> instance if <tt>encoding</tt> is known to this * <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt> * @see MediaFormatFactory#createMediaFormat(String, double, Map, Map) */ public MediaFormat createMediaFormat( String encoding, double clockRate, Map<String, String> formatParams, Map<String, String> advancedParams) { return createMediaFormat(encoding, clockRate, 1, -1, formatParams, advancedParams); } /** * Creates a <tt>MediaFormat</tt> for the specified <tt>encoding</tt>, <tt>clockRate</tt>, * <tt>channels</tt> and set of format parameters. If <tt>encoding</tt> is known to this * <tt>MediaFormatFactory</tt>, returns a <tt>MediaFormat</tt> which is either an * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance. Otherwise, returns * <tt>null</tt>. * * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for * @param clockRate the clock rate in Hz to create a <tt>MediaFormat</tt> for * @param channels the number of available channels (1 for mono, 2 for stereo) if it makes sense * for the <tt>MediaFormat</tt> with the specified <tt>encoding</tt>; otherwise, ignored * @param frameRate the frame rate in number of frames per second to create a <tt>MediaFormat</tt> * for * @param formatParams any codec specific parameters which have been received via SIP/SDP or * XMPP/Jingle * @param advancedParams any parameters which have been received via SIP/SDP or XMPP/Jingle * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt>, <tt>clockRate</tt>, * <tt>channels</tt> and set of format parameters which is either an <tt>AudioMediaFormat</tt> * or a <tt>VideoMediaFormat</tt> instance if <tt>encoding</tt> is known to this * <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt> * @see MediaFormatFactory#createMediaFormat(String, double, int, float, Map, Map) */ public MediaFormat createMediaFormat( String encoding, double clockRate, int channels, float frameRate, Map<String, String> formatParams, Map<String, String> advancedParams) { MediaFormat mediaFormat = createMediaFormat(encoding, clockRate, channels, formatParams); if (mediaFormat == null) return null; /* * MediaFormatImpl is immutable so if the caller wants to change the * format parameters and/or the advanced attributes, we'll have to * create a new MediaFormatImpl.
*/ Map<String, String> formatParameters = null; Map<String, String> advancedParameters = null; if ((formatParams != null) && !formatParams.isEmpty()) formatParameters = formatParams; if ((advancedParams != null) && !advancedParams.isEmpty()) advancedParameters = advancedParams; if ((formatParameters != null) || (advancedParameters != null)) { switch (mediaFormat.getMediaType()) { case AUDIO: mediaFormat = new AudioMediaFormatImpl( ((AudioMediaFormatImpl) mediaFormat).getFormat(), formatParameters, advancedParameters); break; case VIDEO: VideoMediaFormatImpl videoMediaFormatImpl = (VideoMediaFormatImpl) mediaFormat; /* * If the format of VideoMediaFormatImpl is * a ParameterizedVideoFormat, it's possible for the format * parameters of that ParameterizedVideoFormat and of the new * VideoMediaFormatImpl (to be created) to be out of sync. While * it's not technically perfect, it should be practically safe * for the format parameters which distinguish VideoFormats with * the same encoding and clock rate because mediaFormat has * already been created in sync with formatParams (with respect * to the format parameters which distinguish VideoFormats with * the same encoding and clock rate). */ mediaFormat = new VideoMediaFormatImpl( videoMediaFormatImpl.getFormat(), videoMediaFormatImpl.getClockRate(), frameRate, formatParameters, advancedParameters); break; default: mediaFormat = null; } } return mediaFormat; } /** * Creates a <tt>MediaFormat</tt> either for the specified <tt>rtpPayloadType</tt> or for the * specified <tt>encoding</tt>, <tt>clockRate</tt>, <tt>channels</tt> and set of format * parameters. If <tt>encoding</tt> is known to this <tt>MediaFormatFactory</tt>, ignores * <tt>rtpPayloadType</tt> and returns a <tt>MediaFormat</tt> which is either an * <tt>AudioMediaFormat</tt> or a <tt>VideoMediaFormat</tt> instance. If <tt>rtpPayloadType</tt> * is not {@link MediaFormat#RTP_PAYLOAD_TYPE_UNKNOWN} and <tt>encoding</tt> is <tt>null</tt>, * uses the encoding associated with <tt>rtpPayloadType</tt>. * * @param rtpPayloadType the RTP payload type to create a <tt>MediaFormat</tt> for; {@link * MediaFormat#RTP_PAYLOAD_TYPE_UNKNOWN} if <tt>encoding</tt> is not <tt>null</tt>. 
If * <tt>rtpPayloadType</tt> is not <tt>MediaFormat#RTP_PAYLOAD_TYPE_UNKNOWN</tt> and * <tt>encoding</tt> is not <tt>null</tt>, <tt>rtpPayloadType</tt> is ignored * @param encoding the well-known encoding (name) to create a <tt>MediaFormat</tt> for; * <tt>null</tt> if the encoding is to be derived from <tt>rtpPayloadType</tt> * @param clockRate the clock rate in Hz to create a <tt>MediaFormat</tt> for * @param channels the number of available channels (1 for mono, 2 for stereo) if it makes sense * for the <tt>MediaFormat</tt> with the specified <tt>encoding</tt>; otherwise, ignored * @param frameRate the frame rate in number of frames per second to create a <tt>MediaFormat</tt> * for * @param formatParams any codec specific parameters which have been received via SIP/SDP or * XMPP/Jingle * @param advancedParams any parameters which have been received via SIP/SDP or XMPP/Jingle * @return a <tt>MediaFormat</tt> with the specified <tt>encoding</tt>, <tt>clockRate</tt>, * <tt>channels</tt> and set of format parameters which is either an <tt>AudioMediaFormat</tt> * or a <tt>VideoMediaFormat</tt> instance if <tt>encoding</tt> is known to this * <tt>MediaFormatFactory</tt>; otherwise, <tt>null</tt> */ public MediaFormat createMediaFormat( byte rtpPayloadType, String encoding, double clockRate, int channels, float frameRate, Map<String, String> formatParams, Map<String, String> advancedParams) { /* * If rtpPayloadType is specified, use it only to figure out encoding * and/or clockRate in case either one of them is unknown. */ if ((MediaFormat.RTP_PAYLOAD_TYPE_UNKNOWN != rtpPayloadType) && ((encoding == null) || (CLOCK_RATE_NOT_SPECIFIED == clockRate))) { MediaFormat[] rtpPayloadTypeMediaFormats = MediaUtils.getMediaFormats(rtpPayloadType); if (rtpPayloadTypeMediaFormats.length > 0) { if (encoding == null) encoding = rtpPayloadTypeMediaFormats[0].getEncoding(); // Assign or check the clock rate. if (CLOCK_RATE_NOT_SPECIFIED == clockRate) clockRate = rtpPayloadTypeMediaFormats[0].getClockRate(); else { boolean clockRateIsValid = false; for (MediaFormat rtpPayloadTypeMediaFormat : rtpPayloadTypeMediaFormats) if (rtpPayloadTypeMediaFormat.getEncoding().equals(encoding) && (rtpPayloadTypeMediaFormat.getClockRate() == clockRate)) { clockRateIsValid = true; break; } if (!clockRateIsValid) return null; } } } return createMediaFormat( encoding, clockRate, channels, frameRate, formatParams, advancedParams); } /** * Gets the <tt>MediaFormat</tt>s among the specified <tt>mediaFormats</tt> which have the * specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt>. * * @param mediaFormats the <tt>MediaFormat</tt>s to be filtered down to the ones which have * the specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt> * @param encoding the well-known encoding (name) of the <tt>MediaFormat</tt>s to be retrieved * @param clockRate the clock rate of the <tt>MediaFormat</tt>s to be retrieved; {@link * #CLOCK_RATE_NOT_SPECIFIED} if any clock rate is acceptable * @return a <tt>List</tt> of the <tt>MediaFormat</tt>s among <tt>mediaFormats</tt> which have the * specified <tt>encoding</tt> and, optionally, <tt>clockRate</tt> */ private List<MediaFormat> getMatchingMediaFormats( MediaFormat[] mediaFormats, String encoding, double clockRate) { /* * XXX Use String#equalsIgnoreCase(String) because some clients transmit * some of the codecs starting with capital letters. */ /* * As per RFC 3551, Section 4.5.2, because of a mistake in RFC 1890 and for * backward compatibility, G.722 should always be announced as 8000 even * though it is wideband.
So, if someone is looking for G722/16000, * then: Forgive them, for they know not what they do! */ if ("G722".equalsIgnoreCase(encoding) && (16000 == clockRate)) { clockRate = 8000; if (logger.isInfoEnabled()) logger.info("Suppressing erroneous 16000 announcement for G.722"); } List<MediaFormat> supportedMediaFormats = new ArrayList<MediaFormat>(); for (MediaFormat mediaFormat : mediaFormats) { if (mediaFormat.getEncoding().equalsIgnoreCase(encoding) && ((CLOCK_RATE_NOT_SPECIFIED == clockRate) || (mediaFormat.getClockRate() == clockRate))) { supportedMediaFormats.add(mediaFormat); } } return supportedMediaFormats; } /** * Gets the <tt>MediaFormat</tt>s supported by this <tt>MediaFormatFactory</tt> and the * <tt>MediaService</tt> associated with it and having the specified <tt>encoding</tt> and, * optionally, <tt>clockRate</tt>. * * @param encoding the well-known encoding (name) of the <tt>MediaFormat</tt>s to be retrieved * @param clockRate the clock rate of the <tt>MediaFormat</tt>s to be retrieved; {@link * #CLOCK_RATE_NOT_SPECIFIED} if any clock rate is acceptable * @return a <tt>List</tt> of the <tt>MediaFormat</tt>s supported by the <tt>MediaService</tt> * associated with this <tt>MediaFormatFactory</tt> and having the specified encoding and, * optionally, clock rate */ private List<MediaFormat> getSupportedMediaFormats(String encoding, double clockRate) { EncodingConfiguration encodingConfiguration = NeomediaServiceUtils.getMediaServiceImpl().getCurrentEncodingConfiguration(); List<MediaFormat> supportedMediaFormats = getMatchingMediaFormats( encodingConfiguration.getAllEncodings(MediaType.AUDIO), encoding, clockRate); if (supportedMediaFormats.isEmpty()) supportedMediaFormats = getMatchingMediaFormats( encodingConfiguration.getAllEncodings(MediaType.VIDEO), encoding, clockRate); return supportedMediaFormats; } }
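/*
 * A minimal usage sketch (illustrative only; which encodings actually resolve
 * depends on the current EncodingConfiguration):
 *
 *   MediaFormatFactory factory = new MediaFormatFactoryImpl();
 *   MediaFormat opus = factory.createMediaFormat("opus", 48000, 2);
 *   MediaFormat pcmu = factory.createMediaFormat((byte) 0);          // PCMU/8000, if enabled
 *   Map<String, String> fmtps = new HashMap<String, String>();
 *   fmtps.put("useinbandfec", "1");
 *   MediaFormat opusFec = factory.createMediaFormat("opus", 48000, fmtps, null);
 *
 * Each call returns null when the encoding or payload type is unknown to the
 * factory or disabled by the user's configuration.
 */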
public class h4Jmf extends JPanel implements h4JmfActions, EBComponent { private static final Logger logger = Logger.getLogger(h4Jmf.class.getName()); private static volatile int count = 0; private View view; // private URL url; // private MediaLocator mediaLocator; // private DataSource ds=null; // no more commands if command_busy set!!! still to do??? // private Time duration=null; // private Calendar clndr=Calendar.getInstance(); // boolean jmf_ok=false; // this is really controlled by h4JmfPlugin having references to JMF-classes. if JMF is not // installed, the plugin will not load public h4Jmf() { logger.info("no-args constructor"); } // public h4Jmf(View view, String position) { super(); count++; logger.info("args constructor " + count); this.setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); // logger.info("view="+view+" position="+position); this.view = view; // position.equals(DockableWindowManager.FLOATING); // cnsl=new Console(100); h4JmfPlugin.cnsl.setPreferredSize(new Dimension(500, 200)); JScrollPane jscrllpn = new JScrollPane(h4JmfPlugin.cnsl); // jscrllpn.setPreferredSize(new Dimension(500,200)); // add(BorderLayout.SOUTH,jscrllpn); add(jscrllpn); if (h4JmfPlugin.playMP3 != null) { java.awt.Component cpc = h4JmfPlugin.playMP3.getControlPanelComponent(); // logger.info("cpc="+cpc); if (cpc != null) // after panel has been closed, new instance !! { cpc.setPreferredSize(new Dimension(500, 50)); this.add(cpc); } } this.setPreferredSize(new Dimension(500, 300)); // cnsl.append("begin"); // cnsl.append("SettingsDirectory="+jEdit.getSettingsDirectory()); h4JmfPlugin.cnsl.append("args constructor " + count); // EditBus.addToBus(this);//moved to notify /** * *********************** //check a few jmf-classes begin done in h4JmfPlugin Class tst=null; * try { tst=Class.forName("com.sun.media.codec.audio.mp3.JavaDecoder"); jmf_ok=true; * cnsl.append("jmf Java Media Framework seems to be installed."); } catch(Exception excptn) { * cnsl.append("trying presence of jmf"); cnsl.append(excptn); cnsl.append("jmf Java Media * Framework does not seem to be installed!"); cnsl.append("see README for more info"); } * //check a few jmf-classes end ** */ if (!h4JmfPlugin.jmf_ok) { h4JmfPlugin.cnsl.append("JMF Java Media Framework does not seem to be installed!"); h4JmfPlugin.cnsl.append("see README for more info"); } logger.info("constructor end"); } // constructor public void chooseFile() { if (!h4JmfPlugin.jmf_ok) return; if (h4JmfPlugin.playMP3 != null) { h4JmfPlugin.cnsl.append("playMP3!=null"); return; } String tmpdir = System.getProperty("java.io.tmpdir"); String[] paths = GUIUtilities.showVFSFileDialog( view, tmpdir + File.separator, JFileChooser.OPEN_DIALOG, false); // if(paths!=null && !paths[0].equals(filename)) if (paths != null) { String filename = paths[0]; h4JmfPlugin.cnsl.append("filename=" + filename); try { URL url = new URL("file://" + filename); player_begin(url); } catch (Exception e) { logger.severe(e.getMessage()); h4JmfPlugin.cnsl.append(e); } } } // chooseFile public void player_begin(URL url) { if (h4JmfPlugin.playMP3 != null) { logger.severe("playMP3!=null"); h4JmfPlugin.cnsl.append("playMP3!=null"); return; } if (url == null) { h4JmfPlugin.cnsl.append("url==null"); return; } MediaLocator mediaLocator = new MediaLocator(url); try { // final JPanel jpnl_this=this; DataSource ds = Manager.createDataSource(mediaLocator); // cnsl.append("ds="+ds); h4JmfPlugin.playMP3 = Manager.createPlayer(ds); /** * ********************************************************** ControllerListener moved to
* outer class addControllerListener done in h4JmfPlugin ********************************** */ h4JmfPlugin.player_begin(); } catch (Exception e) { logger.severe(e.getMessage()); h4JmfPlugin.cnsl.append(e); return; } // h4JmfPlugin.playMP3.realize(); // logger.info("after realize()"); // but possible [JMF thread: com.sun.media.PlaybackEngine@1ac13d7[ // com.sun.media.PlaybackEngine@1ac13d7 ] ( realizeThread)] [error] PlaybackEngine@1ac13d7 ] ( // realizeThread): Unable to handle format: mpeglayer3, 16000.0 Hz, 16-bit, Mono, // LittleEndian, Signed, 2000.0 frame rate, FrameSize=16384 bits // running tshvr under hedwig :11:17:08 PM [JMF thread: // com.sun.media.content.unknown.Handler@8c7be5 ( prefetchThread)] [error] Handler@8c7be5 ( // prefetchThread): Error: Unable to prefetch com.sun.media.PlaybackEngine@6d3b92 } // player_begin public void player_close() { h4JmfPlugin.cnsl.append("trying to close()"); if (h4JmfPlugin.playMP3 != null) { java.awt.Component cpc = h4JmfPlugin.playMP3.getControlPanelComponent(); // nog doen static h4Jmf.player_close() !!!! playMP3 will be set to null there !!! // h4JmfPlugin.playMP3.close(); h4JmfPlugin.cnsl.append("close()"); if (cpc != null) // after panel has been closed, new instance !! { this.remove(cpc); } h4JmfPlugin.player_close(); } // cpc=null; // h4JmfPlugin.playMP3=null; } // // methods start,stop,forward,rewind,insert-time,position moved as static to h4JmfPlugin // kleine delay inbouwen forward,rewind ???,sometimes player does not respind well ?? why?? // EBComponent interface public void handleMessage(EBMessage message) { logger.info(message.toString()); if (message instanceof PropertiesChanged) { // propertiesChanged(); } // AWT-EventQueue-0: INFO: EditorExiting[source=null] else if (message instanceof org.gjt.sp.jedit.msg.PluginUpdate) { Object what = ((PluginUpdate) message).getWhat(); if (what.equals(PluginUpdate.DEACTIVATED)) { // if(h4JmfPlugin.playMP3!=null) h4JmfPlugin.playMP3.close(); // h4JmfPlugin.playMP3=null; logger.info("close done in h4JmfPlugin"); } } // PluginUpdate } // handleMessage // JComponent methods /** **if using these without call to super,view empty! no cnsl, no cpc */ public void addNotify() { logger.info(""); super.addNotify(); EditBus.addToBus(this); } public void removeNotify() { logger.info(""); super.removeNotify(); EditBus.removeFromBus(this); } // JComponent methods end } // h4Jmf
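/*
 * Illustration (not part of the plugin): a minimal, standalone sketch of the JMF
 * create/realize/start handshake that player_begin() delegates to h4JmfPlugin.
 * The file path and class name below are hypothetical, and the plugin's actual
 * ControllerListener in h4JmfPlugin may differ; realize() is asynchronous, so
 * playback is only started once RealizeCompleteEvent arrives.
 */
class Mp3PlaybackSketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("file:///tmp/example.mp3"); // hypothetical file
    DataSource ds = Manager.createDataSource(new MediaLocator(url));
    final javax.media.Player player = Manager.createPlayer(ds);
    player.addControllerListener(
        new javax.media.ControllerListener() {
          public void controllerUpdate(javax.media.ControllerEvent e) {
            if (e instanceof javax.media.RealizeCompleteEvent) {
              // Only after realization is getControlPanelComponent() non-null.
              player.start();
            } else if (e instanceof javax.media.EndOfMediaEvent) {
              player.close();
            }
          }
        });
    player.realize(); // asynchronous; completion arrives via the listener
  }
}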
/**
 * @author Bing SU ([email protected])
 * @author Lyubomir Marinov
 * @author Boris Grozev
 */
public abstract class RTPConnectorInputStream implements PushSourceStream, Runnable {
  /**
   * The value of the property <tt>controls</tt> of <tt>RTPConnectorInputStream</tt> when there are
   * no controls. Explicitly defined in order to reduce unnecessary allocations.
   */
  private static final Object[] EMPTY_CONTROLS = new Object[0];

  /**
   * The length in bytes of the buffers of <tt>RTPConnectorInputStream</tt> receiving packets from
   * the network.
   */
  public static final int PACKET_RECEIVE_BUFFER_LENGTH = 4 * 1024;

  /**
   * The <tt>Logger</tt> used by the <tt>RTPConnectorInputStream</tt> class and its instances to
   * print debug information.
   */
  private static final Logger logger = Logger.getLogger(RTPConnectorInputStream.class);

  /** Packet receive buffer. */
  private final byte[] buffer = new byte[PACKET_RECEIVE_BUFFER_LENGTH];

  /** Whether this stream is closed. Used to control the termination of the worker thread. */
  protected boolean closed;

  /** The optional <tt>Participant</tt> to which received packets are handed for recording. */
  public Participant videoRecorder;

  /**
   * The <tt>DatagramPacketFilter</tt>s which allow dropping <tt>DatagramPacket</tt>s before they
   * are converted into <tt>RawPacket</tt>s.
   */
  private DatagramPacketFilter[] datagramPacketFilters;

  /** Whether an I/O exception was caught while reading from the socket. */
  protected boolean ioError = false;

  /**
   * The packet data to be read out of this instance through its {@link #read(byte[], int, int)}
   * method.
   */
  private RawPacket pkt;

  /** The <tt>Object</tt> which synchronizes the access to {@link #pkt}. */
  private final Object pktSyncRoot = new Object();

  /** The adapter of this <tt>PushSourceStream</tt> to the <tt>PushBufferStream</tt> interface. */
  private final PushBufferStream pushBufferStream;

  /**
   * The pool of <tt>RawPacket[]</tt> instances to reduce their allocations and garbage collection.
   * Contains arrays full of <tt>null</tt>.
   */
  private final Queue<RawPacket[]> rawPacketArrayPool = new LinkedBlockingQueue<RawPacket[]>();

  /**
   * The pool of <tt>RawPacket</tt> instances to reduce their allocations and garbage collection.
   */
  private final Queue<RawPacket> rawPacketPool = new LinkedBlockingQueue<RawPacket>();

  /** The Thread receiving packets. */
  protected Thread receiverThread = null;

  /** SourceTransferHandler object which is used to read packets. */
  private SourceTransferHandler transferHandler;

  /**
   * Whether this <tt>RTPConnectorInputStream</tt> is enabled or disabled. While disabled, the
   * stream does not accept any packets.
   */
  private boolean enabled = true;

  /**
   * Initializes a new <tt>RTPConnectorInputStream</tt>; the socket to receive packet data from is
   * supplied by extenders via {@link #receivePacket(DatagramPacket)}.
   */
  public RTPConnectorInputStream() {
    // Log packets through the PacketLoggingService, if it is available and enabled.
    addDatagramPacketFilter(
        new DatagramPacketFilter() {
          /**
           * Used for debugging. As we don't log every packet, we must count them and decide which
           * to log.
           */
          private long numberOfPackets = 0;

          public boolean accept(DatagramPacket p) {
            numberOfPackets++;
            if (RTPConnectorOutputStream.logPacket(numberOfPackets)) {
              PacketLoggingService packetLogging = LibJitsi.getPacketLoggingService();

              if ((packetLogging != null)
                  && packetLogging.isLoggingEnabled(PacketLoggingService.ProtocolName.RTP))
                doLogPacket(p);
            }
            return true;
          }
        });

    /*
     * Adapt this PushSourceStream to the PushBufferStream interface in
     * order to make it possible to read the Buffer flags of RawPacket.
     */
    pushBufferStream =
        new PushBufferStreamAdapter(this, null) {
          @Override
          protected int doRead(Buffer buffer, byte[] data, int offset, int length)
              throws IOException {
            return RTPConnectorInputStream.this.read(buffer, data, offset, length);
          }
        };
  }

  /**
   * Closes this stream and stops the worker thread. This base implementation is a no-op;
   * extenders are expected to set {@link #closed} and release their socket.
   */
  public synchronized void close() {}

  /**
   * Creates a new <tt>RawPacket</tt> from a specific <tt>DatagramPacket</tt> in order to have this
   * instance receive its packet data through its {@link #read(byte[], int, int)} method. Returns
   * an array of <tt>RawPacket</tt> with the created packet as its first element (and
   * <tt>null</tt> for the other elements).
   *
   * <p>Allows extenders to intercept the packet data and possibly filter and/or modify it.
   *
   * @param datagramPacket the <tt>DatagramPacket</tt> containing the packet data
   * @return an array of <tt>RawPacket</tt> containing the <tt>RawPacket</tt> which contains the
   *     packet data of the specified <tt>DatagramPacket</tt> as its first element.
   */
  protected RawPacket[] createRawPacket(DatagramPacket datagramPacket) {
    RawPacket[] pkts = rawPacketArrayPool.poll();
    if (pkts == null) pkts = new RawPacket[1];

    RawPacket pkt = rawPacketPool.poll();
    if (pkt == null) pkt = new RawPacket();

    pkt.setBuffer(datagramPacket.getData());
    pkt.setFlags(0);
    pkt.setLength(datagramPacket.getLength());
    pkt.setOffset(datagramPacket.getOffset());

    pkts[0] = pkt;
    return pkts;
  }

  /**
   * Provides a dummy implementation of {@link RTPConnectorInputStream#endOfStream()} that always
   * returns <tt>false</tt>.
   *
   * @return <tt>false</tt>, no matter what.
   */
  public boolean endOfStream() {
    return false;
  }

  /**
   * Provides a dummy implementation of {@link RTPConnectorInputStream#getContentDescriptor()}
   * that always returns <tt>null</tt>.
   *
   * @return <tt>null</tt>, no matter what.
   */
  public ContentDescriptor getContentDescriptor() {
    return null;
  }

  /**
   * Provides a dummy implementation of {@link RTPConnectorInputStream#getContentLength()} that
   * always returns <tt>LENGTH_UNKNOWN</tt>.
   *
   * @return <tt>LENGTH_UNKNOWN</tt>, no matter what.
   */
  public long getContentLength() {
    return LENGTH_UNKNOWN;
  }

  /**
   * Returns the <tt>PushBufferStream</tt> adapter of this instance if <tt>controlType</tt> is the
   * class name of <tt>PushBufferStream</tt>; otherwise, returns <tt>null</tt>.
   *
   * @param controlType the type of the control to be retrieved.
   * @return the <tt>PushBufferStream</tt> adapter of this instance, or <tt>null</tt>.
   */
  public Object getControl(String controlType) {
    if (PushBufferStream.class.getName().equals(controlType)) return pushBufferStream;
    else return null;
  }

  /**
   * Provides a dummy implementation of {@link RTPConnectorInputStream#getControls()} that always
   * returns <tt>EMPTY_CONTROLS</tt>.
   *
   * @return <tt>EMPTY_CONTROLS</tt>, no matter what.
   */
  public Object[] getControls() {
    return EMPTY_CONTROLS;
  }

  /**
   * Provides a dummy implementation of {@link RTPConnectorInputStream#getMinimumTransferSize()}
   * that always returns <tt>2 * 1024</tt>.
   *
   * @return <tt>2 * 1024</tt>, no matter what.
   */
  public int getMinimumTransferSize() {
    return 2 * 1024; // larger than the typical Ethernet MTU of 1500 bytes, just to be safe.
  }

  /**
   * Pools the specified <tt>RawPacket</tt> in order to avoid future allocations and to reduce the
   * effects of garbage collection.
   *
   * @param pkt the <tt>RawPacket</tt> to be offered to {@link #rawPacketPool}
   */
  private void poolRawPacket(RawPacket pkt) {
    pkt.setBuffer(null);
    pkt.setFlags(0);
    pkt.setLength(0);
    pkt.setOffset(0);
    rawPacketPool.offer(pkt);
  }
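  /**
   * Illustration (an assumption, not part of the library): how a consumer may obtain the
   * <tt>PushBufferStream</tt> adapter through the generic JMF control mechanism, e.g. in order to
   * read <tt>Buffer</tt> flags along with the packet data.
   */
  private static PushBufferStream exampleGetPushBufferStream(RTPConnectorInputStream stream) {
    // getControl() hands out the adapter only for this exact control type name.
    return (PushBufferStream) stream.getControl(PushBufferStream.class.getName());
  }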
  /**
   * Copies the content of the most recently received packet into <tt>buffer</tt>.
   *
   * @param buffer the <tt>byte[]</tt> that we'd like to copy the content of the packet to.
   * @param offset the position where we are supposed to start writing in <tt>buffer</tt>.
   * @param length the number of <tt>byte</tt>s available for writing in <tt>buffer</tt>.
   * @return the number of bytes read
   * @throws IOException if <tt>length</tt> is less than the size of the packet.
   */
  public int read(byte[] buffer, int offset, int length) throws IOException {
    return read(null, buffer, offset, length);
  }

  /**
   * Copies the content of the most recently received packet into <tt>data</tt>.
   *
   * @param buffer an optional <tt>Buffer</tt> instance associated with the specified
   *     <tt>data</tt>, <tt>offset</tt> and <tt>length</tt> and provided to the method in case the
   *     implementation would like to provide additional <tt>Buffer</tt> properties such as
   *     <tt>flags</tt>.
   * @param data the <tt>byte[]</tt> that we'd like to copy the content of the packet to.
   * @param offset the position where we are supposed to start writing in <tt>data</tt>.
   * @param length the number of <tt>byte</tt>s available for writing in <tt>data</tt>.
   * @return the number of bytes read
   * @throws IOException if <tt>length</tt> is less than the size of the packet.
   */
  protected int read(Buffer buffer, byte[] data, int offset, int length) throws IOException {
    if (data == null) throw new NullPointerException("data");
    if (ioError) return -1;

    RawPacket pkt;

    synchronized (pktSyncRoot) {
      pkt = this.pkt;
      this.pkt = null;
    }

    int pktLength;

    if (pkt == null) {
      pktLength = 0;
    } else {
      // By default, pkt will be returned to the pool after it was read.
      boolean poolPkt = true;

      try {
        pktLength = pkt.getLength();
        if (length < pktLength) {
          /*
           * If pkt is still the latest RawPacket made available to
           * reading, reinstate it for the next invocation of read;
           * otherwise, return it to the pool.
           */
          poolPkt = false;
          throw new IOException("Input buffer not big enough for " + pktLength);
        } else {
          byte[] pktBuffer = pkt.getBuffer();

          if (pktBuffer == null) {
            throw new NullPointerException(
                "pkt.buffer null, pkt.length " + pktLength + ", pkt.offset " + pkt.getOffset());
          } else {
            System.arraycopy(pkt.getBuffer(), pkt.getOffset(), data, offset, pktLength);
            if (buffer != null) buffer.setFlags(pkt.getFlags());
          }
        }
      } finally {
        if (!poolPkt) {
          synchronized (pktSyncRoot) {
            if (this.pkt == null) this.pkt = pkt;
            else poolPkt = true;
          }
        }
        if (poolPkt) {
          // Return pkt to the pool: it was either read successfully or could
          // not be reinstated for a subsequent read.
          poolRawPacket(pkt);
        }
      }
    }
    return pktLength;
  }

  /**
   * Logs the specified packet.
   *
   * @param packet the packet to log
   */
  protected abstract void doLogPacket(DatagramPacket packet);

  /**
   * Receives a datagram packet.
   *
   * @param p the packet to receive into
   * @throws IOException if something goes wrong during receiving
   */
  protected abstract void receivePacket(DatagramPacket p) throws IOException;
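  /**
   * Illustration (an assumption, not part of the library): a <tt>SourceTransferHandler</tt> which
   * copies the pending packet out of the stream when notified from the receiver thread; it would
   * be registered through {@link #setTransferHandler(SourceTransferHandler)}. Note that {@link
   * #read(byte[], int, int)} returns 0 when no packet is pending, -1 after an I/O error, and
   * throws an <tt>IOException</tt> when the supplied buffer is too small.
   */
  private static SourceTransferHandler exampleTransferHandler() {
    return new SourceTransferHandler() {
      private final byte[] buf = new byte[PACKET_RECEIVE_BUFFER_LENGTH];

      public void transferData(PushSourceStream stream) {
        try {
          int length = stream.read(buf, 0, buf.length);
          // A positive length is the size of the packet just consumed;
          // hand buf[0..length) to the consumer here.
        } catch (IOException ioe) {
          // The buffer was too small for the packet, or the stream hit an I/O error.
        }
      }
    };
  }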
  /**
   * Listens for incoming datagrams, stores them for reading by the <tt>read</tt> method and
   * notifies the local <tt>transferHandler</tt> that there's data to be read.
   */
  public void run() {
    DatagramPacket p = new DatagramPacket(buffer, 0, PACKET_RECEIVE_BUFFER_LENGTH);

    while (!closed) {
      try {
        // http://code.google.com/p/android/issues/detail?id=24765
        if (OSUtils.IS_ANDROID) p.setLength(PACKET_RECEIVE_BUFFER_LENGTH);

        receivePacket(p);
      } catch (IOException e) {
        ioError = true;
        break;
      }

      /*
       * Do the DatagramPacketFilters accept the received DatagramPacket?
       */
      DatagramPacketFilter[] datagramPacketFilters = getDatagramPacketFilters();
      boolean accept;

      if (!enabled) accept = false;
      else if (datagramPacketFilters == null) accept = true;
      else {
        accept = true;
        for (int i = 0; i < datagramPacketFilters.length; i++) {
          try {
            if (!datagramPacketFilters[i].accept(p)) {
              accept = false;
              break;
            }
          } catch (Throwable t) {
            // A misbehaving filter must not kill the receiver thread; ignore
            // everything except ThreadDeath.
            if (t instanceof ThreadDeath) throw (ThreadDeath) t;
          }
        }
      }

      if (accept) {
        RawPacket[] pkts = createRawPacket(p);

        for (int i = 0; i < pkts.length; i++) {
          RawPacket pkt = pkts[i];

          pkts[i] = null;
          if (pkt != null) {
            if (pkt.isInvalid()) {
              /*
               * Return pkt to the pool because it is invalid and,
               * consequently, will not be made available to
               * reading.
               */
              poolRawPacket(pkt);
            } else {
              RawPacket oldPkt;

              synchronized (pktSyncRoot) {
                oldPkt = this.pkt;
                this.pkt = pkt;
              }
              if (oldPkt != null) {
                /*
                 * Return oldPkt to the pool because it was made
                 * available to reading and it was not read.
                 */
                poolRawPacket(oldPkt);
              }

              if (videoRecorder != null) videoRecorder.recordData(pkt);

              if ((transferHandler != null) && !closed) {
                try {
                  transferHandler.transferData(this);
                } catch (Throwable t) {
                  /*
                   * XXX We cannot allow transferHandler to
                   * kill us.
                   */
                  if (t instanceof ThreadDeath) {
                    throw (ThreadDeath) t;
                  } else {
                    logger.warn("An RTP packet may not have been" + " fully handled.", t);
                  }
                }
              }
            }
          }
        }
        rawPacketArrayPool.offer(pkts);
      }
    }
  }

  /**
   * Sets the <tt>transferHandler</tt> that this connector should be notifying when new data is
   * available for reading.
   *
   * @param transferHandler the <tt>transferHandler</tt> that this connector should be notifying
   *     when new data is available for reading.
   */
  public void setTransferHandler(SourceTransferHandler transferHandler) {
    if (!closed) this.transferHandler = transferHandler;
  }

  /**
   * Changes the current thread priority.
   *
   * @param priority the new priority.
   */
  public void setPriority(int priority) {
    // currently no priority is set
    // if (receiverThread != null)
    //   receiverThread.setPriority(priority);
  }

  /**
   * Gets the <tt>DatagramPacketFilter</tt>s which allow dropping <tt>DatagramPacket</tt>s before
   * they are converted into <tt>RawPacket</tt>s.
   *
   * @return the <tt>DatagramPacketFilter</tt>s which allow dropping <tt>DatagramPacket</tt>s
   *     before they are converted into <tt>RawPacket</tt>s.
   */
  public synchronized DatagramPacketFilter[] getDatagramPacketFilters() {
    return datagramPacketFilters;
  }

  /**
   * Adds a <tt>DatagramPacketFilter</tt> which allows dropping <tt>DatagramPacket</tt>s before
   * they are converted into <tt>RawPacket</tt>s.
   *
   * @param datagramPacketFilter the <tt>DatagramPacketFilter</tt> which allows dropping
   *     <tt>DatagramPacket</tt>s before they are converted into <tt>RawPacket</tt>s
   */
  public synchronized void addDatagramPacketFilter(DatagramPacketFilter datagramPacketFilter) {
    if (datagramPacketFilter == null) throw new NullPointerException("datagramPacketFilter");

    if (datagramPacketFilters == null) {
      datagramPacketFilters = new DatagramPacketFilter[] {datagramPacketFilter};
    } else {
      final int length = datagramPacketFilters.length;

      for (int i = 0; i < length; i++)
        if (datagramPacketFilter.equals(datagramPacketFilters[i])) return;

      DatagramPacketFilter[] newDatagramPacketFilters = new DatagramPacketFilter[length + 1];

      System.arraycopy(datagramPacketFilters, 0, newDatagramPacketFilters, 0, length);
      newDatagramPacketFilters[length] = datagramPacketFilter;
      datagramPacketFilters = newDatagramPacketFilters;
    }
  }
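  /**
   * Illustration (an assumption, not part of the library): a hypothetical filter, registered
   * through {@link #addDatagramPacketFilter(DatagramPacketFilter)}, which drops datagrams shorter
   * than the 12-byte fixed RTP header. Filters run on the receiver thread before a datagram is
   * converted into a <tt>RawPacket</tt>.
   */
  private void exampleAddMinimalLengthFilter() {
    addDatagramPacketFilter(
        new DatagramPacketFilter() {
          public boolean accept(DatagramPacket p) {
            // 12 bytes is the fixed RTP header size; anything shorter is truncated.
            return p.getLength() >= 12;
          }
        });
  }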
  /**
   * Enables or disables this <tt>RTPConnectorInputStream</tt>. While the stream is disabled, it
   * does not accept any packets.
   *
   * @param enabled <tt>true</tt> to enable, <tt>false</tt> to disable.
   */
  public void setEnabled(boolean enabled) {
    if (logger.isDebugEnabled()) logger.debug("setEnabled: " + enabled);

    this.enabled = enabled;
  }
}
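/*
 * Illustration (an assumption, not part of the library): a minimal sketch of a
 * concrete subclass backed by a plain DatagramSocket, showing how extenders are
 * expected to supply receivePacket()/doLogPacket() and start the receiver thread
 * that drives run(). The library's real UDP subclass may differ in detail.
 */
class UdpInputStreamSketch extends RTPConnectorInputStream {
  private final java.net.DatagramSocket socket;

  UdpInputStreamSketch(java.net.DatagramSocket socket) {
    this.socket = socket;
    // The receive loop is RTPConnectorInputStream.run(): it filters each
    // datagram, publishes it as the current pkt and notifies the transferHandler.
    receiverThread = new Thread(this, "UdpInputStreamSketch");
    receiverThread.setDaemon(true);
    receiverThread.start();
  }

  @Override
  protected void receivePacket(DatagramPacket p) throws IOException {
    // Blocks until a datagram arrives; an IOException terminates the loop via ioError.
    socket.receive(p);
  }

  @Override
  protected void doLogPacket(DatagramPacket p) {
    // Packet logging is omitted in this sketch.
  }

  @Override
  public synchronized void close() {
    // Terminate run() and unblock the pending receive.
    closed = true;
    socket.close();
  }
}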