/**
 * Sets the priority of the calling thread to a specific value.
 *
 * @param threadPriority the priority to be set on the calling thread
 */
public static void setThreadPriority(int threadPriority) {
  Throwable exception = null;

  try {
    Process.setThreadPriority(threadPriority);
  } catch (IllegalArgumentException iae) {
    exception = iae;
  } catch (SecurityException se) {
    exception = se;
  }
  if (exception != null)
    logger.warn("Failed to set thread priority.", exception);
}
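/*
 * Illustrative sketch (not part of the original sources): a capture thread raising its own
 * priority via android.os.Process, which is what the helper above wraps (adding the exception
 * handling). THREAD_PRIORITY_URGENT_AUDIO is the platform constant intended for low-latency
 * audio work; the class name is ours.
 */
class AudioCaptureThreadSketch extends Thread {
  @Override
  public void run() {
    // Ask the scheduler to favor this thread for audio processing.
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

    // ... read from an AudioRecord in a loop ...
  }
}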
/** Configures echo cancellation and noise suppression effects. */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void configureEffects() {
  if (!AndroidUtils.hasAPI(16))
    return;

  AudioSystem audioSystem =
      AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_AUDIORECORD);

  // Creates echo canceler if available
  if (AcousticEchoCanceler.isAvailable()) {
    AcousticEchoCanceler echoCanceller =
        AcousticEchoCanceler.create(audioRecord.getAudioSessionId());

    if (echoCanceller != null) {
      echoCanceller.setEnableStatusListener(this);
      echoCanceller.setEnabled(audioSystem.isEchoCancel());
      logger.info("Echo cancellation: " + echoCanceller.getEnabled());
    }
  }

  // Creates automatic gain control if available
  if (AutomaticGainControl.isAvailable()) {
    AutomaticGainControl agc = AutomaticGainControl.create(audioRecord.getAudioSessionId());

    if (agc != null) {
      agc.setEnableStatusListener(this);
      agc.setEnabled(audioSystem.isAutomaticGainControl());
      logger.info("Auto gain control: " + agc.getEnabled());
    }
  }

  // Creates noise suppressor if available
  if (NoiseSuppressor.isAvailable()) {
    NoiseSuppressor noiseSuppressor = NoiseSuppressor.create(audioRecord.getAudioSessionId());

    if (noiseSuppressor != null) {
      noiseSuppressor.setEnableStatusListener(this);
      noiseSuppressor.setEnabled(audioSystem.isDenoise());
      logger.info("Noise suppressor: " + noiseSuppressor.getEnabled());
    }
  }
}
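/*
 * Illustrative sketch (not part of the original sources): the audioRecord field used above is
 * assumed to be an initialized android.media.AudioRecord. Creating one and attaching a
 * platform effect to its session looks roughly like this; the sample rate, channel config and
 * audio source are illustrative choices, and the class name is ours.
 */
class EffectAttachmentSketch {
  static android.media.AudioRecord createRecordWithAec() {
    int sampleRate = 16000;
    int bufferSize =
        android.media.AudioRecord.getMinBufferSize(
            sampleRate,
            android.media.AudioFormat.CHANNEL_IN_MONO,
            android.media.AudioFormat.ENCODING_PCM_16BIT);
    android.media.AudioRecord audioRecord =
        new android.media.AudioRecord(
            // VOICE_COMMUNICATION is the source the platform AEC/NS are tuned for.
            android.media.MediaRecorder.AudioSource.VOICE_COMMUNICATION,
            sampleRate,
            android.media.AudioFormat.CHANNEL_IN_MONO,
            android.media.AudioFormat.ENCODING_PCM_16BIT,
            bufferSize);

    // Effects attach to the recording session, not to the AudioRecord object itself.
    if (AcousticEchoCanceler.isAvailable()) {
      AcousticEchoCanceler aec = AcousticEchoCanceler.create(audioRecord.getAudioSessionId());

      if (aec != null)
        aec.setEnabled(true);
    }
    return audioRecord;
  }
}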
/**
 * Implements the media configuration panels: audio and video device selection with live
 * preview, encoding priorities and advanced video settings.
 *
 * @author Lyubomir Marinov
 * @author Damian Minkov
 * @author Yana Stamcheva
 */
public class MediaConfiguration {
  /** The <tt>Logger</tt> used by the <tt>MediaConfiguration</tt> class for logging output. */
  private static final Logger logger = Logger.getLogger(MediaConfiguration.class);

  /** The <tt>MediaService</tt> implementation used by <tt>MediaConfiguration</tt>. */
  private static final MediaServiceImpl mediaService = NeomediaActivator.getMediaServiceImpl();

  /** The preferred width of all panels. */
  private static final int WIDTH = 350;

  /**
   * Indicates if the Devices settings configuration tab should be disabled, i.e. not visible
   * to the user.
   */
  private static final String DEVICES_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.devicesconfig.DISABLED";

  /**
   * Indicates if the Audio/Video encodings configuration tab should be disabled, i.e. not
   * visible to the user.
   */
  private static final String ENCODINGS_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.encodingsconfig.DISABLED";

  /**
   * Indicates if the Video/More Settings configuration tab should be disabled, i.e. not
   * visible to the user.
   */
  private static final String VIDEO_MORE_SETTINGS_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.videomoresettingsconfig.DISABLED";

  /**
   * Returns the audio configuration panel.
   *
   * @return the audio configuration panel
   */
  public static Component createAudioConfigPanel() {
    return createControls(DeviceConfigurationComboBoxModel.AUDIO);
  }

  /**
   * Returns the video configuration panel.
   *
   * @return the video configuration panel
   */
  public static Component createVideoConfigPanel() {
    return createControls(DeviceConfigurationComboBoxModel.VIDEO);
  }

  private static void createAudioPreview(
      final AudioSystem audioSystem,
      final JComboBox comboBox,
      final SoundLevelIndicator soundLevelIndicator) {
    final ActionListener captureComboActionListener =
        new ActionListener() {
          private final SimpleAudioLevelListener audioLevelListener =
              new SimpleAudioLevelListener() {
                public void audioLevelChanged(int level) {
                  soundLevelIndicator.updateSoundLevel(level);
                }
              };

          private AudioMediaDeviceSession deviceSession;

          private final BufferTransferHandler transferHandler =
              new BufferTransferHandler() {
                public void transferData(PushBufferStream stream) {
                  try {
                    stream.read(transferHandlerBuffer);
                  } catch (IOException ioe) {
                    // Ignored: the read only serves to keep data flowing so that the audio
                    // level listener fires.
                  }
                }
              };

          private final Buffer transferHandlerBuffer = new Buffer();

          public void actionPerformed(ActionEvent event) {
            setDeviceSession(null);

            CaptureDeviceInfo cdi;

            if (comboBox == null) {
              cdi = soundLevelIndicator.isShowing() ? audioSystem.getCaptureDevice() : null;
            } else {
              Object selectedItem =
                  soundLevelIndicator.isShowing() ? comboBox.getSelectedItem() : null;

              cdi =
                  (selectedItem instanceof DeviceConfigurationComboBoxModel.CaptureDevice)
                      ? ((DeviceConfigurationComboBoxModel.CaptureDevice) selectedItem).info
                      : null;
            }

            if (cdi != null) {
              for (MediaDevice md : mediaService.getDevices(MediaType.AUDIO, MediaUseCase.ANY)) {
                if (md instanceof AudioMediaDeviceImpl) {
                  AudioMediaDeviceImpl amd = (AudioMediaDeviceImpl) md;

                  if (cdi.equals(amd.getCaptureDeviceInfo())) {
                    try {
                      MediaDeviceSession deviceSession = amd.createSession();
                      boolean setDeviceSession = false;

                      try {
                        if (deviceSession instanceof AudioMediaDeviceSession) {
                          setDeviceSession((AudioMediaDeviceSession) deviceSession);
                          setDeviceSession = true;
                        }
                      } finally {
                        if (!setDeviceSession)
                          deviceSession.close();
                      }
                    } catch (Throwable t) {
                      if (t instanceof ThreadDeath)
                        throw (ThreadDeath) t;
                    }
                    break;
                  }
                }
              }
            }
          }

          private void setDeviceSession(AudioMediaDeviceSession deviceSession) {
            if (this.deviceSession == deviceSession)
              return;

            if (this.deviceSession != null) {
              try {
                this.deviceSession.close();
              } finally {
                this.deviceSession.setLocalUserAudioLevelListener(null);
                soundLevelIndicator.resetSoundLevel();
              }
            }

            this.deviceSession = deviceSession;

            if (this.deviceSession != null) {
              this.deviceSession.setContentDescriptor(
                  new ContentDescriptor(ContentDescriptor.RAW));
              this.deviceSession.setLocalUserAudioLevelListener(audioLevelListener);
              this.deviceSession.start(MediaDirection.SENDONLY);

              try {
                DataSource dataSource = this.deviceSession.getOutputDataSource();

                dataSource.connect();

                PushBufferStream[] streams = ((PushBufferDataSource) dataSource).getStreams();

                for (PushBufferStream stream : streams)
                  stream.setTransferHandler(transferHandler);
                dataSource.start();
              } catch (Throwable t) {
                if (t instanceof ThreadDeath)
                  throw (ThreadDeath) t;
                else
                  setDeviceSession(null);
              }
            }
          }
        };

    if (comboBox != null)
      comboBox.addActionListener(captureComboActionListener);

    soundLevelIndicator.addHierarchyListener(
        new HierarchyListener() {
          public void hierarchyChanged(HierarchyEvent event) {
            if ((event.getChangeFlags() & HierarchyEvent.SHOWING_CHANGED) != 0) {
              SwingUtilities.invokeLater(
                  new Runnable() {
                    public void run() {
                      captureComboActionListener.actionPerformed(null);
                    }
                  });
            }
          }
        });
  }
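  /*
   * Illustrative sketch (not part of the original class): the preview above reads from the
   * capture stream purely to keep data flowing so that the audio level listener fires. The
   * same JMF drain pattern in isolation; the class name is ours.
   */
  static class DrainingTransferHandler implements BufferTransferHandler {
    private final Buffer buffer = new Buffer();

    public void transferData(PushBufferStream stream) {
      try {
        // Reading keeps the push stream flowing; the data itself is discarded here, while
        // side effects (such as level listeners attached upstream) still fire.
        stream.read(buffer);
      } catch (IOException ioe) {
        // A preview/drain loop can safely ignore transient read errors.
      }
    }
  }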
  /**
   * Creates the UI controls which are to control the details of a specific
   * <tt>AudioSystem</tt>.
   *
   * @param audioSystem the <tt>AudioSystem</tt> for which the UI controls to control its
   *     details are to be created
   * @param container the <tt>JComponent</tt> into which the UI controls which are to control
   *     the details of the specified <tt>audioSystem</tt> are to be added
   */
  public static void createAudioSystemControls(AudioSystem audioSystem, JComponent container) {
    GridBagConstraints constraints = new GridBagConstraints();

    constraints.anchor = GridBagConstraints.NORTHWEST;
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.weighty = 0;

    int audioSystemFeatures = audioSystem.getFeatures();
    boolean featureNotifyAndPlaybackDevices =
        ((audioSystemFeatures & AudioSystem.FEATURE_NOTIFY_AND_PLAYBACK_DEVICES) != 0);

    constraints.gridx = 0;
    constraints.insets = new Insets(3, 0, 3, 3);
    constraints.weightx = 0;

    constraints.gridy = 0;
    container.add(
        new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_CAPTURE)), constraints);
    if (featureNotifyAndPlaybackDevices) {
      constraints.gridy = 2;
      container.add(
          new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK)),
          constraints);
      constraints.gridy = 3;
      container.add(
          new JLabel(getLabelText(DeviceConfigurationComboBoxModel.AUDIO_NOTIFY)), constraints);
    }

    constraints.gridx = 1;
    constraints.insets = new Insets(3, 3, 3, 0);
    constraints.weightx = 1;

    JComboBox captureCombo = null;

    if (featureNotifyAndPlaybackDevices) {
      captureCombo = new JComboBox();
      captureCombo.setEditable(false);
      captureCombo.setModel(
          new DeviceConfigurationComboBoxModel(
              captureCombo,
              mediaService.getDeviceConfiguration(),
              DeviceConfigurationComboBoxModel.AUDIO_CAPTURE));
      constraints.gridy = 0;
      container.add(captureCombo, constraints);
    }

    int anchor = constraints.anchor;
    SoundLevelIndicator capturePreview =
        new SoundLevelIndicator(
            SimpleAudioLevelListener.MIN_LEVEL, SimpleAudioLevelListener.MAX_LEVEL);

    constraints.anchor = GridBagConstraints.CENTER;
    constraints.gridy = (captureCombo == null) ? 0 : 1;
    container.add(capturePreview, constraints);
    constraints.anchor = anchor;

    constraints.gridy = GridBagConstraints.RELATIVE;

    if (featureNotifyAndPlaybackDevices) {
      JComboBox playbackCombo = new JComboBox();

      playbackCombo.setEditable(false);
      playbackCombo.setModel(
          new DeviceConfigurationComboBoxModel(
              captureCombo,
              mediaService.getDeviceConfiguration(),
              DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK));
      container.add(playbackCombo, constraints);

      JComboBox notifyCombo = new JComboBox();

      notifyCombo.setEditable(false);
      notifyCombo.setModel(
          new DeviceConfigurationComboBoxModel(
              captureCombo,
              mediaService.getDeviceConfiguration(),
              DeviceConfigurationComboBoxModel.AUDIO_NOTIFY));
      container.add(notifyCombo, constraints);
    }

    if ((AudioSystem.FEATURE_ECHO_CANCELLATION & audioSystemFeatures) != 0) {
      final SIPCommCheckBox echoCancelCheckBox =
          new SIPCommCheckBox(
              NeomediaActivator.getResources().getI18NString("impl.media.configform.ECHOCANCEL"));

      /*
       * First set the selected state, then add the listener, in order to avoid saving the
       * value when it is merely being shown to the user with its default, unmodified value.
       */
      echoCancelCheckBox.setSelected(mediaService.getDeviceConfiguration().isEchoCancel());
      echoCancelCheckBox.addItemListener(
          new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
              mediaService
                  .getDeviceConfiguration()
                  .setEchoCancel(echoCancelCheckBox.isSelected());
            }
          });
      container.add(echoCancelCheckBox, constraints);
    }

    if ((AudioSystem.FEATURE_DENOISE & audioSystemFeatures) != 0) {
      final SIPCommCheckBox denoiseCheckBox =
          new SIPCommCheckBox(
              NeomediaActivator.getResources().getI18NString("impl.media.configform.DENOISE"));

      /*
       * First set the selected state, then add the listener, in order to avoid saving the
       * value when it is merely being shown to the user with its default, unmodified value.
       */
      denoiseCheckBox.setSelected(mediaService.getDeviceConfiguration().isDenoise());
      denoiseCheckBox.addItemListener(
          new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
              mediaService.getDeviceConfiguration().setDenoise(denoiseCheckBox.isSelected());
            }
          });
      container.add(denoiseCheckBox, constraints);
    }

    createAudioPreview(audioSystem, captureCombo, capturePreview);
  }
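  /*
   * Illustrative sketch (not part of the original class): the set-before-listen ordering used
   * for the checkboxes above is a general Swing idiom. Initializing the control before
   * attaching the listener prevents the initialization itself from triggering a save.
   * loadPersistedValue() and persistValue() are hypothetical stand-ins.
   */
  static void setThenListenSketch(final JCheckBox option) {
    option.setSelected(loadPersistedValue()); // 1. reflect the stored value first
    option.addItemListener( // 2. only now react to user changes
        new ItemListener() {
          public void itemStateChanged(ItemEvent e) {
            persistValue(option.isSelected());
          }
        });
  }

  static boolean loadPersistedValue() { return false; } // hypothetical stand-in

  static void persistValue(boolean value) {} // hypothetical stand-in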
  /**
   * Creates basic controls for a type (AUDIO or VIDEO).
   *
   * @param type the type.
   * @return the built Component.
   */
  public static Component createBasicControls(final int type) {
    final JComboBox deviceComboBox = new JComboBox();

    deviceComboBox.setEditable(false);
    deviceComboBox.setModel(
        new DeviceConfigurationComboBoxModel(
            deviceComboBox, mediaService.getDeviceConfiguration(), type));

    JLabel deviceLabel = new JLabel(getLabelText(type));

    deviceLabel.setDisplayedMnemonic(getDisplayedMnemonic(type));
    deviceLabel.setLabelFor(deviceComboBox);

    final Container devicePanel = new TransparentPanel(new FlowLayout(FlowLayout.CENTER));

    devicePanel.setMaximumSize(new Dimension(WIDTH, 25));
    devicePanel.add(deviceLabel);
    devicePanel.add(deviceComboBox);

    final JPanel deviceAndPreviewPanel = new TransparentPanel(new BorderLayout());
    int preferredDeviceAndPreviewPanelHeight;

    switch (type) {
      case DeviceConfigurationComboBoxModel.AUDIO:
        preferredDeviceAndPreviewPanelHeight = 225;
        break;
      case DeviceConfigurationComboBoxModel.VIDEO:
        preferredDeviceAndPreviewPanelHeight = 305;
        break;
      default:
        preferredDeviceAndPreviewPanelHeight = 0;
        break;
    }
    if (preferredDeviceAndPreviewPanelHeight > 0)
      deviceAndPreviewPanel.setPreferredSize(
          new Dimension(WIDTH, preferredDeviceAndPreviewPanelHeight));
    deviceAndPreviewPanel.add(devicePanel, BorderLayout.NORTH);

    final ActionListener deviceComboBoxActionListener =
        new ActionListener() {
          public void actionPerformed(ActionEvent event) {
            boolean revalidateAndRepaint = false;

            for (int i = deviceAndPreviewPanel.getComponentCount() - 1; i >= 0; i--) {
              Component c = deviceAndPreviewPanel.getComponent(i);

              if (c != devicePanel) {
                deviceAndPreviewPanel.remove(i);
                revalidateAndRepaint = true;
              }
            }

            Component preview = null;

            if ((deviceComboBox.getSelectedItem() != null) && deviceComboBox.isShowing()) {
              preview =
                  createPreview(type, deviceComboBox, deviceAndPreviewPanel.getPreferredSize());
            }

            if (preview != null) {
              deviceAndPreviewPanel.add(preview, BorderLayout.CENTER);
              revalidateAndRepaint = true;
            }

            if (revalidateAndRepaint) {
              deviceAndPreviewPanel.revalidate();
              deviceAndPreviewPanel.repaint();
            }
          }
        };

    deviceComboBox.addActionListener(deviceComboBoxActionListener);
    /*
     * We have to initialize the controls to reflect the configuration at the time this
     * instance is created. Additionally, because the video preview will stop when it and its
     * associated controls become unnecessary, we have to restart it when those controls
     * become necessary again. We address both goals by pretending there is a selection in the
     * video combo box when that combo box becomes displayable.
     */
    deviceComboBox.addHierarchyListener(
        new HierarchyListener() {
          public void hierarchyChanged(HierarchyEvent event) {
            if ((event.getChangeFlags() & HierarchyEvent.SHOWING_CHANGED) != 0) {
              SwingUtilities.invokeLater(
                  new Runnable() {
                    public void run() {
                      deviceComboBoxActionListener.actionPerformed(null);
                    }
                  });
            }
          }
        });

    return deviceAndPreviewPanel;
  }

  /**
   * Creates all the controls (including encodings) for a type (AUDIO or VIDEO).
   *
   * @param type the type.
   * @return the built Component.
   */
  private static Component createControls(int type) {
    ConfigurationService cfg = NeomediaActivator.getConfigurationService();
    SIPCommTabbedPane container = new SIPCommTabbedPane();
    ResourceManagementService res = NeomediaActivator.getResources();

    if ((cfg == null) || !cfg.getBoolean(DEVICES_DISABLED_PROP, false)) {
      container.insertTab(
          res.getI18NString("impl.media.configform.DEVICES"),
          null,
          createBasicControls(type),
          null,
          0);
    }
    if ((cfg == null) || !cfg.getBoolean(ENCODINGS_DISABLED_PROP, false)) {
      container.insertTab(
          res.getI18NString("impl.media.configform.ENCODINGS"),
          null,
          new PriorityTable(
              new EncodingConfigurationTableModel(mediaService.getEncodingConfiguration(), type),
              100),
          null,
          1);
    }
    if ((type == DeviceConfigurationComboBoxModel.VIDEO)
        && ((cfg == null) || !cfg.getBoolean(VIDEO_MORE_SETTINGS_DISABLED_PROP, false))) {
      container.insertTab(
          res.getI18NString("impl.media.configform.VIDEO_MORE_SETTINGS"),
          null,
          createVideoAdvancedSettings(),
          null,
          2);
    }
    return container;
  }

  /**
   * Creates a preview for the (video) device in the video container.
   *
   * @param device the device
   * @param videoContainer the video container
   * @throws IOException a problem accessing the device
   * @throws MediaException a problem getting the preview
   */
  private static void createVideoPreview(CaptureDeviceInfo device, JComponent videoContainer)
      throws IOException, MediaException {
    videoContainer.removeAll();
    videoContainer.revalidate();
    videoContainer.repaint();

    if (device == null)
      return;

    for (MediaDevice mediaDevice : mediaService.getDevices(MediaType.VIDEO, MediaUseCase.ANY)) {
      if (((MediaDeviceImpl) mediaDevice).getCaptureDeviceInfo().equals(device)) {
        Dimension videoContainerSize = videoContainer.getPreferredSize();
        Component preview =
            (Component)
                mediaService.getVideoPreviewComponent(
                    mediaDevice, videoContainerSize.width, videoContainerSize.height);

        if (preview != null)
          videoContainer.add(preview);
        break;
      }
    }
  }

  /**
   * Creates a preview component.
   *
   * @param type the type
   * @param comboBox the options
   * @param prefSize the preferred size
   * @return the component
   */
  private static Component createPreview(int type, final JComboBox comboBox, Dimension prefSize) {
    JComponent preview = null;

    if (type == DeviceConfigurationComboBoxModel.AUDIO) {
      Object selectedItem = comboBox.getSelectedItem();

      if (selectedItem instanceof AudioSystem) {
        AudioSystem audioSystem = (AudioSystem) selectedItem;

        if (!NoneAudioSystem.LOCATOR_PROTOCOL.equalsIgnoreCase(
            audioSystem.getLocatorProtocol())) {
          preview = new TransparentPanel(new GridBagLayout());
          createAudioSystemControls(audioSystem, preview);
        }
      }
    } else if (type == DeviceConfigurationComboBoxModel.VIDEO) {
      JLabel noPreview =
          new JLabel(
              NeomediaActivator.getResources().getI18NString("impl.media.configform.NO_PREVIEW"));

      noPreview.setHorizontalAlignment(SwingConstants.CENTER);
      noPreview.setVerticalAlignment(SwingConstants.CENTER);

      preview = createVideoContainer(noPreview);
      preview.setPreferredSize(prefSize);

      Object selectedItem = comboBox.getSelectedItem();
      CaptureDeviceInfo device = null;

      if (selectedItem instanceof DeviceConfigurationComboBoxModel.CaptureDevice)
        device = ((DeviceConfigurationComboBoxModel.CaptureDevice) selectedItem).info;

      Exception exception;

      try {
        createVideoPreview(device, preview);
        exception = null;
      } catch (IOException ex) {
        exception = ex;
      } catch (MediaException ex) {
        exception = ex;
      }
      if (exception != null) {
        logger.error("Failed to create preview for device " + device, exception);
        device = null;
      }
    }
    return preview;
  }

  /**
   * Creates the video container.
   *
   * @param noVideoComponent the container component.
   * @return the video container.
   */
  private static JComponent createVideoContainer(Component noVideoComponent) {
    return new VideoContainer(noVideoComponent, false);
  }

  /**
   * The mnemonic for a type.
   *
   * @param type audio or video type.
   * @return the mnemonic.
   */
  private static char getDisplayedMnemonic(int type) {
    switch (type) {
      case DeviceConfigurationComboBoxModel.AUDIO:
        return NeomediaActivator.getResources().getI18nMnemonic("impl.media.configform.AUDIO");
      case DeviceConfigurationComboBoxModel.VIDEO:
        return NeomediaActivator.getResources().getI18nMnemonic("impl.media.configform.VIDEO");
      default:
        throw new IllegalArgumentException("type");
    }
  }

  /**
   * A label for a type.
   *
   * @param type the type.
   * @return the label.
   */
  private static String getLabelText(int type) {
    switch (type) {
      case DeviceConfigurationComboBoxModel.AUDIO:
        return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO");
      case DeviceConfigurationComboBoxModel.AUDIO_CAPTURE:
        return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO_IN");
      case DeviceConfigurationComboBoxModel.AUDIO_NOTIFY:
        return NeomediaActivator.getResources()
            .getI18NString("impl.media.configform.AUDIO_NOTIFY");
      case DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK:
        return NeomediaActivator.getResources().getI18NString("impl.media.configform.AUDIO_OUT");
      case DeviceConfigurationComboBoxModel.VIDEO:
        return NeomediaActivator.getResources().getI18NString("impl.media.configform.VIDEO");
      default:
        throw new IllegalArgumentException("type");
    }
  }
  /**
   * Creates the video advanced settings.
   *
   * @return the video advanced settings panel.
   */
  private static Component createVideoAdvancedSettings() {
    ResourceManagementService resources = NeomediaActivator.getResources();
    final DeviceConfiguration deviceConfig = mediaService.getDeviceConfiguration();

    TransparentPanel centerPanel = new TransparentPanel(new GridBagLayout());
    centerPanel.setMaximumSize(new Dimension(WIDTH, 150));

    JButton resetDefaultsButton =
        new JButton(resources.getI18NString("impl.media.configform.VIDEO_RESET"));
    JPanel resetButtonPanel = new TransparentPanel(new FlowLayout(FlowLayout.RIGHT));
    resetButtonPanel.add(resetDefaultsButton);

    final JPanel centerAdvancedPanel = new TransparentPanel(new BorderLayout());
    centerAdvancedPanel.add(centerPanel, BorderLayout.NORTH);
    centerAdvancedPanel.add(resetButtonPanel, BorderLayout.SOUTH);

    GridBagConstraints constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.anchor = GridBagConstraints.NORTHWEST;
    constraints.insets = new Insets(5, 5, 0, 0);
    constraints.gridx = 0;
    constraints.weightx = 0;
    constraints.weighty = 0;
    constraints.gridy = 0;

    centerPanel.add(
        new JLabel(resources.getI18NString("impl.media.configform.VIDEO_RESOLUTION")),
        constraints);
    constraints.gridy = 1;
    constraints.insets = new Insets(0, 0, 0, 0);
    final JCheckBox frameRateCheck =
        new SIPCommCheckBox(resources.getI18NString("impl.media.configform.VIDEO_FRAME_RATE"));
    centerPanel.add(frameRateCheck, constraints);
    constraints.gridy = 2;
    constraints.insets = new Insets(5, 5, 0, 0);
    centerPanel.add(
        new JLabel(resources.getI18NString("impl.media.configform.VIDEO_PACKETS_POLICY")),
        constraints);

    constraints.weightx = 1;
    constraints.gridx = 1;
    constraints.gridy = 0;
    constraints.insets = new Insets(5, 0, 0, 5);
    Object[] resolutionValues =
        new Object[DeviceConfiguration.SUPPORTED_RESOLUTIONS.length + 1];
    System.arraycopy(
        DeviceConfiguration.SUPPORTED_RESOLUTIONS,
        0,
        resolutionValues,
        1,
        DeviceConfiguration.SUPPORTED_RESOLUTIONS.length);
    final JComboBox sizeCombo = new JComboBox(resolutionValues);
    sizeCombo.setRenderer(new ResolutionCellRenderer());
    sizeCombo.setEditable(false);
    centerPanel.add(sizeCombo, constraints);

    // the default frame rate is 20
    final JSpinner frameRate = new JSpinner(new SpinnerNumberModel(20, 5, 30, 1));
    frameRate.addChangeListener(
        new ChangeListener() {
          public void stateChanged(ChangeEvent e) {
            deviceConfig.setFrameRate(
                ((SpinnerNumberModel) frameRate.getModel()).getNumber().intValue());
          }
        });
    constraints.gridy = 1;
    constraints.insets = new Insets(0, 0, 0, 5);
    centerPanel.add(frameRate, constraints);

    frameRateCheck.addActionListener(
        new ActionListener() {
          public void actionPerformed(ActionEvent e) {
            if (frameRateCheck.isSelected()) {
              deviceConfig.setFrameRate(
                  ((SpinnerNumberModel) frameRate.getModel()).getNumber().intValue());
            } else {
              // unlimited frame rate
              deviceConfig.setFrameRate(-1);
            }
            frameRate.setEnabled(frameRateCheck.isSelected());
          }
        });

    final JSpinner videoMaxBandwidth =
        new JSpinner(
            new SpinnerNumberModel(
                deviceConfig.getVideoMaxBandwidth(), 1, Integer.MAX_VALUE, 1));
    videoMaxBandwidth.addChangeListener(
        new ChangeListener() {
          public void stateChanged(ChangeEvent e) {
            deviceConfig.setVideoMaxBandwidth(
                ((SpinnerNumberModel) videoMaxBandwidth.getModel()).getNumber().intValue());
          }
        });
    constraints.gridx = 1;
    constraints.gridy = 2;
    constraints.insets = new Insets(0, 0, 5, 5);
    centerPanel.add(videoMaxBandwidth, constraints);

    resetDefaultsButton.addActionListener(
        new ActionListener() {
          public void actionPerformed(ActionEvent e) {
            // reset to defaults
            sizeCombo.setSelectedIndex(0);
            frameRateCheck.setSelected(false);
            frameRate.setEnabled(false);
            frameRate.setValue(20);
            // unlimited frame rate
            deviceConfig.setFrameRate(-1);
            videoMaxBandwidth.setValue(DeviceConfiguration.DEFAULT_VIDEO_MAX_BANDWIDTH);
          }
        });

    // load the selected value or auto
    Dimension videoSize = deviceConfig.getVideoSize();

    if ((videoSize.getHeight() != DeviceConfiguration.DEFAULT_VIDEO_HEIGHT)
        && (videoSize.getWidth() != DeviceConfiguration.DEFAULT_VIDEO_WIDTH))
      sizeCombo.setSelectedItem(deviceConfig.getVideoSize());
    else
      sizeCombo.setSelectedIndex(0);
    sizeCombo.addActionListener(
        new ActionListener() {
          public void actionPerformed(ActionEvent e) {
            Dimension selectedVideoSize = (Dimension) sizeCombo.getSelectedItem();

            if (selectedVideoSize == null) {
              // the auto value, i.e. the default
              selectedVideoSize =
                  new Dimension(
                      DeviceConfiguration.DEFAULT_VIDEO_WIDTH,
                      DeviceConfiguration.DEFAULT_VIDEO_HEIGHT);
            }
            deviceConfig.setVideoSize(selectedVideoSize);
          }
        });

    frameRateCheck.setSelected(
        deviceConfig.getFrameRate() != DeviceConfiguration.DEFAULT_VIDEO_FRAMERATE);
    frameRate.setEnabled(frameRateCheck.isSelected());

    if (frameRate.isEnabled())
      frameRate.setValue(deviceConfig.getFrameRate());

    return centerAdvancedPanel;
  }

  /** Renders the available resolutions in the combo box. */
  private static class ResolutionCellRenderer extends DefaultListCellRenderer {
    /**
     * The serialization version number of the <tt>ResolutionCellRenderer</tt> class. Defined
     * to the value of <tt>0</tt> because <tt>ResolutionCellRenderer</tt> instances do not have
     * state of their own.
     */
    private static final long serialVersionUID = 0L;

    /**
     * Sets readable text describing the resolution; if the selected value is null, the string
     * "Auto" is shown.
     *
     * @param list the list being rendered
     * @param value the value to render
     * @param index the index of the value in the list
     * @param isSelected whether the cell is selected
     * @param cellHasFocus whether the cell has focus
     * @return this renderer, configured for the given value
     */
    @Override
    public Component getListCellRendererComponent(
        JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
      // call super to set backgrounds and fonts
      super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);

      // now just change the text
      if (value == null)
        setText("Auto");
      else if (value instanceof Dimension) {
        Dimension d = (Dimension) value;

        setText(((int) d.getWidth()) + "x" + ((int) d.getHeight()));
      }
      return this;
    }
  }
}
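/*
 * Illustrative usage sketch (not part of the original sources): embedding the configuration
 * panels in a host window. This is only meaningful once the OSGi environment behind
 * NeomediaActivator has been initialized; the frame and class name are ours, and standard
 * javax.swing imports are assumed.
 */
class MediaConfigurationFrameSketch {
  public static void main(String[] args) {
    SwingUtilities.invokeLater(
        new Runnable() {
          public void run() {
            JFrame frame = new JFrame("Media configuration");

            frame.getContentPane().add(MediaConfiguration.createAudioConfigPanel());
            frame.pack();
            frame.setVisible(true);
          }
        });
  }
}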
/**
 * A <tt>Recorder</tt> implementation which attaches to an <tt>RTPTranslator</tt>.
 *
 * @author Vladimir Marinov
 * @author Boris Grozev
 */
public class RecorderRtpImpl
    implements Recorder, ReceiveStreamListener, ActiveSpeakerChangedListener, ControllerListener {
  /**
   * The <tt>Logger</tt> used by the <tt>RecorderRtpImpl</tt> class and its instances for
   * logging output.
   */
  private static final Logger logger = Logger.getLogger(RecorderRtpImpl.class);

  // Values hard-coded to match Chrome.
  // TODO: allow setting them dynamically.
  private static final byte redPayloadType = 116;
  private static final byte ulpfecPayloadType = 117;
  private static final byte vp8PayloadType = 100;
  private static final byte opusPayloadType = 111;

  private static final Format redFormat = new VideoFormat(Constants.RED);
  private static final Format ulpfecFormat = new VideoFormat(Constants.ULPFEC);
  private static final Format vp8RtpFormat = new VideoFormat(Constants.VP8_RTP);
  private static final Format vp8Format = new VideoFormat(Constants.VP8);
  private static final Format opusFormat =
      new AudioFormat(Constants.OPUS_RTP, 48000, Format.NOT_SPECIFIED, Format.NOT_SPECIFIED);

  private static final int FMJ_VIDEO_JITTER_BUFFER_MIN_SIZE = 300;

  /** The <tt>ContentDescriptor</tt> to use when saving audio. */
  private static final ContentDescriptor AUDIO_CONTENT_DESCRIPTOR =
      new ContentDescriptor(FileTypeDescriptor.MPEG_AUDIO);

  /** The suffix for audio file names. */
  private static final String AUDIO_FILENAME_SUFFIX = ".mp3";

  /** The suffix for video file names. */
  private static final String VIDEO_FILENAME_SUFFIX = ".webm";

  static {
    Registry.set("video_jitter_buffer_MIN_SIZE", FMJ_VIDEO_JITTER_BUFFER_MIN_SIZE);
  }

  /** The <tt>RTPTranslator</tt> that this recorder is/will be attached to. */
  private RTPTranslatorImpl translator;

  /**
   * The custom <tt>RTPConnector</tt> that this instance uses to read from {@link #translator}
   * and write to {@link #rtpManager}.
   */
  private RTPConnectorImpl rtpConnector;

  /** Path to the directory where the output files will be stored. */
  private String path;

  /** The <tt>RTCPFeedbackMessageSender</tt> that we use to send RTCP FIR messages. */
  private RTCPFeedbackMessageSender rtcpFeedbackSender;

  /**
   * The {@link RTPManager} instance we use to handle the packets coming from
   * <tt>RTPTranslator</tt>.
   */
  private RTPManager rtpManager;

  /**
   * The instance which should be notified when events related to recordings (such as the start
   * or end of a recording) occur.
   */
  private RecorderEventHandlerImpl eventHandler;

  /**
   * Holds the <tt>ReceiveStreams</tt> added to this instance by {@link #rtpManager} and
   * additional information associated with each one (e.g. the <tt>Processor</tt>, if any, used
   * for it).
   */
  private final HashSet<ReceiveStreamDesc> receiveStreams = new HashSet<ReceiveStreamDesc>();

  private final Set<Long> activeVideoSsrcs = new HashSet<Long>();

  /**
   * The <tt>ActiveSpeakerDetector</tt> which will listen to the audio receive streams of this
   * <tt>RecorderRtpImpl</tt> and notify it about changes to the active speaker via calls to
   * {@link #activeSpeakerChanged(long)}
   */
  private ActiveSpeakerDetector activeSpeakerDetector = null;

  StreamRTPManager streamRTPManager;

  private SynchronizerImpl synchronizer;

  private boolean started = false;

  /**
   * Constructor.
   *
   * @param translator the <tt>RTPTranslator</tt> to which this instance will attach in order
   *     to record media.
   */
  public RecorderRtpImpl(RTPTranslator translator) {
    this.translator = (RTPTranslatorImpl) translator;

    activeSpeakerDetector = new ActiveSpeakerDetectorImpl();
    activeSpeakerDetector.addActiveSpeakerChangedListener(this);
  }

  /** Implements {@link Recorder#addListener(Recorder.Listener)}. */
  @Override
  public void addListener(Listener listener) {}

  /** Implements {@link Recorder#removeListener(Recorder.Listener)}. */
  @Override
  public void removeListener(Listener listener) {}

  /** Implements {@link Recorder#getSupportedFormats()}. */
  @Override
  public List<String> getSupportedFormats() {
    return null;
  }

  /** Implements {@link Recorder#setMute(boolean)}. */
  @Override
  public void setMute(boolean mute) {}

  /**
   * Implements {@link Recorder#getFilename()}. Returns null, since we don't have a (single)
   * associated filename.
   */
  @Override
  public String getFilename() {
    return null;
  }

  /**
   * Sets the instance which should be notified when events related to recordings (such as the
   * start or end of a recording) occur.
   */
  public void setEventHandler(RecorderEventHandler eventHandler) {
    if (this.eventHandler == null
        || (this.eventHandler != eventHandler && this.eventHandler.handler != eventHandler)) {
      if (this.eventHandler == null)
        this.eventHandler = new RecorderEventHandlerImpl(eventHandler);
      else
        this.eventHandler.handler = eventHandler;
    }
  }

  /**
   * {@inheritDoc}
   *
   * @param format unused, since this implementation records multiple streams using potentially
   *     different formats.
   * @param dirname the path to the directory into which this <tt>Recorder</tt> will store the
   *     recorded media files.
   */
  @Override
  public void start(String format, String dirname) throws IOException, MediaException {
    if (logger.isInfoEnabled())
      logger.info("Starting, format=" + format + " " + hashCode());
    path = dirname;

    MediaService mediaService = LibJitsi.getMediaService();

    /*
     * Note that we use only one RTPConnector for both the RTPTranslator and the RTPManager
     * instances. The this.translator will write to its output streams, and this.rtpManager
     * will read from its input streams.
     */
    rtpConnector = new RTPConnectorImpl(redPayloadType, ulpfecPayloadType);

    rtpManager = RTPManager.newInstance();

    /*
     * Add the formats that we know about.
     */
    rtpManager.addFormat(vp8RtpFormat, vp8PayloadType);
    rtpManager.addFormat(opusFormat, opusPayloadType);
    rtpManager.addReceiveStreamListener(this);

    /*
     * Note: When this.rtpManager sends RTCP sender/receiver reports, they will end up being
     * written to its own input stream. This is not expected to cause problems, but might be
     * something to keep an eye on.
     */
    rtpManager.initialize(rtpConnector);

    /*
     * Register a fake call participant.
     * TODO: can we use a more generic MediaStream here?
     */
    streamRTPManager =
        new StreamRTPManager(
            mediaService.createMediaStream(
                new MediaDeviceImpl(new CaptureDeviceInfo(), MediaType.VIDEO)),
            translator);

    streamRTPManager.initialize(rtpConnector);

    rtcpFeedbackSender = translator.getRtcpFeedbackMessageSender();

    translator.addFormat(streamRTPManager, opusFormat, opusPayloadType);

    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, redFormat,
    //     redPayloadType);
    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager, ulpfecFormat,
    //     ulpfecPayloadType);
    // ((RTPTranslatorImpl)videoRTPTranslator).addFormat(streamRTPManager,
    //     mediaFormatImpl.getFormat(), vp8PayloadType);

    started = true;
  }

  @Override
  public void stop() {
    if (started) {
      if (logger.isInfoEnabled())
        logger.info("Stopping " + hashCode());

      // Remove the recorder from the translator (i.e. stop new packets from being written to
      // rtpConnector).
      if (streamRTPManager != null)
        streamRTPManager.dispose();

      HashSet<ReceiveStreamDesc> streamsToRemove = new HashSet<ReceiveStreamDesc>();

      synchronized (receiveStreams) {
        streamsToRemove.addAll(receiveStreams);
      }

      for (ReceiveStreamDesc r : streamsToRemove)
        removeReceiveStream(r, false);

      rtpConnector.rtcpPacketTransformer.close();
      rtpConnector.rtpPacketTransformer.close();
      rtpManager.dispose();

      started = false;
    }
  }
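  /*
   * Illustrative usage sketch (not part of the original class): assumes a running LibJitsi
   * environment and that MediaService.createRTPTranslator() is available to obtain a
   * translator; the output directory is arbitrary.
   */
  static void usageSketch() throws IOException, MediaException {
    LibJitsi.start();
    try {
      MediaService mediaService = LibJitsi.getMediaService();
      RTPTranslator translator = mediaService.createRTPTranslator();
      RecorderRtpImpl recorder = new RecorderRtpImpl(translator);

      recorder.start(null, "/tmp/recordings"); // the format argument is unused

      // ... attach the translator to the conference's media streams, record ...

      recorder.stop();
    } finally {
      LibJitsi.stop();
    }
  }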
  /**
   * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
   *
   * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
   */
  @Override
  public void update(ReceiveStreamEvent event) {
    if (event == null)
      return;
    ReceiveStream receiveStream = event.getReceiveStream();

    if (event instanceof NewReceiveStreamEvent) {
      if (receiveStream == null) {
        logger.warn("NewReceiveStreamEvent: null");
        return;
      }

      final long ssrc = getReceiveStreamSSRC(receiveStream);

      ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);

      if (receiveStreamDesc != null) {
        String s = "NewReceiveStreamEvent for an existing SSRC. ";

        if (receiveStream != receiveStreamDesc.receiveStream)
          s += "(but different ReceiveStream object)";
        logger.warn(s);
        return;
      } else
        receiveStreamDesc = new ReceiveStreamDesc(receiveStream);

      if (logger.isInfoEnabled())
        logger.info("New ReceiveStream, ssrc=" + ssrc);

      // Find the format of the ReceiveStream
      DataSource dataSource = receiveStream.getDataSource();

      if (dataSource instanceof PushBufferDataSource) {
        Format format = null;
        PushBufferDataSource pbds = (PushBufferDataSource) dataSource;

        for (PushBufferStream pbs : pbds.getStreams()) {
          if ((format = pbs.getFormat()) != null)
            break;
        }

        if (format == null) {
          logger.error("Failed to handle new ReceiveStream: Failed to determine format");
          return;
        }

        receiveStreamDesc.format = format;
      } else {
        logger.error("Failed to handle new ReceiveStream: Unsupported DataSource");
        return;
      }

      int rtpClockRate = -1;

      if (receiveStreamDesc.format instanceof AudioFormat)
        rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
      else if (receiveStreamDesc.format instanceof VideoFormat)
        rtpClockRate = 90000;
      getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

      // create a Processor and configure it
      Processor processor = null;

      try {
        processor = Manager.createProcessor(receiveStream.getDataSource());
      } catch (NoProcessorException npe) {
        logger.error("Failed to create Processor: ", npe);
        return;
      } catch (IOException ioe) {
        logger.error("Failed to create Processor: ", ioe);
        return;
      }

      if (logger.isInfoEnabled())
        logger.info("Created processor for SSRC=" + ssrc);

      processor.addControllerListener(this);
      receiveStreamDesc.processor = processor;

      final int streamCount;

      synchronized (receiveStreams) {
        receiveStreams.add(receiveStreamDesc);
        streamCount = receiveStreams.size();
      }

      /*
       * XXX TODO IRBABOON
       * This is a terrible hack which works around a failure to realize() some of the
       * Processor-s for audio streams, when multiple streams start nearly simultaneously.
       * The cause of the problem is currently unknown (and synchronizing all FMJ calls in
       * RecorderRtpImpl does not help).
       * XXX TODO NOOBABRI
       */
      if (receiveStreamDesc.format instanceof AudioFormat) {
        final Processor p = processor;

        new Thread() {
          @Override
          public void run() {
            // Delay configuring the processors for the different audio streams, to decrease
            // the probability that they run together.
            try {
              int ms = 450 * (streamCount - 1);

              logger.warn(
                  "Sleeping for "
                      + ms
                      + "ms before configuring processor for SSRC="
                      + ssrc
                      + " "
                      + System.currentTimeMillis());
              Thread.sleep(ms);
            } catch (Exception e) {
            }

            p.configure();
          }
        }.run(); // note: run() (not start()) executes the delay inline on the calling thread
      } else {
        processor.configure();
      }
    } else if (event instanceof TimeoutEvent) {
      if (receiveStream == null) {
        // TODO: we might want to get the list of ReceiveStream-s from rtpManager and compare
        // it to our list, to see if we should remove a stream.
        logger.warn("TimeoutEvent: null.");
        return;
      }

      // FMJ silently creates new ReceiveStream instances, so we have to recognize them by
      // the SSRC.
      ReceiveStreamDesc receiveStreamDesc =
          findReceiveStream(getReceiveStreamSSRC(receiveStream));

      if (receiveStreamDesc != null) {
        if (logger.isInfoEnabled()) {
          logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc);
        }

        removeReceiveStream(receiveStreamDesc, true);
      }
    } else if (logger.isInfoEnabled()) {
      logger.info(
          "Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event);
    }
  }

  /**
   * Removes a <tt>ReceiveStreamDesc</tt> from this recorder, stopping and closing its
   * <tt>DataSink</tt>, <tt>Processor</tt> and <tt>DataSource</tt>. (The <tt>emptyJB</tt>
   * parameter is currently unused; the packet buffer is always emptied for video streams.)
   */
  private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) {
    if (receiveStream.format instanceof VideoFormat) {
      rtpConnector.packetBuffer.disable(receiveStream.ssrc);
      emptyPacketBuffer(receiveStream.ssrc);
    }

    if (receiveStream.dataSink != null) {
      try {
        receiveStream.dataSink.stop();
      } catch (IOException e) {
        logger.error("Failed to stop DataSink " + e);
      }

      receiveStream.dataSink.close();
    }

    if (receiveStream.processor != null) {
      receiveStream.processor.stop();
      receiveStream.processor.close();
    }

    DataSource dataSource = receiveStream.receiveStream.getDataSource();

    if (dataSource != null) {
      try {
        dataSource.stop();
      } catch (IOException ioe) {
        logger.warn("Failed to stop DataSource");
      }
      dataSource.disconnect();
    }

    synchronized (receiveStreams) {
      receiveStreams.remove(receiveStream);
    }
  }
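  /*
   * Illustrative sketch (not part of the original class): controllerUpdate() below drives the
   * standard JMF Processor state machine (configure -> realize -> start) through asynchronous
   * events. The same progression in isolation, against an arbitrary media locator (the file
   * path is illustrative):
   */
  static class ProcessorStateMachineSketch implements ControllerListener {
    public void demo() throws NoProcessorException, IOException {
      Processor processor = Manager.createProcessor(new MediaLocator("file:/tmp/input.wav"));

      processor.addControllerListener(this);
      processor.configure(); // asynchronous; completion arrives via controllerUpdate()
    }

    @Override
    public void controllerUpdate(ControllerEvent ev) {
      Processor processor = (Processor) ev.getSourceController();

      if (ev instanceof ConfigureCompleteEvent) {
        // Tracks and the content descriptor may be set here, as controllerUpdate() does.
        processor.realize();
      } else if (ev instanceof RealizeCompleteEvent) {
        // The output DataSource exists now and can be wired to a DataSink.
        processor.start();
      }
    }
  }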
  /**
   * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events
   * from the <tt>Processor</tt>s that this instance uses to transcode media.
   *
   * @param ev the event to handle.
   */
  public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
      return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
      logger.warn("Event from an orphaned processor, ignoring: " + ev);
      return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
      if (logger.isInfoEnabled()) {
        logger.info(
            "Configured processor for ReceiveStream ssrc="
                + desc.ssrc
                + " ("
                + desc.format
                + ")"
                + " "
                + System.currentTimeMillis());
      }

      boolean audio = desc.format instanceof AudioFormat;

      if (audio) {
        ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);

        if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
          logger.error(
              "Failed to set the Processor content descriptor to "
                  + AUDIO_CONTENT_DESCRIPTOR
                  + ". Actual result: "
                  + cd);
          removeReceiveStream(desc, false);
          return;
        }
      }

      for (TrackControl track : processor.getTrackControls()) {
        Format trackFormat = track.getFormat();

        if (audio) {
          final long ssrc = desc.ssrc;
          SilenceEffect silenceEffect;

          if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
            silenceEffect = new SilenceEffect(48000);
          } else {
            // We haven't tested that the RTP timestamps survive the journey through the
            // chain when codecs other than Opus are in use, so for the moment we rely on
            // FMJ's timestamps for non-Opus formats.
            silenceEffect = new SilenceEffect();
          }

          silenceEffect.setListener(
              new SilenceEffect.Listener() {
                boolean first = true;

                @Override
                public void onSilenceNotInserted(long timestamp) {
                  if (first) {
                    first = false;
                    // send event only
                    audioRecordingStarted(ssrc, timestamp);
                  } else {
                    // change file and send event
                    resetRecording(ssrc, timestamp);
                  }
                }
              });
          desc.silenceEffect = silenceEffect;

          AudioLevelEffect audioLevelEffect = new AudioLevelEffect();

          audioLevelEffect.setAudioLevelListener(
              new SimpleAudioLevelListener() {
                @Override
                public void audioLevelChanged(int level) {
                  activeSpeakerDetector.levelChanged(ssrc, level);
                }
              });

          try {
            // We add an effect, which will insert "silence" in place of lost packets.
            track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
          } catch (UnsupportedPlugInException upie) {
            logger.warn("Failed to insert silence effect: " + upie);
            // But do go on, a recording without extra silence is better than nothing ;)
          }
        } else {
          // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
          if (trackFormat.matches(vp8RtpFormat))
            track.setFormat(vp8Format);
          else {
            logger.error(
                "Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
            // we currently only support vp8
            removeReceiveStream(desc, false);
            return;
          }
        }
      }

      processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      desc.dataSource = processor.getDataOutput();

      long ssrc = desc.ssrc;
      boolean audio = desc.format instanceof AudioFormat;
      String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

      // XXX '\' on windows?
      String filename = getNextFilename(path + "/" + ssrc, suffix);

      desc.filename = filename;

      DataSink dataSink;

      if (audio) {
        try {
          dataSink =
              Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
        } catch (NoDataSinkException ndse) {
          logger.error("Could not create DataSink: " + ndse);
          removeReceiveStream(desc, false);
          return;
        }
      } else {
        dataSink = new WebmDataSink(filename, desc.dataSource);
      }

      if (logger.isInfoEnabled())
        logger.info(
            "Created DataSink ("
                + dataSink
                + ") for SSRC="
                + ssrc
                + ". Output filename: "
                + filename);
      try {
        dataSink.open();
      } catch (IOException e) {
        logger.error(
            "Failed to open DataSink (" + dataSink + ") for SSRC=" + ssrc + ": " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (!audio) {
        final WebmDataSink webmDataSink = (WebmDataSink) dataSink;

        webmDataSink.setSsrc(ssrc);
        webmDataSink.setEventHandler(eventHandler);
        webmDataSink.setKeyFrameControl(
            new KeyFrameControlAdapter() {
              @Override
              public boolean requestKeyFrame(boolean urgent) {
                return requestFIR(webmDataSink);
              }
            });
      }

      try {
        dataSink.start();
      } catch (IOException e) {
        logger.error(
            "Failed to start DataSink (" + dataSink + ") for SSRC=" + ssrc + ". " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (logger.isInfoEnabled())
        logger.info("Started DataSink for SSRC=" + ssrc);

      desc.dataSink = dataSink;

      processor.start();
    } else if (logger.isDebugEnabled()) {
      logger.debug(
          "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
  }

  /**
   * Restarts the recording for a specific SSRC.
   *
   * @param ssrc the SSRC for which to restart recording.
   * @param timestamp the timestamp of the first RTP packet of the new recording.
   */
  private void resetRecording(long ssrc, long timestamp) {
    ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);

    // we only restart audio recordings
    if (receiveStream != null && receiveStream.format instanceof AudioFormat) {
      String newFilename = getNextFilename(path + "/" + ssrc, AUDIO_FILENAME_SUFFIX);

      // flush the buffer contained in the MP3 encoder
      String s = "trying to flush ssrc=" + ssrc;
      Processor p = receiveStream.processor;

      if (p != null) {
        s += " p!=null";
        for (TrackControl tc : p.getTrackControls()) {
          Object o = tc.getControl(FlushableControl.class.getName());

          if (o != null)
            ((FlushableControl) o).flush();
        }
      }
      if (logger.isDebugEnabled())
        logger.debug(s);

      if (logger.isInfoEnabled()) {
        logger.info(
            "Restarting recording for SSRC=" + ssrc + ". New filename: " + newFilename);
      }

      receiveStream.dataSink.close();
      receiveStream.dataSink = null;

      // flush the FMJ jitter buffer
      // DataSource ds = receiveStream.receiveStream.getDataSource();
      // if (ds instanceof net.sf.fmj.media.protocol.rtp.DataSource)
      //     ((net.sf.fmj.media.protocol.rtp.DataSource) ds).flush();

      receiveStream.filename = newFilename;
      try {
        receiveStream.dataSink =
            Manager.createDataSink(
                receiveStream.dataSource, new MediaLocator("file:" + newFilename));
      } catch (NoDataSinkException ndse) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ndse);
        removeReceiveStream(receiveStream, false);
        // cannot continue without a DataSink
        return;
      }

      try {
        receiveStream.dataSink.open();
        receiveStream.dataSink.start();
      } catch (IOException ioe) {
        logger.warn("Could not reset recording for SSRC=" + ssrc + ": " + ioe);
        removeReceiveStream(receiveStream, false);
        return;
      }

      audioRecordingStarted(ssrc, timestamp);
    }
  }

  private void audioRecordingStarted(long ssrc, long timestamp) {
    ReceiveStreamDesc desc = findReceiveStream(ssrc);

    if (desc == null)
      return;

    RecorderEvent event = new RecorderEvent();

    event.setType(RecorderEvent.Type.RECORDING_STARTED);
    event.setMediaType(MediaType.AUDIO);
    event.setSsrc(ssrc);
    event.setRtpTimestamp(timestamp);
    event.setFilename(desc.filename);

    if (eventHandler != null)
      eventHandler.handleEvent(event);
  }

  /**
   * Handles a request from a specific <tt>DataSink</tt> to request a keyframe by sending an
   * RTCP feedback FIR message to the media source.
   *
   * @param dataSink the <tt>DataSink</tt> which requests that a keyframe be requested with a
   *     FIR message.
   * @return <tt>true</tt> if a keyframe was successfully requested, <tt>false</tt> otherwise
   */
  private boolean requestFIR(WebmDataSink dataSink) {
    ReceiveStreamDesc desc = findReceiveStream(dataSink);

    if (desc != null && rtcpFeedbackSender != null) {
      return rtcpFeedbackSender.sendFIR((int) desc.ssrc);
    }

    return false;
  }

  /**
   * Returns "prefix"+"suffix" if a file with that name does not exist. Otherwise, returns the
   * first nonexistent filename of the form "prefix-"+i+"suffix", for an integer i. i is
   * bounded by 1000 to prevent hanging; on failure to find a nonexistent filename the method
   * returns null.
   *
   * @param prefix the filename prefix (including the directory).
   * @param suffix the filename suffix (e.g. the extension).
   * @return the first available filename, or null if none was found.
   */
  private String getNextFilename(String prefix, String suffix) {
    if (!new File(prefix + suffix).exists())
      return prefix + suffix;

    int i = 1;
    String s;

    do {
      s = prefix + "-" + i + suffix;
      if (!new File(s).exists())
        return s;
      i++;
    } while (i < 1000); // don't hang indefinitely...

    return null;
  }
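  /*
   * Worked example (illustrative, not part of the original class): getNextFilename() picks
   * the first name that does not collide with an existing file. With "1234.mp3" and
   * "1234-1.mp3" already present under /tmp/rec-demo (paths illustrative):
   *
   *   getNextFilename("/tmp/rec-demo/1234", ".mp3") -> "/tmp/rec-demo/1234-2.mp3"
   *
   * i.e. the plain name first, then "-1", "-2", ... up to the bound of 1000.
   */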
 */
private ReceiveStreamDesc findReceiveStream(Processor processor) {
  if (processor == null) return null;

  synchronized (receiveStreams) {
    for (ReceiveStreamDesc r : receiveStreams) if (processor.equals(r.processor)) return r;
  }

  return null;
}

/**
 * Finds the <tt>ReceiveStreamDesc</tt> with a particular <tt>DataSink</tt>
 *
 * @param dataSink The <tt>DataSink</tt> to match.
 * @return the <tt>ReceiveStreamDesc</tt> with a particular <tt>DataSink</tt>, or <tt>null</tt>.
 */
private ReceiveStreamDesc findReceiveStream(DataSink dataSink) {
  if (dataSink == null) return null;

  synchronized (receiveStreams) {
    for (ReceiveStreamDesc r : receiveStreams) if (dataSink.equals(r.dataSink)) return r;
  }

  return null;
}

/**
 * Finds the <tt>ReceiveStreamDesc</tt> with a particular SSRC.
 *
 * @param ssrc The SSRC to match.
 * @return the <tt>ReceiveStreamDesc</tt> with a particular SSRC, or <tt>null</tt>.
 */
private ReceiveStreamDesc findReceiveStream(long ssrc) {
  synchronized (receiveStreams) {
    for (ReceiveStreamDesc r : receiveStreams) if (ssrc == r.ssrc) return r;
  }

  return null;
}

/**
 * Gets the SSRC of a <tt>ReceiveStream</tt> as a (non-negative) <tt>long</tt>.
 *
 * <p>FMJ stores the 32-bit SSRC values in <tt>int</tt>s, and the
 * <tt>ReceiveStream.getSSRC()</tt> implementation(s) don't take care of converting the negative
 * <tt>int</tt> values sometimes resulting from reading of a 32-bit field into the correct
 * unsigned <tt>long</tt> value. So do the conversion here.
 *
 * @param receiveStream the <tt>ReceiveStream</tt> for which to get the SSRC.
 * @return the SSRC of <tt>receiveStream</tt> as a (non-negative) <tt>long</tt>.
 */
private long getReceiveStreamSSRC(ReceiveStream receiveStream) {
  return 0xffffffffL & receiveStream.getSSRC();
}

/**
 * Implements {@link ActiveSpeakerChangedListener#activeSpeakerChanged(long)}. Notifies this
 * <tt>RecorderRtpImpl</tt> that the audio <tt>ReceiveStream</tt> considered active has changed,
 * and that the new active stream has SSRC <tt>ssrc</tt>.
 *
 * @param ssrc the SSRC of the new active stream.
 */
@Override
public void activeSpeakerChanged(long ssrc) {
  if (eventHandler != null) {
    RecorderEvent e = new RecorderEvent();
    e.setAudioSsrc(ssrc);
    // TODO: how do we time this?
e.setInstant(System.currentTimeMillis()); e.setType(RecorderEvent.Type.SPEAKER_CHANGED); e.setMediaType(MediaType.VIDEO); eventHandler.handleEvent(e); } } private void handleRtpPacket(RawPacket pkt) { if (pkt != null && pkt.getPayloadType() == vp8PayloadType) { int ssrc = pkt.getSSRC(); if (!activeVideoSsrcs.contains(ssrc & 0xffffffffL)) { synchronized (activeVideoSsrcs) { if (!activeVideoSsrcs.contains(ssrc & 0xffffffffL)) { activeVideoSsrcs.add(ssrc & 0xffffffffL); rtcpFeedbackSender.sendFIR(ssrc); } } } } } private void handleRtcpPacket(RawPacket pkt) { getSynchronizer().addRTCPPacket(pkt); eventHandler.nudge(); } public SynchronizerImpl getSynchronizer() { if (synchronizer == null) synchronizer = new SynchronizerImpl(); return synchronizer; } public void setSynchronizer(Synchronizer synchronizer) { if (synchronizer instanceof SynchronizerImpl) { this.synchronizer = (SynchronizerImpl) synchronizer; } } public void connect(Recorder recorder) { if (!(recorder instanceof RecorderRtpImpl)) return; ((RecorderRtpImpl) recorder).setSynchronizer(getSynchronizer()); } private void emptyPacketBuffer(long ssrc) { RawPacket[] pkts = rtpConnector.packetBuffer.emptyBuffer(ssrc); RTPConnectorImpl.OutputDataStreamImpl dataStream; try { dataStream = rtpConnector.getDataOutputStream(); } catch (IOException ioe) { logger.error("Failed to empty packet buffer for SSRC=" + ssrc + ": " + ioe); return; } for (RawPacket pkt : pkts) dataStream.write( pkt.getBuffer(), pkt.getOffset(), pkt.getLength(), false /* already transformed */); } /** The <tt>RTPConnector</tt> implementation used by this <tt>RecorderRtpImpl</tt>. */ private class RTPConnectorImpl implements RTPConnector { private PushSourceStreamImpl controlInputStream; private OutputDataStreamImpl controlOutputStream; private PushSourceStreamImpl dataInputStream; private OutputDataStreamImpl dataOutputStream; private SourceTransferHandler dataTransferHandler; private SourceTransferHandler controlTransferHandler; private RawPacket pendingDataPacket = new RawPacket(); private RawPacket pendingControlPacket = new RawPacket(); private PacketTransformer rtpPacketTransformer = null; private PacketTransformer rtcpPacketTransformer = null; /** The PacketBuffer instance which we use as a jitter buffer. */ private PacketBuffer packetBuffer; private RTPConnectorImpl(byte redPT, byte ulpfecPT) { packetBuffer = new PacketBuffer(); // The chain of transformers will be applied in reverse order for // incoming packets. 
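// For an incoming packet that means the effective processing order is:
//   REDTransformEngine -> FECTransformEngine -> CompoundPacketEngine
//     -> TransformEngineImpl -> PacketBuffer
// i.e. RED encapsulation is stripped first, FEC recovery runs next, compound
// RTCP packets are split, this recorder gets to inspect the packet in
// TransformEngineImpl, and the packet finally lands in the jitter buffer.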
TransformEngine transformEngine = new TransformEngineChain( new TransformEngine[] { packetBuffer, new TransformEngineImpl(), new CompoundPacketEngine(), new FECTransformEngine(ulpfecPT, (byte) -1), new REDTransformEngine(redPT, (byte) -1) }); rtpPacketTransformer = transformEngine.getRTPTransformer(); rtcpPacketTransformer = transformEngine.getRTCPTransformer(); } private RTPConnectorImpl() {} @Override public void close() { try { if (dataOutputStream != null) dataOutputStream.close(); if (controlOutputStream != null) controlOutputStream.close(); } catch (IOException ioe) { throw new UndeclaredThrowableException(ioe); } } @Override public PushSourceStream getControlInputStream() throws IOException { if (controlInputStream == null) { controlInputStream = new PushSourceStreamImpl(true); } return controlInputStream; } @Override public OutputDataStream getControlOutputStream() throws IOException { if (controlOutputStream == null) { controlOutputStream = new OutputDataStreamImpl(true); } return controlOutputStream; } @Override public PushSourceStream getDataInputStream() throws IOException { if (dataInputStream == null) { dataInputStream = new PushSourceStreamImpl(false); } return dataInputStream; } @Override public OutputDataStreamImpl getDataOutputStream() throws IOException { if (dataOutputStream == null) { dataOutputStream = new OutputDataStreamImpl(false); } return dataOutputStream; } @Override public double getRTCPBandwidthFraction() { return -1; } @Override public double getRTCPSenderBandwidthFraction() { return -1; } @Override public int getReceiveBufferSize() { // TODO Auto-generated method stub return 0; } @Override public int getSendBufferSize() { // TODO Auto-generated method stub return 0; } @Override public void setReceiveBufferSize(int arg0) throws IOException { // TODO Auto-generated method stub } @Override public void setSendBufferSize(int arg0) throws IOException { // TODO Auto-generated method stub } private class OutputDataStreamImpl implements OutputDataStream { boolean isControlStream; private RawPacket[] rawPacketArray = new RawPacket[1]; public OutputDataStreamImpl(boolean isControlStream) { this.isControlStream = isControlStream; } public int write(byte[] buffer, int offset, int length) { return write(buffer, offset, length, true); } public int write(byte[] buffer, int offset, int length, boolean transform) { RawPacket pkt = rawPacketArray[0]; if (pkt == null) pkt = new RawPacket(); rawPacketArray[0] = pkt; byte[] pktBuf = pkt.getBuffer(); if (pktBuf == null || pktBuf.length < length) { pktBuf = new byte[length]; pkt.setBuffer(pktBuf); } System.arraycopy(buffer, offset, pktBuf, 0, length); pkt.setOffset(0); pkt.setLength(length); if (transform) { PacketTransformer packetTransformer = isControlStream ? 
rtcpPacketTransformer : rtpPacketTransformer; if (packetTransformer != null) rawPacketArray = packetTransformer.reverseTransform(rawPacketArray); } SourceTransferHandler transferHandler; PushSourceStream pushSourceStream; try { if (isControlStream) { transferHandler = controlTransferHandler; pushSourceStream = getControlInputStream(); } else { transferHandler = dataTransferHandler; pushSourceStream = getDataInputStream(); } } catch (IOException ioe) { throw new UndeclaredThrowableException(ioe); } for (int i = 0; i < rawPacketArray.length; i++) { RawPacket packet = rawPacketArray[i]; // keep the first element for reuse if (i != 0) rawPacketArray[i] = null; if (packet != null) { if (isControlStream) pendingControlPacket = packet; else pendingDataPacket = packet; if (transferHandler != null) { transferHandler.transferData(pushSourceStream); } } } return length; } public void close() throws IOException {} } /** * A dummy implementation of {@link PushSourceStream}. * * @author Vladimir Marinov */ private class PushSourceStreamImpl implements PushSourceStream { private boolean isControlStream = false; public PushSourceStreamImpl(boolean isControlStream) { this.isControlStream = isControlStream; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public boolean endOfStream() { return false; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public ContentDescriptor getContentDescriptor() { return null; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public long getContentLength() { return 0; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public Object getControl(String arg0) { return null; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public Object[] getControls() { return null; } /** Not implemented because there are currently no uses of the underlying functionality. */ @Override public int getMinimumTransferSize() { if (isControlStream) { if (pendingControlPacket.getBuffer() != null) { return pendingControlPacket.getLength(); } } else { if (pendingDataPacket.getBuffer() != null) { return pendingDataPacket.getLength(); } } return 0; } @Override public int read(byte[] buffer, int offset, int length) throws IOException { RawPacket pendingPacket; if (isControlStream) { pendingPacket = pendingControlPacket; } else { pendingPacket = pendingDataPacket; } int bytesToRead = 0; byte[] pendingPacketBuffer = pendingPacket.getBuffer(); if (pendingPacketBuffer != null) { int pendingPacketLength = pendingPacket.getLength(); bytesToRead = length > pendingPacketLength ? pendingPacketLength : length; System.arraycopy( pendingPacketBuffer, pendingPacket.getOffset(), buffer, offset, bytesToRead); } return bytesToRead; } /** * {@inheritDoc} * * <p>We keep the first non-null <tt>SourceTransferHandler</tt> that was set, because we don't * want it to be overwritten when we initialize a second <tt>RTPManager</tt> with this * <tt>RTPConnector</tt>. 
 *
 * <p>See {@link RecorderRtpImpl#start(String, String)}
 */
@Override
public void setTransferHandler(SourceTransferHandler transferHandler) {
  if (isControlStream) {
    if (RTPConnectorImpl.this.controlTransferHandler == null) {
      RTPConnectorImpl.this.controlTransferHandler = transferHandler;
    }
  } else {
    if (RTPConnectorImpl.this.dataTransferHandler == null) {
      RTPConnectorImpl.this.dataTransferHandler = transferHandler;
    }
  }
}
}

/**
 * A transform engine implementation which allows <tt>RecorderRtpImpl</tt> to intercept incoming
 * RTP and RTCP packets.
 */
private class TransformEngineImpl implements TransformEngine {
  SinglePacketTransformer rtpTransformer =
      new SinglePacketTransformer() {
        @Override
        public RawPacket transform(RawPacket pkt) {
          return pkt;
        }

        @Override
        public RawPacket reverseTransform(RawPacket pkt) {
          RecorderRtpImpl.this.handleRtpPacket(pkt);
          return pkt;
        }

        @Override
        public void close() {}
      };

  SinglePacketTransformer rtcpTransformer =
      new SinglePacketTransformer() {
        @Override
        public RawPacket transform(RawPacket pkt) {
          return pkt;
        }

        @Override
        public RawPacket reverseTransform(RawPacket pkt) {
          RecorderRtpImpl.this.handleRtcpPacket(pkt);
          if (pkt != null && pkt.getRTCPPayloadType() == 203) {
            // An RTCP BYE packet. Remove the receive stream before
            // it gets to FMJ, because we want to, for example,
            // flush the packet buffer before that.
            long ssrc = pkt.getRTCPSSRC() & 0xffffffffL;
            if (logger.isInfoEnabled()) logger.info("RTCP BYE for SSRC=" + ssrc);

            ReceiveStreamDesc receiveStream = findReceiveStream(ssrc);
            if (receiveStream != null) removeReceiveStream(receiveStream, false);
          }

          return pkt;
        }

        @Override
        public void close() {}
      };

  @Override
  public PacketTransformer getRTPTransformer() {
    return rtpTransformer;
  }

  @Override
  public PacketTransformer getRTCPTransformer() {
    return rtcpTransformer;
  }
}
}

private class RecorderEventHandlerImpl implements RecorderEventHandler {
  private RecorderEventHandler handler;

  private final Set<RecorderEvent> pendingEvents = new HashSet<RecorderEvent>();

  private RecorderEventHandlerImpl(RecorderEventHandler handler) {
    this.handler = handler;
  }

  @Override
  public boolean handleEvent(RecorderEvent ev) {
    if (ev == null) return true;

    if (RecorderEvent.Type.RECORDING_STARTED.equals(ev.getType())) {
      long instant = getSynchronizer().getLocalTime(ev.getSsrc(), ev.getRtpTimestamp());
      if (instant != -1) {
        ev.setInstant(instant);
        return handler.handleEvent(ev);
      } else {
        pendingEvents.add(ev);
        return true;
      }
    }

    return handler.handleEvent(ev);
  }

  private void nudge() {
    for (Iterator<RecorderEvent> iter = pendingEvents.iterator(); iter.hasNext(); ) {
      RecorderEvent ev = iter.next();
      long instant = getSynchronizer().getLocalTime(ev.getSsrc(), ev.getRtpTimestamp());
      if (instant != -1) {
        iter.remove();
        ev.setInstant(instant);
        handler.handleEvent(ev);
      }
    }
  }

  @Override
  public void close() {
    for (RecorderEvent ev : pendingEvents) handler.handleEvent(ev);
  }
}

/** Represents a <tt>ReceiveStream</tt> for the purposes of this <tt>RecorderRtpImpl</tt>. */
private class ReceiveStreamDesc {
  /**
   * The actual <tt>ReceiveStream</tt> which is represented by this <tt>ReceiveStreamDesc</tt>.
   */
  private ReceiveStream receiveStream;

  /** The SSRC of the stream. */
  long ssrc;

  /**
   * The <tt>Processor</tt> used to transcode this receive stream into a format appropriate for
   * saving to a file.
   */
  private Processor processor;

  /** The <tt>DataSink</tt> which saves <tt>this.dataSource</tt> to a file.
*/ private DataSink dataSink; /** * The <tt>DataSource</tt> for this receive stream which is to be saved using a * <tt>DataSink</tt> (i.e. the <tt>DataSource</tt> "after" all needed transcoding is done). */ private DataSource dataSource; /** The name of the file into which this stream is being saved. */ private String filename; /** The (original) format of this receive stream. */ private Format format; /** The <tt>SilenceEffect</tt> used for this stream (for audio streams only). */ private SilenceEffect silenceEffect; private ReceiveStreamDesc(ReceiveStream receiveStream) { this.receiveStream = receiveStream; this.ssrc = getReceiveStreamSSRC(receiveStream); } } }
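/*
 * A minimal, self-contained sketch (not part of the recorder itself) showing why
 * getReceiveStreamSSRC() masks with 0xffffffffL: FMJ hands back the 32-bit SSRC as a signed
 * int, so any SSRC with the high bit set arrives negative and must be widened to an unsigned
 * long before it can be compared or logged consistently. The class and values below are
 * illustrative only.
 */
class SsrcMaskingDemo {
  public static void main(String[] args) {
    int rawSsrc = 0xCAFEBABE; // what ReceiveStream.getSSRC() might return
    long unsigned = 0xffffffffL & rawSsrc;
    System.out.println("as signed int:    " + rawSsrc); // -889275714
    System.out.println("as unsigned long: " + unsigned); // 3405691582
  }
}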
/**
 * Implements {@link ReceiveStreamListener#update(ReceiveStreamEvent)}.
 *
 * <p>{@link #rtpManager} will use this to notify us of <tt>ReceiveStreamEvent</tt>s.
 */
@Override
public void update(ReceiveStreamEvent event) {
  if (event == null) return;
  ReceiveStream receiveStream = event.getReceiveStream();

  if (event instanceof NewReceiveStreamEvent) {
    if (receiveStream == null) {
      logger.warn("NewReceiveStreamEvent: null");
      return;
    }

    final long ssrc = getReceiveStreamSSRC(receiveStream);

    ReceiveStreamDesc receiveStreamDesc = findReceiveStream(ssrc);

    if (receiveStreamDesc != null) {
      String s = "NewReceiveStreamEvent for an existing SSRC. ";
      if (receiveStream != receiveStreamDesc.receiveStream)
        s += "(but different ReceiveStream object)";
      logger.warn(s);
      return;
    } else receiveStreamDesc = new ReceiveStreamDesc(receiveStream);

    if (logger.isInfoEnabled()) logger.info("New ReceiveStream, ssrc=" + ssrc);

    // Find the format of the ReceiveStream
    DataSource dataSource = receiveStream.getDataSource();
    if (dataSource instanceof PushBufferDataSource) {
      Format format = null;
      PushBufferDataSource pbds = (PushBufferDataSource) dataSource;
      for (PushBufferStream pbs : pbds.getStreams()) {
        if ((format = pbs.getFormat()) != null) break;
      }

      if (format == null) {
        logger.error("Failed to handle new ReceiveStream: " + "Failed to determine format");
        return;
      }

      receiveStreamDesc.format = format;
    } else {
      logger.error("Failed to handle new ReceiveStream: " + "Unsupported DataSource");
      return;
    }

    int rtpClockRate = -1;
    if (receiveStreamDesc.format instanceof AudioFormat)
      rtpClockRate = (int) ((AudioFormat) receiveStreamDesc.format).getSampleRate();
    else if (receiveStreamDesc.format instanceof VideoFormat) rtpClockRate = 90000;
    getSynchronizer().setRtpClockRate(ssrc, rtpClockRate);

    // create a Processor and configure it
    Processor processor = null;
    try {
      processor = Manager.createProcessor(receiveStream.getDataSource());
    } catch (NoProcessorException npe) {
      logger.error("Failed to create Processor: ", npe);
      return;
    } catch (IOException ioe) {
      logger.error("Failed to create Processor: ", ioe);
      return;
    }

    if (logger.isInfoEnabled()) logger.info("Created processor for SSRC=" + ssrc);

    processor.addControllerListener(this);
    receiveStreamDesc.processor = processor;

    final int streamCount;
    synchronized (receiveStreams) {
      receiveStreams.add(receiveStreamDesc);
      streamCount = receiveStreams.size();
    }

    /*
     * XXX TODO IRBABOON
     * This is a terrible hack which works around a failure to realize()
     * some of the Processor-s for audio streams, when multiple streams
     * start nearly simultaneously. The cause of the problem is currently
     * unknown (and synchronizing all FMJ calls in RecorderRtpImpl
     * does not help).
     * XXX TODO NOOBABRI
     */
    if (receiveStreamDesc.format instanceof AudioFormat) {
      final Processor p = processor;
      new Thread() {
        @Override
        public void run() {
          // delay configuring the processors for the different
          // audio streams to decrease the probability that they
          // run together.
          try {
            int ms = 450 * (streamCount - 1);
            logger.warn(
                "Sleeping for " + ms + "ms before"
                    + " configuring processor for SSRC=" + ssrc + " "
                    + System.currentTimeMillis());
            Thread.sleep(ms);
          } catch (Exception e) {
            // an interrupted sleep is harmless; configure anyway
          }

          p.configure();
        }
      }.start(); // run asynchronously so the sleep does not block the RTP event thread
    } else {
      processor.configure();
    }
  } else if (event instanceof TimeoutEvent) {
    if (receiveStream == null) {
      // TODO: we might want to get the list of ReceiveStream-s from
      // rtpManager and compare it to our list, to see if we should
      // remove a stream.
logger.warn("TimeoutEvent: null."); return; } // FMJ silently creates new ReceiveStream instances, so we have to // recognize them by the SSRC. ReceiveStreamDesc receiveStreamDesc = findReceiveStream(getReceiveStreamSSRC(receiveStream)); if (receiveStreamDesc != null) { if (logger.isInfoEnabled()) { logger.info("ReceiveStream timeout, ssrc=" + receiveStreamDesc.ssrc); } removeReceiveStream(receiveStreamDesc, true); } } else if (event != null && logger.isInfoEnabled()) { logger.info("Unhandled ReceiveStreamEvent (" + event.getClass().getName() + "): " + event); } }
/** * Implements an audio <tt>CaptureDevice</tt> using {@link AudioRecord}. * * @author Lyubomir Marinov */ public class DataSource extends AbstractPullBufferCaptureDevice { /** * The <tt>Logger</tt> used by the <tt>DataSource</tt> class and its instances for logging output. */ private static final Logger logger = Logger.getLogger(DataSource.class); /** * The priority to be set to the thread executing the {@link AudioRecordStream#read(Buffer)} * method of a given <tt>AudioRecordStream</tt>. */ private static final int THREAD_PRIORITY = Process.THREAD_PRIORITY_URGENT_AUDIO; /** Initializes a new <tt>DataSource</tt> instance. */ public DataSource() {} /** * Initializes a new <tt>DataSource</tt> from a specific <tt>MediaLocator</tt>. * * @param locator the <tt>MediaLocator</tt> to create the new instance from */ public DataSource(MediaLocator locator) { super(locator); } /** * Creates a new <tt>PullBufferStream</tt> which is to be at a specific zero-based index in the * list of streams of this <tt>PullBufferDataSource</tt>. The <tt>Format</tt>-related information * of the new instance is to be abstracted by a specific <tt>FormatControl</tt>. * * @param streamIndex the zero-based index of the <tt>PullBufferStream</tt> in the list of streams * of this <tt>PullBufferDataSource</tt> * @param formatControl the <tt>FormatControl</tt> which is to abstract the * <tt>Format</tt>-related information of the new instance * @return a new <tt>PullBufferStream</tt> which is to be at the specified <tt>streamIndex</tt> in * the list of streams of this <tt>PullBufferDataSource</tt> and which has its * <tt>Format</tt>-related information abstracted by the specified <tt>formatControl</tt> * @see AbstractPullBufferCaptureDevice#createStream(int, FormatControl) */ protected AbstractPullBufferStream createStream(int streamIndex, FormatControl formatControl) { return new AudioRecordStream(this, formatControl); } /** * Opens a connection to the media source specified by the <tt>MediaLocator</tt> of this * <tt>DataSource</tt>. * * @throws IOException if anything goes wrong while opening the connection to the media source * specified by the <tt>MediaLocator</tt> of this <tt>DataSource</tt> * @see AbstractPullBufferCaptureDevice#doConnect() */ @Override protected void doConnect() throws IOException { super.doConnect(); /* * XXX The AudioRecordStream will connect upon start in order to be able * to respect requests to set its format. */ } /** * Closes the connection to the media source specified by the <tt>MediaLocator</tt> of this * <tt>DataSource</tt>. * * @see AbstractPullBufferCaptureDevice#doDisconnect() */ @Override protected void doDisconnect() { synchronized (getStreamSyncRoot()) { Object[] streams = streams(); if (streams != null) for (Object stream : streams) ((AudioRecordStream) stream).disconnect(); } super.doDisconnect(); } /** Sets the priority of the calling thread to {@link #THREAD_PRIORITY}. */ public static void setThreadPriority() { setThreadPriority(THREAD_PRIORITY); } /** * Sets the priority of the calling thread to a specific value. 
 *
 * @param threadPriority the priority to be set on the calling thread
 */
public static void setThreadPriority(int threadPriority) {
  Throwable exception = null;

  try {
    Process.setThreadPriority(threadPriority);
  } catch (IllegalArgumentException iae) {
    exception = iae;
  } catch (SecurityException se) {
    exception = se;
  }
  if (exception != null) logger.warn("Failed to set thread priority.", exception);
}

/**
 * Attempts to set the <tt>Format</tt> to be reported by the <tt>FormatControl</tt> of a
 * <tt>PullBufferStream</tt> at a specific zero-based index in the list of streams of this
 * <tt>PullBufferDataSource</tt>. The <tt>PullBufferStream</tt> does not exist at the time of
 * the attempt to set its <tt>Format</tt>. Overrides the default behavior (which is to not
 * attempt to set the specified <tt>Format</tt>) in order to enable setting the <tt>Format</tt>
 * prior to creating the <tt>PullBufferStream</tt>.
 *
 * @param streamIndex the zero-based index of the <tt>PullBufferStream</tt> the <tt>Format</tt>
 *     of which is to be set
 * @param oldValue the last-known <tt>Format</tt> for the <tt>PullBufferStream</tt> at the
 *     specified <tt>streamIndex</tt>
 * @param newValue the <tt>Format</tt> which is to be set
 * @return the <tt>Format</tt> to be reported by the <tt>FormatControl</tt> of the
 *     <tt>PullBufferStream</tt> at the specified <tt>streamIndex</tt> in the list of streams of
 *     this <tt>PullBufferDataSource</tt> or <tt>null</tt> if the attempt to set the
 *     <tt>Format</tt> did not succeed and any last-known <tt>Format</tt> is to be left in
 *     effect
 * @see AbstractPullBufferCaptureDevice#setFormat(int, Format, Format)
 */
@Override
protected Format setFormat(int streamIndex, Format oldValue, Format newValue) {
  /*
   * Accept format specifications prior to the initialization of
   * AudioRecordStream. Afterwards, AudioRecordStream will decide whether
   * to accept further format specifications.
   */
  return newValue;
}

/** Implements an audio <tt>PullBufferStream</tt> using {@link AudioRecord}. */
private static class AudioRecordStream extends AbstractPullBufferStream<DataSource>
    implements AudioEffect.OnEnableStatusChangeListener {
  /** The <tt>android.media.AudioRecord</tt> which does the actual capturing of audio. */
  private AudioRecord audioRecord;

  /** The <tt>GainControl</tt> through which the volume/gain of captured media is controlled. */
  private final GainControl gainControl;

  /**
   * The length in bytes of the media data read into a <tt>Buffer</tt> via a call to {@link
   * #read(Buffer)}.
   */
  private int length;

  /**
   * The indicator which determines whether this <tt>AudioRecordStream</tt> is to set the
   * priority of the thread in which its {@link #read(Buffer)} method is executed.
   */
  private boolean setThreadPriority = true;

  /**
   * Initializes a new <tt>AudioRecordStream</tt> instance which is to have its
   * <tt>Format</tt>-related information abstracted by a specific <tt>FormatControl</tt>.
   *
   * @param dataSource the <tt>DataSource</tt> which is creating the new instance so that it
   *     becomes one of its <tt>streams</tt>
   * @param formatControl the <tt>FormatControl</tt> which is to abstract the
   *     <tt>Format</tt>-related information of the new instance
   */
  public AudioRecordStream(DataSource dataSource, FormatControl formatControl) {
    super(dataSource, formatControl);

    MediaServiceImpl mediaServiceImpl = NeomediaActivator.getMediaServiceImpl();

    gainControl =
        (mediaServiceImpl == null) ?
null : (GainControl) mediaServiceImpl.getInputVolumeControl(); } /** * Opens a connection to the media source of the associated <tt>DataSource</tt>. * * @throws IOException if anything goes wrong while opening a connection to the media source of * the associated <tt>DataSource</tt> */ public synchronized void connect() throws IOException { javax.media.format.AudioFormat af = (javax.media.format.AudioFormat) getFormat(); int channels = af.getChannels(); int channelConfig; switch (channels) { case Format.NOT_SPECIFIED: case 1: channelConfig = AudioFormat.CHANNEL_IN_MONO; break; case 2: channelConfig = AudioFormat.CHANNEL_IN_STEREO; break; default: throw new IOException("channels"); } int sampleSizeInBits = af.getSampleSizeInBits(); int audioFormat; switch (sampleSizeInBits) { case 8: audioFormat = AudioFormat.ENCODING_PCM_8BIT; break; case 16: audioFormat = AudioFormat.ENCODING_PCM_16BIT; break; default: throw new IOException("sampleSizeInBits"); } double sampleRate = af.getSampleRate(); length = (int) Math.round( 20 /* milliseconds */ * (sampleRate / 1000) * channels * (sampleSizeInBits / 8)); /* * Apart from the thread in which #read(Buffer) is executed, use the * thread priority for the thread which will create the AudioRecord. */ setThreadPriority(); try { int minBufferSize = AudioRecord.getMinBufferSize((int) sampleRate, channelConfig, audioFormat); audioRecord = new AudioRecord( MediaRecorder.AudioSource.DEFAULT, (int) sampleRate, channelConfig, audioFormat, Math.max(length, minBufferSize)); // tries to configure audio effects if available configureEffects(); } catch (IllegalArgumentException iae) { IOException ioe = new IOException(); ioe.initCause(iae); throw ioe; } setThreadPriority = true; } /** Configures echo cancellation and noise suppression effects. */ @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void configureEffects() { if (!AndroidUtils.hasAPI(16)) return; AudioSystem audioSystem = AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_AUDIORECORD); // Creates echo canceler if available if (AcousticEchoCanceler.isAvailable()) { AcousticEchoCanceler echoCanceller = AcousticEchoCanceler.create(audioRecord.getAudioSessionId()); if (echoCanceller != null) { echoCanceller.setEnableStatusListener(this); echoCanceller.setEnabled(audioSystem.isEchoCancel()); logger.info("Echo cancellation: " + echoCanceller.getEnabled()); } } // Automatic gain control if (AutomaticGainControl.isAvailable()) { AutomaticGainControl agc = AutomaticGainControl.create(audioRecord.getAudioSessionId()); if (agc != null) { agc.setEnableStatusListener(this); agc.setEnabled(audioSystem.isAutomaticGainControl()); logger.info("Auto gain control: " + agc.getEnabled()); } } // Creates noise suppressor if available if (NoiseSuppressor.isAvailable()) { NoiseSuppressor noiseSuppressor = NoiseSuppressor.create(audioRecord.getAudioSessionId()); if (noiseSuppressor != null) { noiseSuppressor.setEnableStatusListener(this); noiseSuppressor.setEnabled(audioSystem.isDenoise()); logger.info("Noise suppressor: " + noiseSuppressor.getEnabled()); } } } /** Closes the connection to the media source of the associated <tt>DataSource</tt>. */ public synchronized void disconnect() { if (audioRecord != null) { audioRecord.release(); audioRecord = null; setThreadPriority = true; } } /** * Attempts to set the <tt>Format</tt> of this <tt>AbstractBufferStream</tt>. 
* * @param format the <tt>Format</tt> to be set as the format of this * <tt>AbstractBufferStream</tt> * @return the <tt>Format</tt> of this <tt>AbstractBufferStream</tt> or <tt>null</tt> if the * attempt to set the <tt>Format</tt> did not succeed and any last-known <tt>Format</tt> is * to be left in effect * @see AbstractPullBufferStream#doSetFormat(Format) */ @Override protected synchronized Format doSetFormat(Format format) { return (audioRecord == null) ? format : null; } /** * Reads media data from this <tt>PullBufferStream</tt> into a specific <tt>Buffer</tt> with * blocking. * * @param buffer the <tt>Buffer</tt> in which media data is to be read from this * <tt>PullBufferStream</tt> * @throws IOException if anything goes wrong while reading media data from this * <tt>PullBufferStream</tt> into the specified <tt>buffer</tt> * @see javax.media.protocol.PullBufferStream#read(javax.media.Buffer) */ public void read(Buffer buffer) throws IOException { if (setThreadPriority) { setThreadPriority = false; setThreadPriority(); } Object data = buffer.getData(); int length = this.length; if (data instanceof byte[]) { if (((byte[]) data).length < length) data = null; } else data = null; if (data == null) { data = new byte[length]; buffer.setData(data); } int toRead = length; byte[] bytes = (byte[]) data; int offset = 0; buffer.setLength(0); while (toRead > 0) { int read; synchronized (this) { if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) read = audioRecord.read(bytes, offset, toRead); else break; } if (read < 0) { throw new IOException( AudioRecord.class.getName() + "#read(byte[], int, int) returned " + read); } else { buffer.setLength(buffer.getLength() + read); offset += read; toRead -= read; } } buffer.setOffset(0); // Apply software gain. if (gainControl != null) { BasicVolumeControl.applyGain(gainControl, bytes, buffer.getOffset(), buffer.getLength()); } } /** * Starts the transfer of media data from this <tt>AbstractBufferStream</tt>. * * @throws IOException if anything goes wrong while starting the transfer of media data from * this <tt>AbstractBufferStream</tt> * @see AbstractBufferStream#start() */ @Override public void start() throws IOException { /* * Connect upon start because the connect has been delayed to allow * this AudioRecordStream to respect requests to set its format. */ synchronized (this) { if (audioRecord == null) connect(); } super.start(); synchronized (this) { if (audioRecord != null) { setThreadPriority = true; audioRecord.startRecording(); } } } /** * Stops the transfer of media data from this <tt>AbstractBufferStream</tt>. * * @throws IOException if anything goes wrong while stopping the transfer of media data from * this <tt>AbstractBufferStream</tt> * @see AbstractBufferStream#stop() */ @Override public void stop() throws IOException { synchronized (this) { if (audioRecord != null) { audioRecord.stop(); setThreadPriority = true; } } super.stop(); } @Override public void onEnableStatusChange(AudioEffect effect, boolean enabled) { logger.info(effect.getDescriptor() + " : " + enabled); } } }
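/*
 * A small self-contained example (not part of the capture code) of the 20 ms buffer sizing used
 * by AudioRecordStream.connect(): bytes per read = 20 ms * samples-per-millisecond * channels *
 * bytes-per-sample. The class name and sample values below are illustrative only.
 */
class AudioBufferLengthDemo {
  static int bufferLengthFor20ms(double sampleRate, int channels, int sampleSizeInBits) {
    // Mirrors the expression in AudioRecordStream.connect().
    return (int)
        Math.round(20 /* milliseconds */ * (sampleRate / 1000) * channels * (sampleSizeInBits / 8));
  }

  public static void main(String[] args) {
    // 16 kHz, mono, 16-bit PCM: 20 * 16 * 1 * 2 = 640 bytes per Buffer.
    System.out.println(bufferLengthFor20ms(16000, 1, 16));
  }
}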
/**
 * Implements <tt>BundleActivator</tt> for the neomedia bundle.
 *
 * @author Martin Andre
 * @author Emil Ivov
 * @author Lyubomir Marinov
 * @author Boris Grozev
 */
public class NeomediaActivator implements BundleActivator {
  /**
   * The <tt>Logger</tt> used by the <tt>NeomediaActivator</tt> class and its instances for
   * logging output.
   */
  private final Logger logger = Logger.getLogger(NeomediaActivator.class);

  /** Indicates if the audio configuration form should be disabled, i.e. not visible to the user. */
  private static final String AUDIO_CONFIG_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.AUDIO_CONFIG_DISABLED";

  /** Indicates if the video configuration form should be disabled, i.e. not visible to the user. */
  private static final String VIDEO_CONFIG_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.VIDEO_CONFIG_DISABLED";

  /** Indicates if the H.264 configuration form should be disabled, i.e. not visible to the user. */
  private static final String H264_CONFIG_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.h264config.DISABLED";

  /** Indicates if the ZRTP configuration form should be disabled, i.e. not visible to the user. */
  private static final String ZRTP_CONFIG_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.zrtpconfig.DISABLED";

  /**
   * Indicates if the call recording config form should be disabled, i.e. not visible to the user.
   */
  private static final String CALL_RECORDING_CONFIG_DISABLED_PROP =
      "net.java.sip.communicator.impl.neomedia.callrecordingconfig.DISABLED";

  /**
   * The name of the notification pop-up event displayed when the device configuration has
   * changed.
   */
  private static final String DEVICE_CONFIGURATION_HAS_CHANGED = "DeviceConfigurationChanged";

  /**
   * The context in which the one and only <tt>NeomediaActivator</tt> instance has started
   * executing.
   */
  private static BundleContext bundleContext;

  /**
   * The <tt>ConfigurationService</tt> registered in {@link #bundleContext} and used by the
   * <tt>NeomediaActivator</tt> instance to read and write configuration properties.
   */
  private static ConfigurationService configurationService;

  /**
   * The <tt>FileAccessService</tt> registered in {@link #bundleContext} and used by the
   * <tt>NeomediaActivator</tt> instance to safely access files.
   */
  private static FileAccessService fileAccessService;

  /** The notification service used to pop up messages. */
  private static NotificationService notificationService;

  /**
   * The one and only <tt>MediaServiceImpl</tt> instance registered in {@link #bundleContext} by
   * the <tt>NeomediaActivator</tt> instance.
   */
  private static MediaServiceImpl mediaServiceImpl;

  /**
   * The <tt>ResourceManagementService</tt> registered in {@link #bundleContext} and representing
   * the resources such as internationalized and localized text and images used by the neomedia
   * bundle.
   */
  private static ResourceManagementService resources;

  /**
   * The OSGi <tt>PacketLoggingService</tt> of {@link #mediaServiceImpl} in {@link #bundleContext}
   * and used for debugging.
   */
  private static PacketLoggingService packetLoggingService = null;

  /** A listener to the click on the popup message concerning device configuration changes. */
  private AudioDeviceConfigurationListener deviceConfigurationPropertyChangeListener;

  /** A {@link MediaConfigurationService} instance. */
  // private static MediaConfigurationImpl mediaConfiguration;

  /** The audio configuration form used to define the capture/notify/playback audio devices.
*/ private static ConfigurationForm audioConfigurationForm; /** * Starts the execution of the neomedia bundle in the specified context. * * @param bundleContext the context in which the neomedia bundle is to start executing * @throws Exception if an error occurs while starting the execution of the neomedia bundle in the * specified context */ public void start(BundleContext bundleContext) throws Exception { if (logger.isDebugEnabled()) logger.debug("Started."); NeomediaActivator.bundleContext = bundleContext; // MediaService mediaServiceImpl = (MediaServiceImpl) LibJitsi.getMediaService(); bundleContext.registerService(MediaService.class.getName(), mediaServiceImpl, null); if (logger.isDebugEnabled()) logger.debug("Media Service ... [REGISTERED]"); // mediaConfiguration = new MediaConfigurationImpl(); // bundleContext.registerService( // MediaConfigurationService.class.getStatus(), // getMediaConfiguration(), // null); if (logger.isDebugEnabled()) logger.debug("Media Configuration ... [REGISTERED]"); ConfigurationService cfg = NeomediaActivator.getConfigurationService(); Dictionary<String, String> mediaProps = new Hashtable<String, String>(); mediaProps.put(ConfigurationForm.FORM_TYPE, ConfigurationForm.GENERAL_TYPE); // If the audio configuration form is disabled don't register it. // if ((cfg == null) || !cfg.getBoolean(AUDIO_CONFIG_DISABLED_PROP, false)) // { // audioConfigurationForm // = new LazyConfigurationForm( // AudioConfigurationPanel.class.getStatus(), // getClass().getClassLoader(), // "plugin.mediaconfig.AUDIO_ICON", // "impl.neomedia.configform.AUDIO", // 3); // // bundleContext.registerService( // ConfigurationForm.class.getStatus(), // audioConfigurationForm, // mediaProps); // // if (deviceConfigurationPropertyChangeListener == null) // { // // Initializes and registers the changed device configuration // // event ot the notification service. // getNotificationService(); // // deviceConfigurationPropertyChangeListener // = new AudioDeviceConfigurationListener(); // mediaServiceImpl // .getDeviceConfiguration() // .addPropertyChangeListener( // deviceConfigurationPropertyChangeListener); // } // } // If the video configuration form is disabled don't register it. // if ((cfg == null) || !cfg.getBoolean(VIDEO_CONFIG_DISABLED_PROP, false)) // { // bundleContext.registerService( // ConfigurationForm.class.getStatus(), // new LazyConfigurationForm( // VideoConfigurationPanel.class.getStatus(), // getClass().getClassLoader(), // "plugin.mediaconfig.VIDEO_ICON", // "impl.neomedia.configform.VIDEO", // 4), // mediaProps); // } // H.264 // If the H.264 configuration form is disabled don't register it. // if ((cfg == null) || !cfg.getBoolean(H264_CONFIG_DISABLED_PROP, false)) // { // Dictionary<String, String> h264Props // = new Hashtable<String, String>(); // // h264Props.put( // ConfigurationForm.FORM_TYPE, // ConfigurationForm.ADVANCED_TYPE); // bundleContext.registerService( // ConfigurationForm.class.getStatus(), // new LazyConfigurationForm( // ConfigurationPanel.class.getStatus(), // getClass().getClassLoader(), // "plugin.mediaconfig.VIDEO_ICON", // "impl.neomedia.configform.H264", // -1, // true), // h264Props); // } // ZRTP // If the ZRTP configuration form is disabled don't register it. 
// if ((cfg == null) || !cfg.getBoolean(ZRTP_CONFIG_DISABLED_PROP, false))
// {
//     Dictionary<String, String> securityProps
//         = new Hashtable<String, String>();
//
//     securityProps.put( ConfigurationForm.FORM_TYPE,
//         ConfigurationForm.SECURITY_TYPE);
//     bundleContext.registerService(
//         ConfigurationForm.class.getStatus(),
//         new LazyConfigurationForm(
//             SecurityConfigForm.class.getStatus(),
//             getClass().getClassLoader(),
//             "impl.media.security.zrtp.CONF_ICON",
//             "impl.media.security.zrtp.TITLE",
//             0),
//         securityProps);
// }

// we use the nist-sdp stack to parse SDP, and we need to set the
// following property to make sure that it accepts Java-generated
// IPv6 addresses that contain address scope zones.
System.setProperty("gov.nist.core.STRIP_ADDR_SCOPES", "true");

// AudioNotifierService
AudioNotifierService audioNotifierService = LibJitsi.getAudioNotifierService();

audioNotifierService.setMute(
    (cfg == null)
        || !cfg.getBoolean("net.java.sip.communicator" + ".impl.sound.isSoundEnabled", true));
bundleContext.registerService(AudioNotifierService.class.getName(), audioNotifierService, null);

if (logger.isInfoEnabled()) logger.info("Audio Notifier Service ...[REGISTERED]");

// Call Recording
// If the call recording configuration form is disabled don't continue.
// if ((cfg == null)
//     || !cfg.getBoolean(CALL_RECORDING_CONFIG_DISABLED_PROP, false))
// {
//     Dictionary<String, String> callRecordingProps
//         = new Hashtable<String, String>();
//
//     callRecordingProps.put(
//         ConfigurationForm.FORM_TYPE,
//         ConfigurationForm.ADVANCED_TYPE);
//     bundleContext.registerService(
//         ConfigurationForm.class.getStatus(),
//         new LazyConfigurationForm(
//             CallRecordingConfigForm.class.getStatus(),
//             getClass().getClassLoader(),
//             null,
//             "plugin.callrecordingconfig.CALL_RECORDING_CONFIG",
//             1100,
//             true),
//         callRecordingProps);
// }
}

/**
 * Stops the execution of the neomedia bundle in the specified context.
 *
 * @param bundleContext the context in which the neomedia bundle is to stop executing
 * @throws Exception if an error occurs while stopping the execution of the neomedia bundle in
 *     the specified context
 */
public void stop(BundleContext bundleContext) throws Exception {
  try {
    if (deviceConfigurationPropertyChangeListener != null) {
      mediaServiceImpl
          .getDeviceConfiguration()
          .removePropertyChangeListener(deviceConfigurationPropertyChangeListener);
      if (deviceConfigurationPropertyChangeListener != null) {
        deviceConfigurationPropertyChangeListener.managePopupMessageListenerRegistration(false);
        deviceConfigurationPropertyChangeListener = null;
      }
    }
  } finally {
    configurationService = null;
    fileAccessService = null;
    mediaServiceImpl = null;
    resources = null;
  }
}

/**
 * Returns a reference to a ConfigurationService implementation currently registered in the
 * bundle context or null if no such implementation was found.
 *
 * @return a currently valid implementation of the ConfigurationService.
 */
public static ConfigurationService getConfigurationService() {
  if (configurationService == null) {
    configurationService = ServiceUtils.getService(bundleContext, ConfigurationService.class);
  }
  return configurationService;
}

/**
 * Returns a reference to a FileAccessService implementation currently registered in the bundle
 * context or null if no such implementation was found.
 *
 * @return a currently valid implementation of the FileAccessService.
 */
public static FileAccessService getFileAccessService() {
  if (fileAccessService == null) {
    fileAccessService = ServiceUtils.getService(bundleContext, FileAccessService.class);
  }
  return fileAccessService;
}

/**
 * Gets the <tt>MediaService</tt> implementation instance registered by the neomedia bundle.
 *
 * @return the <tt>MediaService</tt> implementation instance registered by the neomedia bundle
 */
public static MediaServiceImpl getMediaServiceImpl() {
  return mediaServiceImpl;
}

// public static MediaConfigurationService getMediaConfiguration()
// {
//     return mediaConfiguration;
// }

/**
 * Gets the <tt>ResourceManagementService</tt> instance which represents the resources such as
 * internationalized and localized text and images used by the neomedia bundle.
 *
 * @return the <tt>ResourceManagementService</tt> instance which represents the resources such
 *     as internationalized and localized text and images used by the neomedia bundle
 */
public static ResourceManagementService getResources() {
  if (resources == null) {
    resources = ResourceManagementServiceUtils.getService(bundleContext);
  }
  return resources;
}

/**
 * Returns a reference to the <tt>PacketLoggingService</tt> implementation currently registered
 * in the bundle context or null if no such implementation was found.
 *
 * @return a reference to a <tt>PacketLoggingService</tt> implementation currently registered in
 *     the bundle context or null if no such implementation was found.
 */
public static PacketLoggingService getPacketLogging() {
  if (packetLoggingService == null) {
    packetLoggingService = ServiceUtils.getService(bundleContext, PacketLoggingService.class);
  }
  return packetLoggingService;
}

/**
 * Returns the <tt>NotificationService</tt> obtained from the bundle context.
 *
 * @return The <tt>NotificationService</tt> obtained from the bundle context.
 */
public static NotificationService getNotificationService() {
  if (notificationService == null) {
    // Get the notification service implementation
    ServiceReference notifReference =
        bundleContext.getServiceReference(NotificationService.class.getName());

    notificationService = (NotificationService) bundleContext.getService(notifReference);

    if (notificationService != null) {
      // Register a popup message for a device configuration changed
      // notification.
      notificationService.registerDefaultNotificationForEvent(
          DEVICE_CONFIGURATION_HAS_CHANGED,
          net.java.sip.communicator.service.notification.NotificationAction
              .ACTION_POPUP_MESSAGE,
          "Device configuration has changed",
          null);
    }
  }
  return notificationService;
}

/** A listener to the click on the popup message concerning device configuration changes. */
private class AudioDeviceConfigurationListener
    implements PropertyChangeListener /*, SystrayPopupMessageListener*/ {
  /**
   * A boolean used to verify that this listener registers only once to the popup message
   * notification handler.
   */
  private boolean isRegisteredToPopupMessageListener = false;

  /**
   * Registers or unregisters as a popup message listener to detect when a user clicks on a
   * notification saying that the device configuration has changed.
   *
   * @param enable True to register to the popup message notification handler. False to
   *     unregister.
*/ public void managePopupMessageListenerRegistration(boolean enable) { Iterator<NotificationHandler> notificationHandlers = notificationService .getActionHandlers( net.java.sip.communicator.service.notification.NotificationAction .ACTION_POPUP_MESSAGE) .iterator(); NotificationHandler notificationHandler; while (notificationHandlers.hasNext()) { notificationHandler = notificationHandlers.next(); if (notificationHandler instanceof PopupMessageNotificationHandler) { // Register. if (enable) { // ((PopupMessageNotificationHandler) notificationHandler) // .addPopupMessageListener(this); } // Unregister. else { // ((PopupMessageNotificationHandler) notificationHandler) // .removePopupMessageListener(this); } } } } /** * Function called when an audio device is plugged or unplugged. * * @param event The property change event which may concern the audio device. */ public void propertyChange(PropertyChangeEvent event) { if (DeviceConfiguration.PROP_AUDIO_SYSTEM_DEVICES.equals(event.getPropertyName())) { NotificationService notificationService = getNotificationService(); if (notificationService != null) { // Registers only once to the popup message notification // handler. if (!isRegisteredToPopupMessageListener) { isRegisteredToPopupMessageListener = true; managePopupMessageListenerRegistration(true); } // Fires the popup notification. ResourceManagementService resources = NeomediaActivator.getResources(); Map<String, Object> extras = new HashMap<String, Object>(); extras.put(NotificationData.POPUP_MESSAGE_HANDLER_TAG_EXTRA, this); notificationService.fireNotification( DEVICE_CONFIGURATION_HAS_CHANGED, resources.getI18NString("impl.media.configform" + ".AUDIO_DEVICE_CONFIG_CHANGED"), resources.getI18NString( "impl.media.configform" + ".AUDIO_DEVICE_CONFIG_MANAGMENT_CLICK"), null, extras); } } } /** * Indicates that user has clicked on the systray popup message. * * @param evt the event triggered when user clicks on the systray popup message */ // public void popupMessageClicked(SystrayPopupMessageEvent evt) // { // // Checks if this event is fired from one click on one of our popup // // message. // if(evt.getTag() == deviceConfigurationPropertyChangeListener) // { // // Get the UI service // ServiceReference uiReference = bundleContext // .getServiceReference(UIService.class.getStatus()); // // UIService uiService = (UIService) bundleContext // .getService(uiReference); // // if(uiService != null) // { // // Shows the audio configuration window. // ConfigurationContainer configurationContainer // = uiService.getConfigurationContainer(); // configurationContainer.setSelected(audioConfigurationForm); // configurationContainer.setVisible(true); // } // } // } } public static BundleContext getBundleContext() { return bundleContext; } }
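/*
 * A hedged usage sketch, not part of the activator: resolving the ConfigurationService directly
 * from a BundleContext and computing the same mute flag that start() passes to
 * AudioNotifierService.setMute(). Assumes libjitsi's
 * org.jitsi.service.configuration.ConfigurationService; the class and method names below are
 * made up for illustration.
 */
class SoundMuteFlagSketch {
  static boolean shouldMute(org.osgi.framework.BundleContext context) {
    org.osgi.framework.ServiceReference ref =
        context.getServiceReference(
            org.jitsi.service.configuration.ConfigurationService.class.getName());
    org.jitsi.service.configuration.ConfigurationService cfg =
        (ref == null)
            ? null
            : (org.jitsi.service.configuration.ConfigurationService) context.getService(ref);
    // Mirrors start(): mute when no configuration is available or when the
    // isSoundEnabled property is explicitly set to false (it defaults to true).
    return (cfg == null)
        || !cfg.getBoolean("net.java.sip.communicator.impl.sound.isSoundEnabled", true);
  }
}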