/**
 * Method fired when "add" button is clicked.
 *
 * @param v add button's <tt>View</tt>
 */
public void onAddClicked(View v) {
  // Resolve the account chosen in the accounts spinner; nothing to do without one.
  Spinner accountSpinner = (Spinner) findViewById(R.id.selectAccountSpinner);
  Account account = (Account) accountSpinner.getSelectedItem();
  if (account == null) {
    logger.error("No account selected");
    return;
  }

  // The selected account must be backed by a registered protocol provider.
  ProtocolProviderService provider = account.getProtocolProvider();
  if (provider == null) {
    logger.error("No provider registered for account " + account.getAccountName());
    return;
  }

  // Read the contact address and the optional display name from the form fields.
  View root = findViewById(android.R.id.content);
  String address = ViewUtil.getTextViewValue(root, R.id.editContactName);
  String name = ViewUtil.getTextViewValue(root, R.id.editDisplayName);

  // When a display name was supplied, arrange for the contact to be renamed
  // as soon as it appears in the contact list.
  if (name != null && name.length() > 0) {
    addRenameListener(provider, null, address, name);
  }

  Spinner groupSpinner = (Spinner) findViewById(R.id.selectGroupSpinner);
  ContactListUtils.addContact(
      provider, (MetaContactGroup) groupSpinner.getSelectedItem(), address);
  finish();
}
/**
 * Creates the {@link CameraTextureRender} and a {@link SurfaceTexture} bound to its OpenGL
 * texture, and registers this instance as the frame-available listener.
 */
public SurfaceTextureManager() {
  CameraTextureRender render = new CameraTextureRender();
  render.surfaceCreated();
  textureRender = render;

  logger.debug("textureID=" + render.getTextureId());

  surfaceTexture = new SurfaceTexture(render.getTextureId());
  surfaceTexture.setOnFrameAvailableListener(this);
}
/**
 * Marks a new camera frame as pending and wakes any thread blocked waiting for one.
 *
 * @param texture the <tt>SurfaceTexture</tt> reporting the frame (unused; there is only one)
 * @throws RuntimeException if the previous frame has not been consumed yet
 */
@Override
public void onFrameAvailable(SurfaceTexture texture) {
  logger.trace("new frame available");
  synchronized (frameSyncObject) {
    // A still-set flag means the consumer never picked up the last frame.
    if (frameAvailable) {
      throw new RuntimeException("frameAvailable already set, frame could be dropped");
    }
    frameAvailable = true;
    frameSyncObject.notifyAll();
  }
}
/**
 * Sets the priority of the calling thread to a specific value.
 *
 * <p>Failures are logged and swallowed: {@code Process.setThreadPriority(int)} throws
 * <tt>IllegalArgumentException</tt> for an invalid priority and <tt>SecurityException</tt>
 * when the caller is not permitted to change it, and neither is fatal here.
 *
 * @param threadPriority the priority to be set on the calling thread
 */
public static void setThreadPriority(int threadPriority) {
  try {
    Process.setThreadPriority(threadPriority);
  } catch (IllegalArgumentException | SecurityException e) {
    // Multi-catch replaces the original hand-rolled "remember the exception" flow;
    // behavior is identical: both exception types end up logged as a warning.
    logger.warn("Failed to set thread priority.", e);
  }
}
/**
 * Configures echo cancellation, automatic gain control and noise suppression effects on the
 * audio session of {@code audioRecord}, where the device advertises them. Each effect is
 * enabled or disabled according to the corresponding <tt>AudioSystem</tt> setting, and this
 * instance is registered to be notified of enable-status changes.
 *
 * <p>NOTE(review): the created <tt>AudioEffect</tt> instances are not stored, so they are
 * never explicitly released — presumably they are tied to the audio session's lifetime;
 * confirm against the platform documentation.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void configureEffects() {
  // android.media.audiofx effects require API level 16 (Jelly Bean).
  if (!AndroidUtils.hasAPI(16)) return;

  AudioSystem audioSystem =
      AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_AUDIORECORD);

  // Creates echo canceler if available
  if (AcousticEchoCanceler.isAvailable()) {
    AcousticEchoCanceler echoCanceller =
        AcousticEchoCanceler.create(audioRecord.getAudioSessionId());
    // create() may return null even when isAvailable() reported true.
    if (echoCanceller != null) {
      echoCanceller.setEnableStatusListener(this);
      echoCanceller.setEnabled(audioSystem.isEchoCancel());
      logger.info("Echo cancellation: " + echoCanceller.getEnabled());
    }
  }

  // Automatic gain control
  if (AutomaticGainControl.isAvailable()) {
    AutomaticGainControl agc = AutomaticGainControl.create(audioRecord.getAudioSessionId());
    if (agc != null) {
      agc.setEnableStatusListener(this);
      agc.setEnabled(audioSystem.isAutomaticGainControl());
      logger.info("Auto gain control: " + agc.getEnabled());
    }
  }

  // Creates noise suppressor if available
  if (NoiseSuppressor.isAvailable()) {
    NoiseSuppressor noiseSuppressor = NoiseSuppressor.create(audioRecord.getAudioSessionId());
    if (noiseSuppressor != null) {
      noiseSuppressor.setEnableStatusListener(this);
      noiseSuppressor.setEnabled(audioSystem.isDenoise());
      logger.info("Noise suppressor: " + noiseSuppressor.getEnabled());
    }
  }
}
/**
 * Wraps the given <tt>MediaCodecInfo</tt> into a media-type specific <tt>CodecInfo</tt>.
 *
 * @param codecInfo the system codec description to encapsulate
 * @return a <tt>CodecInfo</tt> for the first supported H.263, H.264 or VP8 type, or
 *     <tt>null</tt> when none of the codec's advertised types is handled
 */
public static CodecInfo getCodecInfo(MediaCodecInfo codecInfo) {
  for (String type : codecInfo.getSupportedTypes()) {
    try {
      switch (type) {
        case MEDIA_CODEC_TYPE_H263:
          return new H263CodecInfo(codecInfo);
        case MEDIA_CODEC_TYPE_H264:
          return new H264CodecInfo(codecInfo);
        case MEDIA_CODEC_TYPE_VP8:
          return new VP8CodecInfo(codecInfo);
        default:
          // Unrecognized type: try the codec's next advertised type.
          break;
      }
    } catch (IllegalArgumentException e) {
      logger.error(
          "Error initializing codec info: " + codecInfo.getName() + ", type: " + type, e);
    }
  }
  return null;
}
// Builds the ban list, discovers every MediaCodec on the device and logs which codec
// was nominated per media type. Ordering matters: `codecs` must be fully populated
// before the getCodecForType(...) log lines at the bottom run.
static {
  bannedYuvCodecs = new ArrayList<String>();

  // Banned H264 encoders/decoders
  // Crashes
  bannedYuvCodecs.add("OMX.SEC.avc.enc");
  bannedYuvCodecs.add("OMX.SEC.h263.enc");
  // Don't support 3.1 profile used by Jitsi
  bannedYuvCodecs.add("OMX.Nvidia.h264.decode");
  // bannedYuvCodecs.add("OMX.SEC.avc.dec");

  // Banned VP8 encoders/decoders
  bannedYuvCodecs.add("OMX.SEC.vp8.dec");
  // This one works only for res 176x144
  bannedYuvCodecs.add("OMX.google.vpx.encoder");

  // Enumerate every codec known to the platform; banned ones are kept in the list
  // but flagged, so getCodecForType(...) skips them during selection.
  for (int codecIndex = 0, codecCount = MediaCodecList.getCodecCount();
      codecIndex < codecCount;
      codecIndex++) {
    MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(codecIndex);
    logger.info(
        "Discovered codec: "
            + codecInfo.getName()
            + "/"
            + Arrays.toString(codecInfo.getSupportedTypes()));
    CodecInfo ci = CodecInfo.getCodecInfo(codecInfo);
    if (ci != null) {
      codecs.add(ci);
      ci.setBanned(bannedYuvCodecs.contains(ci.getName()));
    }
  }

  // Log the codec that will actually be used for each media type / direction.
  logger.info("Selected H264 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_H264, true));
  logger.info("Selected H264 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_H264, false));
  logger.info("Selected H263 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_H263, true));
  logger.info("Selected H263 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_H263, false));
  logger.info("Selected VP8 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_VP8, true));
  logger.info("Selected VP8 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_VP8, false));
}
/**
 * This activity allows user to add new contacts.
 *
 * @author Pawel Domas
 */
public class AddContactActivity extends OSGiActivity {
  /** The logger. */
  private static final Logger logger = Logger.getLogger(AddContactActivity.class);

  /** {@inheritDoc} */
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    setContentView(R.layout.add_contact);
    setTitle(R.string.service_gui_ADD_CONTACT);

    initAccountSpinner();
    initContactGroupSpinner();
  }

  /** Initializes "select account" spinner with existing accounts. */
  private void initAccountSpinner() {
    Spinner accountsSpiner = (Spinner) findViewById(R.id.selectAccountSpinner);

    Iterator<ProtocolProviderService> providers =
        AccountUtils.getRegisteredProviders().iterator();

    List<AccountID> accounts = new ArrayList<AccountID>();

    int selectedIdx = -1;
    int idx = 0;

    while (providers.hasNext()) {
      ProtocolProviderService provider = providers.next();
      // Only accounts that support presence can have contacts added to them.
      OperationSet opSet = provider.getOperationSet(OperationSetPresence.class);
      if (opSet == null) continue;

      AccountID account = provider.getAccountID();
      accounts.add(account);
      idx++;
      // NOTE(review): selectedIdx is set AFTER idx++ — one past the account's position in
      // `accounts`. Presumably correct because the adapter (constructed with the trailing
      // `true` flag) seems to prepend a "select account" row, shifting spinner positions
      // by one — confirm against AccountsListAdapter before touching this.
      if (account.isPreferredProvider()) {
        selectedIdx = idx;
      }
    }

    AccountsListAdapter accountsAdapter =
        new AccountsListAdapter(
            this, R.layout.select_account_row, R.layout.select_account_dropdown, accounts, true);
    accountsSpiner.setAdapter(accountsAdapter);

    // if we have only select account option and only one account
    // select the available account
    if (accounts.size() == 1) accountsSpiner.setSelection(0);
    else accountsSpiner.setSelection(selectedIdx);
  }

  /** Initializes select contact group spinner with contact groups. */
  private void initContactGroupSpinner() {
    Spinner groupSpinner = (Spinner) findViewById(R.id.selectGroupSpinner);

    MetaContactGroupAdapter contactGroupAdapter =
        new MetaContactGroupAdapter(this, R.id.selectGroupSpinner, true, true);

    contactGroupAdapter.setItemLayout(R.layout.simple_spinner_item);
    contactGroupAdapter.setDropDownLayout(R.layout.dropdown_spinner_item);

    groupSpinner.setAdapter(contactGroupAdapter);
  }

  /**
   * Method fired when "add" button is clicked.
   *
   * @param v add button's <tt>View</tt>
   */
  public void onAddClicked(View v) {
    Spinner accountsSpiner = (Spinner) findViewById(R.id.selectAccountSpinner);

    Account selectedAcc = (Account) accountsSpiner.getSelectedItem();
    if (selectedAcc == null) {
      logger.error("No account selected");
      return;
    }

    ProtocolProviderService pps = selectedAcc.getProtocolProvider();
    if (pps == null) {
      logger.error("No provider registered for account " + selectedAcc.getAccountName());
      return;
    }

    View content = findViewById(android.R.id.content);
    String contactAddress = ViewUtil.getTextViewValue(content, R.id.editContactName);

    String displayName = ViewUtil.getTextViewValue(content, R.id.editDisplayName);
    // Register the rename listener BEFORE adding the contact so the add event
    // cannot be missed.
    if (displayName != null && displayName.length() > 0) {
      addRenameListener(pps, null, contactAddress, displayName);
    }

    Spinner groupSpinner = (Spinner) findViewById(R.id.selectGroupSpinner);
    ContactListUtils.addContact(
        pps, (MetaContactGroup) groupSpinner.getSelectedItem(), contactAddress);

    finish();
  }

  /**
   * Adds a rename listener.
   *
   * @param protocolProvider the protocol provider to which the contact was added
   * @param metaContact the <tt>MetaContact</tt> if the new contact was added to an existing meta
   *     contact
   * @param contactAddress the address of the newly added contact
   * @param displayName the new display name
   */
  private void addRenameListener(
      final ProtocolProviderService protocolProvider,
      final MetaContact metaContact,
      final String contactAddress,
      final String displayName) {
    // NOTE(review): this anonymous listener is never removed from the contact list
    // service, so it outlives the activity — verify whether the service clears
    // listeners itself or whether this is a small leak per added contact.
    AndroidGUIActivator.getContactListService()
        .addMetaContactListListener(
            new MetaContactListAdapter() {
              @Override
              public void metaContactAdded(MetaContactEvent evt) {
                // Only react to the meta contact that contains our newly added address.
                if (evt.getSourceMetaContact().getContact(contactAddress, protocolProvider)
                    != null) {
                  renameContact(evt.getSourceMetaContact(), displayName);
                }
              }

              @Override
              public void protoContactAdded(ProtoContactEvent evt) {
                // Covers the case where the contact was merged into an existing meta contact.
                if (metaContact != null && evt.getNewParent().equals(metaContact)) {
                  renameContact(metaContact, displayName);
                }
              }
            });
  }

  /**
   * Renames the given meta contact.
   *
   * @param metaContact the <tt>MetaContact</tt> to rename
   * @param displayName the new display name
   */
  private void renameContact(final MetaContact metaContact, final String displayName) {
    // Renaming may hit the network, so it is pushed off the calling (UI/event) thread.
    new Thread() {
      @Override
      public void run() {
        AndroidGUIActivator.getContactListService().renameMetaContact(metaContact, displayName);
      }
    }.start();
  }
}
/**
 * Class used to manage codecs information for <tt>MediaCodec</tt>.
 *
 * @author Pawel Domas
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public abstract class CodecInfo {
  /** The logger */
  private static final Logger logger = Logger.getLogger(CodecInfo.class);

  /**
   * The mime type of H.264-encoded media data as defined by Android's <tt>MediaCodec</tt> class.
   */
  public static final String MEDIA_CODEC_TYPE_H264 = "video/avc";

  /** The mime type of VP8-encoded media data as defined by Android's <tt>MediaCodec</tt> class. */
  public static final String MEDIA_CODEC_TYPE_VP8 = "video/x-vnd.on2.vp8";

  /**
   * The mime type of H.263-encoded media data as defined by Android's <tt>MediaCodec</tt> class.
   */
  public static final String MEDIA_CODEC_TYPE_H263 = "video/3gpp";

  /** List of crashing codecs */
  private static final List<String> bannedYuvCodecs;

  /** List of all codecs discovered in the system. */
  private static final List<CodecInfo> codecs = new ArrayList<CodecInfo>();

  // Builds the ban list and enumerates all platform codecs once, at class load.
  // `codecs` must be fully populated before the getCodecForType(...) log lines run.
  static {
    bannedYuvCodecs = new ArrayList<String>();

    // Banned H264 encoders/decoders
    // Crashes
    bannedYuvCodecs.add("OMX.SEC.avc.enc");
    bannedYuvCodecs.add("OMX.SEC.h263.enc");
    // Don't support 3.1 profile used by Jitsi
    bannedYuvCodecs.add("OMX.Nvidia.h264.decode");
    // bannedYuvCodecs.add("OMX.SEC.avc.dec");

    // Banned VP8 encoders/decoders
    bannedYuvCodecs.add("OMX.SEC.vp8.dec");
    // This one works only for res 176x144
    bannedYuvCodecs.add("OMX.google.vpx.encoder");

    for (int codecIndex = 0, codecCount = MediaCodecList.getCodecCount();
        codecIndex < codecCount;
        codecIndex++) {
      MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(codecIndex);
      logger.info(
          "Discovered codec: "
              + codecInfo.getName()
              + "/"
              + Arrays.toString(codecInfo.getSupportedTypes()));
      CodecInfo ci = CodecInfo.getCodecInfo(codecInfo);
      if (ci != null) {
        codecs.add(ci);
        // Banned codecs stay in the list but are skipped by getCodecForType(...).
        ci.setBanned(bannedYuvCodecs.contains(ci.getName()));
      }
    }

    logger.info("Selected H264 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_H264, true));
    logger.info("Selected H264 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_H264, false));
    logger.info("Selected H263 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_H263, true));
    logger.info("Selected H263 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_H263, false));
    logger.info("Selected VP8 encoder: " + getCodecForType(MEDIA_CODEC_TYPE_VP8, true));
    logger.info("Selected VP8 decoder: " + getCodecForType(MEDIA_CODEC_TYPE_VP8, false));
  }

  /** <tt>MediaCodecInfo</tt> encapsulated by this instance. */
  protected final MediaCodecInfo codecInfo;

  /** <tt>MediaCodecInfo.CodecCapabilities</tt> encapsulated by this instance. */
  protected final MediaCodecInfo.CodecCapabilities caps;

  /** List of color formats supported by subject <tt>MediaCodec</tt>. */
  protected final ArrayList<CodecColorFormat> colors;

  /** Media type of this <tt>CodecInfo</tt>. */
  private final String mediaType;

  /** Profile levels supported by subject <tt>MediaCodec</tt> (lazily built). */
  private ProfileLevel[] profileLevels;

  /**
   * Flag indicates that this codec is known to cause some troubles and is disabled(will be ignored
   * during codec select phase).
   */
  private boolean banned;

  /**
   * Creates new instance of <tt>CodecInfo</tt> that will encapsulate given <tt>codecInfo</tt>.
   *
   * @param codecInfo the codec info object to encapsulate.
   * @param mediaType media type of the codec
   */
  public CodecInfo(MediaCodecInfo codecInfo, String mediaType) {
    this.codecInfo = codecInfo;
    this.mediaType = mediaType;
    // May throw IllegalArgumentException for unsupported types; callers
    // (getCodecInfo below) catch and log it.
    this.caps = codecInfo.getCapabilitiesForType(mediaType);

    this.colors = new ArrayList<CodecColorFormat>();
    int[] colorFormats = caps.colorFormats;
    for (int colorFormat : colorFormats) {
      colors.add(CodecColorFormat.fromInt(colorFormat));
    }
  }

  /**
   * Returns codec name that can be used to obtain <tt>MediaCodec</tt>.
   *
   * @return codec name that can be used to obtain <tt>MediaCodec</tt>.
   */
  public String getName() {
    return codecInfo.getName();
  }

  /**
   * Finds the codec for given <tt>mimeType</tt>.
   *
   * @param mimeType mime type of the codec.
   * @param isEncoder <tt>true</tt> if encoder should be returned or <tt>false</tt> for decoder.
   * @return the codec for given <tt>mimeType</tt>, or <tt>null</tt> when no non-banned
   *     match exists.
   */
  public static CodecInfo getCodecForType(String mimeType, boolean isEncoder) {
    // First non-banned match in discovery order wins.
    for (CodecInfo codec : codecs) {
      if (!codec.isBanned()
          && codec.mediaType.equals(mimeType)
          && codec.codecInfo.isEncoder() == isEncoder) {
        return codec;
      }
    }
    return null;
  }

  /**
   * Returns the list of detected codecs.
   *
   * @return the list of detected codecs.
   */
  public static List<CodecInfo> getSupportedCodecs() {
    return Collections.unmodifiableList(codecs);
  }

  /**
   * Returns the list of profiles supported.
   *
   * @return the list of profiles supported.
   */
  protected abstract Profile[] getProfileSet();

  /**
   * Returns the list supported levels.
   *
   * @return the list supported levels.
   */
  protected abstract Level[] getLevelSet();

  // Resolves a raw OMX profile constant; falls back to a synthetic "Unknown" entry
  // rather than failing, so unrecognized hardware values remain printable.
  private Profile getProfile(int profileInt) {
    for (Profile p : getProfileSet()) {
      if (p.value == profileInt) return p;
    }
    return new Profile("Unknown", profileInt);
  }

  // Same fallback strategy as getProfile, for OMX level constants.
  private Level getLevel(int levelInt) {
    for (Level l : getLevelSet()) {
      if (l.value == levelInt) return l;
    }
    return new Level("Unknown", levelInt);
  }

  // Lazily maps the platform's CodecProfileLevel pairs to named Profile/Level objects.
  public ProfileLevel[] getProfileLevels() {
    if (profileLevels == null) {
      MediaCodecInfo.CodecProfileLevel[] plArray = caps.profileLevels;
      profileLevels = new ProfileLevel[plArray.length];
      for (int i = 0; i < profileLevels.length; i++) {
        Profile p = getProfile(plArray[i].profile);
        Level l = getLevel(plArray[i].level);
        profileLevels[i] = new ProfileLevel(p, l);
      }
    }
    return profileLevels;
  }

  @Override
  public String toString() {
    StringBuilder colorStr = new StringBuilder("\ncolors:\n");
    for (int i = 0; i < colors.size(); i++) {
      colorStr.append(colors.get(i));
      if (i != colors.size() - 1) colorStr.append(", \n");
    }

    StringBuilder plStr = new StringBuilder("\nprofiles:\n");
    ProfileLevel[] profiles = getProfileLevels();
    for (int i = 0; i < profiles.length; i++) {
      plStr.append(profiles[i].toString());
      if (i != profiles.length - 1) plStr.append(", \n");
    }

    return codecInfo.getName() + "(" + getLibjitsiEncoding() + ")" + colorStr + plStr;
  }

  /**
   * Wraps a <tt>MediaCodecInfo</tt> into the matching media-type specific subclass, or
   * returns <tt>null</tt> when none of the codec's types is handled.
   */
  public static CodecInfo getCodecInfo(MediaCodecInfo codecInfo) {
    String[] types = codecInfo.getSupportedTypes();
    for (String type : types) {
      try {
        if (type.equals(MEDIA_CODEC_TYPE_H263)) return new H263CodecInfo(codecInfo);
        else if (type.equals(MEDIA_CODEC_TYPE_H264)) return new H264CodecInfo(codecInfo);
        else if (type.equals(MEDIA_CODEC_TYPE_VP8)) return new VP8CodecInfo(codecInfo);
      } catch (IllegalArgumentException e) {
        // Typically thrown by getCapabilitiesForType in the subclass constructor.
        logger.error(
            "Error initializing codec info: " + codecInfo.getName() + ", type: " + type, e);
      }
    }
    return null;
  }

  public void setBanned(boolean banned) {
    this.banned = banned;
  }

  public boolean isBanned() {
    return banned;
  }

  public boolean isEncoder() {
    return codecInfo.isEncoder();
  }

  /** Returns <tt>true</tt> iff this instance is the one selected for its type/direction. */
  public boolean isNominated() {
    return getCodecForType(mediaType, isEncoder()) == this;
  }

  /** Maps the Android mime type to the libjitsi encoding constant (falls through as-is). */
  public String getLibjitsiEncoding() {
    if (mediaType.equals(MEDIA_CODEC_TYPE_H263)) {
      return Constants.H263P;
    } else if (mediaType.equals(MEDIA_CODEC_TYPE_H264)) {
      return Constants.H264;
    } else if (mediaType.equals(MEDIA_CODEC_TYPE_VP8)) {
      return Constants.VP8;
    } else {
      return mediaType;
    }
  }

  /** Immutable pairing of a named profile with a named level. */
  public static class ProfileLevel {
    private final Profile profile;
    private final Level level;

    public ProfileLevel(Profile p, Level l) {
      this.profile = p;
      this.level = l;
    }

    @Override
    public String toString() {
      return "P: " + profile.toString() + " L: " + level.toString();
    }
  }

  /** Named wrapper around an OMX profile constant. */
  public static class Profile {
    private final int value;
    private final String name;

    public Profile(String name, int value) {
      this.value = value;
      this.name = name;
    }

    @Override
    public String toString() {
      // Hex without zero padding, e.g. "ProfileHigh(0x8)".
      return name + "(0x" + Integer.toString(value, 16) + ")";
    }
  }

  /** Named wrapper around an OMX level constant. */
  public static class Level {
    private final int value;
    private final String name;

    public Level(String name, int value) {
      this.value = value;
      this.name = name;
    }

    @Override
    public String toString() {
      return name + "(0x" + Integer.toString(value, 16) + ")";
    }
  }

  /** <tt>CodecInfo</tt> for H.264 ("video/avc") codecs. */
  static class H264CodecInfo extends CodecInfo {
    private final CodecInfo.Profile[] PROFILES =
        new CodecInfo.Profile[] {
          // from OMX_VIDEO_AVCPROFILETYPE
          new Profile("ProfileBaseline", 0x01),
          new Profile("ProfileMain", 0x02),
          new Profile("ProfileExtended", 0x04),
          new Profile("ProfileHigh", 0x08),
          new Profile("ProfileHigh10", 0x10),
          new Profile("ProfileHigh422", 0x20),
          new Profile("ProfileHigh444", 0x40)
        };

    private final CodecInfo.Level[] LEVELS =
        new CodecInfo.Level[] {
          // from OMX_VIDEO_AVCLEVELTYPE
          new Level("Level1", 0x01),
          new Level("Level1b", 0x02),
          new Level("Level11", 0x04),
          new Level("Level12", 0x08),
          new Level("Level13", 0x10),
          new Level("Level2", 0x20),
          new Level("Level21", 0x40),
          new Level("Level22", 0x80),
          new Level("Level3", 0x100),
          new Level("Level31", 0x200),
          new Level("Level32", 0x400),
          new Level("Level4", 0x800),
          new Level("Level41", 0x1000),
          new Level("Level42", 0x2000),
          new Level("Level5", 0x4000),
          new Level("Level51", 0x8000)
        };

    public H264CodecInfo(MediaCodecInfo codecInfo) {
      super(codecInfo, MEDIA_CODEC_TYPE_H264);
    }

    @Override
    protected Profile[] getProfileSet() {
      return PROFILES;
    }

    @Override
    protected Level[] getLevelSet() {
      return LEVELS;
    }
  }

  /** <tt>CodecInfo</tt> for H.263 ("video/3gpp") codecs. */
  static class H263CodecInfo extends CodecInfo {
    private final CodecInfo.Profile[] PROFILES =
        new CodecInfo.Profile[] {
          // from OMX_VIDEO_H263PROFILETYPE
          new Profile("Baseline", 0x01),
          new Profile("H320Coding", 0x02),
          new Profile("BackwardCompatible", 0x04),
          new Profile("ISWV2", 0x08),
          new Profile("ISWV3", 0x10),
          new Profile("HighCompression", 0x20),
          new Profile("Internet", 0x40),
          new Profile("Interlace", 0x80),
          new Profile("HighLatency", 0x100)
        };

    private final CodecInfo.Level[] LEVELS =
        new CodecInfo.Level[] {
          // from OMX_VIDEO_H263LEVELTYPE
          new Level("Level10", 0x01),
          new Level("Level20", 0x02),
          new Level("Level30", 0x04),
          new Level("Level40", 0x08),
          new Level("Level45", 0x10),
          new Level("Level50", 0x20),
          new Level("Level60", 0x40),
          new Level("Level70", 0x80)
        };

    public H263CodecInfo(MediaCodecInfo codecInfo) {
      super(codecInfo, MEDIA_CODEC_TYPE_H263);
    }

    @Override
    protected Profile[] getProfileSet() {
      return PROFILES;
    }

    @Override
    protected Level[] getLevelSet() {
      return LEVELS;
    }
  }

  /** <tt>CodecInfo</tt> for VP8 ("video/x-vnd.on2.vp8") codecs. */
  static class VP8CodecInfo extends CodecInfo {
    private final Profile[] PROFILES =
        new Profile[] {
          // from OMX_VIDEO_VP8PROFILETYPE
          new Profile("ProfileMain", 0x01)
        };

    private final Level[] LEVELS =
        new Level[] {
          // from OMX_VIDEO_VP8LEVELTYPE
          new Level("Version0", 0x01),
          new Level("Version1", 0x02),
          new Level("Version2", 0x04),
          new Level("Version3", 0x08)
        };

    public VP8CodecInfo(MediaCodecInfo codecInfo) {
      super(codecInfo, MEDIA_CODEC_TYPE_VP8);
    }

    @Override
    protected Profile[] getProfileSet() {
      return PROFILES;
    }

    @Override
    protected Level[] getLevelSet() {
      return LEVELS;
    }
  }
}
/**
 * Logs the enabled-state change reported for an audio effect.
 *
 * @param effect the <tt>AudioEffect</tt> whose state changed
 * @param enabled the new enabled state of <tt>effect</tt>
 */
@Override
public void onEnableStatusChange(AudioEffect effect, boolean enabled) {
  AudioEffect.Descriptor descriptor = effect.getDescriptor();

  logger.info(descriptor + " : " + enabled);
}
/**
 * Implements an audio <tt>CaptureDevice</tt> using {@link AudioRecord}.
 *
 * @author Lyubomir Marinov
 */
public class DataSource extends AbstractPullBufferCaptureDevice {
  /**
   * The <tt>Logger</tt> used by the <tt>DataSource</tt> class and its instances for logging output.
   */
  private static final Logger logger = Logger.getLogger(DataSource.class);

  /**
   * The priority to be set to the thread executing the {@link AudioRecordStream#read(Buffer)}
   * method of a given <tt>AudioRecordStream</tt>.
   */
  private static final int THREAD_PRIORITY = Process.THREAD_PRIORITY_URGENT_AUDIO;

  /** Initializes a new <tt>DataSource</tt> instance. */
  public DataSource() {}

  /**
   * Initializes a new <tt>DataSource</tt> from a specific <tt>MediaLocator</tt>.
   *
   * @param locator the <tt>MediaLocator</tt> to create the new instance from
   */
  public DataSource(MediaLocator locator) {
    super(locator);
  }

  /**
   * Creates a new <tt>PullBufferStream</tt> which is to be at a specific zero-based index in the
   * list of streams of this <tt>PullBufferDataSource</tt>. The <tt>Format</tt>-related information
   * of the new instance is to be abstracted by a specific <tt>FormatControl</tt>.
   *
   * @param streamIndex the zero-based index of the <tt>PullBufferStream</tt> in the list of streams
   *     of this <tt>PullBufferDataSource</tt>
   * @param formatControl the <tt>FormatControl</tt> which is to abstract the
   *     <tt>Format</tt>-related information of the new instance
   * @return a new <tt>PullBufferStream</tt> which is to be at the specified <tt>streamIndex</tt> in
   *     the list of streams of this <tt>PullBufferDataSource</tt> and which has its
   *     <tt>Format</tt>-related information abstracted by the specified <tt>formatControl</tt>
   * @see AbstractPullBufferCaptureDevice#createStream(int, FormatControl)
   */
  protected AbstractPullBufferStream createStream(int streamIndex, FormatControl formatControl) {
    return new AudioRecordStream(this, formatControl);
  }

  /**
   * Opens a connection to the media source specified by the <tt>MediaLocator</tt> of this
   * <tt>DataSource</tt>.
   *
   * @throws IOException if anything goes wrong while opening the connection to the media source
   *     specified by the <tt>MediaLocator</tt> of this <tt>DataSource</tt>
   * @see AbstractPullBufferCaptureDevice#doConnect()
   */
  @Override
  protected void doConnect() throws IOException {
    super.doConnect();

    /*
     * XXX The AudioRecordStream will connect upon start in order to be able
     * to respect requests to set its format.
     */
  }

  /**
   * Closes the connection to the media source specified by the <tt>MediaLocator</tt> of this
   * <tt>DataSource</tt>.
   *
   * @see AbstractPullBufferCaptureDevice#doDisconnect()
   */
  @Override
  protected void doDisconnect() {
    synchronized (getStreamSyncRoot()) {
      Object[] streams = streams();

      if (streams != null) for (Object stream : streams) ((AudioRecordStream) stream).disconnect();
    }
    super.doDisconnect();
  }

  /** Sets the priority of the calling thread to {@link #THREAD_PRIORITY}. */
  public static void setThreadPriority() {
    setThreadPriority(THREAD_PRIORITY);
  }

  /**
   * Sets the priority of the calling thread to a specific value.
   *
   * <p>Failures (invalid priority, insufficient permission) are logged and swallowed;
   * capture proceeds at the thread's previous priority.
   *
   * @param threadPriority the priority to be set on the calling thread
   */
  public static void setThreadPriority(int threadPriority) {
    Throwable exception = null;

    try {
      Process.setThreadPriority(threadPriority);
    } catch (IllegalArgumentException iae) {
      exception = iae;
    } catch (SecurityException se) {
      exception = se;
    }
    if (exception != null) logger.warn("Failed to set thread priority.", exception);
  }

  /**
   * Attempts to set the <tt>Format</tt> to be reported by the <tt>FormatControl</tt> of a
   * <tt>PullBufferStream</tt> at a specific zero-based index in the list of streams of this
   * <tt>PullBufferDataSource</tt>. The <tt>PullBufferStream</tt> does not exist at the time of the
   * attempt to set its <tt>Format</tt>. Override the default behavior which is to not attempt to
   * set the specified <tt>Format</tt> so that they can enable setting the <tt>Format</tt> prior to
   * creating the <tt>PullBufferStream</tt>.
   *
   * @param streamIndex the zero-based index of the <tt>PullBufferStream</tt> the <tt>Format</tt> of
   *     which is to be set
   * @param oldValue the last-known <tt>Format</tt> for the <tt>PullBufferStream</tt> at the
   *     specified <tt>streamIndex</tt>
   * @param newValue the <tt>Format</tt> which is to be set
   * @return the <tt>Format</tt> to be reported by the <tt>FormatControl</tt> of the
   *     <tt>PullBufferStream</tt> at the specified <tt>streamIndex</tt> in the list of streams of
   *     this <tt>PullBufferStream</tt> or <tt>null</tt> if the attempt to set the <tt>Format</tt>
   *     did not success and any last-known <tt>Format</tt> is to be left in effect
   * @see AbstractPullBufferCaptureDevice#setFormat(int, Format, Format)
   */
  @Override
  protected Format setFormat(int streamIndex, Format oldValue, Format newValue) {
    /*
     * Accept format specifications prior to the initialization of
     * AudioRecordStream. Afterwards, AudioRecordStream will decide whether
     * to accept further format specifications.
     */
    return newValue;
  }

  /** Implements an audio <tt>PullBufferStream</tt> using {@link AudioRecord}. */
  private static class AudioRecordStream extends AbstractPullBufferStream<DataSource>
      implements AudioEffect.OnEnableStatusChangeListener {
    /** The <tt>android.media.AudioRecord</tt> which does the actual capturing of audio. */
    private AudioRecord audioRecord;

    /** The <tt>GainControl</tt> through which the volume/gain of captured media is controlled. */
    private final GainControl gainControl;

    /**
     * The length in bytes of the media data read into a <tt>Buffer</tt> via a call to {@link
     * #read(Buffer)}.
     */
    private int length;

    /**
     * The indicator which determines whether this <tt>AudioRecordStream</tt> is to set the priority
     * of the thread in which its {@link #read(Buffer)} method is executed.
     */
    private boolean setThreadPriority = true;

    /**
     * Initializes a new <tt>OpenSLESStream</tt> instance which is to have its
     * <tt>Format</tt>-related information abstracted by a specific <tt>FormatControl</tt>.
     *
     * @param dataSource the <tt>DataSource</tt> which is creating the new instance so that it
     *     becomes one of its <tt>streams</tt>
     * @param formatControl the <tt>FormatControl</tt> which is to abstract the
     *     <tt>Format</tt>-related information of the new instance
     */
    public AudioRecordStream(DataSource dataSource, FormatControl formatControl) {
      super(dataSource, formatControl);

      MediaServiceImpl mediaServiceImpl = NeomediaActivator.getMediaServiceImpl();

      gainControl =
          (mediaServiceImpl == null)
              ? null
              : (GainControl) mediaServiceImpl.getInputVolumeControl();
    }

    /**
     * Opens a connection to the media source of the associated <tt>DataSource</tt>.
     *
     * @throws IOException if anything goes wrong while opening a connection to the media source of
     *     the associated <tt>DataSource</tt>
     */
    public synchronized void connect() throws IOException {
      javax.media.format.AudioFormat af = (javax.media.format.AudioFormat) getFormat();
      int channels = af.getChannels();
      int channelConfig;

      // Only mono and stereo are supported; an unspecified channel count defaults to mono.
      switch (channels) {
        case Format.NOT_SPECIFIED:
        case 1:
          channelConfig = AudioFormat.CHANNEL_IN_MONO;
          break;
        case 2:
          channelConfig = AudioFormat.CHANNEL_IN_STEREO;
          break;
        default:
          throw new IOException("channels");
      }

      int sampleSizeInBits = af.getSampleSizeInBits();
      int audioFormat;

      switch (sampleSizeInBits) {
        case 8:
          audioFormat = AudioFormat.ENCODING_PCM_8BIT;
          break;
        case 16:
          audioFormat = AudioFormat.ENCODING_PCM_16BIT;
          break;
        default:
          throw new IOException("sampleSizeInBits");
      }

      double sampleRate = af.getSampleRate();

      // Each read(Buffer) delivers 20 ms worth of PCM audio.
      length =
          (int)
              Math.round(
                  20 /* milliseconds */ * (sampleRate / 1000) * channels * (sampleSizeInBits / 8));

      /*
       * Apart from the thread in which #read(Buffer) is executed, use the
       * thread priority for the thread which will create the AudioRecord.
       */
      setThreadPriority();
      try {
        int minBufferSize =
            AudioRecord.getMinBufferSize((int) sampleRate, channelConfig, audioFormat);

        // The internal buffer must cover at least one 20 ms read and never be
        // smaller than the platform-required minimum.
        audioRecord =
            new AudioRecord(
                MediaRecorder.AudioSource.DEFAULT,
                (int) sampleRate,
                channelConfig,
                audioFormat,
                Math.max(length, minBufferSize));

        // tries to configure audio effects if available
        configureEffects();
      } catch (IllegalArgumentException iae) {
        IOException ioe = new IOException();

        ioe.initCause(iae);
        throw ioe;
      }

      setThreadPriority = true;
    }

    /**
     * Configures echo cancellation, automatic gain control and noise suppression effects on
     * the capture session, where the device advertises them (API 16+ only).
     *
     * <p>NOTE(review): the created <tt>AudioEffect</tt> instances are not stored, so they are
     * never explicitly released — presumably tied to the audio session's lifetime; confirm.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    private void configureEffects() {
      if (!AndroidUtils.hasAPI(16)) return;

      AudioSystem audioSystem =
          AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_AUDIORECORD);

      // Creates echo canceler if available
      if (AcousticEchoCanceler.isAvailable()) {
        AcousticEchoCanceler echoCanceller =
            AcousticEchoCanceler.create(audioRecord.getAudioSessionId());

        if (echoCanceller != null) {
          echoCanceller.setEnableStatusListener(this);
          echoCanceller.setEnabled(audioSystem.isEchoCancel());
          logger.info("Echo cancellation: " + echoCanceller.getEnabled());
        }
      }

      // Automatic gain control
      if (AutomaticGainControl.isAvailable()) {
        AutomaticGainControl agc = AutomaticGainControl.create(audioRecord.getAudioSessionId());

        if (agc != null) {
          agc.setEnableStatusListener(this);
          agc.setEnabled(audioSystem.isAutomaticGainControl());
          logger.info("Auto gain control: " + agc.getEnabled());
        }
      }

      // Creates noise suppressor if available
      if (NoiseSuppressor.isAvailable()) {
        NoiseSuppressor noiseSuppressor = NoiseSuppressor.create(audioRecord.getAudioSessionId());

        if (noiseSuppressor != null) {
          noiseSuppressor.setEnableStatusListener(this);
          noiseSuppressor.setEnabled(audioSystem.isDenoise());
          logger.info("Noise suppressor: " + noiseSuppressor.getEnabled());
        }
      }
    }

    /** Closes the connection to the media source of the associated <tt>DataSource</tt>. */
    public synchronized void disconnect() {
      if (audioRecord != null) {
        audioRecord.release();
        audioRecord = null;

        setThreadPriority = true;
      }
    }

    /**
     * Attempts to set the <tt>Format</tt> of this <tt>AbstractBufferStream</tt>.
     *
     * @param format the <tt>Format</tt> to be set as the format of this
     *     <tt>AbstractBufferStream</tt>
     * @return the <tt>Format</tt> of this <tt>AbstractBufferStream</tt> or <tt>null</tt> if the
     *     attempt to set the <tt>Format</tt> did not succeed and any last-known <tt>Format</tt> is
     *     to be left in effect
     * @see AbstractPullBufferStream#doSetFormat(Format)
     */
    @Override
    protected synchronized Format doSetFormat(Format format) {
      // Format changes are only accepted until the AudioRecord has been created.
      return (audioRecord == null) ? format : null;
    }

    /**
     * Reads media data from this <tt>PullBufferStream</tt> into a specific <tt>Buffer</tt> with
     * blocking.
     *
     * @param buffer the <tt>Buffer</tt> in which media data is to be read from this
     *     <tt>PullBufferStream</tt>
     * @throws IOException if anything goes wrong while reading media data from this
     *     <tt>PullBufferStream</tt> into the specified <tt>buffer</tt>
     * @see javax.media.protocol.PullBufferStream#read(javax.media.Buffer)
     */
    public void read(Buffer buffer) throws IOException {
      // Raise the priority of the (first) thread that pulls audio data.
      if (setThreadPriority) {
        setThreadPriority = false;
        setThreadPriority();
      }

      // Reuse the buffer's byte[] when it is large enough; otherwise allocate anew.
      Object data = buffer.getData();
      int length = this.length;

      if (data instanceof byte[]) {
        if (((byte[]) data).length < length) data = null;
      } else data = null;
      if (data == null) {
        data = new byte[length];
        buffer.setData(data);
      }

      int toRead = length;
      byte[] bytes = (byte[]) data;

      buffer.setLength(0);
      // AudioRecord.read may return fewer bytes than requested, so loop until a full
      // 20 ms chunk is read — or bail out early if recording has stopped, in which
      // case the buffer ends up shorter than `length`.
      while (toRead > 0) {
        int read;

        synchronized (this) {
          if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING)
            read = audioRecord.read(bytes, offset, toRead);
          else break;
        }
        if (read < 0) {
          throw new IOException(
              AudioRecord.class.getName() + "#read(byte[], int, int) returned " + read);
        } else {
          buffer.setLength(buffer.getLength() + read);
          offset += read;
          toRead -= read;
        }
      }
      buffer.setOffset(0);

      // Apply software gain.
      if (gainControl != null) {
        BasicVolumeControl.applyGain(gainControl, bytes, buffer.getOffset(), buffer.getLength());
      }
    }

    /**
     * Starts the transfer of media data from this <tt>AbstractBufferStream</tt>.
     *
     * @throws IOException if anything goes wrong while starting the transfer of media data from
     *     this <tt>AbstractBufferStream</tt>
     * @see AbstractBufferStream#start()
     */
    @Override
    public void start() throws IOException {
      /*
       * Connect upon start because the connect has been delayed to allow
       * this AudioRecordStream to respect requests to set its format.
       */
      synchronized (this) {
        if (audioRecord == null) connect();
      }

      super.start();

      synchronized (this) {
        if (audioRecord != null) {
          setThreadPriority = true;
          audioRecord.startRecording();
        }
      }
    }

    /**
     * Stops the transfer of media data from this <tt>AbstractBufferStream</tt>.
     *
     * @throws IOException if anything goes wrong while stopping the transfer of media data from
     *     this <tt>AbstractBufferStream</tt>
     * @see AbstractBufferStream#stop()
     */
    @Override
    public void stop() throws IOException {
      synchronized (this) {
        if (audioRecord != null) {
          audioRecord.stop();
          setThreadPriority = true;
        }
      }

      super.stop();
    }

    /** Logs the enabled-state change reported for an audio effect. */
    @Override
    public void onEnableStatusChange(AudioEffect effect, boolean enabled) {
      logger.info(effect.getDescriptor() + " : " + enabled);
    }
  }
}
/** * Manages a SurfaceTexture. Creates SurfaceTexture and CameraTextureRender objects, and provides * functions that wait for frames and render them to the current EGL surface. * * <p>The SurfaceTexture can be passed to Camera.setPreviewTexture() to receive camera output. */ public class SurfaceTextureManager implements SurfaceTexture.OnFrameAvailableListener { /** The logger */ private static final Logger logger = Logger.getLogger(SurfaceTextureManager.class); private SurfaceTexture surfaceTexture; private CameraTextureRender textureRender; /** guards frameAvailable */ private final Object frameSyncObject = new Object(); private boolean frameAvailable; /** Creates instances of CameraTextureRender and SurfaceTexture. */ public SurfaceTextureManager() { textureRender = new CameraTextureRender(); textureRender.surfaceCreated(); logger.debug("textureID=" + textureRender.getTextureId()); surfaceTexture = new SurfaceTexture(textureRender.getTextureId()); surfaceTexture.setOnFrameAvailableListener(this); } @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2) public void release() { if (textureRender != null) { textureRender.release(); textureRender = null; } if (surfaceTexture != null) { surfaceTexture.release(); surfaceTexture = null; } } /** Returns the SurfaceTexture. */ public SurfaceTexture getSurfaceTexture() { return surfaceTexture; } /** * Latches the next buffer into the texture. Must be called from the thread that created the * OutputSurface object. */ public void awaitNewImage() { final int TIMEOUT_MS = 2500; synchronized (frameSyncObject) { while (!frameAvailable) { try { // Wait for onFrameAvailable() to signal us. Use a timeout // to avoid stalling the test if it doesn't arrive. 
frameSyncObject.wait(TIMEOUT_MS); if (!frameAvailable) { // TODO: if "spurious wakeup", continue while loop throw new RuntimeException("Camera frame wait timed out"); } } catch (InterruptedException ie) { // shouldn't happen throw new RuntimeException(ie); } } frameAvailable = false; } // Latch the data. textureRender.checkGlError("before updateTexImage"); surfaceTexture.updateTexImage(); } /** Draws the data from SurfaceTexture onto the current EGL surface. */ public void drawImage() { textureRender.drawFrame(surfaceTexture); } @Override public void onFrameAvailable(SurfaceTexture st) { logger.trace("new frame available"); synchronized (frameSyncObject) { if (frameAvailable) { throw new RuntimeException("frameAvailable already set, frame could be dropped"); } frameAvailable = true; frameSyncObject.notifyAll(); } } }