private void createPeerConnectionInternal(EGLContext renderEGLContext) {
  if (factory == null || isError) {
    Log.e(TAG, "Peerconnection factory is not created");
    return;
  }
  Log.d(TAG, "Create peer connection.");
  Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
  if (videoConstraints != null) {
    Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
  }
  queuedRemoteCandidates = new LinkedList<IceCandidate>();

  if (videoCallEnabled) {
    Log.d(TAG, "EGLContext: " + renderEGLContext);
    factory.setVideoHwAccelerationOptions(renderEGLContext);
  }

  PeerConnection.RTCConfiguration rtcConfig =
      new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
  // TCP candidates are only useful when connecting to a server that supports
  // ICE-TCP.
  rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
  rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
  rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
  // Use ECDSA encryption.
  rtcConfig.keyType = PeerConnection.KeyType.ECDSA;

  peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
  isInitiator = false;

  // Set default WebRTC tracing and INFO libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  Logging.enableTracing(
      "logcat:",
      EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT),
      Logging.Severity.LS_INFO);

  mediaStream = factory.createLocalMediaStream("ARDAMS");
  if (videoCallEnabled) {
    String cameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
    String frontCameraDeviceName =
        CameraEnumerationAndroid.getNameOfFrontFacingDevice();
    if (numberOfCameras > 1 && frontCameraDeviceName != null) {
      cameraDeviceName = frontCameraDeviceName;
    }
    Log.d(TAG, "Opening camera: " + cameraDeviceName);
    videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null);
    if (videoCapturer == null) {
      reportError("Failed to open camera");
      return;
    }
    mediaStream.addTrack(createVideoTrack(videoCapturer));
  }

  mediaStream.addTrack(factory.createAudioTrack(
      AUDIO_TRACK_ID, factory.createAudioSource(audioConstraints)));
  peerConnection.addStream(mediaStream);

  Log.d(TAG, "Peer connection created.");
}
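// A minimal sketch of the createVideoTrack(...) helper referenced above, which this
// snippet does not show: build a VideoSource from the capturer plus the video
// constraints, wrap it in a track, and wire the local renderer. The field and constant
// names (videoSource, localVideoTrack, localRender, renderVideo, VIDEO_TRACK_ID) are
// assumptions, not taken from this source.
private VideoTrack createVideoTrack(VideoCapturerAndroid capturer) {
  videoSource = factory.createVideoSource(capturer, videoConstraints);
  localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
  localVideoTrack.setEnabled(renderVideo);
  localVideoTrack.addRenderer(new VideoRenderer(localRender));
  return localVideoTrack;
}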
private void createPeerConnectionInternal() {
  if (factory == null || isError) {
    Log.e(TAG, "Peerconnection factory is not created");
    return;
  }
  Log.d(TAG, "Create peer connection");
  Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
  if (videoConstraints != null) {
    Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
  }
  queuedRemoteCandidates = new LinkedList<IceCandidate>();

  peerConnection = factory.createPeerConnection(
      signalingParameters.iceServers, pcConstraints, pcObserver);
  isInitiator = false;

  // Set default WebRTC tracing and INFO libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  Logging.enableTracing(
      "logcat:",
      EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT),
      Logging.Severity.LS_INFO);

  mediaStream = factory.createLocalMediaStream("ARDAMS");
  if (videoCallEnabled) {
    String cameraDeviceName = VideoCapturerAndroid.getDeviceName(0);
    String frontCameraDeviceName = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    if (numberOfCameras > 1 && frontCameraDeviceName != null) {
      cameraDeviceName = frontCameraDeviceName;
    }
    Log.d(TAG, "Opening camera: " + cameraDeviceName);
    videoCapturer = VideoCapturerAndroid.create(cameraDeviceName);
    if (videoCapturer == null) {
      reportError("Failed to open camera");
      return;
    }
    mediaStream.addTrack(createVideoTrack(videoCapturer));
  }

  mediaStream.addTrack(factory.createAudioTrack(
      AUDIO_TRACK_ID, factory.createAudioSource(audioConstraints)));
  peerConnection.addStream(mediaStream);

  Log.d(TAG, "Peer connection created.");
}
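// For reference: the List<IceServer> overload used above is the older API surface; it
// corresponds to wrapping the servers in an RTCConfiguration, as the first snippet does.
// A sketch of the equivalent call, reusing the field names from this snippet:
//
//   PeerConnection.RTCConfiguration rtcConfig =
//       new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
//   peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);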
/** Begin streaming to server - MUST run on VcThread. */
void doPublish(VideoStreamsView view) {
  if (mVideoCapturer != null) {
    return;
  }

  MediaConstraints videoConstraints = new MediaConstraints();
  videoConstraints.mandatory.add(
      new MediaConstraints.KeyValuePair("maxWidth", "320"));
  videoConstraints.mandatory.add(
      new MediaConstraints.KeyValuePair("maxHeight", "240"));
  videoConstraints.mandatory.add(
      new MediaConstraints.KeyValuePair("maxFrameRate", "10"));

  MediaConstraints audioConstraints = new MediaConstraints();
  audioConstraints.optional.add(
      new MediaConstraints.KeyValuePair("googEchoCancellation2", "true"));
  audioConstraints.optional.add(
      new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));

  lMS = sFactory.createLocalMediaStream("ARDAMS");

  if (videoConstraints != null) {
    mVideoCapturer = getVideoCapturer();
    mVideoSource = sFactory.createVideoSource(mVideoCapturer, videoConstraints);
    VideoTrack videoTrack = sFactory.createVideoTrack("ARDAMSv0", mVideoSource);
    lMS.addTrack(videoTrack);
  }
  if (audioConstraints != null) {
    AudioTrack audioTrack = sFactory.createAudioTrack(
        "ARDAMSa0", sFactory.createAudioSource(audioConstraints));
    lMS.addTrack(audioTrack);
    audioTrack.setEnabled(false);
  }

  StreamDescription stream =
      new StreamDescription("", false, true, true, false, null, mNick);
  MediaConstraints pcConstraints = makePcConstraints();
  MyPcObserver pcObs = new MyPcObserver(new LicodeSdpObserver(stream, true), stream);

  PeerConnection pc = sFactory.createPeerConnection(mIceServers, pcConstraints, pcObs);
  pc.addStream(lMS, new MediaConstraints());

  stream.setMedia(lMS);
  if (view != null) {
    stream.attachRenderer(new VideoCallbacks(view, VideoStreamsView.LOCAL_STREAM_ID));
  }
  stream.initLocal(pc, pcObs.getSdpObserver());
}
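// A hypothetical sketch of the makePcConstraints() helper referenced above (not shown in
// this snippet). Clients of this era commonly enabled DTLS-SRTP through an optional
// constraint; the exact contents here are an assumption.
private MediaConstraints makePcConstraints() {
  MediaConstraints pcConstraints = new MediaConstraints();
  pcConstraints.optional.add(
      new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
  return pcConstraints;
}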
@Override
protected void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);
  setContentView(R.layout.activity_video_chat);
  getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

  Bundle extras = getIntent().getExtras();
  if (extras == null || !extras.containsKey(Constants.ROBOT_NAME)) {
    Intent intent = new Intent(this, QBotActivity.class);
    startActivity(intent);
    Toast.makeText(
            this,
            "Need to pass robot_name to VideoChatActivity in intent extras (Constants.ROBOT_NAME).",
            Toast.LENGTH_SHORT)
        .show();
    finish();
    return;
  }
  this.robotname = extras.getString(Constants.ROBOT_NAME, "");

  this.mChatList = getListView();
  this.mChatEditText = (EditText) findViewById(R.id.chat_input);
  this.mCallStatus = (TextView) findViewById(R.id.call_status);

  // Set up the ListView for chatting.
  List<ChatMessage> ll = new LinkedList<ChatMessage>();
  mChatAdapter = new ChatAdapter(this, ll);
  mChatList.setAdapter(mChatAdapter);

  // First, initialize the PeerConnectionFactory with our application context and some options.
  PeerConnectionFactory.initializeAndroidGlobals(
      this,   // Context
      true,   // Audio Enabled
      true,   // Video Enabled
      true,   // Hardware Acceleration Enabled
      null);  // Render EGL Context
  PeerConnectionFactory pcFactory = new PeerConnectionFactory();
  this.pnRTCClient = new PnRTCClient(Constants.PUB_KEY, Constants.SUB_KEY, this.robotname);

  // Returns the number of cameras and the front/back-facing device names.
  int camNumber = VideoCapturerAndroid.getDeviceCount();
  String frontFacingCam = VideoCapturerAndroid.getNameOfFrontFacingDevice();
  String backFacingCam = VideoCapturerAndroid.getNameOfBackFacingDevice();

  // Creates a VideoCapturerAndroid instance for the device name.
  VideoCapturer capturer = VideoCapturerAndroid.create(frontFacingCam);

  // First create a VideoSource, then we can make a VideoTrack.
  localVideoSource = pcFactory.createVideoSource(capturer, this.pnRTCClient.videoConstraints());
  VideoTrack localVideoTrack = pcFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);

  // First we create an AudioSource, then we can create our AudioTrack.
  AudioSource audioSource = pcFactory.createAudioSource(this.pnRTCClient.audioConstraints());
  AudioTrack localAudioTrack = pcFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);

  // To create our VideoRenderer, we can use the included VideoRendererGui for simplicity.
  // First we need to set the GLSurfaceView that it should render to.
  this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);

  // Then we set that view, and pass a Runnable to run once the surface is ready.
  VideoRendererGui.setView(videoView, null);

  // Now that VideoRendererGui is ready, we can get our VideoRenderers.
  // IN THIS ORDER: creation order affects which renderer is on top.
  remoteRender = VideoRendererGui.create(
      0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
  localRender = VideoRendererGui.create(
      0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);

  // We start out with an empty MediaStream object, created with help from our
  // PeerConnectionFactory. Note that LOCAL_MEDIA_STREAM_ID can be any string.
  MediaStream mediaStream = pcFactory.createLocalMediaStream(LOCAL_MEDIA_STREAM_ID);

  // Now we can add our tracks.
  mediaStream.addTrack(localVideoTrack);
  mediaStream.addTrack(localAudioTrack);

  // First attach the RTC listener so that callback events will be triggered.
  this.pnRTCClient.attachRTCListener(new DemoRTCListener());

  // Then attach your local media stream to the PnRTCClient.
  // This will trigger the onLocalStream callback.
  this.pnRTCClient.attachLocalMediaStream(mediaStream);

  // Listen on a channel. This is your "phone number". Also set the max chat users.
  this.pnRTCClient.listenOn(robotname);
  this.pnRTCClient.setMaxConnections(1);

  // If the intent contains a user to dial, call them now that you are connected.
  // Otherwise, remain listening for a call.
  if (extras.containsKey(Constants.USER_NAME)) {
    String callUser = extras.getString(Constants.USER_NAME, "");
    connectToUser(callUser);
  }
}
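// A sketch of what the DemoRTCListener referenced above might do with the onLocalStream
// callback: render the first local video track into localRender on the UI thread. Only
// the attachRTCListener call appears in this source; the class body and override set
// below are assumptions.
private class DemoRTCListener extends PnRTCListener {
  @Override
  public void onLocalStream(final MediaStream localStream) {
    VideoChatActivity.this.runOnUiThread(new Runnable() {
      @Override
      public void run() {
        if (localStream.videoTracks.size() == 0) {
          return;
        }
        localStream.videoTracks.get(0).addRenderer(new VideoRenderer(localRender));
      }
    });
  }
}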
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
  factory = new PeerConnectionFactory();

  MediaConstraints pcConstraints = appRtcClient.pcConstraints();
  pcConstraints.optional.add(new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
  pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver);

  createDataChannelToRegressionTestBug2302(pc);  // See method comment.

  // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  // Logging.enableTracing(
  //     "logcat:",
  //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
  //     Logging.Severity.LS_SENSITIVE);

  {
    final PeerConnection finalPC = pc;
    final Runnable repeatedStatsLogger = new Runnable() {
      public void run() {
        synchronized (quit[0]) {
          if (quit[0]) {
            return;
          }
          final Runnable runnableThis = this;
          if (hudView.getVisibility() == View.INVISIBLE) {
            vsv.postDelayed(runnableThis, 1000);
            return;
          }
          boolean success = finalPC.getStats(new StatsObserver() {
            public void onComplete(final StatsReport[] reports) {
              runOnUiThread(new Runnable() {
                public void run() {
                  updateHUD(reports);
                }
              });
              for (StatsReport report : reports) {
                Log.d(TAG, "Stats: " + report.toString());
              }
              vsv.postDelayed(runnableThis, 1000);
            }
          }, null);
          if (!success) {
            throw new RuntimeException("getStats() returned false!");
          }
        }
      }
    };
    vsv.postDelayed(repeatedStatsLogger, 1000);
  }

  {
    logAndToast("Creating local video source...");
    MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
    if (appRtcClient.videoConstraints() != null) {
      VideoCapturer capturer = getVideoCapturer();
      videoSource = factory.createVideoSource(capturer, appRtcClient.videoConstraints());
      VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);
      videoTrack.addRenderer(new VideoRenderer(localRender));
      lMS.addTrack(videoTrack);
    }
    if (appRtcClient.audioConstraints() != null) {
      lMS.addTrack(factory.createAudioTrack(
          "ARDAMSa0", factory.createAudioSource(appRtcClient.audioConstraints())));
    }
    pc.addStream(lMS, new MediaConstraints());
  }
  logAndToast("Waiting for ICE candidates...");
}
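// A sketch of the getVideoCapturer() helper referenced above, in the style of the
// AppRTCDemo of this era: probe generated device names until one opens. The exact name
// strings and the fallback behavior are assumptions, not taken from this snippet.
private VideoCapturer getVideoCapturer() {
  String[] cameraFacing = { "front", "back" };
  int[] cameraIndex = { 0, 1 };
  int[] cameraOrientation = { 0, 90, 180, 270 };
  for (String facing : cameraFacing) {
    for (int index : cameraIndex) {
      for (int orientation : cameraOrientation) {
        String name = "Camera " + index + ", Facing " + facing
            + ", Orientation " + orientation;
        VideoCapturer capturer = VideoCapturer.create(name);
        if (capturer != null) {
          return capturer;
        }
      }
    }
  }
  throw new RuntimeException("Failed to open capturer");
}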
private void createPeerConnectionInternal(EglBase.Context renderEGLContext) {
  if (factory == null || isError) {
    Log.e(TAG, "Peerconnection factory is not created");
    return;
  }
  Log.d(TAG, "Create peer connection.");
  Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
  queuedRemoteCandidates = new LinkedList<IceCandidate>();

  if (videoCallEnabled) {
    Log.d(TAG, "EGLContext: " + renderEGLContext);
    factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
  }

  PeerConnection.RTCConfiguration rtcConfig =
      new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
  // TCP candidates are only useful when connecting to a server that supports
  // ICE-TCP.
  rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
  rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
  rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
  rtcConfig.continualGatheringPolicy =
      PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
  // Use ECDSA encryption.
  rtcConfig.keyType = PeerConnection.KeyType.ECDSA;

  peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
  isInitiator = false;

  // Set default WebRTC tracing and INFO libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
  Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);

  mediaStream = factory.createLocalMediaStream("ARDAMS");
  if (videoCallEnabled) {
    if (peerConnectionParameters.useCamera2) {
      if (!peerConnectionParameters.captureToTexture) {
        reportError(context.getString(R.string.camera2_texture_only_error));
        return;
      }
      Logging.d(TAG, "Creating capturer using camera2 API.");
      createCapturer(new Camera2Enumerator(context));
    } else {
      Logging.d(TAG, "Creating capturer using camera1 API.");
      createCapturer(new Camera1Enumerator(peerConnectionParameters.captureToTexture));
    }
    if (videoCapturer == null) {
      reportError("Failed to open camera");
      return;
    }
    mediaStream.addTrack(createVideoTrack(videoCapturer));
  }
  mediaStream.addTrack(createAudioTrack());
  peerConnection.addStream(mediaStream);

  if (peerConnectionParameters.aecDump) {
    try {
      aecDumpFileDescriptor = ParcelFileDescriptor.open(
          new File(Environment.getExternalStorageDirectory().getPath()
              + File.separator + "Download/audio.aecdump"),
          ParcelFileDescriptor.MODE_READ_WRITE
              | ParcelFileDescriptor.MODE_CREATE
              | ParcelFileDescriptor.MODE_TRUNCATE);
      factory.startAecDump(aecDumpFileDescriptor.getFd(), -1);
    } catch (IOException e) {
      Log.e(TAG, "Can not open aecdump file", e);
    }
  }

  Log.d(TAG, "Peer connection created.");
}
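// A sketch of the createCapturer(CameraEnumerator) helper referenced above, assuming it
// follows the usual enumerator pattern: prefer a front-facing device, then fall back to
// any other. The method body is an assumption; only the call sites appear in this source.
private void createCapturer(CameraEnumerator enumerator) {
  final String[] deviceNames = enumerator.getDeviceNames();
  // Try to find a front-facing camera first.
  for (String deviceName : deviceNames) {
    if (enumerator.isFrontFacing(deviceName)) {
      videoCapturer = enumerator.createCapturer(deviceName, null);
      if (videoCapturer != null) {
        return;
      }
    }
  }
  // No front-facing camera found or it failed to open; try the remaining devices.
  for (String deviceName : deviceNames) {
    if (!enumerator.isFrontFacing(deviceName)) {
      videoCapturer = enumerator.createCapturer(deviceName, null);
      if (videoCapturer != null) {
        return;
      }
    }
  }
}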