private void doLoopbackTest(PeerConnectionParameters parameters, boolean decodeToTexture)
    throws InterruptedException {
  loopback = true;
  MockRenderer localRenderer = null;
  MockRenderer remoteRenderer = null;
  if (parameters.videoCallEnabled) {
    Log.d(TAG, "testLoopback for video " + parameters.videoCodec);
    localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
  } else {
    Log.d(TAG, "testLoopback for audio.");
  }
  pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters,
      decodeToTexture ? eglBase.getEglBaseContext() : null);

  // Wait for local SDP, rename it to answer and set as remote SDP.
  assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
  SessionDescription remoteSdp = new SessionDescription(
      SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
  pcClient.setRemoteDescription(remoteSdp);

  // Wait for ICE connection.
  assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));
  if (parameters.videoCallEnabled) {
    // Check that local and remote video frames were rendered.
    assertTrue("Local video frames were not rendered.",
        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue("Remote video frames were not rendered.",
        remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
  } else {
    // For an audio-only call just sleep for 1 second.
    // TODO(glaznev): check how we can detect that remote audio was rendered.
    Thread.sleep(AUDIO_RUN_TIMEOUT);
  }

  pcClient.close();
  assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
  Log.d(TAG, "testLoopback done.");
}
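// A minimal sketch (not part of the original file) of how a concrete test is expected to drive
// doLoopbackTest: build the call parameters, guard on the SDK level when decoding to textures,
// and delegate. The test name below is hypothetical; createParametersForVideoCall and
// VIDEO_CODEC_VP8 are assumed from the surrounding test class, as used in the test further down.
@SmallTest
public void testLoopbackVp8DecodeToTextureSketch() throws InterruptedException {
  if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
    Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19.");
    return;
  }
  // Video loopback with VP8, capturing to and decoding to textures.
  doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true /* decodeToTexture */);
}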
// Test that a call can be set up even if the EGL context used during initialization is
// released before the video codecs are created. The HW encoder and decoder are set up to
// use textures.
@SmallTest
public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException {
  if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
    Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19.");
    return;
  }

  loopback = true;
  PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true);
  MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
  MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
  pcClient = createPeerConnectionClient(
      localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext());

  // Wait for local SDP, rename it to answer and set as remote SDP.
  assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));

  // Release the EGL context used for creating the PeerConnectionClient.
  // Since createPeerConnectionClient is asynchronous, we must first wait for the local
  // SessionDescription before releasing the context.
  eglBase.release();
  eglBase = null;

  SessionDescription remoteSdp = new SessionDescription(
      SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
  pcClient.setRemoteDescription(remoteSdp);

  // Wait for ICE connection.
  assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

  // Check that local and remote video frames were rendered.
  assertTrue("Local video frames were not rendered.",
      localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
  assertTrue("Remote video frames were not rendered.",
      remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));

  pcClient.close();
  assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
  Log.d(TAG, "testLoopback done.");
}
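// Sketch of a possible helper (hypothetical, not in the original file): both tests above turn
// the locally generated offer into an "answer" and feed it straight back as the remote
// description, which is what makes the loopback work. Extracting that duplicated step would
// look roughly like this:
private void setLocalOfferAsRemoteAnswer() {
  // Reuse the local offer's SDP verbatim, but relabel it as an answer.
  SessionDescription remoteSdp = new SessionDescription(
      SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
  pcClient.setRemoteDescription(remoteSdp);
}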