@Test
public void testWebRtcLoopback() throws InterruptedException {
  // Media Pipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(mp).build();
  webRtcEndpoint.connect(webRtcEndpoint);

  // Start WebRTC and wait for playing event
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEndpoint, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));

  // Guard time to play the video
  waitSeconds(PLAYTIME);

  // Assertions
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue("Error in play time (expected: " + PLAYTIME + " sec, real: " + currentTime
      + " sec)", getPage().compare(PLAYTIME, currentTime));
  Assert.assertTrue("The color of the video should be green",
      getPage().similarColor(CHROME_VIDEOTEST_COLOR));

  // Release Media Pipeline
  mp.release();
}
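The guard time above relies on waitSeconds(), a helper of the test framework whose implementation is not shown here. A minimal sketch, assuming it is a plain sleep wrapper:

// Hypothetical sketch of the framework helper: sleeps for the given number of seconds.
private void waitSeconds(long seconds) throws InterruptedException {
  Thread.sleep(TimeUnit.SECONDS.toMillis(seconds));
}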
private void start(final WebSocketSession session, JsonObject jsonMessage) {
  try {
    // Media Logic (Media Pipeline and Elements)
    UserSession user = new UserSession();
    MediaPipeline pipeline = kurento.createMediaPipeline();
    user.setMediaPipeline(pipeline);
    WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(pipeline).build();
    user.setWebRtcEndpoint(webRtcEndpoint);
    users.put(session.getId(), user);

    webRtcEndpoint.addOnIceCandidateListener(new EventListener<OnIceCandidateEvent>() {

      @Override
      public void onEvent(OnIceCandidateEvent event) {
        JsonObject response = new JsonObject();
        response.addProperty("id", "iceCandidate");
        response.add("candidate", JsonUtils.toJsonObject(event.getCandidate()));
        try {
          synchronized (session) {
            session.sendMessage(new TextMessage(response.toString()));
          }
        } catch (IOException e) {
          log.debug(e.getMessage());
        }
      }
    });

    mouth = new NuboMouthDetector.Builder(pipeline).build();
    webRtcEndpoint.connect(mouth);
    mouth.connect(webRtcEndpoint);

    // SDP negotiation (offer and answer)
    String sdpOffer = jsonMessage.get("sdpOffer").getAsString();
    String sdpAnswer = webRtcEndpoint.processOffer(sdpOffer);

    // Sending response back to client
    JsonObject response = new JsonObject();
    response.addProperty("id", "startResponse");
    response.addProperty("sdpAnswer", sdpAnswer);
    synchronized (session) {
      session.sendMessage(new TextMessage(response.toString()));
    }
    webRtcEndpoint.gatherCandidates();
  } catch (Throwable t) {
    sendError(session, t.getMessage());
  }
}
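The catch block delegates to sendError(), which is not part of the snippet. A minimal sketch of such a helper, assuming the client listens for messages with an "error" id (the message id and payload format are assumptions):

// Hypothetical helper: reports an error to the client over the same WebSocket
// session. The "error" message id is an assumption, not confirmed by the snippet.
private void sendError(WebSocketSession session, String message) {
  JsonObject response = new JsonObject();
  response.addProperty("id", "error");
  response.addProperty("message", message);
  try {
    synchronized (session) {
      session.sendMessage(new TextMessage(response.toString()));
    }
  } catch (IOException e) {
    log.error("Exception sending error message", e);
  }
}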
private void start(final WebSocketSession session, JsonObject jsonMessage) {
  try {
    String sessionId = session.getId();
    UserSession user = new UserSession(sessionId);
    users.put(sessionId, user);
    webRtcEndpoint = user.getWebRtcEndpoint();

    // Ice Candidate
    webRtcEndpoint.addOnIceCandidateListener(new EventListener<OnIceCandidateEvent>() {

      @Override
      public void onEvent(OnIceCandidateEvent event) {
        JsonObject response = new JsonObject();
        response.addProperty("id", "iceCandidate");
        response.add("candidate", JsonUtils.toJsonObject(event.getCandidate()));
        sendMessage(session, new TextMessage(response.toString()));
      }
    });

    face = new NuboFaceDetector.Builder(user.getMediaPipeline()).build();
    face.activateServerEvents(1, 3000);
    addFaceListener();
    webRtcEndpoint.connect(face);
    face.connect(webRtcEndpoint);

    // SDP negotiation (offer and answer)
    String sdpOffer = jsonMessage.get("sdpOffer").getAsString();
    String sdpAnswer = webRtcEndpoint.processOffer(sdpOffer);

    // Sending response back to client
    JsonObject response = new JsonObject();
    response.addProperty("id", "startResponse");
    response.addProperty("sdpAnswer", sdpAnswer);
    synchronized (session) {
      sendMessage(session, new TextMessage(response.toString()));
    }
    webRtcEndpoint.gatherCandidates();
  } catch (NotEnoughResourcesException e) {
    log.warn("Not enough resources", e);
    notEnoughResources(session);
  } catch (Throwable t) {
    log.error("Exception starting session", t);
    error(session, t.getClass().getSimpleName() + ": " + t.getMessage());
  }
}
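Unlike the previous example, this version routes all outgoing traffic through a sendMessage() helper, which is not shown. A minimal sketch, assuming it exists to serialize concurrent writes (ICE candidates arrive on media-server threads while the SDP answer is sent from the WebSocket thread):

// Hypothetical helper: funnels all writes through one synchronized path so
// concurrent callbacks cannot interleave frames on the WebSocket session.
private void sendMessage(WebSocketSession session, TextMessage message) {
  try {
    synchronized (session) {
      session.sendMessage(message);
    }
  } catch (IOException e) {
    log.error("Exception sending message", e);
  }
}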
private void getStats(WebSocketSession session) {
  try {
    Map<String, Stats> wr_stats = webRtcEndpoint.getStats();
    for (Stats s : wr_stats.values()) {
      switch (s.getType()) {
        case endpoint:
          EndpointStats end_stats = (EndpointStats) s;
          // getVideoE2ELatency() reports nanoseconds; convert to milliseconds
          double e2eVideoLatency = end_stats.getVideoE2ELatency() / 1000000;
          JsonObject response = new JsonObject();
          response.addProperty("id", "videoE2Elatency");
          response.addProperty("message", e2eVideoLatency);
          sendMessage(session, new TextMessage(response.toString()));
          break;
        default:
          break;
      }
    }
  } catch (Throwable t) {
    log.error("Exception getting stats...", t);
  }
}
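A single stats snapshot is rarely useful on its own; latency figures are normally sampled periodically. A minimal sketch of a poller built on the getStats() method above, assuming a one-second period (the interval and executor setup are arbitrary choices, and java.util.concurrent imports are required):

// Hypothetical polling loop around the getStats(session) method above.
private final ScheduledExecutorService statsExecutor =
    Executors.newSingleThreadScheduledExecutor();

private void startStatsPolling(final WebSocketSession session) {
  statsExecutor.scheduleAtFixedRate(new Runnable() {
    @Override
    public void run() {
      getStats(session);
    }
  }, 1, 1, TimeUnit.SECONDS);
}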
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {

  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  getPage(BROWSER2).subscribeLocalEvents("playing");
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  getPage(BROWSER3).subscribeLocalEvents("playing");
  getPage(BROWSER3).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  webRtcEpRed.connect(recorderEp);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  startWebrtc = System.currentTimeMillis();

  // green
  webRtcEpGreen.connect(recorderEp);
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  long webrtcGreenConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  startWebrtc = System.currentTimeMillis();

  // blue
  webRtcEpBlue.connect(recorderEp);
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER3).waitForEvent("playing"));
  long webrtcBlueConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);

  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER3).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Reloading browser
  getPage(BROWSER3).close();

  long playtime = PLAYTIME + TimeUnit.MILLISECONDS
      .toSeconds(webrtcRedConnectionTime + webrtcGreenConnectionTime + webrtcBlueConnectionTime);

  checkRecordingFile(recordingFile, BROWSER4, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
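saveGstreamerDot() is a helper of the test framework and its implementation is not shown. A minimal sketch, assuming the standard MediaPipeline.getGstreamerDot() call and a hypothetical output path under /tmp:

// Hypothetical sketch: dumps the pipeline topology as a GraphViz DOT file for
// debugging. getGstreamerDot() is standard Kurento client API; the output path
// and filename scheme are assumptions.
private void saveGstreamerDot(MediaPipeline pipeline) {
  try {
    String fileName = pipeline.getId().replaceAll("[^\\w]", "_") + ".dot";
    Files.write(Paths.get("/tmp", fileName),
        pipeline.getGstreamerDot().getBytes(StandardCharsets.UTF_8));
  } catch (IOException e) {
    log.warn("Could not save GStreamer DOT file", e);
  }
}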
public void doTest(final MediaProfileSpecType mediaProfileSpecType, String expectedAudioCodec,
    final String extension) throws Exception {

  long testDurationMillis =
      PropertiesManager.getProperty(TEST_DURATION_PROPERTY, DEFAULT_TEST_DURATION);

  MediaPipeline mp = kurentoClient.createMediaPipeline();

  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);
  mp.addErrorListener(new EventListener<ErrorEvent>() {

    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      log.error(msgError);
      errorPipelinelatch.countDown();
    }
  });

  final WebRtcEndpoint webRtcSender = new WebRtcEndpoint.Builder(mp).build();

  // WebRTC sender negotiation
  getPage().subscribeLocalEvents("playing");
  getPage().initWebRtc(webRtcSender, WebRtcChannel.AUDIO_ONLY, WebRtcMode.SEND_ONLY);
  Assert.assertTrue("Not received media in sender webrtc", getPage().waitForEvent("playing"));

  // Recorder
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorder = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();
  webRtcSender.connect(recorder);

  // Start recorder
  recorder.record();

  // Wait recording time
  Thread.sleep(testDurationMillis);

  // Stop recorder
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorder.stopAndWait(new Continuation<Void>() {

    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  // Release Media Pipeline
  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
  if (mp != null) {
    mp.release();
  }

  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  waitForFileExists(recordingFile);

  // Assessments
  AssertMedia.assertDuration(recordingFile, testDurationMillis, THRESHOLD_MS);
}
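waitForFileExists() is also a framework helper whose body is not shown. Because the RecorderEndpoint flushes the recording to disk asynchronously, the test must wait for the file to appear before asserting its duration. A minimal sketch, assuming a file:// recording URL and arbitrary timeout and poll interval:

// Hypothetical sketch: polls until the recording appears on disk. The 30-second
// timeout, 100 ms poll interval, and file:// URL handling are assumptions.
private void waitForFileExists(String recordingFile) throws InterruptedException {
  File file = new File(recordingFile.replace("file://", ""));
  long deadline = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(30);
  while (!file.exists() && System.currentTimeMillis() < deadline) {
    Thread.sleep(100);
  }
  Assert.assertTrue("Recording file not found: " + recordingFile, file.exists());
}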