@Test
  public void testWebRtcLoopback() throws InterruptedException {

    // Build a pipeline with a single WebRTC endpoint looped back onto itself
    MediaPipeline pipeline = kurentoClient.createMediaPipeline();
    WebRtcEndpoint loopbackEp = new WebRtcEndpoint.Builder(pipeline).build();
    loopbackEp.connect(loopbackEp);

    // Negotiate WebRTC (send & receive) and block until the remote video starts playing
    getPage().subscribeEvents("playing");
    getPage().initWebRtc(loopbackEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);
    boolean playingReceived = getPage().waitForEvent("playing");
    Assert.assertTrue("Not received media (timeout waiting playing event)", playingReceived);

    // Let the media play for the configured guard time
    waitSeconds(PLAYTIME);

    // Verify reported play time and rendered color
    double playedTime = getPage().getCurrentTime();
    Assert.assertTrue(
        "Error in play time (expected: " + PLAYTIME + " sec, real: " + playedTime + " sec)",
        getPage().compare(PLAYTIME, playedTime));
    Assert.assertTrue(
        "The color of the video should be green", getPage().similarColor(CHROME_VIDEOTEST_COLOR));

    // Tear down the media pipeline
    pipeline.release();
  }
// Example #2
  /**
   * Closes this room: shuts down the executor, closes and removes every participant, and
   * asynchronously releases the media pipeline (if one was created). Idempotent: a second call only
   * logs a warning.
   */
  @Override
  public void close() {

    if (!closed) {

      executor.shutdown();

      // Close every participant before clearing the registry
      for (final RoomParticipant user : participants.values()) {
        user.close();
      }

      participants.clear();

      if (pipeline != null) {
        // Asynchronous release: we only log the outcome, the room is considered closed either way
        pipeline.release(
            new Continuation<Void>() {

              @Override
              public void onSuccess(Void result) throws Exception {
                log.trace("ROOM {}: Released Pipeline", Room.this.name);
              }

              @Override
              public void onError(Throwable cause) throws Exception {
                // Fixed label (was "PARTICIPANT") and switched to parameterized logging;
                // the throwable as last argument makes SLF4J print the stack trace.
                log.warn("ROOM {}: Could not release Pipeline", Room.this.name, cause);
              }
            });
      }

      log.debug("Room {} closed", this.name);

      this.closed = true;
    } else {
      log.warn("Attempt to close an already closed room {}", this.name);
    }
  }
  /**
   * Records a sequence of three WebRTC senders (red, green, blue) switched one after another into a
   * single {@link RecorderEndpoint}, then verifies the resulting file.
   *
   * <p>Flow: each browser page starts a SEND_ONLY WebRTC session; the recorder input is re-connected
   * from red to green to blue, playing each source for roughly {@code PLAYTIME / N_PLAYER} seconds.
   * The WebRTC connection setup time of each leg is measured and added to the expected total
   * playtime before the recording is checked (colors, duration, codecs).
   *
   * @param mediaProfileSpecType media profile used by the recorder (e.g. WEBM, MP4)
   * @param expectedVideoCodec video codec expected in the recorded file
   * @param expectedAudioCodec audio codec expected in the recorded file
   * @param extension file extension used to build the recording URL
   * @throws Exception on any test-infrastructure failure
   */
  public void doTest(
      MediaProfileSpecType mediaProfileSpecType,
      String expectedVideoCodec,
      String expectedAudioCodec,
      String extension)
      throws Exception {
    // Media Pipeline #1: three senders plus one recorder in the same pipeline
    MediaPipeline mp = kurentoClient.createMediaPipeline();
    WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
    WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
    WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();

    String recordingFile = getRecordUrl(extension);
    RecorderEndpoint recorderEp =
        new RecorderEndpoint.Builder(mp, recordingFile)
            .withMediaProfile(mediaProfileSpecType)
            .build();

    // Test execution: negotiate all three SEND_ONLY WebRTC sessions up front
    getPage(BROWSER1).subscribeLocalEvents("playing");
    long startWebrtc = System.currentTimeMillis();
    getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

    getPage(BROWSER2).subscribeLocalEvents("playing");
    getPage(BROWSER2)
        .initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

    getPage(BROWSER3).subscribeLocalEvents("playing");
    getPage(BROWSER3).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

    // red: first source feeding the recorder
    webRtcEpRed.connect(recorderEp);
    recorderEp.record();

    Assert.assertTrue(
        "Not received media (timeout waiting playing event)",
        getPage(BROWSER1).waitForEvent("playing"));
    // Time spent establishing the red leg; added to the expected playtime below
    long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
    Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

    startWebrtc = System.currentTimeMillis();

    // green: re-connect the recorder input to the second source
    webRtcEpGreen.connect(recorderEp);

    Assert.assertTrue(
        "Not received media (timeout waiting playing event)",
        getPage(BROWSER2).waitForEvent("playing"));
    long webrtcGreenConnectionTime = System.currentTimeMillis() - startWebrtc;
    Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

    startWebrtc = System.currentTimeMillis();

    // blue: re-connect the recorder input to the third source
    webRtcEpBlue.connect(recorderEp);

    Assert.assertTrue(
        "Not received media (timeout waiting playing event)",
        getPage(BROWSER3).waitForEvent("playing"));
    long webrtcBlueConnectionTime = System.currentTimeMillis() - startWebrtc;
    Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

    // Release Media Pipeline #1 (dump pipeline topology first, then stop the recorder
    // synchronously via a latch so the file is fully written before checking it)
    saveGstreamerDot(mp);
    final CountDownLatch recorderLatch = new CountDownLatch(1);
    recorderEp.stopAndWait(
        new Continuation<Void>() {

          @Override
          public void onSuccess(Void result) throws Exception {
            recorderLatch.countDown();
          }

          @Override
          public void onError(Throwable cause) throws Exception {
            recorderLatch.countDown();
          }
        });

    Assert.assertTrue(
        "Not stop properly", recorderLatch.await(getPage(BROWSER3).getTimeout(), TimeUnit.SECONDS));
    mp.release();

    // Reloading browser
    getPage(BROWSER3).close();

    // Expected total duration = nominal playtime + the three connection setup times
    long playtime =
        PLAYTIME
            + TimeUnit.MILLISECONDS.toSeconds(
                webrtcRedConnectionTime + webrtcGreenConnectionTime + webrtcBlueConnectionTime);

    checkRecordingFile(
        recordingFile, BROWSER4, EXPECTED_COLORS, playtime, expectedVideoCodec, expectedAudioCodec);
    success = true;
  }
  /**
   * Records a long-running audio-only WebRTC stream and asserts the recorded file's duration.
   *
   * <p>Flow: a single SEND_ONLY audio WebRTC session feeds a {@link RecorderEndpoint} for a
   * configurable duration ({@code TEST_DURATION_PROPERTY}); the recorder is then stopped
   * synchronously and the file duration is checked against the test duration within
   * {@code THRESHOLD_MS}. A pipeline error listener fails the test if an {@code ErrorEvent} fires.
   *
   * @param mediaProfileSpecType media profile used by the recorder
   * @param expectedAudioCodec audio codec expected in the recorded file (unused here; kept for
   *     signature parity with sibling tests — TODO confirm)
   * @param extension file extension used to build the recording URL
   * @throws Exception on any test-infrastructure failure
   */
  public void doTest(
      final MediaProfileSpecType mediaProfileSpecType,
      String expectedAudioCodec,
      final String extension)
      throws Exception {

    // Recording duration, overridable via system property
    long testDurationMillis =
        PropertiesManager.getProperty(TEST_DURATION_PROPERTY, DEFAULT_TEST_DURATION);

    MediaPipeline mp = kurentoClient.createMediaPipeline();

    // Latch trips (count goes 1 -> 0) if the pipeline reports any error
    final CountDownLatch errorPipelinelatch = new CountDownLatch(1);

    mp.addErrorListener(
        new EventListener<ErrorEvent>() {

          @Override
          public void onEvent(ErrorEvent event) {
            msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
            log.error(msgError);
            errorPipelinelatch.countDown();
          }
        });
    final WebRtcEndpoint webRtcSender = new WebRtcEndpoint.Builder(mp).build();

    // WebRTC sender negotiation (audio only, send only)
    getPage().subscribeLocalEvents("playing");
    getPage().initWebRtc(webRtcSender, WebRtcChannel.AUDIO_ONLY, WebRtcMode.SEND_ONLY);
    Assert.assertTrue("Not received media in sender webrtc", getPage().waitForEvent("playing"));

    // Recorder
    String recordingFile = getRecordUrl(extension);
    RecorderEndpoint recorder =
        new RecorderEndpoint.Builder(mp, recordingFile)
            .withMediaProfile(mediaProfileSpecType)
            .build();
    webRtcSender.connect(recorder);

    // Start recorder
    recorder.record();

    // Wait recording time
    Thread.sleep(testDurationMillis);

    // Stop recorder synchronously via a latch so the file is complete before assertions
    final CountDownLatch recorderLatch = new CountDownLatch(1);
    recorder.stopAndWait(
        new Continuation<Void>() {

          @Override
          public void onSuccess(Void result) throws Exception {
            recorderLatch.countDown();
          }

          @Override
          public void onError(Throwable cause) throws Exception {
            recorderLatch.countDown();
          }
        });

    // Release Media Pipeline
    Assert.assertTrue(
        "Not stop properly", recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
    if (mp != null) {
      mp.release();
    }

    // Count still 1 means no ErrorEvent was received during the test
    Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

    waitForFileExists(recordingFile);

    // Assessments
    AssertMedia.assertDuration(recordingFile, testDurationMillis, THRESHOLD_MS);
  }