Example #1
 // Handles a signaling message delivered from the JS side: ICE candidates are queued or
 // applied, offers/answers become the remote description, and "bye" tears down the call.
 @JavascriptInterface
 public void onMessage(String data) {
   try {
     JSONObject json = new JSONObject(data);
     String type = (String) json.get("type");
     if (type.equals("candidate")) {
       IceCandidate candidate =
           new IceCandidate(
               (String) json.get("id"), json.getInt("label"), (String) json.get("candidate"));
       if (queuedRemoteCandidates != null) {
         queuedRemoteCandidates.add(candidate);
       } else {
         pc.addIceCandidate(candidate);
       }
     } else if (type.equals("answer") || type.equals("offer")) {
       SessionDescription sdp =
           new SessionDescription(
               SessionDescription.Type.fromCanonicalForm(type),
               preferISAC((String) json.get("sdp")));
       pc.setRemoteDescription(sdpObserver, sdp);
     } else if (type.equals("bye")) {
       logAndToast("Remote end hung up; dropping PeerConnection");
       disconnectAndExit();
     } else {
       throw new RuntimeException("Unexpected message: " + data);
     }
   } catch (JSONException e) {
     throw new RuntimeException(e);
   }
 }
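For reference, a minimal sketch of the sender side that would produce the messages this handler accepts. The JSON keys mirror the parsing above; the sendMessage transport helper is a hypothetical stand-in, not part of the original example.

 // Hypothetical sender-side counterparts; only the JSON keys are taken from the handler above.
 private void sendCandidate(IceCandidate candidate) throws JSONException {
   JSONObject json = new JSONObject();
   json.put("type", "candidate");
   json.put("label", candidate.sdpMLineIndex);
   json.put("id", candidate.sdpMid);
   json.put("candidate", candidate.sdp);
   sendMessage(json.toString()); // hypothetical transport call
 }

 private void sendSessionDescription(SessionDescription sdp) throws JSONException {
   JSONObject json = new JSONObject();
   json.put("type", sdp.type.canonicalForm()); // "offer" or "answer"
   json.put("sdp", sdp.description);
   sendMessage(json.toString()); // hypothetical transport call
 }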
Example #2
 // Applies the remote peer's SDP from the signaling payload as the remote description.
 public void execute(String peerId, JSONObject payload) throws JSONException {
   Log.d("SRSAction", "SetRemoteSDPAction");
   PnPeer peer = peers.get(peerId);
   SessionDescription sdp =
       new SessionDescription(
           SessionDescription.Type.fromCanonicalForm(payload.getString("type")),
           payload.getString("sdp"));
   peer.pc.setRemoteDescription(peer, sdp);
 }
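In this example the peer itself is passed as the observer to setRemoteDescription, so PnPeer presumably implements org.webrtc.SdpObserver. A minimal sketch of that assumed shape (the class skeleton and log messages are illustrative only):

 // Assumed skeleton: PnPeer acting as the SdpObserver handed to setRemoteDescription.
 public class PnPeer implements SdpObserver {
   @Override
   public void onSetSuccess() {
     Log.d("PnPeer", "Remote description applied.");
   }

   @Override
   public void onSetFailure(String error) {
     Log.e("PnPeer", "setRemoteDescription failed: " + error);
   }

   @Override
   public void onCreateSuccess(SessionDescription sdp) {
     // Used when this peer also creates an offer or answer (see the CreateAnswerAction example below).
   }

   @Override
   public void onCreateFailure(String error) {}
 }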
Example #3
  // Checks if the capture format can be changed on the fly and the decoder can be reset properly.
  @SmallTest
  public void testCaptureFormatChange() throws InterruptedException {
    Log.d(TAG, "testCaptureFormatChange");
    loopback = true;

    MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

    pcClient =
        createPeerConnectionClient(
            localRenderer,
            remoteRenderer,
            createParametersForVideoCall(VIDEO_CODEC_VP8, false),
            null);

    // Wait for the local SDP, relabel it as an answer, and set it as the remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    SessionDescription remoteSdp =
        new SessionDescription(
            SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

    // Check that local and remote video frames were rendered.
    assertTrue(
        "Local video frames were not rendered before camera resolution change.",
        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue(
        "Remote video frames were not rendered before camera resolution change.",
        remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));

    // Change capture output format a few times.
    for (int i = 0; i < 2 * CAPTURE_FORMAT_CHANGE_ATTEMPTS; i++) {
      if (i % 2 == 0) {
        pcClient.changeCaptureFormat(WIDTH_VGA, HEIGHT_VGA, MAX_VIDEO_FPS);
      } else {
        pcClient.changeCaptureFormat(WIDTH_QVGA, HEIGHT_QVGA, MAX_VIDEO_FPS);
      }

      // Reset the video renderers and check that local and remote video frames
      // were rendered after the capture format change.
      localRenderer.reset(EXPECTED_VIDEO_FRAMES);
      remoteRenderer.reset(EXPECTED_VIDEO_FRAMES);
      assertTrue(
          "Local video frames were not rendered after capture format change.",
          localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
      assertTrue(
          "Remote video frames were not rendered after capture format change.",
          remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    }

    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testCaptureFormatChange done.");
  }
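The test above only exercises pcClient.changeCaptureFormat; the client-side implementation is not shown. A plausible sketch, assuming the client holds an org.webrtc.CameraVideoCapturer and a single-threaded executor (both assumptions, not the project's actual fields):

  // Sketch (assumed fields: executor, videoCapturer): forward the format change to the capturer.
  public void changeCaptureFormat(final int width, final int height, final int framerate) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (videoCapturer != null) {
          // org.webrtc.CameraVideoCapturer supports changing the capture format on the fly.
          videoCapturer.changeCaptureFormat(width, height, framerate);
        }
      }
    });
  }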
Example #4
  // Checks if the video source can be restarted - simulates the app going to the
  // background and coming back to the foreground.
  @SmallTest
  public void testVideoSourceRestart() throws InterruptedException {
    Log.d(TAG, "testVideoSourceRestart");
    loopback = true;

    MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

    pcClient =
        createPeerConnectionClient(
            localRenderer,
            remoteRenderer,
            createParametersForVideoCall(VIDEO_CODEC_VP8, false),
            null);

    // Wait for the local SDP, relabel it as an answer, and set it as the remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    SessionDescription remoteSdp =
        new SessionDescription(
            SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

    // Check that local and remote video frames were rendered.
    assertTrue(
        "Local video frames were not rendered before video restart.",
        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue(
        "Remote video frames were not rendered before video restart.",
        remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));

    // Stop and then start video source a few times.
    for (int i = 0; i < VIDEO_RESTART_ATTEMPTS; i++) {
      pcClient.stopVideoSource();
      Thread.sleep(VIDEO_RESTART_TIMEOUT);
      pcClient.startVideoSource();

      // Reset the video renderers and check that local and remote video frames
      // were rendered after the video restart.
      localRenderer.reset(EXPECTED_VIDEO_FRAMES);
      remoteRenderer.reset(EXPECTED_VIDEO_FRAMES);
      assertTrue(
          "Local video frames were not rendered after video restart.",
          localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
      assertTrue(
          "Remote video frames were not rendered after video restart.",
          remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    }
    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testVideoSourceRestart done.");
  }
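As with the previous test, stopVideoSource/startVideoSource are exercised here but not shown. A sketch of what they might delegate to, assuming an org.webrtc.VideoCapturer plus remembered capture parameters (hypothetical fields):

  // Sketch (assumed fields: videoCapturer, videoWidth, videoHeight, videoFps).
  public void stopVideoSource() {
    try {
      videoCapturer.stopCapture(); // pauses capture while the app is in the background
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }

  public void startVideoSource() {
    videoCapturer.startCapture(videoWidth, videoHeight, videoFps); // resumes with the last format
  }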
Example #5
 // Marks the peer as connected, applies the remote offer, and creates an answer for it.
 public void execute(String peerId, JSONObject payload) throws JSONException {
   Log.d("CAAction", "CreateAnswerAction");
   PnPeer peer = peers.get(peerId);
   peer.setType(PnPeer.TYPE_OFFER);
   peer.setStatus(PnPeer.STATUS_CONNECTED);
   SessionDescription sdp =
       new SessionDescription(
           SessionDescription.Type.fromCanonicalForm(payload.getString("type")),
           payload.getString("sdp"));
   peer.pc.setRemoteDescription(peer, sdp);
   peer.pc.createAnswer(peer, signalingParams.pcConstraints);
 }
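createAnswer here also uses the peer as its SdpObserver, so the answer is completed asynchronously. A sketch of the likely follow-up in onCreateSuccess; the transmit() helper that returns the answer over the signaling channel is hypothetical:

 // Sketch of the assumed continuation inside PnPeer (the SdpObserver for createAnswer).
 @Override
 public void onCreateSuccess(SessionDescription sdp) {
   pc.setLocalDescription(this, sdp);
   try {
     JSONObject json = new JSONObject();
     json.put("type", sdp.type.canonicalForm()); // "answer"
     json.put("sdp", sdp.description);
     transmit(json); // hypothetical: send the answer back to the remote peer
   } catch (JSONException e) {
     Log.e("PnPeer", "Failed to serialize answer", e);
   }
 }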
Example #6
  // Checks if the default front camera can be switched to the back camera and then
  // back to the front camera.
  @SmallTest
  public void testCameraSwitch() throws InterruptedException {
    Log.d(TAG, "testCameraSwitch");
    loopback = true;

    MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

    pcClient =
        createPeerConnectionClient(
            localRenderer,
            remoteRenderer,
            createParametersForVideoCall(VIDEO_CODEC_VP8, false),
            null);

    // Wait for the local SDP, relabel it as an answer, and set it as the remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    SessionDescription remoteSdp =
        new SessionDescription(
            SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

    // Check that local and remote video frames were rendered.
    assertTrue(
        "Local video frames were not rendered before camera switch.",
        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue(
        "Remote video frames were not rendered before camera switch.",
        remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));

    for (int i = 0; i < CAMERA_SWITCH_ATTEMPTS; i++) {
      // Try to switch the camera.
      pcClient.switchCamera();

      // Reset the video renderers and check that local and remote video frames
      // were rendered after the camera switch.
      localRenderer.reset(EXPECTED_VIDEO_FRAMES);
      remoteRenderer.reset(EXPECTED_VIDEO_FRAMES);
      assertTrue(
          "Local video frames were not rendered after camera switch.",
          localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
      assertTrue(
          "Remote video frames were not rendered after camera switch.",
          remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    }
    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testCameraSwitch done.");
  }
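The switchCamera call exercised above is likewise implemented elsewhere; one plausible version, assuming the client's capturer is an org.webrtc.CameraVideoCapturer (an assumption, not the project's actual code):

  // Sketch (assumed field: videoCapturer): switch between front and back cameras.
  public void switchCamera() {
    if (videoCapturer instanceof CameraVideoCapturer) {
      ((CameraVideoCapturer) videoCapturer)
          .switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
            @Override
            public void onCameraSwitchDone(boolean isFrontCamera) {
              Log.d(TAG, "Camera switched, front facing: " + isFrontCamera);
            }

            @Override
            public void onCameraSwitchError(String errorDescription) {
              Log.e(TAG, "Camera switch error: " + errorDescription);
            }
          });
    }
  }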
Example #7
  private void doLoopbackTest(PeerConnectionParameters parameters, boolean decodeToTexture)
      throws InterruptedException {
    loopback = true;
    MockRenderer localRenderer = null;
    MockRenderer remoteRenderer = null;
    if (parameters.videoCallEnabled) {
      Log.d(TAG, "testLoopback for video " + parameters.videoCodec);
      localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
      remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
    } else {
      Log.d(TAG, "testLoopback for audio.");
    }
    pcClient =
        createPeerConnectionClient(
            localRenderer,
            remoteRenderer,
            parameters,
            decodeToTexture ? eglBase.getEglBaseContext() : null);

    // Wait for the local SDP, relabel it as an answer, and set it as the remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    SessionDescription remoteSdp =
        new SessionDescription(
            SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

    if (parameters.videoCallEnabled) {
      // Check that local and remote video frames were rendered.
      assertTrue(
          "Local video frames were not rendered.",
          localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
      assertTrue(
          "Remote video frames were not rendered.",
          remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    } else {
      // For an audio-only call, just sleep for 1 second.
      // TODO(glaznev): check how we can detect that remote audio was rendered.
      Thread.sleep(AUDIO_RUN_TIMEOUT);
    }

    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testLoopback done.");
  }
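doLoopbackTest is a shared helper; concrete tests presumably just pick the call parameters and the decode path. A sketch of two such wrappers (the method names are illustrative, not necessarily the project's actual tests):

  // Sketch: hypothetical @SmallTest wrappers around doLoopbackTest.
  @SmallTest
  public void testLoopbackVp8() throws InterruptedException {
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false /* decodeToTexture */);
  }

  @SmallTest
  public void testLoopbackVp8DecodeToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true /* decodeToTexture */);
  }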
Example #8
  // Tests that a call can be set up even if the EGL context used during initialization is
  // released before the video codecs are created. The HW encoder and decoder are set up to
  // use textures.
  @SmallTest
  public void testLoopbackEglContextReleasedAfterCreatingPc() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Decode to textures is not supported. Requires SDK version 19");
      return;
    }

    loopback = true;
    PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true);
    MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
    pcClient =
        createPeerConnectionClient(
            localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext());

    // Wait for the local SDP, relabel it as an answer, and set it as the remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));

    // Release the EGL context used for creating the PeerConnectionClient.
    // Since createPeerConnectionClient is asynchronous, we must wait for the local
    // SessionDescription.
    eglBase.release();
    eglBase = null;

    SessionDescription remoteSdp =
        new SessionDescription(
            SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));
    // Check that local and remote video frames were rendered.
    assertTrue(
        "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue(
        "Remote video frames were not rendered.",
        remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));

    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testLoopback done.");
  }
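This test releases eglBase mid-run, which implies the field is created during test setup. A minimal sketch of what that setup might look like, assuming the org.webrtc.EglBase factory (the setUp/tearDown bodies are assumptions):

  // Sketch (assumed): creation and cleanup of the shared EGL context used by the tests.
  private EglBase eglBase;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    eglBase = EglBase.create(); // its context is handed to pcClient for HW decode-to-texture
  }

  @Override
  protected void tearDown() throws Exception {
    if (eglBase != null) { // may already be null if the test released it itself
      eglBase.release();
      eglBase = null;
    }
    super.tearDown();
  }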
Example #9
  // Parses the room server's JSON response and hands the resulting SignalingParameters
  // (ICE servers, constraints, offer SDP and candidates) to the events callback.
  private void roomHttpResponseParse(String response) {
    Log.d(TAG, "Room response: " + response);
    try {
      LinkedList<IceCandidate> iceCandidates = null;
      SessionDescription offerSdp = null;
      JSONObject roomJson = new JSONObject(response);

      String result = roomJson.getString("result");
      if (!result.equals("SUCCESS")) {
        events.onSignalingParametersError("Room response error: " + result);
        return;
      }
      response = roomJson.getString("params");
      roomJson = new JSONObject(response);
      String roomId = roomJson.getString("room_id");
      String clientId = roomJson.getString("client_id");
      String wssUrl = roomJson.getString("wss_url");
      String wssPostUrl = roomJson.getString("wss_post_url");
      boolean initiator = (roomJson.getBoolean("is_initiator"));
      if (!initiator) {
        iceCandidates = new LinkedList<IceCandidate>();
        String messagesString = roomJson.getString("messages");
        JSONArray messages = new JSONArray(messagesString);
        for (int i = 0; i < messages.length(); ++i) {
          String messageString = messages.getString(i);
          JSONObject message = new JSONObject(messageString);
          String messageType = message.getString("type");
          Log.d(TAG, "GAE->C #" + i + " : " + messageString);
          if (messageType.equals("offer")) {
            offerSdp =
                new SessionDescription(
                    SessionDescription.Type.fromCanonicalForm(messageType),
                    message.getString("sdp"));
          } else if (messageType.equals("candidate")) {
            IceCandidate candidate =
                new IceCandidate(
                    message.getString("id"),
                    message.getInt("label"),
                    message.getString("candidate"));
            iceCandidates.add(candidate);
          } else {
            Log.e(TAG, "Unknown message: " + messageString);
          }
        }
      }
      Log.d(TAG, "RoomId: " + roomId + ". ClientId: " + clientId);
      Log.d(TAG, "Initiator: " + initiator);
      Log.d(TAG, "WSS url: " + wssUrl);
      Log.d(TAG, "WSS POST url: " + wssPostUrl);

      LinkedList<PeerConnection.IceServer> iceServers =
          iceServersFromPCConfigJSON(roomJson.getString("pc_config"));
      boolean isTurnPresent = false;
      for (PeerConnection.IceServer server : iceServers) {
        Log.d(TAG, "IceServer: " + server);
        if (server.uri.startsWith("turn:")) {
          isTurnPresent = true;
          break;
        }
      }
      if (!isTurnPresent) {
        LinkedList<PeerConnection.IceServer> turnServers =
            requestTurnServers(roomJson.getString("turn_url"));
        for (PeerConnection.IceServer turnServer : turnServers) {
          Log.d(TAG, "TurnServer: " + turnServer);
          iceServers.add(turnServer);
        }
      }

      MediaConstraints pcConstraints = constraintsFromJSON(roomJson.getString("pc_constraints"));
      addDTLSConstraintIfMissing(pcConstraints, loopback);
      Log.d(TAG, "pcConstraints: " + pcConstraints);
      MediaConstraints videoConstraints =
          constraintsFromJSON(getAVConstraints("video", roomJson.getString("media_constraints")));
      Log.d(TAG, "videoConstraints: " + videoConstraints);
      MediaConstraints audioConstraints =
          constraintsFromJSON(getAVConstraints("audio", roomJson.getString("media_constraints")));
      Log.d(TAG, "audioConstraints: " + audioConstraints);

      SignalingParameters params =
          new SignalingParameters(
              iceServers,
              initiator,
              pcConstraints,
              videoConstraints,
              audioConstraints,
              clientId,
              wssUrl,
              wssPostUrl,
              offerSdp,
              iceCandidates);
      events.onSignalingParametersReady(params);
    } catch (JSONException e) {
      events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
    } catch (IOException e) {
      events.onSignalingParametersError("Room IO error: " + e.toString());
    }
  }
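iceServersFromPCConfigJSON is referenced above but not shown. A plausible sketch that turns the pc_config JSON into PeerConnection.IceServer entries; the exact key names ("iceServers", "urls", "credential") are assumptions about the server's response format:

  // Sketch (assumed pc_config layout): { "iceServers": [ { "urls": "...", "credential": "..." }, ... ] }
  private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
      throws JSONException {
    LinkedList<PeerConnection.IceServer> servers = new LinkedList<PeerConnection.IceServer>();
    JSONObject json = new JSONObject(pcConfig);
    JSONArray serverArray = json.getJSONArray("iceServers");
    for (int i = 0; i < serverArray.length(); ++i) {
      JSONObject server = serverArray.getJSONObject(i);
      String url = server.getString("urls");
      String credential = server.has("credential") ? server.getString("credential") : "";
      servers.add(new PeerConnection.IceServer(url, "", credential));
    }
    return servers;
  }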