Example #1
  public static VideoCapturerAndroid create(
      String name, CameraEventsHandler eventsHandler, EGLContext sharedEglContext) {
    final int cameraId = lookupDeviceName(name);
    if (cameraId == -1) {
      return null;
    }

    final VideoCapturerAndroid capturer =
        new VideoCapturerAndroid(cameraId, eventsHandler, sharedEglContext);
    capturer.setNativeCapturer(nativeCreateVideoCapturer(capturer));
    return capturer;
  }
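A minimal usage sketch of this factory method (the call site below is hypothetical; it uses the companion getNameOfFrontFacingDevice() helper from the same class and passes null for both the events handler and the shared EGL context):

  // Hypothetical call site: open the front-facing camera with no event
  // handler and no shared EGL context.
  String frontName = VideoCapturerAndroid.getNameOfFrontFacingDevice();
  VideoCapturerAndroid capturer = VideoCapturerAndroid.create(frontName, null, null);
  if (capturer == null) {
    // lookupDeviceName() found no camera matching the given name.
    Log.e(TAG, "Failed to create capturer for camera: " + frontName);
  }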
Example #2
  private void createPeerConnectionInternal() {
    if (factory == null || isError) {
      Log.e(TAG, "Peerconnection factory is not created");
      return;
    }
    Log.d(TAG, "Create peer connection");
    Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
    if (videoConstraints != null) {
      Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
    }
    queuedRemoteCandidates = new LinkedList<IceCandidate>();

    PeerConnection.RTCConfiguration rtcConfig =
        new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
    // TCP candidates are only useful when connecting to a server that supports
    // ICE-TCP.
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
    rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
    rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;

    peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
    isInitiator = false;

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableTracing(
        "logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT), Logging.Severity.LS_INFO);

    mediaStream = factory.createLocalMediaStream("ARDAMS");
    if (videoCallEnabled) {
      String cameraDeviceName = VideoCapturerAndroid.getDeviceName(0);
      String frontCameraDeviceName = VideoCapturerAndroid.getNameOfFrontFacingDevice();
      if (numberOfCameras > 1 && frontCameraDeviceName != null) {
        cameraDeviceName = frontCameraDeviceName;
      }
      Log.d(TAG, "Opening camera: " + cameraDeviceName);
      videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null);
      if (videoCapturer == null) {
        reportError("Failed to open camera");
        return;
      }
      mediaStream.addTrack(createVideoTrack(videoCapturer));
    }

    mediaStream.addTrack(
        factory.createAudioTrack(AUDIO_TRACK_ID, factory.createAudioSource(audioConstraints)));
    peerConnection.addStream(mediaStream);

    Log.d(TAG, "Peer connection created.");
  }
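The createVideoTrack() helper invoked above is not shown in this example. A hedged sketch of what it typically looks like in this class (the videoSource, localVideoTrack, localRender, and renderVideo fields are assumptions):

  // Sketch: wrap the capturer in a VideoSource, create a VideoTrack from it,
  // and attach the local preview renderer. Field names are assumed.
  private VideoTrack createVideoTrack(VideoCapturerAndroid capturer) {
    videoSource = factory.createVideoSource(capturer, videoConstraints);
    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
  }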
Example #3
  private void changeCaptureFormatInternal(int width, int height, int framerate) {
    if (!videoCallEnabled || isError || videoCapturer == null) {
      Log.e(
          TAG,
          "Failed to change capture format. Video: " + videoCallEnabled + ". Error : " + isError);
      return;
    }
    videoCapturer.onOutputFormatRequest(width, height, framerate);
  }
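The Internal suffix suggests this method is meant to run on a dedicated thread. A hedged sketch of the public wrapper that typically dispatches to it (the single-threaded executor field is an assumption):

  // Sketch: dispatch the format change onto the executor that owns all
  // PeerConnection state, so the internal method never races the caller.
  public void changeCaptureFormat(final int width, final int height, final int framerate) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        changeCaptureFormatInternal(width, height, framerate);
      }
    });
  }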
Example #4
  private void switchCameraInternal() {
    if (!videoCallEnabled || numberOfCameras < 2 || isError || videoCapturer == null) {
      Log.e(
          TAG,
          "Failed to switch camera. Video: "
              + videoCallEnabled
              + ". Error : "
              + isError
              + ". Number of cameras: "
              + numberOfCameras);
      return; // No video is sent, only one camera is available, or an error happened.
    }
    Log.d(TAG, "Switch camera");
    videoCapturer.switchCamera(null);
  }
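Passing null here means the result of the switch is ignored. A hedged sketch of supplying a callback instead, assuming the CameraSwitchHandler interface of this VideoCapturerAndroid API:

  // Sketch: observe the asynchronous outcome of the camera switch.
  videoCapturer.switchCamera(new VideoCapturerAndroid.CameraSwitchHandler() {
    @Override
    public void onCameraSwitchDone(boolean isFrontCamera) {
      Log.d(TAG, "Camera switch done. Front camera: " + isFrontCamera);
    }

    @Override
    public void onCameraSwitchError(String errorDescription) {
      Log.e(TAG, "Camera switch error: " + errorDescription);
    }
  });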
Example #5
  private void createMediaConstraintsInternal() {
    // Create peer connection constraints.
    pcConstraints = new MediaConstraints();
    // Enable DTLS for normal calls and disable for loopback calls.
    if (peerConnectionParameters.loopback) {
      pcConstraints.optional.add(
          new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "false"));
    } else {
      pcConstraints.optional.add(
          new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "true"));
    }

    // Check if there is a camera on the device and disable the video call if not.
    numberOfCameras = VideoCapturerAndroid.getDeviceCount();
    if (numberOfCameras == 0) {
      Log.w(TAG, "No camera on device. Switch to audio only call.");
      videoCallEnabled = false;
    }
    // Create video constraints if video call is enabled.
    if (videoCallEnabled) {
      videoConstraints = new MediaConstraints();
      int videoWidth = peerConnectionParameters.videoWidth;
      int videoHeight = peerConnectionParameters.videoHeight;

      // If the VP8 HW video encoder is supported and the video resolution is
      // not specified, force it to HD.
      if ((videoWidth == 0 || videoHeight == 0)
          && peerConnectionParameters.videoCodecHwAcceleration
          && MediaCodecVideoEncoder.isVp8HwSupported()) {
        videoWidth = HD_VIDEO_WIDTH;
        videoHeight = HD_VIDEO_HEIGHT;
      }

      // Add video resolution constraints.
      if (videoWidth > 0 && videoHeight > 0) {
        videoWidth = Math.min(videoWidth, MAX_VIDEO_WIDTH);
        videoHeight = Math.min(videoHeight, MAX_VIDEO_HEIGHT);
        videoConstraints.mandatory.add(
            new KeyValuePair(MIN_VIDEO_WIDTH_CONSTRAINT, Integer.toString(videoWidth)));
        videoConstraints.mandatory.add(
            new KeyValuePair(MAX_VIDEO_WIDTH_CONSTRAINT, Integer.toString(videoWidth)));
        videoConstraints.mandatory.add(
            new KeyValuePair(MIN_VIDEO_HEIGHT_CONSTRAINT, Integer.toString(videoHeight)));
        videoConstraints.mandatory.add(
            new KeyValuePair(MAX_VIDEO_HEIGHT_CONSTRAINT, Integer.toString(videoHeight)));
      }

      // Add fps constraints.
      int videoFps = peerConnectionParameters.videoFps;
      if (videoFps > 0) {
        videoFps = Math.min(videoFps, MAX_VIDEO_FPS);
        videoConstraints.mandatory.add(
            new KeyValuePair(MIN_VIDEO_FPS_CONSTRAINT, Integer.toString(videoFps)));
        videoConstraints.mandatory.add(
            new KeyValuePair(MAX_VIDEO_FPS_CONSTRAINT, Integer.toString(videoFps)));
      }
    }

    // Create audio constraints.
    audioConstraints = new MediaConstraints();

    // Create SDP constraints.
    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    if (videoCallEnabled || peerConnectionParameters.loopback) {
      sdpMediaConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    } else {
      sdpMediaConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
    }
  }
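The constraint-key constants used above are plain strings. The values below are what such a class typically defines (reproduced here as an assumption so the example is self-contained):

  // Assumed definitions of the constraint keys referenced above.
  private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
  private static final String MIN_VIDEO_WIDTH_CONSTRAINT = "minWidth";
  private static final String MAX_VIDEO_WIDTH_CONSTRAINT = "maxWidth";
  private static final String MIN_VIDEO_HEIGHT_CONSTRAINT = "minHeight";
  private static final String MAX_VIDEO_HEIGHT_CONSTRAINT = "maxHeight";
  private static final String MIN_VIDEO_FPS_CONSTRAINT = "minFrameRate";
  private static final String MAX_VIDEO_FPS_CONSTRAINT = "maxFrameRate";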
Example #6
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_video_chat);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    Bundle extras = getIntent().getExtras();
    if (extras == null || !extras.containsKey(Constants.ROBOT_NAME)) {
      Intent intent = new Intent(this, QBotActivity.class);
      startActivity(intent);
      Toast.makeText(
              this,
              "Need to pass robot_name to VideoChatActivity in intent extras (Constants.ROBOT_NAME).",
              Toast.LENGTH_SHORT)
          .show();
      finish();
      return;
    }
    this.robotname = extras.getString(Constants.ROBOT_NAME, "");
    this.mChatList = getListView();
    this.mChatEditText = (EditText) findViewById(R.id.chat_input);
    this.mCallStatus = (TextView) findViewById(R.id.call_status);

    // Set up the List View for chatting
    List<ChatMessage> ll = new LinkedList<ChatMessage>();
    mChatAdapter = new ChatAdapter(this, ll);
    mChatList.setAdapter(mChatAdapter);

    // First, we initialize the PeerConnectionFactory globals with our application context and some options.
    PeerConnectionFactory.initializeAndroidGlobals(
        this, // Context
        true, // Audio Enabled
        true, // Video Enabled
        true, // Hardware Acceleration Enabled
        null); // Render EGL Context

    PeerConnectionFactory pcFactory = new PeerConnectionFactory();
    this.pnRTCClient = new PnRTCClient(Constants.PUB_KEY, Constants.SUB_KEY, this.robotname);

    // Get the number of cameras and the front/back-facing camera device names
    int camNumber = VideoCapturerAndroid.getDeviceCount();
    String frontFacingCam = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    String backFacingCam = VideoCapturerAndroid.getNameOfBackFacingDevice();

    // Create a VideoCapturerAndroid instance for the front-facing device name
    VideoCapturer capturer = VideoCapturerAndroid.create(frontFacingCam);

    // First we create a VideoSource, then we can make a VideoTrack
    localVideoSource = pcFactory.createVideoSource(capturer, this.pnRTCClient.videoConstraints());
    VideoTrack localVideoTrack = pcFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);

    // First we create an AudioSource, then we can create our AudioTrack
    AudioSource audioSource = pcFactory.createAudioSource(this.pnRTCClient.audioConstraints());
    AudioTrack localAudioTrack = pcFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);

    // To create our VideoRenderers, we can use the included VideoRendererGui for simplicity.
    // First we need to set the GLSurfaceView that it should render to.
    this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);

    // Then we set that view, and pass a Runnable to run once the surface is ready
    VideoRendererGui.setView(videoView, null);

    // Now that VideoRendererGui is ready, we can get our VideoRenderers.
    // Create them IN THIS ORDER: the creation order determines which renderer is drawn on top.
    remoteRender =
        VideoRendererGui.create(
            0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    localRender =
        VideoRendererGui.create(
            0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);

    // We start out with an empty MediaStream object, created with help from our
    // PeerConnectionFactory
    //  Note that LOCAL_MEDIA_STREAM_ID can be any string
    MediaStream mediaStream = pcFactory.createLocalMediaStream(LOCAL_MEDIA_STREAM_ID);

    // Now we can add our tracks.
    mediaStream.addTrack(localVideoTrack);
    mediaStream.addTrack(localAudioTrack);

    // First attach the RTC Listener so that callback events will be triggered
    this.pnRTCClient.attachRTCListener(new DemoRTCListener());

    // Then attach your local media stream to the PnRTCClient.
    //  This will trigger the onLocalStream callback.
    this.pnRTCClient.attachLocalMediaStream(mediaStream);

    // Listen on a channel. This is your "phone number"; then set the maximum number of chat connections.
    this.pnRTCClient.listenOn(robotname);
    this.pnRTCClient.setMaxConnections(1);

    // If the intent contains a user to dial, call them now that you are connected.
    //  Otherwise, keep listening for an incoming call.
    if (extras.containsKey(Constants.USER_NAME)) {
      String callUser = extras.getString(Constants.USER_NAME, "");
      connectToUser(callUser);
    }
  }
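The connectToUser() helper referenced above is not shown in this example. A hedged sketch of what it might look like, assuming PnRTCClient exposes a connect(String) method as in the PubNub WebRTC tutorial this code resembles:

  // Sketch: dial another user by ID once we are attached and listening.
  public void connectToUser(String user) {
    // Assumed API: PnRTCClient.connect() starts the signaling handshake with the given user.
    this.pnRTCClient.connect(user);
  }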
Example #7
  public static VideoCapturerAndroid create(String name, CameraEventsHandler eventsHandler) {
    return VideoCapturerAndroid.create(name, eventsHandler, null);
  }