private void createPeerConnectionInternal(EGLContext renderEGLContext) {
    if (factory == null || isError) {
      Log.e(TAG, "Peer connection factory is not created");
      return;
    }
    Log.d(TAG, "Create peer connection.");

    Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
    if (videoConstraints != null) {
      Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
    }
    queuedRemoteCandidates = new LinkedList<IceCandidate>();

    if (videoCallEnabled) {
      Log.d(TAG, "EGLContext: " + renderEGLContext);
      factory.setVideoHwAccelerationOptions(renderEGLContext);
    }

    PeerConnection.RTCConfiguration rtcConfig =
        new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
    // TCP candidates are only useful when connecting to a server that supports
    // ICE-TCP.
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
    rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
    rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
    // Use an ECDSA certificate for DTLS.
    rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
    peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
    isInitiator = false;

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableTracing(
        "logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT), Logging.Severity.LS_INFO);

    mediaStream = factory.createLocalMediaStream("ARDAMS");
    if (videoCallEnabled) {
      String cameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
      String frontCameraDeviceName = CameraEnumerationAndroid.getNameOfFrontFacingDevice();
      if (numberOfCameras > 1 && frontCameraDeviceName != null) {
        cameraDeviceName = frontCameraDeviceName;
      }
      Log.d(TAG, "Opening camera: " + cameraDeviceName);
      videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null);
      if (videoCapturer == null) {
        reportError("Failed to open camera");
        return;
      }
      mediaStream.addTrack(createVideoTrack(videoCapturer));
    }

    mediaStream.addTrack(
        factory.createAudioTrack(AUDIO_TRACK_ID, factory.createAudioSource(audioConstraints)));
    peerConnection.addStream(mediaStream);

    Log.d(TAG, "Peer connection created.");
  }
  private VideoTrack createVideoTrack(VideoCapturerAndroid capturer) {
    videoSource = factory.createVideoSource(capturer, videoConstraints);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
  }
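
The queuedRemoteCandidates list initialized above is typically fed from the signaling callback: candidates that arrive before the remote description is set are buffered, later ones are applied directly. A minimal sketch in the style of these AppRTC-derived clients (the executor field is an assumption):

  public void addRemoteIceCandidate(final IceCandidate candidate) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (peerConnection != null && !isError) {
          if (queuedRemoteCandidates != null) {
            // Remote description is not set yet; buffer the candidate.
            queuedRemoteCandidates.add(candidate);
          } else {
            peerConnection.addIceCandidate(candidate);
          }
        }
      }
    });
  }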
Example No. 3
  private VideoTrack createVideoTrack(VideoCapturer capturer) {
    videoSource = factory.createVideoSource(capturer);
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
  }
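
Because this variant starts capture explicitly with startCapture(), pausing and resuming video (for example from onPause()/onResume()) works symmetrically; a sketch assuming a boolean videoCapturerStopped field:

  private void stopVideoSource() {
    if (videoCapturer != null && !videoCapturerStopped) {
      Log.d(TAG, "Stop video source.");
      try {
        videoCapturer.stopCapture();
      } catch (InterruptedException e) {
        // Ignore; capture is being torn down anyway.
      }
      videoCapturerStopped = true;
    }
  }

  private void startVideoSource() {
    if (videoCapturer != null && videoCapturerStopped) {
      Log.d(TAG, "Restart video source.");
      videoCapturer.startCapture(videoWidth, videoHeight, videoFps);
      videoCapturerStopped = false;
    }
  }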
  private void createPeerConnectionFactoryInternal(Context context) {
    Log.d(
        TAG,
        "Create peer connection factory. Use video: " + peerConnectionParameters.videoCallEnabled);
    isError = false;

    // Initialize field trials.
    PeerConnectionFactory.initializeFieldTrials(FIELD_TRIAL_AUTOMATIC_RESIZE);

    // Check preferred video codec.
    preferredVideoCodec = VIDEO_CODEC_VP8;
    if (videoCallEnabled && peerConnectionParameters.videoCodec != null) {
      if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
        preferredVideoCodec = VIDEO_CODEC_VP9;
      } else if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
        preferredVideoCodec = VIDEO_CODEC_H264;
      }
    }
    Log.d(TAG, "Pereferred video codec: " + preferredVideoCodec);

    // Check if ISAC is used by default.
    preferIsac = false;
    if (peerConnectionParameters.audioCodec != null
        && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC)) {
      preferIsac = true;
    }

    // Enable/disable OpenSL ES playback.
    if (!peerConnectionParameters.useOpenSLES) {
      Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
      WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* blacklist */);
    } else {
      Log.d(TAG, "Allow OpenSL ES audio if device supports it");
      WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
    }

    // Create peer connection factory.
    if (!PeerConnectionFactory.initializeAndroidGlobals(
        context, true, true, peerConnectionParameters.videoCodecHwAcceleration)) {
      events.onPeerConnectionError("Failed to initializeAndroidGlobals");
    }
    factory = new PeerConnectionFactory();
    if (options != null) {
      Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
      factory.setOptions(options);
    }
    Log.d(TAG, "Peer connection factory created.");
  }
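
The preferredVideoCodec selected above is not passed to the factory itself; in AppRTC-style clients it is applied later by moving that codec's payload type to the front of the SDP m= line. A sketch of such a helper, close to the demo's preferCodec() (java.util.regex.Pattern/Matcher and android.text.TextUtils imports assumed):

  private static String preferCodec(String sdpDescription, String codec, boolean isAudio) {
    String[] lines = sdpDescription.split("\r\n");
    int mLineIndex = -1;
    String codecRtpMap = null;
    // a=rtpmap:<payload type> <encoding name>/<clock rate>[/<encoding parameters>]
    Pattern pattern = Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$");
    String mediaDescription = isAudio ? "m=audio " : "m=video ";
    for (int i = 0; i < lines.length && (mLineIndex == -1 || codecRtpMap == null); i++) {
      if (lines[i].startsWith(mediaDescription)) {
        mLineIndex = i;
      } else {
        Matcher matcher = pattern.matcher(lines[i]);
        if (matcher.matches()) {
          codecRtpMap = matcher.group(1);
        }
      }
    }
    if (mLineIndex == -1 || codecRtpMap == null) {
      return sdpDescription;
    }
    // m=<media> <port> <proto> <payload types...>: keep the first three fields,
    // then put the preferred payload type first among the rest.
    String[] origMLineParts = lines[mLineIndex].split(" ");
    StringBuilder newMLine = new StringBuilder();
    for (int i = 0; i < 3; i++) {
      newMLine.append(origMLineParts[i]).append(" ");
    }
    newMLine.append(codecRtpMap);
    for (int i = 3; i < origMLineParts.length; i++) {
      if (!origMLineParts[i].equals(codecRtpMap)) {
        newMLine.append(" ").append(origMLineParts[i]);
      }
    }
    lines[mLineIndex] = newMLine.toString();
    return TextUtils.join("\r\n", lines) + "\r\n";
  }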
 // Disconnect from remote resources, dispose of local resources, and exit.
 private void disconnectAndExit() {
   synchronized (quit[0]) {
     if (quit[0]) {
       return;
     }
     quit[0] = true;
     if (pc != null) {
       pc.dispose();
       pc = null;
     }
     if (appRtcClient != null) {
       appRtcClient.sendMessage("{\"type\": \"bye\"}");
       appRtcClient.disconnect();
       appRtcClient = null;
     }
     if (videoSource != null) {
       videoSource.dispose();
       videoSource = null;
     }
     if (factory != null) {
       factory.dispose();
       factory = null;
     }
     finish();
   }
 }
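
disconnectAndExit() is idempotent thanks to the quit flag, so it is safe to call from several places; activities of this style typically also invoke it from onDestroy():

  @Override
  protected void onDestroy() {
    disconnectAndExit();
    super.onDestroy();
  }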
  private void createPeerConnectionInternal() {
    if (factory == null || isError) {
      Log.e(TAG, "Peer connection factory is not created");
      return;
    }
    Log.d(TAG, "Create peer connection");
    Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
    if (videoConstraints != null) {
      Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
    }
    queuedRemoteCandidates = new LinkedList<IceCandidate>();

    peerConnection =
        factory.createPeerConnection(signalingParameters.iceServers, pcConstraints, pcObserver);
    isInitiator = false;

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableTracing(
        "logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT), Logging.Severity.LS_INFO);

    mediaStream = factory.createLocalMediaStream("ARDAMS");
    if (videoCallEnabled) {
      String cameraDeviceName = VideoCapturerAndroid.getDeviceName(0);
      String frontCameraDeviceName = VideoCapturerAndroid.getNameOfFrontFacingDevice();
      if (numberOfCameras > 1 && frontCameraDeviceName != null) {
        cameraDeviceName = frontCameraDeviceName;
      }
      Log.d(TAG, "Opening camera: " + cameraDeviceName);
      videoCapturer = VideoCapturerAndroid.create(cameraDeviceName);
      if (videoCapturer == null) {
        reportError("Failed to open camera");
        return;
      }
      mediaStream.addTrack(createVideoTrack(videoCapturer));
    }

    mediaStream.addTrack(
        factory.createAudioTrack(AUDIO_TRACK_ID, factory.createAudioSource(audioConstraints)));
    peerConnection.addStream(mediaStream);

    Log.d(TAG, "Peer connection created.");
  }
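
The counterpart to the queuedRemoteCandidates buffer created above: once the remote description has been applied, the buffered candidates are drained into the connection and the buffer is discarded. A minimal sketch:

  private void drainCandidates() {
    if (queuedRemoteCandidates != null) {
      Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates");
      for (IceCandidate candidate : queuedRemoteCandidates) {
        peerConnection.addIceCandidate(candidate);
      }
      queuedRemoteCandidates = null;
    }
  }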
Example No. 7
 private void closeInternal() {
   if (factory != null && peerConnectionParameters.aecDump) {
     factory.stopAecDump();
   }
   Log.d(TAG, "Closing peer connection.");
   statsTimer.cancel();
   if (peerConnection != null) {
     peerConnection.dispose();
     peerConnection = null;
   }
   Log.d(TAG, "Closing audio source.");
   if (audioSource != null) {
     audioSource.dispose();
     audioSource = null;
   }
   Log.d(TAG, "Stopping capture.");
   if (videoCapturer != null) {
     try {
       videoCapturer.stopCapture();
     } catch (InterruptedException e) {
       throw new RuntimeException(e);
     }
     videoCapturer.dispose();
     videoCapturer = null;
   }
   Log.d(TAG, "Closing video source.");
   if (videoSource != null) {
     videoSource.dispose();
     videoSource = null;
   }
   Log.d(TAG, "Closing peer connection factory.");
   if (factory != null) {
     factory.dispose();
     factory = null;
   }
   options = null;
   Log.d(TAG, "Closing peer connection done.");
   events.onPeerConnectionClosed();
   PeerConnectionFactory.stopInternalTracingCapture();
   PeerConnectionFactory.shutdownInternalTracer();
 }
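
All of these *Internal methods are meant to run on a single background thread; the public entry points just post to it. A sketch of the wrapper for closeInternal(), assuming an ExecutorService-style executor field:

  public void close() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        closeInternal();
      }
    });
  }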
 private void createPeerConnectionFactoryInternal(Context context, EGLContext renderEGLContext) {
   Log.d(
       TAG,
       "Create peer connection factory with EGLContext "
           + renderEGLContext
           + ". Use video: "
           + peerConnectionParameters.videoCallEnabled);
   isError = false;
   // Check if VP9 is used by default.
   if (videoCallEnabled
       && peerConnectionParameters.videoCodec != null
       && peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
     PeerConnectionFactory.initializeFieldTrials(FIELD_TRIAL_VP9);
   } else {
     PeerConnectionFactory.initializeFieldTrials(null);
   }
   // Check if H.264 is used by default.
   preferH264 = false;
   if (videoCallEnabled
       && peerConnectionParameters.videoCodec != null
       && peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
     preferH264 = true;
   }
   // Check if ISAC is used by default.
   preferIsac = false;
   if (peerConnectionParameters.audioCodec != null
       && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC)) {
     preferIsac = true;
   }
   if (!PeerConnectionFactory.initializeAndroidGlobals(
       context, true, true, peerConnectionParameters.videoCodecHwAcceleration, renderEGLContext)) {
     events.onPeerConnectionError("Failed to initializeAndroidGlobals");
   }
   factory = new PeerConnectionFactory();
   if (options != null) {
     Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
     factory.setOptions(options);
   }
   Log.d(TAG, "Peer connection factory created.");
 }
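
The preferH264 and preferIsac flags set above are consumed when the local SDP is created: the SdpObserver rewrites the description before applying it. A sketch using the preferCodec() helper shown earlier (the localSdp and sdpObserver fields are assumptions):

  @Override
  public void onCreateSuccess(final SessionDescription origSdp) {
    String sdpDescription = origSdp.description;
    if (preferIsac) {
      sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
    }
    if (videoCallEnabled && preferH264) {
      sdpDescription = preferCodec(sdpDescription, VIDEO_CODEC_H264, false);
    }
    final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription);
    localSdp = sdp;
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (peerConnection != null && !isError) {
          Log.d(TAG, "Set local SDP from " + sdp.type);
          peerConnection.setLocalDescription(sdpObserver, sdp);
        }
      }
    });
  }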
 private void closeInternal() {
   Log.d(TAG, "Closing peer connection.");
   statsTimer.cancel();
   if (peerConnection != null) {
     peerConnection.dispose();
     peerConnection = null;
   }
   Log.d(TAG, "Closing video source.");
   if (videoSource != null) {
     videoSource.dispose();
     videoSource = null;
   }
   Log.d(TAG, "Closing peer connection factory.");
   if (factory != null) {
     factory.dispose();
     factory = null;
   }
   options = null;
   Log.d(TAG, "Closing peer connection done.");
   events.onPeerConnectionClosed();
 }
  void doSubscribe(final StreamDescription stream) {
    if (stream.isLocal()) {
      return;
    }

    if (stream.getMedia() != null) {
      // already subscribed!
      triggerMediaAvailable(stream);
      return;
    }

    // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    // Logging.enableTracing("logcat:",
    // EnumSet.of(Logging.TraceLevel.TRACE_ALL),
    // Logging.Severity.LS_SENSITIVE);

    MyPcObserver pcObs = new MyPcObserver(new LicodeSdpObserver(stream, false), stream);
    PeerConnection pc = sFactory.createPeerConnection(mIceServers, makePcConstraints(), pcObs);

    stream.initRemote(pc, pcObs.getSdpObserver());
  }
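
The makePcConstraints() helper used here and in doPublish() is not shown; for DTLS-SRTP clients of this era it is typically just the following (an assumption, not taken from the source):

  private MediaConstraints makePcConstraints() {
    MediaConstraints pcConstraints = new MediaConstraints();
    // Enable DTLS-SRTP key agreement for encrypted media.
    pcConstraints.optional.add(
        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    return pcConstraints;
  }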
  /** Begin streaming to the server - MUST run on VcThread. */
  void doPublish(VideoStreamsView view) {
    if (mVideoCapturer != null) {
      return;
    }

    MediaConstraints videoConstraints = new MediaConstraints();
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", "320"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", "240"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", "10"));
    MediaConstraints audioConstraints = new MediaConstraints();
    audioConstraints.optional.add(
        new MediaConstraints.KeyValuePair("googEchoCancellation2", "true"));
    audioConstraints.optional.add(
        new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));
    lMS = sFactory.createLocalMediaStream("ARDAMS");

    if (videoConstraints != null) {
      mVideoCapturer = getVideoCapturer();
      mVideoSource = sFactory.createVideoSource(mVideoCapturer, videoConstraints);
      VideoTrack videoTrack = sFactory.createVideoTrack("ARDAMSv0", mVideoSource);
      lMS.addTrack(videoTrack);
    }
    if (audioConstraints != null) {
      AudioTrack audioTrack =
          sFactory.createAudioTrack("ARDAMSa0", sFactory.createAudioSource(audioConstraints));
      lMS.addTrack(audioTrack);
      audioTrack.setEnabled(false);
    }

    StreamDescription stream = new StreamDescription("", false, true, true, false, null, mNick);
    MediaConstraints pcConstraints = makePcConstraints();
    MyPcObserver pcObs = new MyPcObserver(new LicodeSdpObserver(stream, true), stream);

    PeerConnection pc = sFactory.createPeerConnection(mIceServers, pcConstraints, pcObs);
    pc.addStream(lMS, new MediaConstraints());

    stream.setMedia(lMS);
    if (view != null) {
      stream.attachRenderer(new VideoCallbacks(view, VideoStreamsView.LOCAL_STREAM_ID));
    }
    stream.initLocal(pc, pcObs.getSdpObserver());
  }
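
getVideoCapturer() is referenced above but not included; the classic implementation from this era probes generated device names until one opens, front camera first (a sketch based on the old libjingle VideoCapturer.create(String) API):

  private VideoCapturer getVideoCapturer() {
    String[] cameraFacing = {"front", "back"};
    int[] cameraIndex = {0, 1};
    int[] cameraOrientation = {0, 90, 180, 270};
    for (String facing : cameraFacing) {
      for (int index : cameraIndex) {
        for (int orientation : cameraOrientation) {
          String name =
              "Camera " + index + ", Facing " + facing + ", Orientation " + orientation;
          VideoCapturer capturer = VideoCapturer.create(name);
          if (capturer != null) {
            return capturer;
          }
        }
      }
    }
    throw new RuntimeException("Failed to open capturer");
  }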
Example No. 12
  private void createPeerConnectionFactoryInternal(Context context) {
    PeerConnectionFactory.initializeInternalTracer();
    if (peerConnectionParameters.tracing) {
      PeerConnectionFactory.startInternalTracingCapture(
          Environment.getExternalStorageDirectory().getAbsolutePath()
              + File.separator
              + "webrtc-trace.txt");
    }
    Log.d(
        TAG,
        "Create peer connection factory. Use video: " + peerConnectionParameters.videoCallEnabled);
    isError = false;

    // Initialize field trials.
    PeerConnectionFactory.initializeFieldTrials("");

    // Check preferred video codec.
    preferredVideoCodec = VIDEO_CODEC_VP8;
    if (videoCallEnabled && peerConnectionParameters.videoCodec != null) {
      if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
        preferredVideoCodec = VIDEO_CODEC_VP9;
      } else if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
        preferredVideoCodec = VIDEO_CODEC_H264;
      }
    }
    Log.d(TAG, "Pereferred video codec: " + preferredVideoCodec);

    // Check if ISAC is used by default.
    preferIsac =
        peerConnectionParameters.audioCodec != null
            && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC);

    // Enable/disable OpenSL ES playback.
    if (!peerConnectionParameters.useOpenSLES) {
      Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
      WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* blacklist */);
    } else {
      Log.d(TAG, "Allow OpenSL ES audio if device supports it");
      WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
    }

    if (peerConnectionParameters.disableBuiltInAEC) {
      Log.d(TAG, "Disable built-in AEC even if device supports it");
      WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
    } else {
      Log.d(TAG, "Enable built-in AEC if device supports it");
      WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false);
    }

    if (peerConnectionParameters.disableBuiltInAGC) {
      Log.d(TAG, "Disable built-in AGC even if device supports it");
      WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(true);
    } else {
      Log.d(TAG, "Enable built-in AGC if device supports it");
      WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(false);
    }

    if (peerConnectionParameters.disableBuiltInNS) {
      Log.d(TAG, "Disable built-in NS even if device supports it");
      WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true);
    } else {
      Log.d(TAG, "Enable built-in NS if device supports it");
      WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
    }

    // Create peer connection factory.
    if (!PeerConnectionFactory.initializeAndroidGlobals(
        context, true, true, peerConnectionParameters.videoCodecHwAcceleration)) {
      events.onPeerConnectionError("Failed to initializeAndroidGlobals");
    }
    if (options != null) {
      Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
    }
    this.context = context;
    factory = new PeerConnectionFactory(options);
    Log.d(TAG, "Peer connection factory created.");
  }
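
Unlike the earlier variants, this one passes options into the PeerConnectionFactory constructor; the caller builds them from PeerConnectionFactory.Options. A sketch of what that setup might look like (field and constant names from the Options class):

  PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
  // networkIgnoreMask is a bitmask of Options.ADAPTER_TYPE_* values;
  // setting ADAPTER_TYPE_LOOPBACK makes ICE skip loopback interfaces.
  options.networkIgnoreMask = PeerConnectionFactory.Options.ADAPTER_TYPE_LOOPBACK;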
Example No. 13
 private AudioTrack createAudioTrack() {
   audioSource = factory.createAudioSource(audioConstraints);
   localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
   localAudioTrack.setEnabled(enableAudio);
   return localAudioTrack;
 }
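
The enableAudio flag used above usually has a runtime toggle as its counterpart, mirroring the flag onto the track; a sketch (executor field assumed):

  public void setAudioEnabled(final boolean enable) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        enableAudio = enable;
        if (localAudioTrack != null) {
          localAudioTrack.setEnabled(enableAudio);
        }
      }
    });
  }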
Example No. 14
  private void createPeerConnectionInternal(EglBase.Context renderEGLContext) {
    if (factory == null || isError) {
      Log.e(TAG, "Peer connection factory is not created");
      return;
    }
    Log.d(TAG, "Create peer connection.");

    Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
    queuedRemoteCandidates = new LinkedList<IceCandidate>();

    if (videoCallEnabled) {
      Log.d(TAG, "EGLContext: " + renderEGLContext);
      factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
    }

    PeerConnection.RTCConfiguration rtcConfig =
        new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
    // TCP candidates are only useful when connecting to a server that supports
    // ICE-TCP.
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
    rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
    rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
    rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
    // Use an ECDSA certificate for DTLS.
    rtcConfig.keyType = PeerConnection.KeyType.ECDSA;

    peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
    isInitiator = false;

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
    Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);

    mediaStream = factory.createLocalMediaStream("ARDAMS");
    if (videoCallEnabled) {
      if (peerConnectionParameters.useCamera2) {
        if (!peerConnectionParameters.captureToTexture) {
          reportError(context.getString(R.string.camera2_texture_only_error));
          return;
        }

        Logging.d(TAG, "Creating capturer using camera2 API.");
        createCapturer(new Camera2Enumerator(context));
      } else {
        Logging.d(TAG, "Creating capturer using camera1 API.");
        createCapturer(new Camera1Enumerator(peerConnectionParameters.captureToTexture));
      }

      if (videoCapturer == null) {
        reportError("Failed to open camera");
        return;
      }
      mediaStream.addTrack(createVideoTrack(videoCapturer));
    }

    mediaStream.addTrack(createAudioTrack());
    peerConnection.addStream(mediaStream);

    if (peerConnectionParameters.aecDump) {
      try {
        aecDumpFileDescriptor =
            ParcelFileDescriptor.open(
                new File(
                    Environment.getExternalStorageDirectory().getPath()
                        + File.separator
                        + "Download/audio.aecdump"),
                ParcelFileDescriptor.MODE_READ_WRITE
                    | ParcelFileDescriptor.MODE_CREATE
                    | ParcelFileDescriptor.MODE_TRUNCATE);
        factory.startAecDump(aecDumpFileDescriptor.getFd(), -1);
      } catch (IOException e) {
        Log.e(TAG, "Can not open aecdump file", e);
      }
    }

    Log.d(TAG, "Peer connection created.");
  }
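
The createCapturer() helper called above is not shown; with the CameraEnumerator API it typically tries front-facing devices first and falls back to any other camera (a sketch close to the AppRTC demo):

  private void createCapturer(CameraEnumerator enumerator) {
    final String[] deviceNames = enumerator.getDeviceNames();
    // Try to find a front-facing camera first.
    for (String deviceName : deviceNames) {
      if (enumerator.isFrontFacing(deviceName)) {
        videoCapturer = enumerator.createCapturer(deviceName, null);
        if (videoCapturer != null) {
          return;
        }
      }
    }
    // No front-facing camera opened; fall back to any other device.
    for (String deviceName : deviceNames) {
      if (!enumerator.isFrontFacing(deviceName)) {
        videoCapturer = enumerator.createCapturer(deviceName, null);
        if (videoCapturer != null) {
          return;
        }
      }
    }
  }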
  @Override
  public void onIceServers(List<PeerConnection.IceServer> iceServers) {
    factory = new PeerConnectionFactory();

    MediaConstraints pcConstraints = appRtcClient.pcConstraints();
    pcConstraints.optional.add(new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
    pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver);

    createDataChannelToRegressionTestBug2302(pc); // See method comment.

    // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    // Logging.enableTracing(
    //     "logcat:",
    //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
    //     Logging.Severity.LS_SENSITIVE);

    {
      final PeerConnection finalPC = pc;
      final Runnable repeatedStatsLogger =
          new Runnable() {
            public void run() {
              synchronized (quit[0]) {
                if (quit[0]) {
                  return;
                }
                final Runnable runnableThis = this;
                if (hudView.getVisibility() == View.INVISIBLE) {
                  vsv.postDelayed(runnableThis, 1000);
                  return;
                }
                boolean success =
                    finalPC.getStats(
                        new StatsObserver() {
                          public void onComplete(final StatsReport[] reports) {
                            runOnUiThread(
                                new Runnable() {
                                  public void run() {
                                    updateHUD(reports);
                                  }
                                });
                            for (StatsReport report : reports) {
                              Log.d(TAG, "Stats: " + report.toString());
                            }
                            vsv.postDelayed(runnableThis, 1000);
                          }
                        },
                        null);
                if (!success) {
                  throw new RuntimeException("getStats() return false!");
                }
              }
            }
          };
      vsv.postDelayed(repeatedStatsLogger, 1000);
    }

    {
      logAndToast("Creating local video source...");
      MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
      if (appRtcClient.videoConstraints() != null) {
        VideoCapturer capturer = getVideoCapturer();
        videoSource = factory.createVideoSource(capturer, appRtcClient.videoConstraints());
        VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);
        videoTrack.addRenderer(new VideoRenderer(localRender));
        lMS.addTrack(videoTrack);
      }
      if (appRtcClient.audioConstraints() != null) {
        lMS.addTrack(
            factory.createAudioTrack(
                "ARDAMSa0", factory.createAudioSource(appRtcClient.audioConstraints())));
      }
      pc.addStream(lMS, new MediaConstraints());
    }
    logAndToast("Waiting for ICE candidates...");
  }
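
The createDataChannelToRegressionTestBug2302() helper referenced near the top is not included; in the original AppRTCDemo it simply exercised create/query/dispose on a data channel, roughly as follows (treat as a sketch):

  // Just for fun (and to regression-test bug 2302) make sure that DataChannels
  // can be created, queried, and disposed.
  private static void createDataChannelToRegressionTestBug2302(PeerConnection pc) {
    DataChannel dc = pc.createDataChannel("dcLabel", new DataChannel.Init());
    abortUnless("dcLabel".equals(dc.label()), "Unexpected label corruption?");
    dc.close();
    dc.dispose();
  }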
Example No. 16
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_video_chat);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    Bundle extras = getIntent().getExtras();
    if (extras == null || !extras.containsKey(Constants.ROBOT_NAME)) {
      Intent intent = new Intent(this, QBotActivity.class);
      startActivity(intent);
      Toast.makeText(
              this,
              "Need to pass robot_name to VideoChatActivity in intent extras (Constants.ROBOT_NAME).",
              Toast.LENGTH_SHORT)
          .show();
      finish();
      return;
    }
    this.robotname = extras.getString(Constants.ROBOT_NAME, "");
    this.mChatList = getListView();
    this.mChatEditText = (EditText) findViewById(R.id.chat_input);
    this.mCallStatus = (TextView) findViewById(R.id.call_status);

    // Set up the List View for chatting
    List<ChatMessage> ll = new LinkedList<ChatMessage>();
    mChatAdapter = new ChatAdapter(this, ll);
    mChatList.setAdapter(mChatAdapter);

    // First, we initialize the PeerConnectionFactory's Android globals with our
    // application context and some options.
    PeerConnectionFactory.initializeAndroidGlobals(
        this, // Context
        true, // Audio Enabled
        true, // Video Enabled
        true, // Hardware Acceleration Enabled
        null); // Render EGL Context

    PeerConnectionFactory pcFactory = new PeerConnectionFactory();
    this.pnRTCClient = new PnRTCClient(Constants.PUB_KEY, Constants.SUB_KEY, this.robotname);

    // Get the number of cameras and the front-/back-facing device names.
    int camNumber = VideoCapturerAndroid.getDeviceCount();
    String frontFacingCam = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    String backFacingCam = VideoCapturerAndroid.getNameOfBackFacingDevice();

    // Create a VideoCapturerAndroid instance for the chosen device name.
    VideoCapturer capturer = VideoCapturerAndroid.create(frontFacingCam);

    // First create a Video Source, then we can make a Video Track
    localVideoSource = pcFactory.createVideoSource(capturer, this.pnRTCClient.videoConstraints());
    VideoTrack localVideoTrack = pcFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);

    // First we create an AudioSource, then we can create our AudioTrack.
    AudioSource audioSource = pcFactory.createAudioSource(this.pnRTCClient.audioConstraints());
    AudioTrack localAudioTrack = pcFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);

    // To create our VideoRenderer, we can use the included VideoRendererGui for simplicity
    // First we need to set the GLSurfaceView that it should render to
    this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);

    // Then we set that view, and pass a Runnable to run once the surface is ready
    VideoRendererGui.setView(videoView, null);

    // Now that VideoRendererGui is ready, we can get our VideoRenderer.
    // The creation order matters: it determines which renderer is drawn on top.
    remoteRender =
        VideoRendererGui.create(
            0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    localRender =
        VideoRendererGui.create(
            0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);

    // We start out with an empty MediaStream object, created with help from our
    // PeerConnectionFactory
    //  Note that LOCAL_MEDIA_STREAM_ID can be any string
    MediaStream mediaStream = pcFactory.createLocalMediaStream(LOCAL_MEDIA_STREAM_ID);

    // Now we can add our tracks.
    mediaStream.addTrack(localVideoTrack);
    mediaStream.addTrack(localAudioTrack);

    // First attach the RTC Listener so that callback events will be triggered
    this.pnRTCClient.attachRTCListener(new DemoRTCListener());

    // Then attach your local media stream to the PnRTCClient.
    //  This will trigger the onLocalStream callback.
    this.pnRTCClient.attachLocalMediaStream(mediaStream);

    // Listen on a channel. This is your "phone number". Also set the maximum
    // number of chat users.
    this.pnRTCClient.listenOn(robotname);
    this.pnRTCClient.setMaxConnections(1);

    // If the intent contains a number to dial, call it now that you are connected.
    //  Else, remain listening for a call.
    if (extras.containsKey(Constants.USER_NAME)) {
      String callUser = extras.getString(Constants.USER_NAME, "");
      connectToUser(callUser);
    }
  }
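
The connectToUser() helper referenced at the end is not shown; in the PubNub AndroidRTC demo this kind of helper just delegates to the client (the connect() signature is an assumption):

  public void connectToUser(String user) {
    // Assumed PnRTCClient API: dial the given user over the signaling channel.
    this.pnRTCClient.connect(user);
  }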
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));

    getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    Point displaySize = new Point();
    getWindowManager().getDefaultDisplay().getRealSize(displaySize);

    vsv = new AppRTCGLView(this, displaySize);
    VideoRendererGui.setView(vsv);
    remoteRender = VideoRendererGui.create(0, 0, 100, 100);
    localRender = VideoRendererGui.create(70, 5, 25, 25);

    vsv.setOnClickListener(
        new View.OnClickListener() {
          @Override
          public void onClick(View v) {
            toggleHUD();
          }
        });
    setContentView(vsv);
    logAndToast("Tap the screen to toggle stats visibility");

    hudView = new TextView(this);
    hudView.setTextColor(Color.BLACK);
    hudView.setBackgroundColor(Color.WHITE);
    hudView.setAlpha(0.4f);
    hudView.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
    hudView.setVisibility(View.INVISIBLE);
    addContentView(hudView, hudLayout);

    if (!factoryStaticInitialized) {
      abortUnless(
          PeerConnectionFactory.initializeAndroidGlobals(this, true, true),
          "Failed to initializeAndroidGlobals");
      factoryStaticInitialized = true;
    }

    AudioManager audioManager = ((AudioManager) getSystemService(AUDIO_SERVICE));
    // TODO(fischman): figure out how to do this Right(tm) and remove the
    // suppression.
    @SuppressWarnings("deprecation")
    boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
    audioManager.setMode(
        isWiredHeadsetOn ? AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    sdpMediaConstraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));

    final Intent intent = getIntent();
    if ("android.intent.action.VIEW".equals(intent.getAction())) {
      connectToRoom(intent.getData().toString());
      return;
    }
    showGetRoomUI();
  }