private VideoTrack createVideoTrack(VideoCapturerAndroid capturer) {
    // Legacy API: capture constraints are passed to the factory together with the capturer.
    videoSource = factory.createVideoSource(capturer, videoConstraints);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
  }
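For context, a call site for this legacy helper has to build the capturer and the constraints itself. The sketch below is an assumption, not part of the original example: the helper name, the field names (videoConstraints), and the chosen resolution are illustrative, and it relies on the same deprecated VideoCapturerAndroid API used above.

  // Hypothetical call site for the legacy helper above.
  private VideoTrack createLocalVideoTrack() {
    String frontCamera = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    VideoCapturerAndroid capturer =
        (VideoCapturerAndroid) VideoCapturerAndroid.create(frontCamera);

    videoConstraints = new MediaConstraints();
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", "640"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", "480"));

    return createVideoTrack(capturer);
  }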
Example #2
  private VideoTrack createVideoTrack(VideoCapturer capturer) {
    videoSource = factory.createVideoSource(capturer);
    // Newer API: the capture format is chosen via startCapture() instead of MediaConstraints.
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
  }
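In this newer generation of the API, the VideoCapturer handed to the method is usually obtained from a CameraEnumerator rather than from VideoCapturerAndroid. A minimal sketch, assuming the org.webrtc Camera1Enumerator and a hypothetical helper name:

  // Hypothetical helper showing where the capturer argument typically comes from.
  private VideoCapturer createCameraCapturer() {
    CameraEnumerator enumerator = new Camera1Enumerator(false /* captureToTexture */);
    for (String deviceName : enumerator.getDeviceNames()) {
      if (enumerator.isFrontFacing(deviceName)) {
        return enumerator.createCapturer(deviceName, null /* eventsHandler */);
      }
    }
    return null; // No front-facing camera available.
  }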
Example #3
  /** Begin streaming to the server. MUST run on VcThread. */
  void doPublish(VideoStreamsView view) {
    if (mVideoCapturer != null) {
      return;
    }

    MediaConstraints videoConstraints = new MediaConstraints();
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", "320"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", "240"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", "10"));
    MediaConstraints audioConstraints = new MediaConstraints();
    audioConstraints.optional.add(
        new MediaConstraints.KeyValuePair("googEchoCancellation2", "true"));
    audioConstraints.optional.add(
        new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));
    lMS = sFactory.createLocalMediaStream("ARDAMS");

    if (videoConstraints != null) {
      mVideoCapturer = getVideoCapturer();
      mVideoSource = sFactory.createVideoSource(mVideoCapturer, videoConstraints);
      VideoTrack videoTrack = sFactory.createVideoTrack("ARDAMSv0", mVideoSource);
      lMS.addTrack(videoTrack);
    }
    if (audioConstraints != null) {
      AudioTrack audioTrack =
          sFactory.createAudioTrack("ARDAMSa0", sFactory.createAudioSource(audioConstraints));
      lMS.addTrack(audioTrack);
      // Note: the local audio track is created disabled (muted) here.
      audioTrack.setEnabled(false);
    }

    StreamDescription stream = new StreamDescription("", false, true, true, false, null, mNick);
    MediaConstraints pcConstraints = makePcConstraints();
    MyPcObserver pcObs = new MyPcObserver(new LicodeSdpObserver(stream, true), stream);

    PeerConnection pc = sFactory.createPeerConnection(mIceServers, pcConstraints, pcObs);
    pc.addStream(lMS, new MediaConstraints());

    stream.setMedia(lMS);
    if (view != null) {
      stream.attachRenderer(new VideoCallbacks(view, VideoStreamsView.LOCAL_STREAM_ID));
    }
    stream.initLocal(pc, pcObs.getSdpObserver());
  }
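makePcConstraints() is referenced above but not shown. A plausible body for this generation of the API, offered as an assumption rather than the project's actual code (the OfferToReceive* and DtlsSrtpKeyAgreement keys were the common choices of the era):

  // Hypothetical reconstruction of the makePcConstraints() helper used above.
  private MediaConstraints makePcConstraints() {
    MediaConstraints pcConstraints = new MediaConstraints();
    pcConstraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    pcConstraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    pcConstraints.optional.add(
        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    return pcConstraints;
  }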
Example #4
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_video_chat);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    Bundle extras = getIntent().getExtras();
    if (extras == null || !extras.containsKey(Constants.ROBOT_NAME)) {
      Intent intent = new Intent(this, QBotActivity.class);
      startActivity(intent);
      Toast.makeText(
              this,
              "Need to pass robot_name to VideoChatActivity in intent extras (Constants.ROBOT_NAME).",
              Toast.LENGTH_SHORT)
          .show();
      finish();
      return;
    }
    this.robotname = extras.getString(Constants.ROBOT_NAME, "");
    this.mChatList = getListView();
    this.mChatEditText = (EditText) findViewById(R.id.chat_input);
    this.mCallStatus = (TextView) findViewById(R.id.call_status);

    // Set up the List View for chatting
    List<ChatMessage> ll = new LinkedList<ChatMessage>();
    mChatAdapter = new ChatAdapter(this, ll);
    mChatList.setAdapter(mChatAdapter);

    // First, we initialize the PeerConnectionFactory with our application context and some options.
    PeerConnectionFactory.initializeAndroidGlobals(
        this, // Context
        true, // Audio Enabled
        true, // Video Enabled
        true, // Hardware Acceleration Enabled
        null); // Render EGL Context

    PeerConnectionFactory pcFactory = new PeerConnectionFactory();
    this.pnRTCClient = new PnRTCClient(Constants.PUB_KEY, Constants.SUB_KEY, this.robotname);

    // Query the number of cameras and the names of the front/back-facing devices
    int camNumber = VideoCapturerAndroid.getDeviceCount();
    String frontFacingCam = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    String backFacingCam = VideoCapturerAndroid.getNameOfBackFacingDevice();

    // Creates a VideoCapturerAndroid instance for the device name
    VideoCapturer capturer = VideoCapturerAndroid.create(frontFacingCam);

    // First create a Video Source, then we can make a Video Track
    localVideoSource = pcFactory.createVideoSource(capturer, this.pnRTCClient.videoConstraints());
    VideoTrack localVideoTrack = pcFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);

    // First we create an AudioSource then we can create our AudioTrack
    AudioSource audioSource = pcFactory.createAudioSource(this.pnRTCClient.audioConstraints());
    AudioTrack localAudioTrack = pcFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);

    // To create our VideoRenderer, we can use the included VideoRendererGui for simplicity
    // First we need to set the GLSurfaceView that it should render to
    this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);

    // Then we set that view, and pass a Runnable to run once the surface is ready
    VideoRendererGui.setView(videoView, null);

    // Now that VideoRendererGui is ready, we can create our VideoRenderers.
    // Creation order matters: it determines which renderer is drawn on top.
    remoteRender =
        VideoRendererGui.create(
            0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    localRender =
        VideoRendererGui.create(
            0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);

    // We start out with an empty MediaStream object, created with help from our
    // PeerConnectionFactory
    //  Note that LOCAL_MEDIA_STREAM_ID can be any string
    MediaStream mediaStream = pcFactory.createLocalMediaStream(LOCAL_MEDIA_STREAM_ID);

    // Now we can add our tracks.
    mediaStream.addTrack(localVideoTrack);
    mediaStream.addTrack(localAudioTrack);

    // First attach the RTC Listener so that callback events will be triggered
    this.pnRTCClient.attachRTCListener(new DemoRTCListener());

    // Then attach your local media stream to the PnRTCClient.
    //  This will trigger the onLocalStream callback.
    this.pnRTCClient.attachLocalMediaStream(mediaStream);

    // Listen on a channel. This is your "phone number"; also set the maximum number of chat users.
    this.pnRTCClient.listenOn(robotname);
    this.pnRTCClient.setMaxConnections(1);

    // If the intent contains a number to dial, call it now that you are connected.
    //  Else, remain listening for a call.
    if (extras.containsKey(Constants.USER_NAME)) {
      String callUser = extras.getString(Constants.USER_NAME, "");
      connectToUser(callUser);
    }
  }
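One thing this sample does not handle: on Android 6.0 and later, CAMERA and RECORD_AUDIO are runtime permissions, and the capturer and audio source above cannot deliver media without them. A minimal pre-flight check, assuming the support/androidx ContextCompat helper (not part of the original example):

  // Hypothetical permission check to run before the media setup in onCreate().
  private boolean hasMediaPermissions() {
    return ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED
        && ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            == PackageManager.PERMISSION_GRANTED;
  }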
  @Override
  public void onIceServers(List<PeerConnection.IceServer> iceServers) {
    factory = new PeerConnectionFactory();

    MediaConstraints pcConstraints = appRtcClient.pcConstraints();
    pcConstraints.optional.add(new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
    pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver);

    createDataChannelToRegressionTestBug2302(pc); // See method comment.

    // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    // Logging.enableTracing(
    //     "logcat:",
    //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
    //     Logging.Severity.LS_SENSITIVE);

    {
      final PeerConnection finalPC = pc;
      final Runnable repeatedStatsLogger =
          new Runnable() {
            public void run() {
              synchronized (quit[0]) {
                if (quit[0]) {
                  return;
                }
                final Runnable runnableThis = this;
                if (hudView.getVisibility() == View.INVISIBLE) {
                  vsv.postDelayed(runnableThis, 1000);
                  return;
                }
                boolean success =
                    finalPC.getStats(
                        new StatsObserver() {
                          public void onComplete(final StatsReport[] reports) {
                            runOnUiThread(
                                new Runnable() {
                                  public void run() {
                                    updateHUD(reports);
                                  }
                                });
                            for (StatsReport report : reports) {
                              Log.d(TAG, "Stats: " + report.toString());
                            }
                            vsv.postDelayed(runnableThis, 1000);
                          }
                        },
                        null);
                if (!success) {
                  throw new RuntimeException("getStats() returned false!");
                }
              }
            }
          };
      vsv.postDelayed(repeatedStatsLogger, 1000);
    }

    {
      logAndToast("Creating local video source...");
      MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
      if (appRtcClient.videoConstraints() != null) {
        VideoCapturer capturer = getVideoCapturer();
        videoSource = factory.createVideoSource(capturer, appRtcClient.videoConstraints());
        VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);
        videoTrack.addRenderer(new VideoRenderer(localRender));
        lMS.addTrack(videoTrack);
      }
      if (appRtcClient.audioConstraints() != null) {
        lMS.addTrack(
            factory.createAudioTrack(
                "ARDAMSa0", factory.createAudioSource(appRtcClient.audioConstraints())));
      }
      pc.addStream(lMS, new MediaConstraints());
    }
    logAndToast("Waiting for ICE candidates...");
  }
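getVideoCapturer() appears in two of the examples above, but its body is not included. In the AppRTCDemo lineage these snippets come from, it probed the old libjingle camera-name scheme until a capturer opened; the following is a reconstruction of that pattern, not verified against this project's source:

  // Hypothetical reconstruction of the getVideoCapturer() helper referenced above.
  private VideoCapturer getVideoCapturer() {
    String[] cameraFacing = {"front", "back"};
    int[] cameraIndex = {0, 1};
    int[] cameraOrientation = {0, 90, 180, 270};
    for (String facing : cameraFacing) {
      for (int index : cameraIndex) {
        for (int orientation : cameraOrientation) {
          String name =
              "Camera " + index + ", Facing " + facing + ", Orientation " + orientation;
          VideoCapturer capturer = VideoCapturer.create(name);
          if (capturer != null) {
            return capturer;
          }
        }
      }
    }
    throw new RuntimeException("Failed to open capturer");
  }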