@SuppressLint("NewApi") @Override public void onSurfaceCreated(GL10 unused, EGLConfig config) { Logging.d(TAG, "VideoRendererGui.onSurfaceCreated"); // Store render EGL context. if (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION) { synchronized (VideoRendererGui.class) { eglContext = EGL14.eglGetCurrentContext(); Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext); } } synchronized (yuvImageRenderers) { // Create drawer for YUV/OES frames. drawer = new GlRectDrawer(); // Create textures for all images. for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) { yuvImageRenderer.createTextures(); } onSurfaceCreatedCalled = true; } GlUtil.checkNoGLES2Error("onSurfaceCreated done"); GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); GLES20.glClearColor(0.15f, 0.15f, 0.15f, 1.0f); // Fire EGL context ready event. synchronized (VideoRendererGui.class) { if (eglContextReady != null) { eglContextReady.run(); } } }
public boolean reserveByteBuffer(byte[] data, long timeStamp) {
  checkIsOnValidThread();
  final ByteBuffer buffer = queuedBuffers.remove(data);
  if (buffer == null) {
    // Frames might be posted to |onPreviewFrame| with the previous format while changing
    // capture format in |startPreviewOnCameraThread|. Drop these old frames.
    Logging.w(TAG, "Received callback buffer from previous configuration with length: "
        + (data == null ? "null" : data.length));
    return false;
  }
  if (buffer.capacity() != frameSize) {
    throw new IllegalStateException("Callback buffer has unexpected frame size");
  }
  if (pendingBuffers.containsKey(timeStamp)) {
    Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique");
    return false;
  }
  pendingBuffers.put(timeStamp, buffer);
  if (queuedBuffers.isEmpty()) {
    Logging.d(TAG, "Camera is running out of capture buffers.");
  }
  return true;
}
@Override
public void run() {
  int cameraFramesCount = cameraStatistics.getAndResetFrameCount();
  int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
      / CAMERA_OBSERVER_PERIOD_MS;
  Logging.d(TAG, "Camera fps: " + cameraFps + ". Pending buffers: "
      + cameraStatistics.pendingFramesTimeStamps());
  if (cameraFramesCount == 0) {
    ++freezePeriodCount;
    if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount > CAMERA_FREEZE_REPORT_TIMOUT_MS
        && eventsHandler != null) {
      Logging.e(TAG, "Camera freeze detected.");
      if (cameraStatistics.pendingFramesCount() == cameraStatistics.maxPendingFrames) {
        eventsHandler.onCameraError("Camera failure. Client must return video buffers.");
      } else {
        eventsHandler.onCameraError("Camera failure.");
      }
      return;
    }
  } else {
    freezePeriodCount = 0;
  }
  cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
}
private void stopCaptureOnCameraThread() {
  checkIsOnCameraThread();
  Logging.d(TAG, "stopCaptureOnCameraThread");
  if (openCameraOnCodecThreadRunner != null) {
    cameraThreadHandler.removeCallbacks(openCameraOnCodecThreadRunner);
  }
  openCameraAttempts = 0;
  if (camera == null) {
    Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
    return;
  }

  cameraThreadHandler.removeCallbacks(cameraObserver);
  cameraStatistics.getAndResetFrameCount();
  Logging.d(TAG, "Stop preview.");
  camera.stopPreview();
  camera.setPreviewCallbackWithBuffer(null);
  if (!isCapturingToTexture()) {
    videoBuffers.stopReturnBuffersToCamera();
    Logging.d(TAG, "stopReturnBuffersToCamera called."
        + (cameraStatistics.pendingFramesCount() == 0
               ? " All buffers have been returned."
               : " Pending buffers: " + cameraStatistics.pendingFramesTimeStamps() + "."));
  }
  captureFormat = null;

  Logging.d(TAG, "Release camera.");
  camera.release();
  camera = null;
  if (eventsHandler != null) {
    eventsHandler.onCameraClosed();
  }
}
private void logStatistics() {
  long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
  Logging.d(TAG, "ID: " + id + ". Type: " + rendererType + ". Frames received: " + framesReceived
      + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
  if (framesReceived > 0 && framesRendered > 0) {
    Logging.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) + " ms. FPS: "
        + (float) framesRendered * 1e9 / timeSinceFirstFrameNs);
    Logging.d(TAG, "Draw time: " + (int) (drawTimeNs / (1000 * framesRendered))
        + " us. Copy time: " + (int) (copyTimeNs / (1000 * framesReceived)) + " us");
  }
}
private void createCapturer(CameraEnumerator enumerator) {
  final String[] deviceNames = enumerator.getDeviceNames();

  // First, try to find front facing camera
  Logging.d(TAG, "Looking for front facing cameras.");
  for (String deviceName : deviceNames) {
    if (enumerator.isFrontFacing(deviceName)) {
      Logging.d(TAG, "Creating front facing camera capturer.");
      videoCapturer = enumerator.createCapturer(deviceName, null);
      if (videoCapturer != null) {
        return;
      }
    }
  }

  // Front facing camera not found, try something else
  Logging.d(TAG, "Looking for other cameras.");
  for (String deviceName : deviceNames) {
    if (!enumerator.isFrontFacing(deviceName)) {
      Logging.d(TAG, "Creating other camera capturer.");
      videoCapturer = enumerator.createCapturer(deviceName, null);
      if (videoCapturer != null) {
        return;
      }
    }
  }
}
private boolean enableBuiltInNS(boolean enable) {
  Logging.d(TAG, "enableBuiltInNS(" + enable + ')');
  if (effects == null) {
    Logging.e(TAG, "Built-in NS is not supported on this platform");
    return false;
  }
  return effects.setNS(enable);
}
private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
  checkIsOnCameraThread();
  if (camera == null) {
    Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera.");
    return;
  }
  Logging.d(TAG,
      "onOutputFormatRequestOnCameraThread: " + width + "x" + height + "@" + framerate);
  frameObserver.onOutputFormatRequest(width, height, framerate);
}
private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) {
  if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
    return null; // MediaCodec.setParameters is missing.
  }
  for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
    MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
    if (info.isEncoder()) {
      continue;
    }
    String name = null;
    for (String mimeType : info.getSupportedTypes()) {
      if (mimeType.equals(mime)) {
        name = info.getName();
        break;
      }
    }
    if (name == null) {
      continue; // No HW support in this codec; try the next one.
    }
    Logging.v(TAG, "Found candidate decoder " + name);

    // Check if this is a supported decoder.
    boolean supportedCodec = false;
    for (String codecPrefix : supportedCodecPrefixes) {
      if (name.startsWith(codecPrefix)) {
        supportedCodec = true;
        break;
      }
    }
    if (!supportedCodec) {
      continue;
    }

    // Check if the codec supports either yuv420 or nv12.
    CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
    for (int colorFormat : capabilities.colorFormats) {
      Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
    }
    for (int supportedColorFormat : supportedColorList) {
      for (int codecColorFormat : capabilities.colorFormats) {
        if (codecColorFormat == supportedColorFormat) {
          // Found supported HW decoder.
          Logging.d(TAG, "Found target decoder " + name + ". Color: 0x"
              + Integer.toHexString(codecColorFormat));
          return new DecoderProperties(name, codecColorFormat);
        }
      }
    }
  }
  return null; // No HW decoder.
}
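// Illustrative sketch, not part of the original source: one way a sibling method in the same
// class might use the DecoderProperties returned by findDecoder() to configure a MediaCodec.
// The MIME type, the codec name prefixes, and the DecoderProperties field names (codecName,
// colorFormat) used below are assumptions for illustration, not values taken from this file.
private static MediaCodec createConfiguredDecoder(int width, int height) throws IOException {
  final String mime = "video/avc"; // Assumed H.264 stream.
  final String[] prefixes = {"OMX.qcom.", "OMX.Exynos."}; // Assumed HW decoder prefixes.
  DecoderProperties properties = findDecoder(mime, prefixes);
  if (properties == null) {
    return null; // No suitable HW decoder; the caller would fall back to software decoding.
  }
  MediaCodec codec = MediaCodec.createByCodecName(properties.codecName);
  MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
  format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
  codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
  codec.start();
  return codec;
}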
public void printStackTrace() {
  if (cameraThread != null) {
    StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
    if (cameraStackTraces.length > 0) {
      Logging.d(TAG, "VideoCapturerAndroid stack trace:");
      for (StackTraceElement stackTrace : cameraStackTraces) {
        Logging.d(TAG, stackTrace.toString());
      }
    }
  }
}
private void switchCameraOnCameraThread() {
  checkIsOnCameraThread();
  Logging.d(TAG, "switchCameraOnCameraThread");
  stopCaptureOnCameraThread();
  synchronized (cameraIdLock) {
    id = (id + 1) % Camera.getNumberOfCameras();
  }
  dropNextFrame = true;
  startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
      applicationContext);
  Logging.d(TAG, "switchCameraOnCameraThread done");
}
// Called by native code. Blocks until the camera is known to be stopped.
void stopCapture() throws InterruptedException {
  Logging.d(TAG, "stopCapture");
  final CountDownLatch barrier = new CountDownLatch(1);
  cameraThreadHandler.post(new Runnable() {
    @Override
    public void run() {
      stopCaptureOnCameraThread();
      barrier.countDown();
    }
  });
  barrier.await();
  Logging.d(TAG, "stopCapture done");
}
public static synchronized void remove(VideoRenderer.Callbacks renderer) {
  Logging.d(TAG, "VideoRendererGui.remove");
  if (instance == null) {
    throw new RuntimeException("Attempt to remove renderer before setting GLSurfaceView");
  }
  synchronized (instance.yuvImageRenderers) {
    final int index = instance.yuvImageRenderers.indexOf(renderer);
    if (index == -1) {
      Logging.w(TAG, "Couldn't remove renderer (not present in current list)");
    } else {
      instance.yuvImageRenderers.remove(index).release();
    }
  }
}
private void updateLayoutMatrix() {
  synchronized (updateLayoutLock) {
    if (!updateLayoutProperties) {
      return;
    }
    // Initialize to the maximum allowed area. Round to integer coordinates inward the layout
    // bounding box (ceil left/top, floor right/bottom) so the constraints are not broken.
    displayLayout.set((screenWidth * layoutInPercentage.left + 99) / 100,
        (screenHeight * layoutInPercentage.top + 99) / 100,
        (screenWidth * layoutInPercentage.right) / 100,
        (screenHeight * layoutInPercentage.bottom) / 100);
    Logging.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
        + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
        + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
    final float videoAspectRatio = (rotationDegree % 180 == 0)
        ? (float) videoWidth / videoHeight
        : (float) videoHeight / videoWidth;
    // Adjust display size based on |scalingType|.
    final Point displaySize = RendererCommon.getDisplaySize(
        scalingType, videoAspectRatio, displayLayout.width(), displayLayout.height());
    displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
        (displayLayout.height() - displaySize.y) / 2);
    Logging.d(TAG,
        " Adjusted display size: " + displayLayout.width() + " x " + displayLayout.height());
    layoutMatrix = RendererCommon.getLayoutMatrix(
        mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
    updateLayoutProperties = false;
    Logging.d(TAG, " AdjustTextureCoords done");
  }
}
private boolean stopRecording() {
  Logging.d(TAG, "stopRecording");
  assertTrue(audioThread != null);
  audioThread.stopThread();
  if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
    Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
  }
  audioThread = null;
  if (effects != null) {
    effects.release();
  }
  audioRecord.release();
  audioRecord = null;
  return true;
}
// Called on cameraThread, so it must not be synchronized.
@Override
public void onPreviewFrame(byte[] data, Camera callbackCamera) {
  checkIsOnCameraThread();
  if (camera == null) {
    return;
  }
  if (camera != callbackCamera) {
    throw new RuntimeException("Unexpected camera in callback!");
  }

  final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

  if (eventsHandler != null && !firstFrameReported) {
    eventsHandler.onFirstFrameAvailable();
    firstFrameReported = true;
  }

  // Mark the frame owning |data| as used. Note that since |data| is a direct buffer,
  // data.length >= videoBuffers.frameSize.
  if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) {
    cameraStatistics.addPendingFrame(captureTimeNs);
    frameObserver.onByteBufferFrameCaptured(data, videoBuffers.frameSize, captureFormat.width,
        captureFormat.height, getFrameOrientation(), captureTimeNs);
  } else {
    Logging.w(TAG, "reserveByteBuffer failed - dropping frame.");
  }
}
private void createPeerConnectionInternal(EGLContext renderEGLContext) {
  if (factory == null || isError) {
    Log.e(TAG, "PeerConnection factory is not created");
    return;
  }
  Log.d(TAG, "Create peer connection.");
  Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
  if (videoConstraints != null) {
    Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
  }
  queuedRemoteCandidates = new LinkedList<IceCandidate>();

  if (videoCallEnabled) {
    Log.d(TAG, "EGLContext: " + renderEGLContext);
    factory.setVideoHwAccelerationOptions(renderEGLContext);
  }

  PeerConnection.RTCConfiguration rtcConfig =
      new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
  // TCP candidates are only useful when connecting to a server that supports ICE-TCP.
  rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
  rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
  rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
  // Use an ECDSA key for the certificate.
  rtcConfig.keyType = PeerConnection.KeyType.ECDSA;

  peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
  isInitiator = false;

  // Set default WebRTC tracing and INFO libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  Logging.enableTracing(
      "logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT), Logging.Severity.LS_INFO);

  mediaStream = factory.createLocalMediaStream("ARDAMS");
  if (videoCallEnabled) {
    String cameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
    String frontCameraDeviceName = CameraEnumerationAndroid.getNameOfFrontFacingDevice();
    if (numberOfCameras > 1 && frontCameraDeviceName != null) {
      cameraDeviceName = frontCameraDeviceName;
    }
    Log.d(TAG, "Opening camera: " + cameraDeviceName);
    videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null);
    if (videoCapturer == null) {
      reportError("Failed to open camera");
      return;
    }
    mediaStream.addTrack(createVideoTrack(videoCapturer));
  }

  mediaStream.addTrack(
      factory.createAudioTrack(AUDIO_TRACK_ID, factory.createAudioSource(audioConstraints)));
  peerConnection.addStream(mediaStream);

  Log.d(TAG, "Peer connection created.");
}
@Override
public synchronized void renderFrame(I420Frame frame) {
  if (surface == null) {
    // This object has been released.
    VideoRenderer.renderFrameDone(frame);
    return;
  }
  if (!seenFrame && rendererEvents != null) {
    Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
    rendererEvents.onFirstFrameRendered();
  }
  framesReceived++;
  synchronized (pendingFrameLock) {
    // Check input frame parameters.
    if (frame.yuvFrame) {
      if (frame.yuvStrides[0] < frame.width || frame.yuvStrides[1] < frame.width / 2
          || frame.yuvStrides[2] < frame.width / 2) {
        Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " + frame.yuvStrides[1]
            + ", " + frame.yuvStrides[2]);
        VideoRenderer.renderFrameDone(frame);
        return;
      }
    }

    if (pendingFrame != null) {
      // Skip rendering of this frame if previous frame was not rendered yet.
      framesDropped++;
      VideoRenderer.renderFrameDone(frame);
      return;
    }
    pendingFrame = frame;
  }
  setSize(frame.width, frame.height, frame.rotationDegree);
  seenFrame = true;

  // Request rendering.
  surface.requestRender();
}
WebRtcAudioRecord(Context context, long nativeAudioRecord) {
  Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
  this.context = context;
  this.nativeAudioRecord = nativeAudioRecord;
  if (DEBUG) {
    WebRtcAudioUtils.logDeviceInfo(TAG);
  }
  effects = WebRtcAudioEffects.create();
}
// Dequeue an input buffer and return its index, -1 if no input buffer is
// available, or -2 if the codec is no longer operative.
private int dequeueInputBuffer() {
  checkOnMediaCodecThread();
  try {
    return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
  } catch (IllegalStateException e) {
    Logging.e(TAG, "dequeueInputBuffer failed", e);
    return -2;
  }
}
private boolean startRecording() {
  Logging.d(TAG, "startRecording");
  assertTrue(audioRecord != null);
  assertTrue(audioThread == null);
  try {
    audioRecord.startRecording();
  } catch (IllegalStateException e) {
    Logging.e(TAG, "AudioRecord.startRecording failed: " + e.getMessage());
    return false;
  }
  if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
    Logging.e(TAG, "AudioRecord.startRecording failed");
    return false;
  }
  audioThread = new AudioRecordThread("AudioRecordJavaThread");
  audioThread.start();
  return true;
}
// Release a dequeued output buffer back to the codec for re-use. Return
// false if the codec is no longer operable.
private boolean releaseOutputBuffer(int index) {
  checkOnMediaCodecThread();
  try {
    mediaCodec.releaseOutputBuffer(index, useSurface);
    return true;
  } catch (IllegalStateException e) {
    Logging.e(TAG, "releaseOutputBuffer failed", e);
    return false;
  }
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
  Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " + width + " x " + height);
  screenWidth = width;
  screenHeight = height;
  synchronized (yuvImageRenderers) {
    for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
      yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
    }
  }
}
private boolean queueInputBuffer(int inputBufferIndex, int size, long timestampUs) {
  checkOnMediaCodecThread();
  try {
    inputBuffers[inputBufferIndex].position(0);
    inputBuffers[inputBufferIndex].limit(size);
    mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
    return true;
  } catch (IllegalStateException e) {
    Logging.e(TAG, "decode failed", e);
    return false;
  }
}
public void returnBuffer(long timeStamp) {
  checkIsOnValidThread();
  final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
  if (returnedFrame == null) {
    throw new RuntimeException(
        "Unknown data buffer with time stamp " + timeStamp + " returned?!?");
  }

  if (camera != null && returnedFrame.capacity() == frameSize) {
    camera.addCallbackBuffer(returnedFrame.array());
    if (queuedBuffers.isEmpty()) {
      Logging.d(TAG, "Frame returned when camera is running out of capture buffers for TS "
          + TimeUnit.NANOSECONDS.toMillis(timeStamp));
    }
    queuedBuffers.put(returnedFrame.array(), returnedFrame);
    return;
  }

  if (returnedFrame.capacity() != frameSize) {
    Logging.d(TAG, "returnBuffer with time stamp " + TimeUnit.NANOSECONDS.toMillis(timeStamp)
        + " called with old frame size, " + returnedFrame.capacity() + ".");
    // Since this frame has the wrong size, don't requeue it. Frames with the correct size
    // are created in queueCameraBuffers, so this must be an old buffer.
    return;
  }

  Logging.d(TAG, "returnBuffer with time stamp " + TimeUnit.NANOSECONDS.toMillis(timeStamp)
      + " called after camera has been stopped.");
}
@Override
public void onError(int error, Camera camera) {
  String errorMessage;
  if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
    errorMessage = "Camera server died!";
  } else {
    errorMessage = "Camera error: " + error;
  }
  Logging.e(TAG, errorMessage);
  if (eventsHandler != null) {
    eventsHandler.onCameraError(errorMessage);
  }
}
private void release() {
  Logging.d(TAG, "Java releaseDecoder");
  checkOnMediaCodecThread();
  try {
    mediaCodec.stop();
    mediaCodec.release();
  } catch (IllegalStateException e) {
    Logging.e(TAG, "release failed", e);
  }
  mediaCodec = null;
  mediaCodecThread = null;
  if (useSurface) {
    surface.release();
    if (textureID != 0) {
      Logging.d(TAG, "Delete video decoder TextureID " + textureID);
      GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
      textureID = 0;
    }
    eglBase.release();
    eglBase = null;
  }
}
public static synchronized void reset(VideoRenderer.Callbacks renderer) {
  Logging.d(TAG, "VideoRendererGui.reset");
  if (instance == null) {
    throw new RuntimeException("Attempt to reset renderer before setting GLSurfaceView");
  }
  synchronized (instance.yuvImageRenderers) {
    for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
      if (yuvImageRenderer == renderer) {
        yuvImageRenderer.reset();
      }
    }
  }
}
public static synchronized void setRendererEvents(
    VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
  Logging.d(TAG, "VideoRendererGui.setRendererEvents");
  if (instance == null) {
    throw new RuntimeException("Attempt to set renderer events before setting GLSurfaceView");
  }
  synchronized (instance.yuvImageRenderers) {
    for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
      if (yuvImageRenderer == renderer) {
        yuvImageRenderer.rendererEvents = rendererEvents;
      }
    }
  }
}
@Override
public void run() {
  Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
  Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo());
  assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);

  long lastTime = System.nanoTime();
  while (keepAlive) {
    int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity());
    if (bytesRead == byteBuffer.capacity()) {
      if (microphoneMute) {
        byteBuffer.clear();
        byteBuffer.put(emptyBytes);
      }
      nativeDataIsRecorded(bytesRead, nativeAudioRecord);
    } else {
      Logging.e(TAG, "AudioRecord.read failed: " + bytesRead);
      if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
        keepAlive = false;
      }
    }
    if (DEBUG) {
      long nowTime = System.nanoTime();
      long durationInMs = TimeUnit.NANOSECONDS.toMillis((nowTime - lastTime));
      lastTime = nowTime;
      Logging.d(TAG, "bytesRead[" + durationInMs + "] " + bytesRead);
    }
  }

  try {
    if (audioRecord != null) {
      audioRecord.stop();
    }
  } catch (IllegalStateException e) {
    Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
  }
}
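// Illustrative sketch, not part of the original source: how the byteBuffer consumed by the
// recording loop above might be provisioned, together with an AudioRecord sized to service it.
// The sample rate, channel configuration, and audio source below are assumptions for
// illustration, not values taken from this class.
private AudioRecord createRecorderSketch() {
  final int sampleRate = 16000; // Assumed capture rate in Hz.
  final int bytesPerSample = 2; // Mono, 16-bit PCM.
  final int samplesPer10Ms = sampleRate / 100;
  byteBuffer = ByteBuffer.allocateDirect(bytesPerSample * samplesPer10Ms);
  emptyBytes = new byte[byteBuffer.capacity()]; // Written into the buffer when the mic is muted.
  final int minBufferSize = AudioRecord.getMinBufferSize(
      sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
  // Use at least double the minimum size to reduce the risk of glitches under load.
  return new AudioRecord(MediaRecorder.AudioSource.VOICE_COMMUNICATION, sampleRate,
      AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
      Math.max(minBufferSize * 2, byteBuffer.capacity()));
}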