/**
 * Sets the GLSurfaceView which will display the preview.
 *
 * @param view the GLSurfaceView
 */
public void setGLSurfaceView(final GLSurfaceView view) {
    mGlSurfaceView = view;
    mGlSurfaceView.setEGLContextClientVersion(2);
    mGlSurfaceView.setRenderer(mRenderer);
    mGlSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    mGlSurfaceView.requestRender();
}
@Override
public void requestRendering() {
    if (view != null) {
        if (view instanceof GLSurfaceViewCupcake) {
            ((GLSurfaceViewCupcake) view).requestRender();
        } else if (view instanceof GLSurfaceView) {
            ((GLSurfaceView) view).requestRender();
        }
    }
}
public void requestRender(Canvas canvas, boolean waitForCompletion) {
    synchronized (mSyncLock) {
        super.requestRender();
        mFunctorAttached = true;
        mWaitForCompletion = waitForCompletion;
        if (canvas == null) {
            mNeedsProcessGL = true;
        } else {
            mNeedsDrawGL = true;
            if (!waitForCompletion) {
                // Wait until SYNC is complete only.
                // Do this every time there was a new frame.
                try {
                    while (mNeedsDrawGL) {
                        mSyncLock.wait();
                    }
                } catch (InterruptedException e) {
                    // ...
                }
            }
        }
        if (waitForCompletion) {
            try {
                while (mWaitForCompletion) {
                    mSyncLock.wait();
                }
            } catch (InterruptedException e) {
                // ...
            }
        }
    }
}
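/*
 * Hedged sketch (not from the original source): a possible render-thread counterpart
 * to requestRender() above. The calling thread blocks on mSyncLock until mNeedsDrawGL
 * (the SYNC stage) and, when requested, mWaitForCompletion are cleared, so whichever
 * thread performs the GL work must clear those flags and call notifyAll() under the
 * same lock. This simplified version signals both stages at a single completion point;
 * the method name onRenderThreadFrameComplete() is an assumption for illustration.
 */
private void onRenderThreadFrameComplete() {
    synchronized (mSyncLock) {
        // SYNC stage done: release callers that only waited for the new frame to be picked up.
        mNeedsDrawGL = false;
        mNeedsProcessGL = false;
        // Full frame done: release callers that asked to wait for completion.
        mWaitForCompletion = false;
        mSyncLock.notifyAll();
    }
}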
void showFlipAnimation() {
    if (!inFlipAnimation) {
        inFlipAnimation = true;
        cards.setVisible(true);
        cards.setFirstDrawFinished(false);
        surfaceView.requestRender();
    }
}
@Override
public void requestRendering() {
    if (view != null) {
        // jw: changed
        // view.requestRender();
        if (view instanceof GLSurfaceViewCupcake) {
            ((GLSurfaceViewCupcake) view).requestRender();
        } else if (view instanceof GLSurfaceViewAPI18) {
            ((GLSurfaceViewAPI18) view).requestRender();
        } else if (view instanceof GLSurfaceView) {
            ((GLSurfaceView) view).requestRender();
        } else {
            throw new RuntimeException("unimplemented");
        }
    }
}
/**
 * Sets the GLSurfaceView which will display the preview.
 *
 * @param view the GLSurfaceView
 */
public void setGLSurfaceView(final GLSurfaceView view) {
    mGlSurfaceView = view;
    // Use OpenGL ES 2.0.
    mGlSurfaceView.setEGLContextClientVersion(2);
    // RGBA color buffer with 8 bits per channel, a 16-bit depth buffer and no stencil buffer.
    mGlSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
    mGlSurfaceView.getHolder().setFormat(PixelFormat.RGBA_8888);
    mGlSurfaceView.setRenderer(mRenderer);
    // Render-on-demand ("dirty") mode: requestRender() must be called manually to redraw.
    mGlSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    mGlSurfaceView.requestRender();
}
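/*
 * Hedged usage sketch (not from the original source): one way a caller might wire a
 * GLSurfaceView into the helper above from an Activity. The class name
 * CameraPreviewRenderer and its setGLSurfaceView()/requestRender() methods are
 * assumptions modeled on the methods shown in this listing; only the GLSurfaceView
 * and Activity calls are standard Android APIs.
 */
public class PreviewActivity extends Activity {
    private GLSurfaceView mGlSurfaceView;
    private CameraPreviewRenderer mPreviewRenderer; // hypothetical helper owning mRenderer

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mGlSurfaceView = new GLSurfaceView(this);
        setContentView(mGlSurfaceView);
        mPreviewRenderer = new CameraPreviewRenderer();
        // Configures EGL, attaches the renderer and switches to RENDERMODE_WHEN_DIRTY.
        mPreviewRenderer.setGLSurfaceView(mGlSurfaceView);
    }

    @Override
    protected void onResume() {
        super.onResume();
        mGlSurfaceView.onResume();
        // In dirty mode, redraws only happen when explicitly requested.
        mPreviewRenderer.requestRender();
    }

    @Override
    protected void onPause() {
        mGlSurfaceView.onPause();
        super.onPause();
    }
}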
@Override
public void onFrameAvailable(SurfaceTexture st) {
    // The SurfaceTexture uses this to signal the availability of a new frame.  The
    // thread that "owns" the external texture associated with the SurfaceTexture (which,
    // by virtue of the context being shared, *should* be either one) needs to call
    // updateTexImage() to latch the buffer.
    //
    // Once the buffer is latched, the GLSurfaceView thread can signal the encoder thread.
    // This feels backward -- we want recording to be prioritized over rendering -- but
    // since recording is only enabled some of the time it's easier to do it this way.
    //
    // Since GLSurfaceView doesn't establish a Looper, this will *probably* execute on
    // the main UI thread.  Fortunately, requestRender() can be called from any thread,
    // so it doesn't really matter.
    if (VERBOSE) Log.d(TAG, "ST onFrameAvailable");
    mGLView.requestRender();
}
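/*
 * Hedged sketch (not from the original source): how the onFrameAvailable() callback
 * above typically pairs with the GL thread. The SurfaceTexture is registered as a
 * frame listener, and the actual latch via updateTexImage() happens in onDrawFrame(),
 * which runs because onFrameAvailable() called requestRender(). Class and field names
 * here (PreviewRenderer, mSurfaceTexture, mTextureId, mGLView) are assumptions; only
 * the SurfaceTexture, GLSurfaceView and GLES20 calls are standard Android APIs.
 */
public class PreviewRenderer implements GLSurfaceView.Renderer,
        SurfaceTexture.OnFrameAvailableListener {
    private final GLSurfaceView mGLView;
    private SurfaceTexture mSurfaceTexture;
    private int mTextureId;

    public PreviewRenderer(GLSurfaceView glView) {
        mGLView = glView;
    }

    @Override
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        // Create the external texture on the GL thread, then listen for frames on it.
        int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
        mTextureId = tex[0];
        mSurfaceTexture = new SurfaceTexture(mTextureId);
        mSurfaceTexture.setOnFrameAvailableListener(this);
    }

    @Override
    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 unused) {
        // Latch the newest frame into the external texture before drawing with it.
        mSurfaceTexture.updateTexImage();
        // ... draw mTextureId here, then optionally hand the frame to the encoder ...
    }

    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        // Runs on an arbitrary thread; requestRender() is safe to call from any thread.
        mGLView.requestRender();
    }
}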
protected void draw_mode(int mode) {
    Log.e("navit", "draw_mode " + mode);
    if (mode == 2 && parent_graphics == null) {
        view.draw(draw_canvas);
        view.invalidate();
        view.requestRender();
    }
    if (mode == 1 || (mode == 0 && parent_graphics != null)) {
        if (renderer != null) {
            renderer.flb_len = 0;
        }
        draw_bitmap.eraseColor(0);
    }
    if (mode == 0 && renderer != null) {
        renderer.flb_len = 0;
    }
}
@Override
public synchronized void renderFrame(I420Frame frame) {
    if (surface == null) {
        // This object has been released.
        VideoRenderer.renderFrameDone(frame);
        return;
    }
    if (!seenFrame && rendererEvents != null) {
        Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
        rendererEvents.onFirstFrameRendered();
    }
    framesReceived++;
    synchronized (pendingFrameLock) {
        // Check input frame parameters.
        if (frame.yuvFrame) {
            if (frame.yuvStrides[0] < frame.width
                    || frame.yuvStrides[1] < frame.width / 2
                    || frame.yuvStrides[2] < frame.width / 2) {
                Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", "
                        + frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
                VideoRenderer.renderFrameDone(frame);
                return;
            }
        }
        if (pendingFrame != null) {
            // Skip rendering of this frame if previous frame was not rendered yet.
            framesDropped++;
            VideoRenderer.renderFrameDone(frame);
            return;
        }
        pendingFrame = frame;
    }
    setSize(frame.width, frame.height, frame.rotationDegree);
    seenFrame = true;
    // Request rendering.
    surface.requestRender();
}
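/*
 * Hedged sketch (not from the original source): a possible consumer of pendingFrame
 * on the GL thread. renderFrame() above stores at most one pending frame and calls
 * surface.requestRender(); the draw pass then has to take the frame under
 * pendingFrameLock, draw it, and release it with VideoRenderer.renderFrameDone() so
 * the producer can reuse the buffer. drawPendingFrame() and uploadAndDrawFrame() are
 * assumed names; renderFrameDone() is the legacy org.webrtc release call used above.
 */
private void drawPendingFrame() {
    I420Frame frameToRender;
    synchronized (pendingFrameLock) {
        frameToRender = pendingFrame;
        pendingFrame = null;
    }
    if (frameToRender == null) {
        return; // Nothing new since the last draw.
    }
    try {
        uploadAndDrawFrame(frameToRender); // hypothetical GL upload + draw helper
    } finally {
        // Always return the frame so the capturer/decoder can reuse its buffers.
        VideoRenderer.renderFrameDone(frameToRender);
    }
}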
@Override
public void doFrame(final long frameTimeNanos) {
    mSurfaceView.requestRender();
    Choreographer.getInstance().postFrameCallback(this);
}
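/*
 * Hedged sketch (not from the original source): how a Choreographer-driven frame
 * callback like doFrame() above is typically started and stopped so that a
 * GLSurfaceView in RENDERMODE_WHEN_DIRTY is redrawn once per display vsync. The class
 * name VsyncRenderDriver and the start()/stop() methods are assumptions; Choreographer
 * (API 16+) and GLSurfaceView calls are standard Android APIs.
 */
public class VsyncRenderDriver implements Choreographer.FrameCallback {
    private final GLSurfaceView mSurfaceView;

    public VsyncRenderDriver(GLSurfaceView surfaceView) {
        mSurfaceView = surfaceView;
    }

    /** Call from Activity.onResume(), after mSurfaceView.onResume(). */
    public void start() {
        Choreographer.getInstance().postFrameCallback(this);
    }

    /** Call from Activity.onPause(), before mSurfaceView.onPause(). */
    public void stop() {
        Choreographer.getInstance().removeFrameCallback(this);
    }

    @Override
    public void doFrame(long frameTimeNanos) {
        // Render one frame for this vsync, then re-arm for the next one.
        mSurfaceView.requestRender();
        Choreographer.getInstance().postFrameCallback(this);
    }
}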
/** Request the preview to be rendered again. */
public void requestRender() {
    if (mGlSurfaceView != null) {
        mGlSurfaceView.requestRender();
    }
}
@Override
public synchronized void run() {
    while (true) {
        while (!busy || stopRequest) {
            try {
                wait(); // wait for a new frame
            } catch (InterruptedException e) {
            }
        }
        if (stopRequest) {
            // do nothing
        } else {
            if (calcFps) {
                // Calculate the fps.
                if (start == 0) {
                    start = SystemClock.uptimeMillis();
                }
                fcount++;
                if (fcount == 30) {
                    now = SystemClock.uptimeMillis();
                    fps = 30 / ((now - start) / 1000.0);
                    // Log.i("AR", "fps:" + fps);
                    start = 0;
                    fcount = 0;
                }
            }
            // Pass the frame to the native code and find the marker information.
            // The false at the end is a leftover from the calibration.
            nativelib.detectMarkers(frame, mat, frameHeight, frameWidth, false);
            // Needs to be reworked: either removed entirely or replaced with some timer delay.
            openglView.requestRender();
            // Write all current information about the detected markers into the
            // marker hashmap and notify markers that they have been recognized.
            int startIdx, endIdx, rotIdx, idIdx = 0;
            for (int i = 0; i < (int) mat[0]; i++) {
                startIdx = (1 + i * 18);
                endIdx = startIdx + 15;
                rotIdx = endIdx + 1;
                idIdx = rotIdx + 1;
                // Log.d(LOG_TAG, "StartIdx");
                MarkerObject markerObj = markerObjectMap.get((int) mat[idIdx]);
                if (markerObj != null) {
                    markerObj.OnMarkerPositionRecognized(mat, startIdx, endIdx);
                } else {
                    if (unrecognizedMarkerListener != null) {
                        unrecognizedMarkerListener.onUnrecognizedMarkerDetected(
                                (int) mat[idIdx], mat, startIdx, endIdx, (int) mat[rotIdx]);
                    }
                }
            }
            busy = false;
            preview.reAddCallbackBuffer(frame);
        }
        yield();
    }
}
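/*
 * Hedged sketch (not from the original source): a possible producer side for the
 * detection thread above, using the legacy android.hardware.Camera preview-buffer API.
 * Each preview frame is handed to the worker, which waits in run() until busy becomes
 * true, and reAddCallbackBuffer() would later return the byte[] to the camera. Names
 * such as CameraFramePump, DetectionThread and setFrame() are assumptions modeled on
 * the snippet above; the Camera callback and buffer calls are real (deprecated) APIs.
 */
public class CameraFramePump implements Camera.PreviewCallback {
    private final DetectionThread detectionThread; // hypothetical worker shown above
    private final Camera camera;

    public CameraFramePump(Camera camera, DetectionThread detectionThread, int bufferSize) {
        this.camera = camera;
        this.detectionThread = detectionThread;
        // Pre-register a reusable preview buffer and receive frames into it.
        camera.addCallbackBuffer(new byte[bufferSize]);
        camera.setPreviewCallbackWithBuffer(this);
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Hand the frame to the worker; it would set busy = true and call notify(),
        // waking the wait() loop in run(). The buffer goes back to the camera later
        // via reAddCallbackBuffer(data), i.e. camera.addCallbackBuffer(data).
        detectionThread.setFrame(data);
    }
}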
public void requestRender() {
    mGLSurfaceView.requestRender();
}
public void requestRender() {
    if (mGLSurface != null) {
        mGLSurface.requestRender();
    }
}