@DSGenerator(
      tool_name = "Doppelganger",
      tool_version = "2.0",
      generated_on = "2014-03-24 16:06:05.610 -0400",
      hash_original_method = "CDA5C14F8CDCE778A8FB11BE8150AA9D",
      hash_generated_method = "3FFDA852F4258D4F3758CE4950CDFF55")
  @Override
  public Frame newBoundFrame(FrameFormat format, int bindingType, long bindingId) {
    Frame result = null;
    switch (format.getTarget()) {
      case FrameFormat.TARGET_GPU: {
        GLFrame glFrame = new GLFrame(format, this, bindingType, bindingId);
        glFrame.init(getGLEnvironment());
        result = glFrame;
        break;
      }

      default:
        throw new RuntimeException(
            "Attached frames are not supported for target type: "
                + FrameFormat.targetToString(format.getTarget())
                + "!");
    }
    return result;
  }
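  // A minimal usage sketch (hypothetical caller, not part of this class): wrapping an
  // externally created GL texture as a frame. GLFrame.EXISTING_TEXTURE_BINDING is the
  // binding-type constant this framework defines for that case; the texture id and the
  // format values below are illustrative assumptions.
  //
  //   FrameFormat gpuFormat =
  //       ImageFormat.create(640, 480, ImageFormat.COLORSPACE_RGBA, FrameFormat.TARGET_GPU);
  //   Frame bound = frameManager.newBoundFrame(gpuFormat, GLFrame.EXISTING_TEXTURE_BINDING, texId);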
  @Override
  public void close(FilterContext context, VideoEventFilter myfilter) {
    super.close(context, myfilter);

    if (mPreviousFrame != null) {
      mPreviousFrame.release();
      mPreviousFrame = null;
    }
  }
  @Override
  public void open(FilterContext context) {
    if (mLogVerbose) Log.v(TAG, "Opening SurfaceTextureSource");
    // Create the SurfaceTexture anew each time; it can use substantial memory.
    mSurfaceTexture = new SurfaceTexture(mMediaFrame.getTextureId());
    // Connect the SurfaceTexture to the frame-available callback.
    mSurfaceTexture.setOnFrameAvailableListener(onFrameAvailableListener);
    // Hand the SurfaceTexture to the source so it can begin producing frames.
    mSourceListener.onSurfaceTextureSourceReady(mSurfaceTexture);
    mFirstFrame = true;
    mNewFrameAvailable.close();
  }
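  // open() wires mSurfaceTexture to onFrameAvailableListener and closes the
  // mNewFrameAvailable gate. A listener along these lines (a sketch, assuming
  // mNewFrameAvailable is an android.os.ConditionVariable, as its open()/close()
  // usage suggests) would reopen the gate when the producer posts a new frame:
  //
  //   private final SurfaceTexture.OnFrameAvailableListener onFrameAvailableListener =
  //       new SurfaceTexture.OnFrameAvailableListener() {
  //         @Override
  //         public void onFrameAvailable(SurfaceTexture surfaceTexture) {
  //           if (mLogVerbose) Log.v(TAG, "New frame from SurfaceTexture");
  //           mNewFrameAvailable.open();
  //         }
  //       };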
  @DSGenerator(
      tool_name = "Doppelganger",
      tool_version = "2.0",
      generated_on = "2014-03-24 16:06:05.612 -0400",
      hash_original_method = "D9BBB52F9327CED81F2FD031D449BDDB",
      hash_generated_method = "BEBB154B0E0C4A7B71B71BFF09B6D9F8")
  private Frame createNewFrame(FrameFormat format) {
    Frame result = null;
    switch (format.getTarget()) {
      case FrameFormat.TARGET_SIMPLE:
        result = new SimpleFrame(format, this);
        break;

      case FrameFormat.TARGET_NATIVE:
        result = new NativeFrame(format, this);
        break;

      case FrameFormat.TARGET_GPU: {
        GLFrame glFrame = new GLFrame(format, this);
        glFrame.init(getGLEnvironment());
        result = glFrame;
        break;
      }

      case FrameFormat.TARGET_VERTEXBUFFER:
        result = new VertexFrame(format, this);
        break;

      default:
        throw new RuntimeException(
            "Unsupported frame target type: "
                + FrameFormat.targetToString(format.getTarget())
                + "!");
    }
    return result;
  }
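  // A sketch of how frames of each target are typically requested (hypothetical
  // caller; width/height and colorspace values are illustrative assumptions, and
  // newFrame(format) is presumed to route through createNewFrame):
  //
  //   FrameFormat gpuFormat =
  //       ImageFormat.create(640, 480, ImageFormat.COLORSPACE_RGBA, FrameFormat.TARGET_GPU);
  //   Frame gpuFrame = context.getFrameManager().newFrame(gpuFormat);    // GLFrame, GL-initialized
  //
  //   FrameFormat nativeFormat =
  //       ImageFormat.create(640, 480, ImageFormat.COLORSPACE_RGBA, FrameFormat.TARGET_NATIVE);
  //   Frame cpuFrame = context.getFrameManager().newFrame(nativeFormat); // NativeFrame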
  @Override
  public boolean process(
      FilterContext context, VideoEventFilter myfilter, boolean isRenderOutput, GLFrame output) {
    super.process(context, myfilter, isRenderOutput, output);

    GLFrame camera = myfilter.getInputCameraGLFrame();

    FrameFormat inputFormat = camera.getFormat();

    // Lazily allocate the frame that will hold the captured photo, seeded from
    // the current camera frame.
    if (mMainFrame == null) {
      MutableFrameFormat outputFormat = inputFormat.mutableCopy();
      mMainFrame = (GLFrame) context.getFrameManager().newFrame(outputFormat);
      mMainFrame.focus();
      mCopyProgramWithColor.process(camera, mMainFrame);
    }

    // Lazily allocate the buffer that caches the frame preceding the effect.
    if (mPreviousFrame == null) {
      MutableFrameFormat outputFormat = inputFormat.mutableCopy();
      mPreviousFrame = (GLFrame) context.getFrameManager().newFrame(outputFormat);
      mPreviousFrame.focus();
    }

    // Inputs for the blend: slot 0 is swapped below between the live camera,
    // the cached previous frame, and the held photo.
    Frame[] subtractInputs = {camera, mPreviousFrame};

    long currentTimeStamp = myfilter.getNowTimeStamp();
    long cameraPhoto;

    if (this.containsKey("camera_photo")) {
      try {
        // The stored photo time is an offset from mStart; convert to absolute.
        cameraPhoto = ((Long) this.get("camera_photo")).longValue() + mStart;
      } catch (ClassCastException e) {
        e.printStackTrace();
        return false;
      }

      // Once the photo timestamp is reached, latch the current camera frame as
      // the held photo (at full alpha) and substitute it for the camera input.
      if (currentTimeStamp >= cameraPhoto) {
        if (!mGotMainFrame) {
          mMainFrame.focus();
          mColor[3] = 1.0f;
          mCopyProgramWithColor.setHostValue("ccc", mColor);
          mCopyProgramWithColor.process(camera, mMainFrame);
          mGotMainFrame = true;
          mTool.log('d', "Got CameraInput:" + currentTimeStamp);
        }
        subtractInputs[0] = mMainFrame;
      }

      // Keep refreshing the cached previous frame until the effect starts.
      if (!mIsGotPreviousFrame || currentTimeStamp < mEffectStart) {
        mPreviousFrame.focus();
        if (isRenderOutput) {
          mCopyProgramWithColor.process(output, mPreviousFrame);
        } else {
          mCopyProgramWithColor.process(camera, mPreviousFrame);
        }
        mIsGotPreviousFrame = true;
        mTool.log('d', "Got PreviousInput:" + currentTimeStamp);
      }

      // Outside the effect's time window; nothing left to do.
      if (currentTimeStamp >= mEffectEnd || currentTimeStamp < mEffectStart) {
        return false;
      }

      float center_r;

      if (cameraPhoto >= currentTimeStamp) {
        // Approaching the photo: ramp center_r from 1 down to 0 and blend
        // against the cached previous frame.
        center_r =
            1.0f - (float) (currentTimeStamp - mEffectStart) / (float) (cameraPhoto - mEffectStart);
        subtractInputs[0] = mPreviousFrame;
      } else {
        // Past the photo: ramp center_r from 0 back up to 1 and blend against
        // the held photo frame.
        center_r = (float) (currentTimeStamp - cameraPhoto) / (float) (mEffectEnd - cameraPhoto);
        subtractInputs[0] = mMainFrame;
      }

      mCopyProgram.setHostValue("center_r", center_r);

      if (output != null && !isRenderOutput) {
        mCopyProgram.process(subtractInputs, output);
      }
      return true;
    }
    return false;
  }
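  // A reading of the blend factor computed in process() (inferred from the
  // arithmetic above; not documented in the source): center_r traces a linear
  // V-shape over the effect window,
  //
  //   t <= cameraPhoto:  center_r = 1 - (t - mEffectStart) / (cameraPhoto - mEffectStart)
  //   t >  cameraPhoto:  center_r =     (t - cameraPhoto) / (mEffectEnd - cameraPhoto)
  //
  // e.g. with mEffectStart = 0, cameraPhoto = 100, mEffectEnd = 300:
  //   t =  50 -> center_r = 0.5 (falling toward the photo)
  //   t = 200 -> center_r = 0.5 (rising away from it)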
  @Override
  public void tearDown(FilterContext context) {
    if (mMediaFrame != null) {
      mMediaFrame.release();
      // Drop the reference so a repeated tearDown cannot release twice,
      // mirroring how close() handles mPreviousFrame.
      mMediaFrame = null;
    }
  }