Code Example #1
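This example sets up two offscreen pre-views, one for reflection and one for refraction. Each gets its own Camera and ViewPort, renders into a FrameBuffer whose color attachment is a texture, and has a scene processor (ReflectionProcessor / RefractionProcessor) attached to drive it. Note that both viewports render the same scene node; the fields used here (reflectionTexture, refractionTexture, depthTexture, renderWidth, renderHeight, reflectionClipPlane, and so on) are assumed to be initialized elsewhere in the class.
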
  protected void createPreViews() {
    reflectionCam = new Camera(renderWidth, renderHeight);
    refractionCam = new Camera(renderWidth, renderHeight);

    // create a pre-view. a view that is rendered before the main view
    reflectionView = new ViewPort("Reflection View", reflectionCam);
    reflectionView.setClearFlags(true, true, true);
    reflectionView.setBackgroundColor(ColorRGBA.Black);
    // create offscreen framebuffer
    reflectionBuffer = new FrameBuffer(renderWidth, renderHeight, 1);
    // setup framebuffer to use texture
    reflectionBuffer.setDepthBuffer(Format.Depth);
    reflectionBuffer.setColorTexture(reflectionTexture);

    // set viewport to render to offscreen framebuffer
    reflectionView.setOutputFrameBuffer(reflectionBuffer);
    reflectionView.addProcessor(
        new ReflectionProcessor(reflectionCam, reflectionBuffer, reflectionClipPlane));
    // attach the scene to the viewport to be rendered
    reflectionView.attachScene(reflectionScene);

    // create a pre-view. a view that is rendered before the main view
    refractionView = new ViewPort("Refraction View", refractionCam);
    refractionView.setClearFlags(true, true, true);
    refractionView.setBackgroundColor(ColorRGBA.Black);
    // create offscreen framebuffer
    refractionBuffer = new FrameBuffer(renderWidth, renderHeight, 1);
    // setup framebuffer to use texture
    refractionBuffer.setDepthBuffer(Format.Depth);
    refractionBuffer.setColorTexture(refractionTexture);
    refractionBuffer.setDepthTexture(depthTexture);
    // set viewport to render to offscreen framebuffer
    refractionView.setOutputFrameBuffer(refractionBuffer);
    refractionView.addProcessor(new RefractionProcessor());
    // attach the scene to the viewport to be rendered (the same scene as the reflection view)
    refractionView.attachScene(reflectionScene);
  }
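
Once both pre-views are in place, the offscreen textures are what the water material ends up sampling. Below is a minimal sketch of that wiring, assuming a material whose definition exposes reflection, refraction, and depth texture parameters; the parameter names are assumptions and must match the material definition actually in use.

  // sketch only: bind the offscreen render targets to the water material
  material.setTexture("water_reflection", reflectionTexture);
  material.setTexture("water_refraction", refractionTexture);
  material.setTexture("water_depthmap", depthTexture);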
Code Example #2
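This example renders a small scene offscreen so the result can be read back to main memory. It creates a pre-view through renderManager.createPreView, registers the enclosing class as a scene processor (addProcessor(this)) so it is notified once the frame has been rendered, backs the FrameBuffer with renderbuffers rather than textures because that is faster for GPU-to-CPU copies, and attaches a single textured box as the offscreen scene. The fields width, height, renderManager, and assetManager are assumed to be available.
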
  public void setupOffscreenView() {
    offCamera = new Camera(width, height);

    // create a pre-view. a view that is rendered before the main view
    offView = renderManager.createPreView("Offscreen View", offCamera);
    offView.setBackgroundColor(ColorRGBA.DarkGray);
    offView.setClearFlags(true, true, true);

    // this will let us know when the scene has been rendered to the
    // frame buffer
    offView.addProcessor(this);

    // create offscreen framebuffer
    offBuffer = new FrameBuffer(width, height, 1);

    // setup framebuffer's cam
    offCamera.setFrustumPerspective(45f, 1f, 1f, 1000f);
    offCamera.setLocation(new Vector3f(0f, 0f, -5f));
    offCamera.lookAt(new Vector3f(0f, 0f, 0f), Vector3f.UNIT_Y);

    // setup framebuffer's texture
    //        offTex = new Texture2D(width, height, Format.RGBA8);

    // setup framebuffer to use renderbuffer
    // this is faster for gpu -> cpu copies
    offBuffer.setDepthBuffer(Format.Depth);
    offBuffer.setColorBuffer(Format.RGBA8);
    //        offBuffer.setColorTexture(offTex);

    // set viewport to render to offscreen framebuffer
    offView.setOutputFrameBuffer(offBuffer);

    // setup framebuffer's scene
    Box boxMesh = new Box(Vector3f.ZERO, 1, 1, 1);
    Material material = assetManager.loadMaterial("Interface/Logo/Logo.j3m");
    offBox = new Geometry("box", boxMesh);
    offBox.setMaterial(material);

    // attach the scene to the viewport to be rendered
    offView.attachScene(offBox);
  }
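
Because the class registers itself as the viewport's processor, it receives a postFrame callback after the offscreen view has been rendered; that is where the readback happens. A minimal sketch, assuming the class implements SceneProcessor and holds a pre-allocated ByteBuffer (here called cpuBuf) of width * height * 4 bytes:

  public void postFrame(FrameBuffer out) {
    cpuBuf.clear();
    // copy the color attachment of the offscreen framebuffer into CPU memory
    renderer.readFrameBuffer(offBuffer, cpuBuf);
    // cpuBuf now holds raw RGBA pixels and can be converted to an image elsewhere
  }

Note that offBox is never attached to the root node, so its logical and geometric state must be updated manually (updateLogicalState / updateGeometricState) every frame; otherwise rendering the detached scene will fail.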
Code Example #3
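This example is the reshape callback of a filter post-processor. It re-applies the camera's stored viewport fractions, resizes the camera to the new canvas size while saving the original dimensions, computes the viewport's actual size in pixels, and then rebuilds the render framebuffer: a multisampled one when multisampling is requested and supported (with multisampled textures on OpenGL 3.1+, plain renderbuffers otherwise), or a single-sampled framebuffer with a color texture. Finally it re-initializes every filter for the new size and points the viewport's output at the new framebuffer.
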
  public void reshape(ViewPort vp, int w, int h) {
    // this has no effect on the first init, but is useful when resizing a canvas with multiple views
    Camera cam = vp.getCamera();
    cam.setViewPort(left, right, bottom, top);
    // resizing the camera to fit the new viewport and saving original dimensions
    cam.resize(w, h, false);
    left = cam.getViewPortLeft();
    right = cam.getViewPortRight();
    top = cam.getViewPortTop();
    bottom = cam.getViewPortBottom();
    originalWidth = w;
    originalHeight = h;
    cam.setViewPort(0, 1, 0, 1);

    // computing the real dimensions of the viewport and resizing the camera
    width = (int) (w * (Math.abs(right - left)));
    height = (int) (h * (Math.abs(bottom - top)));
    width = Math.max(1, width);
    height = Math.max(1, height);
    cam.resize(width, height, false);
    cameraInit = true;
    computeDepth = false;

    if (renderFrameBuffer == null) {
      outputBuffer = viewPort.getOutputFrameBuffer();
    }

    Collection<Caps> caps = renderer.getCaps();

    // antialiasing on filters is only supported with OpenGL 3 due to a depth read problem
    if (numSamples > 1 && caps.contains(Caps.FrameBufferMultisample)) {
      renderFrameBufferMS = new FrameBuffer(width, height, numSamples);
      if (caps.contains(Caps.OpenGL31)) {
        Texture2D msColor = new Texture2D(width, height, numSamples, Format.RGBA8);
        Texture2D msDepth = new Texture2D(width, height, numSamples, Format.Depth);
        renderFrameBufferMS.setDepthTexture(msDepth);
        renderFrameBufferMS.setColorTexture(msColor);
        filterTexture = msColor;
        depthTexture = msDepth;
      } else {
        renderFrameBufferMS.setDepthBuffer(Format.Depth);
        renderFrameBufferMS.setColorBuffer(Format.RGBA8);
      }
    }

    if (numSamples <= 1 || !caps.contains(Caps.OpenGL31)) {
      renderFrameBuffer = new FrameBuffer(width, height, 1);
      renderFrameBuffer.setDepthBuffer(Format.Depth);
      filterTexture = new Texture2D(width, height, Format.RGBA8);
      renderFrameBuffer.setColorTexture(filterTexture);
    }

    for (Iterator<Filter> it = filters.iterator(); it.hasNext(); ) {
      Filter filter = it.next();
      initFilter(filter, vp);
    }

    if (renderFrameBufferMS != null) {
      viewPort.setOutputFrameBuffer(renderFrameBufferMS);
    } else {
      viewPort.setOutputFrameBuffer(renderFrameBuffer);
    }
  }
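
For context, a minimal usage sketch, assuming jME3's stock FilterPostProcessor and BloomFilter. Attaching the processor to a viewport triggers its initialization, which calls reshape() with the camera's current size; reshape() is then called again whenever the canvas or camera is resized.

  FilterPostProcessor fpp = new FilterPostProcessor(assetManager);
  fpp.setNumSamples(4);              // request a multisampled render buffer
  fpp.addFilter(new BloomFilter());  // any Filter implementation can be added
  viewPort.addProcessor(fpp);        // initialization will invoke reshape() with the current size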