/**
 * Part of the deferred lighting technique, this method applies lighting through screen-space
 * calculations to the previously flat-lit world rendering stored in the primary FBO.   // TODO: rename sceneOpaque* FBOs to primaryA/B
 *
 * <p>See http://en.wikipedia.org/wiki/Deferred_shading as a starting point.
 */
public void applyLightBufferPass() {
    int texId = 0;
    GL13.glActiveTexture(GL13.GL_TEXTURE0 + texId);
    buffers.sceneOpaque.bindTexture();
    materials.lightBufferPass.setInt("texSceneOpaque", texId++);

    GL13.glActiveTexture(GL13.GL_TEXTURE0 + texId);
    buffers.sceneOpaque.bindDepthTexture();
    materials.lightBufferPass.setInt("texSceneOpaqueDepth", texId++);

    GL13.glActiveTexture(GL13.GL_TEXTURE0 + texId);
    buffers.sceneOpaque.bindNormalsTexture();
    materials.lightBufferPass.setInt("texSceneOpaqueNormals", texId++);

    GL13.glActiveTexture(GL13.GL_TEXTURE0 + texId);
    buffers.sceneOpaque.bindLightBufferTexture();
    materials.lightBufferPass.setInt("texSceneOpaqueLightBuffer", texId, true);

    buffers.sceneOpaquePingPong.bind();
    graphicState.setRenderBufferMask(buffers.sceneOpaquePingPong, true, true, true);

    setViewportTo(buffers.sceneOpaquePingPong.dimensions());
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: verify this is necessary

    renderFullscreenQuad();

    graphicState.bindDisplay();  // TODO: verify this is necessary
    setViewportToWholeDisplay(); // TODO: verify this is necessary

    renderingProcess.swapSceneOpaqueFBOs();
    buffers.sceneOpaque.attachDepthBufferTo(buffers.sceneReflectiveRefractive);
}
// TODO: have a flag to invert the eyes (Cross Eye 3D), as mentioned in
// TODO: http://forum.terasology.org/threads/happy-coding.1018/#post-11264
/**
 * Renders one eye's half of the final stereo image into the sceneFinal FBO (or the ocUndistorted
 * FBO while a screenshot is being taken) and, once the right eye has been rendered, applies the
 * Oculus distortion to the combined image.
 */
private void renderFinalStereoImage(WorldRenderer.WorldRenderingStage renderingStage) {
    if (renderingProcess.isNotTakingScreenshot()) {
        buffers.sceneFinal.bind();
    } else {
        buffers.ocUndistorted.bind();
    }

    switch (renderingStage) {
        case LEFT_EYE:
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
            renderFullscreenQuad(0, 0, fullScale.width() / 2, fullScale.height());
            break;

        case RIGHT_EYE:
            // no glClear() here: the rendering for the second eye is added beside the first eye's rendering
            renderFullscreenQuad(fullScale.width() / 2 + 1, 0, fullScale.width() / 2, fullScale.height());

            if (renderingProcess.isNotTakingScreenshot()) {
                graphicState.bindDisplay();
                applyOculusDistortion(buffers.sceneFinal);

            } else {
                buffers.sceneFinal.bind();
                applyOculusDistortion(buffers.ocUndistorted);
                renderingProcess.saveScreenshot();
                // when saving a screenshot we do NOT send the image to screen,
                // to avoid the brief flicker of the screenshot for one frame
            }

            break;
    }
}
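/**
 * Repeatedly downsamples the scene, starting from the initialPost buffer, through the chain of
 * downSampledScene FBOs, ending in a 1x1 pixel buffer used for eye adaptation.
 */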
private void downsampleSceneInto1x1pixelsBuffer() {
    PerformanceMonitor.startActivity("Rendering eye adaption");

    materials.downSampler.enable();
    FBO downSampledFBO;

    for (int i = 4; i >= 0; i--) {
        downSampledFBO = buffers.downSampledScene[i];
        materials.downSampler.setFloat("size", downSampledFBO.width(), true);

        downSampledFBO.bind();
        setViewportTo(downSampledFBO.dimensions());
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: move this block above, for consistency

        if (i == 4) {
            buffers.initialPost.bindTexture();
        } else {
            buffers.downSampledScene[i + 1].bindTexture();
        }

        renderFullscreenQuad();

        graphicState.bindDisplay(); // TODO: probably can be removed or moved out of the loop
    }

    setViewportToWholeDisplay(); // TODO: verify this is necessary

    PerformanceMonitor.endActivity();
}
/**
 * Enabled by the "outline" option in the render settings, this method generates landscape/objects
 * outlines and stores them into a buffer in its own FBO. The stored image is eventually combined
 * with others.
 *
 * <p>The outlines visually separate a given object (including the landscape) or parts of it from
 * sufficiently distant objects it overlaps. It is effectively a depth-based edge detection
 * technique and internally uses a Sobel operator.
 *
 * <p>For further information see: http://en.wikipedia.org/wiki/Sobel_operator
 */
public void generateOutline() {
    if (renderingConfig.isOutline()) {
        materials.outline.enable();

        // TODO: verify inputs: shouldn't there be a texture binding here?
        buffers.outline.bind();

        setViewportTo(buffers.outline.dimensions());
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: verify this is necessary

        renderFullscreenQuad();

        graphicState.bindDisplay();  // TODO: verify this is necessary
        setViewportToWholeDisplay(); // TODO: verify this is necessary
    }
}
// TODO: Tone mapping usually maps colors from HDR to a more limited range,
// TODO: i.e. the 24 bits a monitor can display. This method however maps from an HDR buffer
// TODO: to another HDR buffer and this puzzles me. Will need to dig deep in the shader to
// TODO: see what it does.
public void generateToneMappedScene() {
    PerformanceMonitor.startActivity("Tone mapping");

    materials.toneMapping.enable();

    buffers.sceneToneMapped.bind();

    setViewportTo(buffers.sceneToneMapped.dimensions());
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: verify this is necessary

    renderFullscreenQuad();

    graphicState.bindDisplay();  // TODO: verify this is necessary
    setViewportToWholeDisplay(); // TODO: verify this is necessary

    PerformanceMonitor.endActivity();
}
/**
 * Adds chromatic aberration, light shafts, 1/8th resolution bloom and vignette onto the rendering
 * achieved so far. Stores the result into its own buffer to be used at a later stage.
 */
public void initialPostProcessing() {
    PerformanceMonitor.startActivity("Initial Post-Processing");
    materials.initialPost.enable(); // TODO: verify what the inputs are

    buffers.initialPost.bind(); // TODO: see if we could write this straight into sceneOpaque

    setViewportTo(buffers.initialPost.dimensions());
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: verify this is necessary

    renderFullscreenQuad();

    graphicState.bindDisplay();  // TODO: verify this is necessary
    setViewportToWholeDisplay(); // TODO: verify this is necessary

    PerformanceMonitor.endActivity();
}
/**
 * Adds outlines and ambient occlusion to the rendering obtained so far, which is stored in the
 * primary FBO. Stores the resulting output back into the primary buffer.
 */
public void generatePrePostComposite() {
    materials.prePostComposite.enable();

    // TODO: verify if there should be bound textures here.
    buffers.sceneOpaquePingPong.bind();

    setViewportTo(buffers.sceneOpaquePingPong.dimensions());
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: verify this is necessary

    renderFullscreenQuad();

    graphicState.bindDisplay();  // TODO: verify this is necessary
    setViewportToWholeDisplay(); // TODO: verify this is necessary

    renderingProcess.swapSceneOpaqueFBOs();
    buffers.sceneOpaque.attachDepthBufferTo(buffers.sceneReflectiveRefractive);
}
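/**
 * Blurs the raw ambient occlusion data stored in the ssao FBO and stores the result into the
 * ssaoBlurred FBO.
 */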
private void generateBlurredSSAO() {
    materials.ssaoBlurred.enable();
    materials.ssaoBlurred.setFloat2(
            "texelSize", 1.0f / buffers.ssaoBlurred.width(), 1.0f / buffers.ssaoBlurred.height(), true);

    buffers.ssao.bindTexture(); // TODO: verify this is the only input

    buffers.ssaoBlurred.bind();
    setViewportTo(buffers.ssaoBlurred.dimensions());
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: verify this is necessary

    renderFullscreenQuad();

    graphicState.bindDisplay();  // TODO: verify this is necessary
    setViewportToWholeDisplay(); // TODO: verify this is necessary
}
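/**
 * Generates the screen-space ambient occlusion (SSAO) data and stores it into the ssao FBO, to be
 * blurred by generateBlurredSSAO().
 */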
private void generateSSAO() {
    materials.ssao.enable();
    materials.ssao.setFloat2(
            "texelSize", 1.0f / buffers.ssao.width(), 1.0f / buffers.ssao.height(), true);
    materials.ssao.setFloat2("noiseTexelSize", 1.0f / 4.0f, 1.0f / 4.0f, true);

    // TODO: verify if some textures should be bound here
    buffers.ssao.bind();

    setViewportTo(buffers.ssao.dimensions());
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: verify this is necessary

    renderFullscreenQuad();

    graphicState.bindDisplay();  // TODO: verify this is necessary
    setViewportToWholeDisplay(); // TODO: verify this is necessary
}
/** Generates light shafts and stores them in their own FBO. */
public void generateLightShafts() {
    if (renderingConfig.isLightShafts()) {
        PerformanceMonitor.startActivity("Rendering light shafts");

        materials.lightShafts.enable(); // TODO: verify what the inputs are

        buffers.lightShafts.bind();

        setViewportTo(buffers.lightShafts.dimensions());
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: verify this is necessary

        renderFullscreenQuad();

        graphicState.bindDisplay();  // TODO: verify this is necessary
        setViewportToWholeDisplay(); // TODO: verify this is necessary

        PerformanceMonitor.endActivity();
    }
}
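/**
 * Renders the final scene directly to the display or, when a screenshot is being taken, into the
 * sceneFinal FBO before saving the screenshot.
 */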
private void renderFinalMonoImage() {
    if (renderingProcess.isNotTakingScreenshot()) {
        graphicState.bindDisplay();
        renderFullscreenQuad(0, 0, Display.getWidth(), Display.getHeight());

    } else {
        buffers.sceneFinal.bind();

        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        renderFullscreenQuad(0, 0, fullScale.width(), fullScale.height());
        renderingProcess.saveScreenshot();
        // when saving a screenshot we do not send the image to screen,
        // to avoid the brief one-frame flicker of the screenshot

        // This is needed to avoid the UI (which is not currently saved within the
        // screenshot) being rendered for one frame with buffers.sceneFinal size.
        setViewportToWholeDisplay();
    }
}
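/**
 * Blurs the scene into the given sky band FBO: sceneSkyBand0 is generated from the opaque scene's
 * content, while any other band passed in is generated from sceneSkyBand0.
 */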
private void generateSkyBand(FBO skyBand) {
    materials.blur.enable();
    materials.blur.setFloat("radius", 8.0f, true);
    materials.blur.setFloat2("texelSize", 1.0f / skyBand.width(), 1.0f / skyBand.height(), true);

    if (skyBand == buffers.sceneSkyBand0) {
        buffers.sceneOpaque.bindTexture();
    } else {
        buffers.sceneSkyBand0.bindTexture();
    }

    skyBand.bind();
    graphicState.setRenderBufferMask(skyBand, true, false, false);

    setViewportTo(skyBand.dimensions());
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: verify this is necessary

    renderFullscreenQuad();

    graphicState.bindDisplay();  // TODO: verify this is necessary
    setViewportToWholeDisplay(); // TODO: verify this is necessary
}
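/**
 * Blurs the tone-mapped scene into the given FBO, using the blur radius from the rendering config:
 * sceneBlur0 is generated from the tone-mapped scene, while any other FBO passed in is generated
 * from sceneBlur0.
 */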
private void generateBlur(FBO sceneBlur) {
    materials.blur.enable();
    materials.blur.setFloat("radius", overallBlurRadiusFactor * renderingConfig.getBlurRadius(), true);
    materials.blur.setFloat2("texelSize", 1.0f / sceneBlur.width(), 1.0f / sceneBlur.height(), true);

    if (sceneBlur == buffers.sceneBlur0) {
        buffers.sceneToneMapped.bindTexture();
    } else {
        buffers.sceneBlur0.bindTexture();
    }

    sceneBlur.bind();

    setViewportTo(sceneBlur.dimensions());
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    renderFullscreenQuad();

    graphicState.bindDisplay();
    setViewportToWholeDisplay();
}
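/**
 * Extracts the high-luminance portions of the opaque scene, above the bloom high-pass threshold,
 * into the sceneHighPass FBO, which later feeds the bloom passes.
 */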
private void generateHighPass() {
    materials.highPass.enable();
    materials.highPass.setFloat("highPassThreshold", bloomHighPassThreshold, true);

    int texId = 0;
    GL13.glActiveTexture(GL13.GL_TEXTURE0 + texId);
    buffers.sceneOpaque.bindTexture();
    materials.highPass.setInt("tex", texId);

    // GL13.glActiveTexture(GL13.GL_TEXTURE0 + texId);
    // buffers.sceneOpaque.bindDepthTexture();
    // program.setInt("texDepth", texId++);

    buffers.sceneHighPass.bind();

    setViewportTo(buffers.sceneHighPass.dimensions());
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    renderFullscreenQuad();

    graphicState.bindDisplay();
    setViewportToWholeDisplay();
}
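/**
 * Blurs the high-pass output into the given bloom FBO: sceneBloom0 is generated from the
 * high-pass scene, sceneBloom1 from sceneBloom0, and any other FBO passed in from sceneBloom1.
 */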
private void generateBloom(FBO sceneBloom) {
    materials.blur.enable();
    materials.blur.setFloat("radius", bloomBlurRadius, true);
    materials.blur.setFloat2("texelSize", 1.0f / sceneBloom.width(), 1.0f / sceneBloom.height(), true);

    if (sceneBloom == buffers.sceneBloom0) {
        buffers.sceneHighPass.bindTexture();
    } else if (sceneBloom == buffers.sceneBloom1) {
        buffers.sceneBloom0.bindTexture();
    } else {
        buffers.sceneBloom1.bindTexture();
    }

    sceneBloom.bind();

    setViewportTo(sceneBloom.dimensions());
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // TODO: verify this is necessary

    renderFullscreenQuad();

    graphicState.bindDisplay();  // TODO: verify this is necessary
    setViewportToWholeDisplay(); // TODO: verify this is necessary
}