/** Regenerates any dirty mipmaps. */
public void regenerateMipmaps() {
  if (mipmapDirty) {
    ByteBuffer data =
        BufferUtils.createByteBuffer(size.getWidth() * size.getHeight() * format.bytes);
    GL11.glBindTexture(GL11.GL_TEXTURE_2D, id);
    // read the texture back from the GPU
    GL11.glGetTexImage(GL11.GL_TEXTURE_2D, 0, format.glFormat, GL11.GL_UNSIGNED_BYTE, data);
    data.rewind();
    // regenerate the mipmaps
    GLU.gluBuild2DMipmaps(
        GL11.GL_TEXTURE_2D,
        format.glInternalFormat,
        size.getWidth(),
        size.getHeight(),
        format.glFormat,
        GL11.GL_UNSIGNED_BYTE,
        data);
    mipmapDirty = false;
  }
}
/** @see org.newdawn.slick.opengl.ImageData#getImageBufferData() */ public ByteBuffer getImageBufferData() { ByteBuffer scratch = BufferUtils.createByteBuffer(rawData.length); scratch.put(rawData); scratch.flip(); return scratch; }
class TimerLinux { private final ByteBuffer time = BufferUtils.createByteBuffer(Math.max(timespec.SIZEOF, timeval.SIZEOF)); private final TimeSource timeSource; private long base; TimerLinux() { if (clock_gettime(CLOCK_MONOTONIC, time) == 0) { timeSource = new TimeSource() { @Override public double getResolution() { return 1e-9; } @Override public long getRawTime() { clock_gettime(CLOCK_MONOTONIC, time); return timespec.sec(time) * 1000000000L + timespec.nsec(time); } }; } else { timeSource = new TimeSource() { @Override public double getResolution() { return 1e-6; } @Override public long getRawTime() { gettimeofday(time, null); return timeval.sec(time) * 1000000L + timeval.usec(time); } }; } } public void init() { base = timeSource.getRawTime(); } public double getTime() { return (double) (timeSource.getRawTime() - base) * timeSource.getResolution(); } public void setTime(double time) { base = timeSource.getRawTime() - (long) (time / timeSource.getResolution()); } private interface TimeSource { double getResolution(); long getRawTime(); } }
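// A minimal usage sketch of the timer above (only TimerLinux itself comes from the snippet; the
// variable names are illustrative):
TimerLinux timer = new TimerLinux(); // uses clock_gettime(CLOCK_MONOTONIC) when available, gettimeofday otherwise
timer.init();                        // establish the time base
double elapsedSeconds = timer.getTime(); // raw ticks since init(), scaled by the source's resolution (1e-9 or 1e-6)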
public void setupQuad() { // We'll define our quad using 4 vertices of the custom 'Vertex' class Vertex v0 = new Vertex(); v0.setXYZ(-0.5f, 0.5f, 0f); v0.setRGB(1, 0, 0); Vertex v1 = new Vertex(); v1.setXYZ(-0.5f, -0.5f, 0f); v1.setRGB(0, 1, 0); Vertex v2 = new Vertex(); v2.setXYZ(0.5f, -0.5f, 0f); v2.setRGB(0, 0, 1); Vertex v3 = new Vertex(); v3.setXYZ(0.5f, 0.5f, 0f); v3.setRGB(1, 1, 1); Vertex[] vertices = new Vertex[] {v0, v1, v2, v3}; // Put each 'Vertex' in one FloatBuffer FloatBuffer verticesBuffer = BufferUtils.createFloatBuffer(vertices.length * Vertex.elementCount); for (int i = 0; i < vertices.length; i++) { verticesBuffer.put(vertices[i].getXYZW()); verticesBuffer.put(vertices[i].getRGBA()); } verticesBuffer.flip(); // OpenGL expects to draw vertices in counter clockwise order by default byte[] indices = { 0, 1, 2, 2, 3, 0 }; indicesCount = indices.length; ByteBuffer indicesBuffer = BufferUtils.createByteBuffer(indicesCount); indicesBuffer.put(indices); indicesBuffer.flip(); // Create a new Vertex Array Object in memory and select it (bind) vaoId = GL30.glGenVertexArrays(); GL30.glBindVertexArray(vaoId); // Create a new Vertex Buffer Object in memory and select it (bind) vboId = GL15.glGenBuffers(); GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboId); GL15.glBufferData(GL15.GL_ARRAY_BUFFER, verticesBuffer, GL15.GL_STATIC_DRAW); // Put the positions in attribute list 0 GL20.glVertexAttribPointer(0, 4, GL11.GL_FLOAT, false, Vertex.sizeInBytes, 0); // Put the colors in attribute list 1 GL20.glVertexAttribPointer( 1, 4, GL11.GL_FLOAT, false, Vertex.sizeInBytes, Vertex.elementBytes * 4); GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0); // Deselect (bind to 0) the VAO GL30.glBindVertexArray(0); // Create a new VBO for the indices and select it (bind) - INDICES vboiId = GL15.glGenBuffers(); GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, vboiId); GL15.glBufferData(GL15.GL_ELEMENT_ARRAY_BUFFER, indicesBuffer, GL15.GL_STATIC_DRAW); GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0); }
public OpenALAudioDevice( OpenALAudio audio, int sampleRate, boolean isMono, int bufferSize, int bufferCount) { this.audio = audio; channels = isMono ? 1 : 2; this.bufferSize = bufferSize; this.bufferCount = bufferCount; this.format = channels > 1 ? AL_FORMAT_STEREO16 : AL_FORMAT_MONO16; this.sampleRate = sampleRate; secondsPerBuffer = (float) bufferSize / bytesPerSample / channels / sampleRate; tempBuffer = BufferUtils.createByteBuffer(bufferSize); }
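// Worked example for the secondsPerBuffer calculation above, assuming bytesPerSample is 2 (16-bit samples):
//   bufferSize = 4096 bytes, stereo (channels = 2), sampleRate = 44100 Hz
//   4096 / 2 / 2 / 44100 ≈ 0.0232 seconds of audio per buffer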
public void setupVIBO() { ByteBuffer indexBuffer = BufferUtils.createByteBuffer(indexElementCount * getNumObjects()); for (int i = 0; i < getNumObjects(); ++i) { indexBuffer.put(Rectangle.getOrder(4 * i)); } indexBuffer.flip(); setVIBO(GL15.glGenBuffers()); GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, getVIBO()); GL15.glBufferData(GL15.GL_ELEMENT_ARRAY_BUFFER, indexBuffer, GL15.GL_DYNAMIC_DRAW); GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0); }
static Texture createSampleTexture() { ByteBuffer byteBuffer = BufferUtils.createByteBuffer(256 * 256 * 4); for (int y = 0; y < 256; y++) { for (int x = 0; x < 256; x++) { byte b = (byte) (x ^ y); byteBuffer.put(b).put(b).put(b).put(b); } } ByteBuffer[][] pixels = {{byteBuffer}}; return new Texture(TextureType.TEXTURE_2D, 4, 256, 256, 0, Format.BGRA, pixels, false, false); }
public Texture(URI pngRef, int filter, int wrap) throws IOException { InputStream input = null; try { // get an InputStream from our URL input = pngRef.toURL().openStream(); // initialize the decoder PNGDecoder dec = new PNGDecoder(input); // set up image dimensions width = dec.getWidth(); height = dec.getHeight(); // we are using RGBA, i.e. 4 components or "bytes per pixel" final int bpp = 4; // create a new byte buffer which will hold our pixel data ByteBuffer buf = BufferUtils.createByteBuffer(bpp * width * height); // decode the image into the byte buffer, in RGBA format dec.decode(buf, width * bpp, PNGDecoder.Format.RGBA); // flip the buffer into "read mode" for OpenGL buf.flip(); // enable textures and generate an ID glEnable(target); id = glGenTextures(); // bind texture bind(); // setup unpack mode glPixelStorei(GL_UNPACK_ALIGNMENT, 1); // setup parameters glTexParameteri(target, GL_TEXTURE_MIN_FILTER, filter); glTexParameteri(target, GL_TEXTURE_MAG_FILTER, filter); glTexParameteri(target, GL_TEXTURE_WRAP_S, wrap); glTexParameteri(target, GL_TEXTURE_WRAP_T, wrap); // pass RGBA data to OpenGL glTexImage2D(target, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, buf); } finally { if (input != null) { try { input.close(); } catch (IOException e) { } } } }
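// A hedged usage sketch for the constructor above; the path, filter, and wrap values are
// illustrative, and an OpenGL context must be current on the calling thread.
Texture grass = new Texture(new File("res/grass.png").toURI(), GL_LINEAR, GL_CLAMP_TO_EDGE); // may throw IOException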
private void init() {
  if (glfwInit() != GL_TRUE) {
    System.err.println("Could not initialize GLFW!");
    return;
  }
  glfwWindowHint(GLFW_SAMPLES, 4);
  glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
  OSCompatibility.GLFWSpecifics();
  window = glfwCreateWindow(width, height, "Voxel Engine", NULL, NULL);
  if (window == NULL) {
    System.err.println("Could not create GLFW window!");
    return;
  }
  // Query the framebuffer size in pixels; on HiDPI displays it can differ from the window size
  ByteBuffer FBW = BufferUtils.createByteBuffer(4), FBH = BufferUtils.createByteBuffer(4);
  glfwGetFramebufferSize(window, FBW, FBH);
  pix_width = FBW.getInt(0);
  pix_height = FBH.getInt(0);
  InitWindow();
  InitGL();
}
private void setupQuad() {
  IndexedMemoryFast<Pos4> mem = Pos4.reserveFast(10, false);
  mem.add(new Pos4().xyzw(-0.5f, 0.5f, 0f, 1f));
  mem.add(new Pos4().xyzw(-0.5f, -0.5f, 0f, 1f));
  mem.add(new Pos4().xyzw(0.5f, -0.5f, 0f, 1f));
  mem.add(new Pos4().xyzw(0.5f, 0.5f, 0f, 1f));
  // RawMemory colors = new RawMemory(30);
  IndexedMemoryFast<Color4> colors = Color4.reserveFast(10, false);
  colors.add(new Color4().rgba(1f, 0f, 0f, 1f));
  colors.add(new Color4().rgba(0f, 1f, 0f, 1f));
  colors.add(new Color4().rgba(0f, 0f, 1f, 1f));
  colors.add(new Color4().rgba(1f, 1f, 1f, 1f));
  // OpenGL expects to draw vertices in counter-clockwise order by default
  byte[] indices = {0, 1, 2, 2, 3, 0};
  indicesCount = indices.length;
  ByteBuffer indicesBuffer = BufferUtils.createByteBuffer(indicesCount);
  indicesBuffer.put(indices);
  indicesBuffer.flip();
  // Create a new Vertex Array Object in memory and select it (bind)
  vaoId = glCreateVertexArrays();
  glBindVertexArray(vaoId);
  // Create a new Vertex Buffer Object in memory and select it (bind) - VERTICES
  vboId = glCreateBuffers();
  glBindBuffer(GL_ARRAY_BUFFER, vboId);
  glBufferData(GL_ARRAY_BUFFER, mem.burn().getArrayBuffer(), GL_STATIC_DRAW);
  glVertexAttribPointer(0, 4, GL_FLOAT, false, 0, 0);
  glBindBuffer(GL_ARRAY_BUFFER, 0);
  // Create a new VBO for the vertex colors and select it (bind) - COLORS
  vbocId = glCreateBuffers();
  glBindBuffer(GL_ARRAY_BUFFER, vbocId);
  glBufferData(GL_ARRAY_BUFFER, colors.burn().getArrayBuffer(), GL_STATIC_DRAW);
  glVertexAttribPointer(1, 4, GL_FLOAT, false, 0, 0);
  glBindBuffer(GL_ARRAY_BUFFER, 0);
  // Deselect (bind to 0) the VAO
  glBindVertexArray(0);
  // Create a new VBO for the indices and select it (bind) - INDICES
  vboiId = glCreateBuffers();
  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboiId);
  glBufferData(GL_ELEMENT_ARRAY_BUFFER, indicesBuffer, GL_STATIC_DRAW);
  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
protected final void enableClipping(int yTop, int yBottom) { if (doubleBuffer == null) { doubleBuffer = BufferUtils.createByteBuffer(32).asDoubleBuffer(); } doubleBuffer.clear(); doubleBuffer.put(0).put(1).put(0).put(-yTop).flip(); glClipPlane(GL_CLIP_PLANE0, doubleBuffer); doubleBuffer.clear(); doubleBuffer.put(0).put(-1).put(0).put(yBottom).flip(); glClipPlane(GL_CLIP_PLANE1, doubleBuffer); glEnable(GL_CLIP_PLANE0); glEnable(GL_CLIP_PLANE1); }
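// How the two planes above combine: glClipPlane keeps the half-space where A*x + B*y + C*z + D >= 0
// (coefficients interpreted in the coordinate space current when glClipPlane is called), so
//   plane 0: (0,  1, 0, -yTop)    keeps  y - yTop    >= 0  ->  y >= yTop
//   plane 1: (0, -1, 0,  yBottom) keeps -y + yBottom >= 0  ->  y <= yBottom
// i.e. rendering is clipped to the horizontal band yTop <= y <= yBottom.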
private static boolean printLogInfo(int obj) { IntBuffer iVal = BufferUtils.createIntBuffer(1); glGetObjectParameterARB(obj, GL_OBJECT_INFO_LOG_LENGTH_ARB, iVal); int length = iVal.get(); if (length > 1) { ByteBuffer infoLog = BufferUtils.createByteBuffer(length); iVal.flip(); glGetInfoLogARB(obj, iVal, infoLog); byte[] infoBytes = new byte[length]; infoLog.get(infoBytes); String out = new String(infoBytes); System.out.println("Info log:\n" + out); return false; } return true; }
public GLImage(String _filename) { try { BufferedImage image = ImageIO.read(this.getClass().getResource("/textures/" + _filename)); int[] pixels = new int[image.getWidth() * image.getHeight()]; image.getRGB(0, 0, image.getWidth(), image.getHeight(), pixels, 0, image.getWidth()); ByteBuffer buffer = BufferUtils.createByteBuffer( image.getWidth() * image.getHeight() * 4); // 4 for RGBA, 3 for RGB for (int y = 0; y < image.getHeight(); y++) { for (int x = 0; x < image.getWidth(); x++) { int pixel = pixels[y * image.getWidth() + x]; buffer.put((byte) ((pixel >> 16) & 0xFF)); // red buffer.put((byte) ((pixel >> 8) & 0xFF)); // green buffer.put((byte) (pixel & 0xFF)); // blue buffer.put((byte) ((pixel >> 24) & 0xFF)); // alpha } } buffer.flip(); this.id = GL11.glGenTextures(); // Generate texture ID GL11.glBindTexture(GL11.GL_TEXTURE_2D, this.id); // Bind texture ID // Setup wrap mode GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_WRAP_S, GL11.GL_CLAMP); GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_WRAP_T, GL11.GL_CLAMP); // Setup texture scaling filtering GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER, GL11.GL_NEAREST); GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER, GL11.GL_NEAREST); // Send texel data to OpenGL GL11.glTexImage2D( GL11.GL_TEXTURE_2D, 0, GL11.GL_RGBA8, image.getWidth(), image.getHeight(), 0, GL11.GL_RGBA, GL11.GL_UNSIGNED_BYTE, buffer); this.width = image.getWidth(); this.height = image.getHeight(); } catch (IOException e) { e.printStackTrace(); } }
private static boolean printLogInfo(int obj, String filename) {
  IntBuffer iVal = BufferUtils.createIntBuffer(1);
  ARBShaderObjects.glGetObjectParameterARB(
      obj, ARBShaderObjects.GL_OBJECT_INFO_LOG_LENGTH_ARB, iVal);
  int length = iVal.get();
  if (length > 1) {
    // We have some info we need to output.
    ByteBuffer infoLog = BufferUtils.createByteBuffer(length);
    iVal.flip();
    ARBShaderObjects.glGetInfoLogARB(obj, iVal, infoLog);
    byte[] infoBytes = new byte[length];
    infoLog.get(infoBytes);
    String out = new String(infoBytes);
    System.out.println("Info log for " + filename + ":\n" + out);
  } else return true;
  return false;
}
private void recreate() { id = GL11.glGenTextures(); GL11.glBindTexture(GL11.GL_TEXTURE_2D, id); GL11.glPixelStorei(GL11.GL_UNPACK_ALIGNMENT, format.bytes); ByteBuffer data = BufferUtils.createByteBuffer(size.getWidth() * size.getHeight() * format.bytes); if (mipmap) { GLU.gluBuild2DMipmaps( GL11.GL_TEXTURE_2D, format.glInternalFormat, size.getWidth(), size.getHeight(), format.glFormat, GL11.GL_UNSIGNED_BYTE, data); } else { GL11.glTexImage2D( GL11.GL_TEXTURE_2D, 0, format.glInternalFormat, size.getWidth(), size.getHeight(), 0, format.glFormat, GL11.GL_UNSIGNED_BYTE, data); } GLUtil.checkGLError(); for (Texture t : residentTextures) { RectanglePacker.Rectangle rpr = packer.findRectangle(t.getSourceImage()); if (rpr != null) { writeToTexture(rpr, t.getSourceImage().getData()); } } regenerateMipmaps(); }
/** * The constructor that creates the font from the given file at the given height. * * @param filePath - The path to file including the file type * @param fontHeight - The height (size) of the font */ public TrueTypeFont(String filePath, int fontHeight) { this.fontHeight = fontHeight; long startTime = 0L; if (Debug.enabled) startTime = System.nanoTime(); textureID = glGenTextures(); cdata = STBTTBakedChar.mallocBuffer(96); try { ByteBuffer ttf = IOUtil.ioResourceToByteBuffer(filePath, 160 * 1024); ByteBuffer bitmap = BufferUtils.createByteBuffer(BITMAP_W * BITMAP_H); STBTruetype.stbtt_BakeFontBitmap(ttf, fontHeight, bitmap, BITMAP_W, BITMAP_H, 32, cdata); glBindTexture(GL_TEXTURE_2D, textureID); glTexImage2D( GL_TEXTURE_2D, 0, GL_ALPHA, BITMAP_W, BITMAP_H, 0, GL_ALPHA, GL_UNSIGNED_BYTE, bitmap); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); } catch (IOException e) { throw new RuntimeException(e); } xbuf = BufferUtils.createFloatBuffer(1); ybuf = BufferUtils.createFloatBuffer(1); quad = STBTTAlignedQuad.malloc(); if (Debug.enabled) { long endTime = System.nanoTime(); Debug.println( " Loaded font: " + filePath + "\n\tFont height: " + fontHeight + "px" + "\n\tLoad time: " + Debug.noNotation.format((endTime - startTime) / 1000000000.0) + "s", Debug.ANSI_CYAN); } }
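// A minimal usage sketch for the constructor above; the font path and height are illustrative,
// and an OpenGL context must be current so the baked glyph bitmap can be uploaded.
TrueTypeFont uiFont = new TrueTypeFont("fonts/Vera.ttf", 24); // bakes characters 32..127 into a GL_ALPHA texture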
/**
 * Convert BufferedImage to ByteBuffer
 *
 * @param image The BufferedImage to convert
 * @return The converted image
 */
private ByteBuffer toByteBuffer(BufferedImage image) {
  int[] pixels = new int[image.getWidth() * image.getHeight()];
  image.getRGB(0, 0, image.getWidth(), image.getHeight(), pixels, 0, image.getWidth());
  ByteBuffer buffer =
      BufferUtils.createByteBuffer(image.getWidth() * image.getHeight() * 4); // 4 for RGBA, 3 for RGB
  for (int y = 0; y < image.getHeight(); y++) {
    for (int x = 0; x < image.getWidth(); x++) {
      int pixel = pixels[y * image.getWidth() + x];
      buffer.put((byte) ((pixel >> 16) & 0xFF)); // Red component
      buffer.put((byte) ((pixel >> 8) & 0xFF)); // Green component
      buffer.put((byte) (pixel & 0xFF)); // Blue component
      buffer.put((byte) ((pixel >> 24) & 0xFF)); // Alpha component. Only for RGBA
    }
  }
  buffer.flip(); // FOR THE LOVE OF GOD DO NOT FORGET THIS
  return buffer;
}
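// Worked example of the ARGB-to-RGBA unpacking above (the pixel value is illustrative):
int pixel = 0xFFCC8844;                 // TYPE_INT_ARGB: A = 0xFF, R = 0xCC, G = 0x88, B = 0x44
byte r = (byte) ((pixel >> 16) & 0xFF); // 0xCC
byte g = (byte) ((pixel >> 8) & 0xFF);  // 0x88
byte b = (byte) (pixel & 0xFF);         // 0x44
byte a = (byte) ((pixel >> 24) & 0xFF); // 0xFF -> bytes land in the buffer in R, G, B, A order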
/**
 * Sets one or more icons for the Display.
 *
 * <ul>
 *   <li>On Windows you should supply at least one 16x16 icon and one 32x32 icon.
 *   <li>Linux (and similar platforms) expects one 32x32 icon.
 *   <li>Mac OS X should be supplied one 128x128 icon.
 * </ul>
 *
 * The implementation will use the supplied ByteBuffers with image data in RGBA (size must be a
 * power of two) and perform any conversions necessary for the specific platform.
 *
 * <p><b>NOTE:</b> The display will make a deep copy of the supplied byte buffer array, for the
 * purpose of recreating the icons when you switch between windowed and fullscreen mode. You
 * therefore only need to set the icon once per instance.
 *
 * @param icons Array of icons in RGBA mode. Pass the icons in order of preference.
 * @return number of icons used, or 0 if the display hasn't been created
 */
public static int setIcon(ByteBuffer[] icons) {
  synchronized (GlobalLock.lock) {
    // make a deep copy so we don't rely on the supplied buffers later on
    // don't recache!
    if (cached_icons != icons) {
      cached_icons = new ByteBuffer[icons.length];
      for (int i = 0; i < icons.length; i++) {
        cached_icons[i] = BufferUtils.createByteBuffer(icons[i].capacity());
        int old_position = icons[i].position();
        cached_icons[i].put(icons[i]);
        icons[i].position(old_position);
        cached_icons[i].flip();
      }
    }
    if (Display.isCreated() && parent == null) {
      return display_impl.setIcon(cached_icons);
    } else {
      return 0;
    }
  }
}
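// A minimal usage sketch for setIcon(): the loadRGBA(...) helper and file names are hypothetical
// stand-ins for whatever decodes an image into a width * height * 4-byte RGBA ByteBuffer.
ByteBuffer[] icons = new ByteBuffer[] {
  loadRGBA("icon16.png"), // 16x16 RGBA, 16 * 16 * 4 = 1024 bytes
  loadRGBA("icon32.png")  // 32x32 RGBA, 32 * 32 * 4 = 4096 bytes
};
int used = Display.setIcon(icons); // returns 0 if the Display has not been created yet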
public BufferTexture(BufferedImage image) { int[] pixels = new int[image.getWidth() * image.getHeight()]; image.getRGB(0, 0, image.getWidth(), image.getHeight(), pixels, 0, image.getWidth()); ByteBuffer buffer = BufferUtils.createByteBuffer(image.getWidth() * image.getHeight() * 4); for (int y = 0; y < image.getHeight(); y++) { for (int x = 0; x < image.getWidth(); x++) { int pixel = pixels[(y * image.getWidth()) + x]; buffer.put((byte) ((pixel >> 16) & 0xFF)); buffer.put((byte) ((pixel >> 8) & 0xFF)); buffer.put((byte) (pixel & 0xFF)); buffer.put((byte) ((pixel >> 24) & 0xFF)); } } buffer.flip(); this.width = image.getWidth(); this.height = image.getHeight(); this.textureId = GL11.glGenTextures(); GL11.glBindTexture(GL11.GL_TEXTURE_2D, this.textureId); GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER, GL11.GL_NEAREST); GL11.glTexParameterf(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER, GL11.GL_NEAREST); GL11.glTexParameterf(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_WRAP_S, GL11.GL_CLAMP); GL11.glTexParameterf(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_WRAP_T, GL11.GL_CLAMP); GL11.glTexImage2D( GL11.GL_TEXTURE_2D, 0, GL11.GL_RGBA8, this.width, this.height, 0, GL11.GL_RGBA, GL11.GL_UNSIGNED_BYTE, buffer); }
// Spout Start public static BufferedImage getScreenshot(int texture, int screenWidth, int screenHeight) { ByteBuffer buffer = BufferUtils.createByteBuffer(screenWidth * screenHeight * 3); byte[] pixelData = new byte[screenWidth * screenHeight * 3]; int[] imageData = new int[screenWidth * screenHeight]; buffer.clear(); GL11.glBindTexture(GL11.GL_TEXTURE_2D, texture); GL11.glGetTexImage(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGB, GL11.GL_UNSIGNED_BYTE, buffer); buffer.clear(); buffer.get(pixelData); for (int l = 0; l < screenWidth; l++) { for (int i1 = 0; i1 < screenHeight; i1++) { int j1 = l + (screenHeight - i1 - 1) * screenWidth; int k1 = pixelData[j1 * 3 + 0] & 0xff; int l1 = pixelData[j1 * 3 + 1] & 0xff; int i2 = pixelData[j1 * 3 + 2] & 0xff; int j2 = 0xff000000 | k1 << 16 | l1 << 8 | i2; imageData[l + i1 * screenWidth] = j2; } } BufferedImage bufferedimage = new BufferedImage(screenWidth, screenHeight, 1); bufferedimage.setRGB(0, 0, screenWidth, screenHeight, imageData, 0, screenWidth); return bufferedimage; }
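// Note on the index math above: for a texture captured from the framebuffer, row 0 of the data
// returned by glGetTexImage is the bottom row (OpenGL's origin is the bottom-left corner), so
// j1 = l + (screenHeight - i1 - 1) * screenWidth reads the vertically flipped row and the loop
// produces a top-down BufferedImage.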
private void verifyCompile(int shaderId) { IntBuffer intBuffer = BufferUtils.createIntBuffer(1); GL20.glGetShader(shaderId, GL20.GL_INFO_LOG_LENGTH, intBuffer); int length = intBuffer.get(); if (length <= 1) { return; } valid = false; ByteBuffer infoBuffer = BufferUtils.createByteBuffer(length); intBuffer.flip(); GL20.glGetShaderInfoLog(shaderId, intBuffer, infoBuffer); int actualLength = intBuffer.get(); byte[] infoBytes = new byte[actualLength]; infoBuffer.get(infoBytes); logger.log(Level.INFO, "{0}", new String(infoBytes)); }
protected BackgroundLoader() { running = true; texture = BufferUtils.createByteBuffer(WIDTH * HEIGHT * 3); }
/** * Returns a new {@link java.nio.ByteBuffer} instance with a capacity equal to {@link #SIZEOF}. */ public static ByteBuffer malloc() { return BufferUtils.createByteBuffer(SIZEOF); }
public static void main(String[] args) throws Exception { Display.setDisplayMode(new DisplayMode(640, 480)); Display.create(); Shape baseBox = new Shape(VertexDataUtils.createBox(new Vec3(-1, 1, -1), new Vec3(1, 3, 1))); Texture baseTexture = SGUtil.createTexture(GL11.GL_RGBA, 128, 128); RenderTarget baseTarget = new RenderTarget(128, 128, new DepthBuffer(GL30.GL_DEPTH24_STENCIL8), false, baseTexture); RenderPass basePass = new RenderPass(); basePass.setClearMask(GL11.GL_DEPTH_BUFFER_BIT | GL11.GL_COLOR_BUFFER_BIT); basePass.setClearColor(new Color4f(0, 1, 1, 0)); basePass.setView(View.createPerspective((float) Math.PI / 4f, 1, 0.1f, 100f)); basePass .getView() .setCameraMatrix(new Mat4().rotateEulerDeg(-30, 0, 0).translate(0, 0, 10).invert_()); basePass.setRenderTarget(baseTarget); basePass.addShape( new Shape(VertexDataUtils.createBox(new Vec3(-4, -1, -4), new Vec3(4, 0, 4)))); basePass.addShape(baseBox); int w = 256; int h = 256; ByteBuffer[][] pixels = {{BufferUtils.createByteBuffer(w * h * 4)}}; Texture shadowTexture = new Texture( TextureType.TEXTURE_2D, GL30.GL_RG32F, w, h, 0, Format.RGBA, pixels, false, false); shadowTexture.setWrapS(Wrap.CLAMP_TO_EDGE); shadowTexture.setWrapT(Wrap.CLAMP_TO_EDGE); // shadowTexture.setMinFilter(MinFilter.LINEAR_MIPMAP_LINEAR); shadowTexture.setMaxAnisotropy(4f); RenderTarget shadowTarget = new RenderTarget(w, h, new DepthBuffer(GL11.GL_DEPTH_COMPONENT), false, shadowTexture); RenderPass shadowPass = new RenderPass(); shadowPass.setClearMask(GL11.GL_DEPTH_BUFFER_BIT | GL11.GL_COLOR_BUFFER_BIT); shadowPass.setClearColor(new Color4f(0, 0, 1, 0)); shadowPass.setView(View.createPerspective((float) Math.PI / 4f, 1, 0.1f, 100f)); shadowPass.setRenderTarget(shadowTarget); shadowPass.addShape( new Shape(VertexDataUtils.createBox(new Vec3(-4, -1, -4), new Vec3(4, 0, 4)))); shadowPass.addShape( new Shape(VertexDataUtils.createBox(new Vec3(-1, 1, -1), new Vec3(1, 3, 1)))); shadowPass .getView() .setCameraMatrix(new Mat4().rotateEulerDeg(-90 - 45, 120, 0).translate(0, 0, 10).invert_()); Shape baseShape = new Shape(VertexDataUtils.createQuad(50, 100 + 256, 256, -256, 0)); baseShape.getState().setUnit(0, new Unit(baseTexture)); Shape shadowShape = new Shape(VertexDataUtils.createQuad(350, 100, 256, 256, 0)); shadowShape.getState().setUnit(0, new Unit(shadowTexture)); RenderPass finalPass = new RenderPass(); finalPass.setClearMask(GL11.GL_COLOR_BUFFER_BIT | GL11.GL_DEPTH_BUFFER_BIT); finalPass.setClearColor(new Color4f(1, 1, 0, 0)); finalPass.setView(View.createOrtho(0, 640, 0, 480, -1000, 1000)); finalPass.addShape(baseShape); finalPass.addShape(shadowShape); // Texture testTexture = createSampleTexture(); shadowStuff(basePass, shadowPass, shadowTexture); ByteBuffer[][] blurPixels = {{BufferUtils.createByteBuffer(128 * 128 * 4)}}; Texture blurTexture = new Texture( TextureType.TEXTURE_2D, GL30.GL_RG32F, 128, 128, 0, Format.RGBA, blurPixels, false, false); blurTexture.setWrapS(Wrap.CLAMP_TO_EDGE); blurTexture.setWrapT(Wrap.CLAMP_TO_EDGE); SceneGraph finalSceneGraph = new SceneGraph(); finalSceneGraph.addRenderPass(shadowPass); finalSceneGraph.addRenderPass( Blur.createPass(shadowTexture, blurTexture, 1.5f / shadowTexture.getWidth(), 0f)); finalSceneGraph.addRenderPass( Blur.createPass(blurTexture, shadowTexture, 0, 1.5f / shadowTexture.getWidth())); finalSceneGraph.addRenderPass(basePass); finalSceneGraph.addRenderPass(finalPass); Renderer finalRenderer = new Renderer(finalSceneGraph); int angle = 0; long startTime = System.currentTimeMillis(); while 
(!Display.isCloseRequested()) { if (Mouse.isButtonDown(0)) { angle += Mouse.getDX(); } // shadowPass.getView().setCameraMatrix(new Mat4().rotateEulerDeg(-90 - 45, angle, // 0).translate(0, 0, 14).invert_()); // basePass.getView().setCameraMatrix(new Mat4().rotateEulerDeg(-30, angle, 0).translate(0, // 0, 10).invert_()); float timeSec = (System.currentTimeMillis() - startTime) / 1000f; baseBox.setModelMatrix( new Mat4() .rotateEulerDeg(0, timeSec * 45, 0) .translate(0, Math.sin(timeSec * 1.7f) * 2, 0)); for (int i = 0; i < basePass.getShapeCount(); i++) { shadowPass.getShape(i).setModelMatrix(basePass.getShape(i).getModelMatrix()); } // shadowRenderer.render(); finalRenderer.render(); Display.update(); } Display.destroy(); }
public class ClientRenderTexture extends ClientTexture { public static final int INVALID_BUFFER = -1; public static final int SCREEN_BUFFER = 0; public static ByteBuffer EMPTY_BUFFER = BufferUtils.createByteBuffer(0); private int framebuffer = INVALID_BUFFER; private boolean useDepthBuffer = false; private boolean useStencilBuffer = false; int depthTarget = INVALID_BUFFER; int stencilTarget = INVALID_BUFFER; // TODO: Implement stencil component boolean useEXT = false; public ClientRenderTexture() { super( null, (int) ((SpoutClient) Spout.getEngine()).getResolution().getX(), (int) ((SpoutClient) Spout.getEngine()).getResolution().getY()); // Detect which path we should use to create framebuffers. // if both of these are false, we cannot use framebuffers so throw an exception boolean arb = GLContext.getCapabilities().GL_ARB_framebuffer_object; boolean ext = GLContext.getCapabilities().GL_EXT_framebuffer_object; if (!arb && !ext) throw new ComputerIsPotatoException("Does not support Framebuffers"); // if arb is false, use ext if (!arb) useEXT = true; Spout.log("Using EXT: " + useEXT); } public ClientRenderTexture(boolean depth) { this(); useDepthBuffer = true; } @Override public Texture subTexture(int x, int y, int w, int h) { // TODO Auto-generated method stub return null; } public void activate() { if (framebuffer == INVALID_BUFFER) return; // Can't set this to active if it's not created yet if (useEXT) EXTFramebufferObject.glBindFramebufferEXT( EXTFramebufferObject.GL_FRAMEBUFFER_EXT, framebuffer); else GL30.glBindFramebuffer(GL30.GL_FRAMEBUFFER, framebuffer); GL11.glViewport(0, 0, width, height); } public void release() { if (framebuffer != INVALID_BUFFER) { if (useEXT) EXTFramebufferObject.glBindFramebufferEXT( EXTFramebufferObject.GL_FRAMEBUFFER_EXT, SCREEN_BUFFER); else GL30.glBindFramebuffer(GL30.GL_FRAMEBUFFER, SCREEN_BUFFER); GL11.glViewport(0, 0, width, height); } } @Override public void writeGPU() { if (framebuffer != INVALID_BUFFER) throw new IllegalStateException("Framebuffer already created!"); // Create the color buffer for this renderTexture textureID = GL11.glGenTextures(); GL11.glBindTexture(GL11.GL_TEXTURE_2D, textureID); GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER, GL11.GL_NEAREST); GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER, GL11.GL_NEAREST); GL11.glTexImage2D( GL11.GL_TEXTURE_2D, 0, GL11.GL_RGBA8, width, height, 0, GL11.GL_RGBA, GL11.GL_INT, (java.nio.ByteBuffer) null); // Create the texture data if (useEXT) { framebuffer = EXTFramebufferObject.glGenFramebuffersEXT(); EXTFramebufferObject.glBindFramebufferEXT( EXTFramebufferObject.GL_FRAMEBUFFER_EXT, framebuffer); GL11.glBindTexture(GL11.GL_TEXTURE_2D, textureID); EXTFramebufferObject.glFramebufferTexture2DEXT( EXTFramebufferObject.GL_FRAMEBUFFER_EXT, EXTFramebufferObject.GL_COLOR_ATTACHMENT0_EXT, GL11.GL_TEXTURE_2D, textureID, 0); if (useDepthBuffer) { depthTarget = GL30.glGenRenderbuffers(); EXTFramebufferObject.glBindRenderbufferEXT( EXTFramebufferObject.GL_RENDERBUFFER_EXT, depthTarget); EXTFramebufferObject.glRenderbufferStorageEXT( EXTFramebufferObject.GL_RENDERBUFFER_EXT, GL11.GL_DEPTH_COMPONENT, this.getWidth(), this.getHeight()); EXTFramebufferObject.glFramebufferRenderbufferEXT( EXTFramebufferObject.GL_FRAMEBUFFER_EXT, GL30.GL_DEPTH_ATTACHMENT, EXTFramebufferObject.GL_RENDERBUFFER_EXT, depthTarget); } if (EXTFramebufferObject.glCheckFramebufferStatusEXT(EXTFramebufferObject.GL_FRAMEBUFFER_EXT) != EXTFramebufferObject.GL_FRAMEBUFFER_COMPLETE_EXT) { 
System.out.println("ERROR: Framebuffer not complete"); throw new ComputerIsPotatoException("Framebuffer not complete"); } } else { framebuffer = GL30.glGenFramebuffers(); GL30.glBindFramebuffer(GL30.GL_FRAMEBUFFER, framebuffer); GL11.glBindTexture(GL11.GL_TEXTURE_2D, textureID); GL30.glFramebufferTexture2D( GL30.GL_FRAMEBUFFER, GL30.GL_COLOR_ATTACHMENT0, GL11.GL_TEXTURE_2D, textureID, 0); if (useDepthBuffer) { depthTarget = GL30.glGenRenderbuffers(); GL30.glBindRenderbuffer(GL30.GL_RENDERBUFFER, depthTarget); GL30.glRenderbufferStorage( GL30.GL_RENDERBUFFER, GL11.GL_DEPTH_COMPONENT, this.getWidth(), this.getHeight()); GL30.glFramebufferRenderbuffer( GL30.GL_FRAMEBUFFER, GL30.GL_DEPTH_ATTACHMENT, GL30.GL_RENDERBUFFER, depthTarget); } GL30.glBindFramebuffer(GL30.GL_FRAMEBUFFER, SCREEN_BUFFER); if (GL30.glCheckFramebufferStatus(GL30.GL_FRAMEBUFFER) != GL30.GL_FRAMEBUFFER_COMPLETE) { System.out.println("ERROR: Framebuffer not complete"); throw new ComputerIsPotatoException("Framebuffer not complete"); } } GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); } protected boolean isGL30() { return ((SpoutClient) Spout.getEngine()).getRenderMode() == RenderMode.GL30; } @Override public boolean isLoaded() { return framebuffer != INVALID_BUFFER; } }
public static Geometry generateGalaxyPointGeometry(long seed) { Random rand = new Random(seed); /*double[] noiseSeedR = new double[16*16*16]; double[] noiseSeedI = new double[16*16*16]; for(int i = 0; i < noiseSeedR.length; i++) { noiseSeedR[i] = 2.0 * rand.nextDouble() - 1.0; noiseSeedI[i] = 2.0 * rand.nextDouble() - 1.0; }*/ List<Float> starPositions = new ArrayList<Float>(); List<Byte> color = new ArrayList<Byte>(); // Noise noiseSeedR = new Noise(seed), // noiseSeedI = new Noise(seed ^42l); int subdivs = 50; // int starsTotal = 100; // Go through all sectors for (float x = 0.5f; x < subdivs; x++) { for (float y = 0.5f; y < subdivs; y++) { for (float z = 0.5f; z < subdivs; z++) { // Amount of stars in this sector /*int amount = (int) ( (float)starsTotal/subdivs *Math.sin( x/Math.PI + noise(noiseSeedR, noiseSeedI, x/subdivs, y/subdivs, z/subdivs) + 0.5*noise(noiseSeedR, noiseSeedI, 2*x/subdivs, 2*y/subdivs, 2*z/subdivs) + 0.25*noise(noiseSeedR, noiseSeedI, 4*x/subdivs, 4*y/subdivs, 4*z/subdivs) + 0.125*noise(noiseSeedR, noiseSeedI, 8*x/subdivs, 8*y/subdivs, 8*z/subdivs)) );*/ int amount = 1; for (int i = 0; i < amount; i++) { // Put a star in the sector starPositions.add(((float) x + rand.nextFloat()) / subdivs - 0.5f); starPositions.add(((float) y + rand.nextFloat()) / subdivs - 0.5f); starPositions.add(((float) z + rand.nextFloat()) / subdivs - 0.5f); // TODO: Improve colors float c = rand.nextFloat(); if (c < 0.1f) { color.add((byte) 255); color.add((byte) (210 - (c - 1f))); color.add((byte) (170 - (c - 1f))); } else if (c < 0.9f) { color.add((byte) 255); color.add((byte) 255); color.add((byte) 255); } else { color.add((byte) (190 * c * c)); color.add((byte) (210 * c)); color.add((byte) 255); } } } } } List<Attribute> attributes = new ArrayList<Attribute>(); Attribute at = new Attribute(); at.buffer = BufferUtils.createFloatBuffer(starPositions.size()); at.name = "position"; at.size = 3; at.type = Type.FLOAT; Iterator<Float> fIt = starPositions.iterator(); while (fIt.hasNext()) ((FloatBuffer) at.buffer).put(fIt.next()); at.buffer.flip(); attributes.add(at); Attribute atColor = new Attribute(); atColor.buffer = BufferUtils.createByteBuffer(color.size()); atColor.name = "color"; atColor.normalized = true; atColor.size = 3; atColor.type = Type.UBYTE; Iterator<Byte> fItC = color.iterator(); while (fItC.hasNext()) ((ByteBuffer) atColor.buffer).put(fItC.next()); atColor.buffer.flip(); attributes.add(atColor); // TODO: Are the indices needed? int amountOfStars = starPositions.size() / 3; IntBuffer indices = BufferUtils.createIntBuffer(amountOfStars); for (int i = 0; i < amountOfStars; i++) { indices.put(i); } indices.flip(); return new Geometry(PrimitiveType.POINTS, indices, attributes); }
@Override public void onLoad(ALAudioFileInputStream ais) throws Exception { // header telling this is a wave file if (ais.readInt() != RIFF_INT) { throw new Exception("Not a wave file"); } @SuppressWarnings("unused") int file_size = ais.readInt() + 8; if (ais.readInt() != WAVE_INT) { throw new Exception("Not a wave file"); } boolean fmt = false; boolean data = false; // read block by block while (ais.available() > 0) { int id = ais.readInt(); int block_size = ais.readLittleInt(); if (id == FMT_INT) { if (fmt) { throw new Exception("Multiple FMT blocks in WAV file"); } fmt = true; @SuppressWarnings("unused") short audio_format = ais.readLittleShort(); short channels = ais.readLittleShort(); System.out.println(channels); int frequence = ais.readLittleInt(); @SuppressWarnings("unused") int avg_bytes_per_sec = ais.readLittleInt(); @SuppressWarnings("unused") short bytes_per_sample = ais.readLittleShort(); short bits_per_sample = ais.readLittleShort(); super.setFrequence(frequence); if (bits_per_sample == 8) { if (channels == 1) { super.setFormat(AL10.AL_FORMAT_MONO8); } else if (channels == 2) { super.setFormat(AL10.AL_FORMAT_STEREO8); } else { throw new Exception("Wrong number of channels (not supported): " + channels); } } else if (bits_per_sample == 16) { if (channels == 1) { super.setFormat(AL10.AL_FORMAT_MONO16); } else if (channels == 2) { super.setFormat(AL10.AL_FORMAT_STEREO16); } else { throw new Exception("Wrong number of channels (not supported): " + channels); } } else { throw new Exception("Wrong bits per sample: " + bits_per_sample); } } else if (id == DATA_INT) { if (data) { throw new Exception("Multiple data section in WAV file"); } data = true; byte[] bytes = new byte[block_size]; ais.read(bytes, 0, block_size); ByteBuffer buffer = BufferUtils.createByteBuffer(block_size); buffer.put(bytes); buffer.flip(); super.setData(buffer); } else if (block_size >= 0 && block_size <= ais.available()) { ais.skip(block_size); } else { break; } } if (!fmt) { throw new Exception("No FMT"); } if (!data) { throw new Exception("No data"); } }
// Spout Start - Method renamed from func_74292_a to saveScreenshot public static String saveScreenshot( File file, String imageFileName, int screenWidth, int screenHeight) { ByteBuffer buffer = BufferUtils.createByteBuffer(screenWidth * screenHeight * 3); byte[] pixelData = new byte[screenWidth * screenHeight * 3]; int[] imageData = new int[screenWidth * screenHeight]; try { // Spout Start - Renamed from file1 File screenshotsDir = new File(file, "screenshots"); screenshotsDir.mkdir(); // Just intialise it up there! /*if (buffer == null || buffer.capacity() < i * j) { buffer = BufferUtils.createByteBuffer(i * j * 3); } if (imageData == null || imageData.length < i * j * 3) { pixelData = new byte[i * j * 3]; imageData = new int[i * j]; }*/ // Use constants instead of magic numbers GL11.glPixelStorei(GL11.GL_PACK_ALIGNMENT, 1); GL11.glPixelStorei(GL11.GL_UNPACK_ALIGNMENT, 1); buffer.clear(); GL11.glReadPixels( 0, 0, screenWidth, screenHeight, GL11.GL_RGB, GL11.GL_UNSIGNED_BYTE, buffer); // Spout End buffer.clear(); String s1 = (new StringBuilder()).append("").append(dateFormat.format(new Date())).toString(); File file2; if (imageFileName == null) { for (int k = 1; (file2 = new File( screenshotsDir, (new StringBuilder()) .append(s1) .append( k != 1 ? (new StringBuilder()).append("_").append(k).toString() : "") .append(".png") .toString())) .exists(); k++) {} } else { file2 = new File(screenshotsDir, imageFileName); } buffer.get(pixelData); for (int l = 0; l < screenWidth; l++) { for (int i1 = 0; i1 < screenHeight; i1++) { int j1 = l + (screenHeight - i1 - 1) * screenWidth; int k1 = pixelData[j1 * 3 + 0] & 0xff; int l1 = pixelData[j1 * 3 + 1] & 0xff; int i2 = pixelData[j1 * 3 + 2] & 0xff; int j2 = 0xff000000 | k1 << 16 | l1 << 8 | i2; imageData[l + i1 * screenWidth] = j2; } } BufferedImage bufferedimage = new BufferedImage(screenWidth, screenHeight, 1); bufferedimage.setRGB(0, 0, screenWidth, screenHeight, imageData, 0, screenWidth); ImageIO.write(bufferedimage, "png", file2); return (new StringBuilder()) .append("Saved screenshot as ") .append(file2.getName()) .toString(); } catch (Exception exception) { exception.printStackTrace(); return (new StringBuilder()).append("Failed to save: ").append(exception).toString(); } }
@Override public ByteBuffer createByteBuffer(final int size) { return org.lwjgl.BufferUtils.createByteBuffer(size); }
/** * Creates a PixelData buffer with the specified size and format. * * <p>Note that Slick currently loads textures with an internal RGBA format; this means that even * if we upload, say, 2-component (e.g. GRAYALPHA) texture data, it will eventually be stored in * OpenGL video memory using RGBA. For better performance and memory management, create textures * with the same internal format as the format given to PixelData. The static 'createTexture' * utility method is intended for this purpose. * * <p>Also note that most drivers will expand formats to RGBA internally, so this optimization may * not be necessary. * * @param width the width in pixels of our data * @param height the height in pixels of our data * @param format the desired format to use during uploading (RGB, RGBA, GRAY, GRAYALPHA, ALPHA) */ public PixelData(int width, int height, ImageData.Format format) { this.format = format; this.width = width; this.height = height; this.pixels = BufferUtils.createByteBuffer(width * height * format.getColorComponents()); }
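// A minimal usage sketch for the constructor above; the dimensions are illustrative, and the
// calls that would fill and upload the buffer belong to the wider PixelData API not shown here.
PixelData scratch = new PixelData(64, 64, ImageData.Format.RGBA); // allocates 64 * 64 * 4 bytes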