@SuppressWarnings("OverridableMethodCallInConstructor") public LWJGLRenderer() throws LWJGLException { this.ib16 = BufferUtils.createIntBuffer(16); this.textureAreas = new ArrayList<TextureArea>(); this.rotatedTextureAreas = new ArrayList<TextureAreaRotated>(); this.dynamicImages = new ArrayList<LWJGLDynamicImage>(); this.tintStateRoot = new TintStack(); this.tintStack = tintStateRoot; this.clipStack = new ClipStack(); this.clipRectTemp = new Rect(); syncViewportSize(); GL11.glGetInteger(GL11.GL_MAX_TEXTURE_SIZE, ib16); maxTextureSize = ib16.get(0); if (Mouse.isCreated()) { int minCursorSize = Cursor.getMinCursorSize(); IntBuffer tmp = BufferUtils.createIntBuffer(minCursorSize * minCursorSize); emptyCursor = new Cursor( minCursorSize, minCursorSize, minCursorSize / 2, minCursorSize / 2, 1, tmp, null); } else { emptyCursor = null; } swCursorAnimState = new SWCursorAnimState(); }
/** * Creates a RenderTexture object for enabling render-to-texture on a P-buffer. * * <p>NOTE: Only one of useRGB and useRGBA can be true at the same time. * * <p>NOTE: useRGB(A) and useDepth can be true at the same time, thus allowing two different * render textures. * * <p>NOTE: The target parameter can be one of the following: * * <p>RENDER_TEXTURE_1D RENDER_TEXTURE_2D RENDER_TEXTURE_RECTANGLE RENDER_TEXTURE_CUBE_MAP * * @param useRGB - When true the P-buffer can be used as an RGB render texture. * @param useRGBA - When true the P-buffer can be used as an RGBA render texture. * @param useDepth - When true the P-buffer can be used as a depth render texture. * @param isRectangle - When true rectangle textures will be allowed on the P-buffer. * @param target - The texture target of the render texture. * @param mipmaps - How many mipmap levels to allocate on the P-buffer. */ public RenderTexture( boolean useRGB, boolean useRGBA, boolean useDepth, boolean isRectangle, int target, int mipmaps) { if (useRGB && useRGBA) throw new IllegalArgumentException("A RenderTexture can't be both RGB and RGBA."); if (mipmaps < 0) throw new IllegalArgumentException("The mipmap levels can't be negative."); if (isRectangle && target != RENDER_TEXTURE_RECTANGLE) throw new IllegalArgumentException( "When the RenderTexture is rectangle the target must be RENDER_TEXTURE_RECTANGLE."); pixelFormatCaps = BufferUtils.createIntBuffer(4); pBufferAttribs = BufferUtils.createIntBuffer(8); if (useRGB) { pixelFormatCaps.put( isRectangle ? WGL_BIND_TO_TEXTURE_RECTANGLE_RGB_NV : WGL_BIND_TO_TEXTURE_RGB_ARB); pixelFormatCaps.put(GL_TRUE); pBufferAttribs.put(WGL_TEXTURE_FORMAT_ARB); pBufferAttribs.put(WGL_TEXTURE_RGB_ARB); } else if (useRGBA) { pixelFormatCaps.put( isRectangle ? WGL_BIND_TO_TEXTURE_RECTANGLE_RGBA_NV : WGL_BIND_TO_TEXTURE_RGBA_ARB); pixelFormatCaps.put(GL_TRUE); pBufferAttribs.put(WGL_TEXTURE_FORMAT_ARB); pBufferAttribs.put(WGL_TEXTURE_RGBA_ARB); } if (useDepth) { pixelFormatCaps.put( isRectangle ? WGL_BIND_TO_TEXTURE_RECTANGLE_DEPTH_NV : WGL_BIND_TO_TEXTURE_DEPTH_NV); pixelFormatCaps.put(GL_TRUE); pBufferAttribs.put(WGL_DEPTH_TEXTURE_FORMAT_NV); pBufferAttribs.put(WGL_TEXTURE_DEPTH_COMPONENT_NV); } pBufferAttribs.put(WGL_TEXTURE_TARGET_ARB); pBufferAttribs.put(target); if (mipmaps != 0) { pBufferAttribs.put(WGL_MIPMAP_TEXTURE_ARB); pBufferAttribs.put(mipmaps); } pixelFormatCaps.flip(); pBufferAttribs.flip(); }
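As a rough usage sketch (not from the original source), a RenderTexture configured this way is normally handed to the LWJGL 2 Pbuffer constructor that accepts a RenderTexture; the 256x256 size and default PixelFormat below are placeholder values.

import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.Pbuffer;
import org.lwjgl.opengl.PixelFormat;
import org.lwjgl.opengl.RenderTexture;

public class RenderTextureUsage {
  public static void main(String[] args) throws LWJGLException {
    // Request an RGBA 2D render texture with no depth texture and no mipmaps.
    RenderTexture rt =
        new RenderTexture(false, true, false, false, RenderTexture.RENDER_TEXTURE_2D, 0);

    // Attach it to an off-screen P-buffer (hypothetical size and pixel format).
    Pbuffer pbuffer = new Pbuffer(256, 256, new PixelFormat(), rt, null);
    pbuffer.makeCurrent();
    // ... render into the P-buffer here, then use bindTexImage()/releaseTexImage() ...
    pbuffer.destroy();
  }
}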
private void resize() { IntBuffer b1 = BufferUtils.createIntBuffer(1); IntBuffer b2 = BufferUtils.createIntBuffer(1); GLFW.glfwGetFramebufferSize(window, b1, b2); GL11.glViewport(0, 0, b1.get(), b2.get()); persp = GLHelper.initPerspectiveProjectionMatrix(FOV, width, height, zNear, zFar); ortho = GLHelper.initOrthographicProjectionMatrix( -width / 2, width / 2, -height / 2, height / 2, zNear, zFar); ortho2D = GLHelper.initOrthographicProjectionMatrix(0, width, 0, height, 0, 1); transform = new Matrix4f(); }
public Window(int width, int height, String title, long monitor, long share) { this.width = width; this.height = height; this.title = title; this.monitor = monitor; this.share = share; widthBuffer = BufferUtils.createIntBuffer(1); heightBuffer = BufferUtils.createIntBuffer(1); windowHandle = glfwCreateWindow(width, height, title, monitor, share); }
/**
 * Stops and releases all sources, clears each of the specified Audio buffers, destroys the OpenAL
 * context, and resets SoundStore for future use.
 *
 * <p>Calling SoundStore.get().init() will re-initialize the OpenAL context after a call to
 * destroyOpenAL (Note: AudioLoader.getXXX calls init for you).
 *
 * @author davedes (http://slick.ninjacave.com/forum/viewtopic.php?t=3920)
 */
private static void destroyOpenAL() {
  if (!trackExists()) return;
  stop();
  try {
    // get Music object's (private) Audio object reference
    Field sound = player.getClass().getDeclaredField("sound");
    sound.setAccessible(true);
    Audio audio = (Audio) (sound.get(player));

    // first clear the sources allocated by SoundStore
    int max = SoundStore.get().getSourceCount();
    IntBuffer buf = BufferUtils.createIntBuffer(max);
    for (int i = 0; i < max; i++) {
      int source = SoundStore.get().getSource(i);
      buf.put(source);
      // stop and detach any buffers at this source
      AL10.alSourceStop(source);
      AL10.alSourcei(source, AL10.AL_BUFFER, 0);
    }
    buf.flip();
    AL10.alDeleteSources(buf);
    int exc = AL10.alGetError();
    if (exc != AL10.AL_NO_ERROR) {
      throw new SlickException("Could not clear SoundStore sources, err: " + exc);
    }

    // delete any buffer data stored in memory, too...
    if (audio != null && audio.getBufferID() != 0) {
      buf = BufferUtils.createIntBuffer(1).put(audio.getBufferID());
      buf.flip();
      AL10.alDeleteBuffers(buf);
      exc = AL10.alGetError();
      if (exc != AL10.AL_NO_ERROR) {
        throw new SlickException(
            "Could not clear buffer " + audio.getBufferID() + ", err: " + exc);
      }
    }

    // clear OpenAL
    AL.destroy();

    // reset SoundStore so that next time we create a Sound/Music, it will reinit
    SoundStore.get().clear();

    player = null;
  } catch (Exception e) {
    ErrorHandler.error("Failed to destroy OpenAL.", e, true);
  }
}
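The javadoc above notes that SoundStore.get().init() re-creates the OpenAL context after a teardown. A minimal caller-side sketch, assuming it lives in the same class as the private destroyOpenAL() method; the helper name is illustrative only.

// Hypothetical helper in the same class as destroyOpenAL(); only SoundStore.get(), init(),
// and destroyOpenAL() come from the code above.
private static void resetAudio() {
  destroyOpenAL();         // stops sources, deletes buffers, AL.destroy(), SoundStore.clear()
  SoundStore.get().init(); // re-create the OpenAL context before loading any new Sound/Music
}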
public Texture(String path) {
  try {
    BufferedImage image = ImageIO.read(new FileInputStream(path));
    width = image.getWidth();
    height = image.getHeight();
    pixels = new int[width * height];
    image.getRGB(0, 0, width, height, pixels, 0, width);
  } catch (IOException e) {
    e.printStackTrace();
  }

  // Repack the ARGB ints returned by BufferedImage.getRGB() as ABGR so that, with the
  // native (little-endian) byte order used by BufferUtils, glTexImage2D reads the bytes
  // in the R, G, B, A order that GL_RGBA + GL_UNSIGNED_BYTE expects.
  int[] data = new int[width * height];
  for (int i = 0; i < data.length; i++) {
    int a = (pixels[i] & 0xff000000) >>> 24;
    int r = (pixels[i] & 0xff0000) >> 16;
    int g = (pixels[i] & 0xff00) >> 8;
    int b = (pixels[i] & 0xff);
    data[i] = a << 24 | b << 16 | g << 8 | r;
  }

  int id = glGenTextures();
  glBindTexture(GL_TEXTURE_2D, id);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
  IntBuffer buffer = (IntBuffer) BufferUtils.createIntBuffer(data.length).put(data).flip();
  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
  this.index = id;
  glBindTexture(GL_TEXTURE_2D, 0);
}
private int createTubeVbo(int tube, Color color) { float[] c = new float[] {color.getBlue() / 255f, color.getGreen() / 255f, color.getRed() / 255f}; totalNumVerts[tube] = tubes[tube].indeces.length; IntBuffer buf = BufferUtils.createIntBuffer(1); GL15.glGenBuffers(buf); int vbo = buf.get(); FloatBuffer data = BufferUtils.createFloatBuffer(tubes[tube].indeces.length * 9); for (int i = 0; i < tubes[tube].indeces.length; i++) { data.put(c); Vector3D vertex = tubes[tube].vertices[tubes[tube].indeces[i]]; Vector3D normal = tubes[tube].normals[tubes[tube].indeces[i]]; float[] vertexf = new float[] {vertex.x, vertex.y, vertex.z}; float[] normalf = new float[] {normal.x, normal.y, normal.z}; data.put(vertexf); data.put(normalf); } data.rewind(); int bytesPerFloat = Float.SIZE / Byte.SIZE; int numBytes = data.capacity() * bytesPerFloat; GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vbo); GL15.glBufferData(GL15.GL_ARRAY_BUFFER, data, GL15.GL_STATIC_DRAW); GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0); return vbo; }
public static String func_74292_a(File par0File, String par1Str, int par2, int par3) { try { File var4 = new File(par0File, "screenshots"); var4.mkdir(); int var5 = par2 * par3; if (field_74293_b == null || field_74293_b.capacity() < var5) { field_74293_b = BufferUtils.createIntBuffer(var5); field_74294_c = new int[var5]; } GL11.glPixelStorei(GL11.GL_PACK_ALIGNMENT, 1); GL11.glPixelStorei(GL11.GL_UNPACK_ALIGNMENT, 1); field_74293_b.clear(); GL11.glReadPixels( 0, 0, par2, par3, GL12.GL_BGRA, GL12.GL_UNSIGNED_INT_8_8_8_8_REV, field_74293_b); field_74293_b.get(field_74294_c); func_74289_a(field_74294_c, par2, par3); BufferedImage var6 = new BufferedImage(par2, par3, 1); var6.setRGB(0, 0, par2, par3, field_74294_c, 0, par2); File var7; if (par1Str == null) { var7 = func_74290_a(var4); } else { var7 = new File(var4, par1Str); } ImageIO.write(var6, "png", var7); return "Saved screenshot as " + var7.getName(); } catch (Exception var8) { var8.printStackTrace(); return "Failed to save: " + var8; } }
public Mesh(float[] vertices, int[] indices) { this.vertices = vertices; this.indices = indices; vbo = glGenBuffers(); ibo = glGenBuffers(); FloatBuffer fb = BufferUtils.createFloatBuffer(vertices.length); fb.put(vertices); fb.flip(); IntBuffer ib = BufferUtils.createIntBuffer(indices.length); ib.put(indices); ib.flip(); glBindBuffer(GL_ARRAY_BUFFER, vbo); glBufferData(GL_ARRAY_BUFFER, fb, GL_STATIC_DRAW); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo); glBufferData(GL_ELEMENT_ARRAY_BUFFER, ib, GL_STATIC_DRAW); glBindBuffer(GL_ARRAY_BUFFER, 0); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); }
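For context, a fixed-function draw path for a Mesh built like this might look as follows. This is a sketch under the assumption that each vertex is three floats and that the same GL11/GL15 static imports are available; it is not code from the original project.

// Hypothetical render method for the Mesh above (3 floats per vertex, fixed-function pipeline).
public void render() {
  glEnableClientState(GL_VERTEX_ARRAY);
  glBindBuffer(GL_ARRAY_BUFFER, vbo);
  glVertexPointer(3, GL_FLOAT, 0, 0L);
  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
  glDrawElements(GL_TRIANGLES, indices.length, GL_UNSIGNED_INT, 0L);
  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
  glBindBuffer(GL_ARRAY_BUFFER, 0);
  glDisableClientState(GL_VERTEX_ARRAY);
}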
/* Generate a Triangle mesh that represents this Box. */
private void buildMesh() {
  // Create the OBJMesh
  MeshData box = new MeshData();

  box.vertexCount = 8;
  box.indexCount = 36;

  // Add positions
  box.positions = BufferUtils.createFloatBuffer(box.vertexCount * 3);
  box.positions.put(
      new float[] {
        (float) minPt.x, (float) minPt.y, (float) minPt.z,
        (float) minPt.x, (float) maxPt.y, (float) minPt.z,
        (float) maxPt.x, (float) maxPt.y, (float) minPt.z,
        (float) maxPt.x, (float) minPt.y, (float) minPt.z,
        (float) minPt.x, (float) minPt.y, (float) maxPt.z,
        (float) minPt.x, (float) maxPt.y, (float) maxPt.z,
        (float) maxPt.x, (float) maxPt.y, (float) maxPt.z,
        (float) maxPt.x, (float) minPt.y, (float) maxPt.z
      });

  box.indices = BufferUtils.createIntBuffer(box.indexCount);
  box.indices.put(
      new int[] {
        0, 1, 2, 0, 2, 3,
        0, 5, 1, 0, 4, 5,
        0, 7, 4, 0, 3, 7,
        4, 6, 5, 4, 7, 6,
        2, 5, 6, 2, 1, 5,
        2, 6, 7, 2, 7, 3
      });

  this.mesh = new Mesh(box);

  // set transformations and absorptions
  this.mesh.setTransformation(this.tMat, this.tMatInv, this.tMatTInv);
  this.mesh.shader = this.shader;
}
public static String func_74292_a( File p_74292_0_, String p_74292_1_, int p_74292_2_, int p_74292_3_) { try { File file = new File(p_74292_0_, "screenshots"); file.mkdir(); int i = p_74292_2_ * p_74292_3_; if (field_74293_b == null || field_74293_b.capacity() < i) { field_74293_b = BufferUtils.createIntBuffer(i); field_74294_c = new int[i]; } GL11.glPixelStorei(3333, 1); GL11.glPixelStorei(3317, 1); field_74293_b.clear(); GL11.glReadPixels(0, 0, p_74292_2_, p_74292_3_, 32993, 33639, field_74293_b); field_74293_b.get(field_74294_c); func_74289_a(field_74294_c, p_74292_2_, p_74292_3_); BufferedImage bufferedimage = new BufferedImage(p_74292_2_, p_74292_3_, 1); bufferedimage.setRGB(0, 0, p_74292_2_, p_74292_3_, field_74294_c, 0, p_74292_2_); File file1; if (p_74292_1_ == null) { file1 = func_74290_a(file); } else { file1 = new File(file, p_74292_1_); } ImageIO.write(bufferedimage, "png", file1); return (new StringBuilder()) .append("Saved screenshot as ") .append(file1.getName()) .toString(); } catch (Exception exception) { exception.printStackTrace(); return (new StringBuilder()).append("Failed to save: ").append(exception).toString(); } }
private IntBuffer storeInIntBuffer(int[] data) { IntBuffer buffer = BufferUtils.createIntBuffer(data.length); buffer.put(data); buffer.flip(); return buffer; }
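A hedged example of how a helper like storeInIntBuffer is typically consumed, assuming LWJGL's GL15 bindings; the index data and the createIndexBuffer method are made up for illustration.

// Hypothetical usage: upload quad indices to an element array buffer via storeInIntBuffer().
private int createIndexBuffer() {
  int[] indices = {0, 1, 2, 2, 3, 0};
  int ibo = GL15.glGenBuffers();
  GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, ibo);
  GL15.glBufferData(GL15.GL_ELEMENT_ARRAY_BUFFER, storeInIntBuffer(indices), GL15.GL_STATIC_DRAW);
  GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
  return ibo;
}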
public String glGetActiveUniform(int program, int index, IntBuffer size, Buffer type) {
  // FIXME this is less than ideal of course...
  IntBuffer typeTmp = BufferUtils.createIntBuffer(2);
  String name = GL20.glGetActiveUniform(program, index, 256, typeTmp);
  size.put(typeTmp.get(0));
  if (type instanceof IntBuffer) ((IntBuffer) type).put(typeTmp.get(1));
  return name;
}
@Override public void createBuffers() throws Exception { vertexBuffer = BufferUtils.createFloatBuffer(vertices.length); vertexBuffer.put(vertices); vertexBuffer.rewind(); indexBuffer = BufferUtils.createIntBuffer(numVertices); indexBuffer.put(vertex_indices); indexBuffer.rewind(); fetchPNGTexture(); }
public static void init() { int maxDrawBuffers = glGetInteger(GL_MAX_DRAW_BUFFERS); System.out.println("GL_MAX_DRAW_BUFFERS = " + maxDrawBuffers); colorAttachments = 4; for (int i = 0; i < ProgramCount; ++i) { if (programNames[i].equals("")) { programs[i] = 0; } else { programs[i] = setupProgram( "shaders/" + programNames[i] + ".vsh", "shaders/" + programNames[i] + ".fsh"); } } if (colorAttachments > maxDrawBuffers) { System.out.println("Not enough draw buffers!"); } for (int i = 0; i < ProgramCount; ++i) { for (int n = i; programs[i] == 0; n = programBackups[n]) { if (n == programBackups[n]) { break; } programs[i] = programs[programBackups[n]]; } } dfbDrawBuffers = BufferUtils.createIntBuffer(colorAttachments); for (int i = 0; i < colorAttachments; ++i) { dfbDrawBuffers.put(i, GL_COLOR_ATTACHMENT0_EXT + i); } dfbTextures = BufferUtils.createIntBuffer(colorAttachments); dfbRenderBuffers = BufferUtils.createIntBuffer(colorAttachments); resize(); setupShadowMap(); isInitialized = true; }
static { IntBuffer offsets = BufferUtils.createIntBuffer(6); SIZEOF = offsets(memAddress(offsets)); WIDTH = offsets.get(0); HEIGHT = offsets.get(1); REDBITS = offsets.get(2); GREENBITS = offsets.get(3); BLUEBITS = offsets.get(4); REFRESHRATE = offsets.get(5); }
/** * Called in response to the opengl context going away, perhaps as a result of the display mode * changing */ public static void recreateTextures() { IntBuffer texNames = BufferUtils.createIntBuffer(textures.size()); for (GLTexture glt : textures) { texNames.put(glt.id); } texNames.flip(); GL11.glDeleteTextures(texNames); for (GLTexture glt : textures) { glt.recreate(); } }
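A sketch of the kind of call site the comment above describes, assuming the LWJGL 2 Display API; TextureManager is a stand-in for whatever class actually owns recreateTextures(), and the display mode is a placeholder.

// Hypothetical recovery path after a display-mode change destroys the GL context.
try {
  Display.destroy();
  Display.setDisplayMode(new DisplayMode(1280, 720)); // placeholder mode
  Display.create(); // fresh context: every previously created texture id is now invalid
  TextureManager.recreateTextures(); // delete stale ids and re-upload all GLTexture objects
} catch (LWJGLException e) {
  e.printStackTrace();
}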
public JGLCanvas() { this.mNewCanvasSize = new AtomicReference<>(); this.mMouseListeners = new ArrayList<>(); this.mMouseWheelListeners = new ArrayList<>(); this.mMouseMotionListeners = new ArrayList<>(); this.mKeyListeners = new ArrayList<>(); mIB16 = BufferUtils.createIntBuffer(16); addComponentListener( new ComponentAdapter() { @Override public void componentResized(ComponentEvent e) { mNewCanvasSize.set(getSize()); } }); }
private static boolean printLogInfo(int obj) { IntBuffer iVal = BufferUtils.createIntBuffer(1); glGetObjectParameterARB(obj, GL_OBJECT_INFO_LOG_LENGTH_ARB, iVal); int length = iVal.get(); if (length > 1) { ByteBuffer infoLog = BufferUtils.createByteBuffer(length); iVal.flip(); glGetInfoLogARB(obj, iVal, infoLog); byte[] infoBytes = new byte[length]; infoLog.get(infoBytes); String out = new String(infoBytes); System.out.println("Info log:\n" + out); return false; } return true; }
public IntBuffer getIndices(int index) { IntBuffer indicesloc; int[] i = { (index * 6), (index * 6) + 1, (index * 6) + 2, (index * 6) + 3, (index * 6) + 4, (index * 6) + 5 }; indicesloc = BufferUtils.createIntBuffer(6); indicesloc.put(i); indicesloc.flip(); return indicesloc; }
/** * Get the Sound based on a specified OGG file * * @param ref The reference to the OGG file in the classpath * @param in The stream to the OGG to load * @return The Sound read from the OGG file * @throws IOException Indicates a failure to load the OGG */ public Audio getOgg(String ref, InputStream in) throws IOException { if (!soundWorks) { return new NullAudio(); } if (!inited) { throw new RuntimeException( "Can't load sounds until SoundStore is init(). Use the container init() method."); } if (deferred) { return new DeferredSound(ref, in, DeferredSound.OGG); } int buffer = -1; if (loaded.get(ref) != null) { buffer = ((Integer) loaded.get(ref)).intValue(); } else { try { IntBuffer buf = BufferUtils.createIntBuffer(1); OggDecoder decoder = new OggDecoder(); OggData ogg = decoder.getData(in); AL10.alGenBuffers(buf); AL10.alBufferData( buf.get(0), ogg.channels > 1 ? AL10.AL_FORMAT_STEREO16 : AL10.AL_FORMAT_MONO16, ogg.data, ogg.rate); loaded.put(ref, new Integer(buf.get(0))); buffer = buf.get(0); } catch (Exception e) { Log.error(e); Log.info("Failed to load: " + ref + " - " + e.getMessage()); throw new IOException("Unable to load: " + ref); } } if (buffer == -1) { throw new IOException("Unable to load: " + ref); } return new AudioImpl(this, buffer); }
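As a hedged illustration of the caller side (not part of the original class), loading and playing an OGG through the Slick SoundStore singleton usually looks something like this; the resource path is a placeholder.

// Hypothetical caller: load an OGG sound effect through SoundStore and play it once.
SoundStore store = SoundStore.get();
store.init();
try {
  Audio shot = store.getOgg("sfx/shot.ogg",
      ResourceLoader.getResourceAsStream("sfx/shot.ogg"));
  shot.playAsSoundEffect(1.0f, 1.0f, false); // pitch, gain, loop
} catch (IOException e) {
  e.printStackTrace();
}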
private static boolean printLogInfo(int obj, String filename) {
  IntBuffer iVal = BufferUtils.createIntBuffer(1);
  ARBShaderObjects.glGetObjectParameterARB(
      obj, ARBShaderObjects.GL_OBJECT_INFO_LOG_LENGTH_ARB, iVal);

  int length = iVal.get();
  if (length > 1) {
    // We have some info we need to output.
    ByteBuffer infoLog = BufferUtils.createByteBuffer(length);
    iVal.flip();
    ARBShaderObjects.glGetInfoLogARB(obj, iVal, infoLog);
    byte[] infoBytes = new byte[length];
    infoLog.get(infoBytes);
    String out = new String(infoBytes);
    System.out.println("Info log for " + filename + ":\n" + out);
  } else return true;
  return false;
}
private void doEye() { int mouseX = Mouse.getX(); int mouseY = Mouse.getY(); FloatBuffer modelview = BufferUtils.createFloatBuffer(16); GL11.glGetFloat(GL11.GL_MODELVIEW_MATRIX, modelview); FloatBuffer projection = BufferUtils.createFloatBuffer(16); GL11.glGetFloat(GL11.GL_PROJECTION_MATRIX, projection); IntBuffer viewport = BufferUtils.createIntBuffer(16); GL11.glGetInteger(GL11.GL_VIEWPORT, viewport); float winX = mouseX; float winY = viewport.get(3) - mouseY; FloatBuffer winZBuffer = BufferUtils.createFloatBuffer(1); GL11.glReadPixels(mouseX, mouseY, 1, 1, GL11.GL_DEPTH_COMPONENT, GL11.GL_FLOAT, winZBuffer); float winZ = winZBuffer.get(0); FloatBuffer pos = BufferUtils.createFloatBuffer(3); GLU.gluUnProject(winX, winY, winZ, modelview, projection, viewport, pos); mEyeRay = new Point3f(pos.get(0), pos.get(1), pos.get(2)); }
public static void loadDataContainer( TerrainDataContainer dest, float[][] map, int x, int y, int size) { float[][] harray = new float[size - 2][size - 2]; float q = Chunk.CHUNK_SIZE / (float) (size - 3); int bSize = (size - 2) * (size - 2); FloatBuffer vBuff = BufferUtils.createFloatBuffer(bSize * 4); FloatBuffer tBuff = BufferUtils.createFloatBuffer(bSize * 2); FloatBuffer nBuff = BufferUtils.createFloatBuffer(bSize * 3); int iAmount = 6 * (size - 3) * (size - 3); IntBuffer iBuff = BufferUtils.createIntBuffer(iAmount); for (int pX = 0; pX < size - 2; pX++) for (int pZ = 0; pZ < size - 2; pZ++) { harray[pX][pZ] = map[x + pX + 1][y + pZ + 1]; vBuff.put(pX * q); vBuff.put(map[x + pX + 1][y + pZ + 1]); vBuff.put(pZ * q); vBuff.put(1); tBuff.put((pX / (float) (size - 3)) * Chunk.TEXTURES); tBuff.put((pZ / (float) (size - 3)) * Chunk.TEXTURES); float hU = map[x + pX + 1][y + pZ + 2]; float hD = map[x + pX + 1][y + pZ]; float hL = map[x + pX][y + pZ + 1]; float hR = map[x + pX + 2][y + pZ + 1]; temp.set(hL - hR, 2f, hD - hU); temp.normalise(temp); nBuff.put(temp.x); nBuff.put(temp.y); nBuff.put(temp.z); if (pX < size - 3 && pZ < size - 3) { iBuff.put((pZ + 1) * (size - 2) + pX); iBuff.put(pZ * (size - 2) + pX); iBuff.put(pZ * (size - 2) + pX + 1); iBuff.put((pZ + 1) * (size - 2) + pX + 1); iBuff.put((pZ + 1) * (size - 2) + pX); iBuff.put(pZ * (size - 2) + pX + 1); } } vBuff.flip(); tBuff.flip(); nBuff.flip(); iBuff.flip(); dest.load(vBuff, tBuff, nBuff, iBuff, iAmount, q, harray); }
public void writeSamples(byte[] data, int offset, int length) {
  if (length < 0) throw new IllegalArgumentException("length cannot be < 0.");

  if (sourceID == -1) {
    sourceID = audio.obtainSource(true);
    if (sourceID == -1) return;

    if (buffers == null) {
      buffers = BufferUtils.createIntBuffer(bufferCount);
      alGenBuffers(buffers);
      if (alGetError() != AL_NO_ERROR)
        throw new GdxRuntimeException("Unable to allocate audio buffers.");
    }

    alSourcei(sourceID, AL_LOOPING, AL_FALSE);
    alSourcef(sourceID, AL_GAIN, volume);

    // Fill initial buffers.
    int queuedBuffers = 0;
    for (int i = 0; i < bufferCount; i++) {
      int bufferID = buffers.get(i);
      int written = Math.min(bufferSize, length);
      tempBuffer.clear();
      tempBuffer.put(data, offset, written).flip();
      alBufferData(bufferID, format, tempBuffer, sampleRate);
      alSourceQueueBuffers(sourceID, bufferID);
      length -= written;
      offset += written;
      queuedBuffers++;
    }

    // Queue rest of buffers, empty.
    tempBuffer.clear().flip();
    for (int i = queuedBuffers; i < bufferCount; i++) {
      int bufferID = buffers.get(i);
      alBufferData(bufferID, format, tempBuffer, sampleRate);
      alSourceQueueBuffers(sourceID, bufferID);
    }

    alSourcePlay(sourceID);
    isPlaying = true;
  }

  while (length > 0) {
    int written = fillBuffer(data, offset, length);
    length -= written;
    offset += written;
  }
}
private void enumerateDevices() {
  if (!alcIsExtensionPresent(null, "ALC_ENUMERATION_EXT")) return;

  IntBuffer buf = BufferUtils.createIntBuffer(1);
  alcGetInteger(null, ALC_MAJOR_VERSION, buf);
  int major = buf.get(0);
  buf.position(0);
  alcGetInteger(null, ALC_MINOR_VERSION, buf);
  int minor = buf.get(0);
  System.out.println(String.format("AL Version: %s.%s", major, minor));

  String str = alcGetString(null, ALC_DEVICE_SPECIFIER);
  String[] devices = str.split("\0");
  for (String string : devices) {
    // System.out.println("Audio device: " + string);
    // if(string.contains("Wave File") && !hasWavWriter)
  }
}
/**
 * Render a text string in 2D over the scene, using the character set created by this GLFont
 * object.
 *
 * @see makeFont()
 */
public void print(int x, int y, String msg) {
  if (msg != null) {
    GL11.glPushMatrix();
    GL11.glEnable(GL11.GL_BLEND);
    GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);

    // enable the charset texture
    GL11.glEnable(GL11.GL_TEXTURE_2D);
    texture.bind();

    // draw the text
    GL11.glTranslatef(x, y, 0); // Position The Text (in pixel coords)
    IntBuffer buffer = BufferUtils.createIntBuffer(msg.length());
    for (int i = 0; i < msg.length(); i++) {
      buffer.put(fontListBase + (msg.charAt(i) - 32));
    }
    buffer.flip();
    GL11.glCallLists(buffer);

    GL11.glPopMatrix();
  }
}
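A hedged sketch of how print() is normally driven from a frame loop: switch to a pixel-space orthographic projection first, then call the method. The font instance, fps value, and window size come from the surrounding application and are placeholders here.

// Hypothetical per-frame overlay drawing around the print() call above.
GL11.glMatrixMode(GL11.GL_PROJECTION);
GL11.glPushMatrix();
GL11.glLoadIdentity();
GL11.glOrtho(0, Display.getWidth(), 0, Display.getHeight(), -1, 1);
GL11.glMatrixMode(GL11.GL_MODELVIEW);

font.print(10, 10, "FPS: " + fps); // pixel coordinates, origin at the bottom-left corner

GL11.glMatrixMode(GL11.GL_PROJECTION);
GL11.glPopMatrix();
GL11.glMatrixMode(GL11.GL_MODELVIEW);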
/** * Get the Sound based on a specified AIF file * * @param ref The reference to the AIF file in the classpath * @param in The stream to the AIF to load * @return The Sound read from the AIF file * @throws IOException Indicates a failure to load the AIF */ public Audio getAIF(String ref, InputStream in) throws IOException { in = new BufferedInputStream(in); if (!soundWorks) { return new NullAudio(); } if (!inited) { throw new RuntimeException( "Can't load sounds until SoundStore is init(). Use the container init() method."); } if (deferred) { return new DeferredSound(ref, in, DeferredSound.AIF); } int buffer = -1; if (loaded.get(ref) != null) { buffer = ((Integer) loaded.get(ref)).intValue(); } else { try { IntBuffer buf = BufferUtils.createIntBuffer(1); AiffData data = AiffData.create(in); AL10.alGenBuffers(buf); AL10.alBufferData(buf.get(0), data.format, data.data, data.samplerate); loaded.put(ref, new Integer(buf.get(0))); buffer = buf.get(0); } catch (Exception e) { Log.error(e); IOException x = new IOException("Failed to load: " + ref); x.initCause(e); throw x; } } if (buffer == -1) { throw new IOException("Unable to load: " + ref); } return new AudioImpl(this, buffer); }
private void verifyCompile(int shaderId) { IntBuffer intBuffer = BufferUtils.createIntBuffer(1); GL20.glGetShader(shaderId, GL20.GL_INFO_LOG_LENGTH, intBuffer); int length = intBuffer.get(); if (length <= 1) { return; } valid = false; ByteBuffer infoBuffer = BufferUtils.createByteBuffer(length); intBuffer.flip(); GL20.glGetShaderInfoLog(shaderId, intBuffer, infoBuffer); int actualLength = intBuffer.get(); byte[] infoBytes = new byte[actualLength]; infoBuffer.get(infoBytes); logger.log(Level.INFO, "{0}", new String(infoBytes)); }
public Point2D[] project( int tube, float[] modelMatrix, float[] projectionMatrix, int[] viewport, int step) { GLU glu = new GLU(); FloatBuffer modelbuf = BufferUtils.createFloatBuffer(16); modelbuf.put(modelMatrix); modelbuf.rewind(); FloatBuffer projbuf = BufferUtils.createFloatBuffer(16); projbuf.put(projectionMatrix); projbuf.rewind(); IntBuffer viewportbuf = BufferUtils.createIntBuffer(4); viewportbuf.put(viewport); viewportbuf.rewind(); Point2D[] projectedSegments = new Point2D[(int) Math.ceil(tubes[tube].segments.length / (double) step)]; int cnt = 0; for (int j = 0; j < tubes[tube].segments.length; j = j + step) { FloatBuffer result = BufferUtils.createFloatBuffer(3); glu.gluProject( tubes[tube].segments[j].x, tubes[tube].segments[j].y, tubes[tube].segments[j].z, modelbuf, projbuf, viewportbuf, result); Point2D p = new Point2D.Double(result.get(0), viewport[3] - result.get(1) - 1); projectedSegments[cnt++] = p; } return projectedSegments; }