/** Configures the device: channel count, 16-bit OpenAL format, buffer geometry,
 * and the scratch buffer used to hand sample data to OpenAL. */
public OpenALAudioDevice (OpenALAudio audio, int sampleRate, boolean isMono, int bufferSize, int bufferCount) {
    this.audio = audio;
    this.sampleRate = sampleRate;
    this.bufferSize = bufferSize;
    this.bufferCount = bufferCount;
    channels = isMono ? 1 : 2;
    // Mono -> AL_FORMAT_MONO16, stereo -> AL_FORMAT_STEREO16 (16-bit samples either way).
    format = (channels == 1) ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16;
    // Playback time covered by one buffer, in seconds.
    secondsPerBuffer = (float)bufferSize / bytesPerSample / channels / sampleRate;
    tempBuffer = BufferUtils.createByteBuffer(bufferSize);
}
/** Sets up channel count, OpenAL sample format, buffer sizing, and the temp
 * buffer through which PCM data is streamed to OpenAL. */
public OpenALAudioDevice (OpenALAudio audio, int sampleRate, boolean isMono, int bufferSize, int bufferCount) {
    this.audio = audio;
    channels = isMono ? 1 : 2;
    // 16-bit format matching the channel layout.
    this.format = (channels == 1) ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16;
    this.sampleRate = sampleRate;
    this.bufferSize = bufferSize;
    this.bufferCount = bufferCount;
    // Seconds of audio one buffer holds: bytes / (bytes per sample * channels * rate).
    secondsPerBuffer = (float)bufferSize / bytesPerSample / channels / sampleRate;
    tempBuffer = BufferUtils.createByteBuffer(bufferSize);
}
/** Initialises the audio device: derives the OpenAL format from the channel
 * layout and allocates the scratch buffer used when writing samples. */
public OpenALAudioDevice (OpenALAudio audio, int sampleRate, boolean isMono, int bufferSize, int bufferCount) {
    this.audio = audio;
    this.sampleRate = sampleRate;
    this.bufferSize = bufferSize;
    this.bufferCount = bufferCount;
    channels = isMono ? 1 : 2;
    // Both formats are 16 bits per sample; only the channel count differs.
    format = (channels > 1) ? AL_FORMAT_STEREO16 : AL_FORMAT_MONO16;
    // Duration (seconds) of audio that fits in a single buffer.
    secondsPerBuffer = (float)bufferSize / bytesPerSample / channels / sampleRate;
    tempBuffer = BufferUtils.createByteBuffer(bufferSize);
}
/** Builds the device, recording buffer geometry and allocating the direct
 * byte buffer used to feed PCM data into OpenAL. */
public OpenALAudioDevice (OpenALAudio audio, int sampleRate, boolean isMono, int bufferSize, int bufferCount) {
    this.audio = audio;
    channels = isMono ? 1 : 2;
    this.bufferSize = bufferSize;
    this.bufferCount = bufferCount;
    this.sampleRate = sampleRate;
    // Pick the matching 16-bit OpenAL format for the channel layout.
    this.format = (channels > 1) ? AL_FORMAT_STEREO16 : AL_FORMAT_MONO16;
    // How many seconds of playback a single buffer covers.
    secondsPerBuffer = (float)bufferSize / bytesPerSample / channels / sampleRate;
    tempBuffer = BufferUtils.createByteBuffer(bufferSize);
}
/** Allocates a 16-byte direct scratch buffer and exposes typed views over the
 * same memory; writes through one view are visible through all of them. */
public TempBuffer() {
    b16 = BufferUtils.createByteBuffer(16);
    // Typed aliases over the same 16 bytes (view creation order is irrelevant).
    b16d = b16.asDoubleBuffer();
    b16f = b16.asFloatBuffer();
    b16l = b16.asLongBuffer();
    b16i = b16.asIntBuffer();
    b16s = b16.asShortBuffer();
}
}
/** Creates the shared 16-byte scratch buffer plus short/int/long/float/double
 * views that all alias the same underlying memory. */
public TempBuffer() {
    b16 = BufferUtils.createByteBuffer(16);
    b16s = b16.asShortBuffer();   // 8 shorts
    b16i = b16.asIntBuffer();     // 4 ints
    b16f = b16.asFloatBuffer();   // 4 floats
    b16l = b16.asLongBuffer();    // 2 longs
    b16d = b16.asDoubleBuffer();  // 2 doubles
}
}
/** Allocates the backing direct buffer at the default capacity and caches the
 * buffer's native memory address for off-heap access. */
public APIBuffer() {
    buffer = BufferUtils.createByteBuffer(DEFAULT_CAPACITY);
    // Cache the base address of the direct buffer so it need not be re-queried.
    address = memAddress(buffer);
}
/** Creates a stream that decodes OGG data from {@code input}.
 *
 * When {@code previousStream} is non-null its decode buffers are adopted instead
 * of allocating new ones; the old stream instance must not be used afterwards.
 *
 * @param input The input stream from which to read the OGG file
 * @param previousStream The stream instance to reuse buffers from, may be null */
OggInputStream (InputStream input, OggInputStream previousStream) {
    if (previousStream != null) {
        // Adopt the (large) decode buffers rather than reallocating them.
        convbuffer = previousStream.convbuffer;
        pcmBuffer = previousStream.pcmBuffer;
    } else {
        convbuffer = new byte[convsize];
        pcmBuffer = BufferUtils.createByteBuffer(4096 * 500);
    }
    this.input = input;
    try {
        // NOTE(review): available() only estimates the remaining bytes — verify callers
        // treat 'total' as approximate.
        total = input.available();
    } catch (IOException ex) {
        throw new GdxRuntimeException(ex);
    }
    init();
}
/** Create a new stream to decode OGG data, reusing buffers from another stream.
 *
 * It's not a good idea to use the old stream instance afterwards.
 *
 * @param input The input stream from which to read the OGG file
 * @param previousStream The stream instance to reuse buffers from, may be null */
OggInputStream (InputStream input, OggInputStream previousStream) {
    boolean reuse = previousStream != null;
    // Either take over the previous stream's decode buffers or allocate fresh ones.
    convbuffer = reuse ? previousStream.convbuffer : new byte[convsize];
    pcmBuffer = reuse ? previousStream.pcmBuffer : BufferUtils.createByteBuffer(4096 * 500);
    this.input = input;
    try {
        total = input.available();
    } catch (IOException ex) {
        // Wrap as an unchecked engine exception, preserving the cause.
        throw new GdxRuntimeException(ex);
    }
    init();
}
/** Grows the backing buffer to at least {@code capacity} bytes (rounded up to a
 * power of two), carrying over the bytes in the old buffer. No-op when the
 * current buffer is already large enough. */
private void ensureCapacity(int capacity) {
    if (capacity > buffer.capacity()) {
        ByteBuffer grown = BufferUtils.createByteBuffer(mathRoundPoT(capacity));
        // Transfer the old buffer's remaining bytes, then reset position/limit for reuse.
        grown.put(buffer);
        grown.clear();
        buffer = grown;
        address = memAddress(grown);
    }
}
/** Loads an OGG asset, decoding the entire stream into a direct buffer of
 * 16-bit PCM samples wrapped in a {@link StaticSoundData}. */
@Override
public StaticSoundData load(ResourceUrn urn, List<AssetDataFile> inputs) throws IOException {
    try (OggReader reader = new OggReader(inputs.get(0).openStream())) {
        // Decode everything into heap memory first.
        ByteArrayOutputStream decoded = new ByteArrayOutputStream();
        ByteStreams.copy(reader, decoded);
        // Move the decoded PCM into a direct buffer and prepare it for reading.
        ByteBuffer data = BufferUtils.createByteBuffer(decoded.size());
        data.put(decoded.toByteArray());
        data.flip();
        return new StaticSoundData(data, reader.getChannels(), reader.getRate(), 16);
    } catch (IOException e) {
        throw new IOException("Failed to load sound: " + e.getMessage(), e);
    }
}
}
public ByteBuffer readBackPixels() { bind(); cachedBuffer = glMapBufferARB(GL_PIXEL_PACK_BUFFER_EXT, GL_READ_ONLY, cachedBuffer); // Maybe fix for the issues appearing on some platforms where accessing the "cachedBuffer" causes a JVM exception and therefore a crash... ByteBuffer resultBuffer = BufferUtils.createByteBuffer(cachedBuffer.capacity()); resultBuffer.put(cachedBuffer); cachedBuffer.rewind(); resultBuffer.flip(); glUnmapBufferARB(GL_PIXEL_PACK_BUFFER_EXT); unbind(); return resultBuffer; } }
/** Returns the SSAO noise texture, generating and registering it on first use.
 * Each texel packs a random unit vector in the XY plane from [-1,1] into
 * [0,255] RGB bytes; alpha is unused. */
private Texture generateNoiseTexture() {
    Optional<Texture> texture = Assets.getTexture("engine:ssaoNoise");
    if (texture.isPresent()) {
        return texture.get();
    }
    ByteBuffer noiseValues = BufferUtils.createByteBuffer(SSAO_NOISE_SIZE * SSAO_NOISE_SIZE * 4);
    for (int i = 0; i < SSAO_NOISE_SIZE * SSAO_NOISE_SIZE; ++i) {
        Vector3f noiseVector = new Vector3f(randomGenerator.nextFloat(-1.0f, 1.0f), randomGenerator.nextFloat(-1.0f, 1.0f), 0.0f);
        noiseVector.normalize();
        // Remap each component from [-1, 1] to an unsigned byte.
        noiseValues.put((byte) ((noiseVector.x * 0.5 + 0.5) * 255.0f));
        noiseValues.put((byte) ((noiseVector.y * 0.5 + 0.5) * 255.0f));
        noiseValues.put((byte) ((noiseVector.z * 0.5 + 0.5) * 255.0f));
        noiseValues.put((byte) 0x0);
    }
    noiseValues.flip();
    return Assets.generateAsset(new ResourceUrn("engine:ssaoNoise"),
            new TextureData(SSAO_NOISE_SIZE, SSAO_NOISE_SIZE, new ByteBuffer[]{noiseValues},
                    Texture.WrapMode.REPEAT, Texture.FilterMode.NEAREST),
            Texture.class);
}
}
/** Creates a pixel-buffer object sized for a width x height RGBA image
 * (4 bytes per pixel) along with a matching client-side staging buffer. */
public PBO(int width, int height) {
    pboId = glGenBuffersARB();
    int sizeInBytes = width * height * 4; // RGBA: one byte per component
    cachedBuffer = BufferUtils.createByteBuffer(sizeInBytes);
    bind();
    // Allocate GPU-side storage hinted for GPU-write / CPU-read streaming.
    glBufferDataARB(GL_PIXEL_PACK_BUFFER_EXT, sizeInBytes, GL_STREAM_READ_ARB);
    unbind();
}
/**
 * Returns the content of the color buffer from GPU memory as a ByteBuffer.
 *
 * The texture is read back as RGBA with one unsigned byte per component.
 *
 * @return a ByteBuffer or null
 */
public ByteBuffer getColorBufferRawData() {
    ByteBuffer pixels = BufferUtils.createByteBuffer(4 * this.width() * this.height());
    this.bindTexture();
    GL11.glGetTexImage(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGBA, GL11.GL_UNSIGNED_BYTE, pixels);
    FBO.unbindTexture();
    return pixels;
}
/**
 * Queries the OpenCL context for the image formats supported with the given
 * memory-access flags and image type.
 *
 * Two-phase query: the first clGetSupportedImageFormats call returns only the
 * count; the second fills a buffer with the actual format descriptors.
 *
 * @param access how the image memory will be accessed (mapped to CL mem flags)
 * @param type the image type to query formats for
 * @return the supported formats; empty array when none are supported
 */
@Override
public ImageFormat[] querySupportedFormats(MemoryAccess access, Image.ImageType type) {
    long memFlags = Utils.getMemoryAccessFlags(access);
    int typeFlag = LwjglImage.decodeImageType(type);
    Utils.tempBuffers[0].b16i.rewind();
    // query count: passing a null format buffer asks only for the number of entries
    int ret = CL10.clGetSupportedImageFormats(context, memFlags, typeFlag, null, Utils.tempBuffers[0].b16i);
    Utils.checkError(ret, "clGetSupportedImageFormats");
    int count = Utils.tempBuffers[0].b16i.get(0);
    if (count == 0) {
        return new ImageFormat[0];
    }
    // get formats: each cl_image_format is 8 bytes (two ints)
    ByteBuffer formatsB = BufferUtils.createByteBuffer(count * 8);
    ret = CL10.clGetSupportedImageFormats(context, memFlags, typeFlag, formatsB, null);
    Utils.checkError(ret, "clGetSupportedImageFormats");
    // convert formats: consecutive int pairs are (channel order, channel data type)
    ImageFormat[] formats = new ImageFormat[count];
    IntBuffer formatsBi = formatsB.asIntBuffer();
    formatsBi.rewind();
    for (int i=0; i<count; ++i) {
        Image.ImageChannelOrder channelOrder = LwjglImage.encodeImageChannelOrder(formatsBi.get());
        Image.ImageChannelType channelType = LwjglImage.encodeImageChannelType(formatsBi.get());
        formats[i] = new ImageFormat(channelOrder, channelType);
    }
    return formats;
}
/**
 * Fetches the OpenCL build log for the given device.
 *
 * NOTE(review): the method name "Log" violates Java naming conventions (should
 * be lowercase) but is kept unchanged so in-file callers are unaffected.
 *
 * @param device the cl_device_id whose build log is requested
 * @return the build log text, possibly empty
 */
private String Log(long device) {
    Utils.pointerBuffers[0].rewind();
    // First call: query the size of the build log in bytes. Per the OpenCL spec
    // this size INCLUDES the trailing NUL terminator.
    int ret = CL10.clGetProgramBuildInfo(program, device, CL10.CL_PROGRAM_BUILD_LOG, (ByteBuffer) null, Utils.pointerBuffers[0]);
    Utils.checkError(ret, "clGetProgramBuildInfo");
    int count = (int) Utils.pointerBuffers[0].get(0);
    if (count <= 0) {
        // No log available; avoid a zero/negative-length buffer allocation.
        return "";
    }
    final ByteBuffer buffer = BufferUtils.createByteBuffer(count);
    ret = CL10.clGetProgramBuildInfo(program, device, CL10.CL_PROGRAM_BUILD_LOG, buffer, null);
    Utils.checkError(ret, "clGetProgramBuildInfo");
    // Decode count - 1 bytes so the NUL terminator is not carried into the String.
    return MemoryUtil.memASCII(buffer, count - 1);
}
/**
 * Queries the OpenCL context for the image formats supported with the given
 * memory-access flags and image type (LWJGL3 struct-buffer variant).
 *
 * Two-phase query: the first clGetSupportedImageFormats call returns only the
 * count; the second fills a CLImageFormat struct buffer with the descriptors.
 *
 * @param access how the image memory will be accessed (mapped to CL mem flags)
 * @param type the image type to query formats for
 * @return the supported formats; empty array when none are supported
 */
@Override
public ImageFormat[] querySupportedFormats(MemoryAccess access, Image.ImageType type) {
    long memFlags = Utils.getMemoryAccessFlags(access);
    int typeFlag = LwjglImage.decodeImageType(type);
    Utils.tempBuffers[0].b16i.rewind();
    // query count: a null format buffer asks only for the number of entries
    int ret = CL10.clGetSupportedImageFormats(context, memFlags, typeFlag, null, Utils.tempBuffers[0].b16i);
    Utils.checkError(ret, "clGetSupportedImageFormats");
    int count = Utils.tempBuffers[0].b16i.get(0);
    if (count == 0) {
        return new ImageFormat[0];
    }
    // get formats: allocate a struct buffer sized for 'count' cl_image_format entries
    CLImageFormat.Buffer formatsB = new CLImageFormat.Buffer(BufferUtils.createByteBuffer(count * CLImageFormat.SIZEOF));
    ret = CL10.clGetSupportedImageFormats(context, memFlags, typeFlag, formatsB, (IntBuffer) null);
    Utils.checkError(ret, "clGetSupportedImageFormats");
    // convert formats: map each struct's order/data-type fields to the engine enums
    ImageFormat[] formats = new ImageFormat[count];
    for (int i=0; i<count; ++i) {
        CLImageFormat f = formatsB.get();
        Image.ImageChannelOrder channelOrder = LwjglImage.encodeImageChannelOrder(f.image_channel_order());
        Image.ImageChannelType channelType = LwjglImage.encodeImageChannelType(f.image_channel_data_type());
        formats[i] = new ImageFormat(channelOrder, channelType);
    }
    return formats;
}
/** Allocates a new direct buffer of {@code newCapacity} bytes, copies the bytes
 * written to {@code buffer} so far into it, and returns the new buffer. The
 * caller must ensure the new capacity can hold the written content. */
private static ByteBuffer resizeBuffer(ByteBuffer buffer, int newCapacity) {
    ByteBuffer grown = BufferUtils.createByteBuffer(newCapacity);
    // flip() bounds the copy to the bytes written so far: [0, position).
    buffer.flip();
    grown.put(buffer);
    return grown;
}
/** Returns a fresh direct buffer of the requested capacity containing all bytes
 * written to {@code buffer} so far; the old buffer is flipped (and thus spent). */
private static ByteBuffer resizeBuffer(ByteBuffer buffer, int newCapacity) {
    ByteBuffer replacement = BufferUtils.createByteBuffer(newCapacity);
    // Flip first so only the written region [0, position) is transferred.
    buffer.flip();
    replacement.put(buffer);
    return replacement;
}