/**
 * Routes the viewport's output into this processor's render target,
 * preferring the multisampled buffer when one was created.
 */
private void setupViewPortFrameBuffer() {
    // Use the MS buffer if multisampling is active, otherwise the plain one.
    FrameBuffer target = (renderFrameBufferMS != null) ? renderFrameBufferMS : renderFrameBuffer;
    viewPort.setOutputFrameBuffer(target);
} }
/**
 * Attaches this processor to the given viewports, detaching it first from
 * any viewports it was previously driving.
 *
 * @param overrideMainFramebuffer whether to render into the main framebuffer
 * @param vps the viewports to attach to; the processor is registered on the
 *            last one, which drives the rendering
 */
public void attachTo(boolean overrideMainFramebuffer, ViewPort... vps) {
    // Release any previously attached viewports: clear their output buffers
    // and unregister from the last one (the viewport that ran this processor).
    if (viewPorts.size() > 0) {
        for (ViewPort previous : viewPorts) {
            previous.setOutputFrameBuffer(null);
        }
        ViewPort last = viewPorts.get(viewPorts.size() - 1);
        last.removeProcessor(this);
    }
    viewPorts.addAll(Arrays.asList(vps));
    // Only the last viewport actually runs the processor.
    viewPorts.get(viewPorts.size() - 1).addProcessor(this);
    this.attachAsMain = overrideMainFramebuffer;
}
/**
 * Creates a square off-screen frame buffer with a depth attachment and
 * routes the given viewport's output into it.
 *
 * @param mapSize width and height of the frame buffer, in pixels
 * @param offView the off-screen viewport that will render into the buffer
 * @return the newly created frame buffer, already set as {@code offView}'s output
 */
protected FrameBuffer createOffScreenFrameBuffer(int mapSize, ViewPort offView) {
    // create offscreen framebuffer (single-sampled)
    final FrameBuffer offBuffer = new FrameBuffer(mapSize, mapSize, 1);
    offBuffer.setDepthBuffer(Image.Format.Depth);
    offView.setOutputFrameBuffer(offBuffer);
    return offBuffer;
}
/**
 * Rebuilds the render targets for the new viewport size.
 *
 * Recreates the single-sampled scene framebuffer/texture, and — when the GPU
 * supports it and more than one sample was requested — a multisampled
 * framebuffer that the viewport renders into instead.
 *
 * @param vp the viewport whose output framebuffer is reassigned
 * @param w  new width in pixels
 * @param h  new height in pixels
 */
public void reshape(ViewPort vp, int w, int h){
    // Rebuild the single-sample scene target at the new size.
    if (mainSceneFB != null){
        renderer.deleteFrameBuffer(mainSceneFB);
    }
    mainSceneFB = new FrameBuffer(w, h, 1);
    mainScene = new Texture2D(w, h, bufFormat);
    mainSceneFB.setDepthBuffer(Format.Depth);
    mainSceneFB.setColorTexture(mainScene);
    mainScene.setMagFilter(fbMagFilter);
    mainScene.setMinFilter(fbMinFilter);

    if (msFB != null){
        renderer.deleteFrameBuffer(msFB);
        // Clear the field so we never keep (or re-delete) a reference to a
        // deleted framebuffer if the non-multisampled path is taken below.
        msFB = null;
    }

    tone.setTexture("Texture", mainScene);

    Collection<Caps> caps = renderer.getCaps();
    if (numSamples > 1 && caps.contains(Caps.FrameBufferMultisample)){
        msFB = new FrameBuffer(w, h, numSamples);
        msFB.setDepthBuffer(Format.Depth);
        msFB.setColorBuffer(bufFormat);
        vp.setOutputFrameBuffer(msFB);
    } else {
        if (numSamples > 1) {
            logger.warning("FBO multisampling not supported on this GPU, request ignored.");
        }
        vp.setOutputFrameBuffer(mainSceneFB);
    }

    createLumShaders();
}
/**
 * Sets up the render targets for the viewport's current camera size,
 * redirects the viewport into the offscreen framebuffer, and attaches the
 * on-screen debug displays to the GUI node.
 */
public void initialize(RenderManager rm, ViewPort vp) {
    // Allocate framebuffers/textures sized to the camera.
    reshape(vp, vp.getCamera().getWidth(), vp.getCamera().getHeight());
    // Render the scene into our framebuffer rather than the screen.
    viewPort.setOutputFrameBuffer(fb);
    guiViewPort.setClearFlags(true, true, true);
    guiNode.attachChild(display);
    // Extra debug displays left disabled on purpose; only display2 is shown.
    // guiNode.attachChild(display1);
    guiNode.attachChild(display2);
    // guiNode.attachChild(display3);
    // guiNode.attachChild(display4);
    guiNode.updateGeometricState();
}
/**
 * Computes the index of the last enabled filter (the one whose output goes
 * to the final framebuffer), and keeps the viewport's output framebuffer
 * consistent with whether any filter is enabled at all.
 */
private void updateLastFilterIndex() {
    lastFilterIndex = -1;
    // Scan backwards; the first enabled filter found is the last to render.
    for (int i = filters.size() - 1; i >= 0 && lastFilterIndex == -1; i--) {
        if (filters.get(i).isEnabled()) {
            lastFilterIndex = i;
            // The FPP is initialized, but the viewport framebuffer is the
            // original output framebuffer, so we must recover from a situation
            // where no filter was enabled. So we set the correct framebuffer
            // on the viewport.
            if (isInitialized() && viewPort.getOutputFrameBuffer() == outputBuffer) {
                setupViewPortFrameBuffer();
            }
            return;
        }
    }
    if (isInitialized() && lastFilterIndex == -1) {
        // There is no enabled filter; we restore the original framebuffer
        // to the viewport to bypass the FPP.
        viewPort.setOutputFrameBuffer(outputBuffer);
    }
}
private void setupFinalFullTexture(Camera cam) { if (environment != null) { if (environment.getApplication() != null) { // create offscreen framebuffer FrameBuffer out = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1); //offBuffer.setSrgb(true); //setup framebuffer's texture dualEyeTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8); dualEyeTex.setMinFilter(Texture2D.MinFilter.BilinearNoMipMaps); dualEyeTex.setMagFilter(Texture2D.MagFilter.Bilinear); logger.config("Dual eye texture " + dualEyeTex.getName() + " (" + dualEyeTex.getImage().getId() + ")"); logger.config(" Type: " + dualEyeTex.getType()); logger.config(" Size: " + dualEyeTex.getImage().getWidth() + "x" + dualEyeTex.getImage().getHeight()); logger.config(" Image depth: " + dualEyeTex.getImage().getDepth()); logger.config(" Image format: " + dualEyeTex.getImage().getFormat()); logger.config(" Image color space: " + dualEyeTex.getImage().getColorSpace()); //setup framebuffer to use texture out.setDepthBuffer(Image.Format.Depth); out.setColorTexture(dualEyeTex); ViewPort viewPort = environment.getApplication().getViewPort(); viewPort.setClearFlags(true, true, true); viewPort.setBackgroundColor(ColorRGBA.Black); viewPort.setOutputFrameBuffer(out); } else { throw new IllegalStateException("This VR environment is not attached to any application."); } } else { throw new IllegalStateException("This VR view manager is not attached to any VR environment."); } }
viewPort.setClearFlags(true, true, true); viewPort.setBackgroundColor(ColorRGBA.Black); viewPort.setOutputFrameBuffer(out); } else { throw new IllegalStateException("This VR environment is not attached to any application.");
private ViewPort setupViewBuffers(Camera cam, String viewName) { if (environment != null) { if (environment.getApplication() != null) { // create offscreen framebuffer FrameBuffer offBufferLeft = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1); //offBufferLeft.setSrgb(true); //setup framebuffer's texture Texture2D offTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8); offTex.setMinFilter(Texture2D.MinFilter.BilinearNoMipMaps); offTex.setMagFilter(Texture2D.MagFilter.Bilinear); //setup framebuffer to use texture offBufferLeft.setDepthBuffer(Image.Format.Depth); offBufferLeft.setColorTexture(offTex); ViewPort viewPort = environment.getApplication().getRenderManager().createPreView(viewName, cam); viewPort.setClearFlags(true, true, true); viewPort.setBackgroundColor(ColorRGBA.Black); Iterator<Spatial> spatialIter = environment.getApplication().getViewPort().getScenes().iterator(); while (spatialIter.hasNext()) { viewPort.attachScene(spatialIter.next()); } //set viewport to render to offscreen framebuffer viewPort.setOutputFrameBuffer(offBufferLeft); return viewPort; } else { throw new IllegalStateException("This VR environment is not attached to any application."); } } else { throw new IllegalStateException("This VR view manager is not attached to any VR environment."); } }
viewPort.setOutputFrameBuffer(offBufferLeft); return viewPort; } else {
pic.setTexture(environment.getApplication().getAssetManager(), (Texture2D)tex, false); viewPort.attachScene(pic); viewPort.setOutputFrameBuffer(null);
/**
 * Creates a post-view that mirrors the VR output texture onto the screen as a
 * full-screen picture quad.
 *
 * @param cam    camera to clone for the mirror view (parallel projection)
 * @param tex    the texture to display
 * @param expand when true the picture is stretched wider (3x instead of 1.5x)
 * @return the configured mirror viewport, rendering to the default framebuffer
 * @throws IllegalStateException if no environment or application is attached
 */
private ViewPort setupMirrorBuffers(Camera cam, Texture2D tex, boolean expand) {
    // Guard clauses: require an attached environment and application.
    if (environment == null) {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
    if (environment.getApplication() == null) {
        throw new IllegalStateException("This VR environment is not attached to any application.");
    }

    Camera clonecam = cam.clone();
    ViewPort viewPort = environment.getApplication().getRenderManager().createPostView("MirrorView", clonecam);
    clonecam.setParallelProjection(true);
    viewPort.setClearFlags(true, true, true);
    viewPort.setBackgroundColor(ColorRGBA.Black);

    Picture pic = new Picture("fullscene");
    pic.setLocalTranslation(-0.75f, -0.5f, 0f);
    // Wider scale when expanding the mirror to cover both eyes.
    pic.setLocalScale(expand ? 3f : 1.5f, 1f, 1f);
    pic.setQueueBucket(Bucket.Opaque);
    pic.setTexture(environment.getApplication().getAssetManager(), (Texture2D) tex, false);
    viewPort.attachScene(pic);

    // null output buffer = render directly to the screen.
    viewPort.setOutputFrameBuffer(null);
    pic.updateGeometricState();
    return viewPort;
}
/**
 * Prepares per-eye render targets for this frame.
 *
 * Computes the render pose for each eye via LibOVR, then points each eye's
 * viewport at the framebuffer backing the current slot of that eye's texture
 * swap chain, so the game renders into the buffers LibOVR expects.
 */
@Override
public void render() {
    // Calculate the render pose (translation/rotation) for each eye.
    // LibOVR takes the difference between this and the real position of each eye at display time
    // to apply AZW (timewarp).
    OVRPosef.Buffer hmdToEyeOffsets = OVRPosef.calloc(2);
    hmdToEyeOffsets.put(0, hardware.getEyePose(ovrEye_Left));
    hmdToEyeOffsets.put(1, hardware.getEyePose(ovrEye_Right));

    //calculate eye poses
    OVRUtil.ovr_CalcEyePoses(hardware.getHeadPose(), hmdToEyeOffsets, hardware.getLayer0().RenderPose());
    // Free the native buffer; the results now live in the layer's RenderPose.
    hmdToEyeOffsets.free();

    for (int eye = 0; eye < 2; eye++) {
        // NOTE(review): a fresh IntBuffer is allocated every frame per eye;
        // presumably cheap, but could be hoisted — confirm before changing.
        IntBuffer currentIndexB = BufferUtils.createIntBuffer(1);
        ovr_GetTextureSwapChainCurrentIndex(session(), hardware.getChain(eye), currentIndexB);
        int index = currentIndexB.get();

        // Constantly (each frame) rotating through a series of
        // frame buffers, so make sure we write into the correct one.
        (eye == ovrEye_Left ? leftViewPort : rightViewPort).setOutputFrameBuffer(hardware.getFramebuffers(eye)[index]);
    }

    // Now the game will render into the buffers given to us by LibOVR
}
pic.setTexture(environment.getApplication().getAssetManager(), (Texture2D)tex, false); viewPort.attachScene(pic); viewPort.setOutputFrameBuffer(null);
public void cleanup() { if (viewPort != null) { //reset the viewport camera viewport to its initial value viewPort.getCamera().resize(originalWidth, originalHeight, true); viewPort.getCamera().setViewPort(left, right, bottom, top); viewPort.setOutputFrameBuffer(outputBuffer); viewPort = null; if(renderFrameBuffer != null){ renderFrameBuffer.dispose(); } if(depthTexture!=null){ depthTexture.getImage().dispose(); } filterTexture.getImage().dispose(); if(renderFrameBufferMS != null){ renderFrameBufferMS.dispose(); } for (Filter filter : filters.getArray()) { filter.cleanup(renderer); } } }
/**
 * Called once per frame before rendering: restores the original camera and
 * framebuffer when no filter is active, otherwise (re)binds the processor's
 * framebuffer, then gives every enabled filter its per-frame callback.
 *
 * @param tpf time per frame, forwarded to each enabled filter
 */
public void preFrame(float tpf) {
    if (filters.isEmpty() || lastFilterIndex == -1) {
        // If the camera is initialized and there are no filters to render,
        // the camera viewport is restored as it was.
        if (cameraInit) {
            viewPort.getCamera().resize(originalWidth, originalHeight, true);
            viewPort.getCamera().setViewPort(left, right, bottom, top);
            viewPort.setOutputFrameBuffer(outputBuffer);
            cameraInit = false;
        }
    } else {
        setupViewPortFrameBuffer();
        // If we are in a multiview situation we need to resize the camera
        // to the viewport size so that the backbuffer is rendered correctly.
        if (multiView) {
            viewPort.getCamera().resize(width, height, false);
            viewPort.getCamera().setViewPort(0, 1, 0, 1);
            viewPort.getCamera().update();
            renderManager.setCamera(viewPort.getCamera(), false);
        }
    }

    // Per-frame hook for every enabled filter (with optional profiling).
    for (Filter filter : filters.getArray()) {
        if (filter.isEnabled()) {
            if (prof != null) prof.spStep(SpStep.ProcPreFrame, FPP, filter.getName());
            filter.preFrame(tpf);
        }
    }
}
offView.setOutputFrameBuffer(offBuffer);
offView.setOutputFrameBuffer(offBuffer);
niftyView.setOutputFrameBuffer(fb);
/**
 * Demo setup: renders the main viewport (with a blue overlay filter) into an
 * offscreen framebuffer, then composes that texture with the GUI viewport
 * (red overlay) via a ComposeFilter, blending on the GUI's alpha channel.
 */
public void simpleInitApp() {
    this.flyCam.setMoveSpeed(10);
    cam.setLocation(new Vector3f(0.028406568f, 2.015769f, 7.386517f));
    cam.setRotation(new Quaternion(-1.0729783E-5f, 0.9999721f, -0.0073241726f, -0.0014647911f));
    makeScene();

    // Creating the main view port post processor.
    FilterPostProcessor fpp = new FilterPostProcessor(assetManager);
    fpp.addFilter(new ColorOverlayFilter(ColorRGBA.Blue));
    viewPort.addProcessor(fpp);

    // Creating a frame buffer for the main viewport; its color texture is
    // reused below as input to the GUI compose filter.
    FrameBuffer mainVPFrameBuffer = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
    Texture2D mainVPTexture = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
    mainVPFrameBuffer.addColorTexture(mainVPTexture);
    mainVPFrameBuffer.setDepthBuffer(Image.Format.Depth);
    viewPort.setOutputFrameBuffer(mainVPFrameBuffer);

    // Creating the post processor for the gui viewport.
    final FilterPostProcessor guifpp = new FilterPostProcessor(assetManager);
    guifpp.setFrameBufferFormat(Image.Format.RGBA8);
    guifpp.addFilter(new ColorOverlayFilter(ColorRGBA.Red));
    // This will compose the main viewport texture with the guiviewport back buffer.
    // Note that you can switch the order of the filters so that guiviewport
    // filters are applied or not to the main viewport texture.
    guifpp.addFilter(new ComposeFilter(mainVPTexture));
    guiViewPort.addProcessor(guifpp);

    // Compositing is done by mixing textures depending on the alpha channel;
    // it's important that the guiviewport clear color alpha value is set to 0.
    guiViewPort.setBackgroundColor(ColorRGBA.BlackNoAlpha);
    guiViewPort.setClearColor(true);
}