Move rendering setup from Oculus VRViewManager to OculusVR, and implement cleanup

empirephoenix-patch-1
Campbell Suter 7 years ago
parent 1c975918b1
commit 8a3336704a
  1. jme3-vr/src/main/java/com/jme3/input/vr/OculusVR.java (180 changed lines)
  2. jme3-vr/src/main/java/com/jme3/util/VRViewManagerOculus.java (138 changed lines)

@@ -8,11 +8,17 @@ package com.jme3.input.vr;
import com.jme3.app.VREnvironment;
import com.jme3.math.*;
import com.jme3.renderer.Camera;
import com.jme3.texture.FrameBuffer;
import com.jme3.texture.Image;
import com.jme3.texture.Texture2D;
import org.lwjgl.BufferUtils;
import org.lwjgl.PointerBuffer;
import org.lwjgl.ovr.*;
import java.nio.IntBuffer;
import java.util.logging.Logger;
import static org.lwjgl.BufferUtils.createPointerBuffer;
import static org.lwjgl.ovr.OVR.*;
import static org.lwjgl.ovr.OVRErrorCode.ovrSuccess;
import static org.lwjgl.ovr.OVRUtil.ovr_Detect;
@@ -81,6 +87,24 @@ public class OculusVR implements VRAPI {
*/
private final Matrix4f[] eyePoses = new Matrix4f[2];
// The size of the texture drawn onto the HMD
private int textureW;
private int textureH;
// Layers to render into
private PointerBuffer layers;
private OVRLayerEyeFov layer0;
/**
* The texture swap chain - the set of textures we cycle through and render into.
*/
private long chain;
/**
* Frame buffers we can draw into.
*/
private FrameBuffer[] framebuffers;
public OculusVR(VREnvironment environment) {
this.environment = environment;
}
@@ -226,7 +250,32 @@ public class OculusVR implements VRAPI {
@Override
public void destroy() {
-throw new UnsupportedOperationException();
// fovPorts: contents are managed by LibOVR, no need to do anything.
// Check if we've set up rendering - if so, clean that up.
if (chain != 0) {
// Destroy our set of huge buffer images.
ovr_DestroyTextureSwapChain(session, chain);
// Free up the layer
layer0.free();
// The layers array apparently takes care of itself (and crashes if we try to free it)
}
for (OVREyeRenderDesc eye : eyeRenderDesc) {
eye.free();
}
for (OVRMatrix4f projection : projections) {
projection.free();
}
hmdDesc.free();
sessionStatus.free();
// Wrap everything up
ovr_Destroy(session);
ovr_Shutdown();
}
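For context, a minimal sketch of where this cleanup might be driven from - the oculus field and the surrounding application class are illustrative assumptions, not part of this commit; the point is simply that destroy() should run once, after the last frame has been submitted:

    // Hypothetical application shutdown path (names are illustrative):
    @Override
    public void destroy() {
        if (oculus != null) {
            oculus.destroy();   // frees the swap chain, layer and native descriptors, then shuts down LibOVR
            oculus = null;      // guard against calling destroy() twice
        }
        super.destroy();        // normal jME application teardown
    }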
@Override
@@ -305,6 +354,10 @@ public class OculusVR implements VRAPI {
throw new UnsupportedOperationException("Cannot use LibOVR without compositor!");
}
findHMDTextureSize();
setupLayers();
setupFramebuffers();
// TODO move initialization code here from VRViewManagerOculus
return true;
}
@@ -325,6 +378,114 @@ public class OculusVR implements VRAPI {
throw new UnsupportedOperationException("Not yet implemented!");
}
// Rendering-type stuff
public void findHMDTextureSize() {
// Texture sizes
float pixelScaling = 1.0f; // pixelsPerDisplayPixel
OVRSizei leftTextureSize = OVRSizei.malloc();
ovr_GetFovTextureSize(session, ovrEye_Left, fovPorts[ovrEye_Left], pixelScaling, leftTextureSize);
System.out.println("leftTextureSize W=" + leftTextureSize.w() + ", H=" + leftTextureSize.h());
OVRSizei rightTextureSize = OVRSizei.malloc();
ovr_GetFovTextureSize(session, ovrEye_Right, fovPorts[ovrEye_Right], pixelScaling, rightTextureSize);
System.out.println("rightTextureSize W=" + rightTextureSize.w() + ", H=" + rightTextureSize.h());
textureW = leftTextureSize.w() + rightTextureSize.w();
textureH = Math.max(leftTextureSize.h(), rightTextureSize.h());
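// For a sense of scale: on a Rift CV1 at pixelScaling = 1.0 each eye typically reports
// roughly 1344x1600 here, i.e. a single shared render target of about 2688x1600
// (illustrative figures; the exact values depend on the headset and FOV).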
leftTextureSize.free();
rightTextureSize.free();
}
private PointerBuffer setupTextureChain() {
// Describe the texture swap chain we want LibOVR to create
OVRTextureSwapChainDesc swapChainDesc = OVRTextureSwapChainDesc.calloc()
.Type(ovrTexture_2D)
.ArraySize(1)
.Format(OVR_FORMAT_R8G8B8A8_UNORM_SRGB)
.Width(textureW)
.Height(textureH)
.MipLevels(1)
.SampleCount(1)
.StaticImage(false); // ovrFalse
// Create the chain
PointerBuffer textureSetPB = createPointerBuffer(1);
if (OVRGL.ovr_CreateTextureSwapChainGL(session, swapChainDesc, textureSetPB) != ovrSuccess) {
throw new RuntimeException("Failed to create Swap Texture Set");
}
chain = textureSetPB.get(0);
swapChainDesc.free();
System.out.println("done chain creation");
return textureSetPB;
}
public void setupLayers() {
PointerBuffer chainPtr = setupTextureChain();
//Layers
layer0 = OVRLayerEyeFov.calloc();
layer0.Header().Type(ovrLayerType_EyeFov);
layer0.Header().Flags(ovrLayerFlag_TextureOriginAtBottomLeft);
for (int eye = 0; eye < 2; eye++) {
OVRRecti viewport = OVRRecti.calloc();
viewport.Pos().x(0);
viewport.Pos().y(0);
viewport.Size().w(textureW);
viewport.Size().h(textureH);
layer0.ColorTexture(chainPtr);
layer0.Viewport(eye, viewport);
layer0.Fov(eye, fovPorts[eye]);
viewport.free();
// the render pose is only filled in per frame, in the render loop, once we actually have it
}
layers = createPointerBuffer(1);
layers.put(0, layer0);
}
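As an aside, a rough sketch of the per-frame pose update that the loop above defers to the render stage - the eyePoses buffer here is an assumption for illustration (the real poses come from whatever head-tracking query the render loop performs); only the RenderPose setter itself is LibOVR API:

    // Once this frame's tracking data is available (illustrative buffer, filled elsewhere):
    OVRPosef.Buffer eyePoses = OVRPosef.calloc(2);
    // ... copy this frame's left/right eye poses into eyePoses ...
    for (int eye = 0; eye < 2; eye++) {
        layer0.RenderPose(eye, eyePoses.get(eye)); // tell the compositor where each eye was when rendering
    }
    eyePoses.free();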
/**
* Create framebuffers bound to each of the eye textures
*/
public void setupFramebuffers() {
// Find the chain length
IntBuffer length = BufferUtils.createIntBuffer(1);
ovr_GetTextureSwapChainLength(session, chain, length);
int chainLength = length.get();
System.out.println("chain length=" + chainLength);
// Create the frame buffers
framebuffers = new FrameBuffer[chainLength];
for (int i = 0; i < chainLength; i++) {
// find the GL texture ID for this texture
IntBuffer textureIdB = BufferUtils.createIntBuffer(1);
OVRGL.ovr_GetTextureSwapChainBufferGL(session, chain, i, textureIdB);
int textureId = textureIdB.get();
// TODO less hacky way of getting our texture into JMonkeyEngine
Image img = new Image();
img.setId(textureId);
img.setFormat(Image.Format.RGBA8);
img.setWidth(textureW);
img.setHeight(textureH);
Texture2D tex = new Texture2D(img);
FrameBuffer buffer = new FrameBuffer(textureW, textureH, 1);
buffer.setDepthBuffer(Image.Format.Depth);
buffer.setColorTexture(tex);
framebuffers[i] = buffer;
}
}
// UTILITIES
// TODO move to helper class
@@ -382,16 +543,21 @@ public class OculusVR implements VRAPI {
}
// Getters, intended for VRViewManager.
-public OVRHmdDesc getHmdDesc() {
-return hmdDesc;
+public long getSessionPointer() {
+return session;
}
-public OVRFovPort[] getFovPorts() {
-return fovPorts;
+public long getChain() {
+return chain;
}
-public long getSessionPointer() {
-return session;
+public FrameBuffer[] getFramebuffers() {
+return framebuffers;
}
+public PointerBuffer getLayers() {
+return layers;
+}
}

@@ -65,24 +65,6 @@ public class VRViewManagerOculus extends AbstractVRViewManager {
private final VREnvironment environment;
private final OculusVR hardware;
// The size of the texture drawn onto the HMD
private int textureW;
private int textureH;
// Layers to render into
private PointerBuffer layers;
private OVRLayerEyeFov layer0;
/**
* Chain texture set thing.
*/
private long chain;
/**
* Frame buffers we can draw into.
*/
private FrameBuffer framebuffers[];
public VRViewManagerOculus(VREnvironment environment) {
this.environment = environment;
@@ -101,124 +83,12 @@ public class VRViewManagerOculus extends AbstractVRViewManager {
@Override
public void initialize() {
setupCamerasAndViews();
findHMDTextureSize();
setupLayers();
setupFramebuffers();
}
private void findHMDTextureSize() {
OVRFovPort fovPorts[] = hardware.getFovPorts();
// Texture sizes
float pixelScaling = 1.0f; // pixelsPerDisplayPixel
OVRSizei leftTextureSize = OVRSizei.malloc();
ovr_GetFovTextureSize(session(), ovrEye_Left, fovPorts[ovrEye_Left], pixelScaling, leftTextureSize);
System.out.println("leftTextureSize W=" + leftTextureSize.w() + ", H=" + leftTextureSize.h());
OVRSizei rightTextureSize = OVRSizei.malloc();
ovr_GetFovTextureSize(session(), ovrEye_Right, fovPorts[ovrEye_Right], pixelScaling, rightTextureSize);
System.out.println("rightTextureSize W=" + rightTextureSize.w() + ", H=" + rightTextureSize.h());
textureW = leftTextureSize.w() + rightTextureSize.w();
textureH = Math.max(leftTextureSize.h(), rightTextureSize.h());
leftTextureSize.free();
rightTextureSize.free();
}
private long session() {
return hardware.getSessionPointer();
}
private PointerBuffer setupTextureChain() {
// Set up the information for the texture buffer chain thing
OVRTextureSwapChainDesc swapChainDesc = OVRTextureSwapChainDesc.calloc()
.Type(ovrTexture_2D)
.ArraySize(1)
.Format(OVR_FORMAT_R8G8B8A8_UNORM_SRGB)
.Width(textureW)
.Height(textureH)
.MipLevels(1)
.SampleCount(1)
.StaticImage(false); // ovrFalse
// Create the chain
PointerBuffer textureSetPB = createPointerBuffer(1);
if (OVRGL.ovr_CreateTextureSwapChainGL(session(), swapChainDesc, textureSetPB) != ovrSuccess) {
throw new RuntimeException("Failed to create Swap Texture Set");
}
chain = textureSetPB.get(0);
swapChainDesc.free();
System.out.println("done chain creation");
return textureSetPB;
}
private void setupLayers() {
PointerBuffer chainPtr = setupTextureChain();
//Layers
layer0 = OVRLayerEyeFov.calloc();
layer0.Header().Type(ovrLayerType_EyeFov);
layer0.Header().Flags(ovrLayerFlag_TextureOriginAtBottomLeft);
for (int eye = 0; eye < 2; eye++) {
OVRRecti viewport = OVRRecti.calloc();
viewport.Pos().x(0);
viewport.Pos().y(0);
viewport.Size().w(textureW);
viewport.Size().h(textureH);
layer0.ColorTexture(chainPtr);
layer0.Viewport(eye, viewport);
layer0.Fov(eye, hardware.getFovPorts()[eye]);
viewport.free();
// we update pose only when we have it in the render loop
}
layers = createPointerBuffer(1);
layers.put(0, layer0);
}
/**
* Create framebuffers bound to each of the eye textures
*/
private void setupFramebuffers() {
// Find the chain length
IntBuffer length = BufferUtils.createIntBuffer(1);
ovr_GetTextureSwapChainLength(session(), chain, length);
int chainLength = length.get();
System.out.println("chain length=" + chainLength);
// Create the frame buffers
framebuffers = new FrameBuffer[chainLength];
for (int i = 0; i < chainLength; i++) {
// find the GL texture ID for this texture
IntBuffer textureIdB = BufferUtils.createIntBuffer(1);
OVRGL.ovr_GetTextureSwapChainBufferGL(session(), chain, i, textureIdB);
int textureId = textureIdB.get();
// TODO less hacky way of getting our texture into JMonkeyEngine
Image img = new Image();
img.setId(textureId);
img.setFormat(Image.Format.RGBA8);
img.setWidth(textureW);
img.setHeight(textureH);
Texture2D tex = new Texture2D(img);
FrameBuffer buffer = new FrameBuffer(textureW, textureH, 1);
buffer.setDepthBuffer(Image.Format.Depth);
buffer.setColorTexture(tex);
framebuffers[i] = buffer;
}
}
@Override
public void update(float tpf) {
// TODO
@@ -234,10 +104,10 @@ public class VRViewManagerOculus extends AbstractVRViewManager {
// layer0.RenderPose(eye, eyePose);
IntBuffer currentIndexB = BufferUtils.createIntBuffer(1);
-ovr_GetTextureSwapChainCurrentIndex(session(), chain, currentIndexB);
+ovr_GetTextureSwapChainCurrentIndex(session(), hardware.getChain(), currentIndexB);
int index = currentIndexB.get();
-(eye == 0 ? leftViewPort : rightViewPort).setOutputFrameBuffer(framebuffers[index]);
+(eye == 0 ? leftViewPort : rightViewPort).setOutputFrameBuffer(hardware.getFramebuffers()[index]);
}
// Now the game will render into the buffers given to us by LibOVR
@@ -246,10 +116,10 @@ public class VRViewManagerOculus extends AbstractVRViewManager {
@Override
public void postRender() {
// We're done with our textures now - the game is done drawing into them.
-ovr_CommitTextureSwapChain(session(), chain);
+ovr_CommitTextureSwapChain(session(), hardware.getChain());
// Send the result to the HMD
-int result = ovr_SubmitFrame(session(), 0, null, layers);
+int result = ovr_SubmitFrame(session(), 0, null, hardware.getLayers());
if (result != ovrSuccess) {
throw new IllegalStateException("Failed to submit frame!");
}
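One possible refinement (a suggestion, not part of this commit): ovr_SubmitFrame can also return ovrSuccess_NotVisible - for example while the headset is not being worn - which the strict equality check above treats as a failure. A more tolerant sketch, assuming ovrSuccess_NotVisible is statically imported from OVRErrorCode:

    int result = ovr_SubmitFrame(session(), 0, null, hardware.getLayers());
    if (result == ovrSuccess_NotVisible) {
        // Frame was accepted but not displayed; the app could skip rendering work until visible again.
    } else if (result != ovrSuccess) {
        throw new IllegalStateException("Failed to submit frame! Result: " + result);
    }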
