commit c460593000
@@ -0,0 +1,661 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.jme3.input.vr;

import com.jme3.app.VREnvironment;
import com.jme3.math.*;
import com.jme3.renderer.Camera;
import com.jme3.texture.*;
import org.lwjgl.*;
import org.lwjgl.ovr.*;

import java.nio.IntBuffer;
import java.util.logging.Logger;

import static org.lwjgl.BufferUtils.createPointerBuffer;
import static org.lwjgl.ovr.OVR.*;
import static org.lwjgl.ovr.OVRErrorCode.ovrSuccess;
import static org.lwjgl.ovr.OVRUtil.ovr_Detect;
import static org.lwjgl.system.MemoryUtil.*;

/**
 * Oculus VR (LibOVR 1.3.0) Native support.
 * <p>
 * A few notes about the Oculus coordinate system:
 * <ul>
 * <li>Matrices should be transposed</li>
 * <li>Quaternions should be inverted</li>
 * <li>Vectors should have their X and Z axes flipped, but apparently not Y.</li>
 * </ul>
 *
 * @author Campbell Suter <znix@znix.xyz>
 */
public class OculusVR implements VRAPI {

    private static final Logger LOGGER = Logger.getLogger(OculusVR.class.getName());

    private final VREnvironment environment;
    private boolean initialized;

    /**
     * Pointer to the HMD object
     */
    private long session;

    /**
     * Information about the VR session (should the app quit, is
     * it visible or is the universal menu open, etc)
     */
    private OVRSessionStatus sessionStatus;

    /**
     * HMD information, such as product name and manufacturer.
     */
    private OVRHmdDesc hmdDesc;

    /**
     * The horizontal resolution of the HMD
     */
    private int resolutionW;

    /**
     * The vertical resolution of the HMD
     */
    private int resolutionH;

    /**
     * Field-of-view data for each eye (how many degrees from the
     * center can the user see).
     */
    private final OVRFovPort[] fovPorts = new OVRFovPort[2];

    /**
     * Data about each eye to be rendered - in particular, the
     * offset from the center of the HMD to the eye.
     */
    private final OVREyeRenderDesc[] eyeRenderDesc = new OVREyeRenderDesc[2];

    /**
     * Store the projections for each eye, so we don't have to malloc
     * and recalculate them each frame.
     */
    private final OVRMatrix4f[] projections = new OVRMatrix4f[2];

    /**
     * Store the poses for each eye, relative to the HMD.
     *
     * @see #getHMDMatrixPoseLeftEye()
     */
    private final Matrix4f[] hmdRelativeEyePoses = new Matrix4f[2];

    /**
     * Store the positions for each eye, relative to the HMD.
     *
     * @see #getHMDVectorPoseLeftEye()
     */
    private final Vector3f[] hmdRelativeEyePositions = new Vector3f[2];

    /**
     * The current state of the tracked components (HMD, touch)
     */
    private OVRTrackingState trackingState;

    /**
     * The position and orientation of the user's head.
     */
    private OVRPosef headPose;

    /**
     * The state of the Touch controllers.
     */
    private OculusVRInput input;

    // The size of the texture drawn onto the HMD
    private int textureW;
    private int textureH;

    // Layers to render into
    private PointerBuffer layers;
    private OVRLayerEyeFov layer0;

    /**
     * The texture swap chains (one per eye) that we render into.
     */
    private long[] chains;

    /**
     * Frame buffers we can draw into.
     */
    private FrameBuffer[][] framebuffers;

    public OculusVR(VREnvironment environment) {
        this.environment = environment;
    }

    @Override
    public OculusVRInput getVRinput() {
        return input;
    }

    @Override
    public String getName() {
        return "OVR";
    }

    @Override
    public int getDisplayFrequency() {
        // TODO find correct frequency. I'm not sure
        // if LibOVR has a way to do that, though.
        return 60;
    }

    @Override
    public boolean initialize() {
        // Check to make sure the HMD is connected
        OVRDetectResult detect = OVRDetectResult.calloc();
        ovr_Detect(0, detect);
        boolean connected = detect.IsOculusHMDConnected();
        LOGGER.config("OVRDetectResult.IsOculusHMDConnected = " + connected);
        LOGGER.config("OVRDetectResult.IsOculusServiceRunning = " + detect.IsOculusServiceRunning());
        detect.free();

        if (!connected) {
            LOGGER.info("Oculus Rift not connected");
            return false;
        }

        initialized = true;

        // Set up the HMD
        OVRLogCallback callback = new OVRLogCallback() {
            @Override
            public void invoke(long userData, int level, long message) {
                LOGGER.fine("LibOVR [" + userData + "] [" + level + "] " + memASCII(message));
            }
        };
        OVRInitParams initParams = OVRInitParams.calloc();
        initParams.LogCallback(callback);
        if (ovr_Initialize(initParams) != ovrSuccess) {
            LOGGER.severe("LibOVR Init Failed");
            return false; // TODO fix memory leak - destroy() is not called
        }
        LOGGER.config("LibOVR Version " + ovr_GetVersionString());
        initParams.free();

        // Get access to the HMD
        LOGGER.info("Initialize HMD Session");
        PointerBuffer pHmd = memAllocPointer(1);
        OVRGraphicsLuid luid = OVRGraphicsLuid.calloc();
        if (ovr_Create(pHmd, luid) != ovrSuccess) {
            LOGGER.severe("Failed to create HMD");
            return false; // TODO fix memory leak - destroy() is not called
        }
        session = pHmd.get(0);
        memFree(pHmd);
        luid.free();
        sessionStatus = OVRSessionStatus.calloc();

        // Get the information about the HMD
        LOGGER.fine("Get HMD properties");
        hmdDesc = OVRHmdDesc.malloc();
        ovr_GetHmdDesc(session, hmdDesc);
        if (hmdDesc.Type() == ovrHmd_None) {
            LOGGER.warning("No HMD connected");
            return false; // TODO fix memory leak - destroy() is not called
        }

        resolutionW = hmdDesc.Resolution().w();
        resolutionH = hmdDesc.Resolution().h();

        LOGGER.config("HMD Properties: "
                + "\t Manufacturer: " + hmdDesc.ManufacturerString()
                + "\t Product: " + hmdDesc.ProductNameString()
                + "\t Serial: <hidden>" // + hmdDesc.SerialNumberString() // Hidden for privacy reasons
                + "\t Type: " + hmdDesc.Type()
                + "\t Resolution (total): " + resolutionW + "," + resolutionH);

        if (resolutionW == 0) {
            LOGGER.severe("HMD width=0 : aborting");
            return false; // TODO fix memory leak - destroy() is not called
        }

        // Find the FOV for each eye
        for (int eye = 0; eye < 2; eye++) {
            fovPorts[eye] = hmdDesc.DefaultEyeFov(eye);
        }

        // Get the pose for each eye, and cache it for later.
        for (int eye = 0; eye < 2; eye++) {
            // Create the projection objects
            projections[eye] = OVRMatrix4f.malloc();
            hmdRelativeEyePoses[eye] = new Matrix4f();
            hmdRelativeEyePositions[eye] = new Vector3f();

            // Find the eye render information - we use this in the
            // view manager for giving LibOVR its timewarp information.
            eyeRenderDesc[eye] = OVREyeRenderDesc.malloc();
            ovr_GetRenderDesc(session, eye, fovPorts[eye], eyeRenderDesc[eye]);

            // Get the pose of the eye
            OVRPosef pose = eyeRenderDesc[eye].HmdToEyePose();

            // Get the position and rotation of the eye
            vecO2J(pose.Position(), hmdRelativeEyePositions[eye]);
            Quaternion rotation = quatO2J(pose.Orientation(), new Quaternion());

            // Put it into a matrix for the get eye pose functions
            hmdRelativeEyePoses[eye].loadIdentity();
            hmdRelativeEyePoses[eye].setTranslation(hmdRelativeEyePositions[eye]);
            hmdRelativeEyePoses[eye].setRotationQuaternion(rotation);
        }

        // Recenter the HMD. The game itself should do this too, but just in case / before they do.
        reset();

        // Do this so others relying on our texture size (the GUI in particular) get it correct.
        findHMDTextureSize();

        // Allocate the memory for the tracking state - we actually
        // set it up later, but Input uses it so calloc it now.
        trackingState = OVRTrackingState.calloc();

        // Set up the input
        input = new OculusVRInput(this, session, sessionStatus, trackingState);

        // TODO find some way to get in ovrTrackingOrigin_FloorLevel

        // throw new UnsupportedOperationException("Not yet implemented!");
        return true;
    }

    @Override
    public void updatePose() {
        double ftiming = ovr_GetPredictedDisplayTime(session, 0);
        ovr_GetTrackingState(session, ftiming, true, trackingState);
        ovr_GetSessionStatus(session, sessionStatus);

        input.updateControllerStates();

        headPose = trackingState.HeadPose().ThePose();
    }

    @Override
    public boolean isInitialized() {
        return initialized;
    }

    @Override
    public void destroy() {
        // fovPorts: contents are managed by LibOVR, no need to do anything.

        // Clean up the input
        input.dispose();

        // Check if we've set up rendering - if so, clean that up.
        if (chains != null) {
            // Destroy our set of huge buffer images.
            for (long chain : chains) {
                ovr_DestroyTextureSwapChain(session, chain);
            }

            // Free up the layer
            layer0.free();

            // The layers array apparently takes care of itself (and crashes if we try to free it)
        }

        for (OVREyeRenderDesc eye : eyeRenderDesc) {
            eye.free();
        }
        for (OVRMatrix4f projection : projections) {
            projection.free();
        }

        hmdDesc.free();
        trackingState.free();
        sessionStatus.free();

        // Wrap everything up
        ovr_Destroy(session);
        ovr_Shutdown();
    }

    @Override
    public void reset() {
        // Reset the coordinate system - where the user's head is now is facing forwards from [0,0,0]
        ovr_RecenterTrackingOrigin(session);
    }

    @Override
    public void getRenderSize(Vector2f store) {
        if (!isInitialized()) {
            throw new IllegalStateException("Cannot call getRenderSize() before initialized!");
        }
        store.x = textureW;
        store.y = textureH;
    }

    @Override
    public float getInterpupillaryDistance() {
        return 0.065f; // TODO
    }

    @Override
    public Quaternion getOrientation() {
        return quatO2J(headPose.Orientation(), new Quaternion());
    }

    @Override
    public Vector3f getPosition() {
        return vecO2J(headPose.Position(), new Vector3f());
    }

    @Override
    public void getPositionAndOrientation(Vector3f storePos, Quaternion storeRot) {
        storePos.set(getPosition());
        storeRot.set(getOrientation());
    }

    private Matrix4f calculateProjection(int eye, Camera cam) {
        Matrix4f mat = new Matrix4f();

        // Get LibOVR to find the correct projection
        OVRUtil.ovrMatrix4f_Projection(fovPorts[eye], cam.getFrustumNear(), cam.getFrustumFar(), OVRUtil.ovrProjection_None, projections[eye]);

        matrixO2J(projections[eye], mat);

        return mat;
    }

    @Override
    public Matrix4f getHMDMatrixProjectionLeftEye(Camera cam) {
        return calculateProjection(ovrEye_Left, cam);
    }

    @Override
    public Matrix4f getHMDMatrixProjectionRightEye(Camera cam) {
        return calculateProjection(ovrEye_Right, cam);
    }

    @Override
    public Vector3f getHMDVectorPoseLeftEye() {
        return hmdRelativeEyePositions[ovrEye_Left];
    }

    @Override
    public Vector3f getHMDVectorPoseRightEye() {
        return hmdRelativeEyePositions[ovrEye_Right];
    }

    @Override
    public Vector3f getSeatedToAbsolutePosition() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Matrix4f getHMDMatrixPoseLeftEye() {
        return hmdRelativeEyePoses[ovrEye_Left];
    }

    @Override
    public Matrix4f getHMDMatrixPoseRightEye() {
        return hmdRelativeEyePoses[ovrEye_Right];
    }

    @Override
    public HmdType getType() {
        return HmdType.OCULUS_RIFT;
    }

    public boolean initVRCompositor(boolean set) {
        if (!set) {
            throw new UnsupportedOperationException("Cannot use LibOVR without compositor!");
        }

        setupLayers();

        framebuffers = new FrameBuffer[2][];
        for (int eye = 0; eye < 2; eye++)
            setupFramebuffers(eye);

        // TODO move initialization code here from VRViewManagerOculus
        return true;
    }

    public void printLatencyInfoToConsole(boolean set) {
        throw new UnsupportedOperationException("Not yet implemented!");
    }

    public void setFlipEyes(boolean set) {
        throw new UnsupportedOperationException("Not yet implemented!");
    }

    public Void getCompositor() {
        throw new UnsupportedOperationException("Not yet implemented!");
    }

    public Void getVRSystem() {
        throw new UnsupportedOperationException("Not yet implemented!");
    }

    // Rendering-type stuff

    public void findHMDTextureSize() {
        // Texture sizes
        float pixelScaling = 1.0f; // pixelsPerDisplayPixel

        OVRSizei leftTextureSize = OVRSizei.malloc();
        ovr_GetFovTextureSize(session, ovrEye_Left, fovPorts[ovrEye_Left], pixelScaling, leftTextureSize);

        OVRSizei rightTextureSize = OVRSizei.malloc();
        ovr_GetFovTextureSize(session, ovrEye_Right, fovPorts[ovrEye_Right], pixelScaling, rightTextureSize);

        if (leftTextureSize.w() != rightTextureSize.w()) {
            throw new IllegalStateException("Texture sizes do not match [horizontal]");
        }
        if (leftTextureSize.h() != rightTextureSize.h()) {
            throw new IllegalStateException("Texture sizes do not match [vertical]");
        }

        textureW = leftTextureSize.w();
        textureH = leftTextureSize.h();

        leftTextureSize.free();
        rightTextureSize.free();
    }

    private long setupTextureChain() {
        // Set up the information for the texture swap chain
        OVRTextureSwapChainDesc swapChainDesc = OVRTextureSwapChainDesc.calloc()
                .Type(ovrTexture_2D)
                .ArraySize(1)
                .Format(OVR_FORMAT_R8G8B8A8_UNORM_SRGB)
                .Width(textureW)
                .Height(textureH)
                .MipLevels(1)
                .SampleCount(1)
                .StaticImage(false); // ovrFalse

        // Create the chain
        PointerBuffer textureSetPB = createPointerBuffer(1);
        if (OVRGL.ovr_CreateTextureSwapChainGL(session, swapChainDesc, textureSetPB) != ovrSuccess) {
            throw new RuntimeException("Failed to create Swap Texture Set");
        }
        swapChainDesc.free();

        return textureSetPB.get(); // TODO is this a memory leak?
    }

    public void setupLayers() {
        // Layers
        layer0 = OVRLayerEyeFov.calloc();
        layer0.Header().Type(ovrLayerType_EyeFov);
        layer0.Header().Flags(ovrLayerFlag_TextureOriginAtBottomLeft);

        chains = new long[2];
        for (int eye = 0; eye < 2; eye++) {
            long eyeChain = setupTextureChain();
            chains[eye] = eyeChain;

            OVRRecti viewport = OVRRecti.calloc();
            viewport.Pos().x(0);
            viewport.Pos().y(0);
            viewport.Size().w(textureW);
            viewport.Size().h(textureH);

            layer0.ColorTexture(eye, eyeChain);
            layer0.Viewport(eye, viewport);
            layer0.Fov(eye, fovPorts[eye]);

            viewport.free();
            // we update pose only when we have it in the render loop
        }

        layers = createPointerBuffer(1);
        layers.put(0, layer0);
    }

    /**
     * Create a framebuffer for an eye.
     */
    public void setupFramebuffers(int eye) {
        // Find the chain length
        IntBuffer length = BufferUtils.createIntBuffer(1);
        ovr_GetTextureSwapChainLength(session, chains[eye], length);
        int chainLength = length.get();

        LOGGER.fine("HMD Eye #" + eye + " texture chain length: " + chainLength);

        // Create the frame buffers
        framebuffers[eye] = new FrameBuffer[chainLength];
        for (int i = 0; i < chainLength; i++) {
            // find the GL texture ID for this texture
            IntBuffer textureIdB = BufferUtils.createIntBuffer(1);
            OVRGL.ovr_GetTextureSwapChainBufferGL(session, chains[eye], i, textureIdB);
            int textureId = textureIdB.get();

            // TODO less hacky way of getting our texture into JMonkeyEngine
            Image img = new Image();
            img.setId(textureId);
            img.setFormat(Image.Format.RGBA8);
            img.setWidth(textureW);
            img.setHeight(textureH);

            Texture2D tex = new Texture2D(img);

            FrameBuffer buffer = new FrameBuffer(textureW, textureH, 1);
            buffer.setDepthBuffer(Image.Format.Depth);
            buffer.setColorTexture(tex);

            framebuffers[eye][i] = buffer;
        }
    }

    // UTILITIES
    // TODO move to a helper class

    /**
     * Copy the values from a LibOVR matrix into a jMonkeyEngine matrix.
     *
     * @param from The matrix to copy from.
     * @param to   The matrix to copy to.
     * @return The {@code to} argument.
     */
    public static Matrix4f matrixO2J(OVRMatrix4f from, Matrix4f to) {
        to.loadIdentity(); // For the additional columns (unless I'm badly misunderstanding matrices)

        for (int x = 0; x < 4; x++) {
            for (int y = 0; y < 4; y++) {
                float val = from.M(x + y * 4); // TODO verify this
                to.set(x, y, val);
            }
        }

        to.transposeLocal(); // jME vs LibOVR coordinate spaces - Yay!

        return to;
    }

    /**
     * Copy the values from a LibOVR quaternion into a jMonkeyEngine quaternion.
     *
     * @param from The quaternion to copy from.
     * @param to   The quaternion to copy to.
     * @return The {@code to} argument.
     */
    public static Quaternion quatO2J(OVRQuatf from, Quaternion to) {
        // jME and LibOVR do their coordinate spaces differently for rotations, so flip Y and W (thanks, jMonkeyVR).
        to.set(
                from.x(),
                -from.y(),
                from.z(),
                -from.w()
        );

        to.normalizeLocal();

        return to;
    }
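
    // Negating Y and W gives the same rotation as negating X and Z (q and -q describe the same
    // rotation), which amounts to conjugating by a half-turn about Y. That matches the X/Z axis
    // flip vecO2J applies to positions, so converted rotations and translations stay consistent.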

    /**
     * Copy the values from a LibOVR vector into a jMonkeyEngine vector.
     *
     * @param from The vector to copy from.
     * @param to   The vector to copy to.
     * @return The {@code to} argument.
     */
    public static Vector3f vecO2J(OVRVector3f from, Vector3f to) {
        // jME and LibOVR disagree on which way X and Z point, too.
        to.set(
                -from.x(),
                from.y(),
                -from.z()
        );

        return to;
    }

    // Getters, intended for VRViewManager.

    public long getSessionPointer() {
        return session;
    }

    public long getChain(int eye) {
        return chains[eye];
    }

    public FrameBuffer[] getFramebuffers(int eye) {
        return framebuffers[eye];
    }

    public PointerBuffer getLayers() {
        return layers;
    }

    public OVRLayerEyeFov getLayer0() {
        return layer0;
    }

    public OVRFovPort getFovPort() {
        return fovPorts[ovrEye_Left]; // TODO check that the left and right eyes match
    }

    public OVRPosef getHeadPose() {
        return headPose;
    }

    public OVRPosef getEyePose(int eye) {
        return eyeRenderDesc[eye].HmdToEyePose();
    }

    public VREnvironment getEnvironment() {
        return environment;
    }
}

/* vim: set ts=4 softtabstop=0 sw=4 expandtab: */
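
A minimal sketch of how the swap chains set up above are consumed each frame (the real wiring is in
VRViewManagerOculus.render() and postRender() further down; oculus, eye and viewPort are assumed locals):

    // Ask LibOVR which image in the chain to render into for this eye
    IntBuffer indexBuffer = BufferUtils.createIntBuffer(1);
    ovr_GetTextureSwapChainCurrentIndex(oculus.getSessionPointer(), oculus.getChain(eye), indexBuffer);
    viewPort.setOutputFrameBuffer(oculus.getFramebuffers(eye)[indexBuffer.get()]);
    // ... jME renders the scene into that framebuffer ...
    ovr_CommitTextureSwapChain(oculus.getSessionPointer(), oculus.getChain(eye));
    // Once per frame, after both eyes are committed:
    ovr_SubmitFrame(oculus.getSessionPointer(), 0, null, oculus.getLayers());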
@@ -0,0 +1,367 @@
package com.jme3.input.vr;

import com.jme3.app.VREnvironment;
import com.jme3.math.*;
import com.jme3.renderer.Camera;
import com.jme3.scene.Spatial;
import com.jme3.util.VRViewManagerOculus;
import org.lwjgl.ovr.*;

import static org.lwjgl.ovr.OVR.*;

public class OculusVRInput implements VRInputAPI {
    // State control
    private final OVRInputState inputState;
    private final OVRSessionStatus sessionStatus;
    private final OVRTrackingState trackingState;
    private final OculusVR hardware;
    private long session;

    // Setup values
    private float axisMultiplier = 1;

    // Cached stuff
    private int buttons, touch;

    // Used to calculate sinceLastCall stuff
    private int lastButtons, lastTouch;
    private final Vector2f[][] lastAxises;

    /**
     * The state data (linear and angular velocity and acceleration) for each hand
     */
    private OVRPoseStatef[] handStates;

    /**
     * The position and orientation of the Touch controllers.
     */
    private OVRPosef[] handPoses;

    /**
     * The object forms of the tracked controllers.
     */
    private final OculusController[] controllers = {
            new OculusController(0),
            new OculusController(1)
    };

    public OculusVRInput(OculusVR hardware, long session,
                         OVRSessionStatus sessionStatus, OVRTrackingState trackingState) {
        this.hardware = hardware;
        this.session = session;
        this.sessionStatus = sessionStatus;
        this.trackingState = trackingState;

        inputState = OVRInputState.calloc();

        handStates = new OVRPoseStatef[ovrHand_Count];
        handPoses = new OVRPosef[handStates.length];
        lastAxises = new Vector2f[handStates.length][3]; // trigger+grab+thumbstick for each hand.
    }

    public void dispose() {
        inputState.free();
        session = 0; // Better to crash than risk undefined behaviour if this object is incorrectly accessed again.
    }

    @Override
    public void updateControllerStates() {
        // Handle buttons, axes
        ovr_GetInputState(session, ovrControllerType_Touch, inputState);
        buttons = inputState.Buttons();
        touch = inputState.Touches();

        // Get the touch controller poses
        // TODO what if no touch controllers are available?
        for (int hand = 0; hand < handPoses.length; hand++) {
            handStates[hand] = trackingState.HandPoses(hand);
            handPoses[hand] = handStates[hand].ThePose();
        }
    }

    private Vector3f cv(OVRVector3f in) {
        // TODO do we want to reuse vectors rather than making new ones?
        // TODO OpenVRInput does this, but it will probably cause some bugs.
        return OculusVR.vecO2J(in, new Vector3f()); // This also fixes the coordinate space transform issues.
    }

    private Vector2f cv(OVRVector2f in) {
        // TODO do we want to reuse vectors rather than making new ones?
        // TODO OpenVRInput does this, but it will probably cause some bugs.
        return new Vector2f(in.x(), in.y());
    }

    private Quaternion cq(OVRQuatf in) {
        // TODO do we want to reuse quaternions rather than making new ones?
        // TODO OpenVRInput does this, but it will probably cause some bugs.
        return OculusVR.quatO2J(in, new Quaternion()); // This also fixes the coordinate space transform issues.
    }

    private Vector2f axis(float input) {
        // See above comments about reusing vectors
        return new Vector2f(input, input);
    }

    // Tracking (position, rotation, velocity, status)

    @Override
    public Vector3f getPosition(int index) {
        return cv(handPoses[index].Position());
    }

    @Override
    public Vector3f getVelocity(int controllerIndex) {
        return cv(handStates[controllerIndex].LinearVelocity());
    }

    @Override
    public Quaternion getOrientation(int index) {
        return cq(handPoses[index].Orientation());
    }

    @Override
    public Vector3f getAngularVelocity(int controllerIndex) {
        return cv(handStates[controllerIndex].AngularVelocity());
    }

    @Override
    public Quaternion getFinalObserverRotation(int index) {
        // Copied from OpenVRInput

        VREnvironment env = hardware.getEnvironment();
        VRViewManagerOculus vrvm = (VRViewManagerOculus) hardware.getEnvironment().getVRViewManager();

        Object obs = env.getObserver();
        Quaternion tempq = new Quaternion(); // TODO move to class scope?
        if (obs instanceof Camera) {
            tempq.set(((Camera) obs).getRotation());
        } else {
            tempq.set(((Spatial) obs).getWorldRotation());
        }

        return tempq.multLocal(getOrientation(index));
    }

    @Override
    public Vector3f getFinalObserverPosition(int index) {
        // Copied from OpenVRInput

        VREnvironment env = hardware.getEnvironment();
        VRViewManagerOculus vrvm = (VRViewManagerOculus) hardware.getEnvironment().getVRViewManager();

        Object obs = env.getObserver();
        Vector3f pos = getPosition(index);
        if (obs instanceof Camera) {
            ((Camera) obs).getRotation().mult(pos, pos);
            return pos.addLocal(((Camera) obs).getLocation());
        } else {
            ((Spatial) obs).getWorldRotation().mult(pos, pos);
            return pos.addLocal(((Spatial) obs).getWorldTranslation());
        }
    }

    @Override
    public boolean isInputDeviceTracking(int index) {
        int flags = trackingState.HandStatusFlags(index);
        return (flags & ovrStatus_PositionTracked) != 0; // TODO do we require orientation as well?
    }

    // Input Getters

    @Override
    public Vector2f getAxis(int controllerIndex, VRInputType forAxis) {
        Vector2f result = getAxisRaw(controllerIndex, forAxis);
        return result == null ? null : result.multLocal(axisMultiplier);
    }

    @Override
    public Vector2f getAxisRaw(int controllerIndex, VRInputType forAxis) {
        switch (forAxis) {
            case OculusThumbstickAxis:
                return cv(inputState.Thumbstick(controllerIndex));
            case OculusTriggerAxis:
                return axis(inputState.IndexTrigger(controllerIndex));
            case OculusGripAxis:
                return axis(inputState.HandTrigger(controllerIndex));
            default:
                return null;
        }
    }

    @Override
    public boolean isButtonDown(int controllerIndex, VRInputType checkButton) {
        return isButtonDownForStatus(controllerIndex, checkButton, buttons, touch);
    }

    public boolean isButtonDownForStatus(int controllerIndex, VRInputType checkButton, int buttons, int touch) {
        int buttonMask = (controllerIndex == ovrHand_Left) ? ovrButton_LMask : ovrButton_RMask;
        int touchMask = (controllerIndex == ovrHand_Left) ?
                (ovrTouch_LButtonMask + ovrTouch_LPoseMask) :
                (ovrTouch_RButtonMask + ovrTouch_RPoseMask);

        switch (checkButton) {
            default:
                return false;

            case OculusTopButton: // Physical buttons
            case OculusBottomButton:
            case OculusThumbstickButton:
            case OculusMenuButton:
                return (buttons & buttonMask & checkButton.getValue()) != 0;

            case OculusTopTouch: // Standard capacitive buttons
            case OculusBottomTouch:
            case OculusThumbstickTouch:
            case OculusThumbrestTouch:
            case OculusIndexTouch:
            case OculusThumbUp: // Calculated/virtual capacitive buttons
            case OculusIndexPointing:
                return (touch & touchMask & checkButton.getValue()) != 0;
        }
    }

    // Since-last-call stuff

    @Override
    public void resetInputSinceLastCall() {
        lastButtons = 0;
        lastTouch = 0;
    }

    @Override
    public boolean wasButtonPressedSinceLastCall(int controllerIndex, VRInputType checkButton) {
        boolean wasPressed = isButtonDownForStatus(controllerIndex, checkButton, lastButtons, lastTouch);
        lastButtons = buttons;
        lastTouch = touch;
        return !wasPressed && isButtonDown(controllerIndex, checkButton);
    }

    @Override
    public Vector2f getAxisDeltaSinceLastCall(int controllerIndex, VRInputType forAxis) {
        int index;
        switch (forAxis) {
            case OculusTriggerAxis:
                index = 0;
                break;
            case OculusGripAxis:
                index = 1;
                break;
            case OculusThumbstickAxis:
                index = 2;
                break;
            default:
                return null;
        }

        Vector2f last = lastAxises[controllerIndex][index];
        if (last == null) {
            last = lastAxises[controllerIndex][index] = new Vector2f();
        }

        Vector2f current = getAxis(controllerIndex, forAxis);

        // TODO could this lead to accuracy problems?
        current.subtractLocal(last);
        last.addLocal(current);

        return current;
    }
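
    // Example (with axisMultiplier == 1): if the thumbstick read (0.1, 0) on the previous call and
    // reads (0.3, 0) now, this returns (0.2, 0); calling again straight away returns (0, 0) until
    // the axis moves again.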

    // Misc

    @Override
    public boolean init() {
        throw new UnsupportedOperationException("Input initialized at creation time");
    }

    @Override
    public void updateConnectedControllers() {
        throw new UnsupportedOperationException("Automatically done by LibOVR (I think?)");
    }

    @Override
    public float getAxisMultiplier() {
        return axisMultiplier;
    }

    @Override
    public void setAxisMultiplier(float axisMultiplier) {
        this.axisMultiplier = axisMultiplier;
    }

    @Override
    public void triggerHapticPulse(int controllerIndex, float seconds) {
        // TODO: How do we time this so we can turn the feedback off?
    }

    @Override
    public boolean isInputFocused() {
        return sessionStatus.IsVisible(); // TODO do we need HmdMounted, or is it counted in IsVisible?
    }

    @Override
    public Object getRawControllerState(int index) {
        throw new UnsupportedOperationException("Cannot get raw controller state!");
    }

    @Override
    public void swapHands() {
        // Do nothing.
        // TODO OSVR and OpenVR (when it has more than two controllers) both do nothing here too,
        // TODO but shouldn't we be throwing an exception or something?
    }

    @Override
    public int getTrackedControllerCount() {
        // TODO: Shouldn't we be seeing if the user has the touch controllers first?
        return 2;
    }

    @Override
    public VRTrackedController getTrackedController(int index) {
        return controllers[index];
    }

    /**
     * The object form representation of a controller.
     */
    public class OculusController implements VRTrackedController {

        /**
         * The ID of the hand to track
         */
        private int hand;

        public OculusController(int hand) {
            this.hand = hand;
        }

        @Override
        public String getControllerName() {
            return "Touch"; // TODO
        }

        @Override
        public String getControllerManufacturer() {
            return "Oculus"; // TODO
        }

        @Override
        public Vector3f getPosition() {
            return OculusVRInput.this.getPosition(hand);
        }

        @Override
        public Quaternion getOrientation() {
            return OculusVRInput.this.getOrientation(hand);
        }

        @Override
        public Matrix4f getPose() {
            Matrix4f mat = new Matrix4f();
            mat.setRotationQuaternion(getOrientation());
            mat.setTranslation(getPosition());
            return mat;
        }
    }
}
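
A small usage sketch for the class above (assuming a VREnvironment named env whose attached hardware
is the OculusVR instance; OculusVR.updatePose() should already have run for the frame):

    VRInputAPI input = env.getVRHardware().getVRinput();
    Vector2f stick = input.getAxis(ovrHand_Left, VRInputType.OculusThumbstickAxis);
    if (input.wasButtonPressedSinceLastCall(ovrHand_Right, VRInputType.OculusBottomButton)) {
        // the bottom face button (A on the right controller) was newly pressed this frame
    }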
@@ -0,0 +1,340 @@
/*
 * Copyright (c) 2009-2017 jMonkeyEngine
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of 'jMonkeyEngine' nor the names of its contributors
 *   may be used to endorse or promote products derived from this software
 *   without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.jme3.util;

import com.jme3.app.VREnvironment;
import com.jme3.input.vr.OculusVR;
import com.jme3.input.vr.VRAPI;
import com.jme3.math.*;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Spatial;
import com.jme3.texture.*;

import java.nio.IntBuffer;
import java.util.Iterator;
import java.util.Objects;
import java.util.logging.Logger;

import org.lwjgl.ovr.*;

import static org.lwjgl.ovr.OVR.*;
import static org.lwjgl.ovr.OVRErrorCode.*;

/**
 * A rendering system for Oculus's LibOVR API.
 *
 * @author Campbell Suter <znix@znix.xyz>
 */
public class VRViewManagerOculus extends AbstractVRViewManager {

    private static final Logger LOG = Logger.getLogger(VRViewManagerOculus.class.getName());

    private final VREnvironment environment;
    private final OculusVR hardware;

    // Copied from OSVR
    // final & temp values for camera calculations
    private final Vector3f finalPosition = new Vector3f();
    private final Quaternion finalRotation = new Quaternion();
    private final Vector3f hmdPos = new Vector3f();
    private final Quaternion hmdRot = new Quaternion();

    public VRViewManagerOculus(VREnvironment environment) {
        this.environment = environment;

        VRAPI hardware = environment.getVRHardware();
        Objects.requireNonNull(hardware, "Attached VR Hardware cannot be null");
        if (!(hardware instanceof OculusVR)) {
            throw new IllegalStateException("Cannot use Oculus VR view manager on non-Oculus hardware state!");
        }

        this.hardware = (OculusVR) hardware;

        if (!environment.compositorAllowed()) {
            throw new UnsupportedOperationException("Cannot render without compositor on LibOVR");
        }
    }

    @Override
    public void initialize() {
        setupCamerasAndViews();

        if (environment.hasTraditionalGUIOverlay()) {

            environment.getVRMouseManager().initialize();

            // update the pose to position the gui correctly on start
            update(0f);
            environment.getVRGUIManager().positionGui();
        }
    }

    private long session() {
        return hardware.getSessionPointer();
    }

    @Override
    public void update(float tpf) {
        // TODO

        hardware.updatePose();

        // TODO deduplicate
        if (environment == null) {
            throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
        }

        // grab the observer
        Object obs = environment.getObserver();
        Quaternion objRot;
        Vector3f objPos;
        if (obs instanceof Camera) {
            objRot = ((Camera) obs).getRotation();
            objPos = ((Camera) obs).getLocation();
        } else {
            objRot = ((Spatial) obs).getWorldRotation();
            objPos = ((Spatial) obs).getWorldTranslation();
        }

        // update the HMD's position & orientation
        hardware.getPositionAndOrientation(hmdPos, hmdRot);
        if (obs != null) {
            // update hmdPos based on obs rotation
            finalRotation.set(objRot);
            finalRotation.mult(hmdPos, hmdPos);
            finalRotation.multLocal(hmdRot);
        }

        // Update both eye cameras
        finalizeCamera(hardware.getHMDVectorPoseLeftEye(), objPos, leftCamera);
        finalizeCamera(hardware.getHMDVectorPoseRightEye(), objPos, rightCamera);

        // Update the main camera, so it shows the same basic view the HMD is getting
        // TODO: Do this in VRAppState, so it works on all HMDs.
        // I only have a Rift, so I can't test it on anything else.
        if (!environment.isInstanceRendering()) { // We use the app camera as the left camera here
            // TODO: Double up on rendering and use one eye, to reduce GPU load rendering the scene again.
            // TODO: Snip at the image to remove the distorted corners from a very high FOV.
            finalizeCamera(Vector3f.ZERO, objPos, environment.getApplication().getCamera());
        }

        if (environment.hasTraditionalGUIOverlay()) {
            // update the mouse?
            environment.getVRMouseManager().update(tpf);

            // update GUI position?
            if (environment.getVRGUIManager().wantsReposition || environment.getVRGUIManager().getPositioningMode() != VRGUIPositioningMode.MANUAL) {
                environment.getVRGUIManager().positionGuiNow(tpf);
                environment.getVRGUIManager().updateGuiQuadGeometricState();
            }
        }
    }

    /**
     * Place the camera within the scene.
     *
     * @param eyePos      the eye position.
     * @param obsPosition the observer position.
     * @param cam         the camera to place.
     */
    private void finalizeCamera(Vector3f eyePos, Vector3f obsPosition, Camera cam) {
        finalRotation.mult(eyePos, finalPosition);
        finalPosition.addLocal(hmdPos);
        if (obsPosition != null) {
            finalPosition.addLocal(obsPosition);
        }
        finalPosition.y += getHeightAdjustment();
        cam.setFrame(finalPosition, finalRotation);
    }

    @Override
    public void render() {

        // Calculate the render pose (translation/rotation) for each eye.
        // LibOVR takes the difference between this and the real position of each eye at display time
        // to apply ATW (asynchronous timewarp).

        OVRPosef.Buffer hmdToEyeOffsets = OVRPosef.calloc(2);
        hmdToEyeOffsets.put(0, hardware.getEyePose(ovrEye_Left));
        hmdToEyeOffsets.put(1, hardware.getEyePose(ovrEye_Right));

        // calculate eye poses
        OVRUtil.ovr_CalcEyePoses(hardware.getHeadPose(), hmdToEyeOffsets, hardware.getLayer0().RenderPose());
        hmdToEyeOffsets.free();

        for (int eye = 0; eye < 2; eye++) {
            IntBuffer currentIndexB = BufferUtils.createIntBuffer(1);
            ovr_GetTextureSwapChainCurrentIndex(session(), hardware.getChain(eye), currentIndexB);
            int index = currentIndexB.get();

            // We are constantly (each frame) rotating through a series of
            // frame buffers, so make sure we write into the correct one.
            (eye == ovrEye_Left ? leftViewPort : rightViewPort).setOutputFrameBuffer(hardware.getFramebuffers(eye)[index]);
        }

        // Now the game will render into the buffers given to us by LibOVR
    }

    @Override
    public void postRender() {
        // We're done with our textures now - the game is done drawing into them.
        for (int eye = 0; eye < 2; eye++) {
            ovr_CommitTextureSwapChain(session(), hardware.getChain(eye));
        }

        // Send the result to the HMD
        int result = ovr_SubmitFrame(session(), 0, null, hardware.getLayers());
        if (result != ovrSuccess) {
            throw new IllegalStateException("Failed to submit frame!");
        }
    }

    /*
     *********************************************************
     *  Show's over, now it's just boring camera stuff etc.  *
     *********************************************************
     */

    /**
     * Set up the cameras and views for each eye and the mirror display.
     */
    private void setupCamerasAndViews() {
        // TODO: Use LibOVR IPD etc
        if (environment != null) {
            // get desired frustum from original camera
            Camera origCam = environment.getCamera();
            float fFar = origCam.getFrustumFar();
            float fNear = origCam.getFrustumNear();

            // restore frustum on distortion scene cam, if needed
            if (environment.isInstanceRendering()) {
                leftCamera = origCam;
            } else {
                leftCamera = origCam.clone();
            }

            OVRFovPort fp = hardware.getFovPort();
            float hFov = fp.LeftTan() + fp.RightTan();
            float vFov = fp.UpTan() + fp.DownTan();
            getLeftCamera().setFrustumPerspective(hFov / FastMath.TWO_PI * 360, vFov / hFov, fNear, fFar);
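            // Note: LeftTan()/RightTan() etc. are tangents of the half-angles, so the exact
            // horizontal FOV would be atan(LeftTan) + atan(RightTan). The perspective set here is
            // only a rough seed; when VR hardware is present the projection matrix is replaced just
            // below with the one LibOVR computes from the same fov port.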

            prepareCameraSize(getLeftCamera(), 1f);
            if (environment.getVRHardware() != null) {
                getLeftCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionLeftEye(getLeftCamera()));
            }
            //org.lwjgl.opengl.GL11.glEnable(org.lwjgl.opengl.GL30.GL_FRAMEBUFFER_SRGB);

            if (!environment.isInstanceRendering()) {
                leftViewPort = setupViewBuffers(getLeftCamera(), LEFT_VIEW_NAME);
                rightCamera = getLeftCamera().clone();
                if (environment.getVRHardware() != null) {
                    getRightCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionRightEye(getRightCamera()));
                }
                rightViewPort = setupViewBuffers(getRightCamera(), RIGHT_VIEW_NAME);
            } else if (environment.getApplication() != null) {
                throw new UnsupportedOperationException("Not yet implemented!");
            } else {
                throw new IllegalStateException("This VR environment is not attached to any application.");
            }

            // setup gui
            environment.getVRGUIManager().setupGui(getLeftCamera(), getRightCamera(), getLeftViewPort(), getRightViewPort());
        } else {
            throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
        }
    }

    private void prepareCameraSize(Camera cam, float xMult) {
        // TODO this function is identical to that in VRViewManagerOpenVR; merge the two.
        if (environment != null) {
            if (environment.getApplication() != null) {
                Vector2f size = new Vector2f();
                VRAPI vrhmd = environment.getVRHardware();

                if (vrhmd == null) {
                    size.x = 1280f;
                    size.y = 720f;
                } else {
                    vrhmd.getRenderSize(size);
                }

                if (size.x < environment.getApplication().getContext().getSettings().getWidth()) {
                    size.x = environment.getApplication().getContext().getSettings().getWidth();
                }
                if (size.y < environment.getApplication().getContext().getSettings().getHeight()) {
                    size.y = environment.getApplication().getContext().getSettings().getHeight();
                }

                if (environment.isInstanceRendering()) {
                    size.x *= 2f;
                }

                // other adjustments
                size.x *= xMult;
                size.x *= getResolutionMuliplier();
                size.y *= getResolutionMuliplier();

                if (cam.getWidth() != size.x || cam.getHeight() != size.y) {
                    cam.resize((int) size.x, (int) size.y, false);
                }
            } else {
                throw new IllegalStateException("This VR environment is not attached to any application.");
            }
        } else {
            throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
        }
    }

    private ViewPort setupViewBuffers(Camera cam, String viewName) {
        // TODO this function is identical to that in VRViewManagerOpenVR; merge the two.
        if (environment != null) {
            if (environment.getApplication() != null) {
                ViewPort viewPort = environment.getApplication().getRenderManager().createPreView(viewName, cam);
                viewPort.setClearFlags(true, true, true);
                viewPort.setBackgroundColor(ColorRGBA.Black);

                Iterator<Spatial> spatialIter = environment.getApplication().getViewPort().getScenes().iterator();
                while (spatialIter.hasNext()) {
                    viewPort.attachScene(spatialIter.next());
                }

                // The viewbuffer to render into will be set during prerender.
                return viewPort;
            } else {
                throw new IllegalStateException("This VR environment is not attached to any application.");
            }
        } else {
            throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
        }
    }
}