Make OculusVR.getHMDVectorPoseLeftEye return the HMD relative, not world relative, eye positions.

empirephoenix-patch-1
Campbell Suter 7 years ago
parent d7fc716065
commit 760277f61d
  1. jme3-vr/src/main/java/com/jme3/input/vr/OculusVR.java (51 lines changed)
  2. jme3-vr/src/main/java/com/jme3/util/VRViewManagerOculus.java (17 lines changed)
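For context on the behaviour change: getHMDVectorPoseLeftEye()/getHMDVectorPoseRightEye() now report each eye's offset in HMD space rather than a world-space position, so callers that want an eye's world position have to compose the offset with the tracked head pose themselves. A minimal sketch of that composition, assuming jME's Vector3f/Quaternion math (the class and method names below are illustrative, not part of the commit):

    import com.jme3.math.Quaternion;
    import com.jme3.math.Vector3f;

    public final class EyePositionSketch {
        /** World-space eye position from a head pose plus an HMD-relative eye offset. */
        public static Vector3f worldEyePosition(Vector3f headWorldPos, Quaternion headWorldRot,
                                                Vector3f hmdRelativeEyePos) {
            // Rotate the HMD-relative offset into world space, then add the head position.
            return headWorldRot.mult(hmdRelativeEyePos).addLocal(headWorldPos);
        }
    }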

jme3-vr/src/main/java/com/jme3/input/vr/OculusVR.java

@@ -95,14 +95,11 @@ public class OculusVR implements VRAPI {
     private final Matrix4f[] hmdRelativeEyePoses = new Matrix4f[2];
 
     /**
-     * The eye poses relative to the world, as used during rendering.
-     */
-    private final OVRPosef eyePosesPtr[] = new OVRPosef[2];
-
-    /**
-     * The eye positions relative to the world, as used by jME.
+     * Store the positions for each eye, relative to the HMD.
+     *
+     * @see #getHMDVectorPoseLeftEye()
      */
-    private final Vector3f eyePositions[] = new Vector3f[2];
+    private final Vector3f[] hmdRelativeEyePositions = new Vector3f[2];
 
     /**
      * The position and orientation of the user's head.
@@ -227,6 +224,9 @@ public class OculusVR implements VRAPI {
         for (int eye = 0; eye < 2; eye++) {
             projections[eye] = OVRMatrix4f.malloc();
             //1.3 was right handed, now none flag
+
+            hmdRelativeEyePoses[eye] = new Matrix4f();
+            hmdRelativeEyePositions[eye] = new Vector3f();
         }
 
         // step 6 - render desc
@@ -240,12 +240,11 @@ public class OculusVR implements VRAPI {
 
             OVRPosef pose = eyeRenderDesc[eye].HmdToEyePose();
 
-            Matrix4f jPose = new Matrix4f();
-            jPose.setTranslation(vecO2J(pose.Position(), new Vector3f()));
-            jPose.setRotationQuaternion(quatO2J(pose.Orientation(), new Quaternion()));
-            hmdRelativeEyePoses[eye] = jPose;
-
-            eyePositions[eye] = new Vector3f(); // Set the absolute position up for later.
+            vecO2J(pose.Position(), hmdRelativeEyePositions[eye]);
+
+            hmdRelativeEyePoses[eye].loadIdentity();
+            hmdRelativeEyePoses[eye].setTranslation(hmdRelativeEyePositions[eye]);
+            hmdRelativeEyePoses[eye].setRotationQuaternion(quatO2J(pose.Orientation(), new Quaternion()));
         }
 
         // step 7 - recenter
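The loop above captures LibOVR's HmdToEyePose once at initialisation, converting it into jME types via vecO2J/quatO2J. Those helpers are defined elsewhere in OculusVR.java and are not part of this diff; since LibOVR and jME both work in metres in a right-handed, Y-up space here, they are presumably plain component copies along these lines (a sketch, not the actual implementation):

    import com.jme3.math.Quaternion;
    import com.jme3.math.Vector3f;
    import org.lwjgl.ovr.OVRQuatf;
    import org.lwjgl.ovr.OVRVector3f;

    final class OvrConversionSketch {
        // Copy a LibOVR vector into the supplied jME vector and return it.
        static Vector3f vecO2J(OVRVector3f from, Vector3f to) {
            return to.set(from.x(), from.y(), from.z());
        }

        // Copy a LibOVR quaternion into the supplied jME quaternion and return it.
        static Quaternion quatO2J(OVRQuatf from, Quaternion to) {
            return to.set(from.x(), from.y(), from.z(), from.w());
        }
    }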
@@ -268,22 +267,6 @@ public class OculusVR implements VRAPI {
         //get head pose
         headPose = hmdState.HeadPose().ThePose();
         hmdState.free();
-
-        //build view offsets struct
-        OVRPosef.Buffer hmdToEyeOffsets = OVRPosef.calloc(2);
-        hmdToEyeOffsets.put(0, eyeRenderDesc[ovrEye_Left].HmdToEyePose());
-        hmdToEyeOffsets.put(1, eyeRenderDesc[ovrEye_Right].HmdToEyePose());
-
-        //calculate eye poses
-        OVRPosef.Buffer outEyePoses = OVRPosef.create(2);
-        OVRUtil.ovr_CalcEyePoses(headPose, hmdToEyeOffsets, outEyePoses);
-        hmdToEyeOffsets.free();
-
-        eyePosesPtr[ovrEye_Left] = outEyePoses.get(0);
-        eyePosesPtr[ovrEye_Right] = outEyePoses.get(1);
-        for (int i = 0; i < eyePosesPtr.length; i++) {
-            vecO2J(eyePosesPtr[i].Position(), eyePositions[i]);
-        }
     }
 
     @Override
@@ -383,12 +366,12 @@ public class OculusVR implements VRAPI {
 
     @Override
     public Vector3f getHMDVectorPoseLeftEye() {
-        return eyePositions[ovrEye_Left];
+        return hmdRelativeEyePositions[ovrEye_Left];
     }
 
     @Override
     public Vector3f getHMDVectorPoseRightEye() {
-        return eyePositions[ovrEye_Right];
+        return hmdRelativeEyePositions[ovrEye_Right];
    }
 
     @Override
@@ -640,8 +623,12 @@ public class OculusVR implements VRAPI {
 
         return fovPorts[ovrEye_Left]; // TODO checking the left and right eyes match
     }
 
-    public OVRPosef[] getEyePosesPtr() {
-        return eyePosesPtr;
+    public OVRPosef getHeadPose() {
+        return headPose;
+    }
+
+    public OVRPosef getEyePose(int eye) {
+        return eyeRenderDesc[eye].HmdToEyePose();
     }
 }

jme3-vr/src/main/java/com/jme3/util/VRViewManagerOculus.java

@@ -169,11 +169,20 @@ public class VRViewManagerOculus extends AbstractVRViewManager {
 
     @Override
     public void render() {
-        for (int eye = 0; eye < 2; eye++) {
-            // TODO do we need this? Don't we set the camera positions ourselves?
-            OVRPosef eyePose = hardware.getEyePosesPtr()[eye];
-            hardware.getLayer0().RenderPose(eye, eyePose);
+        // Calculate the render pose (translation/rotation) for each eye.
+        // LibOVR takes the difference between this and the real position of each eye at display time
+        // to apply ATW (asynchronous timewarp).
+        OVRPosef.Buffer hmdToEyeOffsets = OVRPosef.calloc(2);
+        hmdToEyeOffsets.put(0, hardware.getEyePose(ovrEye_Left));
+        hmdToEyeOffsets.put(1, hardware.getEyePose(ovrEye_Right));
 
+        //calculate eye poses
+        OVRUtil.ovr_CalcEyePoses(hardware.getHeadPose(), hmdToEyeOffsets, hardware.getLayer0().RenderPose());
+        hmdToEyeOffsets.free();
+
+        for (int eye = 0; eye < 2; eye++) {
             IntBuffer currentIndexB = BufferUtils.createIntBuffer(1);
             ovr_GetTextureSwapChainCurrentIndex(session(), hardware.getChain(eye), currentIndexB);
             int index = currentIndexB.get();
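With the per-frame calculation moved into render(), ovr_CalcEyePoses combines the tracked head pose with each eye's fixed HMD-to-eye offset and writes the results straight into the layer's RenderPose buffer. Conceptually that composition rotates the eye offset by the head orientation, adds the head position, and concatenates the orientations; a rough equivalent in jME math, with hypothetical names and not the code this commit uses, would be:

    import com.jme3.math.Quaternion;
    import com.jme3.math.Transform;
    import com.jme3.math.Vector3f;

    final class CalcEyePoseSketch {
        // What ovr_CalcEyePoses produces for one eye: the HMD-to-eye offset applied on top of the head pose.
        static Transform calcEyePose(Vector3f headPos, Quaternion headRot,
                                     Vector3f eyeOffsetPos, Quaternion eyeOffsetRot) {
            Vector3f eyePos = headRot.mult(eyeOffsetPos).addLocal(headPos);
            Quaternion eyeRot = headRot.mult(eyeOffsetRot);
            return new Transform(eyePos, eyeRot);
        }
    }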
