Merge pull request #752 from ZNixian/master

Add Oculus Rift support
Rickard Edén committed 7 years ago via GitHub
commit c460593000
  1. jme3-vr/build.gradle (6 changed lines)
  2. jme3-vr/src/main/java/com/jme3/app/VRAppState.java (21)
  3. jme3-vr/src/main/java/com/jme3/app/VRConstants.java (7)
  4. jme3-vr/src/main/java/com/jme3/app/VREnvironment.java (8)
  5. jme3-vr/src/main/java/com/jme3/input/vr/OculusVR.java (661)
  6. jme3-vr/src/main/java/com/jme3/input/vr/OculusVRInput.java (367)
  7. jme3-vr/src/main/java/com/jme3/input/vr/VRAPI.java (3)
  8. jme3-vr/src/main/java/com/jme3/input/vr/VRInputType.java (100)
  9. jme3-vr/src/main/java/com/jme3/util/AbstractVRViewManager.java (4)
  10. jme3-vr/src/main/java/com/jme3/util/VRMouseManager.java (1)
  11. jme3-vr/src/main/java/com/jme3/util/VRViewManager.java (6)
  12. jme3-vr/src/main/java/com/jme3/util/VRViewManagerOculus.java (340)

@@ -2,7 +2,7 @@ if (!hasProperty('mainClass')) {
ext.mainClass = ''
}
def lwjglVersion = '3.0.0'
def lwjglVersion = '3.1.3'
sourceCompatibility = '1.8'
@@ -14,4 +14,8 @@ dependencies {
// https://mvnrepository.com/artifact/net.java.dev.jna/jna
compile group: 'net.java.dev.jna', name: 'jna', version: '4.3.0'
compile 'com.nativelibs4java:jnaerator-runtime:0.12'
// Native LibOVR/Oculus support
compile "org.lwjgl:lwjgl-ovr:${lwjglVersion}"
runtime "org.lwjgl:lwjgl-ovr:${lwjglVersion}:natives-windows"
}

@@ -34,6 +34,8 @@ package com.jme3.app;
import com.jme3.app.Application;
import com.jme3.app.state.AbstractAppState;
import com.jme3.app.state.AppStateManager;
import com.jme3.input.vr.OculusVR;
import com.jme3.input.vr.OpenVR;
import com.jme3.input.vr.VRAPI;
import com.jme3.input.vr.VRInputAPI;
import com.jme3.math.ColorRGBA;
@@ -399,7 +401,7 @@ public class VRAppState extends AbstractAppState {
//FIXME: check if this code is necessary.
// Updates scene and gui states.
Iterator<Spatial> spatialIter = application.getViewPort().getScenes().iterator();
Iterator<Spatial> spatialIter = getLeftViewPort().getScenes().iterator();
Spatial spatial = null;
while(spatialIter.hasNext()){
spatial = spatialIter.next();
@@ -419,8 +421,11 @@
}
// use the analog control on the first tracked controller to push around the mouse
// FIXME crashes on Rift/Touch (and probably OSVR), as it assumes the presence of the Vive touchpads
if(getVRHardware() instanceof OpenVR) {
environment.getVRMouseManager().updateAnalogAsMouse(0, null, null, null, tpf);
}
}
@Override
public void postRender() {
@@ -432,6 +437,16 @@
}
}
@Override
public void render(RenderManager rm) {
super.render(rm);
// update compositor
if( environment.getVRViewManager() != null ) {
environment.getVRViewManager().render();
}
}
@Override
public void initialize(AppStateManager stateManager, Application app) {
super.initialize(stateManager, app);
@@ -598,8 +613,12 @@ public class VRAppState extends AbstractAppState {
settings.setFrequency(environment.getVRHardware().getDisplayFrequency());
settings.setFullscreen(false);
settings.setVSync(false); // stop vsyncing on primary monitor!
// TODO: Is this preventing desktop display on _ALL_ HMDs?
if(!(getVRHardware() instanceof OculusVR)) {
settings.setSwapBuffers(environment.isSwapBuffers());
}
}
// Updating application settings
stateManager.getApplication().setSettings(settings);

@@ -142,6 +142,13 @@ public class VRConstants {
*/
public static final int SETTING_VRAPI_OPENVR_LWJGL_VALUE = 3;
/**
* The identifier of the LibOVR (Oculus) system.
*
* @see #SETTING_VRAPI
*/
public static final int SETTING_VRAPI_OCULUSVR_VALUE = 4;
}

@@ -7,6 +7,7 @@ import java.util.logging.Logger;
import com.jme3.app.state.AppState;
import com.jme3.input.vr.OSVR;
import com.jme3.input.vr.OpenVR;
import com.jme3.input.vr.OculusVR;
import com.jme3.input.vr.VRAPI;
import com.jme3.input.vr.VRBounds;
import com.jme3.input.vr.VRInputAPI;
@@ -18,6 +19,7 @@ import com.jme3.util.VRGuiManager;
import com.jme3.util.VRMouseManager;
import com.jme3.util.VRViewManager;
import com.jme3.util.VRViewManagerOSVR;
import com.jme3.util.VRViewManagerOculus;
import com.jme3.util.VRViewManagerOpenVR;
public class VREnvironment {
@@ -388,6 +390,8 @@ public class VREnvironment {
viewmanager = new VRViewManagerOpenVR(this);
} else if (vrBinding == VRConstants.SETTING_VRAPI_OSVR_VALUE){
viewmanager = new VRViewManagerOSVR(this);
} else if (vrBinding == VRConstants.SETTING_VRAPI_OCULUSVR_VALUE) {
viewmanager = new VRViewManagerOculus(this);
} else {
logger.severe("Cannot instanciate view manager, unknown VRAPI type: "+vrBinding);
}
@@ -419,6 +423,10 @@
hardware = new OpenVR(this);
initialized = true;
logger.config("Creating OpenVR wrapper [SUCCESS]");
} else if (vrBinding == VRConstants.SETTING_VRAPI_OCULUSVR_VALUE) {
hardware = new OculusVR(this);
initialized = true;
logger.config("Creating LibOVR wrapper [SUCCESS]");
} else {
logger.config("Cannot create VR binding: "+vrBinding+" [FAILED]");
logger.log(Level.SEVERE, "Cannot initialize VR environment [FAILED]");
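Taken together, the two dispatch points above mean an application opts into LibOVR purely through settings. A minimal bootstrap sketch, assuming the usual jme3-vr flow (only the class and constant names are from this PR; the wrapper class and error handling are illustrative):

```java
import com.jme3.app.VRAppState;
import com.jme3.app.VRConstants;
import com.jme3.app.VREnvironment;
import com.jme3.system.AppSettings;

public class OculusBootstrapSketch {
    public static VRAppState create() {
        AppSettings settings = new AppSettings(true);
        settings.put(VRConstants.SETTING_VRAPI, VRConstants.SETTING_VRAPI_OCULUSVR_VALUE);

        // initialize() runs the vrBinding dispatch shown above and creates
        // the OculusVR wrapper when the LibOVR value is set.
        VREnvironment environment = new VREnvironment(settings);
        environment.initialize();
        if (!environment.isInitialized()) {
            throw new IllegalStateException("No HMD found, or LibOVR failed to start");
        }

        // VRAppState later asks the environment for a view manager, which the
        // other dispatch above resolves to a VRViewManagerOculus.
        return new VRAppState(settings, environment);
    }
}
```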

@@ -0,0 +1,661 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.jme3.input.vr;
import com.jme3.app.VREnvironment;
import com.jme3.math.*;
import com.jme3.renderer.Camera;
import com.jme3.texture.*;
import org.lwjgl.*;
import org.lwjgl.ovr.*;
import java.nio.IntBuffer;
import java.util.logging.Logger;
import static org.lwjgl.BufferUtils.createPointerBuffer;
import static org.lwjgl.ovr.OVR.*;
import static org.lwjgl.ovr.OVRErrorCode.ovrSuccess;
import static org.lwjgl.ovr.OVRUtil.ovr_Detect;
import static org.lwjgl.system.MemoryUtil.*;
/**
* Oculus VR (LibOVR 1.3.0) Native support.
* <p>
* A few notes about the Oculus coordinate system:
* <ul>
* <li>Matrices should be transposed</li>
* <li>Quaternions should be inverted</li>
* <li>Vectors should have their X and Z axes flipped, but apparently not Y.</li>
* </ul>
*
* @author Campbell Suter <znix@znix.xyz>
*/
public class OculusVR implements VRAPI {
private static final Logger LOGGER = Logger.getLogger(OculusVR.class.getName());
private final VREnvironment environment;
private boolean initialized;
/**
* Pointer to the HMD object
*/
private long session;
/**
* Information about the VR session (should the app quit, is
* it visible or is the universal menu open, etc)
*/
private OVRSessionStatus sessionStatus;
/**
* HMD information, such as product name and manufacturer.
*/
private OVRHmdDesc hmdDesc;
/**
* The horizontal resolution of the HMD
*/
private int resolutionW;
/**
* The vertical resolution of the HMD
*/
private int resolutionH;
/**
* Field-of-view data for each eye (how many degrees from the
* center the user can see).
*/
private final OVRFovPort fovPorts[] = new OVRFovPort[2];
/**
* Data about each eye to be rendered - in particular, the
* offset from the center of the HMD to the eye.
*/
private final OVREyeRenderDesc eyeRenderDesc[] = new OVREyeRenderDesc[2];
/**
* Store the projections for each eye, so we don't have to malloc
* and recalculate them each frame.
*/
private final OVRMatrix4f[] projections = new OVRMatrix4f[2];
/**
* Store the poses for each eye, relative to the HMD.
*
* @see #getHMDMatrixPoseLeftEye()
*/
private final Matrix4f[] hmdRelativeEyePoses = new Matrix4f[2];
/**
* Store the positions for each eye, relative to the HMD.
*
* @see #getHMDVectorPoseLeftEye()
*/
private final Vector3f[] hmdRelativeEyePositions = new Vector3f[2];
/**
* The current state of the tracked components (HMD, touch)
*/
private OVRTrackingState trackingState;
/**
* The position and orientation of the user's head.
*/
private OVRPosef headPose;
/**
* The state of the Touch controllers.
*/
private OculusVRInput input;
// The size of the texture drawn onto the HMD
private int textureW;
private int textureH;
// Layers to render into
private PointerBuffer layers;
private OVRLayerEyeFov layer0;
/**
* The texture swap chain handles, one per eye.
*/
private long chains[];
/**
* Frame buffers we can draw into.
*/
private FrameBuffer framebuffers[][];
public OculusVR(VREnvironment environment) {
this.environment = environment;
}
@Override
public OculusVRInput getVRinput() {
return input;
}
@Override
public String getName() {
return "OVR";
}
@Override
public int getDisplayFrequency() {
// TODO find correct frequency. I'm not sure
// if LibOVR has a way to do that, though.
return 60;
}
@Override
public boolean initialize() {
// Check to make sure the HMD is connected
OVRDetectResult detect = OVRDetectResult.calloc();
ovr_Detect(0, detect);
boolean connected = detect.IsOculusHMDConnected();
LOGGER.config("OVRDetectResult.IsOculusHMDConnected = " + connected);
LOGGER.config("OVRDetectResult.IsOculusServiceRunning = " + detect.IsOculusServiceRunning());
detect.free();
if (!connected) {
LOGGER.info("Oculus Rift not connected");
return false;
}
initialized = true;
// Set up the HMD
OVRLogCallback callback = new OVRLogCallback() {
@Override
public void invoke(long userData, int level, long message) {
LOGGER.fine("LibOVR [" + userData + "] [" + level + "] " + memASCII(message));
}
};
OVRInitParams initParams = OVRInitParams.calloc();
initParams.LogCallback(callback);
if (ovr_Initialize(initParams) != ovrSuccess) {
LOGGER.severe("LibOVR Init Failed");
return false; // TODO fix memory leak - destroy() is not called
}
LOGGER.config("LibOVR Version " + ovr_GetVersionString());
initParams.free();
// Get access to the HMD
LOGGER.info("Initialize HMD Session");
PointerBuffer pHmd = memAllocPointer(1);
OVRGraphicsLuid luid = OVRGraphicsLuid.calloc();
if (ovr_Create(pHmd, luid) != ovrSuccess) {
LOGGER.severe("Failed to create HMD");
return false; // TODO fix memory leak - destroy() is not called
}
session = pHmd.get(0);
memFree(pHmd);
luid.free();
sessionStatus = OVRSessionStatus.calloc();
// Get the information about the HMD
LOGGER.fine("Get HMD properties");
hmdDesc = OVRHmdDesc.malloc();
ovr_GetHmdDesc(session, hmdDesc);
if (hmdDesc.Type() == ovrHmd_None) {
LOGGER.warning("No HMD connected");
return false; // TODO fix memory leak - destroy() is not called
}
resolutionW = hmdDesc.Resolution().w();
resolutionH = hmdDesc.Resolution().h();
LOGGER.config("HMD Properties: "
+ "\t Manufacturer: " + hmdDesc.ManufacturerString()
+ "\t Product: " + hmdDesc.ProductNameString()
+ "\t Serial: <hidden>" // + hmdDesc.SerialNumberString() // Hidden for privacy reasons
+ "\t Type: " + hmdDesc.Type()
+ "\t Resolution (total): " + resolutionW + "," + resolutionH);
if (resolutionW == 0) {
LOGGER.severe("HMD witdth=0 : aborting");
return false; // TODO fix memory leak - destroy() is not called
}
// Find the FOV for each eye
for (int eye = 0; eye < 2; eye++) {
fovPorts[eye] = hmdDesc.DefaultEyeFov(eye);
}
// Get the pose for each eye, and cache it for later.
for (int eye = 0; eye < 2; eye++) {
// Create the projection objects
projections[eye] = OVRMatrix4f.malloc();
hmdRelativeEyePoses[eye] = new Matrix4f();
hmdRelativeEyePositions[eye] = new Vector3f();
// Find the eye render information - we use this in the
// view manager for giving LibOVR its timewarp information.
eyeRenderDesc[eye] = OVREyeRenderDesc.malloc();
ovr_GetRenderDesc(session, eye, fovPorts[eye], eyeRenderDesc[eye]);
// Get the pose of the eye
OVRPosef pose = eyeRenderDesc[eye].HmdToEyePose();
// Get the position and rotation of the eye
vecO2J(pose.Position(), hmdRelativeEyePositions[eye]);
Quaternion rotation = quatO2J(pose.Orientation(), new Quaternion());
// Put it into a matrix for the get eye pose functions
hmdRelativeEyePoses[eye].loadIdentity();
hmdRelativeEyePoses[eye].setTranslation(hmdRelativeEyePositions[eye]);
hmdRelativeEyePoses[eye].setRotationQuaternion(rotation);
}
// Recenter the HMD. The game itself should do this too, but just in case / before they do.
reset();
// Do this so others relying on our texture size (the GUI in particular) get it correct.
findHMDTextureSize();
// Allocate the memory for the tracking state - we actually
// set it up later, but Input uses it so calloc it now.
trackingState = OVRTrackingState.calloc();
// Set up the input
input = new OculusVRInput(this, session, sessionStatus, trackingState);
// TODO find some way to get in ovrTrackingOrigin_FloorLevel
// throw new UnsupportedOperationException("Not yet implemented!");
return true;
}
@Override
public void updatePose() {
double ftiming = ovr_GetPredictedDisplayTime(session, 0);
ovr_GetTrackingState(session, ftiming, true, trackingState);
ovr_GetSessionStatus(session, sessionStatus);
input.updateControllerStates();
headPose = trackingState.HeadPose().ThePose();
}
@Override
public boolean isInitialized() {
return initialized;
}
@Override
public void destroy() {
// fovPorts: contents are managed by LibOVR, no need to do anything.
// Clean up the input
input.dispose();
// Check if we've set up rendering - if so, clean that up.
if (chains != null) {
// Destroy our set of huge buffer images.
for (long chain : chains) {
ovr_DestroyTextureSwapChain(session, chain);
}
// Free up the layer
layer0.free();
// The layers array apparently takes care of itself (and crashes if we try to free it)
}
for (OVREyeRenderDesc eye : eyeRenderDesc) {
eye.free();
}
for (OVRMatrix4f projection : projections) {
projection.free();
}
hmdDesc.free();
trackingState.free();
sessionStatus.free();
// Wrap everything up
ovr_Destroy(session);
ovr_Shutdown();
}
@Override
public void reset() {
// Reset the coordinate system - where the user's head is now is facing forwards from [0,0,0]
ovr_RecenterTrackingOrigin(session);
}
@Override
public void getRenderSize(Vector2f store) {
if (!isInitialized()) {
throw new IllegalStateException("Cannot call getRenderSize() before initialized!");
}
store.x = textureW;
store.y = textureH;
}
@Override
public float getInterpupillaryDistance() {
return 0.065f; // TODO
}
@Override
public Quaternion getOrientation() {
return quatO2J(headPose.Orientation(), new Quaternion());
}
@Override
public Vector3f getPosition() {
return vecO2J(headPose.Position(), new Vector3f());
}
@Override
public void getPositionAndOrientation(Vector3f storePos, Quaternion storeRot) {
storePos.set(getPosition());
storeRot.set(getOrientation());
}
private Matrix4f calculateProjection(int eye, Camera cam) {
Matrix4f mat = new Matrix4f();
// Get LibOVR to find the correct projection
OVRUtil.ovrMatrix4f_Projection(fovPorts[eye], cam.getFrustumNear(), cam.getFrustumFar(), OVRUtil.ovrProjection_None, projections[eye]);
matrixO2J(projections[eye], mat);
return mat;
}
@Override
public Matrix4f getHMDMatrixProjectionLeftEye(Camera cam) {
return calculateProjection(ovrEye_Left, cam);
}
@Override
public Matrix4f getHMDMatrixProjectionRightEye(Camera cam) {
return calculateProjection(ovrEye_Right, cam);
}
@Override
public Vector3f getHMDVectorPoseLeftEye() {
return hmdRelativeEyePositions[ovrEye_Left];
}
@Override
public Vector3f getHMDVectorPoseRightEye() {
return hmdRelativeEyePositions[ovrEye_Right];
}
@Override
public Vector3f getSeatedToAbsolutePosition() {
throw new UnsupportedOperationException();
}
@Override
public Matrix4f getHMDMatrixPoseLeftEye() {
return hmdRelativeEyePoses[ovrEye_Left];
}
@Override
public Matrix4f getHMDMatrixPoseRightEye() {
return hmdRelativeEyePoses[ovrEye_Right];
}
@Override
public HmdType getType() {
return HmdType.OCULUS_RIFT;
}
public boolean initVRCompositor(boolean set) {
if (!set) {
throw new UnsupportedOperationException("Cannot use LibOVR without compositor!");
}
setupLayers();
framebuffers = new FrameBuffer[2][];
for (int eye = 0; eye < 2; eye++)
setupFramebuffers(eye);
// TODO move initialization code here from VRViewManagerOculus
return true;
}
public void printLatencyInfoToConsole(boolean set) {
throw new UnsupportedOperationException("Not yet implemented!");
}
public void setFlipEyes(boolean set) {
throw new UnsupportedOperationException("Not yet implemented!");
}
public Void getCompositor() {
throw new UnsupportedOperationException("Not yet implemented!");
}
public Void getVRSystem() {
throw new UnsupportedOperationException("Not yet implemented!");
}
// Rendering-type stuff
public void findHMDTextureSize() {
// Texture sizes
float pixelScaling = 1.0f; // pixelsPerDisplayPixel
OVRSizei leftTextureSize = OVRSizei.malloc();
ovr_GetFovTextureSize(session, ovrEye_Left, fovPorts[ovrEye_Left], pixelScaling, leftTextureSize);
OVRSizei rightTextureSize = OVRSizei.malloc();
ovr_GetFovTextureSize(session, ovrEye_Right, fovPorts[ovrEye_Right], pixelScaling, rightTextureSize);
if (leftTextureSize.w() != rightTextureSize.w()) {
throw new IllegalStateException("Texture sizes do not match [horizontal]");
}
if (leftTextureSize.h() != rightTextureSize.h()) {
throw new IllegalStateException("Texture sizes do not match [vertical]");
}
textureW = leftTextureSize.w();
textureH = leftTextureSize.h();
leftTextureSize.free();
rightTextureSize.free();
}
private long setupTextureChain() {
// Set up the information for the texture buffer chain thing
OVRTextureSwapChainDesc swapChainDesc = OVRTextureSwapChainDesc.calloc()
.Type(ovrTexture_2D)
.ArraySize(1)
.Format(OVR_FORMAT_R8G8B8A8_UNORM_SRGB)
.Width(textureW)
.Height(textureH)
.MipLevels(1)
.SampleCount(1)
.StaticImage(false); // ovrFalse
// Create the chain
PointerBuffer textureSetPB = createPointerBuffer(1);
if (OVRGL.ovr_CreateTextureSwapChainGL(session, swapChainDesc, textureSetPB) != ovrSuccess) {
throw new RuntimeException("Failed to create Swap Texture Set");
}
swapChainDesc.free();
return textureSetPB.get(); // TODO is this a memory leak?
}
public void setupLayers() {
//Layers
layer0 = OVRLayerEyeFov.calloc();
layer0.Header().Type(ovrLayerType_EyeFov);
layer0.Header().Flags(ovrLayerFlag_TextureOriginAtBottomLeft);
chains = new long[2];
for (int eye = 0; eye < 2; eye++) {
long eyeChain = setupTextureChain();
chains[eye] = eyeChain;
OVRRecti viewport = OVRRecti.calloc();
viewport.Pos().x(0);
viewport.Pos().y(0);
viewport.Size().w(textureW);
viewport.Size().h(textureH);
layer0.ColorTexture(eye, eyeChain);
layer0.Viewport(eye, viewport);
layer0.Fov(eye, fovPorts[eye]);
viewport.free();
// we update pose only when we have it in the render loop
}
layers = createPointerBuffer(1);
layers.put(0, layer0);
}
/**
* Create a framebuffer for an eye.
*/
public void setupFramebuffers(int eye) {
// Find the chain length
IntBuffer length = BufferUtils.createIntBuffer(1);
ovr_GetTextureSwapChainLength(session, chains[eye], length);
int chainLength = length.get();
LOGGER.fine("HMD Eye #" + eye + " texture chain length: " + chainLength);
// Create the frame buffers
framebuffers[eye] = new FrameBuffer[chainLength];
for (int i = 0; i < chainLength; i++) {
// find the GL texture ID for this texture
IntBuffer textureIdB = BufferUtils.createIntBuffer(1);
OVRGL.ovr_GetTextureSwapChainBufferGL(session, chains[eye], i, textureIdB);
int textureId = textureIdB.get();
// TODO less hacky way of getting our texture into JMonkeyEngine
Image img = new Image();
img.setId(textureId);
img.setFormat(Image.Format.RGBA8);
img.setWidth(textureW);
img.setHeight(textureH);
Texture2D tex = new Texture2D(img);
FrameBuffer buffer = new FrameBuffer(textureW, textureH, 1);
buffer.setDepthBuffer(Image.Format.Depth);
buffer.setColorTexture(tex);
framebuffers[eye][i] = buffer;
}
}
// UTILITIES
// TODO move to helper class
/**
* Copy the values from a LibOVR matrix into a jMonkeyEngine matrix.
*
* @param from The matrix to copy from.
* @param to The matrix to copy to.
* @return The {@code to} argument.
*/
public static Matrix4f matrixO2J(OVRMatrix4f from, Matrix4f to) {
to.loadIdentity(); // For the additional columns (unless I'm badly misunderstanding matrices)
for (int x = 0; x < 4; x++) {
for (int y = 0; y < 4; y++) {
float val = from.M(x + y * 4); // TODO verify this
to.set(x, y, val);
}
}
to.transposeLocal(); // jME vs LibOVR coordinate spaces - Yay!
return to;
}
/**
* Copy the values from a LibOVR quaternion into a jMonkeyEngine quaternion.
*
* @param from The quaternion to copy from.
* @param to The quaternion to copy to.
* @return The {@code to} argument.
*/
public static Quaternion quatO2J(OVRQuatf from, Quaternion to) {
// jME and LibOVR do their coordinate spaces differently for rotations, so flip Y and W (thanks, jMonkeyVR).
to.set(
from.x(),
-from.y(),
from.z(),
-from.w()
);
to.normalizeLocal();
return to;
}
/**
* Copy the values from a LibOVR vector into a jMonkeyEngine vector.
*
* @param from The vector to copy from.
* @param to The vector to copy to.
* @return The {@code to} argument.
*/
public static Vector3f vecO2J(OVRVector3f from, Vector3f to) {
// jME and LibOVR disagree on which way X and Z point, too.
to.set(
-from.x(),
from.y(),
-from.z()
);
return to;
}
// Getters, intended for VRViewManager.
public long getSessionPointer() {
return session;
}
public long getChain(int eye) {
return chains[eye];
}
public FrameBuffer[] getFramebuffers(int eye) {
return framebuffers[eye];
}
public PointerBuffer getLayers() {
return layers;
}
public OVRLayerEyeFov getLayer0() {
return layer0;
}
public OVRFovPort getFovPort() {
return fovPorts[ovrEye_Left]; // TODO check that the left and right eyes match
}
public OVRPosef getHeadPose() {
return headPose;
}
public OVRPosef getEyePose(int eye) {
return eyeRenderDesc[eye].HmdToEyePose();
}
public VREnvironment getEnvironment() {
return environment;
}
}
/* vim: set ts=4 softtabstop=0 sw=4 expandtab: */
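A hedged usage sketch for the three conversion helpers above, mirroring what initialize() does for the per-eye poses (the wrapper class is illustrative):

```java
import com.jme3.input.vr.OculusVR;
import com.jme3.math.Matrix4f;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import org.lwjgl.ovr.OVRPosef;

public class PoseConversionSketch {
    /**
     * Convert a LibOVR pose into a jME transform, flipping handedness
     * exactly as the class javadoc above describes.
     */
    public static Matrix4f poseToMatrix(OVRPosef pose) {
        Vector3f position = OculusVR.vecO2J(pose.Position(), new Vector3f());
        Quaternion rotation = OculusVR.quatO2J(pose.Orientation(), new Quaternion());
        Matrix4f mat = new Matrix4f(); // a new jME matrix starts as identity
        mat.setTranslation(position);
        mat.setRotationQuaternion(rotation);
        return mat;
    }
}
```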

@@ -0,0 +1,367 @@
package com.jme3.input.vr;
import com.jme3.app.VREnvironment;
import com.jme3.math.*;
import com.jme3.renderer.Camera;
import com.jme3.scene.Spatial;
import com.jme3.util.VRViewManagerOculus;
import org.lwjgl.ovr.*;
import static org.lwjgl.ovr.OVR.*;
public class OculusVRInput implements VRInputAPI {
// State control
private final OVRInputState inputState;
private final OVRSessionStatus sessionStatus;
private final OVRTrackingState trackingState;
private final OculusVR hardware;
private long session;
// Setup values
private float axisMultiplier = 1;
// Cached stuff
private int buttons, touch;
// Used to calculate sinceLastCall stuff
private int lastButtons, lastTouch;
private final Vector2f[][] lastAxises;
/**
* The state data (linear and angular velocity and acceleration) for each hand
*/
private OVRPoseStatef[] handStates;
/**
* The position and orientation of the Touch controllers.
*/
private OVRPosef[] handPoses;
/**
* The object forms of the tracked controllers.
*/
private final OculusController[] controllers = {
new OculusController(0),
new OculusController(1)
};
public OculusVRInput(OculusVR hardware, long session,
OVRSessionStatus sessionStatus, OVRTrackingState trackingState) {
this.hardware = hardware;
this.session = session;
this.sessionStatus = sessionStatus;
this.trackingState = trackingState;
inputState = OVRInputState.calloc();
handStates = new OVRPoseStatef[ovrHand_Count];
handPoses = new OVRPosef[handStates.length];
lastAxises = new Vector2f[handStates.length][3]; // trigger+grab+thumbstick for each hand.
}
public void dispose() {
inputState.free();
session = 0; // Crashing > undefined behaviour if this object is incorrectly accessed again.
}
@Override
public void updateControllerStates() {
// Handle buttons and axes
ovr_GetInputState(session, ovrControllerType_Touch, inputState);
buttons = inputState.Buttons();
touch = inputState.Touches();
// Get the touch controller poses
// TODO what if no touch controllers are available?
for (int hand = 0; hand < handPoses.length; hand++) {
handStates[hand] = trackingState.HandPoses(hand);
handPoses[hand] = handStates[hand].ThePose();
}
}
private Vector3f cv(OVRVector3f in) {
// TODO do we want to reuse vectors rather than making new ones?
// TODO OpenVRInput does this, but it will probably cause some bugs.
return OculusVR.vecO2J(in, new Vector3f()); // This also fixes the coordinate space transform issues.
}
private Vector2f cv(OVRVector2f in) {
// TODO do we want to reuse vectors rather than making new ones?
// TODO OpenVRInput does this, but it will probably cause some bugs.
return new Vector2f(in.x(), in.y());
}
private Quaternion cq(OVRQuatf in) {
// TODO do we want to reuse quaternions rather than making new ones?
// TODO OpenVRInput does this, but it will probably cause some bugs.
return OculusVR.quatO2J(in, new Quaternion()); // This also fixes the coordinate space transform issues.
}
private Vector2f axis(float input) {
// See above comments about reusing vectors
return new Vector2f(input, input);
}
// Tracking (position, rotation, velocity, status)
@Override
public Vector3f getPosition(int index) {
return cv(handPoses[index].Position());
}
@Override
public Vector3f getVelocity(int controllerIndex) {
return cv(handStates[controllerIndex].LinearVelocity());
}
@Override
public Quaternion getOrientation(int index) {
return cq(handPoses[index].Orientation());
}
@Override
public Vector3f getAngularVelocity(int controllerIndex) {
return cv(handStates[controllerIndex].AngularVelocity());
}
@Override
public Quaternion getFinalObserverRotation(int index) {
// Copied from OpenVRInput
VREnvironment env = hardware.getEnvironment();
VRViewManagerOculus vrvm = (VRViewManagerOculus) hardware.getEnvironment().getVRViewManager();
Object obs = env.getObserver();
Quaternion tempq = new Quaternion(); // TODO move to class scope?
if (obs instanceof Camera) {
tempq.set(((Camera) obs).getRotation());
} else {
tempq.set(((Spatial) obs).getWorldRotation());
}
return tempq.multLocal(getOrientation(index));
}
@Override
public Vector3f getFinalObserverPosition(int index) {
// Copied from OpenVRInput
VREnvironment env = hardware.getEnvironment();
VRViewManagerOculus vrvm = (VRViewManagerOculus) hardware.getEnvironment().getVRViewManager();
Object obs = env.getObserver();
Vector3f pos = getPosition(index);
if (obs instanceof Camera) {
((Camera) obs).getRotation().mult(pos, pos);
return pos.addLocal(((Camera) obs).getLocation());
} else {
((Spatial) obs).getWorldRotation().mult(pos, pos);
return pos.addLocal(((Spatial) obs).getWorldTranslation());
}
}
@Override
public boolean isInputDeviceTracking(int index) {
int flags = trackingState.HandStatusFlags(index);
return (flags & ovrStatus_PositionTracked) != 0; // TODO do we require orientation as well?
}
// Input Getters
@Override
public Vector2f getAxis(int controllerIndex, VRInputType forAxis) {
Vector2f result = getAxisRaw(controllerIndex, forAxis);
return result == null ? null : result.multLocal(axisMultiplier);
}
@Override
public Vector2f getAxisRaw(int controllerIndex, VRInputType forAxis) {
switch (forAxis) {
case OculusThumbstickAxis:
return cv(inputState.Thumbstick(controllerIndex));
case OculusTriggerAxis:
return axis(inputState.IndexTrigger(controllerIndex));
case OculusGripAxis:
return axis(inputState.HandTrigger(controllerIndex));
default:
return null;
}
}
@Override
public boolean isButtonDown(int controllerIndex, VRInputType checkButton) {
return isButtonDownForStatus(controllerIndex, checkButton, buttons, touch);
}
public boolean isButtonDownForStatus(int controllerIndex, VRInputType checkButton, int buttons, int touch) {
int buttonMask = (controllerIndex == ovrHand_Left) ? ovrButton_LMask : ovrButton_RMask;
int touchMask = (controllerIndex == ovrHand_Left) ?
(ovrTouch_LButtonMask + ovrTouch_LPoseMask) :
(ovrTouch_RButtonMask + ovrTouch_RPoseMask);
switch (checkButton) {
default:
return false;
case OculusTopButton: // Physical buttons
case OculusBottomButton:
case OculusThumbstickButton:
case OculusMenuButton:
return (buttons & buttonMask & checkButton.getValue()) != 0;
case OculusTopTouch: // Standard capacitive buttons
case OculusBottomTouch:
case OculusThumbstickTouch:
case OculusThumbrestTouch:
case OculusIndexTouch:
case OculusThumbUp: // Calculated/virtual capacitive buttons
case OculusIndexPointing:
return (touch & touchMask & checkButton.getValue()) != 0;
}
}
// Since-last-call stuff
@Override
public void resetInputSinceLastCall() {
lastButtons = 0;
lastTouch = 0;
}
@Override
public boolean wasButtonPressedSinceLastCall(int controllerIndex, VRInputType checkButton) {
boolean wasPressed = isButtonDownForStatus(controllerIndex, checkButton, lastButtons, lastTouch);
lastButtons = buttons;
lastTouch = touch;
return !wasPressed && isButtonDown(controllerIndex, checkButton);
}
@Override
public Vector2f getAxisDeltaSinceLastCall(int controllerIndex, VRInputType forAxis) {
int index;
switch (forAxis) {
case OculusTriggerAxis:
index = 0;
break;
case OculusGripAxis:
index = 1;
break;
case OculusThumbstickAxis:
index = 2;
break;
default:
return null;
}
Vector2f last = lastAxises[controllerIndex][index];
if (last == null) {
last = lastAxises[controllerIndex][index] = new Vector2f();
}
Vector2f current = getAxis(controllerIndex, forAxis);
// TODO could this lead to accuracy problems?
current.subtractLocal(last);
last.addLocal(current);
return current;
}
// Misc
@Override
public boolean init() {
throw new UnsupportedOperationException("Input initialized at creation time");
}
@Override
public void updateConnectedControllers() {
throw new UnsupportedOperationException("Automatically done by LibOVR (I think?)");
}
@Override
public float getAxisMultiplier() {
return axisMultiplier;
}
@Override
public void setAxisMultiplier(float axisMultiplier) {
this.axisMultiplier = axisMultiplier;
}
@Override
public void triggerHapticPulse(int controllerIndex, float seconds) {
// TODO: How do we time so we can turn the feedback off?
}
@Override
public boolean isInputFocused() {
return sessionStatus.IsVisible(); // TODO do we need HmdMounted, or is it counted in IsVisible
}
@Override
public Object getRawControllerState(int index) {
throw new UnsupportedOperationException("Cannot get raw controller state!");
}
@Override
public void swapHands() {
// Do nothing.
// TODO OSVR (and OpenVR, when it has more than two controllers) also does nothing here, but shouldn't we be
// TODO throwing an exception or something?
}
@Override
public int getTrackedControllerCount() {
// TODO: Shouldn't we be seeing if the user has the touch controllers first?
return 2;
}
@Override
public VRTrackedController getTrackedController(int index) {
return controllers[index];
}
/**
* The object form representation of a controller.
*/
public class OculusController implements VRTrackedController {
/**
* The ID of the hand to track
*/
private int hand;
public OculusController(int hand) {
this.hand = hand;
}
@Override
public String getControllerName() {
return "Touch"; // TODO
}
@Override
public String getControllerManufacturer() {
return "Oculus"; // TODO
}
@Override
public Vector3f getPosition() {
return OculusVRInput.this.getPosition(hand);
}
@Override
public Quaternion getOrientation() {
return OculusVRInput.this.getOrientation(hand);
}
@Override
public Matrix4f getPose() {
Matrix4f mat = new Matrix4f();
mat.setRotationQuaternion(getOrientation());
mat.setTranslation(getPosition());
return mat;
}
}
}
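For callers, a hedged polling sketch against the generic VRInputAPI, using the new Oculus enum values (the wrapper class and thresholds are illustrative):

```java
import com.jme3.input.vr.VRInputAPI;
import com.jme3.input.vr.VRInputType;
import com.jme3.math.Vector2f;

public class TouchPollingSketch {
    // Hand indices follow LibOVR: 0 = ovrHand_Left, 1 = ovrHand_Right.
    private static final int RIGHT = 1;

    public void poll(VRInputAPI input) {
        // Analog thumbstick, already scaled by the axis multiplier.
        Vector2f stick = input.getAxis(RIGHT, VRInputType.OculusThumbstickAxis);
        if (stick != null && stick.length() > 0.5f) {
            // move the player, etc.
        }

        // Rising-edge detection for the lower face button (A on the right hand).
        if (input.wasButtonPressedSinceLastCall(RIGHT, VRInputType.OculusBottomButton)) {
            // handle the press
        }
    }
}
```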

@@ -86,7 +86,8 @@ public interface VRAPI {
public boolean isInitialized();
/**
* Reset the VR system.
* Reset (recenter) the VR system. The current position of the HMD is
* now considered the origin (observer+[0,0,0]).
*/
public void reset();
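As the reworded javadoc says, reset() re-origins tracking at the user's current head position; games typically expose it as a "recenter" action. A hedged sketch (the key choice and wrapper class are illustrative):

```java
import com.jme3.app.VREnvironment;
import com.jme3.input.InputManager;
import com.jme3.input.KeyInput;
import com.jme3.input.controls.ActionListener;
import com.jme3.input.controls.KeyTrigger;

public class RecenterSketch {
    public static void bind(InputManager inputManager, VREnvironment environment) {
        inputManager.addMapping("RecenterHMD", new KeyTrigger(KeyInput.KEY_F12));
        inputManager.addListener((ActionListener) (name, pressed, tpf) -> {
            if (pressed) {
                // The user's current head position becomes the new [0,0,0].
                environment.getVRHardware().reset();
            }
        }, "RecenterHMD");
    }
}
```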

@@ -1,5 +1,7 @@
package com.jme3.input.vr;
import static org.lwjgl.ovr.OVR.*; // For the button constants
/**
* The type of a VR input. This enumeration makes it possible to determine which part of the VR device is involved in an input callback.
* @author reden - phr00t - https://github.com/phr00t
@@ -26,7 +28,103 @@ public enum VRInputType {
/**
* an HTC vive menu button (about <a href="https://www.vive.com/us/support/category_howto/720435.html">Vive controller</a>).
*/
ViveMenuButton(3);
ViveMenuButton(3),
/**
* The thumbstick on the Oculus Touch controllers.
*
* Unlike the Vive touchpad, which is commonly used as a virtual D-pad,
* the thumbstick should be avoided for purposes that do not require
* analog input.
*/
OculusThumbstickAxis(0),
/**
* The trigger button on the Oculus Touch controllers.
*
* This is the button under the user's index finger, and should not be used to
* pick up objects. See the
* <a href="https://developer.oculus.com/documentation/pcsdk/latest/concepts/dg-input-touch-overview/"
* >Oculus Developer</a> documentation.
*/
OculusTriggerAxis(0),
/**
* The 'grab' button on the Oculus Touch controllers.
*
* Unless you have a compelling reason otherwise, this button should only be used to pick up objects.
*/
OculusGripAxis(0),
/**
* The upper buttons on the Oculus Touch controllers - B on the right controller, and Y on the left.
*/
OculusTopButton(ovrButton_B | ovrButton_Y),
/**
* The lower (not counting menu) buttons on the Oculus Touch
* controllers - A on the right controller, and X on the left.
*/
OculusBottomButton(ovrButton_A | ovrButton_X),
/**
* The 'click' button on the Oculus Touch thumbsticks.
*/
OculusThumbstickButton(ovrButton_LThumb | ovrButton_RThumb),
/**
* The game-usable menu button, under and to the left of the 'X' button on the left controller.
*
* Most games use this to pause, and it should preferably be used for at least that purpose. It is
* uncomfortable to rest your thumb on, which matters in games where you suddenly have to pause or open a menu.
*/
OculusMenuButton(ovrButton_Enter),
/**
* The capacitive touch sensors on the top buttons (Y and B) of the Oculus Touch.
*/
OculusTopTouch(ovrTouch_B | ovrTouch_Y),
/**
* The capacitive touch sensors on the lower buttons (X and A) of the Oculus Touch.
*/
OculusBottomTouch(ovrTouch_A | ovrTouch_X),
/**
* The capacitive touch sensors on the thumbsticks of the Oculus Touch.
*/
OculusThumbstickTouch(ovrTouch_LThumb | ovrTouch_RThumb),
/**
* The capacitive touch sensors on the thumbrests of the Oculus Touch - this is a textured pad
* on the Oculus Touch controller next to the ABXY buttons for users to rest their thumbs on.
*
* While it probably goes without saying, only use this for gesture support and do not bind game
* elements to it.
*/
OculusThumbrestTouch(ovrTouch_LThumbRest | ovrTouch_RThumbRest),
/**
* The result of a software calculation based on the capacitive touch sensor values, determining
* whether the user has lifted their thumb off the controller. It can be used for gesture support.
*
* This should be used instead of calculating this yourself based on the touch results of all the other
* parts of the controller.
*/
OculusThumbUp(ovrTouch_LThumbUp | ovrTouch_RThumbUp),
/**
* Is the user resting their finger on the trigger of an Oculus Touch controller?
*/
OculusIndexTouch(ovrTouch_LIndexTrigger | ovrTouch_RIndexTrigger),
/**
* Is the user pointing their finger forwards, as if to press a button?
*
* This is calculated internally from proximity, with filtering applied; it should be used rather
* than !OculusIndexTouch, as it will probably give better results.
*/
OculusIndexPointing(ovrTouch_LIndexPointing | ovrTouch_RIndexPointing);
/**
* The value that codes the input type.

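Each Oculus constant above ORs the left-hand and right-hand bits together, which is why OculusVRInput can use one enum value for either controller and then AND in a per-hand mask. A minimal demonstration of that masking (the constants are LWJGL's; the wrapper class is illustrative):

```java
import static org.lwjgl.ovr.OVR.*;

public class ButtonMaskSketch {
    /** Mirrors OculusVRInput.isButtonDownForStatus for physical buttons. */
    public static boolean isDown(int buttons, boolean leftHand, int enumValue) {
        int handMask = leftHand ? ovrButton_LMask : ovrButton_RMask;
        // enumValue packs both hands' bits (e.g. ovrButton_A | ovrButton_X);
        // the hand mask strips the bit belonging to the other controller.
        return (buttons & handMask & enumValue) != 0;
    }

    public static void main(String[] args) {
        int buttons = ovrButton_X; // pretend only X (on the left hand) is held
        System.out.println(isDown(buttons, true, ovrButton_A | ovrButton_X));  // true
        System.out.println(isDown(buttons, false, ovrButton_A | ovrButton_X)); // false
    }
}
```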
@@ -129,6 +129,10 @@ public abstract class AbstractVRViewManager implements VRViewManager {
return environment;
}
@Override
public void render() {
}
/**
* Handles moving filters from the main view to each eye
*/

@@ -182,6 +182,7 @@ public class VRMouseManager {
}
Vector2f tpDelta;
// TODO option to use Touch joysticks
if( thumbstickMode ) {
tpDelta = environment.getVRinput().getAxis(inputIndex, VRInputType.ViveTrackpadAxis);
} else {

@@ -150,6 +150,12 @@ public interface VRViewManager {
*/
public void update(float tpf);
/**
* Set up the scene for rendering.
* This method should be called before any rendering takes place.
*/
public void render();
/**
* Send the rendering result as textures to the two eyes.
* This method should be called after all the rendering operations

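Together with update() and postRender(), the interface implies a fixed per-frame order. A hedged sketch of the driver loop, as VRAppState drives it (the wrapper method is illustrative):

```java
import com.jme3.util.VRViewManager;

public class FrameLoopSketch {
    /** One frame of the VR render lifecycle. */
    public static void frame(VRViewManager viewManager, float tpf) {
        viewManager.update(tpf);   // pull fresh HMD poses and move the eye cameras
        viewManager.render();      // point each eye viewport at the right buffer
        // ... jME renders the scene into those framebuffers here ...
        viewManager.postRender();  // commit the textures and submit the frame
    }
}
```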
@@ -0,0 +1,340 @@
/*
* Copyright (c) 2009-2017 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.util;
import com.jme3.app.VREnvironment;
import com.jme3.input.vr.OculusVR;
import com.jme3.input.vr.VRAPI;
import com.jme3.math.*;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Spatial;
import com.jme3.texture.*;
import java.nio.IntBuffer;
import java.util.Iterator;
import java.util.Objects;
import java.util.logging.Logger;
import org.lwjgl.ovr.*;
import static org.lwjgl.ovr.OVR.*;
import static org.lwjgl.ovr.OVRErrorCode.*;
/**
* A rendering system for Oculus's LibOVR API.
*
* @author Campbell Suter <znix@znix.xyz>
*/
public class VRViewManagerOculus extends AbstractVRViewManager {
private static final Logger LOG = Logger.getLogger(VRViewManagerOculus.class.getName());
private final VREnvironment environment;
private final OculusVR hardware;
// Copied from OSVR
//final & temp values for camera calculations
private final Vector3f finalPosition = new Vector3f();
private final Quaternion finalRotation = new Quaternion();
private final Vector3f hmdPos = new Vector3f();
private final Quaternion hmdRot = new Quaternion();
public VRViewManagerOculus(VREnvironment environment) {
this.environment = environment;
VRAPI hardware = environment.getVRHardware();
Objects.requireNonNull(hardware, "Attached VR Hardware cannot be null");
if (!(hardware instanceof OculusVR)) {
throw new IllegalStateException("Cannot use Oculus VR view manager on non-Oculus hardware state!");
}
this.hardware = (OculusVR) hardware;
if (!environment.compositorAllowed()) {
throw new UnsupportedOperationException("Cannot render without compositor on LibOVR");
}
}
@Override
public void initialize() {
setupCamerasAndViews();
if (environment.hasTraditionalGUIOverlay()) {
environment.getVRMouseManager().initialize();
// update the pose to position the gui correctly on start
update(0f);
environment.getVRGUIManager().positionGui();
}
}
private long session() {
return hardware.getSessionPointer();
}
@Override
public void update(float tpf) {
// TODO
hardware.updatePose();
// TODO deduplicate
if (environment == null) {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
// grab the observer
Object obs = environment.getObserver();
Quaternion objRot;
Vector3f objPos;
if (obs instanceof Camera) {
objRot = ((Camera) obs).getRotation();
objPos = ((Camera) obs).getLocation();
} else {
objRot = ((Spatial) obs).getWorldRotation();
objPos = ((Spatial) obs).getWorldTranslation();
}
// update the HMD's position & orientation
hardware.getPositionAndOrientation(hmdPos, hmdRot);
if (obs != null) {
// update hmdPos based on obs rotation
finalRotation.set(objRot);
finalRotation.mult(hmdPos, hmdPos);
finalRotation.multLocal(hmdRot);
}
// Update both eye cameras
finalizeCamera(hardware.getHMDVectorPoseLeftEye(), objPos, leftCamera);
finalizeCamera(hardware.getHMDVectorPoseRightEye(), objPos, rightCamera);
// Update the main camera, so it shows the same basic view the HMD is getting
// TODO: Do this in VRAppState, so it works on all HMDs.
// I only have a Rift, so I can't test it on anything else.
if(!environment.isInstanceRendering()) { // We use the app camera as the left camera here
// TODO: Double up on rendering and use one eye, to reduce GPU load rendering the scene again.
// TODO: Snip at the image to remove the distorted corners from a very high FOV.
finalizeCamera(Vector3f.ZERO, objPos, environment.getApplication().getCamera());
}
if (environment.hasTraditionalGUIOverlay()) {
// update the mouse?
environment.getVRMouseManager().update(tpf);
// update GUI position?
if (environment.getVRGUIManager().wantsReposition || environment.getVRGUIManager().getPositioningMode() != VRGUIPositioningMode.MANUAL) {
environment.getVRGUIManager().positionGuiNow(tpf);
environment.getVRGUIManager().updateGuiQuadGeometricState();
}
}
}
/**
* Place the camera within the scene.
*
* @param eyePos the eye position.
* @param obsPosition the observer position.
* @param cam the camera to place.
*/
private void finalizeCamera(Vector3f eyePos, Vector3f obsPosition, Camera cam) {
finalRotation.mult(eyePos, finalPosition);
finalPosition.addLocal(hmdPos);
if (obsPosition != null) {
finalPosition.addLocal(obsPosition);
}
finalPosition.y += getHeightAdjustment();
cam.setFrame(finalPosition, finalRotation);
}
@Override
public void render() {
// Calculate the render pose (translation/rotation) for each eye.
// LibOVR takes the difference between this and the real position of each eye at display time
// to apply AZW (timewarp).
OVRPosef.Buffer hmdToEyeOffsets = OVRPosef.calloc(2);
hmdToEyeOffsets.put(0, hardware.getEyePose(ovrEye_Left));
hmdToEyeOffsets.put(1, hardware.getEyePose(ovrEye_Right));
//calculate eye poses
OVRUtil.ovr_CalcEyePoses(hardware.getHeadPose(), hmdToEyeOffsets, hardware.getLayer0().RenderPose());
hmdToEyeOffsets.free();
for (int eye = 0; eye < 2; eye++) {
IntBuffer currentIndexB = BufferUtils.createIntBuffer(1);
ovr_GetTextureSwapChainCurrentIndex(session(), hardware.getChain(eye), currentIndexB);
int index = currentIndexB.get();
// Constantly (each frame) rotating through a series of
// frame buffers, so make sure we write into the correct one.
(eye == ovrEye_Left ? leftViewPort : rightViewPort).setOutputFrameBuffer(hardware.getFramebuffers(eye)[index]);
}
// Now the game will render into the buffers given to us by LibOVR
}
@Override
public void postRender() {
// We're done with our textures now - the game is done drawing into them.
for (int eye = 0; eye < 2; eye++) {
ovr_CommitTextureSwapChain(session(), hardware.getChain(eye));
}
// Send the result to the HMD
int result = ovr_SubmitFrame(session(), 0, null, hardware.getLayers());
if (result != ovrSuccess) {
throw new IllegalStateException("Failed to submit frame!");
}
}
/*
*********************************************************
* Show's over, now it's just boring camera stuff etc. *
*********************************************************
*/
/**
* Set up the cameras and views for each eye and the mirror display.
*/
private void setupCamerasAndViews() {
// TODO: Use LibOVR IPD etc.
if (environment != null) {
// get desired frustum from original camera
Camera origCam = environment.getCamera();
float fFar = origCam.getFrustumFar();
float fNear = origCam.getFrustumNear();
// restore frustum on distortion scene cam, if needed
if (environment.isInstanceRendering()) {
leftCamera = origCam;
} else {
leftCamera = origCam.clone();
}
OVRFovPort fp = hardware.getFovPort();
float hFov = fp.LeftTan() + fp.RightTan();
float vFov = fp.UpTan() + fp.DownTan();
getLeftCamera().setFrustumPerspective(hFov / FastMath.TWO_PI * 360, vFov / hFov, fNear, fFar);
prepareCameraSize(getLeftCamera(), 1f);
if (environment.getVRHardware() != null) {
getLeftCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionLeftEye(getLeftCamera()));
}
//org.lwjgl.opengl.GL11.glEnable(org.lwjgl.opengl.GL30.GL_FRAMEBUFFER_SRGB);
if (!environment.isInstanceRendering()) {
leftViewPort = setupViewBuffers(getLeftCamera(), LEFT_VIEW_NAME);
rightCamera = getLeftCamera().clone();
if (environment.getVRHardware() != null) {
getRightCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionRightEye(getRightCamera()));
}
rightViewPort = setupViewBuffers(getRightCamera(), RIGHT_VIEW_NAME);
} else if (environment.getApplication() != null) {
throw new UnsupportedOperationException("Not yet implemented!");
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
// setup gui
environment.getVRGUIManager().setupGui(getLeftCamera(), getRightCamera(), getLeftViewPort(), getRightViewPort());
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
private void prepareCameraSize(Camera cam, float xMult) {
// TODO this function is identical to that in VRViewManagerOpenVR; merge the two.
if (environment != null) {
if (environment.getApplication() != null) {
Vector2f size = new Vector2f();
VRAPI vrhmd = environment.getVRHardware();
if (vrhmd == null) {
size.x = 1280f;
size.y = 720f;
} else {
vrhmd.getRenderSize(size);
}
if (size.x < environment.getApplication().getContext().getSettings().getWidth()) {
size.x = environment.getApplication().getContext().getSettings().getWidth();
}
if (size.y < environment.getApplication().getContext().getSettings().getHeight()) {
size.y = environment.getApplication().getContext().getSettings().getHeight();
}
if (environment.isInstanceRendering()) {
size.x *= 2f;
}
// other adjustments
size.x *= xMult;
size.x *= getResolutionMuliplier();
size.y *= getResolutionMuliplier();
if (cam.getWidth() != size.x || cam.getHeight() != size.y) {
cam.resize((int) size.x, (int) size.y, false);
}
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
private ViewPort setupViewBuffers(Camera cam, String viewName) {
// TODO this function is identical to that in VRViewManagerOpenVR; merge the two.
if (environment != null) {
if (environment.getApplication() != null) {
ViewPort viewPort = environment.getApplication().getRenderManager().createPreView(viewName, cam);
viewPort.setClearFlags(true, true, true);
viewPort.setBackgroundColor(ColorRGBA.Black);
Iterator<Spatial> spatialIter = environment.getApplication().getViewPort().getScenes().iterator();
while (spatialIter.hasNext()) {
viewPort.attachScene(spatialIter.next());
}
// The viewbuffer to render into will be set during prerender.
return viewPort;
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
}