Merge pull request #618 from jseinturier/jme3-vr-dev

Deprecated VRApplication. Now VRAppState should be used.
fix-456
empirephoenix 8 years ago committed by GitHub
commit ed4e614722
79 changed files (changed line count in parentheses):
  1. jme3-vr/src/main/java/com/jme3/app/VRAppState.java (552)
  2. jme3-vr/src/main/java/com/jme3/app/VRApplication.java (35)
  3. jme3-vr/src/main/java/com/jme3/app/VREnvironment.java (485)
  4. jme3-vr/src/main/java/com/jme3/input/vr/OSVR.java (53)
  5. jme3-vr/src/main/java/com/jme3/input/vr/OSVRInput.java (47)
  6. jme3-vr/src/main/java/com/jme3/input/vr/OpenVR.java (37)
  7. jme3-vr/src/main/java/com/jme3/input/vr/OpenVRInput.java (191)
  8. jme3-vr/src/main/java/com/jme3/input/vr/VRAPI.java (13)
  9. jme3-vr/src/main/java/com/jme3/input/vr/VRInputAPI.java (7)
  10. jme3-vr/src/main/java/com/jme3/post/FilterUtil.java (2)
  11. jme3-vr/src/main/java/com/jme3/scene/CenterQuad.java (2)
  12. jme3-vr/src/main/java/com/jme3/shadow/AbstractShadowRendererVR.java (8)
  13. jme3-vr/src/main/java/com/jme3/shadow/DirectionalLightShadowRendererVR.java (8)
  14. jme3-vr/src/main/java/com/jme3/shadow/InstancedDirectionalShadowFilter.java (41)
  15. jme3-vr/src/main/java/com/jme3/system/lwjgl/LwjglWindowVR.java (2)
  16. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientkit/OsvrClientKitLibrary.java (2)
  17. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_AccelerationReport.java (2)
  18. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_AccelerationState.java (2)
  19. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_AnalogReport.java (2)
  20. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_AngularAccelerationReport.java (2)
  21. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_AngularVelocityReport.java (2)
  22. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_ButtonReport.java (2)
  23. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_DirectionReport.java (2)
  24. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_EyeTracker2DReport.java (2)
  25. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_EyeTracker3DReport.java (2)
  26. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_EyeTracker3DState.java (2)
  27. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_EyeTrackerBlinkReport.java (2)
  28. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_IncrementalQuaternion.java (2)
  29. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_LinearAccelerationReport.java (2)
  30. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_LinearVelocityReport.java (2)
  31. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_Location2DReport.java (2)
  32. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_NaviPositionReport.java (2)
  33. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_NaviVelocityReport.java (2)
  34. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_OrientationReport.java (2)
  35. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_Pose3.java (2)
  36. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_PoseReport.java (2)
  37. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_PositionReport.java (2)
  38. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_Quaternion.java (2)
  39. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_Vec2.java (2)
  40. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_Vec3.java (2)
  41. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_VelocityReport.java (2)
  42. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OSVR_VelocityState.java (2)
  43. jme3-vr/src/main/java/com/jme3/system/osvr/osvrclientreporttypes/OsvrClientReportTypesLibrary.java (2)
  44. jme3-vr/src/main/java/com/jme3/system/osvr/osvrdisplay/OsvrDisplayLibrary.java (5)
  45. jme3-vr/src/main/java/com/jme3/system/osvr/osvrinterface/OsvrInterfaceLibrary.java (8)
  46. jme3-vr/src/main/java/com/jme3/system/osvr/osvrmatrixconventions/OSVR_Pose3.java (2)
  47. jme3-vr/src/main/java/com/jme3/system/osvr/osvrmatrixconventions/OSVR_Quaternion.java (2)
  48. jme3-vr/src/main/java/com/jme3/system/osvr/osvrmatrixconventions/OSVR_Vec3.java (2)
  49. jme3-vr/src/main/java/com/jme3/system/osvr/osvrmatrixconventions/OsvrMatrixConventionsLibrary.java (2)
  50. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanager/OSVR_ProjectionMatrix.java (2)
  51. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanager/OSVR_RGB.java (2)
  52. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanager/OSVR_RenderParams.java (2)
  53. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanager/OSVR_ViewportDescription.java (2)
  54. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanager/OsvrRenderManagerLibrary.java (22)
  55. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OSVR_GraphicsLibraryOpenGL.java (6)
  56. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OSVR_OpenGLContextParams.java (2)
  57. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OSVR_OpenGLToolkitFunctions.java (2)
  58. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OSVR_OpenResultsOpenGL.java (2)
  59. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OSVR_ProjectionMatrix.java (2)
  60. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OSVR_RGB.java (2)
  61. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OSVR_RenderBufferOpenGL.java (2)
  62. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OSVR_RenderInfoOpenGL.java (4)
  63. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OSVR_RenderParams.java (2)
  64. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OSVR_ViewportDescription.java (2)
  65. jme3-vr/src/main/java/com/jme3/system/osvr/osvrrendermanageropengl/OsvrRenderManagerOpenGLLibrary.java (34)
  66. jme3-vr/src/main/java/com/jme3/system/osvr/osvrtimevalue/OSVR_TimeValue.java (2)
  67. jme3-vr/src/main/java/com/jme3/system/osvr/osvrtimevalue/OsvrTimeValueLibrary.java (2)
  68. jme3-vr/src/main/java/com/jme3/util/AbstractVRViewManager.java (223)
  69. jme3-vr/src/main/java/com/jme3/util/VRGUIPositioningMode.java (15)
  70. jme3-vr/src/main/java/com/jme3/util/VRGuiManager.java (474)
  71. jme3-vr/src/main/java/com/jme3/util/VRMouseManager.java (334)
  72. jme3-vr/src/main/java/com/jme3/util/VRUtil.java (7)
  73. jme3-vr/src/main/java/com/jme3/util/VRViewManager.java (156)
  74. jme3-vr/src/main/java/com/jme3/util/VRViewManagerOSVR.java (957)
  75. jme3-vr/src/main/java/com/jme3/util/VRViewManagerOpenVR.java (732)
  76. jme3-vr/src/main/java/jmevr/util/MeshUtil.java (107)
  77. jme3-vr/src/main/java/jmevr/util/VRGuiManager.java (334)
  78. jme3-vr/src/main/java/jmevr/util/VRMouseManager.java (234)
  79. jme3-vr/src/main/java/jmevr/util/VRViewManager.java (863)
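As the description says, VRApplication is deprecated in favour of a VRAppState backed by the new VREnvironment class added in this commit. Below is a minimal migration sketch using the class and setting names visible in the diff; the SimpleApplication varargs constructor and the exact bootstrap order are assumptions of this sketch, not something this commit prescribes.

import com.jme3.app.SimpleApplication;
import com.jme3.app.VRAppState;
import com.jme3.app.VRConstants;
import com.jme3.app.VREnvironment;
import com.jme3.system.AppSettings;

public class VRAppStateMigration extends SimpleApplication {

    public VRAppStateMigration(VRAppState vrAppState) {
        // SimpleApplication accepts initial app states in its varargs constructor.
        super(vrAppState);
    }

    @Override
    public void simpleInitApp() {
        // Scene setup goes here, exactly as in a non-VR jME application.
    }

    public static void main(String[] args) {
        AppSettings settings = new AppSettings(true);
        // Select the OpenVR binding (the default); see VREnvironment.processSettings() below.
        settings.put(VRConstants.SETTING_VRAPI, VRConstants.SETTING_VRAPI_OPENVR_VALUE);

        // New path: build and initialize the environment, then hand it to the app state.
        VREnvironment environment = new VREnvironment(settings);
        environment.initialize();

        VRAppState vrAppState = new VRAppState(settings, environment);

        VRAppStateMigration app = new VRAppStateMigration(vrAppState);
        app.setSettings(settings);
        app.start();
    }
}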

@ -1,23 +1,53 @@
package com.jme3.app;
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.jme3.app.Application;
import com.jme3.app.state.AbstractAppState;
import com.jme3.app.state.AppStateManager;
import com.jme3.input.vr.OSVR;
import com.jme3.input.vr.OpenVR;
import com.jme3.input.vr.VRAPI;
import com.jme3.input.vr.VRInputAPI;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.post.PreNormalCaching;
import com.jme3.renderer.Camera;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Spatial;
import com.jme3.system.AppSettings;
import com.jme3.system.jopenvr.JOpenVRLibrary;
import com.jme3.util.VRGUIPositioningMode;
import com.jme3.util.VRGuiManager;
import com.jme3.util.VRMouseManager;
import com.jme3.util.VRViewManager;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
@ -30,11 +60,6 @@ import java.util.Locale;
import java.util.logging.Level;
import java.util.logging.Logger;
import jmevr.util.VRGuiManager;
import jmevr.util.VRMouseManager;
import jmevr.util.VRViewManager;
import jmevr.util.VRGuiManager.POSITIONING_MODE;
/**
* A JMonkey app state dedicated to Virtual Reality.
* An application that wants to use VR devices (HTC Vive, ...) has to use this app state.<br>
@ -53,50 +78,20 @@ public class VRAppState extends AbstractAppState {
private static final Logger logger = Logger.getLogger(VRAppState.class.getName());
/**
* The underlying system VR API. By default set to {@link VRConstants#SETTING_VRAPI_OPENVR_VALUE}.
*/
public int vrBinding = VRConstants.SETTING_VRAPI_OPENVR_VALUE;
/**
* If <code>true</code>, the application does not start in VR mode (default is <code>false</code>).
*/
public boolean DISABLE_VR = false;
private VRAPI VRhardware = null;
private VRGuiManager guiManager = null;
private VRMouseManager mouseManager = null;
private VRViewManager viewmanager = null;
private String OS;
private Camera dummyCam;
private Spatial observer = null;
private boolean VRSupportedOS;
private boolean forceVR = false;
private boolean disableSwapBuffers = true;
private boolean disableVR = false;
private boolean seated;
private boolean nogui;
private boolean instanceVR = false;
private float defaultFOV = 108f;
private float defaultAspect = 1f;
private float fFar = 1000f;
private float fNear = 0.1f;
private int xWin = 1920;
private int yWin = 1080;
private float resMult = 1f;
private boolean useCompositor = true;
private boolean compositorOS;
/*
where is the headset pointing, after all rotations are combined?
depends on observer rotation, if any
@ -107,32 +102,27 @@ public class VRAppState extends AbstractAppState {
private AppStateManager stateManager = null;
private AppSettings settings = null;
private VREnvironment environment = null;
/**
* Create a new default VR app state.
* Create a new default VR app state that relies on the given {@link VREnvironment VR environment}.
* @param environment the {@link VREnvironment VR environment} that this app state is using.
*/
public VRAppState() {
super();
public VRAppState(VREnvironment environment) {
super();
dummyCam = new Camera();
// Create the GUI manager.
guiManager = new VRGuiManager();
// Create a new view manager.
viewmanager = new VRViewManager();
// Create a new mouse manager.
mouseManager = new VRMouseManager();
this.environment = environment;
this.setSettings(environment.getSettings());
}
/**
* Create a new VR app state with given settings.
* Create a new VR app state with the given settings. The app state relies on the given {@link VREnvironment VR environment}.
* @param settings the settings to use.
* @param environment the {@link VREnvironment VR environment} that this app state is using.
*/
public VRAppState(AppSettings settings){
this();
public VRAppState(AppSettings settings, VREnvironment environment){
this(environment);
this.settings = settings;
processSettings(settings);
}
@ -152,7 +142,7 @@ public class VRAppState extends AbstractAppState {
* @param renderManager the {@link RenderManager render manager}.
*/
public void simpleRender(RenderManager renderManager) {
PreNormalCaching.resetCache(isInVR());
PreNormalCaching.resetCache(environment.isInVR());
}
/**
@ -181,163 +171,18 @@ public class VRAppState extends AbstractAppState {
*/
public void setResolutionMultiplier(float val) {
resMult = val;
if( viewmanager != null ){
viewmanager.setResolutionMultiplier(resMult);
if( environment.getVRViewManager() != null ){
environment.getVRViewManager().setResolutionMultiplier(resMult);
}
}
/**
* Check if the VR compositor is active.
* @return <code>true</code> if the VR compositor is active and <code>false</code> otherwise.
*/
public boolean compositorAllowed() {
return useCompositor && compositorOS;
}
/**
* Check if the system currently supports VR.
* @return <code>true</code> if the system currently supports VR and <code>false</code> otherwise.
*/
public boolean isVRSupported() {
return VRSupportedOS;
}
/**
* Get the {@link Camera camera} attached to this application state.
* If the VR mode is {@link #isInVR() active}, this method returns a dummy camera; otherwise,
* this method returns the camera of the attached application.
* @return the camera attached to this application state.
*/
public Camera getCamera() {
if( isInVR() && viewmanager != null && viewmanager.getLeftCamera() != null ) {
return dummyCam;
}
return application.getCamera();
}
/**
* Can be used to change seated experience during runtime.
* @param isSeated <code>true</code> if designed for sitting, <code>false</code> for standing/roomscale
* @see #isSeatedExperience()
*/
public void setSeatedExperience(boolean isSeated) {
seated = isSeated;
if( VRhardware instanceof OpenVR ) {
if( VRhardware.getCompositor() == null ) return;
if( seated ) {
((OpenVR)VRhardware).getCompositor().SetTrackingSpace.apply(JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseSeated);
} else {
((OpenVR)VRhardware).getCompositor().SetTrackingSpace.apply(JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseStanding);
}
}
}
/**
* Check if the application is configured as a seated experience.
* @return <code>true</code> if the application is configured as a seated experience and <code>false</code> otherwise.
* @see #setSeatedExperience(boolean)
*/
public boolean isSeatedExperience() {
return seated;
}
/**
* Reset the headset pose if this is a seated experience.
*/
public void resetSeatedPose(){
if( VRSupportedOS == false || isSeatedExperience() == false ) return;
VRhardware.reset();
}
/**
* Check if the rendering is instanced (see <a href="https://en.wikipedia.org/wiki/Geometry_instancing">Geometry instancing</a>).
* @return <code>true</code> if the rendering is instanced and <code>false</code> otherwise.
*/
public boolean isInstanceVRRendering() {
return instanceVR && isInVR();
}
/**
* Check if the VR mode is enabled.
* @return <code>true</code> if the VR mode is enabled and <code>false</code> otherwise.
*/
public boolean isInVR() {
return DISABLE_VR == false && (forceVR || VRSupportedOS && VRhardware != null && VRhardware.isInitialized());
}
/**
* Get the default Field Of View (FOV) value.
* @return the default Field Of View (FOV) value.
* @see #setDefaultFOV(float)
*/
public float getDefaultFOV() {
return defaultFOV;
}
/**
* Set the default Field Of View (FOV) value.
* @param defaultFOV the default Field Of View (FOV) value.
* @see #getDefaultFOV()
*/
public void setDefaultFOV(float defaultFOV) {
this.defaultFOV = defaultFOV;
}
/**
* Get the default aspect ratio.
* @return the default aspect ratio.
* @see #setDefaultAspect(float)
*/
public float getDefaultAspect() {
return defaultAspect;
}
/**
* Set the default aspect ratio.
* @param defaultAspect the default aspect ratio.
* @see #getDefaultAspect()
*/
public void setDefaultAspect(float defaultAspect) {
this.defaultAspect = defaultAspect;
}
/**
* Move filters from the main scene into the eye viewports.
* This removes filters from the main scene.
*/
public void moveScreenProcessingToVR() {
if( isInVR() ) {
viewmanager.moveScreenProcessingToEyes();
}
}
/**
* Check if the application has a GUI overlay attached.
* @return <code>true</code> if the application has a GUI overlay attached and <code>false</code> otherwise.
*/
public boolean hasTraditionalGUIOverlay() {
return !nogui;
}
/**
* Get the scene observer. If no observer has been set, this method returns the application {@link #getCamera() camera}.
* @return the scene observer.
* @see #setObserver(Spatial)
*/
public Object getObserver() {
if( observer == null ) {
return getCamera();
}
return observer;
}
/**
* Set the scene observer. The VR headset will be linked to it. If no observer is set, the VR headset is linked to the application {@link #getCamera() camera}.
* @param observer the scene observer.
*/
public void setObserver(Spatial observer) {
this.observer = observer;
environment.getVRViewManager().moveScreenProcessingToEyes();
}
/**
@ -346,17 +191,20 @@ public class VRAppState extends AbstractAppState {
* @see #getFinalObserverPosition()
*/
public Quaternion getFinalObserverRotation() {
if( viewmanager == null ) {
if( observer == null ) {
return getCamera().getRotation();
} else return observer.getWorldRotation();
}
if( observer == null ) {
tempq.set(dummyCam.getRotation());
if( environment.getVRViewManager() == null ) {
if( environment.getObserver() == null ) {
return environment.getCamera().getRotation();
} else {
return ((Spatial)environment.getObserver()).getWorldRotation();
}
}
if( environment.getObserver() == null ) {
tempq.set(environment.getDummyCamera().getRotation());
} else {
tempq.set(observer.getWorldRotation());
tempq.set(((Spatial)environment.getObserver()).getWorldRotation());
}
return tempq.multLocal(VRhardware.getOrientation());
return tempq.multLocal(environment.getVRHardware().getOrientation());
}
/**
@ -365,50 +213,35 @@ public class VRAppState extends AbstractAppState {
* @see #getFinalObserverRotation()
*/
public Vector3f getFinalObserverPosition() {
if( viewmanager == null ) {
if( observer == null ) {
return getCamera().getLocation();
} else return observer.getWorldTranslation();
if( environment.getVRViewManager() == null ) {
if( environment.getObserver() == null ) {
return environment.getCamera().getLocation();
} else{
return ((Spatial)environment.getObserver()).getWorldTranslation();
}
}
Vector3f pos = VRhardware.getPosition();
if( observer == null ) {
dummyCam.getRotation().mult(pos, pos);
return pos.addLocal(dummyCam.getLocation());
Vector3f pos = environment.getVRHardware().getPosition();
if( environment.getObserver() == null ) {
environment.getDummyCamera().getRotation().mult(pos, pos);
return pos.addLocal(environment.getDummyCamera().getLocation());
} else {
observer.getWorldRotation().mult(pos, pos);
return pos.addLocal(observer.getWorldTranslation());
((Spatial)environment.getObserver()).getWorldRotation().mult(pos, pos);
return pos.addLocal(((Spatial)environment.getObserver()).getWorldTranslation());
}
}
/**
* Set the VR headset height from the ground.
* @param amount the VR headset height from the ground.
* @see #getVRHeightAdjustment()
*/
public void setVRHeightAdjustment(float amount) {
if( viewmanager != null ) viewmanager.setHeightAdjustment(amount);
}
/**
* Get the VR headset height from the ground.
* @return the VR headset height from the ground.
* @see #setVRHeightAdjustment(float)
*/
public float getVRHeightAdjustment() {
if( viewmanager != null ){
return viewmanager.getHeightAdjustment();
}
return 0f;
}
/**
* Get the VR headset left viewport.
* @return the VR headset left viewport.
* @see #getRightViewPort()
*/
public ViewPort getLeftViewPort() {
if( viewmanager == null ) return application.getViewPort();
return viewmanager.getLeftViewport();
if( environment.getVRViewManager() == null ){
return application.getViewPort();
}
return environment.getVRViewManager().getLeftViewport();
}
/**
@ -417,8 +250,10 @@ public class VRAppState extends AbstractAppState {
* @see #getLeftViewPort()
*/
public ViewPort getRightViewPort() {
if( viewmanager == null ) return application.getViewPort();
return viewmanager.getRightViewport();
if( environment.getVRViewManager() == null ){
return application.getViewPort();
}
return environment.getVRViewManager().getRightViewport();
}
/**
@ -426,11 +261,15 @@ public class VRAppState extends AbstractAppState {
* @param clr the background color.
*/
public void setBackgroundColors(ColorRGBA clr) {
if( viewmanager == null ) {
if( environment.getVRViewManager() == null ) {
application.getViewPort().setBackgroundColor(clr);
} else if( viewmanager.getLeftViewport() != null ) {
viewmanager.getLeftViewport().setBackgroundColor(clr);
if( viewmanager.getRightViewport() != null ) viewmanager.getRightViewport().setBackgroundColor(clr);
} else if( environment.getVRViewManager().getLeftViewport() != null ) {
environment.getVRViewManager().getLeftViewport().setBackgroundColor(clr);
if( environment.getVRViewManager().getRightViewport() != null ){
environment.getVRViewManager().getRightViewport().setBackgroundColor(clr);
}
}
}
@ -452,12 +291,45 @@ public class VRAppState extends AbstractAppState {
return stateManager;
}
/**
* Get the scene observer. If no observer has been set, this method returns the application {@link #getCamera() camera}.
* @return the scene observer.
* @see #setObserver(Spatial)
*/
public Object getObserver() {
return environment.getObserver();
}
/**
* Set the scene observer. The VR headset will be linked to it. If no observer is set, the VR headset is linked to the application {@link #getCamera() camera}.
* @param observer the scene observer.
*/
public void setObserver(Spatial observer) {
environment.setObserver(observer);
}
/**
* Check if the rendering is instanced (see <a href="https://en.wikipedia.org/wiki/Geometry_instancing">Geometry instancing</a>).
* @return <code>true</code> if the rendering is instanced and <code>false</code> otherwise.
*/
public boolean isInstanceRendering() {
return environment.isInstanceRendering();
}
/**
* Return the {@link VREnvironment VR environment} on which this app state relies.
* @return the {@link VREnvironment VR environment} on which this app state relies.
*/
public VREnvironment getVREnvironment(){
return environment;
}
/**
* Get the VR underlying hardware.
* @return the VR underlying hardware.
*/
public VRAPI getVRHardware() {
return VRhardware;
return getVREnvironment().getVRHardware();
}
/**
@ -465,11 +337,11 @@ public class VRAppState extends AbstractAppState {
* @return the VR dedicated input.
*/
public VRInputAPI getVRinput() {
if( VRhardware == null ){
if( getVREnvironment().getVRHardware() == null ){
return null;
}
return VRhardware.getVRinput();
return getVREnvironment().getVRHardware().getVRinput();
}
/**
@ -477,23 +349,23 @@ public class VRAppState extends AbstractAppState {
* @return the VR view manager.
*/
public VRViewManager getVRViewManager() {
return viewmanager;
return getVREnvironment().getVRViewManager();
}
/**
* Get the GUI manager attached to this application.
* @return the GUI manager attached to this application.
* Get the GUI manager attached to this app state.
* @return the GUI manager attached to this app state.
*/
public VRGuiManager getVRGUIManager(){
return guiManager;
return getVREnvironment().getVRGUIManager();
}
/**
* Get the VR mouse manager attached to this application.
* Get the VR mouse manager attached to this app state.
* @return the VR mouse manager attached to this application.
*/
public VRMouseManager getVRMouseManager(){
return mouseManager;
return getVREnvironment().getVRMouseManager();
}
/**
@ -519,10 +391,10 @@ public class VRAppState extends AbstractAppState {
public void update(float tpf) {
// update VR pose & cameras
if( viewmanager != null ) {
viewmanager.update(tpf);
} else if( observer != null ) {
getCamera().setFrame(observer.getWorldTranslation(), observer.getWorldRotation());
if( environment.getVRViewManager() != null ) {
environment.getVRViewManager().update(tpf);
} else if( environment.getObserver() != null ) {
environment.getCamera().setFrame(((Spatial)environment.getObserver()).getWorldTranslation(), ((Spatial)environment.getObserver()).getWorldRotation());
}
//FIXME: check if this code is necessary.
@ -535,7 +407,7 @@ public class VRAppState extends AbstractAppState {
spatial.updateGeometricState();
}
if( isInVR() == false || guiManager.getPositioningMode() == POSITIONING_MODE.MANUAL ) {
if( environment.isInVR() == false || environment.getVRGUIManager().getPositioningMode() == VRGUIPositioningMode.MANUAL ) {
// only update geometric state here if GUI is in manual mode, or not in VR
// it will get updated automatically in the viewmanager update otherwise
spatialIter = application.getGuiViewPort().getScenes().iterator();
@ -546,17 +418,17 @@ public class VRAppState extends AbstractAppState {
}
}
// use the analog control on the first tracked controller to push around the mouse
getVRMouseManager().updateAnalogAsMouse(0, null, null, null, tpf);
environment.getVRMouseManager().updateAnalogAsMouse(0, null, null, null, tpf);
}
@Override
public void postRender() {
super.postRender();
// update compositor?
if( viewmanager != null ) {
viewmanager.sendTextures();
// update compositor
if( environment.getVRViewManager() != null ) {
environment.getVRViewManager().postRender();
}
}
@ -571,27 +443,21 @@ public class VRAppState extends AbstractAppState {
// for late GUI placement for VR purposes
Logger.getLogger("com.jme3").setLevel(Level.SEVERE);
// VR module attach
guiManager.attach(this, app);
viewmanager.attach(this, app);
mouseManager.attach(this, app);
app.getCamera().setFrustumFar(fFar);
app.getCamera().setFrustumNear(fNear);
dummyCam = app.getCamera().clone();
if( isInVR() ) {
if( environment.isInVR() ) {
logger.config("VR mode enabled.");
if( VRhardware != null ) {
VRhardware.initVRCompositor(compositorAllowed());
if( environment.getVRHardware() != null ) {
environment.getVRHardware().initVRCompositor(environment.compositorAllowed());
} else {
logger.warning("No VR system found.");
}
viewmanager.setResolutionMultiplier(resMult);
environment.getVRViewManager().setResolutionMultiplier(resMult);
//inputManager.addMapping(RESET_HMD, new KeyTrigger(KeyInput.KEY_F9));
//setLostFocusBehavior(LostFocusBehavior.Disabled);
} else {
@ -600,8 +466,8 @@ public class VRAppState extends AbstractAppState {
//guiViewPort.attachScene(guiNode);
}
if( viewmanager != null ) {
viewmanager.initialize();
if( environment.getVRViewManager() != null ) {
environment.getVRViewManager().initialize();
}
}
@ -615,34 +481,21 @@ public class VRAppState extends AbstractAppState {
} else {
logger.config("Using given settings.");
}
// Attach VR environment to the application
if (!environment.isInitialized()){
environment.initialize();
}
// we are going to use OpenVR now, not the Oculus Rift
// OpenVR does support the Rift
OS = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH);
VRSupportedOS = !OS.contains("nux") && System.getProperty("sun.arch.data.model").equalsIgnoreCase("64"); //for the moment, linux/unix causes crashes, 64-bit only
compositorOS = OS.contains("indows");
if( VRSupportedOS && disableVR == false ) {
if( vrBinding == VRConstants.SETTING_VRAPI_OSVR_VALUE ) {
VRhardware = new OSVR(this);
logger.config("Creating OSVR wrapper [SUCCESS]");
} else if( vrBinding == VRConstants.SETTING_VRAPI_OPENVR_VALUE ) {
VRhardware = new OpenVR(this);
logger.config("Creating OpenVR wrapper [SUCCESS]");
} else {
logger.config("Cannot create VR binding: "+vrBinding+" [FAILED]");
}
if( VRhardware.initialize() ) {
logger.config("VR native wrapper initialized [SUCCESS]");
} else {
logger.warning("VR native wrapper initialized [FAILED]");
}
}
if (environment.isInitialized()){
environment.atttach(this, stateManager.getApplication());
} else {
logger.severe("Cannot attach VR environment to the VR app state as its not initialized.");
}
GraphicsDevice defDev = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice();
if( isInVR() && !compositorAllowed() ) {
if( environment.isInVR() && !environment.compositorAllowed() ) {
// "easy extended" mode
// setup experimental JFrame on external device
// first, find the VR device
@ -694,14 +547,14 @@ public class VRAppState extends AbstractAppState {
logger.config("Cannot access to external screen.");
}
} else {
if (!isInVR()){
if (!environment.isInVR()){
logger.config("Cannot switch to VR mode (VR disabled by user).");
} else if (!compositorAllowed()){
} else if (!environment.compositorAllowed()){
logger.warning("Cannot switch to VR mode (VR not supported).");
}
}
if( !isInVR() ) {
if( !environment.isInVR() ) {
//FIXME: Handling GLFW workaround on MacOS
boolean macOs = false;
@ -742,10 +595,10 @@ public class VRAppState extends AbstractAppState {
settings.setHeight(yWin);
settings.setBitsPerPixel(32);
settings.setFrameRate(0);
settings.setFrequency(VRhardware.getDisplayFrequency());
settings.setFrequency(environment.getVRHardware().getDisplayFrequency());
settings.setFullscreen(false);
settings.setVSync(false); // stop vsyncing on primary monitor!
settings.setSwapBuffers(disableSwapBuffers);
settings.setSwapBuffers(environment.isSwapBuffers());
}
// Updating application settings
@ -756,11 +609,9 @@ public class VRAppState extends AbstractAppState {
@Override
public void cleanup() {
if( VRhardware != null ) {
VRhardware.destroy();
VRhardware = null;
if( environment.getVRHardware() != null ) {
environment.getVRHardware().destroy();
}
disableVR = true;
this.application = null;
this.stateManager = null;
@ -777,67 +628,10 @@ public class VRAppState extends AbstractAppState {
*/
protected void processSettings(AppSettings settings){
if (settings != null){
if (settings.get(VRConstants.SETTING_USE_COMPOSITOR) != null){
useCompositor = settings.getBoolean(VRConstants.SETTING_USE_COMPOSITOR);
if( useCompositor == false ){
disableSwapBuffers = false;
}
}
if (settings.get(VRConstants.SETTING_VR_FORCE) != null){
forceVR = settings.getBoolean(VRConstants.SETTING_VR_FORCE);
}
if (settings.get(VRConstants.SETTING_FLIP_EYES) != null){
if( VRhardware != null ){
VRhardware._setFlipEyes(settings.getBoolean(VRConstants.SETTING_FLIP_EYES));
}
}
if (settings.get(VRConstants.SETTING_GUI_OVERDRAW) != null){
guiManager._enableGuiOverdraw(settings.getBoolean(VRConstants.SETTING_GUI_OVERDRAW));
}
if (settings.get(VRConstants.SETTING_GUI_CURVED_SURFACE) != null){
guiManager._enableCurvedSuface(settings.getBoolean(VRConstants.SETTING_GUI_CURVED_SURFACE));
}
if (settings.get(VRConstants.SETTING_ENABLE_MIRROR_WINDOW) != null){
if( useCompositor == false ) {
disableSwapBuffers = false;
} else {
disableSwapBuffers = !settings.getBoolean(VRConstants.SETTING_ENABLE_MIRROR_WINDOW);
}
}
if (settings.get(VRConstants.SETTING_DISABLE_VR) != null){
DISABLE_VR = settings.getBoolean(VRConstants.SETTING_DISABLE_VR);
}
if (settings.get(VRConstants.SETTING_SEATED_EXPERIENCE) != null){
seated = settings.getBoolean(VRConstants.SETTING_SEATED_EXPERIENCE);
}
if (settings.get(VRConstants.SETTING_NO_GUI) != null){
nogui = settings.getBoolean(VRConstants.SETTING_NO_GUI);
}
if (settings.get(VRConstants.SETTING_INSTANCE_RENDERING) != null){
instanceVR = settings.getBoolean(VRConstants.SETTING_INSTANCE_RENDERING);
}
if (settings.get(VRConstants.SETTING_DEFAULT_FOV) != null){
defaultFOV = settings.getFloat(VRConstants.SETTING_DEFAULT_FOV);
}
if (settings.get(VRConstants.SETTING_DEFAULT_ASPECT_RATIO) != null){
defaultAspect = settings.getFloat(VRConstants.SETTING_DEFAULT_ASPECT_RATIO);
}
if (settings.get(VRConstants.SETTING_VRAPI) != null){
vrBinding = settings.getInteger(VRConstants.SETTING_VRAPI);
}
}
}
}
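The processSettings(AppSettings) block above (most of it moves into VREnvironment later in this diff) is driven by string keys defined in VRConstants. A minimal sketch of populating those keys before creating the environment; the value types are inferred from the getBoolean/getFloat/getInteger calls in the source, so treat them as assumptions.

import com.jme3.app.VRConstants;
import com.jme3.system.AppSettings;

public class VRSettingsSketch {

    /** Build AppSettings carrying the VR keys read by processSettings(). */
    static AppSettings buildVRSettings() {
        AppSettings settings = new AppSettings(true);
        settings.put(VRConstants.SETTING_USE_COMPOSITOR, true);       // read with getBoolean
        settings.put(VRConstants.SETTING_ENABLE_MIRROR_WINDOW, true); // mirror window on the desktop
        settings.put(VRConstants.SETTING_SEATED_EXPERIENCE, false);   // standing/roomscale
        settings.put(VRConstants.SETTING_NO_GUI, false);              // keep the traditional GUI overlay
        settings.put(VRConstants.SETTING_DEFAULT_FOV, 108f);          // read with getFloat
        settings.put(VRConstants.SETTING_VRAPI, VRConstants.SETTING_VRAPI_OPENVR_VALUE);
        return settings;
    }
}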

@ -45,10 +45,13 @@ import com.jme3.system.SystemListener;
import com.jme3.system.Timer;
import com.jme3.system.lwjgl.LwjglDisplayVR;
import com.jme3.system.lwjgl.LwjglOffscreenBufferVR;
import com.jme3.util.VRGUIPositioningMode;
import com.jme3.util.VRGuiManager;
import com.jme3.util.VRMouseManager;
import com.jme3.util.VRViewManagerOpenVR;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
import java.awt.HeadlessException;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
@ -61,18 +64,16 @@ import java.util.concurrent.Future;
import java.util.logging.Level;
import java.util.logging.Logger;
import jmevr.util.VRViewManager;
import jmevr.util.VRGuiManager;
import jmevr.util.VRGuiManager.POSITIONING_MODE;
import jmevr.util.VRMouseManager;
import org.lwjgl.system.Platform;
/**
* A JMonkey application dedicated to Virtual Reality. An application that uses VR devices (HTC Vive, ...) has to extend this one.<br>
* <p>
* <b>This class is no longer functional and is deprecated. Please use {@link VRAppState VRAppState} instead.</b>
* @author reden - phr00t - https://github.com/phr00t
* @author Julien Seinturier - (c) 2016 - JOrigin project - <a href="http://www.jorigin.org">http:/www.jorigin.org</a>
* @deprecated use {@link VRAppState VRAppState} instead.
*/
public abstract class VRApplication implements Application, SystemListener {
@ -171,7 +172,7 @@ public abstract class VRApplication implements Application, SystemListener {
private VRAPI VRhardware = null;
private VRGuiManager guiManager = null;
private VRMouseManager mouseManager = null;
private VRViewManager viewmanager = null;
private VRViewManagerOpenVR viewmanager = null;
private String OS;
@ -255,13 +256,13 @@ public abstract class VRApplication implements Application, SystemListener {
initStateManager();
// Create the GUI manager.
guiManager = new VRGuiManager();
guiManager = new VRGuiManager(null);
// Create a new view manager.
viewmanager = new VRViewManager();
viewmanager = new VRViewManagerOpenVR(null);
// Create a new mouse manager.
mouseManager = new VRMouseManager();
mouseManager = new VRMouseManager(null);
// we are going to use OpenVR now, not the Oculus Rift
// OpenVR does support the Rift
@ -310,7 +311,7 @@ public abstract class VRApplication implements Application, SystemListener {
* Get the VR view manager.
* @return the VR view manager.
*/
public VRViewManager getVRViewManager() {
public VRViewManagerOpenVR getVRViewManager() {
return viewmanager;
}
@ -841,10 +842,10 @@ public abstract class VRApplication implements Application, SystemListener {
public void preconfigureVRApp(PreconfigParameter parm, boolean value) {
switch( parm ) {
case SET_GUI_OVERDRAW:
guiManager._enableGuiOverdraw(value);
guiManager.setGuiOverdraw(value);
break;
case SET_GUI_CURVED_SURFACE:
guiManager._enableCurvedSuface(value);
guiManager.setCurvedSurface(value);
break;
case FORCE_VR_MODE:
forceVR = value;
@ -858,7 +859,7 @@ public abstract class VRApplication implements Application, SystemListener {
break;
case FLIP_EYES:
if( VRhardware == null ) return;
VRhardware._setFlipEyes(value);
VRhardware.setFlipEyes(value);
break;
case INSTANCE_VR_RENDERING:
instanceVR = value;
@ -1179,7 +1180,7 @@ public abstract class VRApplication implements Application, SystemListener {
rootNode.updateGeometricState();
if( isInVR() == false || guiManager.getPositioningMode() == POSITIONING_MODE.MANUAL ) {
if( isInVR() == false || guiManager.getPositioningMode() == VRGUIPositioningMode.MANUAL ) {
// only update geometric state here if GUI is in manual mode, or not in VR
// it will get updated automatically in the viewmanager update otherwise
guiNode.updateGeometricState();
@ -1191,7 +1192,7 @@ public abstract class VRApplication implements Application, SystemListener {
// update compositor?
if( viewmanager != null ) {
viewmanager.sendTextures();
viewmanager.postRender();
}
}
@ -1358,7 +1359,7 @@ public abstract class VRApplication implements Application, SystemListener {
}
//FIXME: WARNING !!
viewmanager = new VRViewManager();
viewmanager = new VRViewManagerOpenVR(null);
viewmanager.setResolutionMultiplier(resMult);
inputManager.addMapping(RESET_HMD, new KeyTrigger(KeyInput.KEY_F9));
setLostFocusBehavior(LostFocusBehavior.Disabled);
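This file's diff also shows the renaming of the underscore-prefixed hooks (_enableGuiOverdraw, _enableCurvedSuface, _setFlipEyes) to setGuiOverdraw, setCurvedSurface and setFlipEyes. A minimal sketch, assuming an already created VREnvironment, of applying the same preferences through the environment's managers instead of VRApplication.preconfigureVRApp:

import com.jme3.app.VREnvironment;

public class VRPreconfigSketch {

    /** Apply GUI and eye preferences through the environment's managers. */
    static void preconfigure(VREnvironment env) {
        env.getVRGUIManager().setGuiOverdraw(true);    // previously _enableGuiOverdraw(true)
        env.getVRGUIManager().setCurvedSurface(true);  // previously _enableCurvedSuface(true)
        if (env.getVRHardware() != null) {
            env.getVRHardware().setFlipEyes(false);    // previously _setFlipEyes(false)
        }
    }
}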

@ -0,0 +1,485 @@
package com.jme3.app;
import java.util.Locale;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.jme3.app.state.AppState;
import com.jme3.input.vr.OSVR;
import com.jme3.input.vr.OpenVR;
import com.jme3.input.vr.VRAPI;
import com.jme3.input.vr.VRInputAPI;
import com.jme3.renderer.Camera;
import com.jme3.scene.Spatial;
import com.jme3.system.AppSettings;
import com.jme3.system.jopenvr.JOpenVRLibrary;
import com.jme3.util.VRGuiManager;
import com.jme3.util.VRMouseManager;
import com.jme3.util.VRViewManager;
import com.jme3.util.VRViewManagerOSVR;
import com.jme3.util.VRViewManagerOpenVR;
public class VREnvironment {
private static final Logger logger = Logger.getLogger(VREnvironment.class.getName());
private VRAPI hardware = null;
private VRGuiManager guiManager = null;
private VRMouseManager mouseManager = null;
private VRViewManager viewmanager = null;
/**
* The underlying system VR API. By default set to {@link VRConstants#SETTING_VRAPI_OPENVR_VALUE}.
*/
public int vrBinding = VRConstants.SETTING_VRAPI_OPENVR_VALUE;
private boolean seated = false;
private Spatial observer = null;
private boolean forceVR = false;
private boolean vrSupportedOS = false;
private boolean nogui = false;
private boolean compositorOS;
private boolean useCompositor = true;
private boolean instanceRendering = false;
private boolean disableSwapBuffers = true;
private float defaultFOV = 108f;
private float defaultAspect = 1f;
private AppSettings settings = null;
private Application application = null;
private Camera dummyCam = null;
private AppState app = null;
private boolean initialized = false;
private boolean attached = false;
public VREnvironment(AppSettings settings){
this.settings = settings;
guiManager = new VRGuiManager(this);
mouseManager = new VRMouseManager(this);
dummyCam = new Camera();
processSettings();
}
/**
* Get the VR underlying hardware.
* @return the VR underlying hardware.
*/
public VRAPI getVRHardware() {
return hardware;
}
/**
* Get the VR dedicated input.
* @return the VR dedicated input.
*/
public VRInputAPI getVRinput() {
if( hardware == null ){
return null;
}
return hardware.getVRinput();
}
/**
* Get the VR view manager.
* @return the VR view manager.
*/
public VRViewManager getVRViewManager() {
return viewmanager;
}
/**
* Get the GUI manager attached to this environment.
* @return the GUI manager attached to this environment.
*/
public VRGuiManager getVRGUIManager(){
return guiManager;
}
/**
* Get the VR mouse manager attached to this environment.
* @return the VR mouse manager attached to this environment.
*/
public VRMouseManager getVRMouseManager(){
return mouseManager;
}
/**
* Can be used to change seated experience during runtime.
* @param isSeated <code>true</code> if designed for sitting, <code>false</code> for standing/roomscale
* @see #isSeatedExperience()
*/
public void setSeatedExperience(boolean isSeated) {
seated = isSeated;
if( hardware instanceof OpenVR ) {
if( hardware.getCompositor() == null ) {
return;
}
if( seated ) {
((OpenVR)hardware).getCompositor().SetTrackingSpace.apply(JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseSeated);
} else {
((OpenVR)hardware).getCompositor().SetTrackingSpace.apply(JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseStanding);
}
}
}
/**
* Check if the application is configured as a seated experience.
* @return <code>true</code> if the application is configured as a seated experience and <code>false</code> otherwise.
* @see #setSeatedExperience(boolean)
*/
public boolean isSeatedExperience() {
return seated;
}
/**
* Set the VR headset height from the ground.
* @param amount the VR headset height from the ground.
* @see #getVRHeightAdjustment()
*/
public void setVRHeightAdjustment(float amount) {
if( viewmanager != null ){
viewmanager.setHeightAdjustment(amount);
}
}
/**
* Get the VR headset height from the ground.
* @return the VR headset height from the ground.
* @see #setVRHeightAdjustment(float)
*/
public float getVRHeightAdjustment() {
if( viewmanager != null ){
return viewmanager.getHeightAdjustment();
}
return 0f;
}
/**
* Get the scene observer. If no observer has been set, this method returns the application {@link #getCamera() camera}.
* @return the scene observer.
* @see #setObserver(Spatial)
*/
public Object getObserver() {
if( observer == null ) {
if (application != null){
return application.getCamera();
} else {
throw new IllegalStateException("VR environment is not attached to any application.");
}
}
return observer;
}
/**
* Set the scene observer. The VR headset will be linked to it. If no observer is set, the VR headset is linked to the application {@link #getCamera() camera}.
* @param observer the scene observer.
*/
public void setObserver(Spatial observer) {
this.observer = observer;
}
/**
* Get the default Field Of View (FOV) value.
* @return the default Field Of View (FOV) value.
* @see #setDefaultFOV(float)
*/
public float getDefaultFOV() {
return defaultFOV;
}
/**
* Set the default Field Of View (FOV) value.
* @param defaultFOV the default Field Of View (FOV) value.
* @see #getDefaultFOV()
*/
public void setDefaultFOV(float defaultFOV) {
this.defaultFOV = defaultFOV;
}
/**
* Get the default aspect ratio.
* @return the default aspect ratio.
* @see #setDefaultAspect(float)
*/
public float getDefaultAspect() {
return defaultAspect;
}
/**
* Set the default aspect ratio.
* @param defaultAspect the default aspect ratio.
* @see #getDefaultAspect()
*/
public void setDefaultAspect(float defaultAspect) {
this.defaultAspect = defaultAspect;
}
/**
* Get the {@link AppSettings settings} attached to this environment.
* @return the {@link AppSettings settings} attached to this environment.
* @see #setSettings(AppSettings)
*/
public AppSettings getSettings(){
return settings;
}
/**
* Set the {@link AppSettings settings} attached to this environment.
* @param settings the {@link AppSettings settings} attached to this environment.
* @see #getSettings()
*/
public void setSettings(AppSettings settings){
this.settings = settings;
processSettings();
}
/**
* Check if the system currently supports VR.
* @return <code>true</code> if the system currently supports VR and <code>false</code> otherwise.
*/
public boolean isVRSupported() {
return vrSupportedOS;
}
/**
* Check if the VR mode is enabled.
* @return <code>true</code> if the VR mode is enabled and <code>false</code> otherwise.
*/
public boolean isInVR() {
return (forceVR || vrSupportedOS && hardware != null && hardware.isInitialized() && isInitialized());
}
/**
* Check if the rendering is instanced (see <a href="https://en.wikipedia.org/wiki/Geometry_instancing">Geometry instancing</a>).
* @return <code>true</code> if the rendering is instanced and <code>false</code> otherwise.
*/
public boolean isInstanceRendering() {
return instanceRendering;
}
public boolean isSwapBuffers(){
return disableSwapBuffers;
}
/**
* Check if the application has a GUI overlay attached.
* @return <code>true</code> if the application has a GUI overlay attached and <code>false</code> otherwise.
*/
public boolean hasTraditionalGUIOverlay() {
return !nogui;
}
/**
* Check if the VR environment is initialized. A call to the {@link #initialize() initialize()} method should set this value to <code>true</code>
* @return <code>true</code> if the VR environment is initialized and <code>false</code> otherwise.
*/
public boolean isInitialized(){
return initialized;
}
/**
* Check if the VR compositor is active.
* @return <code>true</code> if the VR compositor is active and <code>false</code> otherwise.
*/
public boolean compositorAllowed() {
return useCompositor && compositorOS;
}
/**
* Reset the headset pose if this is a seated experience.
*/
public void resetSeatedPose(){
if( vrSupportedOS == false || isSeatedExperience() == false ){
return;
}
getVRHardware().reset();
}
public AppState getAppState(){
return app;
}
public Application getApplication(){
return application;
}
/**
* Get the {@link Camera camera} used for rendering.
* If the VR mode is {@link #isInVR() active}, this method returns a dummy camera; otherwise,
* this method returns the camera of the attached application.
* @return the camera used for rendering.
*/
public Camera getCamera() {
if( isInVR() && getVRViewManager() != null && getVRViewManager().getLeftCamera() != null ) {
return dummyCam;
}
return application.getCamera();
}
public Camera getDummyCamera(){
if (dummyCam == null){
if (application != null){
if (application.getCamera() != null){
dummyCam = application.getCamera().clone();
} else {
return new Camera();
}
} else {
throw new IllegalStateException("VR environment is not attached to any application.");
}
}
return dummyCam;
}
/**
* Attach the VR environment to the given app state and application.
* This method should be called within the {@link AppState#stateAttached(com.jme3.app.state.AppStateManager) stateAttached(com.jme3.app.state.AppStateManager)} method
* from the app state.
* @param appState the app state to attach.
* @param application the application to attach.
*/
public void atttach(AppState appState, Application application){
this.application = application;
this.app = appState;
// Instantiate the view manager
if (vrBinding == VRConstants.SETTING_VRAPI_OPENVR_VALUE){
viewmanager = new VRViewManagerOpenVR(this);
} else if (vrBinding == VRConstants.SETTING_VRAPI_OSVR_VALUE){
viewmanager = new VRViewManagerOSVR(this);
} else {
logger.severe("Cannot instanciate view manager, unknown VRAPI type: "+vrBinding);
}
}
/**
* Initialize this VR environment. This method enables the system bindings and configures all the VR system modules.
* A call to this method has to be made before any use of VR capabilities.
* @return <code>true</code> if the VR environment is successfully initialized and <code>false</code> otherwise.
*/
public boolean initialize(){
logger.config("Initializing VR environment.");
initialized = false;
// we are going to use OpenVR now, not the Oculus Rift
// OpenVR does support the Rift
String OS = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH);
vrSupportedOS = !OS.contains("nux") && System.getProperty("sun.arch.data.model").equalsIgnoreCase("64"); //for the moment, linux/unix causes crashes, 64-bit only
compositorOS = OS.contains("indows");
if( vrSupportedOS) {
if( vrBinding == VRConstants.SETTING_VRAPI_OSVR_VALUE ) {
hardware = new OSVR(this);
initialized = true;
logger.config("Creating OSVR wrapper [SUCCESS]");
} else if( vrBinding == VRConstants.SETTING_VRAPI_OPENVR_VALUE ) {
hardware = new OpenVR(this);
initialized = true;
logger.config("Creating OpenVR wrapper [SUCCESS]");
} else {
logger.config("Cannot create VR binding: "+vrBinding+" [FAILED]");
logger.log(Level.SEVERE, "Cannot initialize VR environment [FAILED]");
}
if( hardware.initialize() ) {
initialized &= true;
logger.config("VR native wrapper initialized [SUCCESS]");
} else {
initialized &= false;
logger.warning("VR native wrapper initialized [FAILED]");
logger.log(Level.SEVERE, "Cannot initialize VR environment [FAILED]");
}
} else {
logger.log(Level.SEVERE, "System does not support VR capabilities.");
logger.log(Level.SEVERE, "Cannot initialize VR environment [FAILED]");
}
return initialized;
}
private void processSettings(){
if (settings != null){
if (settings.get(VRConstants.SETTING_USE_COMPOSITOR) != null){
useCompositor = settings.getBoolean(VRConstants.SETTING_USE_COMPOSITOR);
if( useCompositor == false ){
disableSwapBuffers = false;
}
}
if (settings.get(VRConstants.SETTING_ENABLE_MIRROR_WINDOW) != null){
if( useCompositor == false ) {
disableSwapBuffers = false;
} else {
disableSwapBuffers = !settings.getBoolean(VRConstants.SETTING_ENABLE_MIRROR_WINDOW);
}
}
if (settings.get(VRConstants.SETTING_GUI_OVERDRAW) != null){
getVRGUIManager().setGuiOverdraw(settings.getBoolean(VRConstants.SETTING_GUI_OVERDRAW));
}
if (settings.get(VRConstants.SETTING_GUI_CURVED_SURFACE) != null){
getVRGUIManager().setCurvedSurface(settings.getBoolean(VRConstants.SETTING_GUI_CURVED_SURFACE));
}
if (settings.get(VRConstants.SETTING_NO_GUI) != null){
nogui = settings.getBoolean(VRConstants.SETTING_NO_GUI);
}
if (settings.get(VRConstants.SETTING_VRAPI) != null){
vrBinding = settings.getInteger(VRConstants.SETTING_VRAPI);
}
if (settings.get(VRConstants.SETTING_SEATED_EXPERIENCE) != null){
seated = settings.getBoolean(VRConstants.SETTING_SEATED_EXPERIENCE);
}
if (settings.get(VRConstants.SETTING_INSTANCE_RENDERING) != null){
instanceRendering = settings.getBoolean(VRConstants.SETTING_INSTANCE_RENDERING);
}
if (settings.get(VRConstants.SETTING_DEFAULT_FOV) != null){
defaultFOV = settings.getFloat(VRConstants.SETTING_DEFAULT_FOV);
}
if (settings.get(VRConstants.SETTING_DEFAULT_ASPECT_RATIO) != null){
defaultAspect = settings.getFloat(VRConstants.SETTING_DEFAULT_ASPECT_RATIO);
}
if (settings.get(VRConstants.SETTING_FLIP_EYES) != null){
if( getVRHardware() != null ){
getVRHardware().setFlipEyes(settings.getBoolean(VRConstants.SETTING_FLIP_EYES));
}
}
}
}
}
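As the javadoc in this new class states, initialize() has to run before any VR capability is used, and atttach(...) (spelled this way in the source) is meant to be called from the app state's stateAttached(...). A minimal sketch of that lifecycle in a custom app state, mirroring what VRAppState does earlier in this diff; anything beyond that ordering is an assumption.

import com.jme3.app.VREnvironment;
import com.jme3.app.state.AbstractAppState;
import com.jme3.app.state.AppStateManager;

public class VREnvironmentLifecycleState extends AbstractAppState {

    private final VREnvironment environment;

    public VREnvironmentLifecycleState(VREnvironment environment) {
        this.environment = environment;
    }

    @Override
    public void stateAttached(AppStateManager stateManager) {
        // initialize() must be called before any use of VR capabilities.
        if (!environment.isInitialized()) {
            environment.initialize();
        }
        // Bind the environment to this state and the running application,
        // as VREnvironment's javadoc asks to do from stateAttached().
        if (environment.isInitialized()) {
            environment.atttach(this, stateManager.getApplication());
        }
    }
}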

@ -9,13 +9,22 @@ https://github.com/sensics/OSVR-RenderManager/blob/master/examples/RenderManager
*/
package com.jme3.input.vr;
import com.jme3.app.VRAppState;
import com.jme3.app.VRApplication;
import com.jme3.app.VREnvironment;
import com.jme3.math.Matrix4f;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.renderer.Camera;
import com.jme3.system.osvr.osvrclientkit.OsvrClientKitLibrary;
import com.jme3.system.osvr.osvrdisplay.OsvrDisplayLibrary;
import com.jme3.system.osvr.osvrdisplay.OsvrDisplayLibrary.OSVR_DisplayConfig;
import com.jme3.system.osvr.osvrmatrixconventions.OSVR_Pose3;
import com.jme3.system.osvr.osvrrendermanageropengl.OSVR_OpenResultsOpenGL;
import com.jme3.system.osvr.osvrrendermanageropengl.OSVR_RenderBufferOpenGL;
import com.jme3.system.osvr.osvrrendermanageropengl.OSVR_RenderInfoOpenGL;
import com.jme3.system.osvr.osvrrendermanageropengl.OSVR_RenderParams;
import com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ViewportDescription;
import com.jme3.system.osvr.osvrrendermanageropengl.OsvrRenderManagerOpenGLLibrary;
import com.ochafik.lang.jnaerator.runtime.NativeSize;
import com.ochafik.lang.jnaerator.runtime.NativeSizeByReference;
import com.sun.jna.Pointer;
@ -23,18 +32,6 @@ import com.sun.jna.ptr.PointerByReference;
import java.nio.FloatBuffer;
import java.util.logging.Logger;
import osvrclientkit.OsvrClientKitLibrary;
import osvrdisplay.OsvrDisplayLibrary;
import osvrdisplay.OsvrDisplayLibrary.OSVR_DisplayConfig;
import osvrmatrixconventions.OSVR_Pose3;
import osvrrendermanageropengl.OSVR_OpenResultsOpenGL;
import osvrrendermanageropengl.OSVR_RenderBufferOpenGL;
import osvrrendermanageropengl.OSVR_RenderInfoOpenGL;
import osvrrendermanageropengl.OSVR_RenderParams;
import osvrrendermanageropengl.OSVR_ViewportDescription;
import osvrrendermanageropengl.OsvrRenderManagerOpenGLLibrary;
/**
* A class that wraps an <a href="http://www.osvr.org/">OSVR</a> system.
* @author reden - phr00t - https://github.com/phr00t
@ -87,7 +84,7 @@ public class OSVR implements VRAPI {
OSVR_RenderParams.ByValue renderParams;
OsvrClientKitLibrary.OSVR_ClientContext context;
osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue graphicsLibrary;
com.jme3.system.osvr.osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue graphicsLibrary;
Pointer renderManager, renderManagerOpenGL, renderInfoCollection, registerBufferState;
OSVRInput VRinput;
NativeSize numRenderInfo;
@ -111,14 +108,14 @@ public class OSVR implements VRAPI {
boolean initSuccess = false;
boolean flipEyes = false;
private VRAppState app = null;
private VREnvironment environment = null;
/**
* Create a new <a href="http://www.osvr.org/">OSVR</a> system attached to the given {@link VRAppState app state}.
* @param app the app state to which the input is attached.
* Create a new <a href="http://www.osvr.org/">OSVR</a> system attached to the given {@link VREnvironment VR environment}.
* @param environment the {@link VREnvironment VR environment} to which the input is attached.
*/
public OSVR(VRAppState app){
this.app = app;
public OSVR(VREnvironment environment){
this.environment = environment;
}
/**
@ -150,7 +147,7 @@ public class OSVR implements VRAPI {
hmdPose.setAutoSynch(false);
context = OsvrClientKitLibrary.osvrClientInit(defaultJString, 0);
VRinput = new OSVRInput(app);
VRinput = new OSVRInput(environment);
initSuccess = context != null && VRinput.init();
if( initSuccess ) {
PointerByReference grabDisplay = new PointerByReference();
@ -206,7 +203,7 @@ public class OSVR implements VRAPI {
public boolean initVRCompositor(boolean allowed) {
if( !allowed || renderManager != null ) return false;
grabGLFWContext();
graphicsLibrary = new osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue();
graphicsLibrary = new com.jme3.system.osvr.osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue();
graphicsLibrary.toolkit = null;
graphicsLibrary.setAutoSynch(false);
grabRM = new PointerByReference(); grabRMOGL = new PointerByReference();
@ -272,7 +269,7 @@ public class OSVR implements VRAPI {
}
@Override
public void _setFlipEyes(boolean set) {
public void setFlipEyes(boolean set) {
flipEyes = set;
}
@ -376,7 +373,7 @@ public class OSVR implements VRAPI {
if( eyeLeftInfo == null ) return cam.getProjectionMatrix();
if( eyeMatrix[EYE_LEFT] == null ) {
FloatBuffer tfb = FloatBuffer.allocate(16);
osvrdisplay.OsvrDisplayLibrary.osvrClientGetViewerEyeSurfaceProjectionMatrixf(displayConfig, 0, (byte)EYE_LEFT, 0, cam.getFrustumNear(), cam.getFrustumFar(), (short)0, tfb);
com.jme3.system.osvr.osvrdisplay.OsvrDisplayLibrary.osvrClientGetViewerEyeSurfaceProjectionMatrixf(displayConfig, 0, (byte)EYE_LEFT, 0, cam.getFrustumNear(), cam.getFrustumFar(), (short)0, tfb);
eyeMatrix[EYE_LEFT] = new Matrix4f();
eyeMatrix[EYE_LEFT].set(tfb.get(0), tfb.get(4), tfb.get(8), tfb.get(12),
tfb.get(1), tfb.get(5), tfb.get(9), tfb.get(13),
@ -391,7 +388,7 @@ public class OSVR implements VRAPI {
if( eyeRightInfo == null ) return cam.getProjectionMatrix();
if( eyeMatrix[EYE_RIGHT] == null ) {
FloatBuffer tfb = FloatBuffer.allocate(16);
osvrdisplay.OsvrDisplayLibrary.osvrClientGetViewerEyeSurfaceProjectionMatrixf(displayConfig, 0, (byte)EYE_RIGHT, 0, cam.getFrustumNear(), cam.getFrustumFar(), (short)0, tfb);
com.jme3.system.osvr.osvrdisplay.OsvrDisplayLibrary.osvrClientGetViewerEyeSurfaceProjectionMatrixf(displayConfig, 0, (byte)EYE_RIGHT, 0, cam.getFrustumNear(), cam.getFrustumFar(), (short)0, tfb);
eyeMatrix[EYE_RIGHT] = new Matrix4f();
eyeMatrix[EYE_RIGHT].set(tfb.get(0), tfb.get(4), tfb.get(8), tfb.get(12),
tfb.get(1), tfb.get(5), tfb.get(9), tfb.get(13),
@ -461,10 +458,4 @@ public class OSVR implements VRAPI {
public HmdType getType() {
return HmdType.OSVR;
}
@Override
public VRAppState getVRAppState() {
return app;
}
}

@ -7,26 +7,24 @@ package com.jme3.input.vr;
import java.util.logging.Logger;
import com.jme3.app.VRAppState;
import com.jme3.app.VRApplication;
import com.jme3.app.VREnvironment;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.renderer.Camera;
import com.jme3.scene.Spatial;
import com.jme3.system.osvr.osvrclientkit.OsvrClientKitLibrary;
import com.jme3.system.osvr.osvrclientkit.OsvrClientKitLibrary.OSVR_ClientInterface;
import com.jme3.system.osvr.osvrclientreporttypes.OSVR_AnalogReport;
import com.jme3.system.osvr.osvrclientreporttypes.OSVR_ButtonReport;
import com.jme3.system.osvr.osvrclientreporttypes.OSVR_Pose3;
import com.jme3.system.osvr.osvrinterface.OsvrInterfaceLibrary;
import com.jme3.system.osvr.osvrtimevalue.OSVR_TimeValue;
import com.jme3.util.VRViewManagerOSVR;
import com.sun.jna.Callback;
import com.sun.jna.Pointer;
import com.sun.jna.ptr.PointerByReference;
import jmevr.util.VRViewManager;
import osvrclientkit.OsvrClientKitLibrary;
import osvrclientkit.OsvrClientKitLibrary.OSVR_ClientInterface;
import osvrclientreporttypes.OSVR_AnalogReport;
import osvrclientreporttypes.OSVR_ButtonReport;
import osvrclientreporttypes.OSVR_Pose3;
import osvrinterface.OsvrInterfaceLibrary;
import osvrtimevalue.OSVR_TimeValue;
/**
* A class that wraps an <a href="http://www.osvr.org/">OSVR</a> input.
@ -64,7 +62,7 @@ public class OSVRInput implements VRInputAPI {
private static final Vector2f lastCallAxis[] = new Vector2f[16];
private static float axisMultiplier = 1f;
private VRAppState app = null;
private VREnvironment environment = null;
/**
* Get the system String that identifies a controller.
@ -91,11 +89,11 @@ public class OSVRInput implements VRInputAPI {
/**
* Create a new <a href="http://www.osvr.org/">OSVR</a> input attached to the given {@link VRAppState app state}.
* @param app the app state to which the input is attached.
* Create a new <a href="http://www.osvr.org/">OSVR</a> input attached to the given {@link VREnvironment VR environment}.
* @param environment the {@link VREnvironment VR environment} to which the input is attached.
*/
public OSVRInput(VRAppState app){
this.app = app;
public OSVRInput(VREnvironment environment){
this.environment = environment;
}
@ -167,7 +165,7 @@ public class OSVRInput implements VRInputAPI {
private OSVR_ClientInterface getInterface(byte[] str) {
PointerByReference pbr = new PointerByReference();
OsvrClientKitLibrary.osvrClientGetInterface((OsvrClientKitLibrary.OSVR_ClientContext)app.getVRHardware().getVRSystem(), str, pbr);
OsvrClientKitLibrary.osvrClientGetInterface((OsvrClientKitLibrary.OSVR_ClientContext)environment.getVRHardware().getVRSystem(), str, pbr);
return new OSVR_ClientInterface(pbr.getValue());
}
@ -303,9 +301,9 @@ public class OSVRInput implements VRInputAPI {
@Override
public Quaternion getFinalObserverRotation(int index) {
VRViewManager vrvm = app.getVRViewManager();
VRViewManagerOSVR vrvm = (VRViewManagerOSVR)environment.getVRViewManager();
if( vrvm == null || isInputDeviceTracking(index) == false ) return null;
Object obs = app.getObserver();
Object obs = environment.getObserver();
if( obs instanceof Camera ) {
tempq.set(((Camera)obs).getRotation());
} else {
@ -316,9 +314,9 @@ public class OSVRInput implements VRInputAPI {
@Override
public Vector3f getFinalObserverPosition(int index) {
VRViewManager vrvm = app.getVRViewManager();
VRViewManagerOSVR vrvm = (VRViewManagerOSVR) environment.getVRViewManager();
if( vrvm == null || isInputDeviceTracking(index) == false ) return null;
Object obs = app.getObserver();
Object obs = environment.getObserver();
Vector3f pos = getPosition(index);
if( obs instanceof Camera ) {
((Camera)obs).getRotation().mult(pos, pos);
@ -349,13 +347,6 @@ public class OSVRInput implements VRInputAPI {
axisMultiplier = set;
}
@Override
public VRAppState getVRAppState() {
return app;
}
@Override
public VRTrackedController getTrackedController(int index) {
// TODO Auto-generated method stub
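A usage sketch for the refactored constructor above (illustrative, not from the commit): the input is now created against a VREnvironment rather than a VRAppState. How the environment instance is obtained, and the assumption that it is already initialized, are not taken from this diff; only the OSVRInput(VREnvironment) constructor and the VRInputAPI calls are.

import com.jme3.app.VREnvironment;
import com.jme3.input.vr.OSVRInput;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;

public class OsvrInputMigrationSketch {
    // 'environment' is assumed to be an already-initialized VREnvironment
    // (for example the one owned by a VRAppState); its creation is not shown here.
    public static void reportFirstController(VREnvironment environment) {
        OSVRInput input = new OSVRInput(environment); // was: new OSVRInput(vrAppState)
        if (input.init()) {
            input.updateConnectedControllers();
            if (input.isInputDeviceTracking(0)) {
                Vector3f position = input.getPosition(0);
                Quaternion orientation = input.getOrientation(0);
                System.out.println("Controller 0 at " + position + " / " + orientation);
            }
        }
    }
}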

@ -5,8 +5,7 @@
*/
package com.jme3.input.vr;
import com.jme3.app.VRAppState;
import com.jme3.app.VRApplication;
import com.jme3.app.VREnvironment;
import com.jme3.math.Matrix4f;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
@ -19,22 +18,19 @@ import com.jme3.system.jopenvr.OpenVRUtil;
import com.jme3.system.jopenvr.TrackedDevicePose_t;
import com.jme3.system.jopenvr.VR_IVRCompositor_FnTable;
import com.jme3.system.jopenvr.VR_IVRSystem_FnTable;
import com.jme3.util.VRUtil;
import com.sun.jna.Memory;
import com.sun.jna.Pointer;
import com.sun.jna.ptr.FloatByReference;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.ptr.LongByReference;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.LongBuffer;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import jmevr.util.VRUtil;
/**
* A class that wraps an <a href="https://github.com/ValveSoftware/openvr/wiki/API-Documentation">OpenVR</a> system.
* @author reden - phr00t - https://github.com/phr00t
@ -84,15 +80,15 @@ public class OpenVR implements VRAPI {
private static long frameCount;
private static OpenVRInput VRinput;
private VRAppState app = null;
private VREnvironment environment = null;
/**
* Create a new <a href="https://github.com/ValveSoftware/openvr/wiki/API-Documentation">OpenVR</a> system
* attached to the given {@link VRAppState VR app state}.
* @param appState the VR app state to which the api is attached.
* attached to the given {@link VREnvironment VR environment}.
* @param environment the VR environment to which this API is attached.
*/
public OpenVR(VRAppState appState){
this.app = appState;
public OpenVR(VREnvironment environment){
this.environment = environment;
}
@Override
@ -118,7 +114,7 @@ public class OpenVR implements VRAPI {
private static long latencyWaitTime = 0;
@Override
public void _setFlipEyes(boolean set) {
public void setFlipEyes(boolean set) {
flipEyes = set;
}
@ -180,7 +176,7 @@ public class OpenVR implements VRAPI {
}
// init controllers for the first time
VRinput = new OpenVRInput(app);
VRinput = new OpenVRInput(environment);
VRinput.init();
VRinput.updateConnectedControllers();
@ -206,7 +202,7 @@ public class OpenVR implements VRAPI {
if(compositorFunctions != null && hmdErrorStore.getValue() == 0 ){
compositorFunctions.setAutoSynch(false);
compositorFunctions.read();
if( app.isSeatedExperience() ) {
if( environment.isSeatedExperience() ) {
compositorFunctions.SetTrackingSpace.apply(JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseSeated);
} else {
compositorFunctions.SetTrackingSpace.apply(JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseStanding);
@ -356,7 +352,7 @@ public class OpenVR implements VRAPI {
frameCount = nowCount;
vrsystemFunctions.GetDeviceToAbsoluteTrackingPose.apply(
app.isSeatedExperience()?JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseSeated:
environment.isSeatedExperience()?JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseSeated:
JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseStanding,
fSecondsUntilPhotons, hmdTrackedDevicePoseReference, JOpenVRLibrary.k_unMaxTrackedDeviceCount);
}
@ -373,7 +369,7 @@ public class OpenVR implements VRAPI {
VRInput._updateConnectedControllers();
}*/
//update controllers pose information
app.getVRinput().updateControllerStates();
environment.getVRinput().updateControllerStates();
// read pose data from native
for (int nDevice = 0; nDevice < JOpenVRLibrary.k_unMaxTrackedDeviceCount; ++nDevice ){
@ -447,7 +443,7 @@ public class OpenVR implements VRAPI {
@Override
public Vector3f getSeatedToAbsolutePosition() {
if( app.isSeatedExperience() == false ) return Vector3f.ZERO;
if( environment.isSeatedExperience() == false ) return Vector3f.ZERO;
if( hmdSeatToStand == null ) {
hmdSeatToStand = new Vector3f();
HmdMatrix34_t mat = vrsystemFunctions.GetSeatedZeroPoseToStandingAbsoluteTrackingPose.apply();
@ -524,10 +520,5 @@ public class OpenVR implements VRAPI {
return VRUtil.convertSteamVRMatrix3ToMatrix4f(mat, hmdPoseRightEye);
}
}
@Override
public VRAppState getVRAppState() {
return app;
}
}
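As with the input classes, a hedged setup sketch (not part of the commit) for the refactored wrapper: OpenVR is now bound to a VREnvironment. The initialize() call is assumed to be the VRAPI bootstrap method; the constructor, setFlipEyes() and initVRCompositor() are taken from the diff above.

import com.jme3.app.VREnvironment;
import com.jme3.input.vr.OpenVR;

public class OpenVrSetupSketch {
    // 'environment' is assumed to be a configured VREnvironment; creating one is outside this diff.
    public static OpenVR attachOpenVr(VREnvironment environment) {
        OpenVR vrHardware = new OpenVR(environment);   // was: new OpenVR(vrAppState)
        if (vrHardware.initialize()) {                 // assumed VRAPI method
            vrHardware.setFlipEyes(false);             // renamed from _setFlipEyes in this commit
            vrHardware.initVRCompositor(true);         // attach the OpenVR compositor if allowed
        }
        return vrHardware;
    }
}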

@ -9,8 +9,7 @@ import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.jme3.app.VRAppState;
import com.jme3.app.VRApplication;
import com.jme3.app.VREnvironment;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
@ -20,9 +19,8 @@ import com.jme3.system.jopenvr.JOpenVRLibrary;
import com.jme3.system.jopenvr.OpenVRUtil;
import com.jme3.system.jopenvr.VRControllerState_t;
import com.jme3.system.jopenvr.VR_IVRSystem_FnTable;
import jmevr.util.VRUtil;
import jmevr.util.VRViewManager;
import com.jme3.util.VRUtil;
import com.jme3.util.VRViewManagerOpenVR;
/*
make helper functions to pull the following easily from raw data (DONE)
@ -98,16 +96,16 @@ public class OpenVRInput implements VRInputAPI {
private final Quaternion tempq = new Quaternion();
private VRAppState app;
private VREnvironment environment;
private List<VRTrackedController> trackedControllers = null;
/**
* Create a new <a href="https://github.com/ValveSoftware/openvr/wiki/API-Documentation">OpenVR</a> input attached to the given application.
* @param application the application to which the input is attached.
* Create a new <a href="https://github.com/ValveSoftware/openvr/wiki/API-Documentation">OpenVR</a> input attached to the given VR environment.
* @param environment the VR environment to which the input is attached.
*/
public OpenVRInput(VRAppState appState){
this.app = appState;
public OpenVRInput(VREnvironment environment){
this.environment = environment;
}
@Override
@ -297,18 +295,28 @@ public class OpenVRInput implements VRInputAPI {
@Override
public boolean isInputFocused() {
return ((VR_IVRSystem_FnTable)app.getVRHardware().getVRSystem()).IsInputFocusCapturedByAnotherProcess.apply() == 0;
if (environment != null){
return ((VR_IVRSystem_FnTable)environment.getVRHardware().getVRSystem()).IsInputFocusCapturedByAnotherProcess.apply() == 0;
} else {
throw new IllegalStateException("VR input is not attached to a VR environment.");
}
}
@Override
public boolean isInputDeviceTracking(int index) {
if( index < 0 || index >= controllerCount ) return false;
if( index < 0 || index >= controllerCount ){
return false;
}
return OpenVR.hmdTrackedDevicePoses[controllerIndex[index]].bPoseIsValid != 0;
}
@Override
public Quaternion getOrientation(int index) {
if( isInputDeviceTracking(index) == false ) return null;
if( isInputDeviceTracking(index) == false ){
return null;
}
index = controllerIndex[index];
VRUtil.convertMatrix4toQuat(OpenVR.poseMatrices[index], rotStore[index]);
return rotStore[index];
@ -316,7 +324,10 @@ public class OpenVRInput implements VRInputAPI {
@Override
public Vector3f getPosition(int index) {
if( isInputDeviceTracking(index) == false ) return null;
if( isInputDeviceTracking(index) == false ){
return null;
}
// the hmdPose comes in rotated funny, fix that here
index = controllerIndex[index];
OpenVR.poseMatrices[index].toTranslationVector(posStore[index]);
@ -327,84 +338,122 @@ public class OpenVRInput implements VRInputAPI {
@Override
public Quaternion getFinalObserverRotation(int index) {
VRViewManager vrvm = app.getVRViewManager();
if( vrvm == null || isInputDeviceTracking(index) == false ) return null;
Object obs = app.getObserver();
if( obs instanceof Camera ) {
tempq.set(((Camera)obs).getRotation());
} else {
tempq.set(((Spatial)obs).getWorldRotation());
}
return tempq.multLocal(getOrientation(index));
if (environment != null){
VRViewManagerOpenVR vrvm = (VRViewManagerOpenVR)environment.getVRViewManager();
if (vrvm != null){
if(isInputDeviceTracking(index) == false ){
return null;
}
Object obs = environment.getObserver();
if( obs instanceof Camera ) {
tempq.set(((Camera)obs).getRotation());
} else {
tempq.set(((Spatial)obs).getWorldRotation());
}
return tempq.multLocal(getOrientation(index));
} else {
throw new IllegalStateException("VR environment has no valid view manager.");
}
} else {
throw new IllegalStateException("VR input is not attached to a VR environment.");
}
}
@Override
public Vector3f getFinalObserverPosition(int index) {
VRViewManager vrvm = app.getVRViewManager();
if( vrvm == null || isInputDeviceTracking(index) == false ) return null;
Object obs = app.getObserver();
Vector3f pos = getPosition(index);
if( obs instanceof Camera ) {
((Camera)obs).getRotation().mult(pos, pos);
return pos.addLocal(((Camera)obs).getLocation());
} else {
((Spatial)obs).getWorldRotation().mult(pos, pos);
return pos.addLocal(((Spatial)obs).getWorldTranslation());
}
if (environment != null){
VRViewManagerOpenVR vrvm = (VRViewManagerOpenVR)environment.getVRViewManager();
if (vrvm != null){
if(isInputDeviceTracking(index) == false ){
return null;
}
Object obs = environment.getObserver();
Vector3f pos = getPosition(index);
if( obs instanceof Camera ) {
((Camera)obs).getRotation().mult(pos, pos);
return pos.addLocal(((Camera)obs).getLocation());
} else {
((Spatial)obs).getWorldRotation().mult(pos, pos);
return pos.addLocal(((Spatial)obs).getWorldTranslation());
}
} else {
throw new IllegalStateException("VR environment has no valid view manager.");
}
} else {
throw new IllegalStateException("VR input is not attached to a VR environment.");
}
}
@Override
public void triggerHapticPulse(int controllerIndex, float seconds) {
if( app.isInVR() == false || isInputDeviceTracking(controllerIndex) == false ) return;
if( environment.isInVR() == false || isInputDeviceTracking(controllerIndex) == false ){
return;
}
// apparently only axis ID of 0 works
((VR_IVRSystem_FnTable)app.getVRHardware().getVRSystem()).TriggerHapticPulse.apply(OpenVRInput.controllerIndex[controllerIndex],
((VR_IVRSystem_FnTable)environment.getVRHardware().getVRSystem()).TriggerHapticPulse.apply(OpenVRInput.controllerIndex[controllerIndex],
0, (short)Math.round(3f * seconds / 1e-3f));
}
@Override
public void updateConnectedControllers() {
logger.config("Updating connected controllers.");
controllerCount = 0;
for(int i=0;i<JOpenVRLibrary.k_unMaxTrackedDeviceCount;i++) {
if( ((OpenVR)app.getVRHardware()).getVRSystem().GetTrackedDeviceClass.apply(i) == JOpenVRLibrary.ETrackedDeviceClass.ETrackedDeviceClass_TrackedDeviceClass_Controller ) {
String controllerName = "Unknown";
String manufacturerName = "Unknown";
try {
controllerName = OpenVRUtil.getTrackedDeviceStringProperty(((OpenVR)app.getVRHardware()).getVRSystem(), i, JOpenVRLibrary.ETrackedDeviceProperty.ETrackedDeviceProperty_Prop_TrackingSystemName_String);
manufacturerName = OpenVRUtil.getTrackedDeviceStringProperty(((OpenVR)app.getVRHardware()).getVRSystem(), i, JOpenVRLibrary.ETrackedDeviceProperty.ETrackedDeviceProperty_Prop_ManufacturerName_String);
} catch (Exception e) {
logger.log(Level.WARNING, e.getMessage(), e);
}
controllerIndex[controllerCount] = i;
// Send an Haptic pulse to the controller
triggerHapticPulse(controllerCount, 1.0f);
controllerCount++;
logger.config(" Tracked controller "+(i+1)+"/"+JOpenVRLibrary.k_unMaxTrackedDeviceCount+" "+controllerName+" ("+manufacturerName+") attached.");
} else {
logger.config(" Controller "+(i+1)+"/"+JOpenVRLibrary.k_unMaxTrackedDeviceCount+" ignored.");
}
if (environment != null){
controllerCount = 0;
for(int i=0;i<JOpenVRLibrary.k_unMaxTrackedDeviceCount;i++) {
if( ((OpenVR)environment.getVRHardware()).getVRSystem().GetTrackedDeviceClass.apply(i) == JOpenVRLibrary.ETrackedDeviceClass.ETrackedDeviceClass_TrackedDeviceClass_Controller ) {
String controllerName = "Unknown";
String manufacturerName = "Unknown";
try {
controllerName = OpenVRUtil.getTrackedDeviceStringProperty(((OpenVR)environment.getVRHardware()).getVRSystem(), i, JOpenVRLibrary.ETrackedDeviceProperty.ETrackedDeviceProperty_Prop_TrackingSystemName_String);
manufacturerName = OpenVRUtil.getTrackedDeviceStringProperty(((OpenVR)environment.getVRHardware()).getVRSystem(), i, JOpenVRLibrary.ETrackedDeviceProperty.ETrackedDeviceProperty_Prop_ManufacturerName_String);
} catch (Exception e) {
logger.log(Level.WARNING, e.getMessage(), e);
}
controllerIndex[controllerCount] = i;
// Send a haptic pulse to the controller

triggerHapticPulse(controllerCount, 1.0f);
controllerCount++;
logger.config(" Tracked controller "+(i+1)+"/"+JOpenVRLibrary.k_unMaxTrackedDeviceCount+" "+controllerName+" ("+manufacturerName+") attached.");
} else {
logger.config(" Controller "+(i+1)+"/"+JOpenVRLibrary.k_unMaxTrackedDeviceCount+" ignored.");
}
}
} else {
throw new IllegalStateException("VR input is not attached to a VR environment.");
}
}
@Override
public void updateControllerStates() {
for(int i=0;i<controllerCount;i++) {
int index = controllerIndex[i];
((OpenVR)app.getVRHardware()).getVRSystem().GetControllerState.apply(index, cStates[index], 5);
cStates[index].readField("ulButtonPressed");
cStates[index].readField("rAxis");
needsNewVelocity[index] = true;
needsNewAngVelocity[index] = true;
if (environment != null){
for(int i=0;i<controllerCount;i++) {
int index = controllerIndex[i];
((OpenVR)environment.getVRHardware()).getVRSystem().GetControllerState.apply(index, cStates[index], 5);
cStates[index].readField("ulButtonPressed");
cStates[index].readField("rAxis");
needsNewVelocity[index] = true;
needsNewAngVelocity[index] = true;
}
} else {
throw new IllegalStateException("VR input is not attached to a VR environment.");
}
}
@Override
public VRAppState getVRAppState() {
return app;
}
}
}
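The reworked methods above now fail fast with an IllegalStateException when the input has no VREnvironment attached. A short, hedged sketch of defensive calling code; obtaining the input through environment.getVRinput() is taken from the diff, the rest is illustrative.

import com.jme3.app.VREnvironment;
import com.jme3.input.vr.VRInputAPI;
import com.jme3.math.Vector3f;

public class ControllerPollSketch {
    public static void pollController(VREnvironment environment, int index) {
        VRInputAPI input = environment.getVRinput();   // input bound to the environment
        try {
            if (input.isInputFocused() && input.isInputDeviceTracking(index)) {
                // world-space controller position, combining observer and tracking data
                Vector3f worldPos = input.getFinalObserverPosition(index);
                System.out.println("Controller " + index + " at " + worldPos);
            }
        } catch (IllegalStateException e) {
            // thrown by the new code paths when no VR environment (or view manager) is available
            System.err.println("VR input not usable: " + e.getMessage());
        }
    }
}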

@ -5,7 +5,6 @@
*/
package com.jme3.input.vr;
import com.jme3.app.VRAppState;
import com.jme3.math.Matrix4f;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
@ -31,13 +30,7 @@ public interface VRAPI {
* @return <code>true</code> if the initialization is a success and <code>false</code> otherwise.
*/
public boolean initVRCompositor(boolean allowed);
/**
* Get the {@link VRAppState VR app state} to which this api is attached.
* @return the VR app state to which this input is attached.
*/
public VRAppState getVRAppState();
/**
* Get the object that wraps natively the VR system.
* @return the object that wraps natively the VR system.
@ -63,10 +56,10 @@ public interface VRAPI {
public VRInputAPI getVRinput();
/**
* Do not use. Prefers the preconfigure routine from the VRApplication.
* Flip the left and right eyes.
* @param set <code>true</code> if the eyes have to be flipped and <code>false</code> otherwise.
*/
public void _setFlipEyes(boolean set);
public void setFlipEyes(boolean set);
/**
* Set if latency information has to be logged.
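Since getVRAppState() is gone from VRAPI, callers reach the hardware through the environment instead; a minimal sketch, assuming getVRHardware() returns the VRAPI implementation (the casts earlier in this diff suggest it does).

import com.jme3.app.VREnvironment;
import com.jme3.input.vr.VRAPI;

public class FlipEyesSketch {
    public static void configure(VREnvironment environment, boolean flip) {
        VRAPI hardware = environment.getVRHardware(); // replaces api.getVRAppState() round-trips
        hardware.setFlipEyes(flip);                   // renamed from _setFlipEyes in this commit
    }
}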

@ -5,8 +5,6 @@
*/
package com.jme3.input.vr;
import com.jme3.app.VRAppState;
import com.jme3.app.VRApplication;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
@ -197,9 +195,4 @@ public interface VRInputAPI {
*/
public void triggerHapticPulse(int controllerIndex, float seconds);
/**
* Get the {@link VRAppState VR app state} to which this api is attached.
* @return the VR app state to which this input is attached.
*/
public VRAppState getVRAppState();
}
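A small, hedged example of the haptic entry point kept by the trimmed interface; the pulse duration and the way the controller count is obtained are arbitrary choices, not part of the PR.

import com.jme3.app.VREnvironment;
import com.jme3.input.vr.VRInputAPI;

public class HapticSketch {
    /** Sends a short rumble to every controller that is currently tracked. */
    public static void rumbleAll(VREnvironment environment, int controllerCount) {
        VRInputAPI input = environment.getVRinput();
        for (int i = 0; i < controllerCount; i++) {
            if (input.isInputDeviceTracking(i)) {
                input.triggerHapticPulse(i, 0.1f);    // 0.1 s pulse, value is illustrative
            }
        }
    }
}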

@ -2,7 +2,7 @@
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jmevr.util;
package com.jme3.post;
import com.jme3.asset.AssetManager;
import com.jme3.post.filters.FogFilter;

@ -30,7 +30,7 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package jmevr.util;
package com.jme3.scene;
import com.jme3.scene.Mesh;
import com.jme3.scene.VertexBuffer.Type;

@ -45,6 +45,7 @@ import com.jme3.math.Matrix4f;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.post.SceneProcessor;
import com.jme3.profile.AppProfiler;
import com.jme3.renderer.Camera;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.Renderer;
@ -100,6 +101,8 @@ public abstract class AbstractShadowRendererVR implements SceneProcessor, Savabl
protected RenderState forcedRenderState = new RenderState();
protected Boolean renderBackFacesShadows;
protected AppProfiler profiler = null;
/**
* true if the fallback material should be used, otherwise false
*/
@ -379,6 +382,11 @@ public abstract class AbstractShadowRendererVR implements SceneProcessor, Savabl
*/
protected abstract Camera getShadowCam(int shadowMapIndex);
@Override
public void setProfiler(AppProfiler profiler) {
this.profiler = profiler;
}
/**
* responsible for displaying the frustum of the shadow cam for debug
* purpose

@ -42,7 +42,6 @@ import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.profile.AppProfiler;
import com.jme3.renderer.Camera;
import com.jme3.renderer.queue.GeometryList;
import com.jme3.renderer.queue.RenderQueue;
@ -76,8 +75,6 @@ public class DirectionalLightShadowRendererVR extends AbstractShadowRendererVR {
protected Vector3f[] points = new Vector3f[8];
//Holding the info for fading shadows in the far distance
private boolean stabilize = true;
private AppProfiler profiler = null;
/**
* Used for serialization use
@ -304,9 +301,4 @@ public class DirectionalLightShadowRendererVR extends AbstractShadowRendererVR {
protected boolean checkCulling(Camera viewCam) {
return true;
}
@Override
public void setProfiler(AppProfiler profiler) {
this.profiler = profiler;
}
}

@ -5,13 +5,13 @@
*/
package com.jme3.shadow;
import com.jme3.app.VRApplication;
import com.jme3.app.Application;
import com.jme3.math.Matrix4f;
import com.jme3.math.Vector4f;
import com.jme3.renderer.Camera;
/**
* An instanced version of the {@link DirectionalLightShadowFilterVR directional light shadow filter}.
* An instanced version of the {@link DirectionalLightShadowFilterVR directional light shadow filter} dedicated to instanced rendering.
* @author reden - phr00t - https://github.com/phr00t
* @author Julien Seinturier - (c) 2016 - JOrigin project - <a href="http://www.jorigin.org">http:/www.jorigin.org</a>
*/
@ -19,30 +19,53 @@ public class InstancedDirectionalShadowFilter extends DirectionalLightShadowFilt
private final Vector4f temp4f = new Vector4f(), temp4f2 = new Vector4f();
private VRApplication application;
private boolean instanceRendering = false;
private Camera rightCamera = null;
/**
/**
* Create a new instanced version of the {@link DirectionalLightShadowFilterVR directional light shadow filter}.
* @param application the VR application that this filter is attached to.
* @param application the application that this filter is attached to.
* @param camera the camera used as left eye in stereo rendering mode.
* @param shadowMapSize the size of the rendered shadowmaps (512, 1024, 2048, etc...)
* @param nbSplits the number of shadow maps rendered (the more shadow maps the more quality, the less fps).
* @param instancedRendering <code>true</code> if this filter has to use instance rendering and <code>false</code> otherwise.
* @param rightCamera the camera used as right eye in stereo rendering mode.
*/
public InstancedDirectionalShadowFilter(VRApplication application, Camera camera, int shadowMapSize, int nbSplits, boolean instancedRendering) {
public InstancedDirectionalShadowFilter(Application application, Camera camera, int shadowMapSize, int nbSplits, boolean instancedRendering, Camera rightCamera) {
super(application.getAssetManager(), shadowMapSize, nbSplits, "Common/MatDefs/VR/PostShadowFilter.j3md");
this.instanceRendering = instancedRendering;
this.rightCamera = rightCamera;
}
@Override
protected void preFrame(float tpf) {
shadowRenderer.preFrame(tpf);
if( application.isInstanceVRRendering() ) {
material.setMatrix4("ViewProjectionMatrixInverseRight", application.getVRViewManager().getRightCamera().getViewProjectionMatrix().invert());
Matrix4f m = application.getVRViewManager().getRightCamera().getViewProjectionMatrix();
if( instanceRendering ) {
material.setMatrix4("ViewProjectionMatrixInverseRight", rightCamera.getViewProjectionMatrix().invert());
Matrix4f m = rightCamera.getViewProjectionMatrix();
material.setVector4("ViewProjectionMatrixRow2Right", temp4f2.set(m.m20, m.m21, m.m22, m.m23));
}
material.setMatrix4("ViewProjectionMatrixInverse", viewPort.getCamera().getViewProjectionMatrix().invert());
Matrix4f m = viewPort.getCamera().getViewProjectionMatrix();
material.setVector4("ViewProjectionMatrixRow2", temp4f.set(m.m20, m.m21, m.m22, m.m23));
}
/**
* Get if this filter is using instance rendering.
* @return <code>true</code> if this filter is using instance rendering and <code>false</code> otherwise.
* @see #setInstanceRendering(boolean)
*/
public boolean isInstanceRendering() {
return instanceRendering;
}
/**
* Set if this filter has to use instance rendering.
* @param instanceRendering <code>true</code> if this filter has to use instance rendering and <code>false</code> otherwise.
* @see #isInstanceRendering()
*/
public void setInstanceRendering(boolean instanceRendering) {
this.instanceRendering = instanceRendering;
}
}
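A construction sketch for the new signature (not part of the commit): the filter now takes a plain Application plus an explicit right-eye camera instead of digging them out of a VRApplication. The shadow-map size and split count are illustrative values.

import com.jme3.app.Application;
import com.jme3.renderer.Camera;
import com.jme3.shadow.InstancedDirectionalShadowFilter;

public class ShadowFilterSetupSketch {
    public static InstancedDirectionalShadowFilter createFilter(Application app,
                                                                Camera leftCamera,
                                                                Camera rightCamera,
                                                                boolean instanced) {
        // new signature: (Application, Camera, shadowMapSize, nbSplits, instancedRendering, rightCamera)
        InstancedDirectionalShadowFilter filter =
                new InstancedDirectionalShadowFilter(app, leftCamera, 1024, 3, instanced, rightCamera);
        filter.setInstanceRendering(instanced); // can also be toggled later through the new accessors
        return filter;
    }
}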

@ -39,14 +39,12 @@ import com.jme3.input.TouchInput;
import com.jme3.input.lwjgl.GlfwJoystickInput;
import com.jme3.input.lwjgl.GlfwKeyInputVR;
import com.jme3.input.lwjgl.GlfwMouseInputVR;
import com.jme3.renderer.opengl.GL;
import com.jme3.system.AppSettings;
import com.jme3.system.JmeContext;
import com.jme3.system.JmeSystem;
import com.jme3.system.NanoTimer;
import org.lwjgl.glfw.*;
import org.lwjgl.opengl.GL11;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;

@ -1,4 +1,4 @@
package osvrclientkit;
package com.jme3.system.osvr.osvrclientkit;
import com.sun.jna.Callback;
import com.sun.jna.Library;
import com.sun.jna.Native;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrmatrixconventions;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrclientreporttypes;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.NativeLibrary;

@ -1,4 +1,5 @@
package osvrdisplay;
package com.jme3.system.osvr.osvrdisplay;
import com.jme3.system.osvr.osvrclientkit.OsvrClientKitLibrary;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.NativeLibrary;
@ -12,7 +13,7 @@ import java.nio.ByteBuffer;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import osvrclientkit.OsvrClientKitLibrary;
/**
* JNA Wrapper for library <b>osvrDisplay</b><br>
* This file was autogenerated by <a href="http://jnaerator.googlecode.com/">JNAerator</a>,<br>

@ -1,10 +1,10 @@
package osvrinterface;
package com.jme3.system.osvr.osvrinterface;
import com.jme3.system.osvr.osvrclientkit.OsvrClientKitLibrary.OSVR_ClientInterface;
import com.jme3.system.osvr.osvrclientreporttypes.OSVR_Pose3;
import com.jme3.system.osvr.osvrtimevalue.OSVR_TimeValue;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.NativeLibrary;
import osvrclientkit.OsvrClientKitLibrary.OSVR_ClientInterface;
import osvrclientreporttypes.OSVR_Pose3;
import osvrtimevalue.OSVR_TimeValue;
/**
* JNA Wrapper for library <b>osvrInterface</b><br>
* This file was autogenerated by <a href="http://jnaerator.googlecode.com/">JNAerator</a>,<br>

@ -1,4 +1,4 @@
package osvrmatrixconventions;
package com.jme3.system.osvr.osvrmatrixconventions;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrclientreporttypes;
package com.jme3.system.osvr.osvrmatrixconventions;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrmatrixconventions;
package com.jme3.system.osvr.osvrmatrixconventions;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrmatrixconventions;
package com.jme3.system.osvr.osvrmatrixconventions;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.NativeLibrary;

@ -1,4 +1,4 @@
package osvrrendermanager;
package com.jme3.system.osvr.osvrrendermanager;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrrendermanager;
package com.jme3.system.osvr.osvrrendermanager;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanager;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrrendermanager;
package com.jme3.system.osvr.osvrrendermanager;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrrendermanager;
package com.jme3.system.osvr.osvrrendermanager;
import com.ochafik.lang.jnaerator.runtime.NativeSizeByReference;
import com.sun.jna.Library;
import com.sun.jna.Native;
@ -62,7 +62,7 @@ public class OsvrRenderManagerLibrary implements Library {
*/
public static native byte osvrRenderManagerFinishRegisterRenderBuffers(Pointer renderManager, Pointer registerBufferState, byte appWillNotOverwriteBeforeNewPresent);
/** Original signature : <code>OSVR_ReturnCode osvrRenderManagerPresentSolidColorf(OSVR_RenderManager, OSVR_RGB_FLOAT)</code> */
public static native byte osvrRenderManagerPresentSolidColorf(Pointer renderManager, osvrrendermanager.OSVR_RGB.ByValue rgb);
public static native byte osvrRenderManagerPresentSolidColorf(Pointer renderManager, com.jme3.system.osvr.osvrrendermanager.OSVR_RGB.ByValue rgb);
/**
* when you're done.<br>
* Original signature : <code>OSVR_ReturnCode osvrRenderManagerGetRenderInfoCollection(OSVR_RenderManager, OSVR_RenderParams, OSVR_RenderInfoCollection*)</code>
@ -81,37 +81,37 @@ public class OsvrRenderManagerLibrary implements Library {
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_OpenGL(double*, OSVR_ProjectionMatrix)</code><br>
* @deprecated use the safer methods {@link #OSVR_Projection_to_OpenGL(java.nio.DoubleBuffer, osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_OpenGL(com.sun.jna.ptr.DoubleByReference, osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} instead
* @deprecated use the safer methods {@link #OSVR_Projection_to_OpenGL(java.nio.DoubleBuffer, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_OpenGL(com.sun.jna.ptr.DoubleByReference, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} instead
*/
@Deprecated
public static native byte OSVR_Projection_to_OpenGL(DoubleByReference OpenGL_out, osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_OpenGL(DoubleByReference OpenGL_out, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_OpenGL(double*, OSVR_ProjectionMatrix)</code>
*/
public static native byte OSVR_Projection_to_OpenGL(DoubleBuffer OpenGL_out, osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_OpenGL(DoubleBuffer OpenGL_out, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_D3D(float[16], OSVR_ProjectionMatrix)</code><br>
* @deprecated use the safer methods {@link #OSVR_Projection_to_D3D(java.nio.FloatBuffer, osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_D3D(com.sun.jna.ptr.FloatByReference, osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} instead
* @deprecated use the safer methods {@link #OSVR_Projection_to_D3D(java.nio.FloatBuffer, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_D3D(com.sun.jna.ptr.FloatByReference, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} instead
*/
@Deprecated
public static native byte OSVR_Projection_to_D3D(FloatByReference D3D_out, osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_D3D(FloatByReference D3D_out, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_D3D(float[16], OSVR_ProjectionMatrix)</code>
*/
public static native byte OSVR_Projection_to_D3D(FloatBuffer D3D_out, osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_D3D(FloatBuffer D3D_out, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_Unreal(float[16], OSVR_ProjectionMatrix)</code><br>
* @deprecated use the safer methods {@link #OSVR_Projection_to_Unreal(java.nio.FloatBuffer, osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_Unreal(com.sun.jna.ptr.FloatByReference, osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} instead
* @deprecated use the safer methods {@link #OSVR_Projection_to_Unreal(java.nio.FloatBuffer, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_Unreal(com.sun.jna.ptr.FloatByReference, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue)} instead
*/
@Deprecated
public static native byte OSVR_Projection_to_Unreal(FloatByReference Unreal_out, osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_Unreal(FloatByReference Unreal_out, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_Unreal(float[16], OSVR_ProjectionMatrix)</code>
*/
public static native byte OSVR_Projection_to_Unreal(FloatBuffer Unreal_out, osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_Unreal(FloatBuffer Unreal_out, com.jme3.system.osvr.osvrrendermanager.OSVR_ProjectionMatrix.ByValue projection_in);
}

@ -1,4 +1,4 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;
@ -10,7 +10,7 @@ import java.util.List;
*/
public class OSVR_GraphicsLibraryOpenGL extends Structure {
/** C type : const OSVR_OpenGLToolkitFunctions* */
public osvrrendermanageropengl.OSVR_OpenGLToolkitFunctions.ByReference toolkit;
public com.jme3.system.osvr.osvrrendermanageropengl.OSVR_OpenGLToolkitFunctions.ByReference toolkit;
public OSVR_GraphicsLibraryOpenGL() {
super();
}
@ -18,7 +18,7 @@ public class OSVR_GraphicsLibraryOpenGL extends Structure {
return Arrays.asList("toolkit");
}
/** @param toolkit C type : const OSVR_OpenGLToolkitFunctions* */
public OSVR_GraphicsLibraryOpenGL(osvrrendermanageropengl.OSVR_OpenGLToolkitFunctions.ByReference toolkit) {
public OSVR_GraphicsLibraryOpenGL(com.jme3.system.osvr.osvrrendermanageropengl.OSVR_OpenGLToolkitFunctions.ByReference toolkit) {
super();
this.toolkit = toolkit;
}

@ -1,4 +1,4 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.ochafik.lang.jnaerator.runtime.NativeSize;
import com.sun.jna.Callback;
import com.sun.jna.Pointer;

@ -1,4 +1,4 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,9 +1,9 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.jme3.system.osvr.osvrmatrixconventions.OSVR_Pose3;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;
import java.util.List;
import osvrmatrixconventions.OSVR_Pose3;
/**
* This file was autogenerated by <a href="http://jnaerator.googlecode.com/">JNAerator</a>,<br>
* a tool written by <a href="http://ochafik.com/">Olivier Chafik</a> that <a href="http://code.google.com/p/jnaerator/wiki/CreditsAndLicense">uses a few opensource projects.</a>.<br>

@ -1,4 +1,4 @@
package osvrrendermanager;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,5 @@
package osvrrendermanageropengl;
package com.jme3.system.osvr.osvrrendermanageropengl;
import com.jme3.system.osvr.osvrclientkit.OsvrClientKitLibrary;
import com.ochafik.lang.jnaerator.runtime.NativeSize;
import com.ochafik.lang.jnaerator.runtime.NativeSizeByReference;
import com.sun.jna.Library;
@ -13,7 +14,6 @@ import com.sun.jna.ptr.PointerByReference;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import osvrclientkit.OsvrClientKitLibrary;
/**
* JNA Wrapper for library <b>osvrRenderManagerOpenGL</b><br>
* This file was autogenerated by <a href="http://jnaerator.googlecode.com/">JNAerator</a>,<br>
@ -67,7 +67,7 @@ public class OsvrRenderManagerOpenGLLibrary implements Library {
*/
public static native byte osvrRenderManagerFinishRegisterRenderBuffers(Pointer renderManager, Pointer registerBufferState, byte appWillNotOverwriteBeforeNewPresent);
/** Original signature : <code>OSVR_ReturnCode osvrRenderManagerPresentSolidColorf(OSVR_RenderManager, OSVR_RGB_FLOAT)</code> */
public static native byte osvrRenderManagerPresentSolidColorf(Pointer renderManager, osvrrendermanageropengl.OSVR_RGB.ByValue rgb);
public static native byte osvrRenderManagerPresentSolidColorf(Pointer renderManager, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_RGB.ByValue rgb);
/**
* when you're done.<br>
* Original signature : <code>OSVR_ReturnCode osvrRenderManagerGetRenderInfoCollection(OSVR_RenderManager, OSVR_RenderParams, OSVR_RenderInfoCollection*)</code>
@ -86,55 +86,55 @@ public class OsvrRenderManagerOpenGLLibrary implements Library {
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_OpenGL(double*, OSVR_ProjectionMatrix)</code><br>
* @deprecated use the safer methods {@link #OSVR_Projection_to_OpenGL(java.nio.DoubleBuffer, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_OpenGL(com.sun.jna.ptr.DoubleByReference, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} instead
* @deprecated use the safer methods {@link #OSVR_Projection_to_OpenGL(java.nio.DoubleBuffer, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_OpenGL(com.sun.jna.ptr.DoubleByReference, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} instead
*/
@Deprecated
public static native byte OSVR_Projection_to_OpenGL(DoubleByReference OpenGL_out, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_OpenGL(DoubleByReference OpenGL_out, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_OpenGL(double*, OSVR_ProjectionMatrix)</code>
*/
public static native byte OSVR_Projection_to_OpenGL(DoubleBuffer OpenGL_out, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_OpenGL(DoubleBuffer OpenGL_out, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_D3D(float[16], OSVR_ProjectionMatrix)</code><br>
* @deprecated use the safer methods {@link #OSVR_Projection_to_D3D(java.nio.FloatBuffer, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_D3D(com.sun.jna.ptr.FloatByReference, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} instead
* @deprecated use the safer methods {@link #OSVR_Projection_to_D3D(java.nio.FloatBuffer, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_D3D(com.sun.jna.ptr.FloatByReference, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} instead
*/
@Deprecated
public static native byte OSVR_Projection_to_D3D(FloatByReference D3D_out, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_D3D(FloatByReference D3D_out, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_D3D(float[16], OSVR_ProjectionMatrix)</code>
*/
public static native byte OSVR_Projection_to_D3D(FloatBuffer D3D_out, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_D3D(FloatBuffer D3D_out, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_Unreal(float[16], OSVR_ProjectionMatrix)</code><br>
* @deprecated use the safer methods {@link #OSVR_Projection_to_Unreal(java.nio.FloatBuffer, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_Unreal(com.sun.jna.ptr.FloatByReference, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} instead
* @deprecated use the safer methods {@link #OSVR_Projection_to_Unreal(java.nio.FloatBuffer, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} and {@link #OSVR_Projection_to_Unreal(com.sun.jna.ptr.FloatByReference, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue)} instead
*/
@Deprecated
public static native byte OSVR_Projection_to_Unreal(FloatByReference Unreal_out, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_Unreal(FloatByReference Unreal_out, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* @return True on success, false on failure (null pointer).<br>
* Original signature : <code>OSVR_ReturnCode OSVR_Projection_to_Unreal(float[16], OSVR_ProjectionMatrix)</code>
*/
public static native byte OSVR_Projection_to_Unreal(FloatBuffer Unreal_out, osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
public static native byte OSVR_Projection_to_Unreal(FloatBuffer Unreal_out, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ProjectionMatrix.ByValue projection_in);
/**
* Original signature : <code>OSVR_ReturnCode osvrCreateRenderManagerOpenGL(OSVR_ClientContext, const char[], OSVR_GraphicsLibraryOpenGL, OSVR_RenderManager*, OSVR_RenderManagerOpenGL*)</code><br>
* @deprecated use the safer methods {@link #osvrCreateRenderManagerOpenGL(osvrrendermanageropengl.OsvrRenderManagerOpenGLLibrary.OSVR_ClientContext, byte[], osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue, com.sun.jna.ptr.PointerByReference, com.sun.jna.ptr.PointerByReference)} and {@link #osvrCreateRenderManagerOpenGL(com.sun.jna.Pointer, com.sun.jna.Pointer, osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue, com.sun.jna.ptr.PointerByReference, com.sun.jna.ptr.PointerByReference)} instead
* @deprecated use the safer methods {@link #osvrCreateRenderManagerOpenGL(com.jme3.system.osvr.osvrrendermanageropengl.OsvrRenderManagerOpenGLLibrary.OSVR_ClientContext, byte[], com.jme3.system.osvr.osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue, com.sun.jna.ptr.PointerByReference, com.sun.jna.ptr.PointerByReference)} and {@link #osvrCreateRenderManagerOpenGL(com.sun.jna.Pointer, com.sun.jna.Pointer, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue, com.sun.jna.ptr.PointerByReference, com.sun.jna.ptr.PointerByReference)} instead
*/
@Deprecated
public static native byte osvrCreateRenderManagerOpenGL(Pointer clientContext, Pointer graphicsLibraryName, osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue graphicsLibrary, PointerByReference renderManagerOut, PointerByReference renderManagerOpenGLOut);
public static native byte osvrCreateRenderManagerOpenGL(Pointer clientContext, Pointer graphicsLibraryName, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue graphicsLibrary, PointerByReference renderManagerOut, PointerByReference renderManagerOpenGLOut);
/** Original signature : <code>OSVR_ReturnCode osvrCreateRenderManagerOpenGL(OSVR_ClientContext, const char[], OSVR_GraphicsLibraryOpenGL, OSVR_RenderManager*, OSVR_RenderManagerOpenGL*)</code> */
public static native byte osvrCreateRenderManagerOpenGL(OsvrClientKitLibrary.OSVR_ClientContext clientContext, byte graphicsLibraryName[], osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue graphicsLibrary, PointerByReference renderManagerOut, PointerByReference renderManagerOpenGLOut);
public static native byte osvrCreateRenderManagerOpenGL(OsvrClientKitLibrary.OSVR_ClientContext clientContext, byte graphicsLibraryName[], com.jme3.system.osvr.osvrrendermanageropengl.OSVR_GraphicsLibraryOpenGL.ByValue graphicsLibrary, PointerByReference renderManagerOut, PointerByReference renderManagerOpenGLOut);
/** Original signature : <code>OSVR_ReturnCode osvrRenderManagerGetRenderInfoOpenGL(OSVR_RenderManagerOpenGL, OSVR_RenderInfoCount, OSVR_RenderParams, OSVR_RenderInfoOpenGL*)</code> */
public static native byte osvrRenderManagerGetRenderInfoOpenGL(Pointer renderManager, NativeSize renderInfoIndex, OSVR_RenderParams.ByValue renderParams, OSVR_RenderInfoOpenGL renderInfoOut);
/** Original signature : <code>OSVR_ReturnCode osvrRenderManagerOpenDisplayOpenGL(OSVR_RenderManagerOpenGL, OSVR_OpenResultsOpenGL*)</code> */
public static native byte osvrRenderManagerOpenDisplayOpenGL(Pointer renderManager, OSVR_OpenResultsOpenGL openResultsOut);
/** Original signature : <code>OSVR_ReturnCode osvrRenderManagerPresentRenderBufferOpenGL(OSVR_RenderManagerPresentState, OSVR_RenderBufferOpenGL, OSVR_RenderInfoOpenGL, OSVR_ViewportDescription)</code> */
public static native byte osvrRenderManagerPresentRenderBufferOpenGL(Pointer presentState, osvrrendermanageropengl.OSVR_RenderBufferOpenGL.ByValue buffer, OSVR_RenderInfoOpenGL.ByValue renderInfoUsed, osvrrendermanageropengl.OSVR_ViewportDescription.ByValue normalizedCroppingViewport);
public static native byte osvrRenderManagerPresentRenderBufferOpenGL(Pointer presentState, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_RenderBufferOpenGL.ByValue buffer, OSVR_RenderInfoOpenGL.ByValue renderInfoUsed, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ViewportDescription.ByValue normalizedCroppingViewport);
/** Original signature : <code>OSVR_ReturnCode osvrRenderManagerRegisterRenderBufferOpenGL(OSVR_RenderManagerRegisterBufferState, OSVR_RenderBufferOpenGL)</code> */
public static native byte osvrRenderManagerRegisterRenderBufferOpenGL(Pointer registerBufferState, osvrrendermanageropengl.OSVR_RenderBufferOpenGL.ByValue renderBuffer);
public static native byte osvrRenderManagerRegisterRenderBufferOpenGL(Pointer registerBufferState, com.jme3.system.osvr.osvrrendermanageropengl.OSVR_RenderBufferOpenGL.ByValue renderBuffer);
/**
* Gets a given OSVR_RenderInfoOpenGL from an OSVR_RenderInfoCollection.<br>
* Original signature : <code>OSVR_ReturnCode osvrRenderManagerGetRenderInfoFromCollectionOpenGL(OSVR_RenderInfoCollection, OSVR_RenderInfoCount, OSVR_RenderInfoOpenGL*)</code>

@ -1,4 +1,4 @@
package osvrtimevalue;
package com.jme3.system.osvr.osvrtimevalue;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;

@ -1,4 +1,4 @@
package osvrtimevalue;
package com.jme3.system.osvr.osvrtimevalue;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.NativeLibrary;

@ -0,0 +1,223 @@
package com.jme3.util;
import com.jme3.app.VREnvironment;
import com.jme3.post.CartoonSSAO;
import com.jme3.post.Filter;
import com.jme3.post.FilterPostProcessor;
import com.jme3.post.FilterUtil;
import com.jme3.post.SceneProcessor;
import com.jme3.post.filters.FogFilter;
import com.jme3.post.filters.TranslucentBucketFilter;
import com.jme3.post.ssao.SSAOFilter;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.shadow.DirectionalLightShadowFilter;
import com.jme3.shadow.VRDirectionalLightShadowRenderer;
import com.jme3.texture.Texture2D;
/**
* A VR view manager. This class holds methods that enable submitting 3D views to the VR compositor.
* System-dependent classes should extend this one.
* @author Julien Seinturier - (c) 2016 - JOrigin project - <a href="http://www.jorigin.org">http:/www.jorigin.org</a>
*/
public abstract class AbstractVRViewManager implements VRViewManager {
//private static final Logger logger = Logger.getLogger(AbstractVRViewManager.class.getName());
protected VREnvironment environment = null;
protected Camera leftCamera;
protected ViewPort leftViewport;
protected FilterPostProcessor leftPostProcessor;
protected Texture2D leftEyeTexture;
protected Texture2D leftEyeDepth;
protected Camera rightCamera;
protected ViewPort rightViewport;
protected FilterPostProcessor rightPostProcessor;
protected Texture2D rightEyeTexture;
protected Texture2D rightEyeDepth;
private float resMult = 1f;
private float heightAdjustment;
@Override
public Camera getLeftCamera() {
return leftCamera;
}
@Override
public Camera getRightCamera() {
return rightCamera;
}
@Override
public ViewPort getLeftViewport() {
return leftViewport;
}
@Override
public ViewPort getRightViewport() {
return rightViewport;
}
@Override
public Texture2D getLeftTexture(){
return leftEyeTexture;
}
@Override
public Texture2D getRightTexture(){
return rightEyeTexture;
}
@Override
public Texture2D getLeftDepth(){
return leftEyeDepth;
}
@Override
public Texture2D getRightDepth(){
return rightEyeDepth;
}
@Override
public FilterPostProcessor getLeftPostProcessor(){
return leftPostProcessor;
}
@Override
public FilterPostProcessor getRightPostProcessor(){
return rightPostProcessor;
}
@Override
public float getResolutionMuliplier() {
return resMult;
}
@Override
public void setResolutionMultiplier(float resMult) {
this.resMult = resMult;
}
@Override
public float getHeightAdjustment() {
return heightAdjustment;
}
@Override
public void setHeightAdjustment(float amount) {
heightAdjustment = amount;
}
@Override
public VREnvironment getVREnvironment(){
return environment;
}
/**
* Handles moving filters from the main view to each eye
*/
public void moveScreenProcessingToEyes() {
if (environment != null){
if( getRightViewport() == null ){
return;
}
if (environment.getApplication() != null){
syncScreenProcessing(environment.getApplication().getViewPort());
environment.getApplication().getViewPort().clearProcessors();
} else {
throw new IllegalStateException("The VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Sets the two views to use the list of {@link SceneProcessor processors}.
* @param sourceViewport the {@link ViewPort viewport} that contains the processors to use.
*/
public void syncScreenProcessing(ViewPort sourceViewport) {
if (environment != null){
if( getRightViewport() == null ){
return;
}
if (environment.getApplication() != null){
// setup post processing filters
if( getRightPostProcessor() == null ) {
rightPostProcessor = new FilterPostProcessor(environment.getApplication().getAssetManager());
leftPostProcessor = new FilterPostProcessor(environment.getApplication().getAssetManager());
}
// clear out all filters & processors, to start from scratch
getRightPostProcessor().removeAllFilters();
getLeftPostProcessor().removeAllFilters();
getLeftViewport().clearProcessors();
getRightViewport().clearProcessors();
// if we have no processors to sync, don't add the FilterPostProcessor
if( sourceViewport.getProcessors().isEmpty() ) return;
// add post processors we just made, which are empty
getLeftViewport().addProcessor(getLeftPostProcessor());
getRightViewport().addProcessor(getRightPostProcessor());
// go through all of the filters in the processors list
// add them to the left viewport processor & clone them to the right
for(SceneProcessor sceneProcessor : sourceViewport.getProcessors()) {
if (sceneProcessor instanceof FilterPostProcessor) {
for(Filter f : ((FilterPostProcessor)sceneProcessor).getFilterList() ) {
if( f instanceof TranslucentBucketFilter ) {
// just remove this filter, we will add it at the end manually
((FilterPostProcessor)sceneProcessor).removeFilter(f);
} else {
getLeftPostProcessor().addFilter(f);
// clone to the right
Filter f2;
if(f instanceof FogFilter){
f2 = FilterUtil.cloneFogFilter((FogFilter)f);
} else if (f instanceof CartoonSSAO ) {
f2 = new CartoonSSAO((CartoonSSAO)f);
} else if (f instanceof SSAOFilter){
f2 = FilterUtil.cloneSSAOFilter((SSAOFilter)f);
} else if (f instanceof DirectionalLightShadowFilter){
f2 = FilterUtil.cloneDirectionalLightShadowFilter(environment.getApplication().getAssetManager(), (DirectionalLightShadowFilter)f);
} else {
f2 = f; // dof, bloom, lightscattering etc.
}
getRightPostProcessor().addFilter(f2);
}
}
} else if (sceneProcessor instanceof VRDirectionalLightShadowRenderer) {
// shadow processing
// TODO: make right shadow processor use same left shadow maps for performance
VRDirectionalLightShadowRenderer dlsr = (VRDirectionalLightShadowRenderer) sceneProcessor;
VRDirectionalLightShadowRenderer dlsrRight = dlsr.clone();
dlsrRight.setLight(dlsr.getLight());
getRightViewport().getProcessors().add(0, dlsrRight);
getLeftViewport().getProcessors().add(0, sceneProcessor);
}
}
// make sure each has a translucent filter renderer
getLeftPostProcessor().addFilter(new TranslucentBucketFilter());
getRightPostProcessor().addFilter(new TranslucentBucketFilter());
} else {
throw new IllegalStateException("The VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
}
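
A minimal sketch of how a platform-specific view manager can build on this abstract base. The class name MyHeadsetViewManager is hypothetical; only members shown in AbstractVRViewManager and VRViewManager are assumed, and AbstractVRViewManager is assumed to live in com.jme3.util alongside the other utilities in this change.

import com.jme3.app.VREnvironment;
import com.jme3.util.AbstractVRViewManager;

public class MyHeadsetViewManager extends AbstractVRViewManager {

    public MyHeadsetViewManager(VREnvironment environment) {
        this.environment = environment; // protected field of the abstract base
    }

    @Override
    public void initialize() {
        // create the eye cameras, viewports and textures here, then reuse the
        // shared filter handling provided by the base class
        moveScreenProcessingToEyes();
    }

    @Override
    public void update(float tpf) {
        // update the eye cameras from the tracked HMD pose here
    }

    @Override
    public void postRender() {
        // submit the rendered eye textures to the system compositor here
    }
}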

@ -0,0 +1,15 @@
package com.jme3.util;
/**
* An enumeration that describes the GUI display positioning modes.
* @author Julien Seinturier - (c) 2016 - JOrigin project - <a href="http://www.jorigin.org">http:/www.jorigin.org</a>
*
*/
public enum VRGUIPositioningMode {
MANUAL,
AUTO_CAM_ALL,
AUTO_CAM_ALL_SKIP_PITCH,
AUTO_OBSERVER_POS_CAM_ROTATION,
AUTO_OBSERVER_ALL,
AUTO_OBSERVER_ALL_CAMHEIGHT
}
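
These values map directly onto VRGuiManager#setPositioningMode (defined below). A minimal usage sketch, assuming an initialized VREnvironment; the class and method names are illustrative only.

import com.jme3.app.VREnvironment;
import com.jme3.util.VRGUIPositioningMode;
import com.jme3.util.VRGuiManager;

public class GuiModeExample {

    // Hypothetical helper: configure GUI placement for a given environment.
    static void configureGui(VREnvironment environment) {
        VRGuiManager vrGui = environment.getVRGUIManager();
        // follow the cameras but ignore pitch, so the GUI stays level
        vrGui.setPositioningMode(VRGUIPositioningMode.AUTO_CAM_ALL_SKIP_PITCH);
        // place the GUI 1.5 world units in front of the observer
        vrGui.setGuiDistance(1.5f);
        // request a reposition on the next view manager update
        vrGui.positionGui();
    }
}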

@ -0,0 +1,474 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.jme3.util;
import com.jme3.app.VREnvironment;
import com.jme3.material.Material;
import com.jme3.material.RenderState.BlendMode;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Matrix3f;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue.Bucket;
import com.jme3.scene.Spatial;
import com.jme3.scene.CenterQuad;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.system.AppSettings;
import com.jme3.texture.FrameBuffer;
import com.jme3.texture.Image.Format;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import java.awt.GraphicsEnvironment;
import java.util.Iterator;
/**
* A class dedicated to the management and the display of a Graphical User Interface (GUI) within a VR environment.
* @author reden - phr00t - https://github.com/phr00t
* @author Julien Seinturier - (c) 2016 - JOrigin project - <a href="http://www.jorigin.org">http:/www.jorigin.org</a>
*
*/
public class VRGuiManager {
private Camera camLeft, camRight;
private float guiDistance = 1.5f;
private float guiScale = 1f;
private float guiPositioningElastic;
private VRGUIPositioningMode posMode = VRGUIPositioningMode.AUTO_CAM_ALL;
private final Matrix3f orient = new Matrix3f();
private Vector2f screenSize;
protected boolean wantsReposition;
private Vector2f ratio;
private final Vector3f EoldPos = new Vector3f();
private final Quaternion EoldDir = new Quaternion();
private final Vector3f look = new Vector3f();
private final Vector3f left = new Vector3f();
private final Vector3f temppos = new Vector3f();
private final Vector3f up = new Vector3f();
private boolean useCurvedSurface = false;
private boolean overdraw = false;
private Geometry guiQuad;
private Node guiQuadNode;
private ViewPort offView;
private Texture2D guiTexture;
private final Quaternion tempq = new Quaternion();
private VREnvironment environment = null;
/**
* Create a new GUI manager attached to the given VR environment.
* @param environment the VR environment to which this manager is attached.
*/
public VRGuiManager(VREnvironment environment){
this.environment = environment;
}
/**
*
* Makes automatic GUI positioning happen not immediately, but as if the GUI
* were attached to the headset by an elastic. Setting the value to 0 disables
* the effect (default); higher values make the GUI track the headset more quickly.
*
* @param elastic amount of elasticity
*/
public void setPositioningElasticity(float elastic) {
guiPositioningElastic = elastic;
}
public float getPositioningElasticity() {
return guiPositioningElastic;
}
/**
* Get the GUI {@link VRGUIPositioningMode positioning mode}.
* @return the GUI {@link VRGUIPositioningMode positioning mode}.
* @see #setPositioningMode(VRGUIPositioningMode)
*/
public VRGUIPositioningMode getPositioningMode() {
return posMode;
}
/**
* Set the GUI {@link VRGUIPositioningMode positioning mode}.
* @param mode the GUI {@link VRGUIPositioningMode positioning mode}.
* @see #getPositioningMode()
*/
public void setPositioningMode(VRGUIPositioningMode mode) {
posMode = mode;
}
/**
* Get the GUI canvas size. This method returns the size in pixels of the GUI area available within the VR view.
* @return the GUI canvas size in pixels.
*/
public Vector2f getCanvasSize() {
if (environment != null){
if (environment.getApplication() != null){
if( screenSize == null ) {
if( environment.isInVR() && environment.getVRHardware() != null ) {
screenSize = new Vector2f();
environment.getVRHardware().getRenderSize(screenSize);
screenSize.multLocal(environment.getVRViewManager().getResolutionMuliplier());
} else {
AppSettings as = environment.getApplication().getContext().getSettings();
screenSize = new Vector2f(as.getWidth(), as.getHeight());
}
}
return screenSize;
} else {
throw new IllegalStateException("VR GUI manager underlying environment is not attached to any application.");
}
} else {
throw new IllegalStateException("VR GUI manager is not attached to any environment.");
}
}
/**
* Get the ratio between the {@link #getCanvasSize() GUI canvas size} and the application main window (if available) or the screen size.
* @return the ratio between the {@link #getCanvasSize() GUI canvas size} and the application main window (if available) or the screen size.
* @see #getCanvasSize()
*/
public Vector2f getCanvasToWindowRatio() {
if (environment != null){
if (environment.getApplication() != null){
if( ratio == null ) {
ratio = new Vector2f();
Vector2f canvas = getCanvasSize();
int width = Integer.min(GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode().getWidth(),
environment.getApplication().getContext().getSettings().getWidth());
int height = Integer.min(GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode().getHeight(),
environment.getApplication().getContext().getSettings().getHeight());
ratio.x = Float.max(1f, canvas.x / width);
ratio.y = Float.max(1f, canvas.y / height);
}
return ratio;
} else {
throw new IllegalStateException("VR GUI manager underlying environment is not attached to any application.");
}
} else {
throw new IllegalStateException("VR GUI manager is not attached to any environment.");
}
}
/**
* Inform this manager that it has to position the GUI.
*/
public void positionGui() {
wantsReposition = true;
}
/**
* Position the GUI at the given location.
* @param pos the position of the GUI.
* @param dir the rotation of the GUI.
* @param tpf the time per frame.
*/
private void positionTo(Vector3f pos, Quaternion dir, float tpf) {
if (environment != null){
Vector3f guiPos = guiQuadNode.getLocalTranslation();
guiPos.set(0f, 0f, guiDistance);
dir.mult(guiPos, guiPos);
guiPos.x += pos.x;
guiPos.y += pos.y + environment.getVRHeightAdjustment();
guiPos.z += pos.z;
if( guiPositioningElastic > 0f && posMode != VRGUIPositioningMode.MANUAL ) {
// mix pos & dir with current pos & dir
guiPos.interpolateLocal(EoldPos, guiPos, Float.min(1f, tpf * guiPositioningElastic));
EoldPos.set(guiPos);
}
} else {
throw new IllegalStateException("VR GUI manager is not attached to any environment.");
}
}
/**
* Update the GUI geometric state. This method should be called after GUI modification.
*/
protected void updateGuiQuadGeometricState() {
guiQuadNode.updateGeometricState();
}
/**
* Position the GUI without delay.
* @param tpf the time per frame.
*/
protected void positionGuiNow(float tpf) {
if (environment != null){
wantsReposition = false;
if( environment.isInVR() == false ){
return;
}
guiQuadNode.setLocalScale(guiDistance * guiScale * 4f, 4f * guiDistance * guiScale, 1f);
switch( posMode ) {
case MANUAL:
case AUTO_CAM_ALL_SKIP_PITCH:
case AUTO_CAM_ALL:
if( camLeft != null && camRight != null ) {
// get middle point
temppos.set(camLeft.getLocation()).interpolateLocal(camRight.getLocation(), 0.5f);
positionTo(temppos, camLeft.getRotation(), tpf);
}
rotateScreenTo(camLeft.getRotation(), tpf);
break;
case AUTO_OBSERVER_POS_CAM_ROTATION:
Object obs = environment.getObserver();
if( obs != null ) {
if( obs instanceof Camera ) {
positionTo(((Camera)obs).getLocation(), camLeft.getRotation(), tpf);
} else {
positionTo(((Spatial)obs).getWorldTranslation(), camLeft.getRotation(), tpf);
}
}
rotateScreenTo(camLeft.getRotation(), tpf);
break;
case AUTO_OBSERVER_ALL:
case AUTO_OBSERVER_ALL_CAMHEIGHT:
obs = environment.getObserver();
if( obs != null ) {
Quaternion q;
if( obs instanceof Camera ) {
q = ((Camera)obs).getRotation();
temppos.set(((Camera)obs).getLocation());
} else {
q = ((Spatial)obs).getWorldRotation();
temppos.set(((Spatial)obs).getWorldTranslation());
}
if( posMode == VRGUIPositioningMode.AUTO_OBSERVER_ALL_CAMHEIGHT ) {
temppos.y = camLeft.getLocation().y;
}
positionTo(temppos, q, tpf);
rotateScreenTo(q, tpf);
}
break;
}
} else {
throw new IllegalStateException("VR GUI manager is not attached to any environment.");
}
}
/**
* Rotate the GUI to the given direction.
* @param dir the direction to rotate to.
* @param tpf the time per frame.
*/
private void rotateScreenTo(Quaternion dir, float tpf) {
dir.getRotationColumn(2, look).negateLocal();
dir.getRotationColumn(0, left).negateLocal();
orient.fromAxes(left, dir.getRotationColumn(1, up), look);
Quaternion rot = tempq.fromRotationMatrix(orient);
if( posMode == VRGUIPositioningMode.AUTO_CAM_ALL_SKIP_PITCH ){
VRUtil.stripToYaw(rot);
}
if( guiPositioningElastic > 0f && posMode != VRGUIPositioningMode.MANUAL ) {
// mix pos & dir with current pos & dir
EoldDir.nlerp(rot, tpf * guiPositioningElastic);
guiQuadNode.setLocalRotation(EoldDir);
} else {
guiQuadNode.setLocalRotation(rot);
}
}
/**
* Get the GUI distance from the observer.
* @return the GUI distance from the observer.
* @see #setGuiDistance(float)
*/
public float getGuiDistance() {
return guiDistance;
}
/**
* Set the GUI distance from the observer.
* @param newGuiDistance the GUI distance from the observer.
* @see #getGuiDistance()
*/
public void setGuiDistance(float newGuiDistance) {
guiDistance = newGuiDistance;
}
/**
* Get the GUI scale.
* @return the GUI scale.
* @see #setGuiScale(float)
*/
public float getGUIScale(){
return guiScale;
}
/**
* Set the GUI scale.
* @param scale the GUI scale.
* @see #getGUIScale()
*/
public void setGuiScale(float scale) {
guiScale = scale;
}
/**
* Adjust the GUI distance from the observer.
* This method increments or decrements the {@link #getGuiDistance() GUI distance} by the given value.
* @param adjustAmount the increment (if positive) / decrement (if negative) value of the GUI distance.
*/
public void adjustGuiDistance(float adjustAmount) {
guiDistance += adjustAmount;
}
/**
* Set up the GUI.
* @param leftcam the left eye camera.
* @param rightcam the right eye camera.
* @param left the left eye viewport.
* @param right the right eye viewport.
*/
protected void setupGui(Camera leftcam, Camera rightcam, ViewPort left, ViewPort right) {
if (environment != null){
if( environment.hasTraditionalGUIOverlay() ) {
camLeft = leftcam;
camRight = rightcam;
Spatial guiScene = getGuiQuad(camLeft);
left.attachScene(guiScene);
if( right != null ) right.attachScene(guiScene);
setPositioningMode(posMode);
}
} else {
throw new IllegalStateException("VR GUI manager is not attached to any environment.");
}
}
/**
* Get whether the GUI is displayed on a curved surface.
* @return <code>true</code> if the GUI is displayed on a curved surface and <code>false</code> otherwise.
* @see #setCurvedSurface(boolean)
*/
public boolean isCurverSurface(){
return useCurvedSurface;
}
/**
* Set whether the GUI has to be displayed on a curved surface.
* @param set <code>true</code> if the GUI has to be displayed on a curved surface and <code>false</code> otherwise.
* @see #isCurverSurface()
*/
public void setCurvedSurface(boolean set) {
useCurvedSurface = set;
}
/**
* Get if the GUI has to be displayed even if it is behind objects.
* @return <code>true</code> if the GUI is displayed even when it is behind objects and <code>false</code> otherwise.
* @see #setGuiOverdraw(boolean)
*/
public boolean isGuiOverdraw(){
return overdraw;
}
/**
* Set if the GUI has to be displayed even if it is behind objects.
* @param set <code>true</code> if the GUI has to be displayed even when it is behind objects and <code>false</code> otherwise.
* @see #isGuiOverdraw()
*/
public void setGuiOverdraw(boolean set) {
overdraw = set;
}
/**
* Create a GUI quad for the given camera.
* @param sourceCam the camera
* @return a GUI quad for the given camera.
*/
private Spatial getGuiQuad(Camera sourceCam){
if (environment != null){
if (environment.getApplication() != null){
if( guiQuadNode == null ) {
Vector2f guiCanvasSize = getCanvasSize();
Camera offCamera = sourceCam.clone();
offCamera.setParallelProjection(true);
offCamera.setLocation(Vector3f.ZERO);
offCamera.lookAt(Vector3f.UNIT_Z, Vector3f.UNIT_Y);
offView = environment.getApplication().getRenderManager().createPreView("GUI View", offCamera);
offView.setClearFlags(true, true, true);
offView.setBackgroundColor(ColorRGBA.BlackNoAlpha);
// create offscreen framebuffer
FrameBuffer offBuffer = new FrameBuffer((int)guiCanvasSize.x, (int)guiCanvasSize.y, 1);
//setup framebuffer's texture
guiTexture = new Texture2D((int)guiCanvasSize.x, (int)guiCanvasSize.y, Format.RGBA8);
guiTexture.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
guiTexture.setMagFilter(Texture.MagFilter.Bilinear);
//setup framebuffer to use texture
offBuffer.setDepthBuffer(Format.Depth);
offBuffer.setColorTexture(guiTexture);
//set viewport to render to offscreen framebuffer
offView.setOutputFrameBuffer(offBuffer);
// setup framebuffer's scene
Iterator<Spatial> spatialIter = environment.getApplication().getGuiViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
offView.attachScene(spatialIter.next());
}
if( useCurvedSurface ) {
guiQuad = (Geometry)environment.getApplication().getAssetManager().loadModel("Common/Util/gui_mesh.j3o");
} else {
guiQuad = new Geometry("guiQuad", new CenterQuad(1f, 1f));
}
Material mat = new Material(environment.getApplication().getAssetManager(), "Common/MatDefs/VR/GuiOverlay.j3md");
mat.getAdditionalRenderState().setDepthTest(!overdraw);
mat.getAdditionalRenderState().setBlendMode(BlendMode.Alpha);
mat.getAdditionalRenderState().setDepthWrite(false);
mat.setTexture("ColorMap", guiTexture);
guiQuad.setQueueBucket(Bucket.Translucent);
guiQuad.setMaterial(mat);
guiQuadNode = new Node("gui-quad-node");
guiQuadNode.setQueueBucket(Bucket.Translucent);
guiQuadNode.attachChild(guiQuad);
}
return guiQuadNode;
} else {
throw new IllegalStateException("VR GUI manager underlying environment is not attached to any application.");
}
} else {
throw new IllegalStateException("VR GUI manager is not attached to any environment.");
}
}
}
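
The elastic positioning above boils down to blending the GUI transform towards its target by min(1, tpf * elasticity) each frame. A standalone sketch of that smoothing, with illustrative values only:

import com.jme3.math.Vector3f;

public class ElasticSmoothingExample {

    public static void main(String[] args) {
        Vector3f current = new Vector3f(0f, 0f, 0f);   // last GUI position
        Vector3f target  = new Vector3f(0f, 0f, 1.5f); // where the headset wants it
        float elasticity = 5f;
        float tpf = 1f / 90f; // typical VR frame time
        for (int frame = 0; frame < 5; frame++) {
            float blend = Math.min(1f, tpf * elasticity);
            current.interpolateLocal(target, blend); // lerp towards the target
            System.out.println("frame " + frame + ": " + current);
        }
    }
}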

@ -0,0 +1,334 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.jme3.util;
import java.util.logging.Logger;
import org.lwjgl.glfw.GLFW;
import com.jme3.app.VREnvironment;
import com.jme3.input.MouseInput;
import com.jme3.input.controls.AnalogListener;
import com.jme3.input.lwjgl.GlfwMouseInputVR;
import com.jme3.input.vr.VRInputType;
import com.jme3.material.RenderState.BlendMode;
import com.jme3.math.Vector2f;
import com.jme3.scene.Node;
import com.jme3.system.AppSettings;
import com.jme3.system.lwjgl.LwjglWindow;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.ui.Picture;
/**
* A class dedicated to the handling of the mouse within a VR environment.
* @author Julien Seinturier - (c) 2016 - JOrigin project - <a href="http://www.jorigin.org">http:/www.jorigin.org</a>
*
*/
public class VRMouseManager {
private static final Logger logger = Logger.getLogger(VRMouseManager.class.getName());
private VREnvironment environment = null;
private final int AVERAGE_AMNT = 4;
private int avgCounter;
private Picture mouseImage;
private int recentCenterCount = 0;
private final Vector2f cursorPos = new Vector2f();
private float ySize, sensitivity = 8f, acceleration = 2f;
private final float[] lastXmv = new float[AVERAGE_AMNT], lastYmv = new float[AVERAGE_AMNT];
private boolean thumbstickMode;
private float moveScale = 1f;
private float avg(float[] arr) {
float amt = 0f;
for(float f : arr) amt += f;
return amt / arr.length;
}
/**
* Create a new VR mouse manager within the given {@link VREnvironment VR environment}.
* @param environment the VR environment of the mouse manager.
*/
public VRMouseManager(VREnvironment environment){
this.environment = environment;
}
/**
* Initialize the VR mouse manager.
*/
protected void initialize() {
logger.config("Initializing VR mouse manager.");
// load the default mouse image
mouseImage = new Picture("mouse");
setImage("Common/Util/mouse.png");
// hide default cursor by making it invisible
MouseInput mi = environment.getApplication().getContext().getMouseInput();
if( mi instanceof GlfwMouseInputVR ){
((GlfwMouseInputVR)mi).hideActiveCursor();
}
centerMouse();
logger.config("Initialized VR mouse manager [SUCCESS]");
}
public void setThumbstickMode(boolean set) {
thumbstickMode = set;
}
public boolean isThumbstickMode() {
return thumbstickMode;
}
/**
* Set the speed of the mouse.
* @param sensitivity the sensitivity of the mouse.
* @param acceleration the acceleration of the mouse.
* @see #getSpeedAcceleration()
* @see #getSpeedSensitivity()
*/
public void setSpeed(float sensitivity, float acceleration) {
this.sensitivity = sensitivity;
this.acceleration = acceleration;
}
/**
* Get the sensitivity of the mouse.
* @return the sensitivity of the mouse.
* @see #setSpeed(float, float)
*/
public float getSpeedSensitivity() {
return sensitivity;
}
/**
* Get the acceleration of the mouse.
* @return the acceleration of the mouse.
* @see #setSpeed(float, float)
*/
public float getSpeedAcceleration() {
return acceleration;
}
/**
* Set the mouse move scale.
* @param set the mouse move scale.
*/
public void setMouseMoveScale(float set) {
moveScale = set;
}
/**
* Set the image to use as mouse cursor. The given string describes an asset that the underlying application's asset manager has to load.
* @param texture the image to use as mouse cursor.
*/
public void setImage(String texture) {
if (environment != null){
if (environment.getApplication() != null){
// the cursor image is set up the same way whether or not we are in VR
Texture tex = environment.getApplication().getAssetManager().loadTexture(texture);
mouseImage.setTexture(environment.getApplication().getAssetManager(), (Texture2D)tex, true);
ySize = tex.getImage().getHeight();
mouseImage.setHeight(ySize);
mouseImage.setWidth(tex.getImage().getWidth());
mouseImage.getMaterial().getAdditionalRenderState().setBlendMode(BlendMode.Alpha);
mouseImage.getMaterial().getAdditionalRenderState().setDepthWrite(false);
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR mouse manager is not attached to any VR environment.");
}
}
/**
* Update an analog controller as if it were a mouse controller.
* @param inputIndex the index of the controller attached to the VR system.
* @param mouseListener the jMonkeyEngine mouse listener to trigger.
* @param mouseXName the mouseX identifier.
* @param mouseYName the mouseY identifier.
* @param tpf the time per frame.
*/
public void updateAnalogAsMouse(int inputIndex, AnalogListener mouseListener, String mouseXName, String mouseYName, float tpf) {
if (environment != null){
if (environment.getApplication() != null){
// got a tracked controller to use as the "mouse"
if( environment.isInVR() == false ||
environment.getVRinput() == null ||
environment.getVRinput().isInputDeviceTracking(inputIndex) == false ){
return;
}
Vector2f tpDelta;
if( thumbstickMode ) {
tpDelta = environment.getVRinput().getAxis(inputIndex, VRInputType.ViveTrackpadAxis);
} else {
tpDelta = environment.getVRinput().getAxisDeltaSinceLastCall(inputIndex, VRInputType.ViveTrackpadAxis);
}
float Xamount = (float)Math.pow(Math.abs(tpDelta.x) * sensitivity, acceleration);
float Yamount = (float)Math.pow(Math.abs(tpDelta.y) * sensitivity, acceleration);
if( tpDelta.x < 0f ){
Xamount = -Xamount;
}
if( tpDelta.y < 0f ){
Yamount = -Yamount;
}
Xamount *= moveScale; Yamount *= moveScale;
if( mouseListener != null ) {
if( tpDelta.x != 0f && mouseXName != null ) mouseListener.onAnalog(mouseXName, Xamount * 0.2f, tpf);
if( tpDelta.y != 0f && mouseYName != null ) mouseListener.onAnalog(mouseYName, Yamount * 0.2f, tpf);
}
if( environment.getApplication().getInputManager().isCursorVisible() ) {
int index = (avgCounter+1) % AVERAGE_AMNT;
lastXmv[index] = Xamount * 133f;
lastYmv[index] = Yamount * 133f;
cursorPos.x -= avg(lastXmv);
cursorPos.y -= avg(lastYmv);
Vector2f maxsize = environment.getVRGUIManager().getCanvasSize();
if( cursorPos.x > maxsize.x ){
cursorPos.x = maxsize.x;
}
if( cursorPos.x < 0f ){
cursorPos.x = 0f;
}
if( cursorPos.y > maxsize.y ){
cursorPos.y = maxsize.y;
}
if( cursorPos.y < 0f ){
cursorPos.y = 0f;
}
}
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR mouse manager is not attached to any VR environment.");
}
}
/**
* Get the current cursor position.
* @return the current cursor position.
*/
public Vector2f getCursorPosition() {
if (environment != null){
if (environment.getApplication() != null){
if( environment.isInVR() ) {
return cursorPos;
}
return environment.getApplication().getInputManager().getCursorPosition();
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR mouse manager is not attached to any VR environment.");
}
}
/**
* Center the mouse on the display.
*/
public void centerMouse() {
if (environment != null){
if (environment.getApplication() != null){
// set mouse in center of the screen if newly added
Vector2f size = environment.getVRGUIManager().getCanvasSize();
MouseInput mi = environment.getApplication().getContext().getMouseInput();
AppSettings as = environment.getApplication().getContext().getSettings();
if( mi instanceof GlfwMouseInputVR ) ((GlfwMouseInputVR)mi).setCursorPosition((int)(as.getWidth() / 2f), (int)(as.getHeight() / 2f));
if( environment.isInVR() ) {
cursorPos.x = size.x / 2f;
cursorPos.y = size.y / 2f;
recentCenterCount = 2;
}
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR mouse manager is not attached to any VR environment.");
}
}
/**
* Update the mouse manager. This method should not be called manually.
* The standard behavior for this method is to be called from the {@link VRViewManager#update(float) update method} of the attached {@link VRViewManager VR view manager}.
* @param tpf the time per frame.
*/
protected void update(float tpf) {
// if we are showing the cursor, add our picture as the cursor
if( environment.getApplication().getInputManager().isCursorVisible() ) {
if( mouseImage.getParent() == null ) {
environment.getApplication().getGuiViewPort().attachScene(mouseImage);
centerMouse();
// the "real" mouse pointer should stay hidden
if (environment.getApplication().getContext() instanceof LwjglWindow){
GLFW.glfwSetInputMode(((LwjglWindow)environment.getApplication().getContext()).getWindowHandle(), GLFW.GLFW_CURSOR, GLFW.GLFW_CURSOR_DISABLED);
}
}
// handle mouse movements, which may be in addition to (or exclusive from) tracked movement
MouseInput mi = environment.getApplication().getContext().getMouseInput();
if( mi instanceof GlfwMouseInputVR ) {
if( recentCenterCount <= 0 ) {
//Vector2f winratio = VRGuiManager.getCanvasToWindowRatio();
cursorPos.x += ((GlfwMouseInputVR)mi).getLastDeltaX();// * winratio.x;
cursorPos.y += ((GlfwMouseInputVR)mi).getLastDeltaY();// * winratio.y;
if( cursorPos.x < 0f ) cursorPos.x = 0f;
if( cursorPos.y < 0f ) cursorPos.y = 0f;
if( cursorPos.x > environment.getVRGUIManager().getCanvasSize().x ) cursorPos.x = environment.getVRGUIManager().getCanvasSize().x;
if( cursorPos.y > environment.getVRGUIManager().getCanvasSize().y ) cursorPos.y = environment.getVRGUIManager().getCanvasSize().y;
} else recentCenterCount--;
((GlfwMouseInputVR)mi).clearDeltas();
}
// ok, update the cursor graphic position
Vector2f currentPos = getCursorPosition();
mouseImage.setLocalTranslation(currentPos.x, currentPos.y - ySize, environment.getVRGUIManager().getGuiDistance() + 1f);
mouseImage.updateGeometricState();
} else if( mouseImage.getParent() != null ) {
Node n = mouseImage.getParent();
mouseImage.removeFromParent();
if (n != null){
n.updateGeometricState();
}
}
}
}
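
A usage sketch showing how per-frame code might drive this manager so that a tracked controller's trackpad behaves like a mouse. The mapping names "MouseX"/"MouseY", the controller index 0 and the onUpdate hook are assumptions, not part of this API.

import com.jme3.app.VREnvironment;
import com.jme3.input.controls.AnalogListener;
import com.jme3.util.VRMouseManager;

public class VrMouseExample {

    private final AnalogListener mouseListener = new AnalogListener() {
        @Override
        public void onAnalog(String name, float value, float tpf) {
            // react to the synthesized mouse motion here
        }
    };

    // call this once per frame, e.g. from an app state's update()
    void onUpdate(VREnvironment environment, float tpf) {
        VRMouseManager mouse = environment.getVRMouseManager();
        mouse.setSpeed(8f, 2f); // sensitivity, acceleration
        mouse.updateAnalogAsMouse(0, mouseListener, "MouseX", "MouseY", tpf);
    }
}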

@ -2,7 +2,7 @@
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jmevr.util;
package com.jme3.util;
import com.jme3.math.FastMath;
import com.jme3.math.Matrix4f;
@ -12,10 +12,7 @@ import com.jme3.system.jopenvr.HmdMatrix44_t;
import java.util.concurrent.TimeUnit;
/**
*
* @author reden
*/
public class VRUtil {
private static final long SLEEP_PRECISION = TimeUnit.MILLISECONDS.toNanos(4);

@ -0,0 +1,156 @@
package com.jme3.util;
import com.jme3.app.VRAppState;
import com.jme3.app.VREnvironment;
import com.jme3.app.state.AppState;
import com.jme3.post.FilterPostProcessor;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.texture.Texture2D;
/**
* A VR view manager. This interface describes methods that enable submitting 3D views to the VR compositor.
* @author reden - phr00t - https://github.com/phr00t
* @author Julien Seinturier - (c) 2016 - JOrigin project - <a href="http://www.jorigin.org">http:/www.jorigin.org</a>
*/
public interface VRViewManager {
/**
* The name of the left view.
*/
public final static String LEFT_VIEW_NAME = "Left View";
/**
* The name of the right view.
*/
public final static String RIGHT_VIEW_NAME = "Right View";
/**
* Get the {@link Camera camera} attached to the left eye.
* @return the {@link Camera camera} attached to the left eye.
* @see #getRightCamera()
*/
public Camera getLeftCamera();
/**
* Get the {@link Camera camera} attached to the right eye.
* @return the {@link Camera camera} attached to the right eye.
* @see #getLeftCamera()
*/
public Camera getRightCamera();
/**
* Get the {@link ViewPort viewport} attached to the left eye.
* @return the {@link ViewPort viewport} attached to the left eye.
* @see #getRightViewport()
*/
public ViewPort getLeftViewport();
/**
* Get the {@link ViewPort viewport} attached to the right eye.
* @return the {@link ViewPort viewport} attached to the right eye.
* @see #getLeftViewport()
*/
public ViewPort getRightViewport();
/**
* Get the texture attached to the left eye.
* @return the texture attached to the left eye.
* @see #getRightTexture()
*/
public Texture2D getLeftTexture();
/**
* Get the texture attached to the right eye.
* @return the texture attached to the right eye.
* @see #getLeftTexture()
*/
public Texture2D getRightTexture();
/**
* Get the depth texture attached to the left eye.
* @return the depth texture attached to the left eye.
* @see #getRightDepth()
*/
public Texture2D getLeftDepth();
/**
* Get the depth texture attached to the right eye.
* @return the depth texture attached to the right eye.
* @see #getLeftDepth()
*/
public Texture2D getRightDepth();
/**
* Get the {@link FilterPostProcessor filter post processor} attached to the left eye.
* @return the {@link FilterPostProcessor filter post processor} attached to the left eye.
* @see #getRightPostProcessor()
*/
public FilterPostProcessor getLeftPostProcessor();
/**
* Get the {@link FilterPostProcessor filter post processor} attached to the right eye.
* @return the {@link FilterPostProcessor filter post processor} attached to the right eye.
* @see #getLeftPostProcessor()
*/
public FilterPostProcessor getRightPostProcessor();
/**
* Get the resolution multiplier.
* @return the resolution multiplier.
* @see #setResolutionMultiplier(float)
*/
public float getResolutionMuliplier();
/**
* Set the resolution multiplier.
* @param resMult the resolution multiplier.
* @see #getResolutionMuliplier()
*/
public void setResolutionMultiplier(float resMult);
/**
* Get the height adjustment to apply to the cameras before rendering.
* @return the height adjustment to apply to the cameras before rendering.
* @see #setHeightAdjustment(float)
*/
public float getHeightAdjustment();
/**
* Set the height adjustment to apply to the cameras before rendering.
* @param amount the height adjustment to apply to the cameras before rendering.
* @see #getHeightAdjustment()
*/
public void setHeightAdjustment(float amount);
/**
* Get the {@link VREnvironment VR environment} to which the view manager is attached.
* @return the {@link VREnvironment VR environment} to which the view manager is attached.
*/
public VREnvironment getVREnvironment();
/**
* Initialize the VR view manager. This method should be called after the attachment of a {@link VREnvironment VR environment} to an application.
*/
public void initialize();
/**
* Update the VR view manager.
* This method is called by the attached {@link VRAppState app state} and should not be called manually.
* @param tpf the time per frame.
*/
public void update(float tpf);
/**
* Send the rendering result as textures to the two eyes.
* This method should be called after all the rendering operations
* (for example at the end of the {@link AppState#postRender() postRender()} method of the attached app state.)
*/
public void postRender();
/**
* Handles moving filters from the main view to each eye.
*/
public void moveScreenProcessingToEyes();
}
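
A sketch of the call order this interface implies for implementations driven by the attached app state; the class and method names here are illustrative only.

import com.jme3.util.VRViewManager;

public class ViewManagerLifecycleSketch {

    // Illustrative only: shows the intended ordering of the lifecycle calls.
    void drive(VRViewManager viewManager, float tpf) {
        viewManager.setResolutionMultiplier(1.2f); // optional supersampling, before initialization
        viewManager.initialize();                  // once, after a VREnvironment is attached
        viewManager.update(tpf);                   // every frame, before rendering
        viewManager.postRender();                  // every frame, after rendering
    }
}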

@ -0,0 +1,957 @@
package com.jme3.util;
import java.awt.GraphicsEnvironment;
import java.util.Iterator;
import java.util.logging.Logger;
import com.jme3.app.VREnvironment;
import com.jme3.input.vr.OSVR;
import com.jme3.input.vr.VRAPI;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.post.CartoonSSAO;
import com.jme3.post.Filter;
import com.jme3.post.FilterPostProcessor;
import com.jme3.post.FilterUtil;
import com.jme3.post.SceneProcessor;
import com.jme3.post.filters.FogFilter;
import com.jme3.post.filters.TranslucentBucketFilter;
import com.jme3.post.ssao.SSAOFilter;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue.Bucket;
import com.jme3.scene.Geometry;
import com.jme3.scene.Mesh;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.jme3.scene.VertexBuffer;
import com.jme3.shadow.DirectionalLightShadowFilter;
import com.jme3.shadow.VRDirectionalLightShadowRenderer;
import com.jme3.system.jopenvr.DistortionCoordinates_t;
import com.jme3.system.jopenvr.JOpenVRLibrary;
import com.jme3.system.jopenvr.OpenVRUtil;
import com.jme3.system.jopenvr.Texture_t;
import com.jme3.system.jopenvr.VR_IVRSystem_FnTable;
import com.jme3.system.lwjgl.LwjglWindow;
import com.jme3.system.osvr.osvrrendermanageropengl.OSVR_RenderBufferOpenGL;
import com.jme3.system.osvr.osvrrendermanageropengl.OSVR_ViewportDescription;
import com.jme3.system.osvr.osvrrendermanageropengl.OsvrRenderManagerOpenGLLibrary;
import com.jme3.texture.FrameBuffer;
import com.jme3.texture.Image;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.ui.Picture;
import com.sun.jna.Pointer;
import com.sun.jna.ptr.PointerByReference;
/**
* A VR view manager that submits the rendered eye views to an OSVR compositor.
*/
public class VRViewManagerOSVR extends AbstractVRViewManager {
private static final Logger logger = Logger.getLogger(VRViewManagerOSVR.class.getName());
private Camera leftCamera;
private ViewPort leftViewport;
private FilterPostProcessor leftPostProcessor;
private Texture2D leftEyeTexture;
private Texture2D leftEyeDepth;
private Camera rightCamera;
private ViewPort rightViewport;
private FilterPostProcessor rightPostProcessor;
private Texture2D rightEyeTexture;
private Texture2D rightEyeDepth;
// OpenVR values
private Texture_t leftTextureType;
private Texture_t rightTextureType;
// OSVR values
OSVR_RenderBufferOpenGL.ByValue[] osvr_renderBuffer;
OSVR_ViewportDescription.ByValue osvr_viewDescFull;
OSVR_ViewportDescription.ByValue osvr_viewDescLeft;
OSVR_ViewportDescription.ByValue osvr_viewDescRight;
Pointer osvr_rmBufferState;
//private static boolean useCustomDistortion;
private float heightAdjustment;
private Texture2D dualEyeTex;
private final PointerByReference grabRBS = new PointerByReference();
private float resMult = 1f;
//final & temp values for camera calculations
private final Vector3f finalPosition = new Vector3f();
private final Quaternion finalRotation = new Quaternion();
private final Vector3f hmdPos = new Vector3f();
private final Quaternion hmdRot = new Quaternion();
/**
* Create a new VR view manager attached to the given {@link VREnvironment VR environment}.
* @param environment the {@link VREnvironment VR environment} to which this view manager is attached.
*/
public VRViewManagerOSVR(VREnvironment environment){
this.environment = environment;
}
/**
* Get the {@link Camera camera} attached to the left eye.
* @return the {@link Camera camera} attached to the left eye.
* @see #getRightCamera()
*/
public Camera getLeftCamera() {
return leftCamera;
}
/**
* Get the {@link Camera camera} attached to the right eye.
* @return the {@link Camera camera} attached to the right eye.
* @see #getLeftCamera()
*/
public Camera getRightCamera() {
return rightCamera;
}
/**
* Get the {@link ViewPort viewport} attached to the left eye.
* @return the {@link ViewPort viewport} attached to the left eye.
* @see #getRightViewport()
*/
public ViewPort getLeftViewport() {
return leftViewport;
}
/**
* Get the {@link ViewPort viewport} attached to the right eye.
* @return the {@link ViewPort viewport} attached to the right eye.
* @see #getLeftViewport()
*/
public ViewPort getRightViewport() {
return rightViewport;
}
/**
* Get the identifier of the left eye texture.
* @return the identifier of the left eye texture.
* @see #getRightTexId()
* @see #getFullTexId()
*/
protected int getLeftTexId() {
return (int)leftEyeTexture.getImage().getId();
}
/**
* Get the identifier of the right eye texture.
* @return the identifier of the right eye texture.
* @see #getLeftTexId()
* @see #getFullTexId()
*/
protected int getRightTexId() {
return (int)rightEyeTexture.getImage().getId();
}
/**
* Get the identifier of the full (dual eye) texture.
* @return the identifier of the full (dual eye) texture.
* @see #getLeftTexId()
* @see #getRightTexId()
*/
private int getFullTexId() {
return (int)dualEyeTex.getImage().getId();
}
/**
* Get the height adjustment to apply to the cameras before rendering.
* @return the height adjustment to apply to the cameras before rendering.
* @see #setHeightAdjustment(float)
*/
public float getHeightAdjustment() {
return heightAdjustment;
}
/**
* Set the height adjustment to apply to the cameras before rendering.
* @param amount the height adjustment to apply to the cameras before rendering.
* @see #getHeightAdjustment()
*/
public void setHeightAdjustment(float amount) {
heightAdjustment = amount;
}
/**
* Get the resolution multiplier.
* @return the resolution multiplier.
* @see #setResolutionMultiplier(float)
*/
public float getResolutionMuliplier() {
return resMult;
}
/**
* Set the resolution multiplier.
* @param resMult the resolution multiplier.
* @see #getResolutionMuliplier()
*/
public void setResolutionMultiplier(float resMult) {
this.resMult = resMult;
}
/**
* Initialize the system binds of the textures.
*/
private void initTextureSubmitStructs() {
leftTextureType = new Texture_t();
rightTextureType = new Texture_t();
// must be OSVR
osvr_renderBuffer = new OSVR_RenderBufferOpenGL.ByValue[2];
osvr_renderBuffer[OSVR.EYE_LEFT] = new OSVR_RenderBufferOpenGL.ByValue();
osvr_renderBuffer[OSVR.EYE_RIGHT] = new OSVR_RenderBufferOpenGL.ByValue();
osvr_renderBuffer[OSVR.EYE_LEFT].setAutoSynch(false);
osvr_renderBuffer[OSVR.EYE_RIGHT].setAutoSynch(false);
osvr_viewDescFull = new OSVR_ViewportDescription.ByValue();
osvr_viewDescFull.setAutoSynch(false);
osvr_viewDescFull.left = osvr_viewDescFull.lower = 0.0;
osvr_viewDescFull.width = osvr_viewDescFull.height = 1.0;
osvr_viewDescLeft = new OSVR_ViewportDescription.ByValue();
osvr_viewDescLeft.setAutoSynch(false);
osvr_viewDescLeft.left = osvr_viewDescLeft.lower = 0.0;
osvr_viewDescLeft.width = 0.5;
osvr_viewDescLeft.height = 1.0;
osvr_viewDescRight = new OSVR_ViewportDescription.ByValue();
osvr_viewDescRight.setAutoSynch(false);
osvr_viewDescRight.left = 0.5;
osvr_viewDescRight.lower = 0.0;
osvr_viewDescRight.width = 0.5;
osvr_viewDescRight.height = 1.0;
osvr_viewDescRight.write();
osvr_viewDescLeft.write();
osvr_viewDescFull.write();
osvr_renderBuffer[OSVR.EYE_LEFT].depthStencilBufferName = -1;
osvr_renderBuffer[OSVR.EYE_LEFT].colorBufferName = -1;
osvr_renderBuffer[OSVR.EYE_RIGHT].depthStencilBufferName = -1;
osvr_renderBuffer[OSVR.EYE_RIGHT].colorBufferName = -1;
}
/**
* Register the OSVR OpenGL buffer.
* @param buf the OSVR OpenGL buffer.
*/
private void registerOSVRBuffer(OSVR_RenderBufferOpenGL.ByValue buf) {
if (environment != null){
OsvrRenderManagerOpenGLLibrary.osvrRenderManagerStartRegisterRenderBuffers(grabRBS);
OsvrRenderManagerOpenGLLibrary.osvrRenderManagerRegisterRenderBufferOpenGL(grabRBS.getValue(), buf);
OsvrRenderManagerOpenGLLibrary.osvrRenderManagerFinishRegisterRenderBuffers(((OSVR)environment.getVRHardware()).getCompositor(), grabRBS.getValue(), (byte)0);
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Send the textures to the two eyes.
*/
public void postRender() {
if (environment != null){
if( environment.isInVR() ) {
VRAPI api = environment.getVRHardware();
if( api.getCompositor() != null ) {
// using the compositor...
int errl = 0, errr = 0;
if( environment.isInstanceRendering() ) {
if( leftTextureType.handle == -1 || leftTextureType.handle != getFullTexId() ) {
leftTextureType.handle = getFullTexId();
if( leftTextureType.handle != -1 ) {
leftTextureType.write();
if( api instanceof OSVR ) {
osvr_renderBuffer[OSVR.EYE_LEFT].colorBufferName = leftTextureType.handle;
osvr_renderBuffer[OSVR.EYE_LEFT].depthStencilBufferName = dualEyeTex.getImage().getId();
osvr_renderBuffer[OSVR.EYE_LEFT].write();
registerOSVRBuffer(osvr_renderBuffer[OSVR.EYE_LEFT]);
}
}
} else {
if( api instanceof OSVR ) {
((OSVR)api).handleRenderBufferPresent(osvr_viewDescLeft, osvr_viewDescRight,
osvr_renderBuffer[OSVR.EYE_LEFT], osvr_renderBuffer[OSVR.EYE_LEFT]);
}
}
} else if( leftTextureType.handle == -1 || rightTextureType.handle == -1 ||
leftTextureType.handle != getLeftTexId() || rightTextureType.handle != getRightTexId() ) {
leftTextureType.handle = getLeftTexId();
if( leftTextureType.handle != -1 ) {
logger.fine("Writing Left texture to native memory at " + leftTextureType.getPointer());
leftTextureType.write();
if( api instanceof OSVR ) {
osvr_renderBuffer[OSVR.EYE_LEFT].colorBufferName = leftTextureType.handle;
if( leftEyeDepth != null ) osvr_renderBuffer[OSVR.EYE_LEFT].depthStencilBufferName = leftEyeDepth.getImage().getId();
osvr_renderBuffer[OSVR.EYE_LEFT].write();
registerOSVRBuffer(osvr_renderBuffer[OSVR.EYE_LEFT]);
}
}
rightTextureType.handle = getRightTexId();
if( rightTextureType.handle != -1 ) {
logger.fine("Writing Right texture to native memory at " + rightTextureType.getPointer());
rightTextureType.write();
if( api instanceof OSVR ) {
osvr_renderBuffer[OSVR.EYE_RIGHT].colorBufferName = rightTextureType.handle;
if( rightEyeDepth != null ) osvr_renderBuffer[OSVR.EYE_RIGHT].depthStencilBufferName = rightEyeDepth.getImage().getId();
osvr_renderBuffer[OSVR.EYE_RIGHT].write();
registerOSVRBuffer(osvr_renderBuffer[OSVR.EYE_RIGHT]);
}
}
} else {
if( api instanceof OSVR ) {
((OSVR)api).handleRenderBufferPresent(osvr_viewDescFull, osvr_viewDescFull,
osvr_renderBuffer[OSVR.EYE_LEFT], osvr_renderBuffer[OSVR.EYE_RIGHT]);
}
}
if( errl != 0 ){
logger.severe("Submit to left compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errl)+" ("+Integer.toString(errl)+")");
logger.severe(" Texture color space: "+OpenVRUtil.getEColorSpaceString(leftTextureType.eColorSpace));
logger.severe(" Texture type: "+OpenVRUtil.getETextureTypeString(leftTextureType.eType));
logger.severe(" Texture handle: "+leftTextureType.handle);
logger.severe(" Left eye texture "+leftEyeTexture.getName()+" ("+leftEyeTexture.getImage().getId()+")");
logger.severe(" Type: "+leftEyeTexture.getType());
logger.severe(" Size: "+leftEyeTexture.getImage().getWidth()+"x"+leftEyeTexture.getImage().getHeight());
logger.severe(" Image depth: "+leftEyeTexture.getImage().getDepth());
logger.severe(" Image format: "+leftEyeTexture.getImage().getFormat());
logger.severe(" Image color space: "+leftEyeTexture.getImage().getColorSpace());
}
if( errr != 0 ){
logger.severe("Submit to right compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errr)+" ("+Integer.toString(errr)+")");
logger.severe(" Texture color space: "+OpenVRUtil.getEColorSpaceString(rightTextureType.eColorSpace));
logger.severe(" Texture type: "+OpenVRUtil.getETextureTypeString(rightTextureType.eType));
logger.severe(" Texture handle: "+rightTextureType.handle);
logger.severe(" Right eye texture "+rightEyeTexture.getName()+" ("+rightEyeTexture.getImage().getId()+")");
logger.severe(" Type: "+rightEyeTexture.getType());
logger.severe(" Size: "+rightEyeTexture.getImage().getWidth()+"x"+rightEyeTexture.getImage().getHeight());
logger.severe(" Image depth: "+rightEyeTexture.getImage().getDepth());
logger.severe(" Image format: "+rightEyeTexture.getImage().getFormat());
logger.severe(" Image color space: "+rightEyeTexture.getImage().getColorSpace());
}
}
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Initialize the VR view manager.
*/
public void initialize() {
logger.config("Initializing VR view manager.");
if (environment != null){
initTextureSubmitStructs();
setupCamerasAndViews();
setupVRScene();
moveScreenProcessingToEyes();
if( environment.hasTraditionalGUIOverlay() ) {
environment.getVRMouseManager().initialize();
// update the pose to position the gui correctly on start
update(0f);
environment.getVRGUIManager().positionGui();
}
if (environment.getApplication() != null){
// if we are OSVR, our primary mirror window needs to be the same size as the render manager's output...
if( environment.getVRHardware() instanceof OSVR ) {
int origWidth = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode().getWidth();
int origHeight = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode().getHeight();
long window = ((LwjglWindow)environment.getApplication().getContext()).getWindowHandle();
Vector2f windowSize = new Vector2f();
((OSVR)environment.getVRHardware()).getRenderSize(windowSize);
windowSize.x = Math.max(windowSize.x * 2f, leftCamera.getWidth());
org.lwjgl.glfw.GLFW.glfwSetWindowSize(window, (int)windowSize.x, (int)windowSize.y);
environment.getApplication().getContext().getSettings().setResolution((int)windowSize.x, (int)windowSize.y);
if (environment.getApplication().getRenderManager() != null) {
environment.getApplication().getRenderManager().notifyReshape((int)windowSize.x, (int)windowSize.y);
}
org.lwjgl.glfw.GLFW.glfwSetWindowPos(window, origWidth - (int)windowSize.x, 32);
org.lwjgl.glfw.GLFW.glfwFocusWindow(window);
org.lwjgl.glfw.GLFW.glfwSetCursorPos(window, origWidth / 2.0, origHeight / 2.0);
logger.config("Initialized VR view manager [SUCCESS]");
} else {
throw new IllegalStateException("Underlying VR hardware should be "+OSVR.class.getSimpleName());
}
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Prepare the size of the given {@link Camera camera} to adapt it to the underlying rendering context.
* @param cam the {@link Camera camera} to prepare.
* @param xMult the camera width multiplier.
*/
private void prepareCameraSize(Camera cam, float xMult) {
if (environment != null){
if (environment.getApplication() == null){
throw new IllegalStateException("This VR environment is not attached to any application.");
}
Vector2f size = new Vector2f();
VRAPI vrhmd = environment.getVRHardware();
if( vrhmd == null ) {
size.x = 1280f;
size.y = 720f;
} else {
vrhmd.getRenderSize(size);
}
if( size.x < environment.getApplication().getContext().getSettings().getWidth() ) {
size.x = environment.getApplication().getContext().getSettings().getWidth();
}
if( size.y < environment.getApplication().getContext().getSettings().getHeight() ) {
size.y = environment.getApplication().getContext().getSettings().getHeight();
}
if( environment.isInstanceRendering() ){
size.x *= 2f;
}
// other adjustments
size.x *= xMult;
size.x *= resMult;
size.y *= resMult;
if( cam.getWidth() != size.x || cam.getHeight() != size.y ){
cam.resize((int)size.x, (int)size.y, false);
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Replaces rootNode as the main camera's scene with the distortion mesh.
*/
private void setupVRScene(){
if (environment != null){
if (environment.getApplication() != null){
// no special scene to setup if we are doing instancing
if( environment.isInstanceRendering() ) {
// distortion has to be done with compositor here... we want only one pass on our end!
if( environment.getApplication().getContext().getSettings().isSwapBuffers() ) {
setupMirrorBuffers(environment.getCamera(), dualEyeTex, true);
}
return;
}
leftEyeTexture = (Texture2D) leftViewport.getOutputFrameBuffer().getColorBuffer().getTexture();
rightEyeTexture = (Texture2D)rightViewport.getOutputFrameBuffer().getColorBuffer().getTexture();
leftEyeDepth = (Texture2D) leftViewport.getOutputFrameBuffer().getDepthBuffer().getTexture();
rightEyeDepth = (Texture2D)rightViewport.getOutputFrameBuffer().getDepthBuffer().getTexture();
// main viewport is either going to be a distortion scene or nothing
// mirroring is handled by copying framebuffers
Iterator<Spatial> spatialIter = environment.getApplication().getViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
environment.getApplication().getViewPort().detachScene(spatialIter.next());
}
spatialIter = environment.getApplication().getGuiViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
environment.getApplication().getGuiViewPort().detachScene(spatialIter.next());
}
// only setup distortion scene if compositor isn't running (or using custom mesh distortion option)
if( environment.getVRHardware().getCompositor() == null ) {
Node distortionScene = new Node();
Material leftMat = new Material(environment.getApplication().getAssetManager(), "Common/MatDefs/VR/OpenVR.j3md");
leftMat.setTexture("Texture", leftEyeTexture);
Geometry leftEye = new Geometry("box", setupDistortionMesh(JOpenVRLibrary.EVREye.EVREye_Eye_Left, environment.getVRHardware()));
leftEye.setMaterial(leftMat);
distortionScene.attachChild(leftEye);
Material rightMat = new Material(environment.getApplication().getAssetManager(), "Common/MatDefs/VR/OpenVR.j3md");
rightMat.setTexture("Texture", rightEyeTexture);
Geometry rightEye = new Geometry("box", setupDistortionMesh(JOpenVRLibrary.EVREye.EVREye_Eye_Right, environment.getVRHardware()));
rightEye.setMaterial(rightMat);
distortionScene.attachChild(rightEye);
distortionScene.updateGeometricState();
environment.getApplication().getViewPort().attachScene(distortionScene);
//if( useCustomDistortion ) setupFinalFullTexture(app.getViewPort().getCamera());
}
if( environment.getApplication().getContext().getSettings().isSwapBuffers() ) {
setupMirrorBuffers(environment.getCamera(), leftEyeTexture, false);
}
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Update the VR view manager.
* This method is called by the attached {@link com.jme3.app.VRAppState app state} and should not be called manually.
* @param tpf the time per frame.
*/
public void update(float tpf) {
if (environment != null){
// grab the observer
Object obs = environment.getObserver();
Quaternion objRot;
Vector3f objPos;
if( obs instanceof Camera ) {
objRot = ((Camera)obs).getRotation();
objPos = ((Camera)obs).getLocation();
} else {
objRot = ((Spatial)obs).getWorldRotation();
objPos = ((Spatial)obs).getWorldTranslation();
}
// grab the hardware handle
VRAPI dev = environment.getVRHardware();
if( dev != null ) {
// update the HMD's position & orientation
dev.updatePose();
dev.getPositionAndOrientation(hmdPos, hmdRot);
if( obs != null ) {
// update hmdPos based on obs rotation
finalRotation.set(objRot);
finalRotation.mult(hmdPos, hmdPos);
finalRotation.multLocal(hmdRot);
}
finalizeCamera(dev.getHMDVectorPoseLeftEye(), objPos, leftCamera);
finalizeCamera(dev.getHMDVectorPoseRightEye(), objPos, rightCamera);
} else {
leftCamera.setFrame(objPos, objRot);
rightCamera.setFrame(objPos, objRot);
}
if( environment.hasTraditionalGUIOverlay() ) {
// update the mouse?
environment.getVRMouseManager().update(tpf);
// update GUI position?
if( environment.getVRGUIManager().wantsReposition || environment.getVRGUIManager().getPositioningMode() != VRGUIPositioningMode.MANUAL ) {
environment.getVRGUIManager().positionGuiNow(tpf);
environment.getVRGUIManager().updateGuiQuadGeometricState();
}
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Place the camera within the scene.
* @param eyePos the eye position.
* @param obsPosition the observer position.
* @param cam the camera to place.
*/
private void finalizeCamera(Vector3f eyePos, Vector3f obsPosition, Camera cam) {
finalRotation.mult(eyePos, finalPosition);
finalPosition.addLocal(hmdPos);
if( obsPosition != null ){
finalPosition.addLocal(obsPosition);
}
finalPosition.y += heightAdjustment;
cam.setFrame(finalPosition, finalRotation);
}
/**
* Handles moving filters from the main view to each eye
*/
public void moveScreenProcessingToEyes() {
if( rightViewport == null ){
return;
}
if (environment != null){
if (environment.getApplication() != null){
syncScreenProcessing(environment.getApplication().getViewPort());
environment.getApplication().getViewPort().clearProcessors();
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Sets the two views to use the list of {@link SceneProcessor processors}.
* @param sourceViewport the {@link ViewPort viewport} that contains the processors to use.
*/
public void syncScreenProcessing(ViewPort sourceViewport) {
if( rightViewport == null ){
return;
}
if (environment != null){
if (environment.getApplication() != null){
// setup post processing filters
if( rightPostProcessor == null ) {
rightPostProcessor = new FilterPostProcessor(environment.getApplication().getAssetManager());
leftPostProcessor = new FilterPostProcessor(environment.getApplication().getAssetManager());
}
// clear out all filters & processors, to start from scratch
rightPostProcessor.removeAllFilters();
leftPostProcessor.removeAllFilters();
leftViewport.clearProcessors();
rightViewport.clearProcessors();
// if we have no processors to sync, don't add the FilterPostProcessor
if( sourceViewport.getProcessors().isEmpty() ) return;
// add post processors we just made, which are empty
leftViewport.addProcessor(leftPostProcessor);
rightViewport.addProcessor(rightPostProcessor);
// go through all of the filters in the processors list
// add them to the left viewport processor & clone them to the right
for(SceneProcessor sceneProcessor : sourceViewport.getProcessors()) {
if (sceneProcessor instanceof FilterPostProcessor) {
for(Filter f : ((FilterPostProcessor)sceneProcessor).getFilterList() ) {
if( f instanceof TranslucentBucketFilter ) {
// just remove this filter, we will add it at the end manually
((FilterPostProcessor)sceneProcessor).removeFilter(f);
} else {
leftPostProcessor.addFilter(f);
// clone to the right
Filter f2;
if(f instanceof FogFilter){
f2 = FilterUtil.cloneFogFilter((FogFilter)f);
} else if (f instanceof CartoonSSAO ) {
f2 = new CartoonSSAO((CartoonSSAO)f);
} else if (f instanceof SSAOFilter){
f2 = FilterUtil.cloneSSAOFilter((SSAOFilter)f);
} else if (f instanceof DirectionalLightShadowFilter){
f2 = FilterUtil.cloneDirectionalLightShadowFilter(environment.getApplication().getAssetManager(), (DirectionalLightShadowFilter)f);
} else {
f2 = f; // dof, bloom, lightscattering etc.
}
rightPostProcessor.addFilter(f2);
}
}
} else if (sceneProcessor instanceof VRDirectionalLightShadowRenderer) {
// shadow processing
// TODO: make right shadow processor use same left shadow maps for performance
VRDirectionalLightShadowRenderer dlsr = (VRDirectionalLightShadowRenderer) sceneProcessor;
VRDirectionalLightShadowRenderer dlsrRight = dlsr.clone();
dlsrRight.setLight(dlsr.getLight());
rightViewport.getProcessors().add(0, dlsrRight);
leftViewport.getProcessors().add(0, sceneProcessor);
}
}
// make sure each has a translucent filter renderer
leftPostProcessor.addFilter(new TranslucentBucketFilter());
rightPostProcessor.addFilter(new TranslucentBucketFilter());
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
private void setupCamerasAndViews() {
if (environment != null){
if (environment.getApplication() != null){
// get the desired frustum from the original camera
Camera origCam = environment.getCamera();
float fFar = origCam.getFrustumFar();
float fNear = origCam.getFrustumNear();
// if we are using OSVR get the eye info here
if( environment.getVRHardware() instanceof OSVR ) {
((OSVR)environment.getVRHardware()).getEyeInfo();
}
// restore frustum on distortion scene cam, if needed
if( environment.isInstanceRendering() ) {
leftCamera = origCam;
} else if( environment.compositorAllowed() == false ) {
origCam.setFrustumFar(100f);
origCam.setFrustumNear(1f);
leftCamera = origCam.clone();
prepareCameraSize(origCam, 2f);
} else {
leftCamera = origCam.clone();
}
leftCamera.setFrustumPerspective(environment.getDefaultFOV(), environment.getDefaultAspect(), fNear, fFar);
prepareCameraSize(leftCamera, 1f);
if( environment.getVRHardware() != null ) leftCamera.setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionLeftEye(leftCamera));
//org.lwjgl.opengl.GL11.glEnable(org.lwjgl.opengl.GL30.GL_FRAMEBUFFER_SRGB);
if( !environment.isInstanceRendering()) {
leftViewport = setupViewBuffers(leftCamera, LEFT_VIEW_NAME);
rightCamera = leftCamera.clone();
if( environment.getVRHardware() != null ){
rightCamera.setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionRightEye(rightCamera));
}
rightViewport = setupViewBuffers(rightCamera, RIGHT_VIEW_NAME);
} else {
System.err.println("[VRViewManager] THIS CODE NEEDS CHANGES !!!");
leftViewport = environment.getApplication().getViewPort();
//leftViewport.attachScene(app.getRootNode());
rightCamera = leftCamera.clone();
if( environment.getVRHardware() != null ){
rightCamera.setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionRightEye(rightCamera));
}
org.lwjgl.opengl.GL11.glEnable(org.lwjgl.opengl.GL30.GL_CLIP_DISTANCE0);
//FIXME: [jme-vr] Fix with JMonkey next release
//RenderManager._VRInstancing_RightCamProjection = camRight.getViewProjectionMatrix();
setupFinalFullTexture(environment.getApplication().getViewPort().getCamera());
}
// setup gui
environment.getVRGUIManager().setupGui(leftCamera, rightCamera, leftViewport, rightViewport);
if( environment.getVRHardware() != null ) {
// call these to cache the results internally
environment.getVRHardware().getHMDMatrixPoseLeftEye();
environment.getVRHardware().getHMDMatrixPoseRightEye();
}
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
private ViewPort setupMirrorBuffers(Camera cam, Texture tex, boolean expand) {
if (environment != null){
if (environment.getApplication() != null){
Camera clonecam = cam.clone();
ViewPort viewPort = environment.getApplication().getRenderManager().createPostView("MirrorView", clonecam);
clonecam.setParallelProjection(true);
viewPort.setClearFlags(true, true, true);
viewPort.setBackgroundColor(ColorRGBA.Black);
Picture pic = new Picture("fullscene");
pic.setLocalTranslation(-0.75f, -0.5f, 0f);
if( expand ) {
pic.setLocalScale(3f, 1f, 1f);
} else {
pic.setLocalScale(1.5f, 1f, 1f);
}
pic.setQueueBucket(Bucket.Opaque);
pic.setTexture(environment.getApplication().getAssetManager(), (Texture2D)tex, false);
viewPort.attachScene(pic);
viewPort.setOutputFrameBuffer(null);
pic.updateGeometricState();
return viewPort;
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
private void setupFinalFullTexture(Camera cam) {
if (environment != null){
if (environment.getApplication() != null){
// create offscreen framebuffer
FrameBuffer out = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
//offBuffer.setSrgb(true);
//setup framebuffer's texture
dualEyeTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
dualEyeTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
dualEyeTex.setMagFilter(Texture.MagFilter.Bilinear);
logger.config("Dual eye texture "+dualEyeTex.getName()+" ("+dualEyeTex.getImage().getId()+")");
logger.config(" Type: "+dualEyeTex.getType());
logger.config(" Size: "+dualEyeTex.getImage().getWidth()+"x"+dualEyeTex.getImage().getHeight());
logger.config(" Image depth: "+dualEyeTex.getImage().getDepth());
logger.config(" Image format: "+dualEyeTex.getImage().getFormat());
logger.config(" Image color space: "+dualEyeTex.getImage().getColorSpace());
//setup framebuffer to use texture
out.setDepthBuffer(Image.Format.Depth);
out.setColorTexture(dualEyeTex);
ViewPort viewPort = environment.getApplication().getViewPort();
viewPort.setClearFlags(true, true, true);
viewPort.setBackgroundColor(ColorRGBA.Black);
viewPort.setOutputFrameBuffer(out);
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
private ViewPort setupViewBuffers(Camera cam, String viewName){
if (environment != null){
if (environment.getApplication() != null){
// create offscreen framebuffer
FrameBuffer offBufferLeft = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
//offBufferLeft.setSrgb(true);
//setup framebuffer's texture
Texture2D offTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
offTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
offTex.setMagFilter(Texture.MagFilter.Bilinear);
//setup framebuffer to use texture
offBufferLeft.setDepthBuffer(Image.Format.Depth);
offBufferLeft.setColorTexture(offTex);
ViewPort viewPort = environment.getApplication().getRenderManager().createPreView(viewName, cam);
viewPort.setClearFlags(true, true, true);
viewPort.setBackgroundColor(ColorRGBA.Black);
Iterator<Spatial> spatialIter = environment.getApplication().getViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
viewPort.attachScene(spatialIter.next());
}
//set viewport to render to offscreen framebuffer
viewPort.setOutputFrameBuffer(offBufferLeft);
return viewPort;
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Set up a distortion mesh for the stereo view.
* @param eye the eye to compute the distortion for.
* @param api the underlying VR API.
* @return the distortion mesh.
*/
public static Mesh setupDistortionMesh(int eye, VRAPI api) {
Mesh distortionMesh = new Mesh();
float m_iLensGridSegmentCountH = 43, m_iLensGridSegmentCountV = 43;
float w = 1f / (m_iLensGridSegmentCountH - 1f);
float h = 1f / (m_iLensGridSegmentCountV - 1f);
float u, v;
float verts[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 3];
float texcoordR[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
float texcoordG[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
float texcoordB[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
int vertPos = 0, coordPos = 0;
float Xoffset = eye == JOpenVRLibrary.EVREye.EVREye_Eye_Left ? -1f : 0;
for (int y = 0; y < m_iLensGridSegmentCountV; y++) {
for (int x = 0; x < m_iLensGridSegmentCountH; x++) {
u = x * w;
v = 1 - y * h;
verts[vertPos] = Xoffset + u; // x
verts[vertPos + 1] = -1 + 2 * y * h; // y
verts[vertPos + 2] = 0f; // z
vertPos += 3;
DistortionCoordinates_t dc0 = new DistortionCoordinates_t();
if( api.getVRSystem() == null ) {
// default to no distortion
texcoordR[coordPos] = u;
texcoordR[coordPos + 1] = 1 - v;
texcoordG[coordPos] = u;
texcoordG[coordPos + 1] = 1 - v;
texcoordB[coordPos] = u;
texcoordB[coordPos + 1] = 1 - v;
} else {
((VR_IVRSystem_FnTable)api.getVRSystem()).ComputeDistortion.apply(eye, u, v, dc0);
texcoordR[coordPos] = dc0.rfRed[0];
texcoordR[coordPos + 1] = 1 - dc0.rfRed[1];
texcoordG[coordPos] = dc0.rfGreen[0];
texcoordG[coordPos + 1] = 1 - dc0.rfGreen[1];
texcoordB[coordPos] = dc0.rfBlue[0];
texcoordB[coordPos + 1] = 1 - dc0.rfBlue[1];
}
coordPos += 2;
}
}
// have UV coordinates & positions, now to setup indices
int[] indices = new int[(int) ((m_iLensGridSegmentCountV - 1) * (m_iLensGridSegmentCountH - 1)) * 6];
int indexPos = 0;
int a, b, c, d;
int offset = 0;
for (int y = 0; y < m_iLensGridSegmentCountV - 1; y++) {
for (int x = 0; x < m_iLensGridSegmentCountH - 1; x++) {
a = (int) (m_iLensGridSegmentCountH * y + x + offset);
b = (int) (m_iLensGridSegmentCountH * y + x + 1 + offset);
c = (int) ((y + 1) * m_iLensGridSegmentCountH + x + 1 + offset);
d = (int) ((y + 1) * m_iLensGridSegmentCountH + x + offset);
indices[indexPos] = a;
indices[indexPos + 1] = b;
indices[indexPos + 2] = c;
indices[indexPos + 3] = a;
indices[indexPos + 4] = c;
indices[indexPos + 5] = d;
indexPos += 6;
}
}
// OK, create the mesh
distortionMesh.setBuffer(VertexBuffer.Type.Position, 3, verts);
distortionMesh.setBuffer(VertexBuffer.Type.Index, 1, indices);
distortionMesh.setBuffer(VertexBuffer.Type.TexCoord, 2, texcoordR);
distortionMesh.setBuffer(VertexBuffer.Type.TexCoord2, 2, texcoordG);
distortionMesh.setBuffer(VertexBuffer.Type.TexCoord3, 2, texcoordB);
distortionMesh.setStatic();
return distortionMesh;
}
}

@@ -0,0 +1,732 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.jme3.util;
import com.jme3.app.VREnvironment;
import com.jme3.input.vr.OpenVR;
import com.jme3.input.vr.VRAPI;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue.Bucket;
import com.jme3.scene.Geometry;
import com.jme3.scene.Mesh;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.jme3.scene.VertexBuffer;
import com.jme3.system.jopenvr.DistortionCoordinates_t;
import com.jme3.system.jopenvr.JOpenVRLibrary;
import com.jme3.system.jopenvr.OpenVRUtil;
import com.jme3.system.jopenvr.Texture_t;
import com.jme3.system.jopenvr.VRTextureBounds_t;
import com.jme3.system.jopenvr.VR_IVRSystem_FnTable;
import com.jme3.texture.FrameBuffer;
import com.jme3.texture.Image;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.ui.Picture;
import java.util.Iterator;
import java.util.logging.Logger;
/**
* A VR view manager based on OpenVR. This class enables submitting 3D views to the VR compositor.
* @author reden - phr00t - https://github.com/phr00t
* @author Julien Seinturier - (c) 2016 - JOrigin project - <a href="http://www.jorigin.org">http:/www.jorigin.org</a>
*/
public class VRViewManagerOpenVR extends AbstractVRViewManager {
private static final Logger logger = Logger.getLogger(VRViewManagerOpenVR.class.getName());
// OpenVR values
private VRTextureBounds_t leftTextureBounds;
private Texture_t leftTextureType;
private VRTextureBounds_t rightTextureBounds;
private Texture_t rightTextureType;
private Texture2D dualEyeTex;
//final & temp values for camera calculations
private final Vector3f finalPosition = new Vector3f();
private final Quaternion finalRotation = new Quaternion();
private final Vector3f hmdPos = new Vector3f();
private final Quaternion hmdRot = new Quaternion();
/**
* Create a new VR view manager attached to the given {@link VREnvironment VR environment}.
* @param environment the {@link VREnvironment VR environment} to which this view manager is attached.
*/
public VRViewManagerOpenVR(VREnvironment environment){
this.environment = environment;
}
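// Usage sketch: this manager is normally created and driven through a VRAppState and its
// VREnvironment; assuming an already configured VREnvironment named "env" (hypothetical
// name), the lifecycle is roughly:
//
//   VRViewManagerOpenVR viewManager = new VRViewManagerOpenVR(env);
//   viewManager.initialize();    // builds eye cameras, viewports and the native texture structs
//   // once per frame:
//   viewManager.update(tpf);     // pulls the HMD pose and places both eye cameras
//   viewManager.postRender();    // submits the rendered eye textures to the OpenVR compositor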
/**
* Get the identifier of the left eye texture.
* @return the identifier of the left eye texture.
* @see #getRightTexId()
* @see #getFullTexId()
*/
protected int getLeftTexId() {
return (int)getLeftTexture().getImage().getId();
}
/**
* Get the identifier of the right eye texture.
* @return the identifier of the right eye texture.
* @see #getLeftTexId()
* @see #getFullTexId()
*/
protected int getRightTexId() {
return (int)getRightTexture().getImage().getId();
}
/**
* Get the identifier of the full (dual eye) texture.
* @return the identifier of the full (dual eye) texture.
* @see #getLeftTexId()
* @see #getRightTexId()
*/
private int getFullTexId() {
return (int)dualEyeTex.getImage().getId();
}
/**
* Initialize the native texture binding structures used for compositor submission.
*/
private void initTextureSubmitStructs() {
leftTextureType = new Texture_t();
rightTextureType = new Texture_t();
if (environment != null){
if( environment.getVRHardware() instanceof OpenVR ) {
leftTextureBounds = new VRTextureBounds_t();
rightTextureBounds = new VRTextureBounds_t();
// left eye
leftTextureBounds.uMax = 0.5f;
leftTextureBounds.uMin = 0f;
leftTextureBounds.vMax = 1f;
leftTextureBounds.vMin = 0f;
leftTextureBounds.setAutoSynch(false);
leftTextureBounds.setAutoRead(false);
leftTextureBounds.setAutoWrite(false);
leftTextureBounds.write();
// right eye
rightTextureBounds.uMax = 1f;
rightTextureBounds.uMin = 0.5f;
rightTextureBounds.vMax = 1f;
rightTextureBounds.vMin = 0f;
rightTextureBounds.setAutoSynch(false);
rightTextureBounds.setAutoRead(false);
rightTextureBounds.setAutoWrite(false);
rightTextureBounds.write();
// texture type
leftTextureType.eColorSpace = JOpenVRLibrary.EColorSpace.EColorSpace_ColorSpace_Gamma;
leftTextureType.eType = JOpenVRLibrary.ETextureType.ETextureType_TextureType_OpenGL;
leftTextureType.setAutoSynch(false);
leftTextureType.setAutoRead(false);
leftTextureType.setAutoWrite(false);
leftTextureType.handle = -1;
rightTextureType.eColorSpace = JOpenVRLibrary.EColorSpace.EColorSpace_ColorSpace_Gamma;
rightTextureType.eType = JOpenVRLibrary.ETextureType.ETextureType_TextureType_OpenGL;
rightTextureType.setAutoSynch(false);
rightTextureType.setAutoRead(false);
rightTextureType.setAutoWrite(false);
rightTextureType.handle = -1;
logger.config("Init eyes native texture binds");
logger.config(" Left eye texture");
logger.config(" address: "+leftTextureType.getPointer());
logger.config(" size: "+leftTextureType.size()+" bytes");
logger.config(" color space: "+OpenVRUtil.getEColorSpaceString(leftTextureType.eColorSpace));
logger.config(" type: "+OpenVRUtil.getETextureTypeString(leftTextureType.eType));
logger.config(" auto read: "+leftTextureType.getAutoRead());
logger.config(" auto write: "+leftTextureType.getAutoWrite());
logger.config(" handle address: "+leftTextureType.handle);
logger.config(" handle value: "+leftTextureType.handle);
logger.config("");
logger.config(" Right eye texture");
logger.config(" address: "+rightTextureType.getPointer());
logger.config(" size: "+rightTextureType.size()+" bytes");
logger.config(" color space: "+OpenVRUtil.getEColorSpaceString(rightTextureType.eColorSpace));
logger.config(" type: "+OpenVRUtil.getETextureTypeString(rightTextureType.eType));
logger.config(" auto read: "+rightTextureType.getAutoRead());
logger.config(" auto write: "+rightTextureType.getAutoWrite());
logger.config(" handle address: "+rightTextureType.handle);
logger.config(" handle value: "+rightTextureType.handle);
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
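// The bounds above split a single side-by-side (dual eye) texture between the eyes:
// the left eye samples u in [0.0, 0.5] and the right eye samples u in [0.5, 1.0], both
// over the full v range. Sketch of how the instanced path in postRender() uses them,
// submitting the same texture handle once per eye ("compositor" stands for
// ((OpenVR)api).getCompositor()):
//
//   compositor.Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left,  leftTextureType, leftTextureBounds,  submitFlag);
//   compositor.Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, leftTextureType, rightTextureBounds, submitFlag);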
@Override
public void postRender() {
if (environment != null){
if( environment.isInVR() ) {
VRAPI api = environment.getVRHardware();
if( api.getCompositor() != null ) {
// using the compositor...
int errl = 0, errr = 0;
if( environment.isInstanceRendering() ) {
if( leftTextureType.handle == -1 || leftTextureType.handle != getFullTexId() ) {
leftTextureType.handle = getFullTexId();
if( leftTextureType.handle != -1 ) {
leftTextureType.write();
}
} else {
if( api instanceof OpenVR ) {
int submitFlag = JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default;
errr = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, leftTextureType, rightTextureBounds, submitFlag);
errl = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left, leftTextureType, leftTextureBounds, submitFlag);
}
}
} else if( leftTextureType.handle == -1 || rightTextureType.handle == -1 ||
leftTextureType.handle != getLeftTexId() || rightTextureType.handle != getRightTexId() ) {
leftTextureType.handle = getLeftTexId();
if( leftTextureType.handle != -1 ) {
logger.fine("Writing Left texture to native memory at " + leftTextureType.getPointer());
leftTextureType.write();
}
rightTextureType.handle = getRightTexId();
if( rightTextureType.handle != -1 ) {
logger.fine("Writing Right texture to native memory at " + leftTextureType.getPointer());
rightTextureType.write();
}
} else {
if( api instanceof OpenVR ) {
errl = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left, leftTextureType, null,
JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default);
errr = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, rightTextureType, null,
JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default);
} else {
}
}
if( errl != 0 ){
logger.severe("Submit to left compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errl)+" ("+Integer.toString(errl)+")");
logger.severe(" Texture color space: "+OpenVRUtil.getEColorSpaceString(leftTextureType.eColorSpace));
logger.severe(" Texture type: "+OpenVRUtil.getETextureTypeString(leftTextureType.eType));
logger.severe(" Texture handle: "+leftTextureType.handle);
logger.severe(" Left eye texture "+leftEyeTexture.getName()+" ("+leftEyeTexture.getImage().getId()+")");
logger.severe(" Type: "+leftEyeTexture.getType());
logger.severe(" Size: "+leftEyeTexture.getImage().getWidth()+"x"+leftEyeTexture.getImage().getHeight());
logger.severe(" Image depth: "+leftEyeTexture.getImage().getDepth());
logger.severe(" Image format: "+leftEyeTexture.getImage().getFormat());
logger.severe(" Image color space: "+leftEyeTexture.getImage().getColorSpace());
}
if( errr != 0 ){
logger.severe("Submit to right compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errl)+" ("+Integer.toString(errl)+")");
logger.severe(" Texture color space: "+OpenVRUtil.getEColorSpaceString(rightTextureType.eColorSpace));
logger.severe(" Texture type: "+OpenVRUtil.getETextureTypeString(rightTextureType.eType));
logger.severe(" Texture handle: "+rightTextureType.handle);
logger.severe(" Right eye texture "+rightEyeTexture.getName()+" ("+rightEyeTexture.getImage().getId()+")");
logger.severe(" Type: "+rightEyeTexture.getType());
logger.severe(" Size: "+rightEyeTexture.getImage().getWidth()+"x"+rightEyeTexture.getImage().getHeight());
logger.severe(" Image depth: "+rightEyeTexture.getImage().getDepth());
logger.severe(" Image format: "+rightEyeTexture.getImage().getFormat());
logger.severe(" Image color space: "+rightEyeTexture.getImage().getColorSpace());
}
}
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
@Override
public void initialize() {
logger.config("Initializing VR view manager.");
if (environment != null){
initTextureSubmitStructs();
setupCamerasAndViews();
setupVRScene();
moveScreenProcessingToEyes();
if( environment.hasTraditionalGUIOverlay() ) {
environment.getVRMouseManager().initialize();
// update the pose to position the gui correctly on start
update(0f);
environment.getVRGUIManager().positionGui();
}
logger.config("Initialized VR view manager [SUCCESS]");
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Prepare the size of the given {@link Camera camera} to adapt it to the underlying rendering context.
* @param cam the {@link Camera camera} to prepare.
* @param xMult the camera width multiplier.
*/
private void prepareCameraSize(Camera cam, float xMult) {
if (environment != null){
if (environment.getApplication() != null){
Vector2f size = new Vector2f();
VRAPI vrhmd = environment.getVRHardware();
if( vrhmd == null ) {
size.x = 1280f;
size.y = 720f;
} else {
vrhmd.getRenderSize(size);
}
if( size.x < environment.getApplication().getContext().getSettings().getWidth() ) {
size.x = environment.getApplication().getContext().getSettings().getWidth();
}
if( size.y < environment.getApplication().getContext().getSettings().getHeight() ) {
size.y = environment.getApplication().getContext().getSettings().getHeight();
}
if( environment.isInstanceRendering() ){
size.x *= 2f;
}
// other adjustments
size.x *= xMult;
size.x *= getResolutionMuliplier();
size.y *= getResolutionMuliplier();
if( cam.getWidth() != size.x || cam.getHeight() != size.y ){
cam.resize((int)size.x, (int)size.y, false);
}
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
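// Worked example of the sizing logic above (illustrative numbers): with a reported HMD
// render size of 1512x1680 per eye, a 1280x720 window, instanced rendering enabled,
// xMult = 1 and a resolution multiplier of 1:
//
//   size = (1512, 1680)      // from vrhmd.getRenderSize(size)
//   size = (1512, 1680)      // window is smaller in both axes, so no clamping applies
//   size.x *= 2  -> 3024     // instanced rendering draws both eyes side by side
//   size *= 1 * 1            // xMult and resolution multiplier
//   cam.resize(3024, 1680, false);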
/**
* Replaces rootNode as the main camera's scene with the distortion mesh.
*/
private void setupVRScene(){
if (environment != null){
if (environment.getApplication() != null){
// no special scene to setup if we are doing instancing
if( environment.isInstanceRendering() ) {
// distortion has to be done with compositor here... we want only one pass on our end!
if( environment.getApplication().getContext().getSettings().isSwapBuffers() ) {
setupMirrorBuffers(environment.getCamera(), dualEyeTex, true);
}
return;
}
leftEyeTexture = (Texture2D) getLeftViewport().getOutputFrameBuffer().getColorBuffer().getTexture();
rightEyeTexture = (Texture2D)getRightViewport().getOutputFrameBuffer().getColorBuffer().getTexture();
leftEyeDepth = (Texture2D) getLeftViewport().getOutputFrameBuffer().getDepthBuffer().getTexture();
rightEyeDepth = (Texture2D)getRightViewport().getOutputFrameBuffer().getDepthBuffer().getTexture();
// main viewport is either going to be a distortion scene or nothing
// mirroring is handled by copying framebuffers
Iterator<Spatial> spatialIter = environment.getApplication().getViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
environment.getApplication().getViewPort().detachScene(spatialIter.next());
}
spatialIter = environment.getApplication().getGuiViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
environment.getApplication().getGuiViewPort().detachScene(spatialIter.next());
}
// only setup distortion scene if compositor isn't running (or using custom mesh distortion option)
if( environment.getVRHardware().getCompositor() == null ) {
Node distortionScene = new Node();
Material leftMat = new Material(environment.getApplication().getAssetManager(), "Common/MatDefs/VR/OpenVR.j3md");
leftMat.setTexture("Texture", leftEyeTexture);
Geometry leftEye = new Geometry("box", setupDistortionMesh(JOpenVRLibrary.EVREye.EVREye_Eye_Left, environment.getVRHardware()));
leftEye.setMaterial(leftMat);
distortionScene.attachChild(leftEye);
Material rightMat = new Material(environment.getApplication().getAssetManager(), "Common/MatDefs/VR/OpenVR.j3md");
rightMat.setTexture("Texture", rightEyeTexture);
Geometry rightEye = new Geometry("box", setupDistortionMesh(JOpenVRLibrary.EVREye.EVREye_Eye_Right, environment.getVRHardware()));
rightEye.setMaterial(rightMat);
distortionScene.attachChild(rightEye);
distortionScene.updateGeometricState();
environment.getApplication().getViewPort().attachScene(distortionScene);
//if( useCustomDistortion ) setupFinalFullTexture(app.getViewPort().getCamera());
}
if( environment.getApplication().getContext().getSettings().isSwapBuffers() ) {
setupMirrorBuffers(environment.getCamera(), leftEyeTexture, false);
}
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
@Override
public void update(float tpf) {
if (environment != null){
// grab the observer
Object obs = environment.getObserver();
Quaternion objRot;
Vector3f objPos;
if( obs instanceof Camera ) {
objRot = ((Camera)obs).getRotation();
objPos = ((Camera)obs).getLocation();
} else {
objRot = ((Spatial)obs).getWorldRotation();
objPos = ((Spatial)obs).getWorldTranslation();
}
// grab the hardware handle
VRAPI dev = environment.getVRHardware();
if( dev != null ) {
// update the HMD's position & orientation
dev.updatePose();
dev.getPositionAndOrientation(hmdPos, hmdRot);
if( obs != null ) {
// update hmdPos based on obs rotation
finalRotation.set(objRot);
finalRotation.mult(hmdPos, hmdPos);
finalRotation.multLocal(hmdRot);
}
finalizeCamera(dev.getHMDVectorPoseLeftEye(), objPos, getLeftCamera());
finalizeCamera(dev.getHMDVectorPoseRightEye(), objPos, getRightCamera());
} else {
getLeftCamera().setFrame(objPos, objRot);
getRightCamera().setFrame(objPos, objRot);
}
if( environment.hasTraditionalGUIOverlay() ) {
// update the mouse?
environment.getVRMouseManager().update(tpf);
// update GUI position?
if( environment.getVRGUIManager().wantsReposition || environment.getVRGUIManager().getPositioningMode() != VRGUIPositioningMode.MANUAL ) {
environment.getVRGUIManager().positionGuiNow(tpf);
environment.getVRGUIManager().updateGuiQuadGeometricState();
}
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Place the camera within the scene.
* @param eyePos the eye position.
* @param obsPosition the observer position.
* @param cam the camera to place.
*/
private void finalizeCamera(Vector3f eyePos, Vector3f obsPosition, Camera cam) {
finalRotation.mult(eyePos, finalPosition);
finalPosition.addLocal(hmdPos);
if( obsPosition != null ) finalPosition.addLocal(obsPosition);
finalPosition.y += getHeightAdjustment();
cam.setFrame(finalPosition, finalRotation);
}
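// Pose composition performed by update() and finalizeCamera(), written out (same math
// as the code above):
//
//   camRotation = observerRotation * hmdRotation                  // finalRotation
//   camPosition = observerPosition
//               + observerRotation * hmdPosition                  // hmdPos, rotated in update()
//               + camRotation * eyeOffset                         // finalRotation.mult(eyePos)
//               + (0, heightAdjustment, 0)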
private void setupCamerasAndViews() {
if (environment != null){
// get desired frustum from original camera
Camera origCam = environment.getCamera();
float fFar = origCam.getFrustumFar();
float fNear = origCam.getFrustumNear();
// restore frustum on distortion scene cam, if needed
if( environment.isInstanceRendering() ) {
leftCamera = origCam;
} else if( environment.compositorAllowed() == false ) {
origCam.setFrustumFar(100f);
origCam.setFrustumNear(1f);
leftCamera = origCam.clone();
prepareCameraSize(origCam, 2f);
} else {
leftCamera = origCam.clone();
}
getLeftCamera().setFrustumPerspective(environment.getDefaultFOV(), environment.getDefaultAspect(), fNear, fFar);
prepareCameraSize(getLeftCamera(), 1f);
if( environment.getVRHardware() != null ) {
getLeftCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionLeftEye(getLeftCamera()));
}
//org.lwjgl.opengl.GL11.glEnable(org.lwjgl.opengl.GL30.GL_FRAMEBUFFER_SRGB);
if( !environment.isInstanceRendering()) {
leftViewport = setupViewBuffers(getLeftCamera(), LEFT_VIEW_NAME);
rightCamera = getLeftCamera().clone();
if( environment.getVRHardware() != null ){
getRightCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionRightEye(getRightCamera()));
}
rightViewport = setupViewBuffers(getRightCamera(), RIGHT_VIEW_NAME);
} else {
if (environment.getApplication() != null){
logger.severe("THIS CODE NEED CHANGES !!!");
leftViewport = environment.getApplication().getViewPort();
//leftViewport.attachScene(app.getRootNode());
rightCamera = getLeftCamera().clone();
if( environment.getVRHardware() != null ){
getRightCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionRightEye(getRightCamera()));
}
org.lwjgl.opengl.GL11.glEnable(org.lwjgl.opengl.GL30.GL_CLIP_DISTANCE0);
//FIXME: [jme-vr] Fix with JMonkey next release
//RenderManager._VRInstancing_RightCamProjection = camRight.getViewProjectionMatrix();
setupFinalFullTexture(environment.getApplication().getViewPort().getCamera());
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
}
// setup gui
environment.getVRGUIManager().setupGui(getLeftCamera(), getRightCamera(), getLeftViewport(), getRightViewport());
if( environment.getVRHardware() != null ) {
// call these to cache the results internally
environment.getVRHardware().getHMDMatrixPoseLeftEye();
environment.getVRHardware().getHMDMatrixPoseRightEye();
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
private ViewPort setupMirrorBuffers(Camera cam, Texture tex, boolean expand) {
if (environment != null){
if (environment.getApplication() != null){
Camera clonecam = cam.clone();
ViewPort viewPort = environment.getApplication().getRenderManager().createPostView("MirrorView", clonecam);
clonecam.setParallelProjection(true);
viewPort.setClearFlags(true, true, true);
viewPort.setBackgroundColor(ColorRGBA.Black);
Picture pic = new Picture("fullscene");
pic.setLocalTranslation(-0.75f, -0.5f, 0f);
if( expand ) {
pic.setLocalScale(3f, 1f, 1f);
} else {
pic.setLocalScale(1.5f, 1f, 1f);
}
pic.setQueueBucket(Bucket.Opaque);
pic.setTexture(environment.getApplication().getAssetManager(), (Texture2D)tex, false);
viewPort.attachScene(pic);
viewPort.setOutputFrameBuffer(null);
pic.updateGeometricState();
return viewPort;
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
private void setupFinalFullTexture(Camera cam) {
if (environment != null){
if (environment.getApplication() != null){
// create offscreen framebuffer
FrameBuffer out = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
//offBuffer.setSrgb(true);
//setup framebuffer's texture
dualEyeTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
dualEyeTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
dualEyeTex.setMagFilter(Texture.MagFilter.Bilinear);
logger.config("Dual eye texture "+dualEyeTex.getName()+" ("+dualEyeTex.getImage().getId()+")");
logger.config(" Type: "+dualEyeTex.getType());
logger.config(" Size: "+dualEyeTex.getImage().getWidth()+"x"+dualEyeTex.getImage().getHeight());
logger.config(" Image depth: "+dualEyeTex.getImage().getDepth());
logger.config(" Image format: "+dualEyeTex.getImage().getFormat());
logger.config(" Image color space: "+dualEyeTex.getImage().getColorSpace());
//setup framebuffer to use texture
out.setDepthBuffer(Image.Format.Depth);
out.setColorTexture(dualEyeTex);
ViewPort viewPort = environment.getApplication().getViewPort();
viewPort.setClearFlags(true, true, true);
viewPort.setBackgroundColor(ColorRGBA.Black);
viewPort.setOutputFrameBuffer(out);
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
private ViewPort setupViewBuffers(Camera cam, String viewName){
if (environment != null){
if (environment.getApplication() != null){
// create offscreen framebuffer
FrameBuffer offBufferLeft = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
//offBufferLeft.setSrgb(true);
//setup framebuffer's texture
Texture2D offTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
offTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
offTex.setMagFilter(Texture.MagFilter.Bilinear);
//setup framebuffer to use texture
offBufferLeft.setDepthBuffer(Image.Format.Depth);
offBufferLeft.setColorTexture(offTex);
ViewPort viewPort = environment.getApplication().getRenderManager().createPreView(viewName, cam);
viewPort.setClearFlags(true, true, true);
viewPort.setBackgroundColor(ColorRGBA.Black);
Iterator<Spatial> spatialIter = environment.getApplication().getViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
viewPort.attachScene(spatialIter.next());
}
//set viewport to render to offscreen framebuffer
viewPort.setOutputFrameBuffer(offBufferLeft);
return viewPort;
} else {
throw new IllegalStateException("This VR environment is not attached to any application.");
}
} else {
throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
}
}
/**
* Set up a distortion mesh for the stereo view.
* @param eye the eye to compute the distortion for.
* @param api the underlying VR API.
* @return the distortion mesh.
*/
public static Mesh setupDistortionMesh(int eye, VRAPI api) {
Mesh distortionMesh = new Mesh();
float m_iLensGridSegmentCountH = 43, m_iLensGridSegmentCountV = 43;
float w = 1f / (m_iLensGridSegmentCountH - 1f);
float h = 1f / (m_iLensGridSegmentCountV - 1f);
float u, v;
float verts[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 3];
float texcoordR[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
float texcoordG[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
float texcoordB[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
int vertPos = 0, coordPos = 0;
float Xoffset = eye == JOpenVRLibrary.EVREye.EVREye_Eye_Left ? -1f : 0;
for (int y = 0; y < m_iLensGridSegmentCountV; y++) {
for (int x = 0; x < m_iLensGridSegmentCountH; x++) {
u = x * w;
v = 1 - y * h;
verts[vertPos] = Xoffset + u; // x
verts[vertPos + 1] = -1 + 2 * y * h; // y
verts[vertPos + 2] = 0f; // z
vertPos += 3;
DistortionCoordinates_t dc0 = new DistortionCoordinates_t();
if( api.getVRSystem() == null ) {
// default to no distortion
texcoordR[coordPos] = u;
texcoordR[coordPos + 1] = 1 - v;
texcoordG[coordPos] = u;
texcoordG[coordPos + 1] = 1 - v;
texcoordB[coordPos] = u;
texcoordB[coordPos + 1] = 1 - v;
} else {
((VR_IVRSystem_FnTable)api.getVRSystem()).ComputeDistortion.apply(eye, u, v, dc0);
texcoordR[coordPos] = dc0.rfRed[0];
texcoordR[coordPos + 1] = 1 - dc0.rfRed[1];
texcoordG[coordPos] = dc0.rfGreen[0];
texcoordG[coordPos + 1] = 1 - dc0.rfGreen[1];
texcoordB[coordPos] = dc0.rfBlue[0];
texcoordB[coordPos + 1] = 1 - dc0.rfBlue[1];
}
coordPos += 2;
}
}
// have UV coordinates & positions, now to setup indices
int[] indices = new int[(int) ((m_iLensGridSegmentCountV - 1) * (m_iLensGridSegmentCountH - 1)) * 6];
int indexPos = 0;
int a, b, c, d;
int offset = 0;
for (int y = 0; y < m_iLensGridSegmentCountV - 1; y++) {
for (int x = 0; x < m_iLensGridSegmentCountH - 1; x++) {
a = (int) (m_iLensGridSegmentCountH * y + x + offset);
b = (int) (m_iLensGridSegmentCountH * y + x + 1 + offset);
c = (int) ((y + 1) * m_iLensGridSegmentCountH + x + 1 + offset);
d = (int) ((y + 1) * m_iLensGridSegmentCountH + x + offset);
indices[indexPos] = a;
indices[indexPos + 1] = b;
indices[indexPos + 2] = c;
indices[indexPos + 3] = a;
indices[indexPos + 4] = c;
indices[indexPos + 5] = d;
indexPos += 6;
}
}
// OK, create the mesh
distortionMesh.setBuffer(VertexBuffer.Type.Position, 3, verts);
distortionMesh.setBuffer(VertexBuffer.Type.Index, 1, indices);
distortionMesh.setBuffer(VertexBuffer.Type.TexCoord, 2, texcoordR);
distortionMesh.setBuffer(VertexBuffer.Type.TexCoord2, 2, texcoordG);
distortionMesh.setBuffer(VertexBuffer.Type.TexCoord3, 2, texcoordB);
distortionMesh.setStatic();
return distortionMesh;
}
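// Index layout produced above: each grid cell becomes two triangles over its four
// corner vertices a, b, c, d (a = lower-left vertex of the cell):
//
//   d---c
//   |  /|     triangle 1: a, b, c
//   | / |     triangle 2: a, c, d
//   a---b
//
// With the 43x43 vertex grid this gives 42 * 42 * 6 = 10584 indices per eye.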
}

@@ -1,107 +0,0 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jmevr.util;
import com.jme3.app.VRApplication;
import com.jme3.input.vr.VRAPI;
import com.jme3.scene.Mesh;
import com.jme3.scene.VertexBuffer;
import com.jme3.system.jopenvr.DistortionCoordinates_t;
import com.jme3.system.jopenvr.JOpenVRLibrary;
import com.jme3.system.jopenvr.VR_IVRSystem_FnTable;
/**
*
* @author reden
*/
public class MeshUtil {
public static Mesh setupDistortionMesh(int eye, VRAPI api) {
Mesh distortionMesh = new Mesh();
float m_iLensGridSegmentCountH = 43, m_iLensGridSegmentCountV = 43;
float w = 1f / (m_iLensGridSegmentCountH - 1f);
float h = 1f / (m_iLensGridSegmentCountV - 1f);
float u, v;
float verts[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 3];
float texcoordR[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
float texcoordG[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
float texcoordB[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
int vertPos = 0, coordPos = 0;
float Xoffset = eye == JOpenVRLibrary.EVREye.EVREye_Eye_Left ? -1f : 0;
for (int y = 0; y < m_iLensGridSegmentCountV; y++) {
for (int x = 0; x < m_iLensGridSegmentCountH; x++) {
u = x * w;
v = 1 - y * h;
verts[vertPos] = Xoffset + u; // x
verts[vertPos + 1] = -1 + 2 * y * h; // y
verts[vertPos + 2] = 0f; // z
vertPos += 3;
DistortionCoordinates_t dc0 = new DistortionCoordinates_t();
if( api.getVRSystem() == null ) {
// default to no distortion
texcoordR[coordPos] = u;
texcoordR[coordPos + 1] = 1 - v;
texcoordG[coordPos] = u;
texcoordG[coordPos + 1] = 1 - v;
texcoordB[coordPos] = u;
texcoordB[coordPos + 1] = 1 - v;
} else {
((VR_IVRSystem_FnTable)api.getVRSystem()).ComputeDistortion.apply(eye, u, v, dc0);
texcoordR[coordPos] = dc0.rfRed[0];
texcoordR[coordPos + 1] = 1 - dc0.rfRed[1];
texcoordG[coordPos] = dc0.rfGreen[0];
texcoordG[coordPos + 1] = 1 - dc0.rfGreen[1];
texcoordB[coordPos] = dc0.rfBlue[0];
texcoordB[coordPos + 1] = 1 - dc0.rfBlue[1];
}
coordPos += 2;
}
}
// have UV coordinates & positions, now to setup indices
int[] indices = new int[(int) ((m_iLensGridSegmentCountV - 1) * (m_iLensGridSegmentCountH - 1)) * 6];
int indexPos = 0;
int a, b, c, d;
int offset = 0;
for (int y = 0; y < m_iLensGridSegmentCountV - 1; y++) {
for (int x = 0; x < m_iLensGridSegmentCountH - 1; x++) {
a = (int) (m_iLensGridSegmentCountH * y + x + offset);
b = (int) (m_iLensGridSegmentCountH * y + x + 1 + offset);
c = (int) ((y + 1) * m_iLensGridSegmentCountH + x + 1 + offset);
d = (int) ((y + 1) * m_iLensGridSegmentCountH + x + offset);
indices[indexPos] = a;
indices[indexPos + 1] = b;
indices[indexPos + 2] = c;
indices[indexPos + 3] = a;
indices[indexPos + 4] = c;
indices[indexPos + 5] = d;
indexPos += 6;
}
}
// OK, create the mesh
distortionMesh.setBuffer(VertexBuffer.Type.Position, 3, verts);
distortionMesh.setBuffer(VertexBuffer.Type.Index, 1, indices);
distortionMesh.setBuffer(VertexBuffer.Type.TexCoord, 2, texcoordR);
distortionMesh.setBuffer(VertexBuffer.Type.TexCoord2, 2, texcoordG);
distortionMesh.setBuffer(VertexBuffer.Type.TexCoord3, 2, texcoordB);
distortionMesh.setStatic();
return distortionMesh;
}
}

@@ -1,334 +0,0 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jmevr.util;
import com.jme3.app.Application;
import com.jme3.app.VRAppState;
import com.jme3.app.VRApplication;
import com.jme3.app.state.AppState;
import com.jme3.material.Material;
import com.jme3.material.RenderState.BlendMode;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Matrix3f;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue.Bucket;
import com.jme3.scene.Spatial;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.system.AppSettings;
import com.jme3.texture.FrameBuffer;
import com.jme3.texture.Image.Format;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import java.awt.GraphicsEnvironment;
import java.util.Iterator;
/**
*
* @author
* phr00t
*/
public class VRGuiManager {
public enum POSITIONING_MODE {
MANUAL, AUTO_CAM_ALL, AUTO_CAM_ALL_SKIP_PITCH, AUTO_OBSERVER_POS_CAM_ROTATION, AUTO_OBSERVER_ALL, AUTO_OBSERVER_ALL_CAMHEIGHT
}
private Camera camLeft, camRight;
private float guiDistance = 1.5f, guiScale = 1f, guiPositioningElastic;
private POSITIONING_MODE posMode = POSITIONING_MODE.AUTO_CAM_ALL;
private final Matrix3f orient = new Matrix3f();
private Vector2f screenSize;
protected boolean wantsReposition;
private VRAppState app = null;
private Application application = null;
/**
* Create a new GUI manager.
* Use {@link #attach(VRAppState, Application)} to link it to a VR app state and an application.
*/
public VRGuiManager(){
}
/**
* Get the VR app state to which this GUI manager is attached.
* @return the VR app state to which this GUI manager is attached.
*/
public VRAppState getVRAppState(){
return app;
}
/**
* Attach the GUI manager to an app state and an Application.
* The application has to be the one to which the app state is attached.
* This method should be called from the {@link AppState#initialize(com.jme3.app.state.AppStateManager, Application) initialize}
* method of the {@link AppState} instance.
* @param app the VR app state that this manager is attached to.
* @param application the application to which the app state is attached.
*/
public void attach(VRAppState app, Application application){
this.app = app;
this.application = application;
}
/**
*
* Makes automatic GUI positioning happen not immediately, but as if the GUI were
* attached to the headset by an elastic band. Setting this to 0 disables the effect
* (default); higher values make the GUI track the headset more quickly.
*
* @param elastic amount of elasticity
*/
public void setPositioningElasticity(float elastic) {
guiPositioningElastic = elastic;
}
public float getPositioningElasticity() {
return guiPositioningElastic;
}
public void setPositioningMode(POSITIONING_MODE mode) {
posMode = mode;
}
public Vector2f getCanvasSize() {
if( screenSize == null ) {
if( app.isInVR() && app.getVRHardware() != null ) {
screenSize = new Vector2f();
app.getVRHardware().getRenderSize(screenSize);
screenSize.multLocal(app.getVRViewManager().getResolutionMuliplier());
} else {
AppSettings as = application.getContext().getSettings();
screenSize = new Vector2f(as.getWidth(), as.getHeight());
}
}
return screenSize;
}
private Vector2f ratio;
public Vector2f getCanvasToWindowRatio() {
if( ratio == null ) {
ratio = new Vector2f();
Vector2f canvas = getCanvasSize();
int width = Integer.min(GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode().getWidth(),
application.getContext().getSettings().getWidth());
int height = Integer.min(GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode().getHeight(),
application.getContext().getSettings().getHeight());
ratio.x = Float.max(1f, canvas.x / width);
ratio.y = Float.max(1f, canvas.y / height);
}
return ratio;
}
public POSITIONING_MODE getPositioningMode() {
return posMode;
}
public void positionGui() {
wantsReposition = true;
}
private final Vector3f EoldPos = new Vector3f();
private final Quaternion EoldDir = new Quaternion();
private void positionTo(Vector3f pos, Quaternion dir, float tpf) {
Vector3f guiPos = guiQuadNode.getLocalTranslation();
guiPos.set(0f, 0f, guiDistance);
dir.mult(guiPos, guiPos);
guiPos.x += pos.x;
guiPos.y += pos.y + app.getVRHeightAdjustment();
guiPos.z += pos.z;
if( guiPositioningElastic > 0f && posMode != POSITIONING_MODE.MANUAL ) {
// mix pos & dir with current pos & dir
guiPos.interpolateLocal(EoldPos, guiPos, Float.min(1f, tpf * guiPositioningElastic));
EoldPos.set(guiPos);
}
}
protected void updateGuiQuadGeometricState() {
guiQuadNode.updateGeometricState();
}
protected void positionGuiNow(float tpf) {
wantsReposition = false;
if( app.isInVR() == false ) return;
guiQuadNode.setLocalScale(guiDistance * guiScale * 4f, 4f * guiDistance * guiScale, 1f);
switch( posMode ) {
case MANUAL:
case AUTO_CAM_ALL_SKIP_PITCH:
case AUTO_CAM_ALL:
if( camLeft != null && camRight != null ) {
// get middle point
temppos.set(camLeft.getLocation()).interpolateLocal(camRight.getLocation(), 0.5f);
positionTo(temppos, camLeft.getRotation(), tpf);
}
rotateScreenTo(camLeft.getRotation(), tpf);
break;
case AUTO_OBSERVER_POS_CAM_ROTATION:
Object obs = app.getObserver();
if( obs != null ) {
if( obs instanceof Camera ) {
positionTo(((Camera)obs).getLocation(), camLeft.getRotation(), tpf);
} else {
positionTo(((Spatial)obs).getWorldTranslation(), camLeft.getRotation(), tpf);
}
}
rotateScreenTo(camLeft.getRotation(), tpf);
break;
case AUTO_OBSERVER_ALL:
case AUTO_OBSERVER_ALL_CAMHEIGHT:
obs = app.getObserver();
if( obs != null ) {
Quaternion q;
if( obs instanceof Camera ) {
q = ((Camera)obs).getRotation();
temppos.set(((Camera)obs).getLocation());
} else {
q = ((Spatial)obs).getWorldRotation();
temppos.set(((Spatial)obs).getWorldTranslation());
}
if( posMode == POSITIONING_MODE.AUTO_OBSERVER_ALL_CAMHEIGHT ) {
temppos.y = camLeft.getLocation().y;
}
positionTo(temppos, q, tpf);
rotateScreenTo(q, tpf);
}
break;
}
}
private final Vector3f look = new Vector3f(), left = new Vector3f(), temppos = new Vector3f(), up = new Vector3f();
private final Quaternion tempq = new Quaternion();
private void rotateScreenTo(Quaternion dir, float tpf) {
dir.getRotationColumn(2, look).negateLocal();
dir.getRotationColumn(0, left).negateLocal();
orient.fromAxes(left, dir.getRotationColumn(1, up), look);
Quaternion rot = tempq.fromRotationMatrix(orient);
if( posMode == POSITIONING_MODE.AUTO_CAM_ALL_SKIP_PITCH ) VRUtil.stripToYaw(rot);
if( guiPositioningElastic > 0f && posMode != POSITIONING_MODE.MANUAL ) {
// mix pos & dir with current pos & dir
EoldDir.nlerp(rot, tpf * guiPositioningElastic);
guiQuadNode.setLocalRotation(EoldDir);
} else {
guiQuadNode.setLocalRotation(rot);
}
}
public void setGuiDistance(float newGuiDistance) {
guiDistance = newGuiDistance;
}
public void setGuiScale(float scale) {
guiScale = scale;
}
public float getGuiDistance() {
return guiDistance;
}
public void adjustGuiDistance(float adjustAmount) {
guiDistance += adjustAmount;
}
protected void setupGui(Camera leftcam, Camera rightcam, ViewPort left, ViewPort right) {
if( app.hasTraditionalGUIOverlay() ) {
camLeft = leftcam;
camRight = rightcam;
Spatial guiScene = getGuiQuad(camLeft);
left.attachScene(guiScene);
if( right != null ) right.attachScene(guiScene);
setPositioningMode(posMode);
}
}
/*
do not use, set by preconfigure routine in VRApplication
*/
public void _enableCurvedSuface(boolean set) {
useCurvedSurface = set;
}
/*
do not use, set by preconfigure routine in VRApplication
*/
public void _enableGuiOverdraw(boolean set) {
overdraw = set;
}
private boolean useCurvedSurface = false, overdraw = false;
private Geometry guiQuad;
private Node guiQuadNode;
private ViewPort offView;
private Texture2D guiTexture;
private Spatial getGuiQuad(Camera sourceCam){
if( guiQuadNode == null ) {
Vector2f guiCanvasSize = getCanvasSize();
Camera offCamera = sourceCam.clone();
offCamera.setParallelProjection(true);
offCamera.setLocation(Vector3f.ZERO);
offCamera.lookAt(Vector3f.UNIT_Z, Vector3f.UNIT_Y);
offView = application.getRenderManager().createPreView("GUI View", offCamera);
offView.setClearFlags(true, true, true);
offView.setBackgroundColor(ColorRGBA.BlackNoAlpha);
// create offscreen framebuffer
FrameBuffer offBuffer = new FrameBuffer((int)guiCanvasSize.x, (int)guiCanvasSize.y, 1);
//setup framebuffer's texture
guiTexture = new Texture2D((int)guiCanvasSize.x, (int)guiCanvasSize.y, Format.RGBA8);
guiTexture.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
guiTexture.setMagFilter(Texture.MagFilter.Bilinear);
//setup framebuffer to use texture
offBuffer.setDepthBuffer(Format.Depth);
offBuffer.setColorTexture(guiTexture);
//set viewport to render to offscreen framebuffer
offView.setOutputFrameBuffer(offBuffer);
// setup framebuffer's scene
Iterator<Spatial> spatialIter = application.getGuiViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
offView.attachScene(spatialIter.next());
}
if( useCurvedSurface ) {
guiQuad = (Geometry)application.getAssetManager().loadModel("Common/Util/gui_mesh.j3o");
} else {
guiQuad = new Geometry("guiQuad", new CenterQuad(1f, 1f));
}
Material mat = new Material(application.getAssetManager(), "Common/MatDefs/VR/GuiOverlay.j3md");
mat.getAdditionalRenderState().setDepthTest(!overdraw);
mat.getAdditionalRenderState().setBlendMode(BlendMode.Alpha);
mat.getAdditionalRenderState().setDepthWrite(false);
mat.setTexture("ColorMap", guiTexture);
guiQuad.setQueueBucket(Bucket.Translucent);
guiQuad.setMaterial(mat);
guiQuadNode = new Node("gui-quad-node");
guiQuadNode.setQueueBucket(Bucket.Translucent);
guiQuadNode.attachChild(guiQuad);
}
return guiQuadNode;
}
}

@@ -1,234 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package jmevr.util;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.lwjgl.glfw.GLFW;
import com.jme3.app.Application;
import com.jme3.app.VRAppState;
import com.jme3.app.VRApplication;
import com.jme3.app.state.AppState;
import com.jme3.input.MouseInput;
import com.jme3.input.controls.AnalogListener;
import com.jme3.input.lwjgl.GlfwMouseInputVR;
import com.jme3.input.vr.VRInputType;
import com.jme3.material.RenderState.BlendMode;
import com.jme3.math.Vector2f;
import com.jme3.scene.Node;
import com.jme3.system.AppSettings;
import com.jme3.system.lwjgl.LwjglWindow;
import com.jme3.system.lwjgl.LwjglWindowVR;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.ui.Picture;
/**
*
* @author Phr00t
*/
public class VRMouseManager {
private static final Logger logger = Logger.getLogger(VRMouseManager.class.getName());
private Application application = null;
private VRAppState app = null;
private final int AVERAGE_AMNT = 4;
private int avgCounter;
private Picture mouseImage;
private int recentCenterCount = 0;
private final Vector2f cursorPos = new Vector2f();
private float ySize, sensitivity = 8f, acceleration = 2f;
private final float[] lastXmv = new float[AVERAGE_AMNT], lastYmv = new float[AVERAGE_AMNT];
private boolean thumbstickMode;
private float moveScale = 1f;
private float avg(float[] arr) {
float amt = 0f;
for(float f : arr) amt += f;
return amt / arr.length;
}
public VRMouseManager(){
}
/**
* Attach the mouse manager to an app state and an Application.
* The application has to be the one to which the app state is attached.
* This method should be called from the {@link AppState#initialize(com.jme3.app.state.AppStateManager, Application) initialize}
* method of the {@link AppState} instance.
* @param app the VR app state that this manager is attached to.
* @param application the application to which the app state is attached.
*/
public void attach(VRAppState app, Application application){
this.app = app;
this.application = application;
}
protected void init() {
logger.config("Initializing VR mouse manager.");
// load default mouseimage
mouseImage = new Picture("mouse");
setImage("Common/Util/mouse.png");
// hide default cursor by making it invisible
MouseInput mi = application.getContext().getMouseInput();
if( mi instanceof GlfwMouseInputVR ){
((GlfwMouseInputVR)mi).hideActiveCursor();
}
centerMouse();
logger.config("Initialized VR mouse manager [SUCCESS]");
}
public void setThumbstickMode(boolean set) {
thumbstickMode = set;
}
public boolean isThumbstickMode() {
return thumbstickMode;
}
public void setSpeed(float sensitivity, float acceleration) {
this.sensitivity = sensitivity;
this.acceleration = acceleration;
}
public float getSpeedSensitivity() {
return sensitivity;
}
public float getSpeedAcceleration() {
return acceleration;
}
public void setMouseMoveScale(float set) {
moveScale = set;
}
public void setImage(String texture) {
// the cursor picture is prepared the same way whether or not we are in VR
Texture tex = application.getAssetManager().loadTexture(texture);
mouseImage.setTexture(application.getAssetManager(), (Texture2D)tex, true);
ySize = tex.getImage().getHeight();
mouseImage.setHeight(ySize);
mouseImage.setWidth(tex.getImage().getWidth());
mouseImage.getMaterial().getAdditionalRenderState().setBlendMode(BlendMode.Alpha);
mouseImage.getMaterial().getAdditionalRenderState().setDepthWrite(false);
}
public void updateAnalogAsMouse(int inputIndex, AnalogListener mouseListener, String mouseXName, String mouseYName, float tpf) {
// got a tracked controller to use as the "mouse"
if( app.isInVR() == false ||
app.getVRinput() == null ||
app.getVRinput().isInputDeviceTracking(inputIndex) == false ) return;
Vector2f tpDelta;
if( thumbstickMode ) {
tpDelta = app.getVRinput().getAxis(inputIndex, VRInputType.ViveTrackpadAxis);
} else {
tpDelta = app.getVRinput().getAxisDeltaSinceLastCall(inputIndex, VRInputType.ViveTrackpadAxis);
}
float Xamount = (float)Math.pow(Math.abs(tpDelta.x) * sensitivity, acceleration);
float Yamount = (float)Math.pow(Math.abs(tpDelta.y) * sensitivity, acceleration);
if( tpDelta.x < 0f ) Xamount = -Xamount;
if( tpDelta.y < 0f ) Yamount = -Yamount;
Xamount *= moveScale; Yamount *= moveScale;
if( mouseListener != null ) {
if( tpDelta.x != 0f && mouseXName != null ) mouseListener.onAnalog(mouseXName, Xamount * 0.2f, tpf);
if( tpDelta.y != 0f && mouseYName != null ) mouseListener.onAnalog(mouseYName, Yamount * 0.2f, tpf);
}
if( application.getInputManager().isCursorVisible() ) {
int index = (avgCounter+1) % AVERAGE_AMNT;
lastXmv[index] = Xamount * 133f;
lastYmv[index] = Yamount * 133f;
cursorPos.x -= avg(lastXmv);
cursorPos.y -= avg(lastYmv);
Vector2f maxsize = app.getVRGUIManager().getCanvasSize();
if( cursorPos.x > maxsize.x ) cursorPos.x = maxsize.x;
if( cursorPos.x < 0f ) cursorPos.x = 0f;
if( cursorPos.y > maxsize.y ) cursorPos.y = maxsize.y;
if( cursorPos.y < 0f ) cursorPos.y = 0f;
}
}
public Vector2f getCursorPosition() {
if( app.isInVR() ) {
return cursorPos;
}
return application.getInputManager().getCursorPosition();
}
public void centerMouse() {
// set mouse in center of the screen if newly added
Vector2f size = app.getVRGUIManager().getCanvasSize();
MouseInput mi = application.getContext().getMouseInput();
AppSettings as = application.getContext().getSettings();
if( mi instanceof GlfwMouseInputVR ) ((GlfwMouseInputVR)mi).setCursorPosition((int)(as.getWidth() / 2f), (int)(as.getHeight() / 2f));
if( app.isInVR() ) {
cursorPos.x = size.x / 2f;
cursorPos.y = size.y / 2f;
recentCenterCount = 2;
}
}
protected void update(float tpf) {
// if we are showing the cursor, add our picture as it
if( application.getInputManager().isCursorVisible() ) {
if( mouseImage.getParent() == null ) {
application.getGuiViewPort().attachScene(mouseImage);
centerMouse();
// the "real" mouse pointer should stay hidden
if (application.getContext() instanceof LwjglWindow){
GLFW.glfwSetInputMode(((LwjglWindow)application.getContext()).getWindowHandle(), GLFW.GLFW_CURSOR, GLFW.GLFW_CURSOR_DISABLED);
}
}
// handle mouse movements, which may be in addition to (or exclusive from) tracked movement
MouseInput mi = application.getContext().getMouseInput();
if( mi instanceof GlfwMouseInputVR ) {
if( recentCenterCount <= 0 ) {
//Vector2f winratio = VRGuiManager.getCanvasToWindowRatio();
cursorPos.x += ((GlfwMouseInputVR)mi).getLastDeltaX();// * winratio.x;
cursorPos.y += ((GlfwMouseInputVR)mi).getLastDeltaY();// * winratio.y;
if( cursorPos.x < 0f ) cursorPos.x = 0f;
if( cursorPos.y < 0f ) cursorPos.y = 0f;
if( cursorPos.x > app.getVRGUIManager().getCanvasSize().x ) cursorPos.x = app.getVRGUIManager().getCanvasSize().x;
if( cursorPos.y > app.getVRGUIManager().getCanvasSize().y ) cursorPos.y = app.getVRGUIManager().getCanvasSize().y;
} else recentCenterCount--;
((GlfwMouseInputVR)mi).clearDeltas();
}
// ok, update the cursor graphic position
Vector2f currentPos = getCursorPosition();
mouseImage.setLocalTranslation(currentPos.x, currentPos.y - ySize, app.getVRGUIManager().getGuiDistance() + 1f);
mouseImage.updateGeometricState();
} else if( mouseImage.getParent() != null ) {
Node n = mouseImage.getParent();
mouseImage.removeFromParent();
if (n != null){
n.updateGeometricState();
}
}
}
}

@@ -1,863 +0,0 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jmevr.util;
import com.jme3.app.Application;
import com.jme3.app.VRAppState;
import com.jme3.app.VRApplication;
import com.jme3.app.state.AppState;
import com.jme3.input.vr.OSVR;
import com.jme3.input.vr.OpenVR;
import com.jme3.input.vr.VRAPI;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.post.CartoonSSAO;
import com.jme3.post.Filter;
import com.jme3.post.FilterPostProcessor;
import com.jme3.post.SceneProcessor;
import com.jme3.post.filters.FogFilter;
import com.jme3.post.filters.TranslucentBucketFilter;
import com.jme3.post.ssao.SSAOFilter;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue.Bucket;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.jme3.shadow.DirectionalLightShadowFilter;
import com.jme3.shadow.VRDirectionalLightShadowRenderer;
import com.jme3.system.jopenvr.JOpenVRLibrary;
import com.jme3.system.jopenvr.OpenVRUtil;
import com.jme3.system.jopenvr.Texture_t;
import com.jme3.system.jopenvr.VRTextureBounds_t;
import com.jme3.system.lwjgl.LwjglWindow;
import com.jme3.texture.FrameBuffer;
import com.jme3.texture.Image;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.ui.Picture;
import com.sun.jna.Pointer;
import com.sun.jna.ptr.PointerByReference;
import java.awt.GraphicsEnvironment;
import java.util.Iterator;
import java.util.logging.Logger;
import osvrrendermanageropengl.OSVR_RenderBufferOpenGL;
import osvrrendermanageropengl.OSVR_ViewportDescription;
import osvrrendermanageropengl.OsvrRenderManagerOpenGLLibrary;
/**
* A VR view manager. This class enables submitting 3D views to the VR compositor.
* @author reden - phr00t - https://github.com/phr00t
* @author Julien Seinturier - (c) 2016 - JOrigin project - <a href="http://www.jorigin.org">http:/www.jorigin.org</a>
*/
public class VRViewManager {
private static final Logger logger = Logger.getLogger(VRViewManager.class.getName());
/**
* The name of the left view.
*/
public final static String LEFT_VIEW_NAME = "Left View";
/**
* The name of the right view.
*/
public final static String RIGHT_VIEW_NAME = "Right View";
private VRAppState app;
private Application application;
private Camera leftCamera;
private ViewPort leftViewport;
private FilterPostProcessor leftPostProcessor;
private Texture2D leftEyeTexture;
private Texture2D leftEyeDepth;
private Camera rightCamera;
private ViewPort rightViewport;
private FilterPostProcessor rightPostProcessor;
private Texture2D rightEyeTexture;
private Texture2D rightEyeDepth;
// OpenVR values
private VRTextureBounds_t leftTextureBounds;
private Texture_t leftTextureType;
private VRTextureBounds_t rightTextureBounds;
private Texture_t rightTextureType;
// OSVR values
OSVR_RenderBufferOpenGL.ByValue[] osvr_renderBuffer;
OSVR_ViewportDescription.ByValue osvr_viewDescFull;
OSVR_ViewportDescription.ByValue osvr_viewDescLeft;
OSVR_ViewportDescription.ByValue osvr_viewDescRight;
Pointer osvr_rmBufferState;
//private static boolean useCustomDistortion;
private float heightAdjustment;
private Texture2D dualEyeTex;
private final PointerByReference grabRBS = new PointerByReference();
private float resMult = 1f;
//final & temp values for camera calculations
private final Vector3f finalPosition = new Vector3f();
private final Quaternion finalRotation = new Quaternion();
private final Vector3f hmdPos = new Vector3f();
private final Quaternion hmdRot = new Quaternion();
/**
* Create a new VR view manager.<br>
* In order to be used, this manager has to be attached to a {@link VRAppState VR app state} and to an {@link Application application} with {@link #attach(VRAppState, Application)}.
*/
public VRViewManager(){
}
/**
* Attach this manager to the given {@link VRAppState app state} and the given {@link Application application}.
* The application has to be the one to which the app state is attached.
* This method should be called from the {@link AppState#initialize(com.jme3.app.state.AppStateManager, Application) initialize}
* method of the {@link AppState} instance.
* @param app the {@link VRAppState VR app state} to which this manager is linked.
* @param application the {@link Application} to which the app state is attached.
*/
public void attach(VRAppState app, Application application){
this.app = app;
this.application = application;
}
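/*
 * Usage sketch: a minimal, illustrative way to wire this manager from a custom
 * app state. The class and field names below are hypothetical, not part of the
 * library.
 *
 *   public class MyVRRenderState extends AbstractAppState {
 *       private final VRViewManager viewManager = new VRViewManager();
 *
 *       @Override
 *       public void initialize(AppStateManager stateManager, Application application) {
 *           super.initialize(stateManager, application);
 *           viewManager.attach(stateManager.getState(VRAppState.class), application);
 *           viewManager.initialize();
 *       }
 *   }
 */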
/**
* Get the {@link Camera camera} attached to the left eye.
* @return the {@link Camera camera} attached to the left eye.
* @see #getRightCamera()
*/
public Camera getLeftCamera() {
return leftCamera;
}
/**
* Get the {@link Camera camera} attached to the right eye.
* @return the {@link Camera camera} attached to the right eye.
* @see #getLeftCamera()
*/
public Camera getRightCamera() {
return rightCamera;
}
/**
* Get the {@link ViewPort viewport} attached to the left eye.
* @return the {@link ViewPort viewport} attached to the left eye.
* @see #getRightViewport()
*/
public ViewPort getLeftViewport() {
return leftViewport;
}
/**
* Get the {@link ViewPort viewport} attached to the right eye.
* @return the {@link ViewPort viewport} attached to the right eye.
* @see #getLeftViewport()
*/
public ViewPort getRightViewport() {
return rightViewport;
}
/**
* Get the identifier of the left eye texture.
* @return the identifier of the left eye texture.
* @see #getRightTexId()
* @see #getFullTexId()
*/
private int getLeftTexId() {
return (int)leftEyeTexture.getImage().getId();
}
/**
* Get the identifier of the right eye texture.
* @return the identifier of the right eye texture.
* @see #getLeftTexId()
* @see #getFullTexId()
*/
private int getRightTexId() {
return (int)rightEyeTexture.getImage().getId();
}
/**
* Get the identifier of the full (dual eye) texture.
* @return the identifier of the full (dual eye) texture.
* @see #getLeftTexId()
* @see #getRightTexId()
*/
private int getFullTexId() {
return (int)dualEyeTex.getImage().getId();
}
/**
* Get the height adjustment to apply to the cameras before rendering.
* @return the height adjustment to apply to the cameras before rendering.
* @see #setHeightAdjustment(float)
*/
public float getHeightAdjustment() {
return heightAdjustment;
}
/**
* Set the height adjustment to apply to the cameras before rendering.
* @param amount the height adjustment to apply to the cameras before rendering.
* @see #getHeightAdjustment()
*/
public void setHeightAdjustment(float amount) {
heightAdjustment = amount;
}
/**
* Get the resolution multiplier.
* @return the resolution multiplier.
* @see #setResolutionMultiplier(float)
*/
public float getResolutionMuliplier() {
return resMult;
}
/**
* Set the resolution multiplier.
* @param resMult the resolution multiplier.
* @see #getResolutionMuliplier()
*/
public void setResolutionMultiplier(float resMult) {
this.resMult = resMult;
}
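/*
 * Note (inferred from the code below): the multiplier is only read in
 * prepareCameraSize(), which runs from initialize() via setupCamerasAndViews(),
 * so it has to be set before the manager is initialized to take effect.
 */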
/**
* Initialize the system binds of the textures.
*/
private void initTextureSubmitStructs() {
leftTextureType = new Texture_t();
rightTextureType = new Texture_t();
if( app.getVRHardware() instanceof OpenVR ) {
leftTextureBounds = new VRTextureBounds_t();
rightTextureBounds = new VRTextureBounds_t();
// left eye
leftTextureBounds.uMax = 0.5f;
leftTextureBounds.uMin = 0f;
leftTextureBounds.vMax = 1f;
leftTextureBounds.vMin = 0f;
leftTextureBounds.setAutoSynch(false);
leftTextureBounds.setAutoRead(false);
leftTextureBounds.setAutoWrite(false);
leftTextureBounds.write();
// right eye
rightTextureBounds.uMax = 1f;
rightTextureBounds.uMin = 0.5f;
rightTextureBounds.vMax = 1f;
rightTextureBounds.vMin = 0f;
rightTextureBounds.setAutoSynch(false);
rightTextureBounds.setAutoRead(false);
rightTextureBounds.setAutoWrite(false);
rightTextureBounds.write();
// texture type
// FIXME: Synchronize with the texture given by JMonkey (it is currently linear, but was Gamma in phr00t's implementation)
leftTextureType.eColorSpace = JOpenVRLibrary.EColorSpace.EColorSpace_ColorSpace_Gamma;
//leftTextureType.eColorSpace = JOpenVRLibrary.EColorSpace.EColorSpace_ColorSpace_Linear;
leftTextureType.eType = JOpenVRLibrary.ETextureType.ETextureType_TextureType_OpenGL;
leftTextureType.setAutoSynch(false);
leftTextureType.setAutoRead(false);
leftTextureType.setAutoWrite(false);
leftTextureType.handle = -1;
// FIXME: Synchronize with the texture given by JMonkey (it is currently linear, but was Gamma in phr00t's implementation)
rightTextureType.eColorSpace = JOpenVRLibrary.EColorSpace.EColorSpace_ColorSpace_Gamma;
//rightTextureType.eColorSpace = JOpenVRLibrary.EColorSpace.EColorSpace_ColorSpace_Linear;
rightTextureType.eType = JOpenVRLibrary.ETextureType.ETextureType_TextureType_OpenGL;
rightTextureType.setAutoSynch(false);
rightTextureType.setAutoRead(false);
rightTextureType.setAutoWrite(false);
rightTextureType.handle = -1;
logger.config("Init eyes native texture binds");
logger.config(" Left eye texture");
logger.config(" address: "+leftTextureType.getPointer());
logger.config(" size: "+leftTextureType.size()+" bytes");
logger.config(" color space: "+OpenVRUtil.getEColorSpaceString(leftTextureType.eColorSpace));
logger.config(" type: "+OpenVRUtil.getETextureTypeString(leftTextureType.eType));
logger.config(" auto read: "+leftTextureType.getAutoRead());
logger.config(" auto write: "+leftTextureType.getAutoWrite());
logger.config(" handle address: "+leftTextureType.handle);
logger.config(" handle value: "+leftTextureType.handle);
logger.config("");
logger.config(" Right eye texture");
logger.config(" address: "+rightTextureType.getPointer());
logger.config(" size: "+rightTextureType.size()+" bytes");
logger.config(" color space: "+OpenVRUtil.getEColorSpaceString(rightTextureType.eColorSpace));
logger.config(" type: "+OpenVRUtil.getETextureTypeString(rightTextureType.eType));
logger.config(" auto read: "+rightTextureType.getAutoRead());
logger.config(" auto write: "+rightTextureType.getAutoWrite());
logger.config(" handle address: "+rightTextureType.handle);
logger.config(" handle value: "+rightTextureType.handle);
} else if( app.getVRHardware() instanceof OSVR ) {
// must be OSVR
osvr_renderBuffer = new OSVR_RenderBufferOpenGL.ByValue[2];
osvr_renderBuffer[OSVR.EYE_LEFT] = new OSVR_RenderBufferOpenGL.ByValue();
osvr_renderBuffer[OSVR.EYE_RIGHT] = new OSVR_RenderBufferOpenGL.ByValue();
osvr_renderBuffer[OSVR.EYE_LEFT].setAutoSynch(false);
osvr_renderBuffer[OSVR.EYE_RIGHT].setAutoSynch(false);
osvr_viewDescFull = new OSVR_ViewportDescription.ByValue();
osvr_viewDescFull.setAutoSynch(false);
osvr_viewDescFull.left = osvr_viewDescFull.lower = 0.0;
osvr_viewDescFull.width = osvr_viewDescFull.height = 1.0;
osvr_viewDescLeft = new OSVR_ViewportDescription.ByValue();
osvr_viewDescLeft.setAutoSynch(false);
osvr_viewDescLeft.left = osvr_viewDescLeft.lower = 0.0;
osvr_viewDescLeft.width = 0.5;
osvr_viewDescLeft.height = 1.0;
osvr_viewDescRight = new OSVR_ViewportDescription.ByValue();
osvr_viewDescRight.setAutoSynch(false);
osvr_viewDescRight.left = 0.5;
osvr_viewDescRight.lower = 0.0;
osvr_viewDescRight.width = 0.5;
osvr_viewDescRight.height = 1.0;
osvr_viewDescRight.write();
osvr_viewDescLeft.write();
osvr_viewDescFull.write();
osvr_renderBuffer[OSVR.EYE_LEFT].depthStencilBufferName = -1;
osvr_renderBuffer[OSVR.EYE_LEFT].colorBufferName = -1;
osvr_renderBuffer[OSVR.EYE_RIGHT].depthStencilBufferName = -1;
osvr_renderBuffer[OSVR.EYE_RIGHT].colorBufferName = -1;
}
}
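/*
 * Illustrative example: when a single side-by-side texture is submitted for
 * both eyes, the bounds/viewport descriptions above split it at u = 0.5. For a
 * 2560x1440 buffer that means the left eye reads the [0.0, 0.5] x [0.0, 1.0]
 * region (the left 1280x1440 half) and the right eye reads [0.5, 1.0] x [0.0, 1.0].
 */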
/**
* Register the OSVR OpenGL buffer.
* @param buf the OSVR OpenGL buffer.
*/
private void registerOSVRBuffer(OSVR_RenderBufferOpenGL.ByValue buf) {
OsvrRenderManagerOpenGLLibrary.osvrRenderManagerStartRegisterRenderBuffers(grabRBS);
OsvrRenderManagerOpenGLLibrary.osvrRenderManagerRegisterRenderBufferOpenGL(grabRBS.getValue(), buf);
OsvrRenderManagerOpenGLLibrary.osvrRenderManagerFinishRegisterRenderBuffers(((OSVR)app.getVRHardware()).getCompositor(), grabRBS.getValue(), (byte)0);
}
/**
* Send the textures to the two eyes.
*/
public void sendTextures() {
if( app.isInVR() ) {
VRAPI api = app.getVRHardware();
if( api.getCompositor() != null ) {
// using the compositor...
int errl = 0, errr = 0;
if( app.isInstanceVRRendering() ) {
if( leftTextureType.handle == -1 || leftTextureType.handle != getFullTexId() ) {
leftTextureType.handle = getFullTexId();
if( leftTextureType.handle != -1 ) {
leftTextureType.write();
if( api instanceof OSVR ) {
osvr_renderBuffer[OSVR.EYE_LEFT].colorBufferName = leftTextureType.handle;
osvr_renderBuffer[OSVR.EYE_LEFT].depthStencilBufferName = dualEyeTex.getImage().getId();
osvr_renderBuffer[OSVR.EYE_LEFT].write();
registerOSVRBuffer(osvr_renderBuffer[OSVR.EYE_LEFT]);
}
}
} else {
if( api instanceof OpenVR ) {
int submitFlag = JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default;
errr = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, leftTextureType, rightTextureBounds, submitFlag);
errl = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left, leftTextureType, leftTextureBounds, submitFlag);
} else if( api instanceof OSVR ) {
((OSVR)api).handleRenderBufferPresent(osvr_viewDescLeft, osvr_viewDescRight,
osvr_renderBuffer[OSVR.EYE_LEFT], osvr_renderBuffer[OSVR.EYE_LEFT]);
}
}
} else if( leftTextureType.handle == -1 || rightTextureType.handle == -1 ||
leftTextureType.handle != getLeftTexId() || rightTextureType.handle != getRightTexId() ) {
leftTextureType.handle = getLeftTexId();
if( leftTextureType.handle != -1 ) {
logger.fine("Writing Left texture to native memory at " + leftTextureType.getPointer());
leftTextureType.write();
if( api instanceof OSVR ) {
osvr_renderBuffer[OSVR.EYE_LEFT].colorBufferName = leftTextureType.handle;
if( leftEyeDepth != null ) osvr_renderBuffer[OSVR.EYE_LEFT].depthStencilBufferName = leftEyeDepth.getImage().getId();
osvr_renderBuffer[OSVR.EYE_LEFT].write();
registerOSVRBuffer(osvr_renderBuffer[OSVR.EYE_LEFT]);
}
}
rightTextureType.handle = getRightTexId();
if( rightTextureType.handle != -1 ) {
logger.fine("Writing Right texture to native memory at " + leftTextureType.getPointer());
rightTextureType.write();
if( api instanceof OSVR ) {
osvr_renderBuffer[OSVR.EYE_RIGHT].colorBufferName = rightTextureType.handle;
if( rightEyeDepth != null ) osvr_renderBuffer[OSVR.EYE_RIGHT].depthStencilBufferName = rightEyeDepth.getImage().getId();
osvr_renderBuffer[OSVR.EYE_RIGHT].write();
registerOSVRBuffer(osvr_renderBuffer[OSVR.EYE_RIGHT]);
}
}
} else {
if( api instanceof OpenVR ) {
errl = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left, leftTextureType, null,
JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default);
errr = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, rightTextureType, null,
JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default);
} else if( api instanceof OSVR ) {
((OSVR)api).handleRenderBufferPresent(osvr_viewDescFull, osvr_viewDescFull,
osvr_renderBuffer[OSVR.EYE_LEFT], osvr_renderBuffer[OSVR.EYE_RIGHT]);
}
}
if( errl != 0 ){
logger.severe("Submit to left compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errl)+" ("+Integer.toString(errl)+")");
logger.severe(" Texture color space: "+OpenVRUtil.getEColorSpaceString(leftTextureType.eColorSpace));
logger.severe(" Texture type: "+OpenVRUtil.getETextureTypeString(leftTextureType.eType));
logger.severe(" Texture handle: "+leftTextureType.handle);
logger.severe(" Left eye texture "+leftEyeTexture.getName()+" ("+leftEyeTexture.getImage().getId()+")");
logger.severe(" Type: "+leftEyeTexture.getType());
logger.severe(" Size: "+leftEyeTexture.getImage().getWidth()+"x"+leftEyeTexture.getImage().getHeight());
logger.severe(" Image depth: "+leftEyeTexture.getImage().getDepth());
logger.severe(" Image format: "+leftEyeTexture.getImage().getFormat());
logger.severe(" Image color space: "+leftEyeTexture.getImage().getColorSpace());
}
if( errr != 0 ){
logger.severe("Submit to right compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errl)+" ("+Integer.toString(errl)+")");
logger.severe(" Texture color space: "+OpenVRUtil.getEColorSpaceString(rightTextureType.eColorSpace));
logger.severe(" Texture type: "+OpenVRUtil.getETextureTypeString(rightTextureType.eType));
logger.severe(" Texture handle: "+rightTextureType.handle);
logger.severe(" Right eye texture "+rightEyeTexture.getName()+" ("+rightEyeTexture.getImage().getId()+")");
logger.severe(" Type: "+rightEyeTexture.getType());
logger.severe(" Size: "+rightEyeTexture.getImage().getWidth()+"x"+rightEyeTexture.getImage().getHeight());
logger.severe(" Image depth: "+rightEyeTexture.getImage().getDepth());
logger.severe(" Image format: "+rightEyeTexture.getImage().getFormat());
logger.severe(" Image color space: "+rightEyeTexture.getImage().getColorSpace());
}
}
}
}
/**
* Initialize the VR view manager.
*/
public void initialize() {
logger.config("Initializing VR view manager.");
initTextureSubmitStructs();
setupCamerasAndViews();
setupVRScene();
moveScreenProcessingToEyes();
if( app.hasTraditionalGUIOverlay() ) {
app.getVRMouseManager().init();
// update the pose to position the gui correctly on start
update(0f);
app.getVRGUIManager().positionGui();
}
// if we are OSVR, our primary mirror window needs to be the same size as the render manager's output...
if( app.getVRHardware() instanceof OSVR ) {
int origWidth = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode().getWidth();
int origHeight = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode().getHeight();
long window = ((LwjglWindow)application.getContext()).getWindowHandle();
Vector2f windowSize = new Vector2f();
((OSVR)app.getVRHardware()).getRenderSize(windowSize);
windowSize.x = Math.max(windowSize.x * 2f, leftCamera.getWidth());
org.lwjgl.glfw.GLFW.glfwSetWindowSize(window, (int)windowSize.x, (int)windowSize.y);
application.getContext().getSettings().setResolution((int)windowSize.x, (int)windowSize.y);
if (application.getRenderManager() != null) {
application.getRenderManager().notifyReshape((int)windowSize.x, (int)windowSize.y);
}
org.lwjgl.glfw.GLFW.glfwSetWindowPos(window, origWidth - (int)windowSize.x, 32);
org.lwjgl.glfw.GLFW.glfwFocusWindow(window);
org.lwjgl.glfw.GLFW.glfwSetCursorPos(window, origWidth / 2.0, origHeight / 2.0);
}
logger.config("Initialized VR view manager [SUCCESS]");
}
/**
* Prepare the size of the given {@link Camera camera} to adapt it to the underlying rendering context.
* @param cam the {@link Camera camera} to prepare.
* @param xMult the camera width multiplier.
*/
private void prepareCameraSize(Camera cam, float xMult) {
Vector2f size = new Vector2f();
VRAPI vrhmd = app.getVRHardware();
if( vrhmd == null ) {
size.x = 1280f;
size.y = 720f;
} else {
vrhmd.getRenderSize(size);
}
if( size.x < application.getContext().getSettings().getWidth() ) {
size.x = application.getContext().getSettings().getWidth();
}
if( size.y < application.getContext().getSettings().getHeight() ) {
size.y = application.getContext().getSettings().getHeight();
}
if( app.isInstanceVRRendering() ) size.x *= 2f;
// other adjustments
size.x *= xMult;
size.x *= resMult;
size.y *= resMult;
if( cam.getWidth() != size.x || cam.getHeight() != size.y ) cam.resize((int)size.x, (int)size.y, false);
}
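/*
 * Worked example (illustrative numbers): with an HMD render size of 1512x1680,
 * a 1280x720 window, xMult = 1 and resMult = 1.5, the camera is resized to
 * 2268x2520 (1512*1.5 x 1680*1.5); instanced rendering would first double the
 * width to 3024 before the multipliers are applied.
 */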
/**
* Replace the main viewport's scenes with the distortion mesh (only when no compositor is available).
*/
private void setupVRScene(){
// no special scene to setup if we are doing instancing
if( app.isInstanceVRRendering() ) {
// distortion has to be done with compositor here... we want only one pass on our end!
if( application.getContext().getSettings().isSwapBuffers() ) {
setupMirrorBuffers(app.getCamera(), dualEyeTex, true);
}
return;
}
leftEyeTexture = (Texture2D) leftViewport.getOutputFrameBuffer().getColorBuffer().getTexture();
rightEyeTexture = (Texture2D)rightViewport.getOutputFrameBuffer().getColorBuffer().getTexture();
leftEyeDepth = (Texture2D) leftViewport.getOutputFrameBuffer().getDepthBuffer().getTexture();
rightEyeDepth = (Texture2D)rightViewport.getOutputFrameBuffer().getDepthBuffer().getTexture();
// main viewport is either going to be a distortion scene or nothing
// mirroring is handled by copying framebuffers
Iterator<Spatial> spatialIter = application.getViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
application.getViewPort().detachScene(spatialIter.next());
}
spatialIter = application.getGuiViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
application.getGuiViewPort().detachScene(spatialIter.next());
}
// only setup distortion scene if compositor isn't running (or using custom mesh distortion option)
if( app.getVRHardware().getCompositor() == null ) {
Node distortionScene = new Node();
Material leftMat = new Material(application.getAssetManager(), "Common/MatDefs/VR/OpenVR.j3md");
leftMat.setTexture("Texture", leftEyeTexture);
Geometry leftEye = new Geometry("box", MeshUtil.setupDistortionMesh(JOpenVRLibrary.EVREye.EVREye_Eye_Left, app.getVRHardware()));
leftEye.setMaterial(leftMat);
distortionScene.attachChild(leftEye);
Material rightMat = new Material(application.getAssetManager(), "Common/MatDefs/VR/OpenVR.j3md");
rightMat.setTexture("Texture", rightEyeTexture);
Geometry rightEye = new Geometry("box", MeshUtil.setupDistortionMesh(JOpenVRLibrary.EVREye.EVREye_Eye_Right, app.getVRHardware()));
rightEye.setMaterial(rightMat);
distortionScene.attachChild(rightEye);
distortionScene.updateGeometricState();
application.getViewPort().attachScene(distortionScene);
//if( useCustomDistortion ) setupFinalFullTexture(app.getViewPort().getCamera());
}
if( application.getContext().getSettings().isSwapBuffers() ) {
setupMirrorBuffers(app.getCamera(), leftEyeTexture, false);
}
}
/**
* Update the VR view manager.
* This method is called by the attached {@link VRAppState VR app state} and should not be called manually.
* @param tpf the time per frame.
*/
public void update(float tpf) {
// grab the observer
Object obs = app.getObserver();
Quaternion objRot;
Vector3f objPos;
if( obs instanceof Camera ) {
objRot = ((Camera)obs).getRotation();
objPos = ((Camera)obs).getLocation();
} else {
objRot = ((Spatial)obs).getWorldRotation();
objPos = ((Spatial)obs).getWorldTranslation();
}
// grab the hardware handle
VRAPI dev = app.getVRHardware();
if( dev != null ) {
// update the HMD's position & orientation
dev.updatePose();
dev.getPositionAndOrientation(hmdPos, hmdRot);
if( obs != null ) {
// update hmdPos based on obs rotation
finalRotation.set(objRot);
finalRotation.mult(hmdPos, hmdPos);
finalRotation.multLocal(hmdRot);
}
finalizeCamera(dev.getHMDVectorPoseLeftEye(), objPos, leftCamera);
finalizeCamera(dev.getHMDVectorPoseRightEye(), objPos, rightCamera);
} else {
leftCamera.setFrame(objPos, objRot);
rightCamera.setFrame(objPos, objRot);
}
if( app.hasTraditionalGUIOverlay() ) {
// update the mouse?
app.getVRMouseManager().update(tpf);
// update GUI position?
if( app.getVRGUIManager().wantsReposition || app.getVRGUIManager().getPositioningMode() != VRGuiManager.POSITIONING_MODE.MANUAL ) {
app.getVRGUIManager().positionGuiNow(tpf);
app.getVRGUIManager().updateGuiQuadGeometricState();
}
}
}
/**
* Place the camera within the scene.
* @param eyePos the eye position.
* @param obsPosition the observer position.
* @param cam the camera to place.
*/
private void finalizeCamera(Vector3f eyePos, Vector3f obsPosition, Camera cam) {
finalRotation.mult(eyePos, finalPosition);
finalPosition.addLocal(hmdPos);
if( obsPosition != null ) finalPosition.addLocal(obsPosition);
finalPosition.y += heightAdjustment;
cam.setFrame(finalPosition, finalRotation);
}
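/*
 * Illustrative walk-through: with an identity finalRotation, an eye offset of
 * (-0.03, 0, 0), an HMD position of (0, 1.6, 0), an observer at (2, 0, 5) and a
 * height adjustment of 0.1, the camera frame ends up at
 * (-0.03 + 0 + 2, 0 + 1.6 + 0 + 0.1, 0 + 0 + 5) = (1.97, 1.7, 5).
 */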
/**
* Handles moving filters from the main view to each eye
*/
public void moveScreenProcessingToEyes() {
if( rightViewport == null ) return;
syncScreenProcessing(application.getViewPort());
application.getViewPort().clearProcessors();
}
/**
* Sets the two views to use the list of {@link SceneProcessor processors}.
* @param sourceViewport the {@link ViewPort viewport} that contains the processors to use.
*/
public void syncScreenProcessing(ViewPort sourceViewport) {
if( rightViewport == null ) return;
// setup post processing filters
if( rightPostProcessor == null ) {
rightPostProcessor = new FilterPostProcessor(application.getAssetManager());
leftPostProcessor = new FilterPostProcessor(application.getAssetManager());
}
// clear out all filters & processors, to start from scratch
rightPostProcessor.removeAllFilters();
leftPostProcessor.removeAllFilters();
leftViewport.clearProcessors();
rightViewport.clearProcessors();
// if we have no processors to sync, don't add the FilterPostProcessor
if( sourceViewport.getProcessors().isEmpty() ) return;
// add post processors we just made, which are empty
leftViewport.addProcessor(leftPostProcessor);
rightViewport.addProcessor(rightPostProcessor);
// go through all of the filters in the processors list
// add them to the left viewport processor & clone them to the right
for(SceneProcessor sceneProcessor : sourceViewport.getProcessors()) {
if (sceneProcessor instanceof FilterPostProcessor) {
for(Filter f : ((FilterPostProcessor)sceneProcessor).getFilterList() ) {
if( f instanceof TranslucentBucketFilter ) {
// just remove this filter, we will add it at the end manually
((FilterPostProcessor)sceneProcessor).removeFilter(f);
} else {
leftPostProcessor.addFilter(f);
// clone to the right
Filter f2;
if(f instanceof FogFilter){
f2 = FilterUtil.cloneFogFilter((FogFilter)f);
} else if (f instanceof CartoonSSAO ) {
f2 = new CartoonSSAO((CartoonSSAO)f);
} else if (f instanceof SSAOFilter){
f2 = FilterUtil.cloneSSAOFilter((SSAOFilter)f);
} else if (f instanceof DirectionalLightShadowFilter){
f2 = FilterUtil.cloneDirectionalLightShadowFilter(application.getAssetManager(), (DirectionalLightShadowFilter)f);
} else {
f2 = f; // dof, bloom, lightscattering etc.
}
rightPostProcessor.addFilter(f2);
}
}
} else if (sceneProcessor instanceof VRDirectionalLightShadowRenderer) {
// shadow processing
// TODO: make the right shadow processor reuse the left one's shadow maps for performance
VRDirectionalLightShadowRenderer dlsr = (VRDirectionalLightShadowRenderer) sceneProcessor;
VRDirectionalLightShadowRenderer dlsrRight = dlsr.clone();
dlsrRight.setLight(dlsr.getLight());
rightViewport.getProcessors().add(0, dlsrRight);
leftViewport.getProcessors().add(0, sceneProcessor);
}
}
// make sure each has a translucent filter renderer
leftPostProcessor.addFilter(new TranslucentBucketFilter());
rightPostProcessor.addFilter(new TranslucentBucketFilter());
}
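/*
 * Usage sketch (illustrative; "viewManager" stands for an attached and
 * initialized instance): filters added to the application's main viewport can
 * be mirrored onto both eyes like this.
 *
 *   FilterPostProcessor fpp = new FilterPostProcessor(application.getAssetManager());
 *   fpp.addFilter(new FogFilter());
 *   application.getViewPort().addProcessor(fpp);
 *   viewManager.moveScreenProcessingToEyes();
 */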
private void setupCamerasAndViews() {
// get desired frustum from original camera
Camera origCam = app.getCamera();
float fFar = origCam.getFrustumFar();
float fNear = origCam.getFrustumNear();
// if we are using OSVR get the eye info here
if( app.getVRHardware() instanceof OSVR ) {
((OSVR)app.getVRHardware()).getEyeInfo();
}
// restore frustum on distortion scene cam, if needed
if( app.isInstanceVRRendering() ) {
leftCamera = origCam;
} else if( app.compositorAllowed() == false ) {
origCam.setFrustumFar(100f);
origCam.setFrustumNear(1f);
leftCamera = origCam.clone();
prepareCameraSize(origCam, 2f);
} else {
leftCamera = origCam.clone();
}
leftCamera.setFrustumPerspective(app.getDefaultFOV(), app.getDefaultAspect(), fNear, fFar);
prepareCameraSize(leftCamera, 1f);
if( app.getVRHardware() != null ) leftCamera.setProjectionMatrix(app.getVRHardware().getHMDMatrixProjectionLeftEye(leftCamera));
//org.lwjgl.opengl.GL11.glEnable(org.lwjgl.opengl.GL30.GL_FRAMEBUFFER_SRGB);
if( !app.isInstanceVRRendering()) {
leftViewport = setupViewBuffers(leftCamera, LEFT_VIEW_NAME);
rightCamera = leftCamera.clone();
if( app.getVRHardware() != null ){
rightCamera.setProjectionMatrix(app.getVRHardware().getHMDMatrixProjectionRightEye(rightCamera));
}
rightViewport = setupViewBuffers(rightCamera, RIGHT_VIEW_NAME);
} else {
System.err.println("[VRViewManager] THIS CODE NEED CHANGES !!!");
leftViewport = application.getViewPort();
//leftViewport.attachScene(app.getRootNode());
rightCamera = leftCamera.clone();
if( app.getVRHardware() != null ){
rightCamera.setProjectionMatrix(app.getVRHardware().getHMDMatrixProjectionRightEye(rightCamera));
}
org.lwjgl.opengl.GL11.glEnable(org.lwjgl.opengl.GL30.GL_CLIP_DISTANCE0);
//FIXME: [jme-vr] Fix with JMonkey next release
//RenderManager._VRInstancing_RightCamProjection = camRight.getViewProjectionMatrix();
setupFinalFullTexture(application.getViewPort().getCamera());
}
// setup gui
app.getVRGUIManager().setupGui(leftCamera, rightCamera, leftViewport, rightViewport);
if( app.getVRHardware() != null ) {
// call these to cache the results internally
app.getVRHardware().getHMDMatrixPoseLeftEye();
app.getVRHardware().getHMDMatrixPoseRightEye();
}
}
private ViewPort setupMirrorBuffers(Camera cam, Texture tex, boolean expand) {
Camera clonecam = cam.clone();
ViewPort viewPort = application.getRenderManager().createPostView("MirrorView", clonecam);
clonecam.setParallelProjection(true);
viewPort.setClearFlags(true, true, true);
viewPort.setBackgroundColor(ColorRGBA.Black);
Picture pic = new Picture("fullscene");
pic.setLocalTranslation(-0.75f, -0.5f, 0f);
if( expand ) {
pic.setLocalScale(3f, 1f, 1f);
} else {
pic.setLocalScale(1.5f, 1f, 1f);
}
pic.setQueueBucket(Bucket.Opaque);
pic.setTexture(application.getAssetManager(), (Texture2D)tex, false);
viewPort.attachScene(pic);
viewPort.setOutputFrameBuffer(null);
pic.updateGeometricState();
return viewPort;
}
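/*
 * Note on the scale values above (inferred): the expanded 3x1 quad is used for
 * the side-by-side dual-eye texture in instanced mode, while the 1.5x1 quad
 * mirrors a single eye texture; both are drawn into the parallel-projection
 * "MirrorView" post view, which mirrors the VR output onto the desktop window.
 */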
private void setupFinalFullTexture(Camera cam) {
// create offscreen framebuffer
FrameBuffer out = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
//offBuffer.setSrgb(true);
//setup framebuffer's texture
dualEyeTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
dualEyeTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
dualEyeTex.setMagFilter(Texture.MagFilter.Bilinear);
logger.config("Dual eye texture "+dualEyeTex.getName()+" ("+dualEyeTex.getImage().getId()+")");
logger.config(" Type: "+dualEyeTex.getType());
logger.config(" Size: "+dualEyeTex.getImage().getWidth()+"x"+dualEyeTex.getImage().getHeight());
logger.config(" Image depth: "+dualEyeTex.getImage().getDepth());
logger.config(" Image format: "+dualEyeTex.getImage().getFormat());
logger.config(" Image color space: "+dualEyeTex.getImage().getColorSpace());
//setup framebuffer to use texture
out.setDepthBuffer(Image.Format.Depth);
out.setColorTexture(dualEyeTex);
ViewPort viewPort = application.getViewPort();
viewPort.setClearFlags(true, true, true);
viewPort.setBackgroundColor(ColorRGBA.Black);
viewPort.setOutputFrameBuffer(out);
}
private ViewPort setupViewBuffers(Camera cam, String viewName){
// create offscreen framebuffer
FrameBuffer offBufferLeft = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
//offBufferLeft.setSrgb(true);
//setup framebuffer's texture
Texture2D offTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
offTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
offTex.setMagFilter(Texture.MagFilter.Bilinear);
//setup framebuffer to use texture
offBufferLeft.setDepthBuffer(Image.Format.Depth);
offBufferLeft.setColorTexture(offTex);
ViewPort viewPort = application.getRenderManager().createPreView(viewName, cam);
viewPort.setClearFlags(true, true, true);
viewPort.setBackgroundColor(ColorRGBA.Black);
Iterator<Spatial> spatialIter = application.getViewPort().getScenes().iterator();
while(spatialIter.hasNext()){
viewPort.attachScene(spatialIter.next());
}
//set viewport to render to offscreen framebuffer
viewPort.setOutputFrameBuffer(offBufferLeft);
return viewPort;
}
}