diff --git a/.nb-gradle-properties b/.nb-gradle-properties index eba3578eeb..32b46a1dea 100644 --- a/.nb-gradle-properties +++ b/.nb-gradle-properties @@ -1,9 +1,9 @@ - - - - - New BSD (3-clause) License - - jMonkeyEngine - - + + + + + New BSD (3-clause) License + + jMonkeyEngine + + diff --git a/jme3-vr/build.gradle b/jme3-vr/build.gradle index d2c299eb06..9fa6899faf 100644 --- a/jme3-vr/build.gradle +++ b/jme3-vr/build.gradle @@ -2,7 +2,7 @@ if (!hasProperty('mainClass')) { ext.mainClass = '' } -def lwjglVersion = '3.1.3' +def lwjglVersion = '3.2.0' sourceCompatibility = '1.8' @@ -18,4 +18,9 @@ dependencies { // Native LibOVR/Oculus support compile "org.lwjgl:lwjgl-ovr:${lwjglVersion}" runtime "org.lwjgl:lwjgl-ovr:${lwjglVersion}:natives-windows" + + compile "org.lwjgl:lwjgl-openvr:${lwjglVersion}" + compile "org.lwjgl:lwjgl-openvr:${lwjglVersion}:natives-windows" + compile "org.lwjgl:lwjgl-openvr:${lwjglVersion}:natives-linux" + compile "org.lwjgl:lwjgl-openvr:${lwjglVersion}:natives-macos" } \ No newline at end of file diff --git a/jme3-vr/src/main/java/com/jme3/app/VRApplication.java b/jme3-vr/src/main/java/com/jme3/app/VRApplication.java deleted file mode 100644 index 0ea29d6455..0000000000 --- a/jme3-vr/src/main/java/com/jme3/app/VRApplication.java +++ /dev/null @@ -1,1535 +0,0 @@ -package com.jme3.app; - -import com.jme3.app.AppTask; -import com.jme3.app.Application; -import com.jme3.app.LegacyApplication; -import com.jme3.app.LostFocusBehavior; -import com.jme3.app.ResetStatsState; -import com.jme3.app.SimpleApplication; -import com.jme3.app.state.AppState; -import com.jme3.app.state.AppStateManager; -import com.jme3.asset.AssetManager; -import com.jme3.audio.AudioContext; -import com.jme3.audio.AudioRenderer; -import com.jme3.audio.Listener; -import com.jme3.input.InputManager; -import com.jme3.input.JoyInput; -import com.jme3.input.KeyInput; -import com.jme3.input.MouseInput; -import com.jme3.input.TouchInput; -import com.jme3.input.controls.KeyTrigger; -import com.jme3.input.vr.VRAPI; -import com.jme3.input.vr.VRInputAPI; -import com.jme3.input.vr.openvr.OpenVR; -import com.jme3.input.vr.openvr.OpenVRMouseManager; -import com.jme3.input.vr.openvr.OpenVRViewManager; -import com.jme3.input.vr.osvr.OSVR; -import com.jme3.math.ColorRGBA; -import com.jme3.math.Quaternion; -import com.jme3.math.Vector3f; -import com.jme3.post.PreNormalCaching; -import com.jme3.profile.AppProfiler; -import com.jme3.renderer.Camera; -import com.jme3.renderer.RenderManager; -import com.jme3.renderer.Renderer; -import com.jme3.renderer.ViewPort; -import com.jme3.renderer.queue.RenderQueue.Bucket; -import com.jme3.scene.Node; -import com.jme3.scene.Spatial; -import com.jme3.scene.Spatial.CullHint; -import com.jme3.system.AppSettings; -import com.jme3.system.JmeContext; -import com.jme3.system.JmeContext.Type; -import com.jme3.system.jopenvr.JOpenVRLibrary; -import com.jme3.system.JmeSystem; -import com.jme3.system.NanoTimer; -import com.jme3.system.SystemListener; -import com.jme3.system.Timer; -import com.jme3.system.lwjgl.LwjglDisplayVR; -import com.jme3.system.lwjgl.LwjglOffscreenBufferVR; -import com.jme3.util.VRGUIPositioningMode; -import com.jme3.util.VRGuiManager; - -import java.awt.GraphicsDevice; -import java.awt.GraphicsEnvironment; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.net.MalformedURLException; -import java.net.URL; -import java.util.Locale; -import java.util.concurrent.Callable; -import java.util.concurrent.ConcurrentLinkedQueue; 
-import java.util.concurrent.Future; -import java.util.logging.Level; -import java.util.logging.Logger; - -import org.lwjgl.system.Platform; - - -/** - * A JMonkey application dedicated to Virtual Reality. An application that use VR devices (HTC vive, ...) has to extends this one.
- *
- * This class is no more functional and is deprecated. Please use {@link VRAppState VRAppState} instead. - * @author reden - phr00t - https://github.com/phr00t - * @author Julien Seinturier - COMEX SA - http://www.seinturier.fr - * @deprecated use {@link VRAppState VRAppState} instead. - */ -public abstract class VRApplication implements Application, SystemListener { - - - private static final Logger logger = Logger.getLogger(LegacyApplication.class.getName()); - - /** - * The default FOV. - */ - public float DEFAULT_FOV = 108f; - - - /** - * The default aspect ratio. - */ - public float DEFAULT_ASPECT = 1f; - - /** - * Is the application is based on OSVR (default is false). - */ - public boolean CONSTRUCT_WITH_OSVR = false; - - /** - * Is the application has not to start within VR mode (default is false). - */ - public boolean DISABLE_VR = false; - - /** - * VR application configuration parameters. - * @author reden - phr00t - https://github.com/phr00t - * @author Julien Seinturier - (c) 2016 - JOrigin project - http:/www.jorigin.org - * - */ - public static enum PreconfigParameter { - /** - * Is the SteamVR compositor is used (kinda needed at the moment) - */ - USE_VR_COMPOSITOR, - - /** - * Render two eyes, regardless of VR API detection. - */ - FORCE_VR_MODE, - - /** - * Invert the eyes. - */ - FLIP_EYES, - - /** - * Show GUI even if it is behind objects. - */ - SET_GUI_OVERDRAW, - - /** - * - */ - SET_GUI_CURVED_SURFACE, - - /** - * Display a mirror rendering on the screen. Runs faster when set to false. - */ - ENABLE_MIRROR_WINDOW, - - /** - * - */ - PREFER_OPENGL3, - - /** - * Disable VR rendering, regardless VR API and devices are presents. - */ - DISABLE_VR, - - /** - * - */ - SEATED_EXPERIENCE, - - /** - * Remove GUI node from the application. 
- */ - NO_GUI, - - /** - * Faster VR rendering, requires some vertex shader changes (see Common/MatDefs/VR/Unshaded.j3md) - */ - INSTANCE_VR_RENDERING, - - /** - * - */ - FORCE_DISABLE_MSAA - } - - private VRAPI VRhardware = null; - private VRGuiManager guiManager = null; - private OpenVRMouseManager mouseManager = null; - private OpenVRViewManager viewmanager = null; - - private String OS; - - private Camera dummyCam; - private Spatial observer; - private boolean VRSupportedOS; - private boolean forceVR; - private boolean disableSwapBuffers = true; - private boolean tryOpenGL3 = true; - private boolean seated; - private boolean nogui; - private boolean instanceVR; - private boolean forceDisableMSAA; - - // things taken from LegacyApplication - private AppStateManager stateManager; - private Camera cam; - private AppSettings settings; - private JmeContext context; - private float speed = 1f; - private AudioRenderer audioRenderer; - private LostFocusBehavior lostFocusBehavior = LostFocusBehavior.ThrottleOnLostFocus; - private final ConcurrentLinkedQueue> taskQueue = new ConcurrentLinkedQueue>(); - private Timer timer = new NanoTimer(); - private boolean paused = false, inputEnabled = true; - private InputManager inputManager; - private RenderManager renderManager; - private ViewPort viewPort; - private ViewPort guiViewPort; - private AssetManager assetManager; - private Renderer renderer; - private Listener listener; - private MouseInput mouseInput; - private KeyInput keyInput; - private JoyInput joyInput; - private TouchInput touchInput; - - protected Node guiNode, rootNode; - - private float fFar = 1000f, fNear = 1f; - private int xWin = 1280, yWin = 720; - - private float resMult = 1f; - - private boolean useCompositor = true, compositorOS; - private final String RESET_HMD = "ResetHMD"; - - /** - * Create a new VR application and attach the given {@link AppState app states}.
- * The application scene is made of a {@link #getRootNode() root node} that holds the scene spatials - * and a {@link #getGuiNode() GUI node} that is the root of the Graphical user interface. - * @param initialStates the {@link AppState app states} to attach to the application. - */ - public VRApplication(AppState... initialStates) { - this(); - - if (initialStates != null) { - for (AppState a : initialStates) { - if (a != null) { - stateManager.attach(a); - } - } - } - } - - /** - * Create a new VR application.
- * The application scene is made of a {@link #getRootNode() root node} that holds the scene spatials - * and a {@link #getGuiNode() GUI node} that is the root of the Graphical user interface. - */ - public VRApplication() { - super(); - - rootNode = new Node("root"); - guiNode = new Node("guiNode"); - - guiNode.setQueueBucket(Bucket.Gui); - guiNode.setCullHint(CullHint.Never); - dummyCam = new Camera(); - - initStateManager(); - - // Create the GUI manager. - guiManager = new VRGuiManager(null); - - // Create a new view manager. - viewmanager = new OpenVRViewManager(null); - - // Create a new mouse manager. - mouseManager = new OpenVRMouseManager(null); - - // we are going to use OpenVR now, not the Oculus Rift - // OpenVR does support the Rift - OS = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH); - VRSupportedOS = !OS.contains("nux") && System.getProperty("sun.arch.data.model").equalsIgnoreCase("64"); //for the moment, linux/unix causes crashes, 64-bit only - compositorOS = OS.contains("indows"); - - if( !VRSupportedOS ) { - logger.warning("Non-supported OS: " + OS + ", architecture: " + System.getProperty("sun.arch.data.model")); - } else if( DISABLE_VR ) { - logger.warning("VR disabled via code."); - } else if( VRSupportedOS && DISABLE_VR == false ) { - if( CONSTRUCT_WITH_OSVR ) { - //FIXME: WARNING !! - VRhardware = new OSVR(null); - logger.config("Creating OSVR wrapper [SUCCESS]"); - } else { - //FIXME: WARNING !! - VRhardware = new OpenVR(null); - logger.config("Creating OpenVR wrapper [SUCCESS]"); - } - if( VRhardware.initialize() ) { - setPauseOnLostFocus(false); - } - } - } - - /** - * Get the VR underlying hardware. - * @return the VR underlying hardware. - */ - public VRAPI getVRHardware() { - return VRhardware; - } - - /** - * Get the VR dedicated input. - * @return the VR dedicated input. - */ - public VRInputAPI getVRinput() { - if( VRhardware == null ) return null; - return VRhardware.getVRinput(); - } - - /** - * Get the VR view manager. - * @return the VR view manager. - */ - public OpenVRViewManager getVRViewManager() { - return viewmanager; - } - - /** - * Get the GUI manager attached to this application. - * @return the GUI manager attached to this application. - */ - public VRGuiManager getVRGUIManager(){ - return guiManager; - } - - /** - * Get the VR mouse manager attached to this application. - * @return the VR mouse manager attached to this application. - */ - public OpenVRMouseManager getVRMouseManager(){ - return mouseManager; - } - - /** - * Set the frustum values for the application. - * @param near the frustum near value. - * @param far the frustum far value. - */ - public void setFrustrumNearFar(float near, float far) { - fNear = near; - fFar = far; - } - - /** - * Set the mirror window size in pixel. - * @param width the width of the mirror window in pixel. - * @param height the height of the mirror window in pixel. - */ - public void setMirrorWindowSize(int width, int height) { - xWin = width; - yWin = height; - } - - /** - * Set the resolution multiplier. - * @param val the resolution multiplier. - */ - public void setResolutionMultiplier(float val) { - resMult = val; - if( viewmanager != null ) viewmanager.setResolutionMultiplier(resMult); - } - - - /** - * Is the SteamVR compositor is active. - * @return true if the SteamVR compositor is active and false otherwise. - */ - public boolean compositorAllowed() { - return useCompositor && compositorOS; - } - - /** - * Get if the system currently support VR. 
- * @return true if the system currently support VR and false otherwise. - */ - public boolean isOSVRSupported() { - return VRSupportedOS; - } - - /** - * Simple update of the application, this method should contains {@link #getRootNode() root node} updates. - * This method is called by the {@link #update() update()} method and should not be called manually. - * @param tpf the application time. - */ - public void simpleUpdate(float tpf) { } - - /** - * Rendering callback of the application. This method is called by the {@link #update() update()} method and should not be called manually. - * @param renderManager the {@link RenderManager render manager}. - */ - public void simpleRender(RenderManager renderManager) { - PreNormalCaching.resetCache(isInVR()); - } - - - /* - we do NOT want to get & modify the distortion scene camera, so - return the left viewport camera instead if we are in VR mode - */ - @Override - public Camera getCamera() { - if( isInVR() && viewmanager != null && viewmanager.getLeftCamera() != null ) { - return dummyCam; - } - return cam; - } - - /** - * Get the application internal camera. - * @return the application internal camera. - * @see #getCamera() - */ - public Camera getBaseCamera() { - return cam; - } - - - @Override - public JmeContext getContext(){ - return context; - } - - @Override - public AssetManager getAssetManager(){ - return assetManager; - } - - @Override - public InputManager getInputManager(){ - return inputManager; - } - - @Override - public AppStateManager getStateManager() { - return stateManager; - } - - @Override - public RenderManager getRenderManager() { - return renderManager; - } - - @Override - public Renderer getRenderer(){ - return renderer; - } - - @Override - public AudioRenderer getAudioRenderer() { - return audioRenderer; - } - - @Override - public Listener getListener() { - return listener; - } - - @Override - public Timer getTimer(){ - return timer; - } - - /** - * Handle the error given in parameters by creating a log entry and a dialog window. Internal use only. - */ - public void handleError(String errMsg, Throwable t){ - // Print error to log. - logger.log(Level.SEVERE, errMsg, t); - // Display error message on screen if not in headless mode - if (context.getType() != JmeContext.Type.Headless) { - if (t != null) { - JmeSystem.showErrorDialog(errMsg + "\n" + t.getClass().getSimpleName() + - (t.getMessage() != null ? ": " + t.getMessage() : "")); - } else { - JmeSystem.showErrorDialog(errMsg); - } - } - - stop(); // stop the application - } - - - /** - * Force the focus gain for the application. Internal use only. - */ - public void gainFocus(){ - if (lostFocusBehavior != LostFocusBehavior.Disabled) { - if (lostFocusBehavior == LostFocusBehavior.PauseOnLostFocus) { - paused = false; - } - context.setAutoFlushFrames(true); - if (inputManager != null) { - inputManager.reset(); - } - } - } - - /** - * Force the focus lost for the application. Internal use only. - */ - public void loseFocus(){ - if (lostFocusBehavior != LostFocusBehavior.Disabled){ - if (lostFocusBehavior == LostFocusBehavior.PauseOnLostFocus) { - paused = true; - } - context.setAutoFlushFrames(false); - } - } - - /** - * Reshape the display window. Internal use only. - */ - public void reshape(int w, int h){ - if (renderManager != null) { - renderManager.notifyReshape(w, h); - } - } - - /** - * Request the application to close. Internal use only. 
- */ - public void requestClose(boolean esc){ - context.destroy(false); - } - - /** - * Set the {@link AppSettings display settings} to define the display created. - *
- * Examples of display parameters include display frame {@link AppSettings#getWidth() width} and {@link AppSettings#getHeight() height}, - * pixel {@link AppSettings#getBitsPerPixel() color bit depth}, {@link AppSettings#getDepthBits() z-buffer bits}, {@link AppSettings#getSamples() anti-aliasing samples}, {@link AppSettings#getFrequency() update frequency}, ... - *
If this method is called while the application is already running, then - * {@link #restart() } must be called to apply the settings to the display. - * - * @param settings The settings to set. - */ - public void setSettings(AppSettings settings){ - this.settings = settings; - if (context != null && settings.useInput() != inputEnabled){ - // may need to create or destroy input based - // on settings change - inputEnabled = !inputEnabled; - if (inputEnabled){ - initInput(); - }else{ - destroyInput(); - } - }else{ - inputEnabled = settings.useInput(); - } - } - - /** - * Sets the {@link Timer} implementation that will be used for calculating - * frame times.
- * By default, Application will use the Timer as returned by the current {@link JmeContext} implementation. - * @param timer the timer to use. - */ - public void setTimer(Timer timer){ - this.timer = timer; - - if (timer != null) { - timer.reset(); - } - - if (renderManager != null) { - renderManager.setTimer(timer); - } - } - - - /** - * Determine the application's behavior when unfocused. - * @return The lost focus behavior of the application. - */ - public LostFocusBehavior getLostFocusBehavior() { - return lostFocusBehavior; - } - - /** - * Change the application's behavior when unfocused. By default, the application will - * {@link LostFocusBehavior#ThrottleOnLostFocus throttle the update loop} - * so as to not take 100% CPU usage when it is not in focus, e.g. - * alt-tabbed, minimized, or obstructed by another window. - * - * @param lostFocusBehavior The new {@link LostFocusBehavior lost focus behavior} to use. - */ - public void setLostFocusBehavior(LostFocusBehavior lostFocusBehavior) { - this.lostFocusBehavior = lostFocusBehavior; - } - - /** - * Get if the application has to pause then it lost the focus. - * @return true if pause on lost focus is enabled, false otherwise. - * @see #getLostFocusBehavior() - */ - public boolean isPauseOnLostFocus() { - return getLostFocusBehavior() == LostFocusBehavior.PauseOnLostFocus; - } - - /** - * Enable or disable pause on lost focus. - *
- * By default, pause on lost focus is enabled. - * If enabled, the application will stop updating - * when it loses focus or becomes inactive (e.g. alt-tab). - * For online or real-time applications, this might not be preferable, - * so this feature should be set to disabled. For other applications, - * it is best to keep it on so that CPU usage is not used when - * not necessary. - * - * @param pauseOnLostFocus true to enable pause on lost focus, false - * otherwise. - * - * @see #setLostFocusBehavior(com.jme3.app.LostFocusBehavior) - */ - public void setPauseOnLostFocus(boolean pauseOnLostFocus) { - if (pauseOnLostFocus) { - setLostFocusBehavior(LostFocusBehavior.PauseOnLostFocus); - } else { - setLostFocusBehavior(LostFocusBehavior.Disabled); - } - } - - @Override - public void start() { - - logger.config("Starting application..."); - - // set some default settings in-case - // settings dialog is not shown - boolean loadSettings = false; - if (settings == null) { - setSettings(new AppSettings(true)); - loadSettings = true; - } - - GraphicsDevice defDev = null; - try { - GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment(); - defDev = ge.getDefaultScreenDevice(); - } catch (Throwable e1) { - logger.log(Level.SEVERE, "Cannot access default screen device: "+e1.getMessage(), e1); - } - - if( isInVR() && !compositorAllowed() ) { - logger.warning("VR Composition is not allowed."); - // "easy extended" mode - // TO-DO: JFrame was removed in LWJGL 3, need to use new GLFW library to pick "monitor" display of VR device - // first, find the VR device - GraphicsDevice VRdev = null; - GraphicsDevice[] devs = GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices(); - // pick the display that isn't the default one - for(GraphicsDevice gd : devs) { - if( gd != defDev ) { - VRdev = gd; - break; - } - } - // did we get the VR device? 
- if( VRdev != null ) { - // set properties for VR acceleration - try { - java.awt.DisplayMode useDM = null; - int max = 0; - for(java.awt.DisplayMode dm : VRdev.getDisplayModes()) { - int check = dm.getHeight() + dm.getWidth() + dm.getRefreshRate() + dm.getBitDepth(); - if( check > max ) { - max = check; - useDM = dm; - } - } - // create a window for the VR device - settings.setWidth(useDM.getWidth()); - settings.setHeight(useDM.getHeight()); - settings.setBitsPerPixel(useDM.getBitDepth()); - settings.setFrequency(useDM.getRefreshRate()); - settings.setSwapBuffers(true); - settings.setVSync(true); // allow vsync on this display - setSettings(settings); - //VRdev.setFullScreenWindow(VRwindow); - // make sure we are in the right display mode - if( VRdev.getDisplayMode().equals(useDM) == false ) { - VRdev.setDisplayMode(useDM); - } - // make a blank cursor to hide it - //BufferedImage cursorImg = new BufferedImage(16, 16, BufferedImage.TYPE_INT_ARGB); - //Cursor blankCursor = Toolkit.getDefaultToolkit().createCustomCursor(cursorImg, new Point(0, 0), "blank cursor"); - //VRwindow.setCursor(blankCursor); - //jmeCanvas.getCanvas().setCursor(blankCursor); - //VRwindow.pack(); - //VRwindow.setVisible(true); - //startCanvas(); - logger.config("Starting application [SUCCESS]"); - return; - } catch(Exception e) { - logger.log(Level.SEVERE, "Error during application start: "+e.getMessage(), e); - } - } - } - - if( !isInVR() ) { - - logger.config("VR mode disabled."); - - // not in VR, show settings dialog - if( Platform.get() != Platform.MACOSX ) { - if (!JmeSystem.showSettingsDialog(settings, loadSettings)) { - logger.config("Starting application [SUCCESS]"); - return; - } - } else { - // GLFW workaround on macs - settings.setFrequency(defDev.getDisplayMode().getRefreshRate()); - settings.setDepthBits(24); - settings.setVSync(true); - // try and read resolution from file in local dir - File resfile = new File("resolution.txt"); - if( resfile.exists() ) { - try { - BufferedReader br = new BufferedReader(new FileReader(resfile)); - settings.setWidth(Integer.parseInt(br.readLine())); - settings.setHeight(Integer.parseInt(br.readLine())); - try { - settings.setFullscreen(br.readLine().toLowerCase(Locale.ENGLISH).contains("full")); - } catch(Exception e) { - settings.setFullscreen(false); - } - br.close(); - } catch(Exception e) { - settings.setWidth(1280); - settings.setHeight(720); - } - } else { - settings.setWidth(1280); - settings.setHeight(720); - settings.setFullscreen(false); - } - settings.setResizable(false); - } - settings.setSwapBuffers(true); - } else { - - logger.config("VR mode enabled."); - - // use basic mirroring window, skip settings window - settings.setWidth(xWin); - settings.setHeight(yWin); - settings.setBitsPerPixel(24); - settings.setFrameRate(0); // never sleep in main loop - settings.setFrequency(VRhardware.getDisplayFrequency()); - settings.setFullscreen(false); - settings.setVSync(false); // stop vsyncing on primary monitor! 
- settings.setSwapBuffers(!disableSwapBuffers || VRhardware instanceof OSVR); - settings.setTitle("Put Headset On Now: " + settings.getTitle()); - settings.setResizable(true); - } - - if( forceDisableMSAA ) { - logger.config("Disabling multisampling."); - // disable multisampling, which is more likely to break things than be useful - settings.setSamples(1); - } - - // set opengl mode - if( tryOpenGL3 ) { - logger.config("Using LWJGL OpenGL 3 renderer."); - settings.setRenderer(AppSettings.LWJGL_OPENGL3); - } else { - logger.config("Using LWJGL OpenGL 2 renderer."); - settings.setRenderer(AppSettings.LWJGL_OPENGL2); - } - - - setSettings(settings); - start(JmeContext.Type.Display, false); - - // disable annoying warnings about GUI stuff being updated, which is normal behavior - // for late GUI placement for VR purposes - Logger.getLogger("com.jme3").setLevel(Level.SEVERE); - } - - /** - * Starts the application in {@link com.jme3.system.JmeContext.Type#Display display} mode. - * @param waitFor if true, the method will wait until the application is started. - * @see #start(com.jme3.system.JmeContext.Type, boolean) - */ - public void start(boolean waitFor){ - start(JmeContext.Type.Display, waitFor); - } - - /** - * Starts the application. - * Creating a rendering context and executing the main loop in a separate thread. - * @param contextType the {@link com.jme3.system.JmeContext.Type type} of the context to create. - * @param waitFor if true, the method will wait until the application is started. - * @throws IllegalArgumentException if the context type is not supported. - */ - public void start(JmeContext.Type contextType, boolean waitFor){ - if (context != null && context.isCreated()){ - logger.warning("start() called when application already created!"); - return; - } - - if (settings == null){ - settings = new AppSettings(true); - } - - logger.log(Level.FINE, "Starting application: {0}", getClass().getName()); - - // Create VR decicated context - if (contextType == Type.Display){ - context = new LwjglDisplayVR(); - context.setSettings(settings); - } else if (contextType == Type.OffscreenSurface){ - context = new LwjglOffscreenBufferVR(); - context.setSettings(settings); - } else { - logger.severe("Unsupported context type \""+contextType+"\". Supported are \"Display\" and \"OffscreenSurface\""); - throw new IllegalArgumentException("Unsupported context type \""+contextType+"\". Supported are \"Display\" and \"OffscreenSurface\""); - } - - context.setSystemListener(this); - context.create(waitFor); - } - - /** - * Move filters from the main scene into the eye's. - * This removes filters from the main scene. - */ - public void moveScreenProcessingToVR() { - if( isInVR() ) { - viewmanager.moveScreenProcessingToEyes(); - } - } - - /** - * Set VR application {@link PreconfigParameter specific parameter}. - * If making changes to default values, this must be called before the VRApplication starts - * @param parm the parameter to set. - * @param value the value of the parameter. 
- */ - public void preconfigureVRApp(PreconfigParameter parm, boolean value) { - switch( parm ) { - case SET_GUI_OVERDRAW: - guiManager.setGuiOverdraw(value); - break; - case SET_GUI_CURVED_SURFACE: - guiManager.setCurvedSurface(value); - break; - case FORCE_VR_MODE: - forceVR = value; - break; - //case USE_CUSTOM_DISTORTION: //deprecated, always using a render manager - // VRViewManager._setCustomDistortion(value); - // break; - case USE_VR_COMPOSITOR: - useCompositor = value; - if( value == false ) disableSwapBuffers = false; - break; - case FLIP_EYES: - if( VRhardware == null ) return; - VRhardware.setFlipEyes(value); - break; - case INSTANCE_VR_RENDERING: - instanceVR = value; - break; - case ENABLE_MIRROR_WINDOW: - if( useCompositor == false ) { - disableSwapBuffers = false; - } else disableSwapBuffers = !value; - break; - case PREFER_OPENGL3: - tryOpenGL3 = value; - break; - case DISABLE_VR: - DISABLE_VR = value; - break; - case NO_GUI: - nogui = value; - break; - case SEATED_EXPERIENCE: - seated = value; - break; - case FORCE_DISABLE_MSAA: - forceDisableMSAA = value; - break; - } - } - - /** - * Can be used to change seated experience during runtime. - * @param isSeated true if designed for sitting, false for standing/roomscale - * @see #isSeatedExperience() - */ - public void setSeatedExperience(boolean isSeated) { - seated = isSeated; - if( VRhardware instanceof OpenVR ) { - if( VRhardware.getCompositor() == null ) return; - if( seated ) { - ((OpenVR)VRhardware).getCompositor().SetTrackingSpace.apply(JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseSeated); - } else { - ((OpenVR)VRhardware).getCompositor().SetTrackingSpace.apply(JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseStanding); - } - } - } - - /** - * Check if the application is configured as a seated experience. - * @return true if the application is configured as a seated experience and false otherwise. - * @see #setSeatedExperience(boolean) - */ - public boolean isSeatedExperience() { - return seated; - } - - /** - * Reset headset pose if seating experience. - */ - public void resetSeatedPose(){ - if( VRSupportedOS == false || isSeatedExperience() == false ) return; - VRhardware.reset(); - } - - /** - * Check if the rendering is instanced (see Geometry instancing). - * @return true if the rendering is instanced and false otherwise. - */ - public boolean isInstanceVRRendering() { - return instanceVR && isInVR(); - } - - /** - * Check if the VR mode is enabled. - * @return true if the VR mode is enabled and false otherwise. - */ - public boolean isInVR() { - return DISABLE_VR == false && (forceVR || VRSupportedOS && VRhardware != null && VRhardware.isInitialized()); - } - - - /** - * Get the GUI node from the application. - * @return the GUI node from the application. - * @see #setGuiNode(Node) - */ - public Node getGuiNode(){ - return guiNode; - } - - /** - * Set the GUI node that is displayed within the GUI viewport. - * Calling this method involve clearing all the scenes previously attached to the gui viewport. - * @param node the GUI node to attach. 
- * @see #getGuiNode() - */ - public void setGuiNode(Node node){ - if (node != null){ - if (guiViewPort != null){ - - enqueue(new Callable(){ - - @Override - public Object call() throws Exception { - guiViewPort.clearScenes(); - guiViewPort.attachScene(node); - guiNode = node; - return null; - } - - }); - - } else { - throw new IllegalArgumentException("GUI view port is not initialized."); - } - } - - } - - /** - * Get the root node of the application. - * @return the root node of the application. - */ - public Node getRootNode() { - return rootNode; - } - - /** - * Check if the application has a GUI overlay attached. - * @return true if the application has a GUI overlay attached and false otherwise. - */ - public boolean hasTraditionalGUIOverlay() { - return !nogui; - } - - - /** - * Get the scene observer. If no observer has been set, this method return the application {@link #getCamera() camera}. - * @return the scene observer. - * @see #setObserver(Spatial) - */ - public Object getObserver() { - if( observer == null ) { - return getCamera(); - } - return observer; - } - - /** - * Set the scene observer. The VR headset will be linked to it. If no observer is set, the VR headset is linked to the application {@link #getCamera() camera}. - * @param observer the scene observer. - */ - public void setObserver(Spatial observer) { - this.observer = observer; - } - - /* - where is the headset pointing, after all rotations are combined? - depends on observer rotation, if any - */ - private static Quaternion tempq = new Quaternion(); - - /** - * Get the observer final rotation within the scene. - * @return the observer final rotation within the scene. - * @see #getFinalObserverPosition() - */ - public Quaternion getFinalObserverRotation() { - if( viewmanager == null ) { - if( observer == null ) { - return getCamera().getRotation(); - } else return observer.getWorldRotation(); - } - if( observer == null ) { - tempq.set(dummyCam.getRotation()); - } else { - tempq.set(observer.getWorldRotation()); - } - return tempq.multLocal(VRhardware.getOrientation()); - } - - /** - * Get the observer final position within the scene. - * @return the observer position. - * @see #getFinalObserverRotation() - */ - public Vector3f getFinalObserverPosition() { - if( viewmanager == null ) { - if( observer == null ) { - return getCamera().getLocation(); - } else return observer.getWorldTranslation(); - } - Vector3f pos = VRhardware.getPosition(); - if( observer == null ) { - dummyCam.getRotation().mult(pos, pos); - return pos.addLocal(dummyCam.getLocation()); - } else { - observer.getWorldRotation().mult(pos, pos); - return pos.addLocal(observer.getWorldTranslation()); - } - } - - /** - * Set the VR headset height from the ground. - * @param amount the VR headset height from the ground. - * @see #getVRHeightAdjustment() - */ - public void setVRHeightAdjustment(float amount) { - if( viewmanager != null ) viewmanager.setHeightAdjustment(amount); - } - - /** - * Get the VR headset height from the ground. - * @return the VR headset height from the ground. - * @see #setVRHeightAdjustment(float) - */ - public float getVRHeightAdjustment() { - if( viewmanager != null ) return viewmanager.getHeightAdjustment(); - return 0f; - } - - /** - * Get the VR headset left viewport. - * @return the VR headset left viewport. - * @see #getRightViewPort() - */ - public ViewPort getLeftViewPort() { - if( viewmanager == null ) return getViewPort(); - return viewmanager.getLeftViewPort(); - } - - /** - * Get the VR headset right viewport. 
- * @return the VR headset right viewport. - * @see #getLeftViewPort() - */ - public ViewPort getRightViewPort() { - if( viewmanager == null ) return getViewPort(); - return viewmanager.getRightViewPort(); - } - - - /** - * Set the background color for both left and right view ports. - * @param clr the background color. - */ - public void setBackgroundColors(ColorRGBA clr) { - if( viewmanager == null ) { - getViewPort().setBackgroundColor(clr); - } else if( viewmanager.getLeftViewPort() != null ) { - viewmanager.getLeftViewPort().setBackgroundColor(clr); - if( viewmanager.getRightViewPort() != null ) viewmanager.getRightViewPort().setBackgroundColor(clr); - } - } - - - /** - * Runs tasks enqueued via {@link #enqueue(Callable)} - */ - protected void runQueuedTasks() { - AppTask task; - while( (task = taskQueue.poll()) != null ) { - if (!task.isCancelled()) { - task.invoke(); - } - } - } - - @Override - public void update() { - // Make sure the audio renderer is available to callables - AudioContext.setAudioRenderer(audioRenderer); - - runQueuedTasks(); - - if (speed != 0 && !paused) { - - timer.update(); - - if (inputEnabled){ - inputManager.update(timer.getTimePerFrame()); - } - - if (audioRenderer != null){ - audioRenderer.update(timer.getTimePerFrame()); - } - } - - if (speed == 0 || paused) { - try { - Thread.sleep(50); // throttle the CPU when paused - } catch (InterruptedException ex) { - Logger.getLogger(SimpleApplication.class.getName()).log(Level.SEVERE, null, ex); - } - return; - } - - float tpf = timer.getTimePerFrame() * speed; - - // update states - stateManager.update(tpf); - - // simple update and root node - simpleUpdate(tpf); - - - // render states - stateManager.render(renderManager); - - // update VR pose & cameras - if( viewmanager != null ) { - viewmanager.update(tpf); - } else if( observer != null ) { - getCamera().setFrame(observer.getWorldTranslation(), observer.getWorldRotation()); - } - - //FIXME: check if this code is necessary. - // Updates scene and gui states. - rootNode.updateLogicalState(tpf); - guiNode.updateLogicalState(tpf); - - rootNode.updateGeometricState(); - - if( isInVR() == false || guiManager.getPositioningMode() == VRGUIPositioningMode.MANUAL ) { - // only update geometric state here if GUI is in manual mode, or not in VR - // it will get updated automatically in the viewmanager update otherwise - guiNode.updateGeometricState(); - } - - renderManager.render(tpf, context.isRenderable()); - simpleRender(renderManager); - stateManager.postRender(); - - // update compositor? 
- if( viewmanager != null ) { - viewmanager.postRender(); - } - } - - private void initAssetManager(){ - URL assetCfgUrl = null; - - if (settings != null){ - String assetCfg = settings.getString("AssetConfigURL"); - if (assetCfg != null){ - try { - assetCfgUrl = new URL(assetCfg); - } catch (MalformedURLException ex) { - } - if (assetCfgUrl == null) { - assetCfgUrl = LegacyApplication.class.getClassLoader().getResource(assetCfg); - if (assetCfgUrl == null) { - logger.log(Level.SEVERE, "Unable to access AssetConfigURL in asset config:{0}", assetCfg); - return; - } - } - } - } - if (assetCfgUrl == null) { - assetCfgUrl = JmeSystem.getPlatformAssetConfigURL(); - } - if (assetManager == null){ - assetManager = JmeSystem.newAssetManager(assetCfgUrl); - logger.config("Created asset manager from "+assetCfgUrl); - } - } - - - private void initDisplay(){ - // aquire important objects - // from the context - settings = context.getSettings(); - - // Only reset the timer if a user has not already provided one - if (timer == null) { - timer = context.getTimer(); - } - - renderer = context.getRenderer(); - } - - private void initAudio(){ - if (settings.getAudioRenderer() != null && context.getType() != JmeContext.Type.Headless){ - audioRenderer = JmeSystem.newAudioRenderer(settings); - audioRenderer.initialize(); - AudioContext.setAudioRenderer(audioRenderer); - - listener = new Listener(); - audioRenderer.setListener(listener); - } - } - - /** - * Creates the camera to use for rendering. Default values are perspective - * projection with 45° field of view, with near and far values 1 and 1000 - * units respectively. - */ - private void initCamera(){ - cam = new Camera(settings.getWidth(), settings.getHeight()); - - cam.setFrustumPerspective(45f, (float)cam.getWidth() / cam.getHeight(), 1f, 1000f); - cam.setLocation(new Vector3f(0f, 0f, 10f)); - cam.lookAt(new Vector3f(0f, 0f, 0f), Vector3f.UNIT_Y); - - renderManager = new RenderManager(renderer); - //Remy - 09/14/2010 setted the timer in the renderManager - renderManager.setTimer(timer); - - viewPort = renderManager.createMainView("Default", cam); - viewPort.setClearFlags(true, true, true); - - // Create a new cam for the gui - Camera guiCam = new Camera(settings.getWidth(), settings.getHeight()); - guiViewPort = renderManager.createPostView("Gui Default", guiCam); - guiViewPort.setClearFlags(false, false, false); - } - - /** - * Initializes mouse and keyboard input. Also - * initializes joystick input if joysticks are enabled in the - * AppSettings. - */ - private void initInput(){ - mouseInput = context.getMouseInput(); - if (mouseInput != null) - mouseInput.initialize(); - - keyInput = context.getKeyInput(); - if (keyInput != null) - keyInput.initialize(); - - touchInput = context.getTouchInput(); - if (touchInput != null) - touchInput.initialize(); - - if (!settings.getBoolean("DisableJoysticks")){ - joyInput = context.getJoyInput(); - if (joyInput != null) - joyInput.initialize(); - } - - inputManager = new InputManager(mouseInput, keyInput, joyInput, touchInput); - } - - private void initStateManager(){ - stateManager = new AppStateManager(this); - - // Always register a ResetStatsState to make sure - // that the stats are cleared every frame - stateManager.attach(new ResetStatsState()); - } - - /** - * Do not call manually. - * Callback from ContextListener. - *
- * Initializes the Application, by creating a display and - * default camera. If display settings are not specified, a default - * 640x480 display is created. Default values are used for the camera; - * perspective projection with 45° field of view, with near - * and far values 1 and 1000 units respectively. - */ - private void initialize_internal(){ - if (assetManager == null){ - initAssetManager(); - } - - initDisplay(); - initCamera(); - - if (inputEnabled){ - initInput(); - } - initAudio(); - - // update timer so that the next delta is not too large -// timer.update(); - timer.reset(); - - // user code here.. - } - - @Override - public void initialize() { - - logger.config("Initialize VR application..."); - - initialize_internal(); - cam.setFrustumFar(fFar); - cam.setFrustumNear(fNear); - dummyCam = cam.clone(); - if( isInVR() ) { - - logger.config("VR mode enabled."); - - if( VRhardware != null ) { - VRhardware.initVRCompositor(compositorAllowed()); - } else { - logger.warning("No VR system found."); - } - - //FIXME: WARNING !! - viewmanager = new OpenVRViewManager(null); - viewmanager.setResolutionMultiplier(resMult); - inputManager.addMapping(RESET_HMD, new KeyTrigger(KeyInput.KEY_F9)); - setLostFocusBehavior(LostFocusBehavior.Disabled); - } else { - logger.config("VR mode disabled."); - viewPort.attachScene(rootNode); - guiViewPort.attachScene(guiNode); - } - - if( viewmanager != null ) { - viewmanager.initialize(); - } - - simpleInitApp(); - - // any filters created, move them now - if( viewmanager != null ) { - viewmanager.moveScreenProcessingToEyes(); - - // print out camera information - if( isInVR() ) { - logger.info("VR Initialization Information"); - if( viewmanager.getLeftCamera() != null ){ - logger.info("camLeft: " + viewmanager.getLeftCamera().toString()); - } - - if( viewmanager.getRightCamera() != null ){ - logger.info("camRight: " + viewmanager.getRightCamera().toString()); - } - } - } - } - - /** - * Initialize the application. This method has to be overridden by implementations. - */ - public abstract void simpleInitApp(); - - /** - * Destroy the application (release all resources). - */ - public void destroy() { - if( VRhardware != null ) { - VRhardware.destroy(); - VRhardware = null; - } - DISABLE_VR = true; - stateManager.cleanup(); - - destroyInput(); - if (audioRenderer != null) - audioRenderer.cleanup(); - - timer.reset(); - Runtime.getRuntime().exit(0); - } - - protected void destroyInput(){ - if (mouseInput != null) - mouseInput.destroy(); - - if (keyInput != null) - keyInput.destroy(); - - if (joyInput != null) - joyInput.destroy(); - - if (touchInput != null) - touchInput.destroy(); - - inputManager = null; - } - - @Override - public ViewPort getGuiViewPort() { - return guiViewPort; - } - - @Override - public ViewPort getViewPort() { - return viewPort; - } - - @Override - public Future enqueue(Callable callable) { - AppTask task = new AppTask(callable); - taskQueue.add(task); - return task; - } - - /** - * Enqueues a runnable object to execute in the jME3 - * rendering thread. - *
- * Runnables are executed right at the beginning of the main loop. - * They are executed even if the application is currently paused - * or out of focus. - * - * @param runnable The runnable to run in the main jME3 thread - */ - public void enqueue(Runnable runnable){ - enqueue(new RunnableWrapper(runnable)); - } - - private class RunnableWrapper implements Callable{ - private final Runnable runnable; - - public RunnableWrapper(Runnable runnable){ - this.runnable = runnable; - } - - @Override - public Object call(){ - runnable.run(); - return null; - } - - } - - /** - * Requests the context to close, shutting down the main loop - * and making necessary cleanup operations. - * - * Same as calling stop(false) - * - * @see #stop(boolean) - */ - @Override - public void stop(){ - stop(false); - } - - /** - * Requests the context to close, shutting down the main loop - * and making necessary cleanup operations. - * After the application has stopped, it cannot be used anymore. - */ - @Override - public void stop(boolean waitFor){ - logger.log(Level.FINE, "Closing application: {0}", getClass().getName()); - context.destroy(waitFor); - } - - /** - * Restarts the context, applying any changed settings. - *
- * Changes to the {@link AppSettings} of this Application are not - * applied immediately; calling this method forces the context - * to restart, applying the new settings. - */ - @Override - public void restart(){ - context.setSettings(settings); - context.restart(); - } - - /** - * Sets an AppProfiler hook that will be called back for - * specific steps within a single update frame. Value defaults - * to null. - */ - - public void setAppProfiler(AppProfiler prof) { - return; - } - - /** - * Returns the current AppProfiler hook, or null if none is set. - */ - public AppProfiler getAppProfiler() { - return null; - } -} \ No newline at end of file diff --git a/jme3-vr/src/main/java/com/jme3/app/VREnvironment.java b/jme3-vr/src/main/java/com/jme3/app/VREnvironment.java index aaf5249cac..c332025e2c 100644 --- a/jme3-vr/src/main/java/com/jme3/app/VREnvironment.java +++ b/jme3-vr/src/main/java/com/jme3/app/VREnvironment.java @@ -10,12 +10,12 @@ import com.jme3.input.vr.VRInputAPI; import com.jme3.input.vr.VRMouseManager; import com.jme3.input.vr.VRViewManager; +import com.jme3.input.vr.openvr.OpenVRViewManager; import com.jme3.input.vr.oculus.OculusMouseManager; import com.jme3.input.vr.oculus.OculusVR; import com.jme3.input.vr.oculus.OculusViewManager; import com.jme3.input.vr.openvr.OpenVR; import com.jme3.input.vr.openvr.OpenVRMouseManager; -import com.jme3.input.vr.openvr.OpenVRViewManager; import com.jme3.input.vr.osvr.OSVR; import com.jme3.input.vr.osvr.OSVRViewManager; import com.jme3.renderer.Camera; @@ -157,15 +157,9 @@ public VRMouseManager getVRMouseManager(){ public void setSeatedExperience(boolean isSeated) { seated = isSeated; if( hardware instanceof OpenVR ) { - if( hardware.getCompositor() == null ) { - return; + if( ((OpenVR)hardware).isInitialized() ) { + ((OpenVR)hardware).setTrackingSpace(seated); } - - if( seated ) { - ((OpenVR)hardware).getCompositor().SetTrackingSpace.apply(JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseSeated); - } else { - ((OpenVR)hardware).getCompositor().SetTrackingSpace.apply(JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseStanding); - } } } diff --git a/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVR.java b/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVR.java index 8e10ecd0a6..51b7f7c1d2 100644 --- a/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVR.java +++ b/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVR.java @@ -13,64 +13,46 @@ import com.jme3.math.Vector2f; import com.jme3.math.Vector3f; import com.jme3.renderer.Camera; -import com.jme3.system.jopenvr.HmdMatrix34_t; -import com.jme3.system.jopenvr.HmdMatrix44_t; -import com.jme3.system.jopenvr.JOpenVRLibrary; -import com.jme3.system.jopenvr.OpenVRUtil; -import com.jme3.system.jopenvr.TrackedDevicePose_t; -import com.jme3.system.jopenvr.VR_IVRCompositor_FnTable; -import com.jme3.system.jopenvr.VR_IVRSystem_FnTable; -import com.jme3.system.jopenvr.VR_IVRTrackedCamera_FnTable; import com.jme3.util.VRUtil; -import com.sun.jna.Memory; -import com.sun.jna.Pointer; -import com.sun.jna.ptr.FloatByReference; -import com.sun.jna.ptr.IntByReference; -import com.sun.jna.ptr.LongByReference; import java.nio.IntBuffer; import java.util.Locale; -import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; +import org.lwjgl.BufferUtils; +import org.lwjgl.openvr.HmdMatrix34; +import org.lwjgl.openvr.HmdMatrix44; +import org.lwjgl.openvr.TrackedDevicePose; +import org.lwjgl.openvr.VR; 
+import org.lwjgl.openvr.VRCompositor; +import org.lwjgl.openvr.VRSystem; /** * A class that wraps an OpenVR system. - * @author reden - phr00t - https://github.com/phr00t + * @author reden - phr00t * @author Julien Seinturier - COMEX SA - http://www.seinturier.fr */ public class OpenVR implements VRAPI { - private static final Logger logger = Logger.getLogger(OpenVR.class.getName()); + private static final Logger logger = Logger.getLogger(OpenVR.class.getName()); - private static VR_IVRCompositor_FnTable compositorFunctions; - private static VR_IVRSystem_FnTable vrsystemFunctions; - private static VR_IVRTrackedCamera_FnTable cameraFunctions; - private static boolean initSuccess = false; private static boolean flipEyes = false; private IntBuffer hmdDisplayFrequency; - private TrackedDevicePose_t.ByReference hmdTrackedDevicePoseReference; - protected TrackedDevicePose_t[] hmdTrackedDevicePoses; + private TrackedDevicePose.Buffer trackedDevicePose; + protected TrackedDevicePose[] hmdTrackedDevicePoses; - protected IntByReference hmdErrorStore; + protected IntBuffer hmdErrorStore = BufferUtils.createIntBuffer(1); private final Quaternion rotStore = new Quaternion(); private final Vector3f posStore = new Vector3f(); - private static FloatByReference tlastVsync; - - /** - * The actual frame count. - */ - public static LongByReference _tframeCount; - // for debugging latency private int frames = 0; protected Matrix4f[] poseMatrices; - + private final Matrix4f hmdPose = Matrix4f.IDENTITY.clone(); private Matrix4f hmdProjectionLeftEye; private Matrix4f hmdProjectionRightEye; @@ -102,17 +84,13 @@ public OpenVRInput getVRinput() { } @Override - public VR_IVRSystem_FnTable getVRSystem() { - return vrsystemFunctions; + public Object getVRSystem() { + throw new UnsupportedOperationException("Not yet implemented!"); } @Override - public VR_IVRCompositor_FnTable getCompositor() { - return compositorFunctions; - } - - public VR_IVRTrackedCamera_FnTable getTrackedCamera(){ - return cameraFunctions; + public Object getCompositor() { + throw new UnsupportedOperationException("Not yet implemented!"); } @Override @@ -145,56 +123,38 @@ public boolean initialize() { logger.config("Initializing OpenVR system..."); - hmdErrorStore = new IntByReference(); - vrsystemFunctions = null; - // Init the native linking to the OpenVR library. 
- try{ - JOpenVRLibrary.init(); - } catch(Throwable t){ - logger.log(Level.SEVERE, "Cannot link to OpenVR system library: "+t.getMessage(), t); - return false; - } - JOpenVRLibrary.VR_InitInternal(hmdErrorStore, JOpenVRLibrary.EVRApplicationType.EVRApplicationType_VRApplication_Scene); + int result = VR.VR_InitInternal(hmdErrorStore, VR.EVRApplicationType_VRApplication_Scene); - if( hmdErrorStore.getValue() == 0 ) { - vrsystemFunctions = new VR_IVRSystem_FnTable(JOpenVRLibrary.VR_GetGenericInterface(JOpenVRLibrary.IVRSystem_Version, hmdErrorStore).getPointer()); - } +// if( hmdErrorStore.get(0) == 0 ) { +// vrsystemFunctions = new VR_IVRSystem_FnTable(JOpenVRLibrary.VR_GetGenericInterface(JOpenVRLibrary.IVRSystem_Version, hmdErrorStore).getPointer()); +// } - if( vrsystemFunctions == null || hmdErrorStore.getValue() != 0 ) { - logger.severe("OpenVR Initialize Result: " + JOpenVRLibrary.VR_GetVRInitErrorAsEnglishDescription(hmdErrorStore.getValue()).getString(0)); + if(hmdErrorStore.get(0) != VR.EVRInitError_VRInitError_None) { + logger.severe("OpenVR Initialize Result: " + VR.VR_GetVRInitErrorAsEnglishDescription(hmdErrorStore.get(0))); logger.severe("Initializing OpenVR system [FAILED]"); return false; } else { logger.config("OpenVR initialized & VR connected."); - - vrsystemFunctions.setAutoSynch(false); - vrsystemFunctions.read(); - - - tlastVsync = new FloatByReference(); - _tframeCount = new LongByReference(); - - hmdDisplayFrequency = IntBuffer.allocate(1); - hmdDisplayFrequency.put( (int) JOpenVRLibrary.ETrackedDeviceProperty.ETrackedDeviceProperty_Prop_DisplayFrequency_Float); - hmdTrackedDevicePoseReference = new TrackedDevicePose_t.ByReference(); - hmdTrackedDevicePoses = (TrackedDevicePose_t[])hmdTrackedDevicePoseReference.toArray(JOpenVRLibrary.k_unMaxTrackedDeviceCount); - poseMatrices = new Matrix4f[JOpenVRLibrary.k_unMaxTrackedDeviceCount]; - for(int i=0;i 0){ + if(hmdErrorStore.get(0) == VR.EVRInitError_VRInitError_None){ + setTrackingSpace(environment.isSeatedExperience() ); + logger.config("OpenVR Compositor initialized"); + } else { + logger.severe("OpenVR Compositor error: " + hmdErrorStore.get(0)); + } } else { - vsyncToPhotons = 0f; + logger.log(Level.SEVERE, "Cannot get generic interface for \""+VR.IVRCompositor_Version+"\", "+VR.VR_GetVRInitErrorAsEnglishDescription(hmdErrorStore.get(0))+" ("+hmdErrorStore.get(0)+")"); } } - return compositorFunctions != null; + return true; } /** * Initialize the headset camera. * @param allowed true is the use of the headset camera is allowed and false otherwise. 
+ * @return token for camera */ - public void initCamera(boolean allowed) { - hmdErrorStore.setValue(0); // clear the error store - - if( allowed && vrsystemFunctions != null ) { - IntByReference intptr = JOpenVRLibrary.VR_GetGenericInterface(JOpenVRLibrary.IVRTrackedCamera_Version, hmdErrorStore); - if (intptr != null){ - cameraFunctions = new VR_IVRTrackedCamera_FnTable(intptr.getPointer()); - if(cameraFunctions != null && hmdErrorStore.getValue() == 0 ){ - cameraFunctions.setAutoSynch(false); - cameraFunctions.read(); + public long initCamera(boolean allowed) { + hmdErrorStore.put(0, VR.EVRInitError_VRInitError_None); // clear the error store + if( allowed) { + + long result = VR.VR_GetGenericInterface(VR.IVRTrackedCamera_Version, hmdErrorStore); + if (result > 0){ + if(hmdErrorStore.get(0) == VR.EVRInitError_VRInitError_None ){ logger.config("OpenVR Camera initialized"); } - } + return result; + } else { + logger.severe("Failed to initialize camera"); + } } + return 0; } @Override public void destroy() { - JOpenVRLibrary.VR_ShutdownInternal(); + VR.VR_ShutdownInternal(); } @Override @@ -289,47 +224,24 @@ public boolean isInitialized() { @Override public void reset() { - if( vrsystemFunctions == null ) return; - vrsystemFunctions.ResetSeatedZeroPose.apply(); + VRSystem.VRSystem_ResetSeatedZeroPose(); hmdSeatToStand = null; } @Override public void getRenderSize(Vector2f store) { - if( vrsystemFunctions == null ) { - // 1344x1512 - store.x = 1344f; - store.y = 1512f; - } else { - IntByReference x = new IntByReference(); - IntByReference y = new IntByReference(); - vrsystemFunctions.GetRecommendedRenderTargetSize.apply(x, y); - store.x = x.getValue(); - store.y = y.getValue(); - } - } - /* - @Override - public float getFOV(int dir) { - float val = 0f; - if( vrsystemFunctions != null ) { - val = vrsystemFunctions.GetFloatTrackedDeviceProperty.apply(JOpenVRLibrary.k_unTrackedDeviceIndex_Hmd, dir, hmdErrorStore); - } - // verification of number - if( val == 0f ) { - return 55f; - } else if( val <= 10f ) { - // most likely a radian number - return val * 57.2957795f; - } - return val; + IntBuffer w = BufferUtils.createIntBuffer(1); + IntBuffer h = BufferUtils.createIntBuffer(1); + VRSystem.VRSystem_GetRecommendedRenderTargetSize(w, h); + logger.config("Recommended render width : " + w.get(0)); + logger.config("Recommended render height: " + h.get(0)); + store.x = w.get(0); + store.y = h.get(0); } - */ @Override public float getInterpupillaryDistance() { - if( vrsystemFunctions == null ) return 0.065f; - return vrsystemFunctions.GetFloatTrackedDeviceProperty.apply(JOpenVRLibrary.k_unTrackedDeviceIndex_Hmd, JOpenVRLibrary.ETrackedDeviceProperty.ETrackedDeviceProperty_Prop_UserIpdMeters_Float, hmdErrorStore); + throw new UnsupportedOperationException("Not yet implemented!"); } @Override @@ -357,73 +269,20 @@ public void getPositionAndOrientation(Vector3f storePos, Quaternion storeRot) { @Override public void updatePose(){ - if(vrsystemFunctions == null) return; - if(compositorFunctions != null) { - compositorFunctions.WaitGetPoses.apply(hmdTrackedDevicePoseReference, JOpenVRLibrary.k_unMaxTrackedDeviceCount, null, 0); - } else { - // wait - if( latencyWaitTime > 0 ) VRUtil.sleepNanos(latencyWaitTime); - - vrsystemFunctions.GetTimeSinceLastVsync.apply(tlastVsync, _tframeCount); - float fSecondsUntilPhotons = (float)timePerFrame - tlastVsync.getValue() + vsyncToPhotons; - - if( enableDebugLatency ) { - if( frames == 10 ) { - System.out.println("Waited (nanos): " + Long.toString(latencyWaitTime)); - 
System.out.println("Predict ahead time: " + Float.toString(fSecondsUntilPhotons)); - } - frames = (frames + 1) % 60; - } - - // handle skipping frame stuff - long nowCount = _tframeCount.getValue(); - if( nowCount - frameCount > 1 ) { - // skipped a frame! - if( enableDebugLatency ) System.out.println("Frame skipped!"); - frameCountRun = 0; - if( latencyWaitTime > 0 ) { - latencyWaitTime -= TimeUnit.MILLISECONDS.toNanos(1); - if( latencyWaitTime < 0 ) latencyWaitTime = 0; - } - } else if( latencyWaitTime < timePerFrame * 1000000000.0 ) { - // didn't skip a frame, lets try waiting longer to improve latency - frameCountRun++; - latencyWaitTime += Math.round(Math.pow(frameCountRun / 10.0, 2.0)); - } - - frameCount = nowCount; - - vrsystemFunctions.GetDeviceToAbsoluteTrackingPose.apply( - environment.isSeatedExperience()?JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseSeated: - JOpenVRLibrary.ETrackingUniverseOrigin.ETrackingUniverseOrigin_TrackingUniverseStanding, - fSecondsUntilPhotons, hmdTrackedDevicePoseReference, JOpenVRLibrary.k_unMaxTrackedDeviceCount); - } - - // deal with controllers being plugged in and out - // causing an invalid memory crash... skipping for now - /*boolean hasEvent = false; - while( JOpenVRLibrary.VR_IVRSystem_PollNextEvent(OpenVR.getVRSystemInstance(), tempEvent) != 0 ) { - // wait until the events are clear.. - hasEvent = true; - } - if( hasEvent ) { - // an event probably changed controller state - VRInput._updateConnectedControllers(); - }*/ - //update controllers pose information + int result = VRCompositor.nVRCompositor_WaitGetPoses(trackedDevicePose.address(), trackedDevicePose.remaining(), 0, 0); + // NPE when calling without a gamePoseArray. Issue filed with lwjgl #418 +// int result = VRCompositor.VRCompositor_WaitGetPoses(trackedDevicePose, null); environment.getVRinput().updateControllerStates(); // read pose data from native - for (int nDevice = 0; nDevice < JOpenVRLibrary.k_unMaxTrackedDeviceCount; ++nDevice ){ - hmdTrackedDevicePoses[nDevice].readField("bPoseIsValid"); - if( hmdTrackedDevicePoses[nDevice].bPoseIsValid != 0 ){ - hmdTrackedDevicePoses[nDevice].readField("mDeviceToAbsoluteTracking"); - VRUtil.convertSteamVRMatrix3ToMatrix4f(hmdTrackedDevicePoses[nDevice].mDeviceToAbsoluteTracking, poseMatrices[nDevice]); + for (int nDevice = 0; nDevice < VR.k_unMaxTrackedDeviceCount; ++nDevice ){ + if( hmdTrackedDevicePoses[nDevice].bPoseIsValid() ){ + VRUtil.convertSteamVRMatrix3ToMatrix4f(hmdTrackedDevicePoses[nDevice].mDeviceToAbsoluteTracking(), poseMatrices[nDevice]); } } - if ( hmdTrackedDevicePoses[JOpenVRLibrary.k_unTrackedDeviceIndex_Hmd].bPoseIsValid != 0 ){ - hmdPose.set(poseMatrices[JOpenVRLibrary.k_unTrackedDeviceIndex_Hmd]); + if ( hmdTrackedDevicePoses[VR.k_unTrackedDeviceIndex_Hmd].bPoseIsValid()){ + hmdPose.set(poseMatrices[VR.k_unTrackedDeviceIndex_Hmd]); } else { hmdPose.set(Matrix4f.IDENTITY); } @@ -433,10 +292,9 @@ public void updatePose(){ public Matrix4f getHMDMatrixProjectionLeftEye(Camera cam){ if( hmdProjectionLeftEye != null ) { return hmdProjectionLeftEye; - } else if(vrsystemFunctions == null){ - return cam.getProjectionMatrix(); } else { - HmdMatrix44_t mat = vrsystemFunctions.GetProjectionMatrix.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left, cam.getFrustumNear(), cam.getFrustumFar()); + HmdMatrix44 mat = HmdMatrix44.create(); + mat = VRSystem.VRSystem_GetProjectionMatrix(VR.EVREye_Eye_Left, cam.getFrustumNear(), cam.getFrustumFar(), mat); hmdProjectionLeftEye = new Matrix4f(); 
VRUtil.convertSteamVRMatrix4ToMatrix4f(mat, hmdProjectionLeftEye); return hmdProjectionLeftEye; @@ -447,10 +305,9 @@ public Matrix4f getHMDMatrixProjectionLeftEye(Camera cam){ public Matrix4f getHMDMatrixProjectionRightEye(Camera cam){ if( hmdProjectionRightEye != null ) { return hmdProjectionRightEye; - } else if(vrsystemFunctions == null){ - return cam.getProjectionMatrix(); } else { - HmdMatrix44_t mat = vrsystemFunctions.GetProjectionMatrix.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, cam.getFrustumNear(), cam.getFrustumFar()); + HmdMatrix44 mat = HmdMatrix44.create(); + mat = VRSystem.VRSystem_GetProjectionMatrix(VR.EVREye_Eye_Right, cam.getFrustumNear(), cam.getFrustumFar(), mat); hmdProjectionRightEye = new Matrix4f(); VRUtil.convertSteamVRMatrix4ToMatrix4f(mat, hmdProjectionRightEye); return hmdProjectionRightEye; @@ -488,7 +345,9 @@ public Vector3f getSeatedToAbsolutePosition() { if( environment.isSeatedExperience() == false ) return Vector3f.ZERO; if( hmdSeatToStand == null ) { hmdSeatToStand = new Vector3f(); - HmdMatrix34_t mat = vrsystemFunctions.GetSeatedZeroPoseToStandingAbsoluteTrackingPose.apply(); + + HmdMatrix34 mat = HmdMatrix34.create(); + VRSystem.VRSystem_GetSeatedZeroPoseToStandingAbsoluteTrackingPose(mat); Matrix4f tempmat = new Matrix4f(); VRUtil.convertSteamVRMatrix3ToMatrix4f(mat, tempmat); tempmat.toTranslationVector(hmdSeatToStand); @@ -500,29 +359,38 @@ public Vector3f getSeatedToAbsolutePosition() { public Matrix4f getHMDMatrixPoseLeftEye(){ if( hmdPoseLeftEye != null ) { return hmdPoseLeftEye; - } else if(vrsystemFunctions == null) { - return Matrix4f.IDENTITY; } else { - HmdMatrix34_t mat = vrsystemFunctions.GetEyeToHeadTransform.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left); + HmdMatrix34 mat = HmdMatrix34.create(); + VRSystem.VRSystem_GetEyeToHeadTransform(VR.EVREye_Eye_Left, mat); hmdPoseLeftEye = new Matrix4f(); return VRUtil.convertSteamVRMatrix3ToMatrix4f(mat, hmdPoseLeftEye); } } + + @Override + public Matrix4f getHMDMatrixPoseRightEye(){ + if( hmdPoseRightEye != null ) { + return hmdPoseRightEye; + } else { + HmdMatrix34 mat = HmdMatrix34.create(); + VRSystem.VRSystem_GetEyeToHeadTransform(VR.EVREye_Eye_Right, mat); + hmdPoseRightEye = new Matrix4f(); + return VRUtil.convertSteamVRMatrix3ToMatrix4f(mat, hmdPoseRightEye); + } + } + @Override public HmdType getType() { - if( vrsystemFunctions != null ) { - Pointer str1 = new Memory(128); - Pointer str2 = new Memory(128); String completeName = ""; - vrsystemFunctions.GetStringTrackedDeviceProperty.apply(JOpenVRLibrary.k_unTrackedDeviceIndex_Hmd, - JOpenVRLibrary.ETrackedDeviceProperty.ETrackedDeviceProperty_Prop_ManufacturerName_String, - str1, 128, hmdErrorStore); - if( hmdErrorStore.getValue() == 0 ) completeName += str1.getString(0); - vrsystemFunctions.GetStringTrackedDeviceProperty.apply(JOpenVRLibrary.k_unTrackedDeviceIndex_Hmd, - JOpenVRLibrary.ETrackedDeviceProperty.ETrackedDeviceProperty_Prop_ModelNumber_String, - str2, 128, hmdErrorStore); - if( hmdErrorStore.getValue() == 0 ) completeName += " " + str2.getString(0); + String name = VRSystem.VRSystem_GetStringTrackedDeviceProperty(VR.k_unTrackedDeviceIndex_Hmd, + VR.ETrackedDeviceProperty_Prop_ManufacturerName_String, + 128, hmdErrorStore); + if( hmdErrorStore.get(0) == 0 ) completeName += name; + String number = VRSystem.VRSystem_GetStringTrackedDeviceProperty(VR.k_unTrackedDeviceIndex_Hmd, + VR.ETrackedDeviceProperty_Prop_ModelNumber_String, + 128, hmdErrorStore); + if( hmdErrorStore.get(0) == 0 ) completeName += " " + number; if( 
completeName.length() > 0 ) { completeName = completeName.toLowerCase(Locale.ENGLISH).trim(); if( completeName.contains("htc") || completeName.contains("vive") ) { @@ -545,22 +413,21 @@ public HmdType getType() { } else if( completeName.contains("null") ) { return HmdType.NULL; } - } - } else return HmdType.NONE; + } return HmdType.OTHER; } - @Override - public Matrix4f getHMDMatrixPoseRightEye(){ - if( hmdPoseRightEye != null ) { - return hmdPoseRightEye; - } else if(vrsystemFunctions == null) { - return Matrix4f.IDENTITY; + public void setTrackingSpace(boolean isSeated){ + if( isSeated) { + VRCompositor.VRCompositor_SetTrackingSpace(VR.ETrackingUniverseOrigin_TrackingUniverseSeated); } else { - HmdMatrix34_t mat = vrsystemFunctions.GetEyeToHeadTransform.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right); - hmdPoseRightEye = new Matrix4f(); - return VRUtil.convertSteamVRMatrix3ToMatrix4f(mat, hmdPoseRightEye); + VRCompositor.VRCompositor_SetTrackingSpace(VR.ETrackingUniverseOrigin_TrackingUniverseStanding); } } - + + + public Matrix4f[] getPoseMatrices() { + return poseMatrices; + } + } diff --git a/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVRBounds.java b/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVRBounds.java index b487a5f22f..95613e3902 100644 --- a/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVRBounds.java +++ b/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVRBounds.java @@ -1,10 +1,10 @@ package com.jme3.input.vr.openvr; +import com.jme3.input.vr.VRAPI; import com.jme3.input.vr.VRBounds; import com.jme3.math.Vector2f; -import com.jme3.system.jopenvr.JOpenVRLibrary; -import com.jme3.system.jopenvr.VR_IVRChaperone_FnTable; -import com.sun.jna.ptr.FloatByReference; +import com.jme3.util.BufferUtils; +import java.nio.FloatBuffer; import java.util.logging.Logger; @@ -17,33 +17,27 @@ public class OpenVRBounds implements VRBounds { private static Logger logger = Logger.getLogger(OpenVRBounds.class.getName()); - private VR_IVRChaperone_FnTable vrChaperone; private Vector2f playSize; + private boolean setup = false; /** * Initialize the VR bounds. * @return true if the initialization is a success and false otherwise. 
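     * The play area size is queried once through VRChaperone_GetPlayAreaSize and cached afterwards.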
*/ - public boolean init(OpenVR api) { + public boolean init(VRAPI api) { logger.config("Initialize VR bounds..."); - if( vrChaperone == null ) { - vrChaperone = new VR_IVRChaperone_FnTable(JOpenVRLibrary.VR_GetGenericInterface(JOpenVRLibrary.IVRChaperone_Version, api.hmdErrorStore).getPointer()); - if( vrChaperone != null ) { - vrChaperone.setAutoSynch(false); - vrChaperone.read(); - FloatByReference fbX = new FloatByReference(); - FloatByReference fbZ = new FloatByReference(); - vrChaperone.GetPlayAreaSize.apply(fbX, fbZ); - playSize = new Vector2f(fbX.getValue(), fbZ.getValue()); - - logger.config("Initialize VR bounds [SUCCESS]"); - return true; // init success - } - - logger.warning("Initialize VR bounds [FAILED]."); - return false; // failed to init + if( !setup ) { +// vrChaperone = new VR_IVRChaperone_FnTable(JOpenVRLibrary.VR_GetGenericInterface(JOpenVRLibrary.IVRChaperone_Version, api.hmdErrorStore).getPointer()); + FloatBuffer fbX = BufferUtils.createFloatBuffer(1); + FloatBuffer fbZ = BufferUtils.createFloatBuffer(1); + org.lwjgl.openvr.VRChaperone.VRChaperone_GetPlayAreaSize(fbX, fbZ); + + playSize = new Vector2f(fbX.get(0), fbZ.get(0)); + setup = true; + logger.config("Initialize VR bounds [SUCCESS]"); + return true; // init success } logger.config("Initialize VR bounds already done."); diff --git a/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVRInput.java b/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVRInput.java index 08c697fa2e..dcd8a474f3 100644 --- a/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVRInput.java +++ b/jme3-vr/src/main/java/com/jme3/input/vr/openvr/OpenVRInput.java @@ -7,23 +7,25 @@ import java.util.ArrayList; import java.util.List; -import java.util.logging.Level; import java.util.logging.Logger; import com.jme3.app.VREnvironment; import com.jme3.input.vr.VRInputAPI; import com.jme3.input.vr.VRInputType; import com.jme3.input.vr.VRTrackedController; +import com.jme3.input.vr.VRViewManager; import com.jme3.math.Quaternion; import com.jme3.math.Vector2f; import com.jme3.math.Vector3f; import com.jme3.renderer.Camera; import com.jme3.scene.Spatial; -import com.jme3.system.jopenvr.JOpenVRLibrary; -import com.jme3.system.jopenvr.OpenVRUtil; -import com.jme3.system.jopenvr.VRControllerState_t; -import com.jme3.system.jopenvr.VR_IVRSystem_FnTable; import com.jme3.util.VRUtil; +import java.nio.IntBuffer; +import org.lwjgl.BufferUtils; +import org.lwjgl.openvr.HmdVector3; +import org.lwjgl.openvr.VR; +import org.lwjgl.openvr.VRControllerState; +import org.lwjgl.openvr.VRSystem; /* make helper functions to pull the following easily from raw data (DONE) @@ -60,92 +62,97 @@ make helper functions to pull the following easily from raw data (DONE) Controller#1, Axis#4 X: 0.0, Y: 0.0 Button press: 2, touch: 2 -*/ - + */ /** - * A class that wraps an OpenVR input.
- * null values will be returned if no valid pose exists, or that input device isn't available - * user code should check for null values. - * @author reden - phr00t - https://github.com/phr00t - * @author Julien Seinturier - COMEX SA - http://www.seinturier.fr + * A class that wraps an + * OpenVR + * input.
+ * null values will be returned if no valid pose exists, or that + * input device isn't available user code should check for null + * values. + * + * @author reden - phr00t + * @author Julien Seinturier - COMEX SA - + * http://www.seinturier.fr */ public class OpenVRInput implements VRInputAPI { - - private static final Logger logger = Logger.getLogger(OpenVRInput.class.getName()); - - private final VRControllerState_t[] cStates = new VRControllerState_t[JOpenVRLibrary.k_unMaxTrackedDeviceCount]; - - private final Quaternion[] rotStore = new Quaternion[JOpenVRLibrary.k_unMaxTrackedDeviceCount]; - - private final Vector3f[] posStore = new Vector3f[JOpenVRLibrary.k_unMaxTrackedDeviceCount]; - - private static final int[] controllerIndex = new int[JOpenVRLibrary.k_unMaxTrackedDeviceCount]; - + + private static final Logger logger = Logger.getLogger(OpenVRInput.class.getName()); + + private final VRControllerState[] cStates = new VRControllerState[VR.k_unMaxTrackedDeviceCount]; + + private final Quaternion[] rotStore = new Quaternion[VR.k_unMaxTrackedDeviceCount]; + + private final Vector3f[] posStore = new Vector3f[VR.k_unMaxTrackedDeviceCount]; + + private static final int[] controllerIndex = new int[VR.k_unMaxTrackedDeviceCount]; + private int controllerCount = 0; - + private final Vector2f tempAxis = new Vector2f(), temp2Axis = new Vector2f(); - - private final Vector2f lastCallAxis[] = new Vector2f[JOpenVRLibrary.k_unMaxTrackedDeviceCount]; - - private final boolean needsNewVelocity[] = new boolean[JOpenVRLibrary.k_unMaxTrackedDeviceCount]; - - private final boolean needsNewAngVelocity[] = new boolean[JOpenVRLibrary.k_unMaxTrackedDeviceCount]; - - private final boolean buttonDown[][] = new boolean[JOpenVRLibrary.k_unMaxTrackedDeviceCount][16]; - + + private final Vector2f lastCallAxis[] = new Vector2f[VR.k_unMaxTrackedDeviceCount]; + + private final boolean buttonDown[][] = new boolean[VR.k_unMaxTrackedDeviceCount][16]; + private float axisMultiplier = 1f; - + private final Vector3f tempVel = new Vector3f(); - + private final Quaternion tempq = new Quaternion(); - private VREnvironment environment; + private final VREnvironment environment; private List trackedControllers = null; - + /** - * Create a new OpenVR input attached to the given VR environment. + * Create a new + * OpenVR + * input attached to the given VR environment. + * * @param environment the VR environment to which the input is attached. 
*/ - public OpenVRInput(VREnvironment environment){ - this.environment = environment; + public OpenVRInput(VREnvironment environment) { + this.environment = environment; } - + @Override public float getAxisMultiplier() { return axisMultiplier; } - + @Override public void setAxisMultiplier(float set) { axisMultiplier = set; } - + @Override public void swapHands() { - if( controllerCount != 2 ) return; + if (controllerCount != 2) { + return; + } int temp = controllerIndex[0]; controllerIndex[0] = controllerIndex[1]; controllerIndex[1] = temp; } - + @Override public boolean isButtonDown(int controllerIndex, VRInputType checkButton) { - VRControllerState_t cs = cStates[OpenVRInput.controllerIndex[controllerIndex]]; - switch( checkButton ) { + VRControllerState cs = cStates[OpenVRInput.controllerIndex[controllerIndex]]; + switch (checkButton) { default: return false; case ViveGripButton: - return (cs.ulButtonPressed & 4) != 0; + return (cs.ulButtonPressed() & 4) != 0; case ViveMenuButton: - return (cs.ulButtonPressed & 2) != 0; + return (cs.ulButtonPressed() & 2) != 0; case ViveTrackpadAxis: - return (cs.ulButtonPressed & 4294967296l) != 0; + return (cs.ulButtonPressed() & 4294967296l) != 0; case ViveTriggerAxis: - return (cs.ulButtonPressed & 8589934592l) != 0; + return (cs.ulButtonPressed() & 8589934592l) != 0; } } - + @Override public boolean wasButtonPressedSinceLastCall(int controllerIndex, VRInputType checkButton) { boolean buttonDownNow = isButtonDown(controllerIndex, checkButton); @@ -155,27 +162,27 @@ public boolean wasButtonPressedSinceLastCall(int controllerIndex, VRInputType ch buttonDown[cIndex][checkButtonValue] = buttonDownNow; return retval; } - + @Override public void resetInputSinceLastCall() { - for(int i=0;i 0) && (index < trackedControllers.size())){ - return trackedControllers.get(index); - } - } - - return null; + public VRTrackedController getTrackedController(int index) { + if (trackedControllers != null) { + if ((trackedControllers.size() > 0) && (index < trackedControllers.size())) { + return trackedControllers.get(index); + } + } + + return null; } - + @Override public int getTrackedControllerCount() { return controllerCount; } - + @Override - public VRControllerState_t getRawControllerState(int index) { - if( isInputDeviceTracking(index) == false ) return null; + public VRControllerState getRawControllerState(int index) { + if (isInputDeviceTracking(index) == false) { + return null; + } return cStates[controllerIndex[index]]; } - - //public Matrix4f getPoseForInputDevice(int index) { - // if( isInputDeviceTracking(index) == false ) return null; - // return OpenVR.poseMatrices[controllerIndex[index]]; - //} - + @Override public boolean isInputFocused() { - if (environment != null){ - return ((VR_IVRSystem_FnTable)environment.getVRHardware().getVRSystem()).IsInputFocusCapturedByAnotherProcess.apply() == 0; + // not a 100% match, but the closest i can find in lwjgl. Doc seems to confirm this too. 
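+            // IsInputFocusCapturedByAnotherProcess() was deprecated upstream; VRSystem_IsInputAvailable() returning
+            // true corresponds to the old call returning 0 (no other process has captured the input focus).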
+ return VRSystem.VRSystem_IsInputAvailable(); + //return ((VR_IVRSystem_FnTable)environment.getVRHardware().getVRSystem()).IsInputFocusCapturedByAnotherProcess.apply() == 0; } else { throw new IllegalStateException("VR input is not attached to a VR environment."); } } - + @Override public boolean isInputDeviceTracking(int index) { - if( index < 0 || index >= controllerCount ){ - return false; + if (index < 0 || index >= controllerCount) { + return false; + } + + if (environment != null) { + + if (environment.getVRHardware() instanceof OpenVR) { + return ((OpenVR) environment.getVRHardware()).hmdTrackedDevicePoses[controllerIndex[index]].bPoseIsValid(); + } else { + throw new IllegalStateException("VR hardware " + environment.getVRHardware().getClass().getSimpleName() + " is not a subclass of " + OpenVR.class.getSimpleName()); + } + } else { + throw new IllegalStateException("VR input is not attached to a VR environment."); } - - if (environment != null){ - - if (environment.getVRHardware() instanceof OpenVR){ - return ((OpenVR)environment.getVRHardware()).hmdTrackedDevicePoses[controllerIndex[index]].bPoseIsValid != 0; - } else { - throw new IllegalStateException("VR hardware "+environment.getVRHardware().getClass().getSimpleName()+" is not a subclass of "+OpenVR.class.getSimpleName()); - } - } else { - throw new IllegalStateException("VR input is not attached to a VR environment."); - } } - + @Override public Quaternion getOrientation(int index) { - if( isInputDeviceTracking(index) == false ){ - return null; + if (isInputDeviceTracking(index) == false) { + return null; + } + + if (environment != null) { + + if (environment.getVRHardware() instanceof OpenVR) { + index = controllerIndex[index]; + VRUtil.convertMatrix4toQuat(((OpenVR) environment.getVRHardware()).poseMatrices[index], rotStore[index]); + return rotStore[index]; + } else { + throw new IllegalStateException("VR hardware " + environment.getVRHardware().getClass().getSimpleName() + " is not a subclass of " + OpenVR.class.getSimpleName()); + } + } else { + throw new IllegalStateException("VR input is not attached to a VR environment."); } - - if (environment != null){ - - if (environment.getVRHardware() instanceof OpenVR){ - index = controllerIndex[index]; - VRUtil.convertMatrix4toQuat(((OpenVR)environment.getVRHardware()).poseMatrices[index], rotStore[index]); - return rotStore[index]; - } else { - throw new IllegalStateException("VR hardware "+environment.getVRHardware().getClass().getSimpleName()+" is not a subclass of "+OpenVR.class.getSimpleName()); - } - } else { - throw new IllegalStateException("VR input is not attached to a VR environment."); - } } @Override public Vector3f getPosition(int index) { - if( isInputDeviceTracking(index) == false ){ - return null; + if (isInputDeviceTracking(index) == false) { + return null; + } + + if (environment != null) { + + if (environment.getVRHardware() instanceof OpenVR) { + // the hmdPose comes in rotated funny, fix that here + index = controllerIndex[index]; + ((OpenVR) environment.getVRHardware()).poseMatrices[index].toTranslationVector(posStore[index]); + posStore[index].x = -posStore[index].x; + posStore[index].z = -posStore[index].z; + return posStore[index]; + } else { + throw new IllegalStateException("VR hardware " + environment.getVRHardware().getClass().getSimpleName() + " is not a subclass of " + OpenVR.class.getSimpleName()); + } + } else { + throw new IllegalStateException("VR input is not attached to a VR environment."); } - - if (environment != null){ - - if 
(environment.getVRHardware() instanceof OpenVR){ - // the hmdPose comes in rotated funny, fix that here - index = controllerIndex[index]; - ((OpenVR)environment.getVRHardware()).poseMatrices[index].toTranslationVector(posStore[index]); - posStore[index].x = -posStore[index].x; - posStore[index].z = -posStore[index].z; - return posStore[index]; - } else { - throw new IllegalStateException("VR hardware "+environment.getVRHardware().getClass().getSimpleName()+" is not a subclass of "+OpenVR.class.getSimpleName()); - } - } else { - throw new IllegalStateException("VR input is not attached to a VR environment."); - } - } - + @Override public Quaternion getFinalObserverRotation(int index) { - - if (environment != null){ - OpenVRViewManager vrvm = (OpenVRViewManager)environment.getVRViewManager(); - - if (vrvm != null){ - if(isInputDeviceTracking(index) == false ){ - return null; + + if (environment != null) { + VRViewManager vrvm = environment.getVRViewManager(); + + if (vrvm != null) { + if (isInputDeviceTracking(index) == false) { + return null; } - + Object obs = environment.getObserver(); - if( obs instanceof Camera ) { - tempq.set(((Camera)obs).getRotation()); + if (obs instanceof Camera) { + tempq.set(((Camera) obs).getRotation()); } else { - tempq.set(((Spatial)obs).getWorldRotation()); + tempq.set(((Spatial) obs).getWorldRotation()); } - + return tempq.multLocal(getOrientation(index)); } else { - throw new IllegalStateException("VR environment has no valid view manager."); + throw new IllegalStateException("VR environment has no valid view manager."); } - - } else { - throw new IllegalStateException("VR input is not attached to a VR environment."); - } + } else { + throw new IllegalStateException("VR input is not attached to a VR environment."); + } } - - @Override + + @Override public Vector3f getFinalObserverPosition(int index) { - - if (environment != null){ - OpenVRViewManager vrvm = (OpenVRViewManager)environment.getVRViewManager(); - - if (vrvm != null){ - if(isInputDeviceTracking(index) == false ){ - return null; + + if (environment != null) { + VRViewManager vrvm = (VRViewManager) environment.getVRViewManager(); + + if (vrvm != null) { + if (isInputDeviceTracking(index) == false) { + return null; } Object obs = environment.getObserver(); Vector3f pos = getPosition(index); - if( obs instanceof Camera ) { - ((Camera)obs).getRotation().mult(pos, pos); - return pos.addLocal(((Camera)obs).getLocation()); + if (obs instanceof Camera) { + ((Camera) obs).getRotation().mult(pos, pos); + return pos.addLocal(((Camera) obs).getLocation()); } else { - ((Spatial)obs).getWorldRotation().mult(pos, pos); - return pos.addLocal(((Spatial)obs).getWorldTranslation()); + ((Spatial) obs).getWorldRotation().mult(pos, pos); + return pos.addLocal(((Spatial) obs).getWorldTranslation()); } } else { - throw new IllegalStateException("VR environment has no valid view manager."); + throw new IllegalStateException("VR environment has no valid view manager."); } - - } else { - throw new IllegalStateException("VR input is not attached to a VR environment."); - } - } - + + } else { + throw new IllegalStateException("VR input is not attached to a VR environment."); + } + } + @Override public void triggerHapticPulse(int controllerIndex, float seconds) { - if( environment.isInVR() == false || isInputDeviceTracking(controllerIndex) == false ){ - return; + if (environment.isInVR() == false || isInputDeviceTracking(controllerIndex) == false) { + return; } - + // apparently only axis ID of 0 works - 
((VR_IVRSystem_FnTable)environment.getVRHardware().getVRSystem()).TriggerHapticPulse.apply(OpenVRInput.controllerIndex[controllerIndex], - 0, (short)Math.round(3f * seconds / 1e-3f)); + VRSystem.VRSystem_TriggerHapticPulse(OpenVRInput.controllerIndex[controllerIndex], + 0, (short) Math.round(3f * seconds / 1e-3f)); } - + @Override public void updateConnectedControllers() { - logger.config("Updating connected controllers."); - - if (environment != null){ - controllerCount = 0; - for(int i=0;i(JOpenVRLibrary.k_unMaxTrackedDeviceCount); - } - trackedControllers.add(new OpenVRTrackedController(i, this, controllerName, manufacturerName, environment)); - - // Send a Haptic pulse to the controller - triggerHapticPulse(controllerCount, 1.0f); - - controllerCount++; - logger.config(" Tracked controller "+(i+1)+"/"+JOpenVRLibrary.k_unMaxTrackedDeviceCount+" "+controllerName+" ("+manufacturerName+") attached."); - } else { - logger.config(" Controller "+(i+1)+"/"+JOpenVRLibrary.k_unMaxTrackedDeviceCount+" ignored."); - } - } - } else { - throw new IllegalStateException("VR input is not attached to a VR environment."); - } + logger.config("Updating connected controllers."); + + if (environment != null) { + controllerCount = 0; + for (int i = 0; i < VR.k_unMaxTrackedDeviceCount; i++) { + int classCallback = VRSystem.VRSystem_GetTrackedDeviceClass(i); + if (classCallback == VR.ETrackedDeviceClass_TrackedDeviceClass_Controller || classCallback == VR.ETrackedDeviceClass_TrackedDeviceClass_GenericTracker) { + IntBuffer error = BufferUtils.createIntBuffer(1); + String controllerName = "Unknown"; + String manufacturerName = "Unknown"; + controllerName = VRSystem.VRSystem_GetStringTrackedDeviceProperty(i, VR.ETrackedDeviceProperty_Prop_TrackingSystemName_String, error); + manufacturerName = VRSystem.VRSystem_GetStringTrackedDeviceProperty(i, VR.ETrackedDeviceProperty_Prop_ManufacturerName_String, error); + + if (error.get(0) != 0) { + logger.warning("Error getting controller information " + controllerName + " " + manufacturerName + "Code (" + error.get(0) + ")"); + } + controllerIndex[controllerCount] = i; + + // Adding tracked controller to control. + if (trackedControllers == null) { + trackedControllers = new ArrayList(VR.k_unMaxTrackedDeviceCount); + } + trackedControllers.add(new OpenVRTrackedController(i, this, controllerName, manufacturerName, environment)); + + // Send a Haptic pulse to the controller + triggerHapticPulse(controllerCount, 1.0f); + + controllerCount++; + logger.config(" Tracked controller " + (i + 1) + "/" + VR.k_unMaxTrackedDeviceCount + " " + controllerName + " (" + manufacturerName + ") attached."); + } else { + logger.config(" Controller " + (i + 1) + "/" + VR.k_unMaxTrackedDeviceCount + " ignored."); + } + } + } else { + throw new IllegalStateException("VR input is not attached to a VR environment."); + } } @Override public void updateControllerStates() { - - if (environment != null){ - for(int i=0;ihttp://www.seinturier.fr + * A VR view manager based on OpenVR. This class enable to submit 3D views to + * the VR compositor. 
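+ * Eye textures are handed over to the compositor in postRender() through VRCompositor_Submit().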
+ * + * @author reden - phr00t + * @author Julien Seinturier - COMEX SA - + * http://www.seinturier.fr */ public class OpenVRViewManager extends AbstractVRViewManager { - private static final Logger logger = Logger.getLogger(OpenVRViewManager.class.getName()); + private static final Logger logger = Logger.getLogger(OpenVRViewManager.class.getName()); // OpenVR values - private VRTextureBounds_t leftTextureBounds; - private Texture_t leftTextureType; - - private VRTextureBounds_t rightTextureBounds; - private Texture_t rightTextureType; + private VRTextureBounds leftTextureBounds; + private Texture leftTextureType; + + private VRTextureBounds rightTextureBounds; + private Texture rightTextureType; private Texture2D dualEyeTex; - + //final & temp values for camera calculations - private final Vector3f finalPosition = new Vector3f(); + private final Vector3f finalPosition = new Vector3f(); private final Quaternion finalRotation = new Quaternion(); - private final Vector3f hmdPos = new Vector3f(); - private final Quaternion hmdRot = new Quaternion(); - + private final Vector3f hmdPos = new Vector3f(); + private final Quaternion hmdRot = new Quaternion(); + /** - * Create a new VR view manager attached to the given {@link VREnvironment VR environment}. - * @param environment the {@link VREnvironment VR environment} to which this view manager is attached. + * Create a new VR view manager attached to the given + * {@link VREnvironment VR environment}. + * + * @param environment the {@link VREnvironment VR environment} to which this + * view manager is attached. */ - public OpenVRViewManager(VREnvironment environment){ - this.environment = environment; + public OpenVRViewManager(VREnvironment environment) { + this.environment = environment; } - + /** * Get the identifier of the left eye texture. + * * @return the identifier of the left eye texture. * @see #getRightTexId() * @see #getFullTexId() */ protected int getLeftTexId() { - return (int)getLeftTexture().getImage().getId(); + return (int) getLeftTexture().getImage().getId(); } - + /** * Get the identifier of the right eye texture. + * * @return the identifier of the right eye texture. * @see #getLeftTexId() * @see #getFullTexId() */ protected int getRightTexId() { - return (int)getRightTexture().getImage().getId(); + return (int) getRightTexture().getImage().getId(); } - + /** * Get the identifier of the full (dual eye) texture. + * * @return the identifier of the full (dual eye) texture. * @see #getLeftTexId() * @see #getRightTexId() */ private int getFullTexId() { - return (int)dualEyeTex.getImage().getId(); + return (int) dualEyeTex.getImage().getId(); } - + /** * Initialize the system binds of the textures. 
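     * Texture handles start out as -1 here and are filled in later, in postRender(), once the eye textures have valid OpenGL ids.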
*/ private void initTextureSubmitStructs() { - leftTextureType = new Texture_t(); - rightTextureType = new Texture_t(); - - if (environment != null){ - if( environment.getVRHardware() instanceof OpenVR ) { - leftTextureBounds = new VRTextureBounds_t(); - rightTextureBounds = new VRTextureBounds_t(); + leftTextureType = Texture.create(); + rightTextureType = Texture.create(); + + if (environment != null) { + if (environment.getVRHardware() instanceof OpenVR) { + leftTextureBounds = VRTextureBounds.create(); + rightTextureBounds = VRTextureBounds.create(); // left eye - leftTextureBounds.uMax = 0.5f; - leftTextureBounds.uMin = 0f; - leftTextureBounds.vMax = 1f; - leftTextureBounds.vMin = 0f; - leftTextureBounds.setAutoSynch(false); - leftTextureBounds.setAutoRead(false); - leftTextureBounds.setAutoWrite(false); - leftTextureBounds.write(); + leftTextureBounds.set(0f, 0f, 0.5f, 1f); // right eye - rightTextureBounds.uMax = 1f; - rightTextureBounds.uMin = 0.5f; - rightTextureBounds.vMax = 1f; - rightTextureBounds.vMin = 0f; - rightTextureBounds.setAutoSynch(false); - rightTextureBounds.setAutoRead(false); - rightTextureBounds.setAutoWrite(false); - rightTextureBounds.write(); + rightTextureBounds.set(0.5f, 0f, 1f, 1f); // texture type - leftTextureType.eColorSpace = JOpenVRLibrary.EColorSpace.EColorSpace_ColorSpace_Gamma; - leftTextureType.eType = JOpenVRLibrary.ETextureType.ETextureType_TextureType_OpenGL; - leftTextureType.setAutoSynch(false); - leftTextureType.setAutoRead(false); - leftTextureType.setAutoWrite(false); - leftTextureType.handle = -1; - rightTextureType.eColorSpace = JOpenVRLibrary.EColorSpace.EColorSpace_ColorSpace_Gamma; - rightTextureType.eType = JOpenVRLibrary.ETextureType.ETextureType_TextureType_OpenGL; - rightTextureType.setAutoSynch(false); - rightTextureType.setAutoRead(false); - rightTextureType.setAutoWrite(false); - rightTextureType.handle = -1; - - - logger.config("Init eyes native texture binds"); - logger.config(" Left eye texture"); - logger.config(" address: "+leftTextureType.getPointer()); - logger.config(" size: "+leftTextureType.size()+" bytes"); - logger.config(" color space: "+OpenVRUtil.getEColorSpaceString(leftTextureType.eColorSpace)); - logger.config(" type: "+OpenVRUtil.getETextureTypeString(leftTextureType.eType)); - logger.config(" auto read: "+leftTextureType.getAutoRead()); - logger.config(" auto write: "+leftTextureType.getAutoWrite()); - logger.config(" handle address: "+leftTextureType.handle); - logger.config(" handle value: "+leftTextureType.handle); - logger.config(""); - logger.config(" Right eye texture"); - logger.config(" address: "+rightTextureType.getPointer()); - logger.config(" size: "+rightTextureType.size()+" bytes"); - logger.config(" color space: "+OpenVRUtil.getEColorSpaceString(rightTextureType.eColorSpace)); - logger.config(" type: "+OpenVRUtil.getETextureTypeString(rightTextureType.eType)); - logger.config(" auto read: "+rightTextureType.getAutoRead()); - logger.config(" auto write: "+rightTextureType.getAutoWrite()); - logger.config(" handle address: "+rightTextureType.handle); - logger.config(" handle value: "+rightTextureType.handle); + leftTextureType.set(-1, VR.ETextureType_TextureType_OpenGL, VR.EColorSpace_ColorSpace_Gamma); + rightTextureType.set(-1, VR.ETextureType_TextureType_OpenGL, VR.EColorSpace_ColorSpace_Gamma); + } } else { - throw new IllegalStateException("This VR view manager is not attached to any VR environment."); - } + throw new IllegalStateException("This VR view manager is not attached to any VR 
environment."); + } } - + @Override + /** + * updatePose can be called here because appstates are always called before the main renderer. This way we get the latest pose close to when it's supposed to render + */ public void render() { - + if (environment != null) { + // grab the observer + Object obs = environment.getObserver(); + Quaternion objRot; + Vector3f objPos; + if (obs instanceof Camera) { + objRot = ((Camera) obs).getRotation(); + objPos = ((Camera) obs).getLocation(); + } else { + objRot = ((Spatial) obs).getWorldRotation(); + objPos = ((Spatial) obs).getWorldTranslation(); + } + // grab the hardware handle + VRAPI dev = environment.getVRHardware(); + if (dev != null) { + + // update the HMD's position & orientation + dev.updatePose(); + dev.getPositionAndOrientation(hmdPos, hmdRot); + + if (obs != null) { + // update hmdPos based on obs rotation + finalRotation.set(objRot); + finalRotation.mult(hmdPos, hmdPos); + finalRotation.multLocal(hmdRot); } - + + finalizeCamera(dev.getHMDVectorPoseLeftEye(), objPos, getLeftCamera()); + finalizeCamera(dev.getHMDVectorPoseRightEye(), objPos, getRightCamera()); + } else { + getLeftCamera().setFrame(objPos, objRot); + getRightCamera().setFrame(objPos, objRot); + } + } + } + @Override public void postRender() { - - if (environment != null){ - if( environment.isInVR() ) { + + if (environment != null) { + if (environment.isInVR()) { VRAPI api = environment.getVRHardware(); - if( api.getCompositor() != null ) { - // using the compositor... - int errl = 0, errr = 0; - if( environment.isInstanceRendering() ) { - if( leftTextureType.handle == -1 || leftTextureType.handle != getFullTexId() ) { - leftTextureType.handle = getFullTexId(); - if( leftTextureType.handle != -1 ) { - leftTextureType.write(); - } - } else { - if( api instanceof OpenVR ) { - int submitFlag = JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default; - errr = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, leftTextureType, rightTextureBounds, submitFlag); - errl = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left, leftTextureType, leftTextureBounds, submitFlag); - } - } - } else if( leftTextureType.handle == -1 || rightTextureType.handle == -1 || - leftTextureType.handle != getLeftTexId() || rightTextureType.handle != getRightTexId() ) { - leftTextureType.handle = getLeftTexId(); - if( leftTextureType.handle != -1 ) { - logger.fine("Writing Left texture to native memory at " + leftTextureType.getPointer()); - leftTextureType.write(); - } - rightTextureType.handle = getRightTexId(); - if( rightTextureType.handle != -1 ) { - logger.fine("Writing Right texture to native memory at " + leftTextureType.getPointer()); - rightTextureType.write(); - } + // using the compositor... 
+ int errl = 0, errr = 0; + if (environment.isInstanceRendering()) { + if (leftTextureType.handle() == -1 || leftTextureType.handle() != getFullTexId()) { + leftTextureType.set(getFullTexId(), leftTextureType.eType(), leftTextureType.eColorSpace()); } else { - if( api instanceof OpenVR ) { - errl = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left, leftTextureType, null, - JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default); - errr = ((OpenVR)api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, rightTextureType, null, - JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default); - } else { - + if (api instanceof OpenVR) { + int submitFlag = VR.EVRSubmitFlags_Submit_Default; + errr = VRCompositor.VRCompositor_Submit(VR.EVREye_Eye_Right, rightTextureType, rightTextureBounds, submitFlag); + errl = VRCompositor.VRCompositor_Submit(VR.EVREye_Eye_Left, leftTextureType, leftTextureBounds, submitFlag); } } - - if( errl != 0 ){ - logger.severe("Submit to left compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errl)+" ("+Integer.toString(errl)+")"); - logger.severe(" Texture color space: "+OpenVRUtil.getEColorSpaceString(leftTextureType.eColorSpace)); - logger.severe(" Texture type: "+OpenVRUtil.getETextureTypeString(leftTextureType.eType)); - logger.severe(" Texture handle: "+leftTextureType.handle); - - logger.severe(" Left eye texture "+leftEyeTexture.getName()+" ("+leftEyeTexture.getImage().getId()+")"); - logger.severe(" Type: "+leftEyeTexture.getType()); - logger.severe(" Size: "+leftEyeTexture.getImage().getWidth()+"x"+leftEyeTexture.getImage().getHeight()); - logger.severe(" Image depth: "+leftEyeTexture.getImage().getDepth()); - logger.severe(" Image format: "+leftEyeTexture.getImage().getFormat()); - logger.severe(" Image color space: "+leftEyeTexture.getImage().getColorSpace()); - - } - - if( errr != 0 ){ - logger.severe("Submit to right compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errl)+" ("+Integer.toString(errl)+")"); - logger.severe(" Texture color space: "+OpenVRUtil.getEColorSpaceString(rightTextureType.eColorSpace)); - logger.severe(" Texture type: "+OpenVRUtil.getETextureTypeString(rightTextureType.eType)); - logger.severe(" Texture handle: "+rightTextureType.handle); - - logger.severe(" Right eye texture "+rightEyeTexture.getName()+" ("+rightEyeTexture.getImage().getId()+")"); - logger.severe(" Type: "+rightEyeTexture.getType()); - logger.severe(" Size: "+rightEyeTexture.getImage().getWidth()+"x"+rightEyeTexture.getImage().getHeight()); - logger.severe(" Image depth: "+rightEyeTexture.getImage().getDepth()); - logger.severe(" Image format: "+rightEyeTexture.getImage().getFormat()); - logger.severe(" Image color space: "+rightEyeTexture.getImage().getColorSpace()); + } else if (leftTextureType.handle() == -1 || rightTextureType.handle() == -1 + || leftTextureType.handle() != getLeftTexId() || rightTextureType.handle() != getRightTexId()) { + leftTextureType.set(getLeftTexId(), leftTextureType.eType(), leftTextureType.eColorSpace()); + rightTextureType.set(getRightTexId(), leftTextureType.eType(), leftTextureType.eColorSpace()); + } else { + if (api instanceof OpenVR) { + int submitFlag = VR.EVRSubmitFlags_Submit_Default; + errr = VRCompositor.VRCompositor_Submit(VR.EVREye_Eye_Right, rightTextureType, null, submitFlag); + errl = VRCompositor.VRCompositor_Submit(VR.EVREye_Eye_Left, leftTextureType, null, submitFlag); + } else { + } } + + if (errl != 0) { + logger.severe("Submit to left 
compositor error: " + " (" + Integer.toString(errl) + ")"); + logger.severe(" Texture handle: " + leftTextureType.handle()); + + logger.severe(" Left eye texture " + leftEyeTexture.getName() + " (" + leftEyeTexture.getImage().getId() + ")"); + logger.severe(" Type: " + leftEyeTexture.getType()); + logger.severe(" Size: " + leftEyeTexture.getImage().getWidth() + "x" + leftEyeTexture.getImage().getHeight()); + logger.severe(" Image depth: " + leftEyeTexture.getImage().getDepth()); + logger.severe(" Image format: " + leftEyeTexture.getImage().getFormat()); + logger.severe(" Image color space: " + leftEyeTexture.getImage().getColorSpace()); + + } + + if (errr != 0) { + logger.severe("Submit to right compositor error: " + " (" + Integer.toString(errl) + ")"); +// logger.severe(" Texture color space: "+OpenVRUtil.getEColorSpaceString(rightTextureType.eColorSpace)); +// logger.severe(" Texture type: "+OpenVRUtil.getETextureTypeString(rightTextureType.eType)); + logger.severe(" Texture handle: " + rightTextureType.handle()); + + logger.severe(" Right eye texture " + rightEyeTexture.getName() + " (" + rightEyeTexture.getImage().getId() + ")"); + logger.severe(" Type: " + rightEyeTexture.getType()); + logger.severe(" Size: " + rightEyeTexture.getImage().getWidth() + "x" + rightEyeTexture.getImage().getHeight()); + logger.severe(" Image depth: " + rightEyeTexture.getImage().getDepth()); + logger.severe(" Image format: " + rightEyeTexture.getImage().getFormat()); + logger.severe(" Image color space: " + rightEyeTexture.getImage().getColorSpace()); + } } - } else { - throw new IllegalStateException("This VR view manager is not attached to any VR environment."); - } - - - - } + } else { + throw new IllegalStateException("This VR view manager is not attached to any VR environment."); + } + VRCompositor.VRCompositor_PostPresentHandoff(); + + } @Override - public void initialize() { - - logger.config("Initializing VR view manager."); - - if (environment != null){ - - initTextureSubmitStructs(); - setupCamerasAndViews(); - setupVRScene(); - moveScreenProcessingToEyes(); - - if( environment.hasTraditionalGUIOverlay() ) { - - environment.getVRMouseManager().initialize(); - - // update the pose to position the gui correctly on start - update(0f); - environment.getVRGUIManager().positionGui(); - } - - logger.config("Initialized VR view manager [SUCCESS]"); - + public void initialize() { + + logger.config("Initializing VR view manager."); + + if (environment != null) { + + initTextureSubmitStructs(); + setupCamerasAndViews(); + setupVRScene(); + moveScreenProcessingToEyes(); + + if (environment.hasTraditionalGUIOverlay()) { + + environment.getVRMouseManager().initialize(); + + // update the pose to position the gui correctly on start + update(0f); + environment.getVRGUIManager().positionGui(); + } + + logger.config("Initialized VR view manager [SUCCESS]"); + } else { - throw new IllegalStateException("This VR view manager is not attached to any VR environment."); - } + throw new IllegalStateException("This VR view manager is not attached to any VR environment."); + } } - + /** - * Prepare the size of the given {@link Camera camera} to adapt it to the underlying rendering context. + * Prepare the size of the given {@link Camera camera} to adapt it to the + * underlying rendering context. + * * @param cam the {@link Camera camera} to prepare. * @param xMult the camera width multiplier. 
*/ private void prepareCameraSize(Camera cam, float xMult) { - - if (environment != null){ - - if (environment.getApplication() != null){ - Vector2f size = new Vector2f(); - VRAPI vrhmd = environment.getVRHardware(); - - if( vrhmd == null ) { - size.x = 1280f; - size.y = 720f; - } else { - vrhmd.getRenderSize(size); - } - - if( size.x < environment.getApplication().getContext().getSettings().getWidth() ) { - size.x = environment.getApplication().getContext().getSettings().getWidth(); - } - if( size.y < environment.getApplication().getContext().getSettings().getHeight() ) { - size.y = environment.getApplication().getContext().getSettings().getHeight(); - } - - if( environment.isInstanceRendering() ){ - size.x *= 2f; - } - - // other adjustments - size.x *= xMult; - size.x *= getResolutionMuliplier(); - size.y *= getResolutionMuliplier(); - - if( cam.getWidth() != size.x || cam.getHeight() != size.y ){ - cam.resize((int)size.x, (int)size.y, false); - } - } else { - throw new IllegalStateException("This VR environment is not attached to any application."); - } - - } else { - throw new IllegalStateException("This VR view manager is not attached to any VR environment."); - } - - + + if (environment != null) { + + if (environment.getApplication() != null) { + Vector2f size = new Vector2f(); + VRAPI vrhmd = environment.getVRHardware(); + + if (vrhmd == null) { + size.x = 1280f; + size.y = 720f; + } else { + vrhmd.getRenderSize(size); + } + + if (size.x < environment.getApplication().getContext().getSettings().getWidth()) { + size.x = environment.getApplication().getContext().getSettings().getWidth(); + } + if (size.y < environment.getApplication().getContext().getSettings().getHeight()) { + size.y = environment.getApplication().getContext().getSettings().getHeight(); + } + + if (environment.isInstanceRendering()) { + size.x *= 2f; + } + + // other adjustments + size.x *= xMult; + size.x *= getResolutionMuliplier(); + size.y *= getResolutionMuliplier(); + + if (cam.getWidth() != size.x || cam.getHeight() != size.y) { + cam.resize((int) size.x, (int) size.y, false); + } + } else { + throw new IllegalStateException("This VR environment is not attached to any application."); + } + + } else { + throw new IllegalStateException("This VR view manager is not attached to any VR environment."); + } + } - + /** * Replaces rootNode as the main cameras scene with the distortion mesh */ - private void setupVRScene(){ - - - if (environment != null){ - if (environment.getApplication() != null){ - // no special scene to setup if we are doing instancing - if( environment.isInstanceRendering() ) { - // distortion has to be done with compositor here... we want only one pass on our end! 
- if( environment.getApplication().getContext().getSettings().isSwapBuffers() ) { - setupMirrorBuffers(environment.getCamera(), dualEyeTex, true); - } - return; - } - - leftEyeTexture = (Texture2D) getLeftViewPort().getOutputFrameBuffer().getColorBuffer().getTexture(); - rightEyeTexture = (Texture2D)getRightViewPort().getOutputFrameBuffer().getColorBuffer().getTexture(); - leftEyeDepth = (Texture2D) getLeftViewPort().getOutputFrameBuffer().getDepthBuffer().getTexture(); - rightEyeDepth = (Texture2D)getRightViewPort().getOutputFrameBuffer().getDepthBuffer().getTexture(); - - // main viewport is either going to be a distortion scene or nothing - // mirroring is handled by copying framebuffers - Iterator spatialIter = environment.getApplication().getViewPort().getScenes().iterator(); - while(spatialIter.hasNext()){ - environment.getApplication().getViewPort().detachScene(spatialIter.next()); - } - - spatialIter = environment.getApplication().getGuiViewPort().getScenes().iterator(); - while(spatialIter.hasNext()){ - environment.getApplication().getGuiViewPort().detachScene(spatialIter.next()); - } - - // only setup distortion scene if compositor isn't running (or using custom mesh distortion option) - if( environment.getVRHardware().getCompositor() == null ) { - Node distortionScene = new Node(); - Material leftMat = new Material(environment.getApplication().getAssetManager(), "Common/MatDefs/VR/OpenVR.j3md"); - leftMat.setTexture("Texture", leftEyeTexture); - Geometry leftEye = new Geometry("box", setupDistortionMesh(JOpenVRLibrary.EVREye.EVREye_Eye_Left, environment.getVRHardware())); - leftEye.setMaterial(leftMat); - distortionScene.attachChild(leftEye); - - Material rightMat = new Material(environment.getApplication().getAssetManager(), "Common/MatDefs/VR/OpenVR.j3md"); - rightMat.setTexture("Texture", rightEyeTexture); - Geometry rightEye = new Geometry("box", setupDistortionMesh(JOpenVRLibrary.EVREye.EVREye_Eye_Right, environment.getVRHardware())); - rightEye.setMaterial(rightMat); - distortionScene.attachChild(rightEye); - - distortionScene.updateGeometricState(); - - environment.getApplication().getViewPort().attachScene(distortionScene); - - //if( useCustomDistortion ) setupFinalFullTexture(app.getViewPort().getCamera()); - } - - if( environment.getApplication().getContext().getSettings().isSwapBuffers() ) { - setupMirrorBuffers(environment.getCamera(), leftEyeTexture, false); - - } - } else { - throw new IllegalStateException("This VR environment is not attached to any application."); - } - } else { - throw new IllegalStateException("This VR view manager is not attached to any VR environment."); - } - } - - @Override - public void update(float tpf) { - - if (environment != null){ - // grab the observer - Object obs = environment.getObserver(); - Quaternion objRot; - Vector3f objPos; - if( obs instanceof Camera ) { - objRot = ((Camera)obs).getRotation(); - objPos = ((Camera)obs).getLocation(); - } else { - objRot = ((Spatial)obs).getWorldRotation(); - objPos = ((Spatial)obs).getWorldTranslation(); - } - // grab the hardware handle - VRAPI dev = environment.getVRHardware(); - if( dev != null ) { - + private void setupVRScene() { - // update the HMD's position & orientation - dev.updatePose(); - dev.getPositionAndOrientation(hmdPos, hmdRot); -/* - // TOREMOVE - Vector3f v = dev.getVRinput().getTrackedController(0).getPosition(); - Quaternion q = dev.getVRinput().getTrackedController(0).getOrientation(); - if ((v != null)&&(q != null)){ - hmdPos.set(v); - hmdRot.set(q); + if (environment 
!= null) { + if (environment.getApplication() != null) { + // no special scene to setup if we are doing instancing + if (environment.isInstanceRendering()) { + // distortion has to be done with compositor here... we want only one pass on our end! + if (environment.getApplication().getContext().getSettings().isSwapBuffers()) { + setupMirrorBuffers(environment.getCamera(), dualEyeTex, true); + } + return; } - - logger.severe("HMD controller "); - logger.severe(" Position "+hmdPos); - logger.severe(" Orientation "+hmdRot); - - VRTrackedController tc = null; - for(int i = 0; i < dev.getVRinput().getTrackedControllerCount(); i++){ - tc = dev.getVRinput().getTrackedController(i); - logger.severe("Tracked controller "+i+": "+tc.getControllerName()); - logger.severe(" Position "+tc.getPosition()); - logger.severe(" Orientation "+tc.getOrientation()); - logger.severe(""); + + leftEyeTexture = (Texture2D) getLeftViewPort().getOutputFrameBuffer().getColorBuffer().getTexture(); + rightEyeTexture = (Texture2D) getRightViewPort().getOutputFrameBuffer().getColorBuffer().getTexture(); + leftEyeDepth = (Texture2D) getLeftViewPort().getOutputFrameBuffer().getDepthBuffer().getTexture(); + rightEyeDepth = (Texture2D) getRightViewPort().getOutputFrameBuffer().getDepthBuffer().getTexture(); + + // main viewport is either going to be a distortion scene or nothing + // mirroring is handled by copying framebuffers + Iterator spatialIter = environment.getApplication().getViewPort().getScenes().iterator(); + while (spatialIter.hasNext()) { + environment.getApplication().getViewPort().detachScene(spatialIter.next()); } -*/ - // TOREMOVE - - if( obs != null ) { - // update hmdPos based on obs rotation - finalRotation.set(objRot); - finalRotation.mult(hmdPos, hmdPos); - finalRotation.multLocal(hmdRot); + + spatialIter = environment.getApplication().getGuiViewPort().getScenes().iterator(); + while (spatialIter.hasNext()) { + environment.getApplication().getGuiViewPort().detachScene(spatialIter.next()); + } + + // only setup distortion scene if compositor isn't running (or using custom mesh distortion option) + if (environment.getApplication().getContext().getSettings().isSwapBuffers()) { + setupMirrorBuffers(environment.getCamera(), leftEyeTexture, false); + } - - finalizeCamera(dev.getHMDVectorPoseLeftEye(), objPos, getLeftCamera()); - finalizeCamera(dev.getHMDVectorPoseRightEye(), objPos, getRightCamera()); } else { - getLeftCamera().setFrame(objPos, objRot); - getRightCamera().setFrame(objPos, objRot); + throw new IllegalStateException("This VR environment is not attached to any application."); } - - if( environment.hasTraditionalGUIOverlay() ) { + } else { + throw new IllegalStateException("This VR view manager is not attached to any VR environment."); + } + } + + @Override + public void update(float tpf) { + + if (environment != null) { + + if (environment.hasTraditionalGUIOverlay()) { // update the mouse? - environment.getVRMouseManager().update(tpf); - + environment.getVRMouseManager().update(tpf); + // update GUI position? 
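                // reposition the GUI when explicitly requested, or whenever the positioning mode is not MANUAL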
- if( environment.getVRGUIManager().isWantsReposition() || environment.getVRGUIManager().getPositioningMode() != VRGUIPositioningMode.MANUAL ) { - environment.getVRGUIManager().positionGuiNow(tpf); - environment.getVRGUIManager().updateGuiQuadGeometricState(); + if (environment.getVRGUIManager().isWantsReposition() || environment.getVRGUIManager().getPositioningMode() != VRGUIPositioningMode.MANUAL) { + environment.getVRGUIManager().positionGuiNow(tpf); + environment.getVRGUIManager().updateGuiQuadGeometricState(); } } - } else { + } else { throw new IllegalStateException("This VR view manager is not attached to any VR environment."); - } + } } - + /** * Place the camera within the scene. + * * @param eyePos the eye position. * @param obsPosition the observer position. * @param cam the camera to place. @@ -488,278 +392,187 @@ public void update(float tpf) { private void finalizeCamera(Vector3f eyePos, Vector3f obsPosition, Camera cam) { finalRotation.mult(eyePos, finalPosition); finalPosition.addLocal(hmdPos); - if( obsPosition != null ) finalPosition.addLocal(obsPosition); + if (obsPosition != null) { + finalPosition.addLocal(obsPosition); + } finalPosition.y += getHeightAdjustment(); cam.setFrame(finalPosition, finalRotation); } - - private void setupCamerasAndViews() { - - if (environment != null){ - // get desired frustum from original camera - Camera origCam = environment.getCamera(); + private void setupCamerasAndViews() { + + if (environment != null) { + // get desired frustum from original camera + Camera origCam = environment.getCamera(); float fFar = origCam.getFrustumFar(); float fNear = origCam.getFrustumNear(); - + // restore frustum on distortion scene cam, if needed - if( environment.isInstanceRendering() ) { + if (environment.isInstanceRendering()) { leftCamera = origCam; - } else if( environment.compositorAllowed() == false ) { + } else if (environment.compositorAllowed() == false) { origCam.setFrustumFar(100f); - origCam.setFrustumNear(1f); - leftCamera = origCam.clone(); + origCam.setFrustumNear(1f); + leftCamera = origCam.clone(); prepareCameraSize(origCam, 2f); } else { leftCamera = origCam.clone(); } - - getLeftCamera().setFrustumPerspective(environment.getDefaultFOV(), environment.getDefaultAspect(), fNear, fFar); - + + getLeftCamera().setFrustumPerspective(environment.getDefaultFOV(), environment.getDefaultAspect(), fNear, fFar); + prepareCameraSize(getLeftCamera(), 1f); - if( environment.getVRHardware() != null ) { - getLeftCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionLeftEye(getLeftCamera())); + if (environment.getVRHardware() != null) { + getLeftCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionLeftEye(getLeftCamera())); } //org.lwjgl.opengl.GL11.glEnable(org.lwjgl.opengl.GL30.GL_FRAMEBUFFER_SRGB); - - if( !environment.isInstanceRendering()) { + + if (!environment.isInstanceRendering()) { leftViewPort = setupViewBuffers(getLeftCamera(), LEFT_VIEW_NAME); rightCamera = getLeftCamera().clone(); - if( environment.getVRHardware() != null ){ - getRightCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionRightEye(getRightCamera())); + if (environment.getVRHardware() != null) { + getRightCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionRightEye(getRightCamera())); } rightViewPort = setupViewBuffers(getRightCamera(), RIGHT_VIEW_NAME); } else { - - if (environment.getApplication() != null){ - - logger.severe("THIS CODE NEED CHANGES !!!"); + + if 
(environment.getApplication() != null) { + + logger.severe("THIS CODE NEED CHANGES !!!"); leftViewPort = environment.getApplication().getViewPort(); //leftViewport.attachScene(app.getRootNode()); rightCamera = getLeftCamera().clone(); - if( environment.getVRHardware() != null ){ - getRightCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionRightEye(getRightCamera())); + if (environment.getVRHardware() != null) { + getRightCamera().setProjectionMatrix(environment.getVRHardware().getHMDMatrixProjectionRightEye(getRightCamera())); } - + org.lwjgl.opengl.GL11.glEnable(org.lwjgl.opengl.GL30.GL_CLIP_DISTANCE0); - + //FIXME: [jme-vr] Fix with JMonkey next release //RenderManager._VRInstancing_RightCamProjection = camRight.getViewProjectionMatrix(); - setupFinalFullTexture(environment.getApplication().getViewPort().getCamera()); - } else { - throw new IllegalStateException("This VR environment is not attached to any application."); - } - + setupFinalFullTexture(environment.getApplication().getViewPort().getCamera()); + } else { + throw new IllegalStateException("This VR environment is not attached to any application."); + } + } - + // setup gui environment.getVRGUIManager().setupGui(getLeftCamera(), getRightCamera(), getLeftViewPort(), getRightViewPort()); - - if( environment.getVRHardware() != null ) { + + if (environment.getVRHardware() != null) { // call these to cache the results internally - environment.getVRHardware().getHMDMatrixPoseLeftEye(); - environment.getVRHardware().getHMDMatrixPoseRightEye(); + environment.getVRHardware().getHMDMatrixPoseLeftEye(); + environment.getVRHardware().getHMDMatrixPoseRightEye(); } - } else { + } else { throw new IllegalStateException("This VR view manager is not attached to any VR environment."); - } + } } - - private ViewPort setupMirrorBuffers(Camera cam, Texture tex, boolean expand) { - - if (environment != null){ - if (environment.getApplication() != null){ - Camera clonecam = cam.clone(); - ViewPort viewPort = environment.getApplication().getRenderManager().createPostView("MirrorView", clonecam); - clonecam.setParallelProjection(true); - viewPort.setClearFlags(true, true, true); - viewPort.setBackgroundColor(ColorRGBA.Black); - Picture pic = new Picture("fullscene"); - pic.setLocalTranslation(-0.75f, -0.5f, 0f); - if( expand ) { - pic.setLocalScale(3f, 1f, 1f); - } else { - pic.setLocalScale(1.5f, 1f, 1f); - } - pic.setQueueBucket(Bucket.Opaque); - pic.setTexture(environment.getApplication().getAssetManager(), (Texture2D)tex, false); - viewPort.attachScene(pic); - viewPort.setOutputFrameBuffer(null); - - pic.updateGeometricState(); - - return viewPort; - } else { - throw new IllegalStateException("This VR environment is not attached to any application."); - } - } else { + + private ViewPort setupMirrorBuffers(Camera cam, Texture2D tex, boolean expand) { + + if (environment != null) { + if (environment.getApplication() != null) { + Camera clonecam = cam.clone(); + ViewPort viewPort = environment.getApplication().getRenderManager().createPostView("MirrorView", clonecam); + clonecam.setParallelProjection(true); + viewPort.setClearFlags(true, true, true); + viewPort.setBackgroundColor(ColorRGBA.Black); + Picture pic = new Picture("fullscene"); + pic.setLocalTranslation(-0.75f, -0.5f, 0f); + if (expand) { + pic.setLocalScale(3f, 1f, 1f); + } else { + pic.setLocalScale(1.5f, 1f, 1f); + } + pic.setQueueBucket(Bucket.Opaque); + pic.setTexture(environment.getApplication().getAssetManager(), (Texture2D) tex, false); + 
viewPort.attachScene(pic); + viewPort.setOutputFrameBuffer(null); + + pic.updateGeometricState(); + + return viewPort; + } else { + throw new IllegalStateException("This VR environment is not attached to any application."); + } + } else { throw new IllegalStateException("This VR view manager is not attached to any VR environment."); - } + } } - + private void setupFinalFullTexture(Camera cam) { - - if (environment != null){ - if (environment.getApplication() != null){ - // create offscreen framebuffer - FrameBuffer out = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1); - //offBuffer.setSrgb(true); - - //setup framebuffer's texture - dualEyeTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8); - dualEyeTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps); - dualEyeTex.setMagFilter(Texture.MagFilter.Bilinear); - - logger.config("Dual eye texture "+dualEyeTex.getName()+" ("+dualEyeTex.getImage().getId()+")"); - logger.config(" Type: "+dualEyeTex.getType()); - logger.config(" Size: "+dualEyeTex.getImage().getWidth()+"x"+dualEyeTex.getImage().getHeight()); - logger.config(" Image depth: "+dualEyeTex.getImage().getDepth()); - logger.config(" Image format: "+dualEyeTex.getImage().getFormat()); - logger.config(" Image color space: "+dualEyeTex.getImage().getColorSpace()); - - //setup framebuffer to use texture - out.setDepthBuffer(Image.Format.Depth); - out.setColorTexture(dualEyeTex); - - ViewPort viewPort = environment.getApplication().getViewPort(); - viewPort.setClearFlags(true, true, true); - viewPort.setBackgroundColor(ColorRGBA.Black); - viewPort.setOutputFrameBuffer(out); - } else { - throw new IllegalStateException("This VR environment is not attached to any application."); - } - } else { - throw new IllegalStateException("This VR view manager is not attached to any VR environment."); - } - } - - private ViewPort setupViewBuffers(Camera cam, String viewName){ - - if (environment != null){ - if (environment.getApplication() != null){ - // create offscreen framebuffer - FrameBuffer offBufferLeft = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1); - //offBufferLeft.setSrgb(true); - - //setup framebuffer's texture - Texture2D offTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8); - offTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps); - offTex.setMagFilter(Texture.MagFilter.Bilinear); - - //setup framebuffer to use texture - offBufferLeft.setDepthBuffer(Image.Format.Depth); - offBufferLeft.setColorTexture(offTex); - - ViewPort viewPort = environment.getApplication().getRenderManager().createPreView(viewName, cam); - viewPort.setClearFlags(true, true, true); - viewPort.setBackgroundColor(ColorRGBA.Black); - - Iterator spatialIter = environment.getApplication().getViewPort().getScenes().iterator(); - while(spatialIter.hasNext()){ - viewPort.attachScene(spatialIter.next()); - } - - //set viewport to render to offscreen framebuffer - viewPort.setOutputFrameBuffer(offBufferLeft); - return viewPort; - } else { - throw new IllegalStateException("This VR environment is not attached to any application."); - } - } else { - throw new IllegalStateException("This VR view manager is not attached to any VR environment."); - } - } - - /** - * Setup a distortion mesh for the stereo view. - * @param eye the eye to apply. - * @param api the underlying VR api - * @return the distorted mesh. 
- */ - public static Mesh setupDistortionMesh(int eye, VRAPI api) { - Mesh distortionMesh = new Mesh(); - float m_iLensGridSegmentCountH = 43, m_iLensGridSegmentCountV = 43; - - float w = 1f / (m_iLensGridSegmentCountH - 1f); - float h = 1f / (m_iLensGridSegmentCountV - 1f); - - float u, v; - - float verts[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 3]; - - float texcoordR[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2]; - float texcoordG[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2]; - float texcoordB[] = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2]; - - int vertPos = 0, coordPos = 0; - - float Xoffset = eye == JOpenVRLibrary.EVREye.EVREye_Eye_Left ? -1f : 0; - for (int y = 0; y < m_iLensGridSegmentCountV; y++) { - for (int x = 0; x < m_iLensGridSegmentCountH; x++) { - u = x * w; - v = 1 - y * h; - verts[vertPos] = Xoffset + u; // x - verts[vertPos + 1] = -1 + 2 * y * h; // y - verts[vertPos + 2] = 0f; // z - vertPos += 3; - - DistortionCoordinates_t dc0 = new DistortionCoordinates_t(); - if( api.getVRSystem() == null ) { - // default to no distortion - texcoordR[coordPos] = u; - texcoordR[coordPos + 1] = 1 - v; - texcoordG[coordPos] = u; - texcoordG[coordPos + 1] = 1 - v; - texcoordB[coordPos] = u; - texcoordB[coordPos + 1] = 1 - v; - } else { - ((VR_IVRSystem_FnTable)api.getVRSystem()).ComputeDistortion.apply(eye, u, v, dc0); - - texcoordR[coordPos] = dc0.rfRed[0]; - texcoordR[coordPos + 1] = 1 - dc0.rfRed[1]; - texcoordG[coordPos] = dc0.rfGreen[0]; - texcoordG[coordPos + 1] = 1 - dc0.rfGreen[1]; - texcoordB[coordPos] = dc0.rfBlue[0]; - texcoordB[coordPos + 1] = 1 - dc0.rfBlue[1]; - } - - coordPos += 2; + + if (environment != null) { + if (environment.getApplication() != null) { + // create offscreen framebuffer + FrameBuffer out = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1); + //offBuffer.setSrgb(true); + + //setup framebuffer's texture + dualEyeTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8); + dualEyeTex.setMinFilter(Texture2D.MinFilter.BilinearNoMipMaps); + dualEyeTex.setMagFilter(Texture2D.MagFilter.Bilinear); + + logger.config("Dual eye texture " + dualEyeTex.getName() + " (" + dualEyeTex.getImage().getId() + ")"); + logger.config(" Type: " + dualEyeTex.getType()); + logger.config(" Size: " + dualEyeTex.getImage().getWidth() + "x" + dualEyeTex.getImage().getHeight()); + logger.config(" Image depth: " + dualEyeTex.getImage().getDepth()); + logger.config(" Image format: " + dualEyeTex.getImage().getFormat()); + logger.config(" Image color space: " + dualEyeTex.getImage().getColorSpace()); + + //setup framebuffer to use texture + out.setDepthBuffer(Image.Format.Depth); + out.setColorTexture(dualEyeTex); + + ViewPort viewPort = environment.getApplication().getViewPort(); + viewPort.setClearFlags(true, true, true); + viewPort.setBackgroundColor(ColorRGBA.Black); + viewPort.setOutputFrameBuffer(out); + } else { + throw new IllegalStateException("This VR environment is not attached to any application."); } + } else { + throw new IllegalStateException("This VR view manager is not attached to any VR environment."); } + } - // have UV coordinates & positions, now to setup indices - - int[] indices = new int[(int) ((m_iLensGridSegmentCountV - 1) * (m_iLensGridSegmentCountH - 1)) * 6]; - int indexPos = 0; - int a, b, c, d; - - int offset = 0; - for (int y = 0; y < m_iLensGridSegmentCountV - 1; y++) { - for (int x = 0; x < 
m_iLensGridSegmentCountH - 1; x++) { - a = (int) (m_iLensGridSegmentCountH * y + x + offset); - b = (int) (m_iLensGridSegmentCountH * y + x + 1 + offset); - c = (int) ((y + 1) * m_iLensGridSegmentCountH + x + 1 + offset); - d = (int) ((y + 1) * m_iLensGridSegmentCountH + x + offset); - - indices[indexPos] = a; - indices[indexPos + 1] = b; - indices[indexPos + 2] = c; - - indices[indexPos + 3] = a; - indices[indexPos + 4] = c; - indices[indexPos + 5] = d; - - indexPos += 6; + private ViewPort setupViewBuffers(Camera cam, String viewName) { + + if (environment != null) { + if (environment.getApplication() != null) { + // create offscreen framebuffer + FrameBuffer offBufferLeft = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1); + //offBufferLeft.setSrgb(true); + + //setup framebuffer's texture + Texture2D offTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8); + offTex.setMinFilter(Texture2D.MinFilter.BilinearNoMipMaps); + offTex.setMagFilter(Texture2D.MagFilter.Bilinear); + + //setup framebuffer to use texture + offBufferLeft.setDepthBuffer(Image.Format.Depth); + offBufferLeft.setColorTexture(offTex); + + ViewPort viewPort = environment.getApplication().getRenderManager().createPreView(viewName, cam); + viewPort.setClearFlags(true, true, true); + viewPort.setBackgroundColor(ColorRGBA.Black); + + Iterator spatialIter = environment.getApplication().getViewPort().getScenes().iterator(); + while (spatialIter.hasNext()) { + viewPort.attachScene(spatialIter.next()); + } + + //set viewport to render to offscreen framebuffer + viewPort.setOutputFrameBuffer(offBufferLeft); + return viewPort; + } else { + throw new IllegalStateException("This VR environment is not attached to any application."); } + } else { + throw new IllegalStateException("This VR view manager is not attached to any VR environment."); } - - // OK, create the mesh - distortionMesh.setBuffer(VertexBuffer.Type.Position, 3, verts); - distortionMesh.setBuffer(VertexBuffer.Type.Index, 1, indices); - distortionMesh.setBuffer(VertexBuffer.Type.TexCoord, 2, texcoordR); - distortionMesh.setBuffer(VertexBuffer.Type.TexCoord2, 2, texcoordG); - distortionMesh.setBuffer(VertexBuffer.Type.TexCoord3, 2, texcoordB); - distortionMesh.setStatic(); - return distortionMesh; } + } diff --git a/jme3-vr/src/main/java/com/jme3/post/OpenVRFilter.java b/jme3-vr/src/main/java/com/jme3/post/OpenVRFilter.java deleted file mode 100644 index 61cf840ede..0000000000 --- a/jme3-vr/src/main/java/com/jme3/post/OpenVRFilter.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package com.jme3.post; - -import com.jme3.app.VRApplication; -import com.jme3.asset.AssetManager; -import com.jme3.material.Material; -import com.jme3.post.Filter; -import com.jme3.renderer.RenderManager; -import com.jme3.renderer.Renderer; -import com.jme3.renderer.ViewPort; -import com.jme3.scene.Mesh; -import com.jme3.scene.VertexBuffer; -import com.jme3.system.jopenvr.DistortionCoordinates_t; -import com.jme3.system.jopenvr.JOpenVRLibrary; -import com.jme3.system.jopenvr.VR_IVRSystem_FnTable; -import com.jme3.texture.FrameBuffer; - -/** - * DO NOT USE - * @author phr00t - * @deprecated DO NOT USE - */ -@Deprecated -public class OpenVRFilter extends Filter { - - private Mesh distortionMesh; - - private VRApplication application = null; - - /** - * DO NOT USE - * @param application the VR application. 
- */ - public OpenVRFilter(VRApplication application) { - this.application = application; - } - - /** - * DO NOT USE - * @return the distortion mesh. - */ - public Mesh getDistortionMesh() { - return distortionMesh; - } - - @Override - protected void initFilter(AssetManager manager, RenderManager renderManager, ViewPort vp, int w, int h) { - material = new Material(manager, "Common/MatDefs/VR/OpenVR.j3md"); - configureDistortionMesh(); - } - - @Override - protected Material getMaterial() { - return material; - - } - - @Override - protected void preFrame(float tpf) { - super.preFrame(tpf); - } - - @Override - protected void postFrame(RenderManager renderManager, ViewPort viewPort, FrameBuffer prevFilterBuffer, FrameBuffer sceneBuffer) { - super.postFrame(renderManager, viewPort, prevFilterBuffer, sceneBuffer); - } - - @Override - protected void postFilter(Renderer r, FrameBuffer buffer) { - super.postFilter(r, buffer); - } - - /* - function converted from: - https://github.com/ValveSoftware/openvr/blob/master/samples/hellovr_opengl/hellovr_opengl_main.cpp#L1335 - */ - private void configureDistortionMesh() { - float m_iLensGridSegmentCountH = 43, m_iLensGridSegmentCountV = 43; - - float w = 1f / m_iLensGridSegmentCountH - 1f; - float h = 1f / m_iLensGridSegmentCountV - 1f; - - float u, v; - - distortionMesh = new Mesh(); - float verts[] = new float[(int)(m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 3]; - - float texcoordR[] = new float[(int)(m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2]; - float texcoordG[] = new float[(int)(m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2]; - float texcoordB[] = new float[(int)(m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2]; - - int vertPos = 0, coordPos = 0; - - //left eye distortion verts - float Xoffset = -1f; - for( int y=0; y vIndices; - int[] indices = new int[(int)((m_iLensGridSegmentCountV - 1) * (m_iLensGridSegmentCountH - 1)) * 6]; - int indexPos = 0; - int a,b,c,d; - - int offset = 0; - for( int y=0; y + */ +public class TestInitHmd { + + public static void main(String... args){ + testInitHmd(); + } + + public static void testInitHmd(){ + VREnvironment environment = new VREnvironment(new AppSettings(true)); + environment.initialize(); + OpenVR openVr = (OpenVR) environment.getVRHardware(); + System.out.println(openVr.getName()); + + openVr.updatePose(); + + openVr.destroy(); + + } +} diff --git a/jme3-vr/src/main/resources/Common/ShaderLib/InstanceVR.glsllib b/jme3-vr/src/main/resources/Common/ShaderLib/InstanceVR.glsllib index 9294488af4..993495c843 100644 --- a/jme3-vr/src/main/resources/Common/ShaderLib/InstanceVR.glsllib +++ b/jme3-vr/src/main/resources/Common/ShaderLib/InstanceVR.glsllib @@ -1,121 +1,126 @@ -// Instancing GLSL library. Modified for VR use. -// -// When the INSTANCING define is set in the shader, -// all global matrices are replaced with "instanced" versions. -// One exception is g_NormalMatrix which becomes unusable, -// instead the function ApplyNormalTransform is used to transform -// the normal and tangent vectors into world view space. - -// The world matrix and normal transform quaternion need to be passed -// as vertex attributes "inWorldMatrix" and "inNormalRotationQuaternion" -// respectively. -// The VertexBuffers for those two attributes -// need to be configured into instanced mode (VertexBuffer.setInstanced(true)). -// - inWorldMatrix should have 12 * numInstances floats. -// - inNormalRotationQuaternion should have 4 * numInstances. 
-// Thus, instancing data occupies 4 vertex attributes (16 / 4 = 4). -// -// The GL_ARB_draw_instanced and GL_ARB_instanced_arrays extensions -// are required (OGL 3.3). - -#if defined INSTANCING - -uniform mat4 g_ViewProjectionMatrix; -uniform mat4 g_ViewMatrix; -uniform mat4 m_RightEyeViewProjectionMatrix; - -// World Matrix + Normal Rotation Quaternion. -// The World Matrix is the top 3 rows - -// since the bottom row is always 0,0,0,1 for this transform. -// The bottom row is the transpose of the inverse of WorldView Transform -// as a quaternion. i.e. g_NormalMatrix converted to a quaternion. -// -// Using a quaternion instead of a matrix here allows saving approximately -// 2 vertex attributes which now can be used for additional per-vertex data. -attribute mat4 inInstanceData; - -// Extract the world matrix out of the instance data, leaving out the -// quaternion at the end. -mat4 worldMatrix = mat4(vec4(inInstanceData[0].xyz, 0.0), - vec4(inInstanceData[1].xyz, 0.0), - vec4(inInstanceData[2].xyz, 0.0), - vec4(inInstanceData[3].xyz, 1.0)); - -vec4 TransformWorld(vec4 position) -{ - return (worldMatrix * position); -} - -vec4 TransformWorldView(vec4 position) -{ - return g_ViewMatrix * TransformWorld(position); -} - -vec4 TransformWorldViewProjection(vec4 position) -{ - return g_ViewProjectionMatrix * TransformWorld(position); -} - -// VR specific variables -const float EyeOffsetScale[2] = float[](-0.5, 0.5); -const vec4 EyeClipEdge[2] = vec4[](vec4(-1.0,0.0,0.0,1.0), vec4(1.0,0.0,0.0,1.0)); -out float gl_ClipDistance[1]; -in int gl_InstanceID; - -vec4 TransformWorldViewProjectionVR(vec4 position) -{ - vec4 clipPos = (gl_InstanceID == 0 ? g_ViewProjectionMatrix : m_RightEyeViewProjectionMatrix) * TransformWorld(position); - gl_ClipDistance[0] = dot(clipPos, EyeClipEdge[gl_InstanceID]); - clipPos.x *= 0.5; // shrink to half of the screen - clipPos.x += EyeOffsetScale[gl_InstanceID] * clipPos.w; // scoot left or right. - return clipPos; -} - -vec3 TransformNormal(vec3 vec) -{ - vec4 quat = vec4(inInstanceData[0].w, inInstanceData[1].w, - inInstanceData[2].w, inInstanceData[3].w); - - vec3 worldNormal = vec + vec3(2.0) * cross(cross(vec, quat.xyz) + vec3(quat.w) * vec, quat.xyz); - - return (g_ViewMatrix * vec4(worldNormal, 0.0)).xyz; -} - -// Prevent user from using g_** matrices which will have invalid data in this case. -#define g_WorldMatrix use_TransformWorld_not_gWorldMatrix -#define g_WorldViewMatrix use_TransformWorldView_not_gWorldMatrix -#define g_WorldViewProjectionMatrix use_TransformWorldViewProjectionVR_not_gWorldViewProjectionMatrix -#define g_NormalMatrix use_TransformNormal_not_gNormalMatrix - -#else - -uniform mat4 g_WorldMatrix; -uniform mat4 g_WorldViewMatrix; -uniform mat4 g_WorldViewProjectionMatrix; -uniform mat3 g_NormalMatrix; - -vec4 TransformWorld(vec4 position) -{ - return g_WorldMatrix * position; -} - -vec4 TransformWorldView(vec4 position) -{ - return g_WorldViewMatrix * position; -} - -vec4 TransformWorldViewProjection(vec4 position) -{ - return g_WorldViewProjectionMatrix * position; -} - -vec4 TransformWorldViewProjectionVR(vec4 position) -{ - return g_WorldViewProjectionMatrix * position; -} - -vec3 TransformNormal(vec3 normal) { - return g_NormalMatrix * normal; -} - -#endif +// Instancing GLSL library. Modified for VR use. +// +// When the INSTANCING define is set in the shader, +// all global matrices are replaced with "instanced" versions. 
+// One exception is g_NormalMatrix which becomes unusable, +// instead the function ApplyNormalTransform is used to transform +// the normal and tangent vectors into world view space. + +// The world matrix and normal transform quaternion need to be passed +// as vertex attributes "inWorldMatrix" and "inNormalRotationQuaternion" +// respectively. +// The VertexBuffers for those two attributes +// need to be configured into instanced mode (VertexBuffer.setInstanced(true)). +// - inWorldMatrix should have 12 * numInstances floats. +// - inNormalRotationQuaternion should have 4 * numInstances. +// Thus, instancing data occupies 4 vertex attributes (16 / 4 = 4). +// +// The GL_ARB_draw_instanced and GL_ARB_instanced_arrays extensions +// are required (OGL 3.3). + +#if defined INSTANCING + +uniform mat4 g_ViewProjectionMatrix; +uniform mat4 g_ViewMatrix; +uniform mat4 m_RightEyeViewProjectionMatrix; + +// World Matrix + Normal Rotation Quaternion. +// The World Matrix is the top 3 rows - +// since the bottom row is always 0,0,0,1 for this transform. +// The bottom row is the transpose of the inverse of WorldView Transform +// as a quaternion. i.e. g_NormalMatrix converted to a quaternion. +// +// Using a quaternion instead of a matrix here allows saving approximately +// 2 vertex attributes which now can be used for additional per-vertex data. +attribute mat4 inInstanceData; + +// Extract the world matrix out of the instance data, leaving out the +// quaternion at the end. +mat4 worldMatrix = mat4(vec4(inInstanceData[0].xyz, 0.0), + vec4(inInstanceData[1].xyz, 0.0), + vec4(inInstanceData[2].xyz, 0.0), + vec4(inInstanceData[3].xyz, 1.0)); + +vec4 TransformWorld(vec4 position) +{ + return (worldMatrix * position); +} + +vec4 TransformWorldView(vec4 position) +{ + return g_ViewMatrix * TransformWorld(position); +} + +vec4 TransformWorldViewProjection(vec4 position) +{ + return g_ViewProjectionMatrix * TransformWorld(position); +} + +// VR specific variables +const float EyeOffsetScale[2] = float[](-0.5, 0.5); +const vec4 EyeClipEdge[2] = vec4[](vec4(-1.0,0.0,0.0,1.0), vec4(1.0,0.0,0.0,1.0)); +out float gl_ClipDistance[1]; +in int gl_InstanceID; + +vec4 TransformWorldViewProjectionVR(vec4 position) +{ + vec4 clipPos = (gl_InstanceID == 0 ? g_ViewProjectionMatrix : m_RightEyeViewProjectionMatrix) * TransformWorld(position); + gl_ClipDistance[0] = dot(clipPos, EyeClipEdge[gl_InstanceID]); + clipPos.x *= 0.5; // shrink to half of the screen + clipPos.x += EyeOffsetScale[gl_InstanceID] * clipPos.w; // scoot left or right. + return clipPos; +} + +vec3 TransformNormal(vec3 vec) +{ + vec4 quat = vec4(inInstanceData[0].w, inInstanceData[1].w, + inInstanceData[2].w, inInstanceData[3].w); + + vec3 worldNormal = vec + vec3(2.0) * cross(cross(vec, quat.xyz) + vec3(quat.w) * vec, quat.xyz); + + return (g_ViewMatrix * vec4(worldNormal, 0.0)).xyz; +} + +// Prevent user from using g_** matrices which will have invalid data in this case. 
+#define g_WorldMatrix use_TransformWorld_not_gWorldMatrix +#define g_WorldViewMatrix use_TransformWorldView_not_gWorldMatrix +#define g_WorldViewProjectionMatrix use_TransformWorldViewProjectionVR_not_gWorldViewProjectionMatrix +#define g_NormalMatrix use_TransformNormal_not_gNormalMatrix + +#else + +uniform mat4 g_WorldMatrix; +uniform mat4 g_WorldViewMatrix; +uniform mat4 g_WorldViewProjectionMatrix; +uniform mat3 g_NormalMatrix; +uniform mat3 g_WorldNormalMatrix; + +vec4 TransformWorld(vec4 position) +{ + return g_WorldMatrix * position; +} + +vec4 TransformWorldView(vec4 position) +{ + return g_WorldViewMatrix * position; +} + +vec4 TransformWorldViewProjection(vec4 position) +{ + return g_WorldViewProjectionMatrix * position; +} + +vec4 TransformWorldViewProjectionVR(vec4 position) +{ + return g_WorldViewProjectionMatrix * position; +} + +vec3 TransformNormal(vec3 normal) { + return g_NormalMatrix * normal; +} + +vec3 TransformWorldNormal(vec3 normal) { + return normalize(g_WorldNormalMatrix * normal); +} + +#endif
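
Note on the instanced path of InstanceVR.glsllib above: the header documents the per-instance layout (a mat4 `inInstanceData` carrying the top three rows of the world matrix in the .xyz components and the normal-rotation quaternion packed into the .w components, i.e. 16 floats per instance) and states that the corresponding VertexBuffers must be put into instanced mode via VertexBuffer.setInstanced(true). In practice that packing is normally produced by jME's scene-graph instancing utilities rather than by hand. A minimal application-side sketch, assuming the stock jme3-core InstancedNode and the "UseInstancing" parameter of Lighting.j3md (which maps to the INSTANCING define); this is illustrative only and makes no assumption beyond that:

import com.jme3.asset.AssetManager;
import com.jme3.material.Material;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.instancing.InstancedNode;
import com.jme3.scene.shape.Box;

public class InstancingSetupSketch {

    // Sketch: InstancedNode packs each child's world matrix rows and normal-rotation
    // quaternion into the instanced InstanceData vertex buffer, matching the
    // 16-float-per-instance layout read from inInstanceData by the shader library.
    public static void attachInstancedBoxes(Node rootNode, AssetManager assetManager) {
        Material mat = new Material(assetManager, "Common/MatDefs/Light/Lighting.j3md");
        mat.setBoolean("UseInstancing", true); // turns on the INSTANCING define

        InstancedNode instancedNode = new InstancedNode("instanced boxes");
        rootNode.attachChild(instancedNode);

        Box box = new Box(0.5f, 0.5f, 0.5f);
        for (int i = 0; i < 16; i++) {
            Geometry geometry = new Geometry("box " + i, box);
            geometry.setMaterial(mat);
            geometry.setLocalTranslation(i * 2f, 0f, 0f);
            instancedNode.attachChild(geometry);
        }
        instancedNode.instance(); // groups the children into instanced batches
    }
}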
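
The INSTANCING branch of TransformWorldViewProjectionVR implements single-pass instanced stereo: each geometry is drawn with two instances, gl_InstanceID selects either g_ViewProjectionMatrix (left eye) or m_RightEyeViewProjectionMatrix (right eye), gl_ClipDistance[0] culls fragments that would spill into the other eye's half, and the clipPos.x *= 0.5 plus EyeOffsetScale offset packs the eyes side by side (in NDC, x is halved and shifted by -0.5 for the left eye and +0.5 for the right eye). On the Java side the right-eye matrix still has to be supplied to the shader; the FIXME in setupCamerasAndViews notes that this wiring needs a change in the jME core. A minimal sketch of what that wiring could look like, assuming a material parameter named "RightEyeViewProjectionMatrix" (a hypothetical name, inferred only from the m_ uniform prefix, not confirmed by this patch):

import com.jme3.material.Material;
import com.jme3.math.Matrix4f;
import com.jme3.renderer.Camera;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL30;

public class InstancedStereoSketch {

    // Sketch only: pushes the right-eye view-projection matrix to a material that
    // includes InstanceVR.glsllib, and enables the clip distance the shader uses
    // to discard the half of the packed render target belonging to the other eye.
    public static void prepare(Material material, Camera rightCamera) {
        GL11.glEnable(GL30.GL_CLIP_DISTANCE0); // mirrors the call in setupCamerasAndViews

        Matrix4f rightViewProjection = rightCamera.getViewProjectionMatrix().clone();
        material.setMatrix4("RightEyeViewProjectionMatrix", rightViewProjection); // assumed parameter name
    }
}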