feat: add linux vr support (#481)

parent a6419c1187
commit 4c45b4b4a2

@@ -1,15 +1,16 @@
### Virtual Reality Overview

Virtual reality is currently only supported on Windows 10/11 on the HTC Vive and Oculus Rift/Quest (with link).
Virtual reality is currently supported on Windows 10/11 on the HTC Vive and Oculus Rift/Quest (with link), and on Linux with the HTC Vive (there is no Linux driver for Oculus).

Linux support is planned, but the HTC Vive Pro Eye tracking driver is not available for Linux, and currently asynchronous reprojection/motion smoothing is not supported on the Nvidia proprietary drivers on Linux.
The HTC Vive Pro Eye tracking driver is not available for Linux. You must have Nvidia driver 470.XX and SteamVR 1.19.7 (or newer), as asynchronous reprojection (a form of motion smoothing necessary for a usable VR experience) was only added in mid-2021.

### Setup
1. Set up the HTC Vive VR hardware according to the [setup guide](https://support.steampowered.com/steamvr/HTC_Vive/)

2. (optional) If you plan to use eye tracking on Windows, create a [vive developer account](https://hub.vive.com/sso/login), then download and install the [SRAnipal runtime](https://developer.vive.com/resources/vive-sense/sdk/vive-eye-and-facial-tracking-sdk/). Note that you should [calibrate](https://developer.vive.com/us/support/sdk/category_howto/how-to-calibrate-eye-tracking.html) the Vive eye tracker before each recording session.

3. Ensure you have installed iGibson according to the installation [instructions](http://svl.stanford.edu/igibson/docs/installation.html#installation-method)
3. Ensure you have installed iGibson according to the installation [instructions](http://svl.stanford.edu/igibson/docs/installation.html#installation-method).
* Note: On Windows, VR support is enabled by default. On Linux, you must install with an additional environment variable: `USE_VR=True pip install -e .`. You must also have additional development headers installed: on Ubuntu, `sudo apt install xorg-dev`; on CentOS/Fedora, `sudo dnf install libXinerama-devel libXi-devel libXrandr-devel libXcursor-devel`. A quick way to verify the VR build is shown in the sketch below.
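A minimal way to confirm the Linux VR build is to try importing the compiled `VRRendererContext` module; this is only a sanity-check sketch, with the import path taken from `mesh_renderer.py` in this commit:

```python
# Quick sanity check: the VR renderer extension only exists when iGibson was
# installed with USE_VR=True on Linux (it is always built on Windows).
try:
    from igibson.render.mesh_renderer import VRRendererContext  # noqa: F401
    print("VR renderer extension is available")
except ImportError:
    print("VR support was not compiled in; reinstall with: USE_VR=True pip install -e .")
```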
### VR examples

@@ -2,6 +2,8 @@ cmake_minimum_required(VERSION 2.8.12)
project(CppMeshRenderer)

set(USE_GLAD TRUE)
set(CMAKE_EXPORT_COMPILE_COMMANDS TRUE)
set(OpenGL_GL_PREFERENCE GLVND)

if (MAC_PLATFORM)
set(USE_CUDA FALSE)

@@ -12,6 +14,10 @@ elseif (WIN32)
set(USE_GLFW TRUE)
# iGibson on Windows is always in VR mode
set(USE_VR TRUE)
elseif (UNIX AND USE_VR)
set(USE_CUDA TRUE)
set(USE_GLFW TRUE)
set(USE_VR TRUE)
else ()
set(USE_CUDA TRUE)
set(USE_GLFW FALSE)

@@ -57,53 +63,68 @@ endif ()

if (USE_VR)
# Find OpenVR
set(WINDOWS_PATH_SUFFIXES win64 Win64 x64)
set(OPENVR_DIR openvr)
find_library(OPENVR_LIBRARIES
NAMES
openvr_api
PATHS
"${OPENVR_DIR}/bin"
"${OPENVR_DIR}/lib"
PATH_SUFFIXES
${WINDOWS_PATH_SUFFIXES}
NO_DEFAULT_PATH
NO_CMAKE_FIND_ROOT_PATH
)
set(OPENVR_INCLUDE_DIR "${OPENVR_DIR}/headers")
include_directories("${OPENVR_INCLUDE_DIR}")

# Find SRAnipal
set(SRANI_DIR sranipal)
find_library(SRANI_LIBRARIES
NAMES
SRAnipal
PATHS
"${SRANI_DIR}/lib"
NO_DEFAULT_PATH
NO_CMAKE_FIND_ROOT_PATH
)
include_directories("${SRANI_DIR}/include")
if (WIN32)
set(PATH_SUFFIXES win64 Win64 x64)
# Find SRAnipal
set(SRANI_DIR sranipal)
find_library(SRANI_LIBRARIES
NAMES
SRAnipal
PATHS
"${SRANI_DIR}/lib"
NO_DEFAULT_PATH
NO_CMAKE_FIND_ROOT_PATH
)
include_directories("${SRANI_DIR}/include")
else ()
set(PATH_SUFFIXES linux64 Linux64 x64)
endif()

find_library(OPENVR_LIBRARIES
NAMES
openvr_api
PATHS
"${OPENVR_DIR}/bin"
"${OPENVR_DIR}/lib"
PATH_SUFFIXES
${PATH_SUFFIXES}
NO_DEFAULT_PATH
NO_CMAKE_FIND_ROOT_PATH
)

endif()

add_library(tinyobjloader MODULE cpp/tinyobjloader/tiny_obj_loader.cc cpp/tinyobjloader/tiny_obj_loader_decrypt.cc cpp/tinyobjloader/bindings.cc)
if (USE_VR)
add_library(VRRendererContext MODULE glad/gl.cpp cpp/vr_mesh_renderer.cpp cpp/glfw_mesh_renderer.cpp cpp/mesh_renderer.cpp)
else()
if (USE_CUDA)
find_package(CUDA REQUIRED)
set(CUDA_LIBRARIES PUBLIC ${CUDA_LIBRARIES})
cuda_add_library(EGLRendererContext MODULE glad/egl.cpp glad/gl.cpp cpp/mesh_renderer.cpp cpp/egl_mesh_renderer.cpp)
else ()
add_library(EGLRendererContext MODULE glad/egl.cpp glad/gl.cpp cpp/mesh_renderer.cpp cpp/egl_mesh_renderer.cpp)
endif ()
if (USE_GLFW)
add_library(GLFWRendererContext MODULE glad/gl.cpp cpp/glfw_mesh_renderer.cpp cpp/mesh_renderer.cpp)
endif ()
endif()

if (USE_CUDA)
find_package(CUDA REQUIRED)
set(CUDA_LIBRARIES PUBLIC ${CUDA_LIBRARIES})
cuda_add_library(EGLRendererContext MODULE glad/egl.cpp glad/gl.cpp cpp/mesh_renderer.cpp cpp/egl_mesh_renderer.cpp)
else ()
add_library(EGLRendererContext MODULE glad/egl.cpp glad/gl.cpp cpp/mesh_renderer.cpp cpp/egl_mesh_renderer.cpp)
endif ()

if (USE_GLFW)
add_library(GLFWRendererContext MODULE glad/gl.cpp cpp/glfw_mesh_renderer.cpp cpp/mesh_renderer.cpp)
endif ()

if (USE_VR)
target_link_libraries(VRRendererContext PRIVATE pybind11::module ${CMAKE_DL_LIBS} glfw ${GLFW_LIBRARIES} ${OPENGL_LIBRARIES} ${OPENVR_LIBRARIES} ${SRANI_LIBRARIES} cryptopp-static)
if (WIN32)
target_link_libraries(VRRendererContext PRIVATE pybind11::module ${CMAKE_DL_LIBS} glfw ${GLFW_LIBRARIES} ${OPENGL_LIBRARIES} ${OPENVR_LIBRARIES} ${SRANI_LIBRARIES} cryptopp-static)
else ()
target_link_libraries(VRRendererContext PRIVATE pybind11::module ${CMAKE_DL_LIBS} glfw ${GLFW_LIBRARIES} ${OPENGL_LIBRARIES} ${OPENVR_LIBRARIES} ${CUDA_LIBRARIES} cryptopp-static)
target_link_libraries(EGLRendererContext PRIVATE pybind11::module dl pthread)
# if (USE_GLFW)
target_link_libraries(GLFWRendererContext PRIVATE pybind11::module dl glfw ${GLFW_LIBRARIES} ${OPENGL_LIBRARIES})
# endif ()
endif ()
else ()
if (USE_GLAD)
target_link_libraries(EGLRendererContext PRIVATE pybind11::module dl pthread cryptopp-static)

@@ -135,7 +156,7 @@ endif()
set_target_properties(tinyobjloader PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}"
SUFFIX "${PYTHON_MODULE_EXTENSION}")

if (NOT USE_VR)
if (UNIX)
add_executable(query_devices glad/egl.cpp glad/gl.cpp cpp/query_devices.cpp)
add_executable(test_device glad/egl.cpp glad/gl.cpp cpp/test_device.cpp)
if (USE_GLAD)

@@ -2,9 +2,7 @@
#define GLFW_MESH_RENDERER_HEADER

#include "mesh_renderer.h"
#ifdef WIN32
#include <GLFW/glfw3.h>
#endif


class GLFWRendererContext: public MeshRendererContext {

@@ -89,7 +89,7 @@ public:
cudaGraphicsResource* cuda_res[MAX_NUM_RESOURCES];
#endif

int init() {};
void init() {};

void release() {};

@@ -254,4 +254,4 @@ public:
};


#endif
#endif

@@ -1,7 +1,10 @@
#include "vr_mesh_renderer.h"

#ifdef WIN32
#include "SRanipal.h"
#include "SRanipal_Eye.h"
#include "SRanipal_Enums.h"
#endif

#include <stdio.h>
#include <stdlib.h>

@@ -159,7 +162,7 @@ py::list VRRendererContext::getDeviceCoordinateSystem(char* device) {
// TIMELINE: Call after getDataForVRDevice, since this relies on knowing latest HMD transform
py::list VRRendererContext::getEyeTrackingData() {
py::list eyeData;

#ifdef WIN32
// Transform data into Gibson coordinate system before returning to user
glm::vec3 gibOrigin(vrToGib * glm::vec4(eyeTrackingData.origin, 1.0));
glm::vec3 gibDir(vrToGib * glm::vec4(eyeTrackingData.dir, 1.0));

@@ -185,10 +188,29 @@ py::list VRRendererContext::getEyeTrackingData() {
eyeData.append(dir);
eyeData.append(eyeTrackingData.leftPupilDiameter);
eyeData.append(eyeTrackingData.rightPupilDiameter);

// Return dummy data with false validity if eye tracking is not enabled (on non-Windows systems)
#else
py::list dummy_origin, dummy_dir;
float dummy_diameter_l, dummy_diameter_r;
eyeData.append(false);
eyeData.append(dummy_origin);
eyeData.append(dummy_dir);
eyeData.append(dummy_diameter_l);
eyeData.append(dummy_diameter_r);
#endif
return eyeData;
}

// Returns whether the current VR system supports eye tracking
bool VRRendererContext::hasEyeTrackingSupport() {
#ifdef WIN32
return ViveSR::anipal::Eye::IsViveProEye();
// Non-Windows OSes always have eye tracking disabled
#else
return false;
#endif
}

// Gets the VR offset vector in form x, y, z
// TIMELINE: Can call any time
py::list VRRendererContext::getVROffset() {
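On the Python side, these two bindings let callers gate gaze queries on capability instead of platform. A hedged caller-side sketch (the `vrsys` attribute name follows MeshRendererVR later in this diff; the wrapper function itself is illustrative, not iGibson API):

```python
def read_gaze(renderer):
    # renderer.vrsys is assumed to be the compiled VRRendererContext instance.
    # On non-Windows builds hasEyeTrackingSupport() always returns False, and
    # getEyeTrackingData() would only return dummy values with a False validity flag.
    if not renderer.vrsys.hasEyeTrackingSupport():
        return None
    return renderer.vrsys.getEyeTrackingData()
```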

@@ -202,11 +224,6 @@ py::list VRRendererContext::getVROffset() {
return offset;
}

// Returns whether the current VR system supports eye tracking
bool VRRendererContext::hasEyeTrackingSupport() {
return ViveSR::anipal::Eye::IsViveProEye();
}

// Initialize the VR system and compositor
// TIMELINE: Call during init of renderer, before height/width are set
void VRRendererContext::initVR(bool useEyeTracking) {

@@ -238,12 +255,15 @@ void VRRendererContext::initVR(bool useEyeTracking) {
// No VR system offset by default
vrOffsetVec = glm::vec3(0, 0, 0);

// Only activate eye tracking on Windows
#ifdef WIN32
// Set eye tracking boolean
this->useEyeTracking = useEyeTracking;
if (useEyeTracking) {
shouldShutDownEyeTracking = false;
initAnipal();
}
#endif
}

// Polls for VR events, such as button presses

@@ -353,10 +373,12 @@ void VRRendererContext::releaseVR() {
vr::VR_Shutdown();
m_pHMD = NULL;

#ifdef WIN32
if (this->useEyeTracking) {
this->shouldShutDownEyeTracking = true;
eyeTrackingThread->join();
}
#endif
}

// Sets the offset of the VR headset

@@ -410,8 +432,8 @@ void VRRendererContext::updateVRData() {
hmdData.index = idx;
hmdData.isValidData = true;
hmdActualPos = getPositionFromSteamVRMatrix(transformMat);

setSteamVRMatrixPos(hmdActualPos + vrOffsetVec, transformMat);
glm::vec3 steamVrMatrixPos = hmdActualPos + vrOffsetVec;
setSteamVRMatrixPos(steamVrMatrixPos, transformMat);

hmdData.deviceTransform = convertSteamVRMatrixToGlmMat4(transformMat);
hmdData.devicePos = getPositionFromSteamVRMatrix(transformMat);

@@ -447,7 +469,8 @@ void VRRendererContext::updateVRData() {
leftControllerData.isValidData = getControllerDataResult;

glm::vec3 leftControllerPos = getPositionFromSteamVRMatrix(transformMat);
setSteamVRMatrixPos(leftControllerPos + vrOffsetVec, transformMat);
glm::vec3 steamVrMatrixPos = leftControllerPos + vrOffsetVec;
setSteamVRMatrixPos(steamVrMatrixPos, transformMat);

leftControllerData.deviceTransform = convertSteamVRMatrixToGlmMat4(transformMat);
leftControllerData.devicePos = getPositionFromSteamVRMatrix(transformMat);

@@ -463,7 +486,9 @@ void VRRendererContext::updateVRData() {
rightControllerData.isValidData = getControllerDataResult;

glm::vec3 rightControllerPos = getPositionFromSteamVRMatrix(transformMat);
setSteamVRMatrixPos(rightControllerPos + vrOffsetVec, transformMat);
glm::vec3 steamVrMatrixPos = rightControllerPos + vrOffsetVec;

setSteamVRMatrixPos(steamVrMatrixPos, transformMat);

rightControllerData.deviceTransform = convertSteamVRMatrixToGlmMat4(transformMat);
rightControllerData.devicePos = getPositionFromSteamVRMatrix(transformMat);

@@ -481,7 +506,8 @@ void VRRendererContext::updateVRData() {

// Apply VR offset to tracker position
glm::vec3 trackerPos = getPositionFromSteamVRMatrix(transformMat);
setSteamVRMatrixPos(trackerPos + vrOffsetVec, transformMat);
glm::vec3 steamVrMatrixPos = trackerPos + vrOffsetVec;
setSteamVRMatrixPos(steamVrMatrixPos, transformMat);

if (this->trackerNamesToData.find(serial) != this->trackerNamesToData.end()) {
this->trackerNamesToData[serial].index = idx;

@@ -641,6 +667,7 @@ glm::vec3 VRRendererContext::getVec3ColFromMat4(int col_index, glm::mat4& mat) {
return v;
}

#ifdef WIN32
// Initializes the SRAnipal runtime, if the user selects this option
void VRRendererContext::initAnipal() {
if (!ViveSR::anipal::Eye::IsViveProEye()) {

@@ -720,6 +747,7 @@ void VRRendererContext::pollAnipal() {
}
}
}
#endif

// Print string version of mat4 for debugging purposes
void VRRendererContext::printMat4(glm::mat4& m) {

@@ -873,8 +901,8 @@ PYBIND11_MODULE(VRRendererContext, m) {
pymodule.def("getDataForVRTracker", &VRRendererContext::getDataForVRTracker);
pymodule.def("getDeviceCoordinateSystem", &VRRendererContext::getDeviceCoordinateSystem);
pymodule.def("getEyeTrackingData", &VRRendererContext::getEyeTrackingData);
pymodule.def("getVROffset", &VRRendererContext::getVROffset);
pymodule.def("hasEyeTrackingSupport", &VRRendererContext::hasEyeTrackingSupport);
pymodule.def("getVROffset", &VRRendererContext::getVROffset);
pymodule.def("initVR", &VRRendererContext::initVR);
pymodule.def("pollVREvents", &VRRendererContext::pollVREvents);
pymodule.def("postRenderVRForEye", &VRRendererContext::postRenderVRForEye);

@@ -2,9 +2,11 @@
#define VR_MESH_RENDERER_HEADER

#include "glfw_mesh_renderer.h"
#ifdef WIN32
#include "SRanipal.h"
#include "SRanipal_Eye.h"
#include "SRanipal_Enums.h"
#endif

#include <thread>

@@ -75,12 +77,16 @@ public:
glm::mat4 gibToVr;
glm::mat4 vrToGib;

// SRAnipal variables
bool useEyeTracking;
bool shouldShutDownEyeTracking;

#ifdef WIN32
// SRAnipal variables
std::thread* eyeTrackingThread;
ViveSR::anipal::Eye::EyeData eyeData;
#endif

int result;
bool shouldShutDownEyeTracking;

// Struct storing eye data for SR anipal - we only return origin and direction in world space
// As most users will want to use this ray to query intersection or something similar

@@ -143,10 +143,23 @@ class MeshRenderer(object):
self.rendering_settings.show_glfw_window,
rendering_settings.fullscreen,
)
else:
elif self.platform == "Linux" and self.__class__.__name__ == "MeshRendererVR":
from igibson.render.mesh_renderer import VRRendererContext

self.r = VRRendererContext.VRRendererContext(
width,
height,
int(self.rendering_settings.glfw_gl_version[0]),
int(self.rendering_settings.glfw_gl_version[1]),
self.rendering_settings.show_glfw_window,
rendering_settings.fullscreen,
)
elif self.platform == "Linux":
from igibson.render.mesh_renderer import EGLRendererContext

self.r = EGLRendererContext.EGLRendererContext(width, height, device)
else:
raise Exception("Unsupported platform and renderer combination")

self.r.init()
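The Linux dispatch above keys on the runtime class name rather than an explicit flag; a small self-contained illustration of that mechanism (class and method names here are hypothetical, not iGibson API):

```python
# The base class inspects self.__class__.__name__, so simply constructing the
# VR subclass is enough to select the VR context on Linux.
class MeshRendererSketch:
    def chosen_context(self):
        return "VRRendererContext" if self.__class__.__name__ == "MeshRendererVRSketch" else "EGLRendererContext"

class MeshRendererVRSketch(MeshRendererSketch):
    pass

print(MeshRendererSketch().chosen_context())    # EGLRendererContext
print(MeshRendererVRSketch().chosen_context())  # VRRendererContext
```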

@@ -261,7 +261,10 @@ class MeshRendererVR(MeshRenderer):
self.width = 1296
self.height = 1440
super().__init__(
width=self.width, height=self.height, rendering_settings=self.vr_rendering_settings, simulator=simulator
width=self.width,
height=self.height,
rendering_settings=self.vr_rendering_settings,
simulator=simulator,
)

# Rename self.r to self.vrsys

@@ -398,6 +398,7 @@ class IGLogWriter(object):
vr_pos_data.extend(p.getConstraintState(self.vr_robot.parts["body"].movement_cid))
self.data_map["vr"]["vr_device_data"]["vr_position_data"][self.frame_counter, ...] = np.array(vr_pos_data)

# On systems where eye tracking is not supported, we get dummy data and a guaranteed False validity reading
is_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = self.sim.get_eye_tracking_data()
if is_valid:
eye_data_list = [is_valid]
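Because `get_eye_tracking_data()` returns dummy values with a False validity flag on platforms without eye tracking, downstream code only needs to branch on validity. A hedged consumer sketch (the wrapper function and return structure are illustrative, not iGibson API):

```python
def collect_gaze_sample(sim):
    # Real data on Windows with a Vive Pro Eye; dummy data with is_valid=False
    # everywhere else, so no explicit platform check is required here.
    is_valid, origin, direction, left_pupil, right_pupil = sim.get_eye_tracking_data()
    if not is_valid:
        return None
    return {"origin": origin, "dir": direction, "pupil_diameters": (left_pupil, right_pupil)}
```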

setup.py

@@ -1,4 +1,5 @@
import codecs
import os
import os.path
import platform
import re

@@ -87,6 +88,9 @@ class CMakeBuild(build_ext):
else:
cmake_args += ["-DMAC_PLATFORM=FALSE"]

if os.getenv("USE_VR"):
cmake_args += ["-DUSE_VR=TRUE"]

cfg = "Debug" if self.debug else "Release"
build_args = ["--config", cfg]
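For completeness, the documented shell one-liner can also be driven from a Python script; a hedged sketch that assumes it is run from the iGibson source checkout:

```python
import os
import subprocess
import sys

# Equivalent to `USE_VR=True pip install -e .`: setup.py reads USE_VR from the
# environment and forwards -DUSE_VR=TRUE to CMake, as shown above.
env = dict(os.environ, USE_VR="True")
subprocess.check_call([sys.executable, "-m", "pip", "install", "-e", "."], env=env)
```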