Uploaded new VR hand assets, fixed VR body speed issues, modified the pbr/non-pbr demos, and updated data saving to work with the new VR changes. Also tested action saving, and put both the state-saving and action-saving demos into the data_save_replay folder.
parent c431129721
commit eb4bda8480
@ -147,6 +147,7 @@ Run vr_demo_save to save a demo to a log file, and vr_demo_replay to run it agai
Please see the demos and gibson2/utils/vr_logging.py for more details on the data saving/replay system.

To use the VR hand asset, please download and unzip the asset and put it into assets/models under the folder name 'vr_hand'.
The asset is stored in a drive folder and is entitled vr_hand.zip.
Link to VR hand zip: https://drive.google.com/drive/folders/1zm3ZpPc7yHwyALEGfsb0_NybFMvV81Um?usp=sharing

Have fun in VR!
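The save/replay system above boils down to a writer/reader pair. A minimal sketch of the round trip, assuming a Simulator `s` has already been set up as in the demos in this commit (the 'vr_hand' action path and its 9-float layout follow vr_demo_save.py; get_hand_action is a hypothetical helper that builds that array):

from gibson2.utils.vr_logging import VRLogWriter, VRLogReader

# Recording: register each action, set up storage, then save one array per frame
vr_writer = VRLogWriter(frames_before_write=200, log_filepath='vr_logs/vr_demo_save.h5')
vr_writer.register_action('vr_hand', (9,))  # validity (1) + pos (3) + orn (4) + trig_frac (1)
vr_writer.set_up_data_storage()
for i in range(210):
    s.step()
    vr_writer.save_action('vr_hand', get_hand_action())  # hypothetical helper
    vr_writer.process_frame(s)
vr_writer.end_log_session()

# Replay: frames are played back at the recorded fps
vr_reader = VRLogReader(log_filepath='vr_logs/vr_demo_save.h5')
while vr_reader.get_data_left_to_read():
    vr_reader.read_frame(s, fullReplay=True)
    s.step()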
@ -1,66 +0,0 @@
""" VR replay demo using simplified VR playground code.

This demo runs the log saved at vr_logs/vr_demo_save.h5"""

import numpy as np
import os
import pybullet as p

from gibson2.scenes.gibson_indoor_scene import StaticIndoorScene
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_logging import VRLogReader
from gibson2.utils.vr_utils import translate_vr_position_by_vecs
from gibson2 import assets_path
sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')

# Playground configuration: edit this to change functionality
optimize = True
# Toggles fullscreen companion window
fullscreen = False

# Initialize simulator
s = Simulator(mode='vr', timestep = 1/90.0, optimized_renderer=optimize, vrFullscreen=fullscreen, vrMode=False)
scene = StaticIndoorScene('Placida')
s.import_scene(scene)

rHand = VrHand()
s.import_object(rHand)
# Note: We do not call set_start_state, as this would add in a constraint that messes up data replay

# Add playground objects to the scene
# Eye tracking visual marker - a red marker appears in the scene to indicate gaze direction
gaze_marker = VisualMarker(radius=0.03)
s.import_object(gaze_marker)
gaze_marker.set_position([0,0,1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path)
s.import_object(basket)
basket.set_position([1, 0.2, 1])
p.changeDynamics(basket.body_id, -1, mass=5)

mass_list = [5, 10, 100, 500]
mustard_start = [1, -0.2, 1]
for i in range(len(mass_list)):
    mustard = YCBObject('006_mustard_bottle')
    s.import_object(mustard)
    mustard.set_position([mustard_start[0], mustard_start[1] - i * 0.2, mustard_start[2]])
    p.changeDynamics(mustard.body_id, -1, mass=mass_list[i])

if optimize:
    s.optimize_vertex_and_texture()

# Note: the VRLogReader plays back the demo at the recorded fps, so there is no need to set this
vr_log_path = 'vr_logs/vr_demo_save.h5'
vr_reader = VRLogReader(log_filepath=vr_log_path)

while vr_reader.get_data_left_to_read():
    # Note: Please see the code in gibson2/utils/vr_logging.py to extract custom
    # data for experiments
    vr_reader.read_frame(s, fullReplay=True)

    s.step(shouldPrintTime=False)
@ -1,19 +1,28 @@
""" VR replay demo using simplified VR playground code.
""" VR saving demo using simplified VR playground code.

This demo runs the log saved at vr_logs/vr_demo_save.h5"""
This demo replays the actions of certain objects in the scene.

Note: This demo does not use PBR so it can be supported on a wide range of devices, including Mac OS.

This demo saves to vr_logs/vr_demo_save_states.h5
If you would like to replay the data, please run
vr_demo_replay using this file path as an input.

Run this demo if you would like to save your own data."""

import numpy as np
import os
import pybullet as p

from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.gibson_indoor_scene import StaticIndoorScene
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrHand
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_logging import VRLogReader
from gibson2.utils.vr_utils import translate_vr_position_by_vecs
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path
sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')

@ -21,21 +30,49 @@ sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')
optimize = True
# Toggles fullscreen companion window
fullscreen = False
# Toggles SRAnipal eye tracking
use_eye_tracking = True
# Enables the VR collision body
enable_vr_body = True
# Toggles movement with the touchpad (to move outside of play area)
touchpad_movement = True
# Set to one of hmd, right_controller or left_controller to move relative to that device
relative_movement_device = 'hmd'
# Movement speed for touchpad-based movement
movement_speed = 0.03
# Whether we should hide a mustard bottle when the menu button is pressed
hide_mustard_on_press = True

# Initialize simulator
s = Simulator(mode='vr', timestep = 1/90.0, optimized_renderer=optimize, vrFullscreen=fullscreen, vrMode=False)
# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', physics_timestep = 1/90.0, render_timestep = 1/90.0,
              rendering_settings=MeshRendererSettings(optimized=optimize, fullscreen=fullscreen, enable_pbr=False),
              vr_eye_tracking=use_eye_tracking, vr_mode=False)
scene = StaticIndoorScene('Placida')
s.import_scene(scene)

rHand = VrHand()
s.import_object(rHand)
# Note: We do not call set_start_state, as this would add in a constraint that messes up data replay
# Player body is represented by a translucent blue cylinder
if enable_vr_body:
    vr_body = VrBody()
    s.import_object(vr_body)
    # Note: we don't call init_body since we will be controlling the body directly through pos/orientation actions

# Add playground objects to the scene
# Eye tracking visual marker - a red marker appears in the scene to indicate gaze direction
gaze_marker = VisualMarker(radius=0.03)
s.import_object(gaze_marker)
gaze_marker.set_position([0,0,1.5])
# The hand can either be 'right' or 'left'
# It has enough friction to pick up the basket and the mustard bottles
r_hand = VrHand(hand='right')
s.import_object(r_hand)
# This sets the hand constraints so it can move with the VR controller
r_hand.set_start_state(start_pos=[0, 0, 1.5])

l_hand = VrHand(hand='left')
s.import_object(l_hand)
# This sets the hand constraints so it can move with the VR controller
l_hand.set_start_state(start_pos=[0, 0.5, 1.5])

if use_eye_tracking:
    # Eye tracking visual marker - a red marker appears in the scene to indicate gaze direction
    gaze_marker = VisualMarker(radius=0.03)
    s.import_object(gaze_marker)
    gaze_marker.set_position([0,0,1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path)

@ -45,8 +82,10 @@ p.changeDynamics(basket.body_id, -1, mass=5)

mass_list = [5, 10, 100, 500]
mustard_start = [1, -0.2, 1]
mustard_list = []
for i in range(len(mass_list)):
    mustard = YCBObject('006_mustard_bottle')
    mustard_list.append(mustard)
    s.import_object(mustard)
    mustard.set_position([mustard_start[0], mustard_start[1] - i * 0.2, mustard_start[2]])
    p.changeDynamics(mustard.body_id, -1, mass=mass_list[i])

@ -54,14 +93,62 @@ for i in range(len(mass_list)):
if optimize:
    s.optimize_vertex_and_texture()

# Note: the VRLogReader plays back the demo at the recorded fps, so there is no need to set this
vr_log_path = 'vr_logs/vr_demo_save.h5'
# Start user close to counter for interaction
s.set_vr_offset([-0.5, 0.0, -0.5])

# State of can hiding, toggled by a menu press
hide_mustard = False

# Modify this path to save to different files
vr_log_path = 'vr_logs/vr_demo_save_actions.h5'
vr_right_hand_action_path = 'vr_hand/right'
vr_left_hand_action_path = 'vr_hand/left'
vr_menu_button_action_path = 'vr_menu_button'
vr_body_action_path = 'vr_body'

vr_reader = VRLogReader(log_filepath=vr_log_path)
vr_hand_action_path = 'vr_hand'

# In this demo, we feed actions into the simulator and simulate
# everything else.
# The VR reader automatically shuts itself down and performs cleanup once the while loop has finished running
while vr_reader.get_data_left_to_read():
    vr_hand_actions = vr_reader.read_action(vr_hand_action_path)
    print(vr_hand_actions.shape, vr_hand_actions)

    # We set fullReplay to false so we only simulate using actions
    vr_reader.read_frame(s, fullReplay=False)
    s.step(shouldPrintTime=False)
    s.step()

    # Contains validity [0], trans [1-3], orn [4-7], trig_frac [8], touch coordinates (x and y) [9-10]
    vr_rh_actions = vr_reader.read_action(vr_right_hand_action_path)
    vr_lh_actions = vr_reader.read_action(vr_left_hand_action_path)
    vr_menu_state = vr_reader.read_action(vr_menu_button_action_path)
    vr_body_actions = vr_reader.read_action(vr_body_action_path)

    # Set mustard hidden state based on recorded button action
    if vr_menu_state == 1:
        s.set_hidden_state(mustard_list[2], hide=True)
    elif vr_menu_state == 0:
        s.set_hidden_state(mustard_list[2], hide=False)

    # Move VR hands
    if vr_rh_actions[0] == 1.0:
        r_hand.move(vr_rh_actions[1:4], vr_rh_actions[4:8])
        r_hand.set_close_fraction(vr_rh_actions[8])

    if vr_lh_actions[0] == 1.0:
        l_hand.move(vr_lh_actions[1:4], vr_lh_actions[4:8])
        l_hand.set_close_fraction(vr_lh_actions[8])

    # Move VR body
    vr_body.set_position_orientation(vr_body_actions[0:3], vr_body_actions[3:7])

    # Get stored eye tracking data - this is an example of how to read values that are not actions from the VRLogReader
    eye_data = vr_reader.read_value('vr/vr_eye_tracking_data')
    is_eye_data_valid = eye_data[0]
    origin = eye_data[1:4]
    direction = eye_data[4:7]
    left_pupil_diameter = eye_data[7]
    right_pupil_diameter = eye_data[8]

    if is_eye_data_valid:
        # Move gaze marker based on eye tracking data
        updated_marker_pos = [origin[0] + direction[0], origin[1] + direction[1], origin[2] + direction[2]]
        gaze_marker.set_position(updated_marker_pos)
@ -0,0 +1,104 @@
""" VR replay demo using simplified VR playground code.

This demo replays the states of all objects in their entirety, and does
not involve any meaningful physical simulation.

Note: This demo does not use PBR so it can be supported on a wide range of devices, including Mac OS.

This demo reads logs from vr_logs/vr_demo_save_states.h5
If you would like to replay your own data, please run
vr_demo_save_states and change the file path where data is recorded."""

import numpy as np
import os
import pybullet as p

from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.gibson_indoor_scene import StaticIndoorScene
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_logging import VRLogReader
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path
sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')

# Playground configuration: edit this to change functionality
optimize = True
# Toggles fullscreen companion window
fullscreen = False
# Toggles SRAnipal eye tracking
use_eye_tracking = True
# Enables the VR collision body
enable_vr_body = True
# Toggles movement with the touchpad (to move outside of play area)
touchpad_movement = True
# Set to one of hmd, right_controller or left_controller to move relative to that device
relative_movement_device = 'hmd'
# Movement speed for touchpad-based movement
movement_speed = 0.03

# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', physics_timestep = 1/90.0, render_timestep = 1/90.0,
              rendering_settings=MeshRendererSettings(optimized=optimize, fullscreen=fullscreen, enable_pbr=False),
              vr_eye_tracking=use_eye_tracking, vr_mode=False)
scene = StaticIndoorScene('Placida')
s.import_scene(scene)

# Player body is represented by a translucent blue cylinder
if enable_vr_body:
    vr_body = VrBody()
    s.import_object(vr_body)
    # Note: we don't call init_body for the VR body to avoid constraints interfering with the replay

# The hand can either be 'right' or 'left'
# It has enough friction to pick up the basket and the mustard bottles
r_hand = VrHand(hand='right')
s.import_object(r_hand)
# Note: we don't call set start state for the VR hands to avoid constraints interfering with the replay

l_hand = VrHand(hand='left')
s.import_object(l_hand)
# Note: we don't call set start state for the VR hands to avoid constraints interfering with the replay

if use_eye_tracking:
    # Eye tracking visual marker - a red marker appears in the scene to indicate gaze direction
    gaze_marker = VisualMarker(radius=0.03)
    s.import_object(gaze_marker)
    gaze_marker.set_position([0,0,1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path)
s.import_object(basket)
basket.set_position([1, 0.2, 1])
p.changeDynamics(basket.body_id, -1, mass=5)

mass_list = [5, 10, 100, 500]
mustard_start = [1, -0.2, 1]
mustard_list = []
for i in range(len(mass_list)):
    mustard = YCBObject('006_mustard_bottle')
    mustard_list.append(mustard)
    s.import_object(mustard)
    mustard.set_position([mustard_start[0], mustard_start[1] - i * 0.2, mustard_start[2]])
    p.changeDynamics(mustard.body_id, -1, mass=mass_list[i])

if optimize:
    s.optimize_vertex_and_texture()

# Start user close to counter for interaction
s.set_vr_offset([-0.5, 0.0, -0.5])

# Note: the VRLogReader plays back the demo at the recorded fps, so there is no need to set this
vr_log_path = 'vr_logs/vr_demo_save_states.h5'
vr_reader = VRLogReader(log_filepath=vr_log_path)

# The VR reader automatically shuts itself down and performs cleanup once the while loop has finished running
while vr_reader.get_data_left_to_read():
    # Note: Please see the code in gibson2/utils/vr_logging.py to extract custom
    # data for experiments
    vr_reader.read_frame(s, fullReplay=True)

    s.step()
@ -1,129 +0,0 @@
""" VR saving demo using simplified VR playground code.

This demo saves to vr_logs/vr_demo_save.h5
If you would like to replay the data, please run
vr_demo_replay using this file path as an input.

Run this demo if you would like to save your own data."""

import numpy as np
import os
import pybullet as p

from gibson2.scenes.gibson_indoor_scene import StaticIndoorScene
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_logging import VRLogWriter
from gibson2.utils.vr_utils import translate_vr_position_by_vecs
from gibson2 import assets_path
sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')

# Playground configuration: edit this to change functionality
optimize = True
vr_mode = True
print_fps = False
# Toggles fullscreen companion window
fullscreen = False
# Toggles SRAnipal eye tracking
use_eye_tracking = True
# Toggles movement with the touchpad (to move outside of play area)
touchpad_movement = True
# Set to one of hmd, right_controller or left_controller to move relative to that device
relative_movement_device = 'hmd'
# Movement speed for touchpad movement
movement_speed = 0.01

# Initialize simulator
s = Simulator(mode='vr', timestep = 1/90.0, optimized_renderer=optimize, vrFullscreen=fullscreen, vrEyeTracking=use_eye_tracking, vrMode=vr_mode)
scene = StaticIndoorScene('Placida')
s.import_scene(scene)

# This playground only uses one hand - it has enough friction to pick up some of the
# mustard bottles
rHand = VrHand()
s.import_object(rHand)
# This sets the hand constraints so it can move with the VR controller
rHand.set_start_state(start_pos=[0.0, 0.5, 1.5])

# Add playground objects to the scene
# Eye tracking visual marker - a red marker appears in the scene to indicate gaze direction
gaze_marker = VisualMarker(radius=0.03)
s.import_object(gaze_marker)
gaze_marker.set_position([0,0,1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path)
s.import_object(basket)
basket.set_position([1, 0.2, 1])
p.changeDynamics(basket.body_id, -1, mass=5)

mass_list = [5, 10, 100, 500]
mustard_start = [1, -0.2, 1]
for i in range(len(mass_list)):
    mustard = YCBObject('006_mustard_bottle')
    s.import_object(mustard)
    mustard.set_position([mustard_start[0], mustard_start[1] - i * 0.2, mustard_start[2]])
    p.changeDynamics(mustard.body_id, -1, mass=mass_list[i])

if optimize:
    s.optimize_vertex_and_texture()

# Start user close to counter for interaction
s.setVROffset([1.0, 0, -0.4])

# Modify this path to save to different files
vr_log_path = 'vr_logs/vr_demo_save.h5'
# Saves every 2 seconds or so (200 / 90fps is approx 2 seconds)
vr_writer = VRLogWriter(frames_before_write=200, log_filepath=vr_log_path, profiling_mode=True)

# Save VR hand transform, validity and trigger fraction
# action->vr_hand (dataset)
# Total size of numpy array: 1 (validity) + 3 (pos) + 4 (orn) + 1 (trig_frac) = (9,)
vr_hand_action_path = 'vr_hand'
vr_writer.register_action(vr_hand_action_path, (9,))

# Call set_up_data_storage once all actions have been registered
vr_writer.set_up_data_storage()

# Main simulation loop - 20 to 30 seconds of simulation data recorded
for i in range(210):
    # Optionally print fps during simulator step
    s.step(shouldPrintTime=print_fps)

    rIsValid, rTrans, rRot = s.getDataForVRDevice('right_controller')
    rTrig, rTouchX, rTouchY = s.getButtonDataForController('right_controller')

    # VR eye tracking data
    is_eye_data_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = s.getEyeTrackingData()
    if is_eye_data_valid:
        # Move gaze marker based on eye tracking data
        updated_marker_pos = [origin[0] + dir[0], origin[1] + dir[1], origin[2] + dir[2]]
        gaze_marker.set_position(updated_marker_pos)

    # Get coordinate system for relative movement device
    right, _, forward = s.getDeviceCoordinateSystem(relative_movement_device)

    # Save VR hand data
    vr_hand_data = [1.0 if rIsValid else 0.0]
    vr_hand_data.extend(rTrans)
    vr_hand_data.extend(rRot)
    vr_hand_data.append(rTrig)
    vr_hand_data = np.array(vr_hand_data)

    vr_writer.save_action(vr_hand_action_path, vr_hand_data)

    if rIsValid:
        rHand.move(rTrans, rRot)
        rHand.set_close_fraction(rTrig)
        s.setVROffset(translate_vr_position_by_vecs(rTouchX, rTouchY, right, forward, s.getVROffset(), movement_speed))

    # Record this frame's data in the VRLogWriter
    vr_writer.process_frame(s)

# Note: always call this after the simulation is over to close the log file
# and clean up resources used.
vr_writer.end_log_session()
s.disconnect()
@ -0,0 +1,220 @@
""" VR saving demo using simplified VR playground code.

This demo saves the actions of certain objects as well as states. Either can
be used to play back later in the replay demo.

Note: This demo does not use PBR so it can be supported on a wide range of devices, including Mac OS.

This demo saves to vr_logs/vr_demo_save_actions.h5
If you would like to replay the data, please run
vr_demo_replay using this file path as an input.

Run this demo if you would like to save your own data."""

import numpy as np
import os
import pybullet as p

from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.gibson_indoor_scene import StaticIndoorScene
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_logging import VRLogWriter
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path
sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')

# Playground configuration: edit this to change functionality
optimize = True
# Toggles fullscreen companion window
fullscreen = False
# Toggles SRAnipal eye tracking
use_eye_tracking = True
# Enables the VR collision body
enable_vr_body = True
# Toggles movement with the touchpad (to move outside of play area)
touchpad_movement = True
# Set to one of hmd, right_controller or left_controller to move relative to that device
relative_movement_device = 'hmd'
# Movement speed for touchpad-based movement
movement_speed = 0.03

# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', physics_timestep = 1/90.0, render_timestep = 1/90.0,
              rendering_settings=MeshRendererSettings(optimized=optimize, fullscreen=fullscreen, enable_pbr=False),
              vr_eye_tracking=use_eye_tracking, vr_mode=True)
scene = StaticIndoorScene('Placida')
s.import_scene(scene)

# Player body is represented by a translucent blue cylinder
if enable_vr_body:
    vr_body = VrBody()
    s.import_object(vr_body)
    vr_body.init_body([0,0])

# The hand can either be 'right' or 'left'
# It has enough friction to pick up the basket and the mustard bottles
r_hand = VrHand(hand='right')
s.import_object(r_hand)
# This sets the hand constraints so it can move with the VR controller
r_hand.set_start_state(start_pos=[0, 0, 1.5])

l_hand = VrHand(hand='left')
s.import_object(l_hand)
# This sets the hand constraints so it can move with the VR controller
l_hand.set_start_state(start_pos=[0, 0.5, 1.5])

if use_eye_tracking:
    # Eye tracking visual marker - a red marker appears in the scene to indicate gaze direction
    gaze_marker = VisualMarker(radius=0.03)
    s.import_object(gaze_marker)
    gaze_marker.set_position([0,0,1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path)
s.import_object(basket)
basket.set_position([1, 0.2, 1])
p.changeDynamics(basket.body_id, -1, mass=5)

mass_list = [5, 10, 100, 500]
mustard_start = [1, -0.2, 1]
mustard_list = []
for i in range(len(mass_list)):
    mustard = YCBObject('006_mustard_bottle')
    mustard_list.append(mustard)
    s.import_object(mustard)
    mustard.set_position([mustard_start[0], mustard_start[1] - i * 0.2, mustard_start[2]])
    p.changeDynamics(mustard.body_id, -1, mass=mass_list[i])

if optimize:
    s.optimize_vertex_and_texture()

# Start user close to counter for interaction
s.set_vr_offset([-0.5, 0.0, -0.5])

# Modify this path to save to different files
vr_log_path = 'vr_logs/vr_demo_save_actions.h5'
# Saves every 2 seconds or so (200 / 90fps is approx 2 seconds)
vr_writer = VRLogWriter(frames_before_write=200, log_filepath=vr_log_path, profiling_mode=True)

# Register all actions. In this demo we register the following actions:

# Save VR hand transform, validity and trigger fraction for each hand
# action->vr_hand->right/left (dataset)
# Total size of numpy array: 1 (validity) + 3 (pos) + 4 (orn) + 1 (trig_frac) + 2 (touch coordinates) = (11,)
vr_right_hand_action_path = 'vr_hand/right'
vr_writer.register_action(vr_right_hand_action_path, (11,))
vr_left_hand_action_path = 'vr_hand/left'
vr_writer.register_action(vr_left_hand_action_path, (11,))
# Save menu button so we can replay hiding the mustard bottle
vr_menu_button_action_path = 'vr_menu_button'
# We will save the state - 1 is pressed, 0 is not pressed (-1 indicates no data for the given frame)
vr_writer.register_action(vr_menu_button_action_path, (1,))
# Save body position and orientation as an action - it is quite complicated to replay the VR body using VR data,
# so we will just record its position and orientation as an action
vr_body_action_path = 'vr_body'
# Total size of numpy array: 3 (pos) + 4 (orn) = (7,)
vr_writer.register_action(vr_body_action_path, (7,))

# Call set_up_data_storage once all actions have been registered
vr_writer.set_up_data_storage()

# Main simulation loop
for i in range(3000):
    # We save the right controller menu press that hides/unhides the mustard - this can be replayed
    # VR button data is saved by default, so we don't need to make it an action
    # Please see utils/vr_logging.py for more details on what is saved by default for the VR system

    # In this example, the mustard is visible until the user presses the menu button, and then is toggled
    # on/off depending on whether the menu is pressed or unpressed
    event_list = s.poll_vr_events()
    for event in event_list:
        device_type, event_type = event
        if device_type == 'right_controller':
            if event_type == 'menu_press':
                # Toggle mustard hidden state
                s.set_hidden_state(mustard_list[2], hide=True)
                vr_writer.save_action(vr_menu_button_action_path, np.array([1]))
            elif event_type == 'menu_unpress':
                s.set_hidden_state(mustard_list[2], hide=False)
                vr_writer.save_action(vr_menu_button_action_path, np.array([0]))

    # Step the simulator - this needs to be done every frame to actually run the simulation
    s.step()

    # VR device data
    hmd_is_valid, hmd_trans, hmd_rot = s.get_data_for_vr_device('hmd')
    l_is_valid, l_trans, l_rot = s.get_data_for_vr_device('left_controller')
    r_is_valid, r_trans, r_rot = s.get_data_for_vr_device('right_controller')

    # VR button data
    l_trig, l_touch_x, l_touch_y = s.get_button_data_for_controller('left_controller')
    r_trig, r_touch_x, r_touch_y = s.get_button_data_for_controller('right_controller')

    # Create actions and save them
    vr_right_hand_data = [1.0 if r_is_valid else 0.0]
    vr_right_hand_data.extend(r_trans)
    vr_right_hand_data.extend(r_rot)
    vr_right_hand_data.append(r_trig)
    vr_right_hand_data.extend([r_touch_x, r_touch_y])
    vr_right_hand_data = np.array(vr_right_hand_data)

    vr_writer.save_action(vr_right_hand_action_path, vr_right_hand_data)

    vr_left_hand_data = [1.0 if l_is_valid else 0.0]
    vr_left_hand_data.extend(l_trans)
    vr_left_hand_data.extend(l_rot)
    vr_left_hand_data.append(l_trig)
    vr_left_hand_data.extend([l_touch_x, l_touch_y])
    vr_left_hand_data = np.array(vr_left_hand_data)

    vr_writer.save_action(vr_left_hand_action_path, vr_left_hand_data)

    vr_body_data = list(vr_body.get_position())
    vr_body_data.extend(vr_body.get_orientation())
    vr_body_data = np.array(vr_body_data)

    vr_writer.save_action(vr_body_action_path, vr_body_data)

    # VR eye tracking data
    if use_eye_tracking:
        is_eye_data_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = s.get_eye_tracking_data()
        if is_eye_data_valid:
            # Move gaze marker based on eye tracking data
            updated_marker_pos = [origin[0] + dir[0], origin[1] + dir[1], origin[2] + dir[2]]
            gaze_marker.set_position(updated_marker_pos)

    if r_is_valid:
        r_hand.move(r_trans, r_rot)
        r_hand.set_close_fraction(r_trig)

        if enable_vr_body:
            # See VrBody class for more details on this method
            vr_body.move_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)
        else:
            # Right hand used to control movement
            # Move VR system based on device coordinate system and touchpad press location
            move_player_no_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)

        # Trigger haptic pulse on right touchpad, modulated by trigger close fraction
        # Close the trigger to create a stronger pulse
        # Note: open trigger has closed fraction of 0.05 when open, so cut off haptic input under 0.1
        # to avoid constant rumbling
        s.trigger_haptic_pulse('right_controller', r_trig if r_trig > 0.1 else 0)

    if l_is_valid:
        l_hand.move(l_trans, l_rot)
        l_hand.set_close_fraction(l_trig)
        s.trigger_haptic_pulse('left_controller', l_trig if l_trig > 0.1 else 0)

    # Record this frame's data in the VRLogWriter
    vr_writer.process_frame(s)

# Note: always call this after the simulation is over to close the log file
# and clean up resources used.
vr_writer.end_log_session()
s.disconnect()
@ -0,0 +1,159 @@
""" VR saving demo using simplified VR playground code.

This demo saves the states of all objects in their entirety. The replay
resulting from this is completely controlled by the saved state data, and does
not involve any meaningful physical simulation.

Note: This demo does not use PBR so it can be supported on a wide range of devices, including Mac OS.

This demo saves to vr_logs/vr_demo_save_states.h5
If you would like to replay the data, please run
vr_demo_replay using this file path as an input.

Run this demo if you would like to save your own data."""

import numpy as np
import os
import pybullet as p

from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.gibson_indoor_scene import StaticIndoorScene
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_logging import VRLogWriter
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path
sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')

# Playground configuration: edit this to change functionality
optimize = True
# Toggles fullscreen companion window
fullscreen = False
# Toggles SRAnipal eye tracking
use_eye_tracking = True
# Enables the VR collision body
enable_vr_body = True
# Toggles movement with the touchpad (to move outside of play area)
touchpad_movement = True
# Set to one of hmd, right_controller or left_controller to move relative to that device
relative_movement_device = 'hmd'
# Movement speed for touchpad-based movement
movement_speed = 0.03

# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', physics_timestep = 1/90.0, render_timestep = 1/90.0,
              rendering_settings=MeshRendererSettings(optimized=optimize, fullscreen=fullscreen, enable_pbr=False),
              vr_eye_tracking=use_eye_tracking, vr_mode=True)
scene = StaticIndoorScene('Placida')
s.import_scene(scene)

# Player body is represented by a translucent blue cylinder
if enable_vr_body:
    vr_body = VrBody()
    s.import_object(vr_body)
    vr_body.init_body([0,0])

# The hand can either be 'right' or 'left'
# It has enough friction to pick up the basket and the mustard bottles
r_hand = VrHand(hand='right')
s.import_object(r_hand)
# This sets the hand constraints so it can move with the VR controller
r_hand.set_start_state(start_pos=[0, 0, 1.5])

l_hand = VrHand(hand='left')
s.import_object(l_hand)
# This sets the hand constraints so it can move with the VR controller
l_hand.set_start_state(start_pos=[0, 0.5, 1.5])

if use_eye_tracking:
    # Eye tracking visual marker - a red marker appears in the scene to indicate gaze direction
    gaze_marker = VisualMarker(radius=0.03)
    s.import_object(gaze_marker)
    gaze_marker.set_position([0,0,1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path)
s.import_object(basket)
basket.set_position([1, 0.2, 1])
p.changeDynamics(basket.body_id, -1, mass=5)

mass_list = [5, 10, 100, 500]
mustard_start = [1, -0.2, 1]
mustard_list = []
for i in range(len(mass_list)):
    mustard = YCBObject('006_mustard_bottle')
    mustard_list.append(mustard)
    s.import_object(mustard)
    mustard.set_position([mustard_start[0], mustard_start[1] - i * 0.2, mustard_start[2]])
    p.changeDynamics(mustard.body_id, -1, mass=mass_list[i])

if optimize:
    s.optimize_vertex_and_texture()

# Start user close to counter for interaction
s.set_vr_offset([-0.5, 0.0, -0.5])

# Modify this path to save to different files
vr_log_path = 'vr_logs/vr_demo_save_states.h5'
# Saves every 2 seconds or so (200 / 90fps is approx 2 seconds)
vr_writer = VRLogWriter(frames_before_write=200, log_filepath=vr_log_path, profiling_mode=True)

# Call set_up_data_storage once all actions have been registered (in this demo we only save states so there are none)
# Despite having no actions, we need to call this function
vr_writer.set_up_data_storage()

# Main simulation loop
for i in range(3000):
    # Step the simulator - this needs to be done every frame to actually run the simulation
    s.step()

    # VR device data
    hmd_is_valid, hmd_trans, hmd_rot = s.get_data_for_vr_device('hmd')
    l_is_valid, l_trans, l_rot = s.get_data_for_vr_device('left_controller')
    r_is_valid, r_trans, r_rot = s.get_data_for_vr_device('right_controller')

    # VR button data
    l_trig, l_touch_x, l_touch_y = s.get_button_data_for_controller('left_controller')
    r_trig, r_touch_x, r_touch_y = s.get_button_data_for_controller('right_controller')

    # VR eye tracking data
    if use_eye_tracking:
        is_eye_data_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = s.get_eye_tracking_data()
        if is_eye_data_valid:
            # Move gaze marker based on eye tracking data
            updated_marker_pos = [origin[0] + dir[0], origin[1] + dir[1], origin[2] + dir[2]]
            gaze_marker.set_position(updated_marker_pos)

    if r_is_valid:
        r_hand.move(r_trans, r_rot)
        r_hand.set_close_fraction(r_trig)

        if enable_vr_body:
            # See VrBody class for more details on this method
            vr_body.move_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)
        else:
            # Right hand used to control movement
            # Move VR system based on device coordinate system and touchpad press location
            move_player_no_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)

        # Trigger haptic pulse on right touchpad, modulated by trigger close fraction
        # Close the trigger to create a stronger pulse
        # Note: open trigger has closed fraction of 0.05 when open, so cut off haptic input under 0.1
        # to avoid constant rumbling
        s.trigger_haptic_pulse('right_controller', r_trig if r_trig > 0.1 else 0)

    if l_is_valid:
        l_hand.move(l_trans, l_rot)
        l_hand.set_close_fraction(l_trig)
        s.trigger_haptic_pulse('left_controller', l_trig if l_trig > 0.1 else 0)

    # Record this frame's data in the VRLogWriter
    vr_writer.process_frame(s)

# Note: always call this after the simulation is over to close the log file
# and clean up resources used.
vr_writer.end_log_session()
s.disconnect()
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,72 +0,0 @@
""" VR playground containing various objects and VR options that can be toggled
to experiment with the VR experience in iGibson. This playground operates in a
PBR scene. Please see vr_playground_no_pbr.py for a non-PBR experience.

Important: VR functionality and where to find it:

1) Most VR functions can be found in the gibson2/simulator.py
2) VR utility functions are found in gibson2/utils/vr_utils.py
3) The VR renderer can be found in gibson2/render/mesh_renderer.py
4) The underlying VR C++ code can be found in vr_mesh_render.h and .cpp in gibson2/render/cpp
"""

import numpy as np
import os
import pybullet as p
import time

import gibson2
from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.igibson_indoor_scene import InteractiveIndoorScene
from gibson2.simulator import Simulator

# Playground configuration: edit this to change functionality
optimize = True
vr_mode = False

# HDR files for PBR rendering
hdr_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_02.hdr')
hdr_texture2 = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_03.hdr')
light_modulation_map_filename = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'Rs_int', 'layout', 'floor_lighttype_0.png')
background_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'urban_street_01.jpg')

# VR rendering settings
vr_rendering_settings = MeshRendererSettings(optimized=optimize,
                                             fullscreen=False,
                                             env_texture_filename=hdr_texture,
                                             env_texture_filename2=hdr_texture2,
                                             env_texture_filename3=background_texture,
                                             light_modulation_map_filename=light_modulation_map_filename,
                                             enable_shadow=True,
                                             enable_pbr=True,
                                             msaa=True,
                                             light_dimming_factor=1.0)
# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', physics_timestep = 1/90.0, render_timestep = 1/90.0, rendering_settings=vr_rendering_settings,
              vr_eye_tracking=False, vr_mode=vr_mode)
scene = InteractiveIndoorScene('Rs_int')
scene._set_first_n_objects(10)
s.import_ig_scene(scene)

if not vr_mode:
    camera_pose = np.array([0, 0, 1.2])
    # Look out over the main body of the Rs scene
    view_direction = np.array([0, -1, 0])
    s.renderer.set_camera(camera_pose, camera_pose + view_direction, [0, 0, 1])
    s.renderer.set_fov(90)

if optimize:
    s.optimize_vertex_and_texture()

while True:
    start_time = time.time()
    s.step()
    frame_time = time.time() - start_time
    print('Frame time: {}'.format(frame_time))
    print('Fps: {}'.format(round(1/max(0.0001, frame_time), 2)))

s.disconnect()
@ -16,7 +16,7 @@ import pybullet as p

from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.gibson_indoor_scene import StaticIndoorScene
from gibson2.objects.articulated_object import ArticulatedObject, VArticulatedObject
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject

@ -93,7 +93,7 @@ if optimize:
    s.optimize_vertex_and_texture()

# Start user close to counter for interaction
s.set_vr_offset([-0.5, 0.0, -0.4])
s.set_vr_offset([-0.5, 0.0, -0.5])

# State of mustard hiding, toggled by a menu press
hide_mustard = False

@ -71,12 +71,13 @@ vr_rendering_settings = MeshRendererSettings(optimized=optimize,
s = Simulator(mode='vr', physics_timestep = 1/90.0, render_timestep = 1/90.0, rendering_settings=vr_rendering_settings,
              vr_eye_tracking=use_eye_tracking, vr_mode=True)
scene = InteractiveIndoorScene('Rs_int')
# Turn this on when debugging to speed up loading
# scene._set_first_n_objects(10)
s.import_ig_scene(scene)

# Player body is represented by a translucent blue cylinder
if enable_vr_body:
    vr_body = VrBody()
    print("Importing body!")
    s.import_object(vr_body, use_pbr=False, use_pbr_mapping=False, shadow_caster=False)
    vr_body.init_body([0,0])

@ -117,7 +118,7 @@ if optimize:
    s.optimize_vertex_and_texture()

# Set VR starting position in the scene
s.set_vr_offset([0, 0, -0.3])
s.set_vr_offset([0, 0, -0.6])

# State of can hiding, toggled by a menu press
hide_can = False

@ -35,6 +35,8 @@ class VrBody(Object):
        # Keep track of start x and y rotation so we can lock object to these values
        self.start_x_rot = 0.0
        self.start_y_rot = 0.0
        # Need this extra factor to amplify HMD movement vector, since body doesn't reach HMD each frame (since constraints don't set position)
        self.hmd_vec_amp = 3

    # TIMELINE: Call this after loading the VR body into the simulator
    def init_body(self, start_pos):
@ -89,8 +91,8 @@ class VrBody(Object):
        curr_offset = s.get_vr_offset()
        # Translate VR offset using controller information
        translated_offset = translate_vr_position_by_vecs(rTouchX, rTouchY, right, forward, curr_offset, movement_speed)
        # New player position calculated
        new_player_pos = hmd_wp + translated_offset
        # New player position calculated - amplify delta in HMD position to account for constraints not moving body exactly to new position each frame
        new_player_pos = (hmd_wp - self.prev_hmd_wp) * self.hmd_vec_amp + translated_offset + self.prev_hmd_wp
        # Attempt to set the vr body to this new position (will stop if collides with wall, for example)
        # This involves setting translation and rotation constraint
        x, y, z = new_player_pos
@ -104,7 +106,7 @@ class VrBody(Object):

        # Use starting x and y rotation so our body does not get knocked over when we collide with low objects
        new_rot = p.getQuaternionFromEuler([self.start_x_rot, self.start_y_rot, curr_z])
        p.changeConstraint(self.movement_cid, [x, y, new_center], new_rot, maxForce=500)
        p.changeConstraint(self.movement_cid, [x, y, new_center], new_rot, maxForce=2000)

        # Update previous HMD world position at end of frame
        self.prev_hmd_wp = hmd_wp
@ -134,14 +136,15 @@ class VrHand(ArticulatedObject):
    Joint 16 has name Itip__Imiddle
    """

    def __init__(self, hand='right'):
    # VR hand can be one of three types - no_pbr (diffuse white/grey color), skin or metal
    def __init__(self, hand='right', tex_type='no_pbr'):
        self.vr_hand_folder = os.path.join(assets_path, 'models', 'vr_hand')
        self.hand = hand
        if self.hand not in ['left', 'right']:
            print('ERROR: hand parameter must either be left or right!')
            return

        self.filename = os.path.join(self.vr_hand_folder, 'vr_hand_{}.urdf'.format(self.hand))
        self.filename = os.path.join(self.vr_hand_folder, tex_type, 'vr_hand_{}.urdf'.format(self.hand))
        super(VrHand, self).__init__(filename=self.filename, scale=1)
        # Hand needs to be rotated to visually align with VR controller
        if self.hand == 'right':
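As a usage note: with this change, demos can pick a texture per hand. A hedged sketch (the 'skin' and 'metal' options come from the comment above; 'no_pbr' remains the default):

r_hand = VrHand(hand='right')                  # loads vr_hand/no_pbr/vr_hand_right.urdf
l_hand = VrHand(hand='left', tex_type='skin')  # loads vr_hand/skin/vr_hand_left.urdf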
@ -1,9 +1,6 @@
"""
VRLog classes that write/read iGibson VR data to/from HDF5.

TODO: Save velocity/torque for algorithmic training? Not necessary for replay, but might be helpful.
Can easily save velocity for joints, but might have to use link states for normal pybullet objects.

HDF5 hierarchy:
/ (root)

@ -85,7 +82,6 @@ class VRLogWriter():
        # If true, will print out time it takes to save to hd5
        self.profiling_mode = profiling_mode
        # PyBullet body ids to be saved
        # TODO: Make sure this is the correct way to get the body ids!
        self.pb_ids = [p.getBodyUniqueId(i) for i in range(p.getNumBodies())]
        self.pb_id_data_len_map = dict()
        self.data_map = None
@ -95,8 +91,8 @@ class VRLogWriter():
        self.frame_counter = 0
        # Counts number of frames and does not reset
        self.persistent_frame_count = 0
        # Time when last frame ended (not valid for first frame, so set to 0)
        self.last_frame_end_time = 0
        # Time when last frame ended (not valid for first frame, set to current time to get a reasonable estimate)
        self.last_frame_end_time = time.time()
        # Handle of HDF5 file
        self.hf = None
        # Name path data - used to extract data from data map and save to hd5
@ -256,7 +252,7 @@ class VRLogWriter():
        self.data_map['vr']['vr_camera']['right_eye_proj'][self.frame_counter, ...] = s.renderer.P

        for device in ['hmd', 'left_controller', 'right_controller']:
            is_valid, trans, rot = s.getDataForVRDevice(device)
            is_valid, trans, rot = s.get_data_for_vr_device(device)
            if is_valid is not None:
                data_list = [is_valid]
                data_list.extend(trans)
@ -264,11 +260,11 @@ class VRLogWriter():
                self.data_map['vr']['vr_device_data'][device][self.frame_counter, ...] = np.array(data_list)

            if device == 'left_controller' or device == 'right_controller':
                button_data_list = s.getButtonDataForController(device)
                button_data_list = s.get_button_data_for_controller(device)
                if button_data_list[0] is not None:
                    self.data_map['vr']['vr_button_data'][device][self.frame_counter, ...] = np.array(button_data_list)

        is_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = s.getEyeTrackingData()
        is_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = s.get_eye_tracking_data()
        if is_valid is not None:
            eye_data_list = [is_valid]
            eye_data_list.extend(origin)
@ -406,6 +402,16 @@ class VRLogReader():
        if read_duration < frame_duration:
            time.sleep(frame_duration - read_duration)

    def read_value(self, value_path):
        """Reads any saved value at value_path for the current frame.

        Args:
            value_path: /-separated string representing the value to fetch. This should be one of the
                values listed in the comment at the top of this file.
                Eg. vr/vr_button_data/right_controller
        """
        return self.hf[value_path][self.frame_counter]

    def read_action(self, action_path):
        """Reads the action at action_path for the current frame.