Merge branch 'igdsl2' of https://github.com/fxia22/gibsonv2 into igdsl2

This commit is contained in: commit ca9367d119
@ -94,7 +94,7 @@ objects = []
 # Lunch packing objects
 filename = 'rigid_body.urdf'
 for i in range(4):
-    category = 'dish\\casserole_dish'
+    category = os.path.join('dish', 'casserole_dish')
     instance = 'casserole_dish00'
     modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
     obj = ArticulatedObject(filename=modelpath, scale=.34)
@ -104,7 +104,7 @@ for i in range(4):
     objects.append(obj)

 for i in range(2):
-    category = 'dish\\bowl\\'
+    category = os.path.join('dish', 'bowl')
     instance = 'bowl02'
     modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
     obj = ArticulatedObject(filename=modelpath, scale=.25)
@ -114,7 +114,7 @@ for i in range(2):
     objects.append(obj)

 for i in range(2):
-    category = 'dish\\bowl\\'
+    category = os.path.join('dish', 'bowl')
     instance = 'bowl02'
     modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
     obj = ArticulatedObject(filename=modelpath, scale=.25)
@ -124,7 +124,7 @@ for i in range(2):
     objects.append(obj)

 for i in range(4):
-    category = 'drink\\soda\\'
+    category = os.path.join('drink', 'soda')
     instance = 'soda38_bluecan'
     modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
     obj = ArticulatedObject(filename=modelpath, scale=.7)
@ -132,7 +132,7 @@ for i in range(4):
     obj.set_position_orientation([-5.9 - 0.1*i, -5.9, 1.7], [0, 0, 0, 1])
     objects.append(obj)

-category = 'food\\protein\\eggs\\'
+category = os.path.join('food', 'protein', 'eggs')
 instance = 'eggs00_eggland'
 modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
 obj = ArticulatedObject(filename=modelpath, scale=0.7)
@ -141,7 +141,7 @@ obj.set_position_orientation([-5.4, -6, .5], [0, 0, 0, 1])
 objects.append(obj)

 for i in range(4):
-    category = 'food\\vegetable\\corn\\'
+    category = os.path.join('food', 'vegetable', 'corn')
     instance = 'corn00'
     modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
     obj = ArticulatedObject(filename=modelpath, scale=.45)
@ -150,7 +150,7 @@ for i in range(4):
     objects.append(obj)

 for i in range(2):
-    category = 'food\\fruit\\apple\\'
+    category = os.path.join('food', 'fruit', 'apple')
     instance = 'apple00'
     modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
     obj = ArticulatedObject(filename=modelpath, scale=1)
@ -159,7 +159,7 @@ for i in range(2):
     objects.append(obj)

 for i in range(2):
-    category = 'food\\fruit\\apple\\'
+    category = os.path.join('food', 'fruit', 'apple')
     instance = 'apple00'
     modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
     obj = ArticulatedObject(filename=modelpath, scale=1)
@ -168,7 +168,7 @@ for i in range(2):
     objects.append(obj)

 for i in range(2):
-    category = 'food\\vegetable\\pepper\\'
+    category = os.path.join('food', 'vegetable', 'pepper')
     instance = 'pepper00'
     modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
     obj = ArticulatedObject(filename=modelpath, scale=.85)
@ -177,7 +177,7 @@ for i in range(2):
     objects.append(obj)

 for i in range(2):
-    category = 'food\\vegetable\\pepper\\'
+    category = os.path.join('food', 'vegetable', 'pepper')
     instance = 'pepper00'
     modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
     obj = ArticulatedObject(filename=modelpath, scale=.85)
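Each hunk above makes the same change: hard-coded Windows path separators are replaced with os.path.join, which builds the path using the separator of whatever platform the demo runs on. A minimal sketch of the difference (paths are illustrative):

import os

win_only = 'dish\\casserole_dish'                  # literal backslashes only resolve on Windows
portable = os.path.join('dish', 'casserole_dish')  # uses os.sep for the current platform
print(portable)  # 'dish/casserole_dish' on Linux/macOS, 'dish\\casserole_dish' on Windows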
@ -130,9 +130,11 @@ for i in range(4):
    s.import_object(obj)
    obj.set_position_orientation([-5.9 - 0.1*i, -5.9, 1.7], [0, 0, 0, 1])
    objects.append(obj)

pack_lunch_assets_folder = 'd:\\gibson2_assets\\processed\\pack_lunch_objects'
category = 'food\\protein\\eggs\\'
instance = 'eggs00_eggland'
filename = 'rigid_body.urdf'
modelpath = os.path.join(pack_lunch_assets_folder, category, instance, filename)
obj = ArticulatedObject(filename=modelpath, scale=0.7)
s.import_object(obj)
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,95 @@
""" Demo for Karen - scenario 1 - REPLAY:

Instructions:
1) Walk from A to B
2) Pick up pepper bottle at B
3) Go to C and release pepper bottle

A = dining table
B = kitchen bar with chairs (it was hard to remove them, so I kept them and picked up the pepper from the side with no chairs)
C = fridge countertop
"""

import numpy as np
import os
import pybullet as p
import time

import gibson2
from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.igibson_indoor_scene import InteractiveIndoorScene
from gibson2.objects.object_base import Object
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_logging import VRLogReader
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path

sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')
groceries_folder = os.path.join(assets_path, 'models', 'groceries')

# HDR files for PBR rendering
hdr_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_02.hdr')
hdr_texture2 = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_03.hdr')
light_modulation_map_filename = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'Rs_int', 'layout', 'floor_lighttype_0.png')
background_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'urban_street_01.jpg')

# TODO for Jiangshan: set this to 1, 2 or 3 depending on which trial you want to view
TRIAL_NUM = 1

# VR rendering settings
vr_rendering_settings = MeshRendererSettings(optimized=True,
                                             fullscreen=False,
                                             env_texture_filename=hdr_texture,
                                             env_texture_filename2=hdr_texture2,
                                             env_texture_filename3=background_texture,
                                             light_modulation_map_filename=light_modulation_map_filename,
                                             enable_shadow=True,
                                             enable_pbr=True,
                                             msaa=True,
                                             light_dimming_factor=1.0)
# Initialize simulator with specific rendering settings
s = Simulator(mode='simple', image_width=504, image_height=560, rendering_settings=vr_rendering_settings)
scene = InteractiveIndoorScene('Rs_int')
s.import_ig_scene(scene)

r_hand = VrHand(hand='right')
s.import_object(r_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)

l_hand = VrHand(hand='left')
s.import_object(l_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)

gaze_marker = VisualMarker(radius=0.03)
s.import_object(gaze_marker, use_pbr=False, use_pbr_mapping=False, shadow_caster=False)
gaze_marker.set_position([0, 0, 1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path, scale=0.8)
s.import_object(basket)
basket.set_position([-1.8278704545622642, 2.152284546319316, 1.031713969848457])
p.changeDynamics(basket.body_id, -1, mass=5)

can_1_path = os.path.join(groceries_folder, 'canned_food', '1', 'rigid_body.urdf')
can_pos = [-0.8, 1.55, 1.1]
can_1 = ArticulatedObject(can_1_path, scale=0.6)
s.import_object(can_1)
can_1.set_position(can_pos)

s.optimize_vertex_and_texture()

# Note: the VRLogReader plays back the demo at the recorded fps, so there is no need to set it here
vr_log_path = 'data_logs/karen_demo_scenario_1_trial_{}.h5'.format(TRIAL_NUM)
vr_reader = VRLogReader(log_filepath=vr_log_path)

# The VR reader automatically shuts itself down and performs cleanup once the while loop has finished running
while vr_reader.get_data_left_to_read():
    # We need to read the frame before stepping - one reason is that the camera
    # matrix for this frame must be set before rendering in step
    vr_reader.read_frame(s, fullReplay=True)
    s.step()
@ -0,0 +1,141 @@
""" Demo for Karen - scenario 1 - SAVING:

Instructions:
1) Walk from A to B
2) Pick up pepper bottle at B
3) Go to C and release pepper bottle

A = dining table
B = kitchen bar with chairs (it was hard to remove them, so I kept them and picked up the pepper from the side with no chairs)
C = fridge countertop
"""

import numpy as np
import os
import pybullet as p
import time

import gibson2
from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.igibson_indoor_scene import InteractiveIndoorScene
from gibson2.objects.object_base import Object
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_logging import VRLogWriter
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path

sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')
groceries_folder = os.path.join(assets_path, 'models', 'groceries')

# HDR files for PBR rendering
hdr_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_02.hdr')
hdr_texture2 = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_03.hdr')
light_modulation_map_filename = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'Rs_int', 'layout', 'floor_lighttype_0.png')
background_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'urban_street_01.jpg')

# VR rendering settings
vr_rendering_settings = MeshRendererSettings(optimized=True,
                                             fullscreen=False,
                                             env_texture_filename=hdr_texture,
                                             env_texture_filename2=hdr_texture2,
                                             env_texture_filename3=background_texture,
                                             light_modulation_map_filename=light_modulation_map_filename,
                                             enable_shadow=True,
                                             enable_pbr=True,
                                             msaa=True,
                                             light_dimming_factor=1.0)
# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', rendering_settings=vr_rendering_settings,
              vr_eye_tracking=True, vr_mode=True)
scene = InteractiveIndoorScene('Rs_int')
s.import_ig_scene(scene)

r_hand = VrHand(hand='right')
s.import_object(r_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)
r_hand.set_start_state(start_pos=[0, 0, 1.5])

l_hand = VrHand(hand='left')
s.import_object(l_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)
l_hand.set_start_state(start_pos=[0, 0.5, 1.5])

gaze_marker = VisualMarker(radius=0.03)
s.import_object(gaze_marker, use_pbr=False, use_pbr_mapping=False, shadow_caster=False)
gaze_marker.set_position([0, 0, 1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path, scale=0.8)
s.import_object(basket)
basket.set_position([-1.8278704545622642, 2.152284546319316, 1.031713969848457])
p.changeDynamics(basket.body_id, -1, mass=5)

can_1_path = os.path.join(groceries_folder, 'canned_food', '1', 'rigid_body.urdf')
can_pos = [-0.8, 1.55, 1.1]
can_1 = ArticulatedObject(can_1_path, scale=0.6)
s.import_object(can_1)
can_1.set_position(can_pos)

s.optimize_vertex_and_texture()
# Set VR starting position in the scene
s.set_vr_offset([0, 0, -0.1])

# Since vr_height_offset is set, the true VR HMD height plus this offset is used instead of the third entry of the start pos
s.set_vr_start_pos([1.259, -0.314, 0], vr_height_offset=-0.1)

# Note: I appended "trial_n" manually to each log file, corresponding to the 3 trials I performed in each scenario
vr_log_path = 'data_logs/karen_demo_scenario_1.h5'
# Saves every 2 seconds or so (200 frames / 90 fps is approximately 2 seconds)
vr_writer = VRLogWriter(frames_before_write=200, log_filepath=vr_log_path, profiling_mode=False)

# Call set_up_data_storage once all actions have been registered (in this demo we only save states, so there are none)
# Despite having no actions, we still need to call this function
vr_writer.set_up_data_storage()

should_simulate = True
while should_simulate:
    event_list = s.poll_vr_events()
    for event in event_list:
        device_type, event_type = event
        if device_type == 'right_controller' or device_type == 'left_controller':
            if event_type == 'menu_press':
                # Quit data saving once the menu button has been pressed on either controller
                should_simulate = False

    s.step(print_time=False)

    # VR device data
    hmd_is_valid, hmd_trans, hmd_rot = s.get_data_for_vr_device('hmd')
    l_is_valid, l_trans, l_rot = s.get_data_for_vr_device('left_controller')
    r_is_valid, r_trans, r_rot = s.get_data_for_vr_device('right_controller')

    # VR button data
    l_trig, l_touch_x, l_touch_y = s.get_button_data_for_controller('left_controller')
    r_trig, r_touch_x, r_touch_y = s.get_button_data_for_controller('right_controller')

    is_eye_data_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = s.get_eye_tracking_data()
    if is_eye_data_valid:
        # Move gaze marker based on eye tracking data
        updated_marker_pos = [origin[0] + dir[0], origin[1] + dir[1], origin[2] + dir[2]]
        gaze_marker.set_position(updated_marker_pos)

    if r_is_valid:
        r_hand.move(r_trans, r_rot)
        r_hand.set_close_fraction(r_trig)
        move_player_no_body(s, r_touch_x, r_touch_y, 0.015, 'hmd')

    if l_is_valid:
        l_hand.move(l_trans, l_rot)
        l_hand.set_close_fraction(l_trig)

    vr_writer.process_frame(s)

# Note: always call this after the simulation is over to close the log file
# and clean up the resources used.
vr_writer.end_log_session()
s.disconnect()
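The SAVING script above and the REPLAY script before it form a matched pair around the VRLogWriter/VRLogReader API. A condensed sketch of the round trip, using only the calls that appear in the two scripts (simulator and scene setup elided, with `done` standing in for the menu-press check):

# Recording side (see the SAVING script above)
vr_writer = VRLogWriter(frames_before_write=200, log_filepath='demo.h5', profiling_mode=False)
vr_writer.set_up_data_storage()      # required even when no actions are registered
while not done:
    s.step()
    vr_writer.process_frame(s)       # snapshot this frame's state into the buffer
vr_writer.end_log_session()          # flush the buffer and close the h5 file

# Replay side (see the REPLAY scripts): read_frame must come before step so the
# recorded camera matrix is in place before the frame is rendered
vr_reader = VRLogReader(log_filepath='demo.h5')
while vr_reader.get_data_left_to_read():
    vr_reader.read_frame(s, fullReplay=True)
    s.step()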
@ -0,0 +1,95 @@
""" Demo for Karen - scenario 2 - REPLAY:

Instructions:
1) Walk from A to B
2) Pick up pepper bottle at B
3) Go to C and release pepper bottle

A = far end of the room
B = kitchen bar with chairs
C = stove top with another object
"""

import numpy as np
import os
import pybullet as p
import time

import gibson2
from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.igibson_indoor_scene import InteractiveIndoorScene
from gibson2.objects.object_base import Object
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_logging import VRLogReader
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path

sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')
groceries_folder = os.path.join(assets_path, 'models', 'groceries')

# HDR files for PBR rendering
hdr_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_02.hdr')
hdr_texture2 = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_03.hdr')
light_modulation_map_filename = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'Rs_int', 'layout', 'floor_lighttype_0.png')
background_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'urban_street_01.jpg')

# TODO for Jiangshan: set this to 1, 2 or 3 depending on which trial you want to view
TRIAL_NUM = 1

# VR rendering settings
vr_rendering_settings = MeshRendererSettings(optimized=True,
                                             fullscreen=False,
                                             env_texture_filename=hdr_texture,
                                             env_texture_filename2=hdr_texture2,
                                             env_texture_filename3=background_texture,
                                             light_modulation_map_filename=light_modulation_map_filename,
                                             enable_shadow=True,
                                             enable_pbr=True,
                                             msaa=True,
                                             light_dimming_factor=1.0)
# Initialize simulator with specific rendering settings
s = Simulator(mode='simple', image_width=504, image_height=560, rendering_settings=vr_rendering_settings)
scene = InteractiveIndoorScene('Rs_int')
s.import_ig_scene(scene)

r_hand = VrHand(hand='right')
s.import_object(r_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)

l_hand = VrHand(hand='left')
s.import_object(l_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)

gaze_marker = VisualMarker(radius=0.03)
s.import_object(gaze_marker, use_pbr=False, use_pbr_mapping=False, shadow_caster=False)
gaze_marker.set_position([0, 0, 1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path, scale=0.8)
s.import_object(basket)
basket.set_position([-1.8278704545622642, 2.152284546319316, 1.031713969848457])
p.changeDynamics(basket.body_id, -1, mass=5)

can_1_path = os.path.join(groceries_folder, 'canned_food', '1', 'rigid_body.urdf')
can_pos = [-0.8, 1.55, 1.1]
can_1 = ArticulatedObject(can_1_path, scale=0.6)
s.import_object(can_1)
can_1.set_position(can_pos)

s.optimize_vertex_and_texture()

# Note: the VRLogReader plays back the demo at the recorded fps, so there is no need to set it here
vr_log_path = 'data_logs/karen_demo_scenario_2_trial_{}.h5'.format(TRIAL_NUM)
vr_reader = VRLogReader(log_filepath=vr_log_path)

# The VR reader automatically shuts itself down and performs cleanup once the while loop has finished running
while vr_reader.get_data_left_to_read():
    # We need to read the frame before stepping - one reason is that the camera
    # matrix for this frame must be set before rendering in step
    vr_reader.read_frame(s, fullReplay=True)
    s.step()
@ -0,0 +1,141 @@
""" Demo for Karen - scenario 2 - SAVING:

Instructions:
1) Walk from A to B
2) Pick up pepper bottle at B
3) Go to C and release pepper bottle

A = far end of the room
B = kitchen bar with chairs
C = stove top with another object
"""

import numpy as np
import os
import pybullet as p
import time

import gibson2
from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.igibson_indoor_scene import InteractiveIndoorScene
from gibson2.objects.object_base import Object
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_logging import VRLogWriter
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path

sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')
groceries_folder = os.path.join(assets_path, 'models', 'groceries')

# HDR files for PBR rendering
hdr_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_02.hdr')
hdr_texture2 = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_03.hdr')
light_modulation_map_filename = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'Rs_int', 'layout', 'floor_lighttype_0.png')
background_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'urban_street_01.jpg')

# VR rendering settings
vr_rendering_settings = MeshRendererSettings(optimized=True,
                                             fullscreen=False,
                                             env_texture_filename=hdr_texture,
                                             env_texture_filename2=hdr_texture2,
                                             env_texture_filename3=background_texture,
                                             light_modulation_map_filename=light_modulation_map_filename,
                                             enable_shadow=True,
                                             enable_pbr=True,
                                             msaa=True,
                                             light_dimming_factor=1.0)
# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', rendering_settings=vr_rendering_settings,
              vr_eye_tracking=True, vr_mode=True)
scene = InteractiveIndoorScene('Rs_int')
s.import_ig_scene(scene)

r_hand = VrHand(hand='right')
s.import_object(r_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)
r_hand.set_start_state(start_pos=[0, 0, 1.5])

l_hand = VrHand(hand='left')
s.import_object(l_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)
l_hand.set_start_state(start_pos=[0, 0.5, 1.5])

gaze_marker = VisualMarker(radius=0.03)
s.import_object(gaze_marker, use_pbr=False, use_pbr_mapping=False, shadow_caster=False)
gaze_marker.set_position([0, 0, 1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path, scale=0.8)
s.import_object(basket)
basket.set_position([-1.8278704545622642, 2.152284546319316, 1.031713969848457])
p.changeDynamics(basket.body_id, -1, mass=5)

can_1_path = os.path.join(groceries_folder, 'canned_food', '1', 'rigid_body.urdf')
can_pos = [-0.8, 1.55, 1.1]
can_1 = ArticulatedObject(can_1_path, scale=0.6)
s.import_object(can_1)
can_1.set_position(can_pos)

s.optimize_vertex_and_texture()
# Set VR starting position in the scene
s.set_vr_offset([0, 0, -0.1])

# Since vr_height_offset is set, the true VR HMD height plus this offset is used instead of the third entry of the start pos
s.set_vr_start_pos([1.56085938, -2.88829452, 0], vr_height_offset=-0.1)

# Note: I appended "trial_n" manually to each log file, corresponding to the 3 trials I performed in each scenario
vr_log_path = 'data_logs/karen_demo_scenario_2.h5'
# Saves every 2 seconds or so (200 frames / 90 fps is approximately 2 seconds)
vr_writer = VRLogWriter(frames_before_write=200, log_filepath=vr_log_path, profiling_mode=False)

# Call set_up_data_storage once all actions have been registered (in this demo we only save states, so there are none)
# Despite having no actions, we still need to call this function
vr_writer.set_up_data_storage()

should_simulate = True
while should_simulate:
    event_list = s.poll_vr_events()
    for event in event_list:
        device_type, event_type = event
        if device_type == 'right_controller' or device_type == 'left_controller':
            if event_type == 'menu_press':
                # Quit data saving once the menu button has been pressed on either controller
                should_simulate = False

    s.step(print_time=False)

    # VR device data
    hmd_is_valid, hmd_trans, hmd_rot = s.get_data_for_vr_device('hmd')
    l_is_valid, l_trans, l_rot = s.get_data_for_vr_device('left_controller')
    r_is_valid, r_trans, r_rot = s.get_data_for_vr_device('right_controller')

    # VR button data
    l_trig, l_touch_x, l_touch_y = s.get_button_data_for_controller('left_controller')
    r_trig, r_touch_x, r_touch_y = s.get_button_data_for_controller('right_controller')

    is_eye_data_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = s.get_eye_tracking_data()
    if is_eye_data_valid:
        # Move gaze marker based on eye tracking data
        updated_marker_pos = [origin[0] + dir[0], origin[1] + dir[1], origin[2] + dir[2]]
        gaze_marker.set_position(updated_marker_pos)

    if r_is_valid:
        r_hand.move(r_trans, r_rot)
        r_hand.set_close_fraction(r_trig)
        move_player_no_body(s, r_touch_x, r_touch_y, 0.015, 'hmd')

    if l_is_valid:
        l_hand.move(l_trans, l_rot)
        l_hand.set_close_fraction(l_trig)

    vr_writer.process_frame(s)

# Note: always call this after the simulation is over to close the log file
# and clean up the resources used.
vr_writer.end_log_session()
s.disconnect()
@ -62,8 +62,9 @@ def run_muvr(mode='server', host='localhost', port='8887'):
     # Import 4 mustard bottles
     mass_list = [5, 10, 100, 500]
     mustard_start = [1, -0.2, 1]
+    m = None
     for i in range(len(mass_list)):
-        mustard = YCBObject('006_mustard_bottle')
+        m = mustard = YCBObject('006_mustard_bottle')
         s.import_object(mustard)
         mustard.set_position([mustard_start[0], mustard_start[1] - i * 0.2, mustard_start[2]])
         p.changeDynamics(mustard.body_id, -1, mass=mass_list[i])
@ -96,6 +97,9 @@ def run_muvr(mode='server', host='localhost', port='8887'):
         # Server is the one that steps the physics simulation, not the client
         s.step()

+        # TODO: Remove jittery mustard
+        m.set_position([1, -0.8 + float(np.sin(sin_accumulator)) / 2.0, 1])
+
         # Send the current frame to be rendered by the client,
         # and also ingest new client data
         vr_server.refresh_server()
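The three added lines oscillate the last mustard bottle along y each server frame; sin_accumulator is defined elsewhere in this file and is not shown in the hunk. A standalone sketch of the motion, assuming the accumulator advances by a 1/90 s render timestep per frame:

import numpy as np

sin_accumulator = 0.0        # assumed to start at zero and grow each frame
render_timestep = 1 / 90.0   # assumed frame period
for frame in range(3):
    y = -0.8 + float(np.sin(sin_accumulator)) / 2.0  # y stays within [-1.3, -0.3]
    print(round(y, 4))       # -0.8, -0.7944, -0.7889, ...
    sin_accumulator += render_timestep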
@ -0,0 +1,186 @@
""" VR embodiment demo with Fetch robot. """

import numpy as np
import os
import pybullet as p

from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.robots.fetch_vr_robot import FetchVR
from gibson2.scenes.gibson_indoor_scene import StaticIndoorScene
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.utils import parse_config
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path

sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')
fetch_config = parse_config(os.path.join('..', '..', '..', 'configs', 'fetch_p2p_nav.yaml'))

# Playground configuration: edit this to change functionality
optimize = True
# Toggles SRAnipal eye tracking
use_eye_tracking = True

# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', physics_timestep=1/90.0, render_timestep=1/90.0,
              rendering_settings=MeshRendererSettings(optimized=optimize, fullscreen=False, enable_pbr=False),
              vr_eye_tracking=use_eye_tracking, vr_mode=True)
scene = StaticIndoorScene('Placida')
s.import_scene(scene)

# TODO: Change this to VR fetch!
fvr = FetchVR(fetch_config)
s.import_robot(fvr)
# Set differential drive to control wheels
fvr.set_position([0, -1.5, 0])
fvr.robot_specific_reset()
fvr.keep_still()

# Load robot end-effector tracker
effector_marker = VisualMarker(rgba_color=[1, 0, 1, 0.2], radius=0.05)
s.import_object(effector_marker)
# Hide marker upon initialization
effector_marker.set_position([0, 0, -5])

if use_eye_tracking:
    # Eye tracking visual marker - a red marker appears in the scene to indicate gaze direction
    gaze_marker = VisualMarker(radius=0.03)
    s.import_object(gaze_marker)
    gaze_marker.set_position([0, 0, 1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path)
s.import_object(basket)
basket.set_position([1, 0.2, 1])
p.changeDynamics(basket.body_id, -1, mass=5)

mass_list = [5, 10, 100, 500]
mustard_start = [1, -0.2, 1]
mustard_list = []
for i in range(len(mass_list)):
    mustard = YCBObject('006_mustard_bottle')
    mustard_list.append(mustard)
    s.import_object(mustard)
    mustard.set_position([mustard_start[0], mustard_start[1] - i * 0.2, mustard_start[2]])
    p.changeDynamics(mustard.body_id, -1, mass=mass_list[i])

if optimize:
    s.optimize_vertex_and_texture()

fetch_height = 1.2

wheel_axle_half = 0.18738  # half of the distance between the wheels
wheel_radius = 0.054  # radius of the wheels themselves

r_wheel_joint = fvr.ordered_joints[0]
l_wheel_joint = fvr.ordered_joints[1]

fetch_lin_vel_multiplier = 100

# Variables used in IK to move the end effector
fetch_body_id = fvr.get_fetch_body_id()
fetch_joint_num = p.getNumJoints(fetch_body_id)
effector_link_id = 19

# Setting to determine whether IK should also solve for end effector orientation
# based on the VR controller
solve_effector_orn = True

# Update frequency - number of frames between updates
# TODO: Play around with this
update_freq = 1

frame_num = 0
# Main simulation loop
while True:
    s.step()

    hmd_is_valid, hmd_trans, hmd_rot = s.get_data_for_vr_device('hmd')
    # Fetch only has one arm, which is entirely controlled by the right hand
    # TODO: Use left arm for movement?
    r_is_valid, r_trans, r_rot = s.get_data_for_vr_device('right_controller')
    r_trig, r_touch_x, r_touch_y = s.get_button_data_for_controller('right_controller')

    # Set Fetch orientation directly from the HMD to avoid lag when turning and the resulting motion sickness
    fvr.set_z_rotation(hmd_rot)

    # Get world position and Fetch position
    hmd_world_pos = s.get_hmd_world_pos()
    fetch_pos = fvr.get_position()

    # Calculate x and y offset to get to the Fetch position
    # z offset is to the desired HMD height, corresponding to the Fetch head height
    offset_to_fetch = [fetch_pos[0] - hmd_world_pos[0],
                       fetch_pos[1] - hmd_world_pos[1],
                       fetch_height - hmd_world_pos[2]]

    s.set_vr_offset(offset_to_fetch)

    # TODO: Consolidate this functionality into the FetchVR class
    # Update the Fetch arm at a user-defined frequency
    if r_is_valid and frame_num % 10 == 0:
        effector_marker.set_position(r_trans)
        effector_marker.set_orientation(r_rot)

        # Linear velocity is relative to the current direction Fetch is pointing,
        # so we only need to know how fast to travel in that direction (the Y touchpad direction is used for this)
        lin_vel = fetch_lin_vel_multiplier * r_touch_y
        ang_vel = 0

        left_wheel_ang_vel = (lin_vel - ang_vel * wheel_axle_half) / wheel_radius
        right_wheel_ang_vel = (lin_vel + ang_vel * wheel_axle_half) / wheel_radius

        l_wheel_joint.set_motor_velocity(left_wheel_ang_vel)
        r_wheel_joint.set_motor_velocity(right_wheel_ang_vel)

        # Ignore the sideways rolling dimension of the controller (x axis) since Fetch can't "roll" its arm
        r_euler_rot = p.getEulerFromQuaternion(r_rot)
        r_rot_no_x = p.getQuaternionFromEuler([0, r_euler_rot[1], r_euler_rot[2]])

        # Iteration and residual threshold values are based on recommendations from PyBullet
        ik_joint_poses = None
        if solve_effector_orn:
            ik_joint_poses = p.calculateInverseKinematics(fetch_body_id,
                                                          effector_link_id,
                                                          r_trans,
                                                          r_rot_no_x,
                                                          solver=0,
                                                          maxNumIterations=100,
                                                          residualThreshold=.01)
        else:
            ik_joint_poses = p.calculateInverseKinematics(fetch_body_id,
                                                          effector_link_id,
                                                          r_trans,
                                                          solver=0)

        # Set joints to the results of the IK
        if ik_joint_poses is not None:
            for i in range(len(ik_joint_poses)):
                next_pose = ik_joint_poses[i]
                next_joint = fvr.ordered_joints[i]

                # Set wheel joints back to their original positions so the IK calculation does not affect movement
                # Note: PyBullet does not currently expose the root of the IK calculation
                if next_joint.joint_name == 'r_wheel_joint' or next_joint.joint_name == 'l_wheel_joint':
                    next_pose, _, _ = next_joint.get_state()

                p.resetJointState(fetch_body_id, next_joint.joint_index, next_pose)

                # TODO: Arm is not moving with this function - debug!
                # TODO: This could be causing some problems with movement
                #p.setJointMotorControl2(bodyIndex=fetch_body_id,
                #                        jointIndex=next_joint.joint_index,
                #                        controlMode=p.POSITION_CONTROL,
                #                        targetPosition=next_pose,
                #                        force=500)

    # TODO: Implement opening/closing the end effectors
    # Something like this: fetch.set_fetch_gripper_fraction(rTrig)
    # TODO: Implement previous rest pose

    frame_num += 1

s.disconnect()
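The wheel commands in the loop above are standard differential-drive inverse kinematics: for a desired body linear velocity v and angular velocity w, each wheel must spin at (v -/+ w * b) / r rad/s, where b is the half axle length and r the wheel radius. A quick numeric check with the constants from the script:

wheel_axle_half = 0.18738  # m, half the distance between the wheels
wheel_radius = 0.054       # m

def wheel_velocities(lin_vel, ang_vel):
    """Return (left, right) wheel angular velocities in rad/s."""
    left = (lin_vel - ang_vel * wheel_axle_half) / wheel_radius
    right = (lin_vel + ang_vel * wheel_axle_half) / wheel_radius
    return left, right

print(wheel_velocities(0.5, 0.0))  # pure translation: both wheels ~9.26 rad/s
print(wheel_velocities(0.0, 1.0))  # pure rotation: wheels at ~-/+3.47 rad/s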
@ -0,0 +1,161 @@
""" VR playground containing various objects and VR options that can be toggled
to experiment with the VR experience in iGibson. This playground operates in a
PBR scene. Please see vr_playground_no_pbr.py for a non-PBR experience.

Important - VR functionality and where to find it:

1) Most VR functions can be found in gibson2/simulator.py
2) VR utility functions are found in gibson2/utils/vr_utils.py
3) The VR renderer can be found in gibson2/render/mesh_renderer.py
4) The underlying VR C++ code can be found in vr_mesh_render.h and .cpp in gibson2/render/cpp
"""

import numpy as np
import os
import pybullet as p
import time

import gibson2
from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.igibson_indoor_scene import InteractiveIndoorScene
from gibson2.objects.object_base import Object
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path

sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')
groceries_folder = os.path.join(assets_path, 'models', 'groceries')

# Playground configuration: edit this to change functionality
optimize = False
# Toggles fullscreen companion window
fullscreen = False

# HDR files for PBR rendering
hdr_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_02.hdr')
hdr_texture2 = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_03.hdr')
light_modulation_map_filename = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'Rs_int', 'layout', 'floor_lighttype_0.png')
background_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'urban_street_01.jpg')

# VR rendering settings
vr_rendering_settings = MeshRendererSettings(optimized=optimize,
                                             fullscreen=fullscreen,
                                             env_texture_filename=hdr_texture,
                                             env_texture_filename2=hdr_texture2,
                                             env_texture_filename3=background_texture,
                                             light_modulation_map_filename=light_modulation_map_filename,
                                             enable_shadow=True,
                                             enable_pbr=True,
                                             msaa=True,
                                             light_dimming_factor=1.0)
# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', physics_timestep=1/90.0, render_timestep=1/90.0, rendering_settings=vr_rendering_settings,
              vr_eye_tracking=False, vr_mode=True)
scene = InteractiveIndoorScene('Rs_int')
# Turn this on when debugging to speed up loading
scene._set_first_n_objects(5)
s.import_ig_scene(scene)

# TODO: Remove later
p.setGravity(0, 0, 0)

# Player body is represented by a translucent blue cylinder
""" if enable_vr_body:
    vr_body = VrBody()
    s.import_object(vr_body, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)
    vr_body.init_body([0, 0]) """

vr_body_fpath = os.path.join(assets_path, 'models', 'vr_body', 'vr_body.urdf')
vrb = ArticulatedObject(vr_body_fpath, scale=0.1)
s.import_object(vrb, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)
#vrb.set_position([0, 0, 1])
vrb_cid = p.createConstraint(vrb.body_id, -1, -1, -1, p.JOINT_FIXED,
                             [0, 0, 0], [0, 0, 0], [0, 0, 1.2])

# The hand can either be 'right' or 'left'
# It has enough friction to pick up the basket and the mustard bottles
r_hand = VrHand(hand='right')
s.import_object(r_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)
# This sets the hand constraints so it can move with the VR controller
r_hand.set_start_state(start_pos=[0, 0, 1.5])

l_hand = VrHand(hand='left')
s.import_object(l_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)
# This sets the hand constraints so it can move with the VR controller
l_hand.set_start_state(start_pos=[0, 0.5, 1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path, scale=0.8)
s.import_object(basket)
basket.set_position([-1, 1.55, 1.2])
p.changeDynamics(basket.body_id, -1, mass=5)

can_1_path = os.path.join(groceries_folder, 'canned_food', '1', 'rigid_body.urdf')
can_pos = [[-0.8, 1.55, 1.2], [-0.6, 1.55, 1.2], [-0.4, 1.55, 1.2]]
cans = []
for i in range(len(can_pos)):
    can_1 = ArticulatedObject(can_1_path, scale=0.6)
    cans.append(can_1)
    s.import_object(can_1)
    can_1.set_position(can_pos[i])

# TODO: Remove this test
#r_hand.set_hand_no_collision(can_1.body_id)
#r_hand.set_hand_no_collision(basket.body_id)
#r_hand.set_hand_no_collision(vr_body.body_id)
#p.setCollisionFilterPair(can_1.body_id, basket.body_id, -1, -1, 0)  # the last argument is 0 to disable collision, 1 to enable collision
#p.setCollisionFilterPair(can_1.body_id, r_hand.body_id, -1, -1, 0)
#p.setCollisionFilterPair(can_1.body_id, l_hand.body_id, -1, -1, 0)

if optimize:
    s.optimize_vertex_and_texture()

# Set VR starting position in the scene
s.set_vr_offset([0, 0, 0])

while True:
    s.step()
    hmd_is_valid, hmd_trans, hmd_rot = s.get_data_for_vr_device('hmd')
    l_is_valid, l_trans, l_rot = s.get_data_for_vr_device('left_controller')
    r_is_valid, r_trans, r_rot = s.get_data_for_vr_device('right_controller')
    l_trig, l_touch_x, l_touch_y = s.get_button_data_for_controller('left_controller')
    r_trig, r_touch_x, r_touch_y = s.get_button_data_for_controller('right_controller')

    if hmd_is_valid:
        p.changeConstraint(vrb_cid, hmd_trans, vrb.get_orientation(), maxForce=2000)

    """if enable_vr_body:
        if not r_is_valid:
            # See the VrBody class for more details on this method
            vr_body.move_body(s, 0, 0, movement_speed, relative_movement_device)
        else:
            vr_body.move_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device) """

    if r_is_valid:
        r_hand.move(r_trans, r_rot)
        r_hand.set_close_fraction(r_trig)

        # Right hand is used to control movement
        # Move the VR system based on the device coordinate system and touchpad press location
        move_player_no_body(s, r_touch_x, r_touch_y, 0.03, 'hmd')

        # Trigger haptic pulse on the right touchpad, modulated by the trigger close fraction
        # Close the trigger to create a stronger pulse
        # Note: an open trigger has a close fraction of about 0.05, so cut off haptic input under 0.1
        # to avoid constant rumbling
        s.trigger_haptic_pulse('right_controller', r_trig if r_trig > 0.1 else 0)

    if l_is_valid:
        l_hand.move(l_trans, l_rot)
        l_hand.set_close_fraction(l_trig)
        s.trigger_haptic_pulse('left_controller', l_trig if l_trig > 0.1 else 0)

s.disconnect()
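The vrb body above is anchored with a fixed constraint and then dragged toward the headset with p.changeConstraint every frame - a common PyBullet pattern for kinematically tracking a target while still pushing back on collisions. A minimal sketch of the pattern, with a plain box standing in for the body URDF:

import pybullet as p

p.connect(p.DIRECT)
box = p.createCollisionShape(p.GEOM_BOX, halfExtents=[0.1, 0.1, 0.1])
body = p.createMultiBody(baseMass=1, baseCollisionShapeIndex=box)
# Fixed constraint pinning the body to a world-frame pivot, mirroring
# p.createConstraint(vrb.body_id, -1, -1, -1, p.JOINT_FIXED, ...) above
cid = p.createConstraint(body, -1, -1, -1, p.JOINT_FIXED,
                         [0, 0, 0], [0, 0, 0], [0, 0, 1.2])
for i in range(10):
    target = [0, 0.01 * i, 1.2]  # e.g. the HMD position for this frame
    p.changeConstraint(cid, target, maxForce=2000)
    p.stepSimulation()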
@ -0,0 +1,145 @@
""" VR demo for tuning physics parameters. Has mustard bottles, pears and a basket that can be grasped. """

import numpy as np
import os
import pybullet as p

from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.gibson_indoor_scene import StaticIndoorScene
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path

sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')
lunch_pack_folder = os.path.join(assets_path, 'pack_lunch')
small_fruit_path = os.path.join(lunch_pack_folder, 'food', 'fruit', 'pear', 'pear00', 'rigid_body.urdf')

# Playground configuration: edit this to change functionality
optimize = True
# Toggles fullscreen companion window
fullscreen = False
# Toggles SRAnipal eye tracking
use_eye_tracking = True
# Enables the VR collision body
enable_vr_body = True
# Toggles movement with the touchpad (to move outside of the play area)
touchpad_movement = True
# Set to one of hmd, right_controller or left_controller to move relative to that device
relative_movement_device = 'hmd'
# Movement speed for touchpad-based movement
movement_speed = 0.03

# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', rendering_settings=MeshRendererSettings(optimized=optimize, fullscreen=fullscreen, enable_pbr=False),
              vr_eye_tracking=use_eye_tracking, vr_mode=True)
scene = StaticIndoorScene('Placida')
s.import_scene(scene)

# Player body is represented by a translucent blue cylinder
if enable_vr_body:
    vr_body = VrBody()
    s.import_object(vr_body)
    vr_body.init_body([0, 0])

# The hand can either be 'right' or 'left'
# It has enough friction to pick up the basket and the mustard bottles
r_hand = VrHand(hand='right')
s.import_object(r_hand)
# This sets the hand constraints so it can move with the VR controller
r_hand.set_start_state(start_pos=[0, 0, 1.5])

l_hand = VrHand(hand='left')
s.import_object(l_hand)
# This sets the hand constraints so it can move with the VR controller
l_hand.set_start_state(start_pos=[0, 0.5, 1.5])

if use_eye_tracking:
    # Eye tracking visual marker - a red marker appears in the scene to indicate gaze direction
    gaze_marker = VisualMarker(radius=0.03)
    s.import_object(gaze_marker)
    gaze_marker.set_position([0, 0, 1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path)
s.import_object(basket)
basket.set_position([1, 0.2, 1])
p.changeDynamics(basket.body_id, -1, mass=1)

# Experiment with heavier mustard bottles
mass_list = [0.5, 1, 2, 5]
mustard_start = [1, -0.2, 1]
mustard_list = []
for i in range(len(mass_list)):
    mustard = YCBObject('006_mustard_bottle')
    mustard_list.append(mustard)
    s.import_object(mustard)
    mustard.set_position([mustard_start[0], mustard_start[1] - i * 0.2, mustard_start[2]])
    p.changeDynamics(mustard.body_id, -1, mass=mass_list[i])

fruit_start = [1, -1, 1]
for i in range(3):
    fruit = ArticulatedObject(small_fruit_path, scale=0.9)
    s.import_object(fruit)
    fruit.set_position([fruit_start[0], fruit_start[1] - i * 0.2, fruit_start[2]])
    # Normal-sized pears weigh around 200 grams
    p.changeDynamics(fruit.body_id, -1, mass=0.2)

if optimize:
    s.optimize_vertex_and_texture()

# Start user close to the counter for interaction
# Small negative offset to account for the lighthouses not being set up entirely correctly
s.set_vr_offset([-0.5, 0.0, -0.1])

# Main simulation loop
while True:
    # Step the simulator - this needs to be done every frame to actually run the simulation
    s.step()

    # VR device data
    hmd_is_valid, hmd_trans, hmd_rot = s.get_data_for_vr_device('hmd')
    l_is_valid, l_trans, l_rot = s.get_data_for_vr_device('left_controller')
    r_is_valid, r_trans, r_rot = s.get_data_for_vr_device('right_controller')

    # VR button data
    l_trig, l_touch_x, l_touch_y = s.get_button_data_for_controller('left_controller')
    r_trig, r_touch_x, r_touch_y = s.get_button_data_for_controller('right_controller')

    # VR eye tracking data
    if use_eye_tracking:
        is_eye_data_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = s.get_eye_tracking_data()
        if is_eye_data_valid:
            # Move gaze marker based on eye tracking data
            updated_marker_pos = [origin[0] + dir[0], origin[1] + dir[1], origin[2] + dir[2]]
            gaze_marker.set_position(updated_marker_pos)

    if enable_vr_body:
        if not r_is_valid:
            # See the VrBody class for more details on this method
            vr_body.move_body(s, 0, 0, movement_speed, relative_movement_device)
        else:
            vr_body.move_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)

    if r_is_valid:
        r_hand.move(r_trans, r_rot)
        r_hand.set_close_fraction(r_trig)

        # Right hand is used to control movement
        # Move the VR system based on the device coordinate system and touchpad press location
        move_player_no_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)

        # Trigger haptic pulse on the right touchpad, modulated by the trigger close fraction
        # Close the trigger to create a stronger pulse
        # Note: an open trigger has a close fraction of about 0.05, so cut off haptic input under 0.1
        # to avoid constant rumbling
        s.trigger_haptic_pulse('right_controller', r_trig if r_trig > 0.1 else 0)

    if l_is_valid:
        l_hand.move(l_trans, l_rot)
        l_hand.set_close_fraction(l_trig)
        s.trigger_haptic_pulse('left_controller', l_trig if l_trig > 0.1 else 0)

s.disconnect()
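The gaze marker update in the loop above computes origin + dir componentwise; with dir a unit vector, that is the point one meter along the gaze ray from the eyes. The same computation in numpy form, assuming dir is normalized:

import numpy as np

origin = np.array([0.0, 0.0, 1.5])     # eye position in the world frame
direction = np.array([0.0, 1.0, 0.0])  # unit gaze direction
updated_marker_pos = (origin + direction).tolist()  # 1 m along the gaze ray
print(updated_marker_pos)  # [0.0, 1.0, 1.5]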
@ -0,0 +1,152 @@
""" VR playground containing various objects and VR options that can be toggled
to experiment with the VR experience in iGibson. This playground operates in a
PBR scene. Please see vr_playground_no_pbr.py for a non-PBR experience.

Important - VR functionality and where to find it:

1) Most VR functions can be found in gibson2/simulator.py
2) VR utility functions are found in gibson2/utils/vr_utils.py
3) The VR renderer can be found in gibson2/render/mesh_renderer.py
4) The underlying VR C++ code can be found in vr_mesh_render.h and .cpp in gibson2/render/cpp
"""

import numpy as np
import os
import pybullet as p
import time

import gibson2
from gibson2.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from gibson2.scenes.igibson_indoor_scene import InteractiveIndoorScene
from gibson2.objects.object_base import Object
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.objects.vr_objects import VrBody, VrHand
from gibson2.objects.visual_marker import VisualMarker
from gibson2.objects.ycb_object import YCBObject
from gibson2.simulator import Simulator
from gibson2.utils.vr_utils import move_player_no_body
from gibson2 import assets_path

sample_urdf_folder = os.path.join(assets_path, 'models', 'sample_urdfs')
groceries_folder = os.path.join(assets_path, 'models', 'groceries')

# Playground configuration: edit this to change functionality
optimize = True
# Toggles fullscreen companion window
fullscreen = False
# Toggles SRAnipal eye tracking
use_eye_tracking = True
# Toggles movement with the touchpad (to move outside of the play area)
touchpad_movement = True
# Set to one of hmd, right_controller or left_controller to move relative to that device
relative_movement_device = 'hmd'
# Movement speed for touchpad-based movement
movement_speed = 0.03

# HDR files for PBR rendering
hdr_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_02.hdr')
hdr_texture2 = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'probe_03.hdr')
light_modulation_map_filename = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'Rs_int', 'layout', 'floor_lighttype_0.png')
background_texture = os.path.join(
    gibson2.ig_dataset_path, 'scenes', 'background', 'urban_street_01.jpg')

# VR rendering settings
vr_rendering_settings = MeshRendererSettings(optimized=optimize,
                                             fullscreen=fullscreen,
                                             env_texture_filename=hdr_texture,
                                             env_texture_filename2=hdr_texture2,
                                             env_texture_filename3=background_texture,
                                             light_modulation_map_filename=light_modulation_map_filename,
                                             enable_shadow=True,
                                             enable_pbr=True,
                                             msaa=True,
                                             light_dimming_factor=1.0)
# Initialize simulator with specific rendering settings
s = Simulator(mode='vr', rendering_settings=vr_rendering_settings,
              vr_eye_tracking=use_eye_tracking, vr_mode=True)
scene = InteractiveIndoorScene('Beechwood_0_int')
# Turn this on when debugging to speed up loading
scene._set_first_n_objects(10)
s.import_ig_scene(scene)

# The hand can either be 'right' or 'left'
# It has enough friction to pick up the basket and the mustard bottles
r_hand = VrHand(hand='right')
s.import_object(r_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)
# This sets the hand constraints so it can move with the VR controller
r_hand.set_start_state(start_pos=[0, 0, 1.5])

l_hand = VrHand(hand='left')
s.import_object(l_hand, use_pbr=False, use_pbr_mapping=False, shadow_caster=True)
# This sets the hand constraints so it can move with the VR controller
l_hand.set_start_state(start_pos=[0, 0.5, 1.5])

if use_eye_tracking:
    # Eye tracking visual marker - a red marker appears in the scene to indicate gaze direction
    gaze_marker = VisualMarker(radius=0.03)
    s.import_object(gaze_marker, use_pbr=False, use_pbr_mapping=False, shadow_caster=False)
    gaze_marker.set_position([0, 0, 1.5])

basket_path = os.path.join(sample_urdf_folder, 'object_ZU6u5fvE8Z1.urdf')
basket = ArticulatedObject(basket_path, scale=0.8)
s.import_object(basket)
basket.set_position([-1, 1.55, 1.2])
p.changeDynamics(basket.body_id, -1, mass=5)

can_1_path = os.path.join(groceries_folder, 'canned_food', '1', 'rigid_body.urdf')
can_pos = [[-0.8, 1.55, 1.2], [-0.6, 1.55, 1.2], [-0.4, 1.55, 1.2]]
cans = []
for i in range(len(can_pos)):
    can_1 = ArticulatedObject(can_1_path, scale=0.6)
    cans.append(can_1)
    s.import_object(can_1)
    can_1.set_position(can_pos[i])

if optimize:
    s.optimize_vertex_and_texture()

# Set VR starting position in the scene
s.set_vr_offset([0, 0, -0.1])

while True:
    s.step(print_time=True)

    # VR device data
    hmd_is_valid, hmd_trans, hmd_rot = s.get_data_for_vr_device('hmd')
    l_is_valid, l_trans, l_rot = s.get_data_for_vr_device('left_controller')
    r_is_valid, r_trans, r_rot = s.get_data_for_vr_device('right_controller')

    # VR button data
    l_trig, l_touch_x, l_touch_y = s.get_button_data_for_controller('left_controller')
    r_trig, r_touch_x, r_touch_y = s.get_button_data_for_controller('right_controller')

    # VR eye tracking data
    if use_eye_tracking:
        is_eye_data_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = s.get_eye_tracking_data()
        if is_eye_data_valid:
            # Move gaze marker based on eye tracking data
            updated_marker_pos = [origin[0] + dir[0], origin[1] + dir[1], origin[2] + dir[2]]
            gaze_marker.set_position(updated_marker_pos)

    if r_is_valid:
        r_hand.move(r_trans, r_rot)
        r_hand.set_close_fraction(r_trig)

        # Right hand is used to control movement
        # Move the VR system based on the device coordinate system and touchpad press location
        move_player_no_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)

        # Trigger haptic pulse on the right touchpad, modulated by the trigger close fraction
        # Close the trigger to create a stronger pulse
        # Note: an open trigger has a close fraction of about 0.05, so cut off haptic input under 0.1
        # to avoid constant rumbling
        s.trigger_haptic_pulse('right_controller', r_trig if r_trig > 0.1 else 0)

    if l_is_valid:
        l_hand.move(l_trans, l_rot)
        l_hand.set_close_fraction(l_trig)
        s.trigger_haptic_pulse('left_controller', l_trig if l_trig > 0.1 else 0)

s.disconnect()
@ -93,7 +93,7 @@ if optimize:
 s.optimize_vertex_and_texture()

 # Start user close to counter for interaction
-s.set_vr_offset([-0.5, 0.0, -0.5])
+s.set_vr_offset([-0.5, 0.0, 0])

 # State of mustard hiding, toggled by a menu press
 hide_mustard = False
@ -131,17 +131,20 @@ while True:
             updated_marker_pos = [origin[0] + dir[0], origin[1] + dir[1], origin[2] + dir[2]]
             gaze_marker.set_position(updated_marker_pos)

-    if enable_vr_body:
-        if not r_is_valid:
-            # See VrBody class for more details on this method
-            vr_body.move_body(s, 0, 0, movement_speed, relative_movement_device)
-        else:
-            vr_body.move_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)
-
     if r_is_valid:
         r_hand.move(r_trans, r_rot)
         r_hand.set_close_fraction(r_trig)

-        # Right hand used to control movement
-        # Move VR system based on device coordinate system and touchpad press location
-        move_player_no_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)
+        if enable_vr_body:
+            # See VrBody class for more details on this method
+            vr_body.move_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)
+        else:
+            # Right hand used to control movement
+            # Move VR system based on device coordinate system and touchpad press location
+            move_player_no_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)

         # Trigger haptic pulse on right touchpad, modulated by trigger close fraction
         # Close the trigger to create a stronger pulse
@@ -118,7 +118,7 @@ if optimize:
    s.optimize_vertex_and_texture()

# Set VR starting position in the scene
s.set_vr_offset([0, 0, -0.6])
s.set_vr_offset([0, 0, -0.1])

# State of can hiding, toggled by a menu press
hide_can = False
@@ -154,17 +154,20 @@ while True:
            updated_marker_pos = [origin[0] + dir[0], origin[1] + dir[1], origin[2] + dir[2]]
            gaze_marker.set_position(updated_marker_pos)

    if enable_vr_body:
        if not r_is_valid:
            # See VrBody class for more details on this method
            vr_body.move_body(s, 0, 0, movement_speed, relative_movement_device)
        else:
            vr_body.move_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)

    if r_is_valid:
        r_hand.move(r_trans, r_rot)
        r_hand.set_close_fraction(r_trig)

        if enable_vr_body:
            # See VrBody class for more details on this method
            vr_body.move_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)
        else:
            # Right hand used to control movement
            # Move VR system based on device coordinate system and touchpad press location
            move_player_no_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)
        # Right hand used to control movement
        # Move VR system based on device coordinate system and touchpad press location
        move_player_no_body(s, r_touch_x, r_touch_y, movement_speed, relative_movement_device)

        # Trigger haptic pulse on right touchpad, modulated by trigger close fraction
        # Close the trigger to create a stronger pulse
@@ -1,27 +1,29 @@
import numpy as np
import os
import pybullet as p

from gibson2 import assets_path
from gibson2.objects.object_base import Object
from gibson2.objects.articulated_object import ArticulatedObject
from gibson2.utils.utils import multQuatLists
from gibson2.utils.vr_utils import translate_vr_position_by_vecs


class VrBody(Object):
class VrBody(ArticulatedObject):
    """
    A simple cylinder representing a VR user's body. This stops
    them from moving through physical objects and walls, as well
    as other VR users.
    """
    def __init__(self, height=1.2):
        super(VrBody, self).__init__()
        # Height of VR body
        self.height = 0.6
    def __init__(self):
        self.vr_body_fpath = os.path.join(assets_path, 'models', 'vr_body', 'vr_body.urdf')
        super(VrBody, self).__init__(filename=self.vr_body_fpath, scale=1)
        # Height of VR body - this is relatively tall since we have disabled collision with the floor
        # TODO: Fine-tune this height variable!
        self.height = 0.8
        # Distance between shoulders
        self.shoulder_width = 0.1
        # Width of body from front to back
        self.body_width = 0.05
        self.body_width = 0.01
        # This is the start height that the center of the body will float at
        # We give it 0.2 m of room off the floor to avoid any collisions
        self.start_height = self.height / 2 + 0.2
@@ -37,6 +39,9 @@ class VrBody(Object):
        self.start_y_rot = 0.0
        # Need this extra factor to amplify the HMD movement vector, since the body doesn't reach the HMD each frame (constraints don't set position directly)
        self.hmd_vec_amp = 2
        # Multiplication factor for the backwards offset behind the HMD - this is the distance in m that the torso will sit behind the HMD
        # TODO: Change this back after experimenting
        self.back_disp_factor = 0.2

    # TIMELINE: Call this after loading the VR body into the simulator
    def init_body(self, start_pos):
@@ -44,20 +49,21 @@ class VrBody(Object):
        Initialize VR body to start in a specific location.
        start_pos should contain just an x and y value.
        """
        # TODO: Change this constraint to add rotation from the HMD!
        x, y = start_pos
        self.movement_cid = p.createConstraint(self.body_id, -1, -1, -1, p.JOINT_FIXED,
                                               [0, 0, 0], [0, 0, 0], [x, y, self.start_height])
        self.start_rot = self.get_orientation()

    def _load(self):
        # Use a box to represent the player body
        col_cy = p.createCollisionShape(p.GEOM_BOX, halfExtents=[self.body_width, self.shoulder_width, self.height / 2])
        # Make the body gray
        vis_cy = p.createVisualShape(p.GEOM_BOX, halfExtents=[self.body_width, self.shoulder_width, self.height / 2], rgbaColor=[0.65, 0.65, 0.65, 1])
        body_id = p.createMultiBody(baseMass=1, baseCollisionShapeIndex=col_cy,
                                    baseVisualShapeIndex=vis_cy)

        return body_id
    def rotate_offset_vec(self, offset_vec, theta):
        """
        Rotate offset vector by an angle theta in the xy plane (z-axis rotation). This offset vector has a z component of 0.
        """
        x = offset_vec[0]
        y = offset_vec[1]
        x_new = x * np.cos(theta) - y * np.sin(theta)
        y_new = y * np.cos(theta) + x * np.sin(theta)
        return np.array([x_new, y_new, 0])

    def move_body(self, s, rTouchX, rTouchY, movement_speed, relative_device):
        """
@@ -68,12 +74,33 @@ class VrBody(Object):
        """
        # Calculate right and forward vectors relative to input device
        right, _, forward = s.get_device_coordinate_system(relative_device)
        # Backwards HMD direction
        back_dir = np.array(forward) * -1
        # Project the backwards direction onto the horizontal plane to get the body direction - just remove the z component
        back_dir[2] = 0.0
        # Normalize back_dir
        back_dir = back_dir / np.linalg.norm(back_dir)
        back_dir = back_dir * self.back_disp_factor

        # Get HMD data
        hmd_is_valid, hmd_trans, hmd_rot = s.get_data_for_vr_device('hmd')
        # Set the body to the HMD position on the first frame that it is valid, to aid calculation accuracy
        if self.first_frame and hmd_is_valid:
            self.set_position(hmd_trans)
            body_pos = hmd_trans + back_dir
            # TODO: Need to do the rotation here as well
            self.set_position(body_pos)

            # Set collision filter between body and floor so we can bend down without any obstruction
            # This is an alternative solution to scaling the body height as the player bends down
            # self.floor_ids = s.get_floor_ids()
            # for f_id in self.floor_ids:
            #     p.setCollisionFilterPair(f_id, self.body_id, -1, -1, 0)  # the last argument is 0 to disable collision, 1 to enable it

            # for obj_id in s.objects:
            #     p.setCollisionFilterPair(obj_id, self.body_id, -1, -1, 0)  # the last argument is 0 to disable collision, 1 to enable it

            # TODO: Disable collision with VR hands as well

            self.first_frame = False

        # First frame will not register HMD offset, since no previous hmd position has been recorded
@@ -81,31 +108,40 @@ class VrBody(Object):
            self.prev_hmd_wp = s.get_hmd_world_pos()

        # Get offset to VR body
        offset_to_body = self.get_position() - self.prev_hmd_wp
        # offset_to_body = self.get_position() - self.prev_hmd_wp - back_dir
        # Move the HMD to be aligned with the VR body
        # Set x and y coordinate offsets, but keep current system height (otherwise we teleport into the VR body)
        s.set_vr_offset([offset_to_body[0], offset_to_body[1], s.get_vr_offset()[2]])
        # s.set_vr_offset([offset_to_body[0], offset_to_body[1], s.get_vr_offset()[2]])

        # Get current HMD world position and VR offset
        hmd_wp = s.get_hmd_world_pos()
        curr_offset = s.get_vr_offset()
        # curr_offset = s.get_vr_offset()
        # Translate VR offset using controller information
        translated_offset = translate_vr_position_by_vecs(rTouchX, rTouchY, right, forward, curr_offset, movement_speed)
        # translated_offset = translate_vr_position_by_vecs(rTouchX, rTouchY, right, forward, curr_offset, movement_speed)
        # New player position calculated - amplify the delta in HMD position to account for constraints not moving the body exactly to the new position each frame
        new_player_pos = (hmd_wp - self.prev_hmd_wp) * self.hmd_vec_amp + translated_offset + self.prev_hmd_wp
        # new_player_pos = (hmd_wp - self.prev_hmd_wp) * self.hmd_vec_amp + translated_offset + self.prev_hmd_wp + back_dir
        new_body_pos = hmd_wp + back_dir
        # Attempt to set the VR body to this new position (it will stop if it collides with a wall, for example)
        # This involves setting the translation and rotation constraint
        x, y, z = new_player_pos
        x, y, z = new_body_pos
        new_center = z - self.dist_below_hmd - self.height / 2

        # Extract only the z rotation from the HMD so we can spin the body on the vertical axis
        _, _, curr_z = p.getEulerFromQuaternion(self.get_orientation())
        _, _, old_body_z = p.getEulerFromQuaternion(self.get_orientation())
        delta_hmd_z = 0
        if hmd_is_valid:
            _, _, hmd_z = p.getEulerFromQuaternion(hmd_rot)
            curr_z = hmd_z
            delta_hmd_z = hmd_z - old_body_z

        # Use starting x and y rotation so our body does not get knocked over when we collide with low objects
        new_rot = p.getQuaternionFromEuler([self.start_x_rot, self.start_y_rot, curr_z])
        new_rot = p.getQuaternionFromEuler([self.start_x_rot, self.start_y_rot, old_body_z + delta_hmd_z])
        # Finally move the body based on the rotation - it pivots around the HMD in a circle whose circumference
        # is defined by self.back_disp_factor. We can calculate this translation vector by drawing a vector triangle
        # whose two radii are back_dir and whose angle is delta_hmd_z. Some 2D trigonometry gets us the final result.
        self.rot_trans_vec = self.rotate_offset_vec(back_dir, -1 * delta_hmd_z) - back_dir
        # Add the translated vector to the current offset value
        x += self.rot_trans_vec[0]
        y += self.rot_trans_vec[1]
        p.changeConstraint(self.movement_cid, [x, y, new_center], new_rot, maxForce=2000)

        # Update previous HMD world position at end of frame
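The pivot logic above can be sanity-checked in isolation: rotating back_dir by -delta_hmd_z and subtracting the original back_dir yields the chord of the circle the torso traces around the HMD. A self-contained numpy sketch (values are made up):

import numpy as np

def rotate_offset_vec(offset_vec, theta):
    # Same 2D z-axis rotation as VrBody.rotate_offset_vec
    x, y = offset_vec[0], offset_vec[1]
    return np.array([x * np.cos(theta) - y * np.sin(theta),
                     y * np.cos(theta) + x * np.sin(theta), 0])

back_dir = np.array([0.2, 0.0, 0.0])  # torso sits 0.2 m behind the HMD
delta_hmd_z = np.pi / 2               # HMD yawed 90 degrees this frame
trans = rotate_offset_vec(back_dir, -delta_hmd_z) - back_dir
# The chord of a circle of radius r over angle theta has length 2 * r * sin(theta / 2)
assert np.isclose(np.linalg.norm(trans), 2 * 0.2 * np.sin(delta_hmd_z / 2))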
@@ -174,25 +210,40 @@ class VrHand(ArticulatedObject):
        for jointIndex in range(p.getNumJoints(self.body_id)):
            # Make masses larger for greater stability
            # Mass is in kg, friction is a coefficient
            p.changeDynamics(self.body_id, jointIndex, mass=0.2, lateralFriction=3)
            p.changeDynamics(self.body_id, jointIndex, mass=0.2, lateralFriction=4)
            open_pos = self.open_pos[jointIndex]
            p.resetJointState(self.body_id, jointIndex, open_pos)
            p.setJointMotorControl2(self.body_id, jointIndex, p.POSITION_CONTROL, targetPosition=open_pos, force=500)
        # Keep base light for easier hand movement
        p.changeDynamics(self.body_id, -1, mass=0.05, lateralFriction=0.8)
        p.changeDynamics(self.body_id, -1, mass=0.2, lateralFriction=2)
        # Create constraint that can be used to move the hand
        self.movement_cid = p.createConstraint(self.body_id, -1, -1, -1, p.JOINT_FIXED, [0, 0, 0], [0, 0, 0], start_pos)

    # TODO: Get this working!
    def set_hand_no_collision(self, no_col_id):
        """
        Sets the VrHand to not collide with the body specified by no_col_id.
        """
        p.setCollisionFilterPair(self.body_id, no_col_id, -1, -1, 0)
        hand_joint_num = p.getNumJoints(self.body_id)
        no_col_joint_num = p.getNumJoints(no_col_id)
        # Set all links to ignore collision, if no_col_id has joints
        if no_col_joint_num == 0:
            return

        for i in range(hand_joint_num):
            for j in range(no_col_joint_num):
                p.setCollisionFilterPair(self.body_id, no_col_id, i, j, 0)

    # A close fraction of 1 indicates a fully closed joint, and a close fraction of 0 indicates a fully open joint
    # Joints move smoothly between their values in self.open_pos and self.close_pos
    def set_close_fraction(self, close_frac, maxForce=500):
    def set_close_fraction(self, close_frac):
        for jointIndex in range(p.getNumJoints(self.body_id)):
            open_pos = self.open_pos[jointIndex]
            close_pos = self.close_pos[jointIndex]
            interp_frac = (close_pos - open_pos) * close_frac
            target_pos = open_pos + interp_frac
            p.setJointMotorControl2(self.body_id, jointIndex, p.POSITION_CONTROL, targetPosition=target_pos, force=maxForce)
            p.setJointMotorControl2(self.body_id, jointIndex, p.POSITION_CONTROL, targetPosition=target_pos, force=2000)

    def move(self, trans, rot, maxForce=500):
    def move(self, trans, rot):
        final_rot = multQuatLists(rot, self.base_rot)
        p.changeConstraint(self.movement_cid, trans, final_rot, maxForce=maxForce)
        p.changeConstraint(self.movement_cid, trans, final_rot, maxForce=2000)
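The joint control in set_close_fraction is a plain linear interpolation between each joint's open and close positions; a standalone sketch with made-up joint limits:

# Made-up limits for one joint; real values come from self.open_pos / self.close_pos
open_pos, close_pos = 0.0, 1.22
for close_frac in (0.0, 0.5, 1.0):
    target_pos = open_pos + (close_pos - open_pos) * close_frac
    print(close_frac, target_pos)  # 0.0 -> 0.0, 0.5 -> 0.61, 1.0 -> 1.22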
@@ -280,6 +280,7 @@ PYBIND11_MODULE(EGLRendererContext, m) {
    pymodule.def("updateUVData", &EGLRendererContext::updateUVData, "TBA");
    pymodule.def("updateDynamicData", &EGLRendererContext::updateDynamicData, "TBA");
    pymodule.def("renderOptimized", &EGLRendererContext::renderOptimized, "TBA");
    pymodule.def("clean_meshrenderer_optimized", &EGLRendererContext::clean_meshrenderer_optimized, "TBA");

    //for skybox
@@ -34,8 +34,8 @@ class MeshRendererVR(MeshRenderer):
            self.P = left_proj
            # Set camera to be at the camera position of the VR eye
            self.camera = left_cam_pos
            # Set camera once for both VR eyes
            self.set_light_position_direction([self.camera[0], self.camera[1], 10], [self.camera[0], self.camera[1], 0])
            # Set camera once for both VR eyes - use the right eye since this is what we save in data save and replay
            self.set_light_position_direction([right_cam_pos[0], right_cam_pos[1], 10], [right_cam_pos[0], right_cam_pos[1], 0])

            super().render(modes=('rgb'), return_buffer=False, render_shadow_pass=True)
            self.vrsys.postRenderVRForEye("left", self.color_tex_rgb)
@@ -0,0 +1,126 @@
import gym
import numpy as np
import pybullet as p

from gibson2.external.pybullet_tools.utils import joints_from_names, set_joint_positions
from gibson2.robots.robot_locomotor import LocomotorRobot


class FetchVR(LocomotorRobot):
    """
    Fetch robot used in VR embodiment demos.
    """
    def __init__(self, config):
        self.config = config
        self.wheel_velocity = config.get('wheel_velocity', 1.0)
        self.torso_lift_velocity = config.get('torso_lift_velocity', 1.0)
        self.arm_velocity = config.get('arm_velocity', 1.0)
        self.wheel_dim = 2
        self.torso_lift_dim = 1
        self.arm_dim = 7
        LocomotorRobot.__init__(self,
                                "fetch/fetch_vr.urdf",
                                action_dim=self.wheel_dim + self.torso_lift_dim + self.arm_dim,
                                scale=config.get("robot_scale", 1.0),
                                is_discrete=config.get("is_discrete", False),
                                control="velocity",
                                self_collision=True)

    def set_up_continuous_action_space(self):
        self.action_high = np.array([self.wheel_velocity] * self.wheel_dim +
                                    [self.torso_lift_velocity] * self.torso_lift_dim +
                                    [self.arm_velocity] * self.arm_dim)
        self.action_low = -self.action_high
        self.action_space = gym.spaces.Box(shape=(self.action_dim,),
                                           low=-1.0,
                                           high=1.0,
                                           dtype=np.float32)

    def robot_specific_reset(self):
        super(FetchVR, self).robot_specific_reset()

        # Roll the arm in to the body
        robot_id = self.robot_ids[0]
        arm_joints = joints_from_names(robot_id,
                                       [
                                           'torso_lift_joint',
                                           'shoulder_pan_joint',
                                           'shoulder_lift_joint',
                                           'upperarm_roll_joint',
                                           'elbow_flex_joint',
                                           'forearm_roll_joint',
                                           'wrist_flex_joint',
                                           'wrist_roll_joint'
                                       ])

        rest_position = (0.02, np.pi / 2.0 - 0.4, np.pi / 2.0 - 0.1, -0.4, np.pi / 2.0 + 0.1, 0.0, np.pi / 2.0, 0.0)
        # Might be a better pose from which to initiate manipulation:
        # rest_position = (0.30322468280792236, -1.414019864768982,
        #                  1.5178184935241699, 0.8189625336474915,
        #                  2.200358942909668, 2.9631312579803466,
        #                  -1.2862852996643066, 0.0008453550418615341)

        set_joint_positions(robot_id, arm_joints, rest_position)

    def get_end_effector_position(self):
        return self.parts['gripper_link'].get_position()

    # Return body id of the Fetch robot
    def get_fetch_body_id(self):
        return self.robot_body.bodies[self.robot_body.body_index]

    def set_z_rotation(self, hmd_rot):
        """
        Sets the z rotation of the Fetch VR robot using the provided HMD rotation.
        """
        # Get z component of the HMD rotation
        _, _, hmd_z = p.getEulerFromQuaternion(hmd_rot)
        prev_x, prev_y, _ = p.getEulerFromQuaternion(self.get_orientation())
        # Preserve pre-existing x and y rotations, just force the z rotation to match the HMD
        fetch_rot = p.getQuaternionFromEuler([prev_x, prev_y, hmd_z])
        self.set_orientation(fetch_rot)

    # Set open/close fraction of the end grippers
    def set_fetch_gripper_fraction(self, frac, maxForce=500):
        min_joint = 0.0
        max_joint = 0.05
        right_finger_joint_idx = 20
        left_finger_joint_idx = 21
        # TODO: Set more friction on grippers using p.changeDynamics?
        # min_joint + frac * (max_joint - min_joint)
        target_pos = 0.05
        p.setJointMotorControl2(self.get_fetch_body_id(),
                                right_finger_joint_idx,
                                p.POSITION_CONTROL,
                                targetPosition=target_pos,
                                force=maxForce)

        p.setJointMotorControl2(self.get_fetch_body_id(),
                                left_finger_joint_idx,
                                p.POSITION_CONTROL,
                                targetPosition=target_pos,
                                force=maxForce)

    def get_end_effector_position(self):
        return self.parts['gripper_link'].get_position()

    def load(self):
        ids = super(FetchVR, self).load()
        robot_id = self.robot_ids[0]

        # Disable collision between torso_lift_joint and shoulder_lift_joint,
        # between torso_lift_joint and torso_fixed_joint,
        # between caster_wheel_joint and estop_joint,
        # between caster_wheel_joint and laser_joint,
        # between caster_wheel_joint and torso_fixed_joint,
        # between caster_wheel_joint and l_wheel_joint,
        # and between caster_wheel_joint and r_wheel_joint
        p.setCollisionFilterPair(robot_id, robot_id, 3, 13, 0)
        p.setCollisionFilterPair(robot_id, robot_id, 3, 22, 0)
        p.setCollisionFilterPair(robot_id, robot_id, 0, 20, 0)
        p.setCollisionFilterPair(robot_id, robot_id, 0, 21, 0)
        p.setCollisionFilterPair(robot_id, robot_id, 0, 22, 0)
        p.setCollisionFilterPair(robot_id, robot_id, 0, 1, 0)
        p.setCollisionFilterPair(robot_id, robot_id, 0, 2, 0)

        return ids
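Given the symmetric normalized Box above, the expected usage is to emit actions in [-1, 1] and let the robot class rescale them; a hedged sketch (the config keys mirror the constructor, and the internal rescaling by action_high is our assumption, not confirmed here):

import numpy as np

config = {'wheel_velocity': 1.0, 'torso_lift_velocity': 1.0, 'arm_velocity': 1.0}
wheel_dim, torso_lift_dim, arm_dim = 2, 1, 7
action = np.zeros(wheel_dim + torso_lift_dim + arm_dim, dtype=np.float32)
action[:wheel_dim] = 0.5   # drive both wheels at half of wheel_velocity
action[wheel_dim] = 0.0    # hold the torso lift
# robot.apply_action(action)  # assumed entry point; takes the normalized action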
@@ -90,6 +90,8 @@ class Simulator:
        # renderer + VR
        self.vr_eye_tracking = vr_eye_tracking
        self.vr_mode = vr_mode
        # Starting position for the VR (default set to None if no starting position is specified by the user)
        self.vr_start_pos = None
        self.max_haptic_duration = 4000
        self.image_width = image_width
        self.image_height = image_height
@@ -100,6 +102,17 @@ class Simulator:
        self.optimized_renderer = rendering_settings.optimized
        self.rendering_settings = rendering_settings
        self.viewer = None

        # Settings for adjusting the physics and render timesteps in VR
        # Fraction to multiply the previous render timestep by in the low-pass filter
        self.lp_filter_frac = 0.9

        # Variables for data saving and replay in VR
        self.last_physics_timestep = -1
        self.last_render_timestep = -1
        self.last_physics_step_num = -1
        self.last_frame_dur = -1

        self.load()

        self.class_name_to_class_id = get_class_name_to_class_id()
@@ -116,6 +129,12 @@ class Simulator:
        self.render_timestep = render_timestep
        p.setTimeStep(self.physics_timestep)

    def set_render_timestep(self, render_timestep):
        """
        :param render_timestep: render timestep to set in the Simulator
        """
        self.render_timestep = render_timestep

    def add_viewer(self):
        """
        Attach a debugging viewer to the renderer.
@@ -603,13 +622,44 @@ class Simulator:
            if instance.dynamic:
                self.update_position(instance)

    def step(self):
    def step(self, print_time=False, use_render_timestep_lpf=True):
        """
        Step the simulation at self.render_timestep and update positions in the renderer
        """
        for _ in range(int(self.render_timestep / self.physics_timestep)):
        physics_start_time = time.time()
        physics_timestep_num = int(self.render_timestep / self.physics_timestep)
        for _ in range(physics_timestep_num):
            p.stepSimulation()
        self.sync()
        physics_dur = time.time() - physics_start_time

        render_start_time = time.time()
        render_dur = time.time() - render_start_time
        # Update render timestep using the low-pass filter function
        if use_render_timestep_lpf:
            self.render_timestep = self.lp_filter_frac * \
                self.render_timestep + (1 - self.lp_filter_frac) * render_dur
        frame_dur = physics_dur + render_dur

        # Set variables for data saving and replay
        self.last_physics_timestep = physics_dur
        self.last_render_timestep = render_dur
        self.last_physics_step_num = physics_timestep_num
        self.last_frame_dur = frame_dur

        # Sets the VR starting position if one has been specified by the user
        self.perform_vr_start_pos_move()

        if print_time:
            print('Total frame duration: {} and FPS: {}'.format(
                round(frame_dur, 2), round(1 / max(frame_dur, 0.002), 2)))
            print('Total physics duration: {} and FPS: {}'.format(
                round(physics_dur, 2), round(1 / max(physics_dur, 0.002), 2)))
            print('Number of 1/120 physics steps: {}'.format(physics_timestep_num))
            print('Total render duration: {} and FPS: {}'.format(
                round(render_dur, 2), round(1 / max(render_dur, 0.002), 2)))
            print('-------------------------')

    def sync(self):
        """
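The render-timestep update above is a standard exponential moving average: each frame keeps lp_filter_frac of the old estimate and mixes in the rest from the newly measured render duration. A quick standalone check of the smoothing behavior:

# The same low-pass filter in isolation (values are made up)
lp_filter_frac = 0.9
render_timestep = 1.0 / 90.0              # initial estimate, ~11 ms
for measured_render_dur in [0.02] * 50:   # renderer suddenly takes 20 ms per frame
    render_timestep = lp_filter_frac * render_timestep + \
        (1 - lp_filter_frac) * measured_render_dur
print(round(render_timestep, 4))  # converges toward 0.02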
@@ -622,6 +672,22 @@ class Simulator:
        if (self.use_ig_renderer or self.use_vr_renderer or self.use_simple_viewer) and self.viewer is not None:
            self.viewer.update()

    # Sets the VR position on the first step iteration where the HMD tracking is valid. Not to be confused
    # with self.set_vr_start_pos, which simply records the desired start position before the simulator starts running.
    def perform_vr_start_pos_move(self):
        # Update VR start position if it is not None and the HMD is valid
        # This will keep checking until we can successfully set the start position
        if self.vr_start_pos:
            hmd_is_valid, _, _, _ = self.renderer.vrsys.getDataForVRDevice('hmd')
            if hmd_is_valid:
                offset_to_start = np.array(self.vr_start_pos) - self.get_hmd_world_pos()
                if self.vr_height_offset:
                    offset_to_start[2] = self.vr_height_offset
                self.set_vr_offset(offset_to_start)
                self.vr_start_pos = None

    # Returns event data as a list of lists. Each sub-list contains deviceType and eventType. The list is empty if all
    # events are invalid.
    # deviceType: left_controller, right_controller
@@ -677,9 +743,24 @@ class Simulator:
        is_valid, origin, dir, left_pupil_diameter, right_pupil_diameter = self.renderer.vrsys.getEyeTrackingData()
        return [is_valid, origin, dir, left_pupil_diameter, right_pupil_diameter]

    # Sets the starting position of the VR system in iGibson space
    def set_vr_start_pos(self, start_pos=None, vr_height_offset=None):
        if not self.use_vr_renderer or not start_pos:
            return

        # The VR headset will actually be set to this position during the first frame.
        # This is because we need to know where the headset is in space when it is first picked
        # up to set the initial offset correctly.
        self.vr_start_pos = start_pos
        # This value can be set to specify a height offset instead of an absolute height.
        # We might want to adjust the height of the camera based on the height of the person using VR,
        # but still offset this height. When this option is non-zero it offsets the height by the amount
        # specified instead of overwriting the VR system height output.
        self.vr_height_offset = vr_height_offset

    # Sets the world position of the VR system in iGibson space
    def set_vr_pos(self, pos=None):
        if not self.use_vr_renderer:
        if not self.use_vr_renderer or not pos:
            return

        offset_to_pos = np.array(pos) - self.get_hmd_world_pos()
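A short usage sketch of the two-stage start-position flow (set_vr_start_pos records the request; perform_vr_start_pos_move applies it on the first frame with valid HMD tracking); scene setup elided:

s.set_vr_start_pos([0.5, 0.0, 1.0], vr_height_offset=-0.1)
while True:
    s.step()  # the first valid frame consumes vr_start_pos and sets the offset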
@@ -745,6 +826,17 @@ class Simulator:
            self.renderer.update_hidden_state([instance])
            return

    def get_floor_ids(self):
        """
        Gets the body ids for all floor objects in the scene. This is used internally
        by the VrBody class to disable collisions with the floor.
        """
        floor_ids = []
        for body_id in self.objects:
            if body_id in self.scene.objects_by_id.keys() and self.scene.objects_by_id[body_id].category == 'floors':
                floor_ids.append(body_id)
        return floor_ids

    @staticmethod
    def update_position(instance):
        """
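get_floor_ids pairs with pybullet's collision filtering; a sketch of the intended VrBody use, mirroring the commented-out block in move_body above (body_id here stands for the VR body's id):

for f_id in s.get_floor_ids():
    # Last argument: 0 disables collision for the pair, 1 enables it
    p.setCollisionFilterPair(f_id, body_id, -1, -1, 0)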
@@ -178,7 +178,7 @@ class iGTNTask(TaskNetTask):
        pass

def main():
    igtn_task = iGTNTask('kinematic_checker_testing')
    igtn_task = iGTNTask('kinematic_checker_testing', 2)
    igtn_task.initialize_simulator()

    for i in range(500):
@@ -8,13 +8,8 @@ HDF5 hierarchy:

------ N x action_path (group) - these are paths introduced by the user and are of the form Y x group_name + dataset name

--- frame_data (group)

------ frame_number (dataset)
--------- DATA: int

------ last_frame_time (dataset) - the time the last frame took to simulate and render
--------- DATA: float
--- frame_data (dataset)
------ DATA: [frame_number, last_frame_physics_time, last_frame_render_time, last_frame_physics_step_num, last_frame_time] (len 5 x float)

--- physics_data (group)
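With frame_data flattened into a single length-5 float dataset, a reader indexes fields by position; a hedged h5py sketch (the file name is illustrative):

import h5py

with h5py.File('vr_log.h5', 'r') as hf:  # illustrative file name
    frame_number, physics_t, render_t, physics_steps, frame_t = hf['frame_data'][0]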
@@ -31,6 +26,8 @@ the computer's display when the VR is running
------------ DATA: 4x4 mat
--------- right_eye_proj (dataset)
------------ DATA: 4x4 mat
--------- right_camera_pos (dataset)
------------ DATA: [x, y, z] (len 3)

------ vr_device_data (group)
@@ -91,8 +88,6 @@ class VRLogWriter():
        self.frame_counter = 0
        # Counts the number of frames and does not reset
        self.persistent_frame_count = 0
        # Time when the last frame ended (not valid for the first frame; set to the current time to get a reasonable estimate)
        self.last_frame_end_time = time.time()
        # Handle of the HDF5 file
        self.hf = None
        # Name path data - used to extract data from the data map and save it to HDF5
@@ -104,10 +99,7 @@ class VRLogWriter():
    def generate_name_path_data(self):
        """Generates lists of name paths for resolution in hd5 saving.
        Eg. ['vr', 'vr_camera', 'right_eye_view']."""
        self.name_path_data.extend([
            ['frame_data', 'frame_number'],
            ['frame_data', 'last_frame_duration'],
        ])
        self.name_path_data.extend([['frame_data']])

        for n in self.pb_ids:
            self.name_path_data.append(['physics_data', 'body_id_{0}'.format(n)])
@@ -115,6 +107,7 @@ class VRLogWriter():
        self.name_path_data.extend([
            ['vr', 'vr_camera', 'right_eye_view'],
            ['vr', 'vr_camera', 'right_eye_proj'],
            ['vr', 'vr_camera', 'right_camera_pos'],
            ['vr', 'vr_device_data', 'hmd'],
            ['vr', 'vr_device_data', 'left_controller'],
            ['vr', 'vr_device_data', 'right_controller'],
@@ -128,9 +121,7 @@ class VRLogWriter():
        map is reset after every self.frames_before_write frames, by refresh_data_map."""
        self.data_map = dict()
        self.data_map['action'] = dict()
        self.data_map['frame_data'] = dict()
        self.data_map['frame_data']['frame_number'] = np.full((self.frames_before_write, 1), self.default_fill_sentinel)
        self.data_map['frame_data']['last_frame_duration'] = np.full((self.frames_before_write, 1), self.default_fill_sentinel)
        self.data_map['frame_data'] = np.full((self.frames_before_write, 5), self.default_fill_sentinel)

        self.data_map['physics_data'] = dict()
        for pb_id in self.pb_ids:
@@ -142,7 +133,8 @@ class VRLogWriter():
        self.data_map['vr'] = {
            'vr_camera': {
                'right_eye_view': np.full((self.frames_before_write, 4, 4), self.default_fill_sentinel),
                'right_eye_proj': np.full((self.frames_before_write, 4, 4), self.default_fill_sentinel)
                'right_eye_proj': np.full((self.frames_before_write, 4, 4), self.default_fill_sentinel),
                'right_camera_pos': np.full((self.frames_before_write, 3), self.default_fill_sentinel)
            },
            'vr_device_data': {
                'hmd': np.full((self.frames_before_write, 8), self.default_fill_sentinel),
@@ -229,11 +221,21 @@ class VRLogWriter():
            act_data = self.get_data_for_name_path(full_action_path.split('/'))
            act_data[self.frame_counter, ...] = action

    def write_frame_data_to_map(self):
        """Writes frame data to the data map."""
        self.data_map['frame_data']['frame_number'][self.frame_counter, ...] = self.persistent_frame_count
        self.data_map['frame_data']['last_frame_duration'][self.frame_counter, ...] = time.time() - self.last_frame_end_time
        self.last_frame_end_time = time.time()
    def write_frame_data_to_map(self, s):
        """Writes frame data to the data map.

        Args:
            s (simulator): used to extract information about the VR system
        """
        frame_data = np.array([
            self.persistent_frame_count,
            s.last_physics_timestep,
            s.last_render_timestep,
            s.last_physics_step_num,
            s.last_frame_dur
        ])

        self.data_map['frame_data'][self.frame_counter, ...] = frame_data[:]

    def write_vr_data_to_map(self, s):
        """Writes all VR data to the map. This will write data
@@ -250,6 +252,7 @@ class VRLogWriter():
        # At the end of each frame, the renderer has camera information for the VR right eye
        self.data_map['vr']['vr_camera']['right_eye_view'][self.frame_counter, ...] = s.renderer.V
        self.data_map['vr']['vr_camera']['right_eye_proj'][self.frame_counter, ...] = s.renderer.P
        self.data_map['vr']['vr_camera']['right_camera_pos'][self.frame_counter, ...] = s.renderer.camera

        for device in ['hmd', 'left_controller', 'right_controller']:
            is_valid, trans, rot = s.get_data_for_vr_device(device)
@@ -292,7 +295,7 @@ class VRLogWriter():
        Args:
            s (simulator): used to extract information about the VR system
        """
        self.write_frame_data_to_map()
        self.write_frame_data_to_map(s)
        self.write_vr_data_to_map(s)
        self.write_pybullet_data_to_map()
        self.frame_counter += 1
@@ -330,6 +333,7 @@ class VRLogWriter():

    def end_log_session(self):
        """Closes the HDF5 log file at the end of a logging session."""
        print('VR LOGGER INFO: Ending log writing session after {} frames'.format(self.persistent_frame_count))
        self.hf.close()

class VRLogReader():
@@ -373,13 +377,17 @@ class VRLogReader():
        if self.frame_counter >= self.total_frame_num:
            return

        # Get recorded frame duration for this frame
        frame_duration = self.hf['frame_data']['last_frame_duration'][self.frame_counter][0]
        # Get all frame statistics for the most recent frame
        _, _, render_t, _, frame_duration = list(self.hf['frame_data'][self.frame_counter])
        s.set_render_timestep(render_t)

        read_start_time = time.time()
        # Each frame we first set the camera data
        s.renderer.V = self.hf['vr/vr_camera/right_eye_view'][self.frame_counter]
        s.renderer.P = self.hf['vr/vr_camera/right_eye_proj'][self.frame_counter]
        right_cam_pos = self.hf['vr/vr_camera/right_camera_pos'][self.frame_counter]
        s.renderer.camera = right_cam_pos
        s.renderer.set_light_position_direction([right_cam_pos[0], right_cam_pos[1], 10], [right_cam_pos[0], right_cam_pos[1], 0])

        if fullReplay:
            # If doing full replay we update the physics manually each frame
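Since each recorded row now carries the render timestep and total frame duration, a replay loop can pace itself to the recording; a hedged sketch of that idea (the sleep-based pacing is illustrative, not necessarily how VRLogReader paces frames):

import time

read_start_time = time.time()
# ... restore camera and physics state for this frame ...
elapsed = time.time() - read_start_time
if frame_duration > elapsed:
    time.sleep(frame_duration - elapsed)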