@@ -1,48 +1,37 @@
- from pathlib import Path
import multiprocessing
import time
+ from pathlib import Path
+ from queue import Empty
+ from typing import Optional
+
import cv2
- from loguru import logger
import numpy as np
import sapien
+ import tyro
+ from loguru import logger
from sapien.asset import create_dome_envmap
from sapien.utils import Viewer

from dex_retargeting.constants import RobotName, RetargetingType, HandType, get_default_config_path
from dex_retargeting.retargeting_config import RetargetingConfig
- from dex_retargeting.seq_retarget import SeqRetargeting
from single_hand_detector import SingleHandDetector


- def start_retargeting(queue: multiprocessing.Queue, robot_dir:str, config_path: str, robot_name: str):
+ def start_retargeting(queue: multiprocessing.Queue, robot_dir: str, config_path: str):
    RetargetingConfig.set_default_urdf_dir(str(robot_dir))
    logger.info(f"Start retargeting with config {config_path}")
    retargeting = RetargetingConfig.load_from_file(config_path).build()
-     detector = SingleHandDetector(hand_type="Right", selfie=False)
-
-     # init show ui
-     meta_data = {
-         "dof": len(retargeting.optimizer.robot.dof_joint_names),
-         "joint_names": retargeting.optimizer.robot.dof_joint_names,
-     }
-
-     headless = False
-     if not headless:
-         sapien.render.set_viewer_shader_dir("default")
-         sapien.render.set_camera_shader_dir("default")
-     else:
-         sapien.render.set_viewer_shader_dir("rt")
-         sapien.render.set_camera_shader_dir("rt")
-         sapien.render.set_ray_tracing_samples_per_pixel(16)
-         sapien.render.set_ray_tracing_path_depth(8)
-         sapien.render.set_ray_tracing_denoiser("oidn")
-
+
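+     # Infer which hand to track from the retargeting config file name.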
+     hand_type = "Right" if "right" in config_path.lower() else "Left"
+     detector = SingleHandDetector(hand_type=hand_type, selfie=False)
+
+     sapien.render.set_viewer_shader_dir("default")
+     sapien.render.set_camera_shader_dir("default")
+
    config = RetargetingConfig.load_from_file(config_path)

    # Setup
    scene = sapien.Scene()
-
-     # Ground
    render_mat = sapien.render.RenderMaterial()
    render_mat.base_color = [0.06, 0.08, 0.12, 1]
    render_mat.metallic = 0.0
@@ -61,15 +50,14 @@ def start_retargeting(queue: multiprocessing.Queue, robot_dir:str, config_path:
    cam = scene.add_camera(name="Cheese!", width=600, height=600, fovy=1, near=0.1, far=10)
    cam.set_local_pose(sapien.Pose([0.50, 0, 0.0], [0, 0, 0, -1]))

-
    viewer = Viewer()
    viewer.set_scene(scene)
    viewer.control_window.show_origin_frame = False
    viewer.control_window.move_speed = 0.01
    viewer.control_window.toggle_camera_lines(False)
    viewer.set_camera_pose(cam.get_local_pose())
-
-     # Load robot and set it to a good pose to take picture
+
+     # Load robot and set it to a good pose to take picture
    loader = scene.create_urdf_loader()
    filepath = Path(config.urdf_path)
    robot_name = filepath.stem
@@ -89,14 +77,13 @@ def start_retargeting(queue: multiprocessing.Queue, robot_dir:str, config_path:
    elif "svh" in robot_name:
        loader.scale = 1.5

-
    if "glb" not in robot_name:
        filepath = str(filepath).replace(".urdf", "_glb.urdf")
    else:
        filepath = str(filepath)
-
+
    robot = loader.load(filepath)
-
+
    if "ability" in robot_name:
        robot.set_pose(sapien.Pose([0, 0, -0.15]))
    elif "shadow" in robot_name:
@@ -111,70 +98,79 @@ def start_retargeting(queue: multiprocessing.Queue, robot_dir:str, config_path:
        robot.set_pose(sapien.Pose([0, 0, -0.15]))
    elif "svh" in robot_name:
        robot.set_pose(sapien.Pose([0, 0, -0.13]))
-
+
    # Different robot loader may have different orders for joints
    sapien_joint_names = [joint.get_name() for joint in robot.get_active_joints()]
-     retargeting_joint_names = meta_data["joint_names"]
+     retargeting_joint_names = retargeting.joint_names
    retargeting_to_sapien = np.array([retargeting_joint_names.index(name) for name in sapien_joint_names]).astype(int)
-
-     for _ in range(2):
-         viewer.render()

    while True:
-         rgb = queue.get()
-         if rgb is None:
-             time.sleep(.1)
-             continue
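+         # Block for up to five seconds waiting for the next frame from the camera process.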
+         try:
+             rgb = queue.get(timeout=5)
+         except Empty:
+             logger.error("Failed to fetch an image from the camera within 5 seconds. Please check your web camera device.")
+             return

        _, joint_pos, _, _ = detector.detect(rgb)
-         logger.info("join pos {}", joint_pos)
        if joint_pos is None:
-             continue
-
-         retargeting_type = retargeting.optimizer.retargeting_type
-         indices = retargeting.optimizer.target_link_human_indices
-         if retargeting_type == "POSITION":
-             indices = indices
-             ref_value = joint_pos[indices, :]
+             logger.warning(f"{hand_type} hand is not detected.")
        else:
-             origin_indices = indices[0, :]
-             task_indices = indices[1, :]
-             ref_value = joint_pos[task_indices, :] - joint_pos[origin_indices, :]
-         qpos = retargeting.retarget(ref_value)
-         logger.info(f"qpos {qpos}")
-         robot.set_qpos((qpos)[retargeting_to_sapien])
-
+ retargeting_type = retargeting .optimizer .retargeting_type
119
+ indices = retargeting .optimizer .target_link_human_indices
120
+ if retargeting_type == "POSITION" :
121
+ indices = indices
122
+ ref_value = joint_pos [indices , :]
123
+ else :
124
+ origin_indices = indices [0 , :]
125
+ task_indices = indices [1 , :]
126
+ ref_value = joint_pos [task_indices , :] - joint_pos [origin_indices , :]
127
+ qpos = retargeting .retarget (ref_value )
128
+ robot .set_qpos (qpos [retargeting_to_sapien ])
129
+
147
130
for _ in range (2 ):
148
131
viewer .render ()
149
-
150
132
151
- def produce_frame (queue : multiprocessing .Queue ):
152
- cap = cv2 .VideoCapture (0 )
133
+
134
+ def produce_frame (queue : multiprocessing .Queue , camera_path : Optional [str ] = None ):
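+     # Open the default webcam (device index 0) unless an explicit camera path is given.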
+     if camera_path is None:
+         cap = cv2.VideoCapture(0)
+     else:
+         cap = cv2.VideoCapture(camera_path)
+
    while cap.isOpened():
        success, image = cap.read()
        if not success:
            continue
        frame = image
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        queue.put(image)
-         time.sleep(.2)
-         cv2.imshow('demo', frame)
-
-         if cv2.waitKey(1) & 0xFF == ord('q'):
-             break
+         time.sleep(1 / 30.0)
+         cv2.imshow("demo", frame)

+         if cv2.waitKey(1) & 0xFF == ord("q"):
+             break

- def main():
-     robot_name: RobotName = RobotName.allegro
-     retargeting_type: RetargetingType = RetargetingType.vector
-     hand_type: HandType = HandType.right

+ def main(
+     robot_name: RobotName, retargeting_type: RetargetingType, hand_type: HandType, camera_path: Optional[str] = None
+ ):
+     """
+     Detects the human hand pose from a video and translates the human pose trajectory into a robot pose trajectory.
+
+     Args:
+         robot_name: The identifier for the robot. This should match one of the default supported robots.
+         retargeting_type: The type of retargeting; each type corresponds to a different retargeting algorithm.
+         hand_type: Specifies which hand is being tracked, either left or right.
+             Please note that retargeting is specific to the same type of hand: a left robot hand can only be retargeted
+             to another left robot hand, and the same applies for the right hand.
+         camera_path: The device path passed to OpenCV to open the web camera. Device 0 is used by default.
+     """
    config_path = get_default_config_path(robot_name, retargeting_type, hand_type)
    robot_dir = Path(__file__).absolute().parent.parent.parent / "assets" / "robots" / "hands"

    queue = multiprocessing.Queue(maxsize=1000)
-     producer_process = multiprocessing.Process(target=produce_frame, args=(queue,))
-     consumer_process = multiprocessing.Process(target=start_retargeting, args=(queue, str(robot_dir), str(config_path), robot_name))
+     producer_process = multiprocessing.Process(target=produce_frame, args=(queue, camera_path))
+     consumer_process = multiprocessing.Process(target=start_retargeting, args=(queue, str(robot_dir), str(config_path)))

    producer_process.start()
    consumer_process.start()
@@ -186,5 +182,5 @@ def main():
    print("done")


- if __name__ == '__main__':
-     main()
+ if __name__ == "__main__":
+     tyro.cli(main)
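+ # Example invocation (flag names follow tyro's CLI conventions; substitute the actual script
+ # filename from your checkout):
+ #   python3 <script>.py --robot-name allegro --retargeting-type vector --hand-type right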