jeffacce committed
Commit 393d3de · 1 parent: cd11fb7

initial commit

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete file list.
Files changed (50)
  1. .gitattributes +3 -0
  2. LICENSE +21 -0
  3. README.md +140 -3
  4. conda_env.yml +204 -0
  5. configs/encoder/identity.yaml +2 -0
  6. configs/encoder/resnet18_random.yaml +4 -0
  7. configs/env/block_push_multiview.yaml +12 -0
  8. configs/env/libero_goal.yaml +9 -0
  9. configs/env/pusht.yaml +11 -0
  10. configs/env/sim_kitchen.yaml +11 -0
  11. configs/env/your_dataset.yaml +8 -0
  12. configs/env_vars/env_vars.yaml +8 -0
  13. configs/projector/inverse_dynamics_blockpush.yaml +8 -0
  14. configs/projector/inverse_dynamics_libero.yaml +8 -0
  15. configs/projector/inverse_dynamics_pusht.yaml +8 -0
  16. configs/projector/inverse_dynamics_sim_kitchen.yaml +8 -0
  17. configs/projector/inverse_dynamics_your_dataset.yaml +11 -0
  18. configs/ssl/dynamo_blockpush.yaml +20 -0
  19. configs/ssl/dynamo_libero.yaml +20 -0
  20. configs/ssl/dynamo_pusht.yaml +20 -0
  21. configs/ssl/dynamo_sim_kitchen.yaml +20 -0
  22. configs/ssl/dynamo_your_dataset.yaml +20 -0
  23. configs/train_blockpush.yaml +53 -0
  24. configs/train_libero_goal.yaml +53 -0
  25. configs/train_pusht.yaml +54 -0
  26. configs/train_sim_kitchen.yaml +53 -0
  27. configs/train_your_dataset.yaml +53 -0
  28. datasets/__init__.py +5 -0
  29. datasets/block_pushing.py +79 -0
  30. datasets/core.py +345 -0
  31. datasets/libero.py +120 -0
  32. datasets/pusht.py +63 -0
  33. datasets/sim_kitchen.py +58 -0
  34. datasets/vqbet_repro.py +120 -0
  35. datasets/your_dataset.py +22 -0
  36. envs/assets/block.urdf +31 -0
  37. envs/assets/block2.urdf +31 -0
  38. envs/assets/blocks/blue_cube.urdf +30 -0
  39. envs/assets/blocks/cube.obj +446 -0
  40. envs/assets/blocks/green_star.urdf +30 -0
  41. envs/assets/blocks/moon.obj +446 -0
  42. envs/assets/blocks/pentagon.obj +419 -0
  43. envs/assets/blocks/red_moon.urdf +30 -0
  44. envs/assets/blocks/star.obj +689 -0
  45. envs/assets/blocks/yellow_pentagon.urdf +30 -0
  46. envs/assets/insert.urdf +66 -0
  47. envs/assets/plane.obj +18 -0
  48. envs/assets/suction/base.obj +396 -0
  49. envs/assets/suction/cylinder.urdf +98 -0
  50. envs/assets/suction/cylinder_real.urdf +98 -0
.gitattributes CHANGED
@@ -33,3 +33,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ *.msh filter=lfs diff=lfs merge=lfs -text
+ *.jpg filter=lfs diff=lfs merge=lfs -text
+ *.png filter=lfs diff=lfs merge=lfs -text
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 Zichen Jeff Cui
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
README.md CHANGED
@@ -1,3 +1,140 @@
- ---
- license: mit
- ---
+ # **DynaMo**: In-Domain Dynamics Pretraining for Visuo-Motor Control
+ [[Paper]](https://arxiv.org/abs/2409.12192) [[Project Website]](https://dynamo-ssl.github.io/) [[Data]](https://osf.io/kxehw/)
+
+ [Zichen Jeff Cui](https://jeffcui.com/), [Hengkai Pan](https://www.ri.cmu.edu/ri-people/hengkai-pan/), [Aadhithya Iyer](https://aadhithya14.github.io/), [Siddhant Haldar](https://siddhanthaldar.github.io/), and [Lerrel Pinto](https://www.lerrelpinto.com/), New York University
+
+ This repo contains code for DynaMo visual pretraining and for reproducing the simulated-environment experiments. Datasets will be uploaded soon.
+
+ ## Getting started
+ The following assumes the current working directory is the root directory of this repo; tested on Ubuntu 22.04 LTS (amd64).
+ ### Setting up the project environments
+ - Install the project environment:
+ ```
+ conda env create --file=conda_env.yml
+ ```
+ - Activate the environment:
+ ```
+ conda activate dynamo-repro
+ ```
+ - To enable logging, log in with a `wandb` account:
+ ```
+ wandb login
+ ```
+ Alternatively, to disable logging altogether, set the environment variable `WANDB_MODE`:
+ ```
+ export WANDB_MODE=disabled
+ ```
+
+ ### Getting the training datasets
+ [Get the dataset here](https://osf.io/kxehw/).
+
+ (Updated Sep 29: the sim kitchen dataset now supports lazy loading; set `prefetch=False` in the sim kitchen configs. If you encounter errors, try downloading the latest dataset zips from the link above.)
+ - Download all files in the `datasets` directory, combine all partitions, and unzip:
+ ```
+ zip -s- dynamo_repro_datasets.zip -O combined.zip
+ unzip combined.zip
+ ```
+ - In `./configs/env_vars/env_vars.yaml`, set `dataset_root` to the unzipped parent directory containing all datasets.
+ - In `./eval_configs/env_vars/env_vars.yaml`, set `dataset_root` to the unzipped parent directory containing all datasets.
+ - In `./eval_configs/env_vars/env_vars.yaml`, set `save_path` to where you want to save the rollout results (e.g. the root directory of this repo).
+ - Environments:
+   - `sim_kitchen`: Franka kitchen environment
+   - `block_push_multiview`: Block push environment
+   - `libero_goal`: LIBERO Goal environment
+   - `pusht`: Push-T environment
+
+ ## Reproducing experiments
+ The following assumes the current working directory is the root directory of this repo.
+
+ To reproduce the experiment results, the overall steps are:
+ 1. Activate the conda environment with
+ ```
+ conda activate dynamo-repro
+ ```
+ 2. Train the visual encoder with `python3 train.py --config-name=train_*`. A model snapshot will be saved to `./exp_local/...`.
+ 3. In `eval_configs/encoder`, in the corresponding environment config, set the encoder file path `f` to the saved snapshot.
+ 4. Evaluate with `python3 online_eval.py --config-name=train_*`.
+
+ See below for detailed steps for each environment.
+
+ ### Franka Kitchen
+ - Train the encoder:
+ ```
+ python3 train.py --config-name=train_sim_kitchen
+ ```
+ Snapshots will be saved to a new timestamped directory `./exp_local/{date}/{time}_train_sim_kitchen_dynamo`.
+
+ The encoder snapshot will be at `./exp_local/{date}/{time}_train_sim_kitchen_dynamo/encoder.pt`.
+ - In `eval_configs/encoder/kitchen_dynamo.yaml`, set `SNAPSHOT_PATH` to the absolute path of the encoder snapshot above.
+ - Evaluation:
+ ```
+ MUJOCO_GL=egl python3 online_eval.py --config-name=train_sim_kitchen
+ ```
+
+ ### Block Pushing
+ - Train the encoder:
+ ```
+ python3 train.py --config-name=train_blockpush
+ ```
+ Snapshots will be saved to a new timestamped directory `./exp_local/{date}/{time}_train_blockpush_dynamo`.
+
+ The encoder snapshot will be at `./exp_local/{date}/{time}_train_blockpush_dynamo/encoder.pt`.
+ - In `eval_configs/encoder/blockpush_dynamo.yaml`, set `SNAPSHOT_PATH` to the absolute path of the encoder snapshot above.
+ - Evaluation:
+ ```
+ ASSET_PATH=$(pwd) python3 online_eval.py --config-name=train_blockpush
+ ```
+ (Evaluation requires including this repository in `ASSET_PATH`.)
+
+ ### Push-T
+ - Train:
+ ```
+ python3 train.py --config-name=train_pusht
+ ```
+ Snapshots will be saved to a new timestamped directory `./exp_local/{date}/{time}_train_pusht_dynamo`.
+
+ The encoder snapshot will be at `./exp_local/{date}/{time}_train_pusht_dynamo/encoder.pt`.
+ - In `eval_configs/encoder/pusht_dynamo.yaml`, set `SNAPSHOT_PATH` to the absolute path of the encoder snapshot above.
+ - Evaluation:
+ ```
+ python3 online_eval.py --config-name=train_pusht
+ ```
+
+ ### LIBERO Goal
+ - Train:
+ ```
+ python3 train.py --config-name=train_libero_goal
+ ```
+ Snapshots will be saved to a new timestamped directory `./exp_local/{date}/{time}_train_libero_goal_dynamo`.
+
+ The encoder snapshot will be at `./exp_local/{date}/{time}_train_libero_goal_dynamo/encoder.pt`.
+ - In `eval_configs/encoder/libero_dynamo.yaml`, set `SNAPSHOT_PATH` to the absolute path of the encoder snapshot above.
+ - Evaluation:
+ ```
+ MUJOCO_GL=egl python3 online_eval.py --config-name=train_libero_goal
+ ```
+
+ ## Train on your own dataset
+ - Plug in your dataset in these files:
+   - `datasets/your_dataset.py`
+   - `configs/env/your_dataset.yaml`
+   - `configs/env_vars/env_vars.yaml`
+
+ - Check the inverse/forward model configs:
+   - `configs/train_your_dataset.yaml`
+     - This is the main config.
+   - `configs/ssl/dynamo_your_dataset.yaml`
+     - If the model converges slowly, try setting `ema_beta` to `null` to use SimSiam instead of an EMA encoder during training.
+   - `configs/projector/inverse_dynamics_your_dataset.yaml`
+     - We find that setting the inverse dynamics `output_dim` to approximately the underlying state dimension usually works well.
+       - For sim environments, this is the state-based observation dimension.
+       - For real environments, e.g. a 7-DoF robot arm + gripper (1D) manipulating a rigid object (6D), this would be ~16 dimensions.
+
+ - Add linear probes for training diagnostics:
+   - `workspaces/your_workspace.py`
+     - This template computes linear-probe and nearest-neighbor MSE from the image embeddings to states/actions, for monitoring training convergence (a minimal sketch of such a probe follows this README section).
+     - It assumes that your dataset class has `states` (`batch` x `time` x `state_dim`) and `actions` (`batch` x `time` x `action_dim`) attributes.
+     - For a real-world dataset, you can use proprioception as the state.
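A minimal sketch of the linear-probe diagnostic described in the last bullet (illustrative only, not the repo's `workspaces/your_workspace.py`): it assumes you have already flattened image embeddings and ground-truth states into `(N, dim)` arrays, and uses the scikit-learn version pinned in `conda_env.yml`.

```python
# Hypothetical diagnostic sketch: linear-probe and nearest-neighbor MSE from
# frozen image embeddings to low-dimensional states (or actions).
import numpy as np
from sklearn.linear_model import Ridge
from sklearn.metrics import mean_squared_error
from sklearn.neighbors import KNeighborsRegressor


def probe_mse(train_emb, train_state, val_emb, val_state):
    """Return (linear-probe MSE, 1-NN MSE) on the validation split."""
    linear = Ridge(alpha=1.0).fit(train_emb, train_state)
    knn = KNeighborsRegressor(n_neighbors=1).fit(train_emb, train_state)
    return (
        mean_squared_error(val_state, linear.predict(val_emb)),
        mean_squared_error(val_state, knn.predict(val_emb)),
    )


# Toy usage with random arrays; in practice the embeddings come from the
# pretrained encoder and the states from the dataset's `states` attribute.
rng = np.random.default_rng(0)
emb, state = rng.normal(size=(512, 64)), rng.normal(size=(512, 9))
lin_mse, nn_mse = probe_mse(emb[:400], state[:400], emb[400:], state[400:])
print(f"linear probe MSE: {lin_mse:.4f}, 1-NN MSE: {nn_mse:.4f}")
```

A probe MSE that keeps decreasing over pretraining is the convergence signal the README refers to.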
conda_env.yml ADDED
@@ -0,0 +1,204 @@
1
+ name: dynamo-repro
2
+ channels:
3
+ - conda-forge
4
+ - defaults
5
+ dependencies:
6
+ - pip=20.0.2=py38_1
7
+ - python=3.8.17=he550d4f_0_cpython
8
+ - readline=8.2=h8228510_1
9
+ - setuptools=68.0.0=pyhd8ed1ab_0
10
+ - tk=8.6.12=h27826a3_0
11
+ - wheel=0.41.1=pyhd8ed1ab_0
12
+ - xz=5.2.6=h166bdaf_0
13
+ - pip:
14
+ - absl-py==1.4.0
15
+ - accelerate==0.22.0
16
+ - antlr4-python3-runtime==4.9.3
17
+ - anyio==3.7.1
18
+ - appdirs==1.4.4
19
+ - argon2-cffi==23.1.0
20
+ - argon2-cffi-bindings==21.2.0
21
+ - asttokens==2.2.1
22
+ - async-lru==2.0.4
23
+ - attrs==23.1.0
24
+ - av==10.0.0
25
+ - babel==2.12.1
26
+ - backcall==0.2.0
27
+ - bddl==3.5.0
28
+ - beautifulsoup4==4.12.2
29
+ - bleach==6.0.0
30
+ - certifi==2023.7.22
31
+ - cffi==1.15.1
32
+ - charset-normalizer==3.2.0
33
+ - click==8.1.7
34
+ - cloudpickle==2.2.1
35
+ - cmake==3.27.2
36
+ - comm==0.1.4
37
+ - contourpy==1.1.0
38
+ - cycler==0.11.0
39
+ - cython==0.29.37
40
+ - git+https://github.com/Farama-Foundation/d4rl@71a9549f2091accff93eeff68f1f3ab2c0e0a288#egg=d4rl
41
+ - debugpy==1.6.7.post1
42
+ - decorator==4.4.2
43
+ - decord==0.6.0
44
+ - defusedxml==0.7.1
45
+ - dm-control==1.0.14
46
+ - dm-env==1.6
47
+ - dm-tree==0.1.8
48
+ - docker-pycreds==0.4.0
49
+ - easydict==1.13
50
+ - einops==0.6.1
51
+ - evdev==1.6.1
52
+ - exceptiongroup==1.1.3
53
+ - executing==1.2.0
54
+ - fasteners==0.18
55
+ - fastjsonschema==2.18.0
56
+ - filelock==3.12.2
57
+ - fonttools==4.42.1
58
+ - future==0.18.3
59
+ - gitdb==4.0.10
60
+ - gitpython==3.1.32
61
+ - glfw==2.6.2
62
+ - gym==0.23.1
63
+ - gdown==5.1.0
64
+ - h5py==3.9.0
65
+ - huggingface-hub==0.22.2
66
+ - hydra-core==1.3.2
67
+ - hydra-submitit-launcher==1.2.0
68
+ - idna==3.4
69
+ - imageio==2.31.1
70
+ - imageio-ffmpeg==0.4.8
71
+ - importlib-resources==6.0.1
72
+ - iopath==0.1.10
73
+ - ipdb==0.13.13
74
+ - ipykernel==6.25.1
75
+ - ipython==8.12.2
76
+ - ipywidgets==8.1.0
77
+ - jedi==0.19.0
78
+ - jinja2==3.1.2
79
+ - joblib==1.3.2
80
+ - json5==0.9.14
81
+ - jsonschema==4.19.0
82
+ - jsonschema-specifications==2023.7.1
83
+ - jupyter-client==8.3.0
84
+ - jupyter-core==5.3.1
85
+ - jupyter-events==0.7.0
86
+ - jupyter-lsp==2.2.0
87
+ - jupyter-server==2.7.2
88
+ - jupyter-server-terminals==0.4.4
89
+ - jupyterlab==4.0.5
90
+ - jupyterlab-pygments==0.2.2
91
+ - jupyterlab-server==2.24.0
92
+ - jupyterlab-widgets==3.0.8
93
+ - kiwisolver==1.4.4
94
+ - labmaze==1.0.6
95
+ - lit==16.0.6
96
+ - lxml==4.9.3
97
+ - markupsafe==2.1.3
98
+ - matplotlib==3.7.2
99
+ - matplotlib-inline==0.1.6
100
+ - mistune==3.0.1
101
+ - moviepy==1.0.3
102
+ - mpmath==1.3.0
103
+ - msgpack==1.0.5
104
+ - mujoco==2.3.7
105
+ - mujoco-py==2.1.2.14
106
+ - nbclient==0.8.0
107
+ - nbconvert==7.7.4
108
+ - nbformat==5.9.2
109
+ - nest-asyncio==1.5.7
110
+ - networkx==3.1
111
+ - notebook-shim==0.2.3
112
+ - numpy==1.24.4
113
+ - nvidia-cublas-cu11==11.10.3.66
114
+ - nvidia-cuda-cupti-cu11==11.7.101
115
+ - nvidia-cuda-nvrtc-cu11==11.7.99
116
+ - nvidia-cuda-runtime-cu11==11.7.99
117
+ - nvidia-cudnn-cu11==8.5.0.96
118
+ - nvidia-cufft-cu11==10.9.0.58
119
+ - nvidia-curand-cu11==10.2.10.91
120
+ - nvidia-cusolver-cu11==11.4.0.1
121
+ - nvidia-cusparse-cu11==11.7.4.91
122
+ - nvidia-nccl-cu11==2.14.3
123
+ - nvidia-nvtx-cu11==11.7.91
124
+ - omegaconf==2.3.0
125
+ - opencv-python==4.8.0.76
126
+ - overrides==7.4.0
127
+ - packaging==23.1
128
+ - pandas==2.0.3
129
+ - pandocfilters==1.5.0
130
+ - parso==0.8.3
131
+ - patchelf==0.17.2.1
132
+ - pathtools==0.1.2
133
+ - pexpect==4.8.0
134
+ - pickleshare==0.7.5
135
+ - pillow==10.0.0
136
+ - pkgutil-resolve-name==1.3.10
137
+ - platformdirs==3.10.0
138
+ - prettytable==3.8.0
139
+ - proglog==0.1.10
140
+ - prometheus-client==0.17.1
141
+ - prompt-toolkit==3.0.39
142
+ - protobuf==4.24.1
143
+ - psutil==5.9.5
144
+ - ptyprocess==0.7.0
145
+ - pure-eval==0.2.2
146
+ - pybullet==3.2.5
147
+ - pycparser==2.21
148
+ - pygame==2.5.2
149
+ - pygments==2.16.1
150
+ - pymunk==6.6.0
151
+ - pynput==1.7.6
152
+ - pynvml==11.5.0
153
+ - pyopengl==3.1.7
154
+ - pyopengl-accelerate==3.1.7
155
+ - pyparsing==3.0.9
156
+ - python-json-logger==2.0.7
157
+ - python-xlib==0.33
158
+ - pyyaml==6.0.1
159
+ - pyzmq==25.1.0
160
+ - referencing==0.30.2
161
+ - requests==2.31.0
162
+ - rfc3339-validator==0.1.4
163
+ - rfc3986-validator==0.1.1
164
+ - robosuite==1.4.1
165
+ - rpds-py==0.9.2
166
+ - scikit-image==0.19.3
167
+ - scikit-learn==1.3.2
168
+ - scipy==1.10.1
169
+ - send2trash==1.8.2
170
+ - sentry-sdk==1.29.2
171
+ - setproctitle==1.3.2
172
+ - shapely==2.0.3
173
+ - six==1.16.0
174
+ - smmap==5.0.0
175
+ - sniffio==1.3.0
176
+ - soupsieve==2.4.1
177
+ - stack-data==0.6.2
178
+ - submitit==1.5.1
179
+ - sympy==1.12
180
+ - tables==3.8.0
181
+ - tabulate==0.9.0
182
+ - termcolor==2.3.0
183
+ - terminado==0.17.1
184
+ - threadpoolctl==3.2.0
185
+ - tifffile==2023.7.10
186
+ - timm==0.9.16
187
+ - tinycss2==1.2.1
188
+ - tomli==2.0.1
189
+ - torch==2.0.1
190
+ - torchvision==0.15.2
191
+ - tornado==6.3.3
192
+ - tqdm==4.66.1
193
+ - traitlets==5.9.0
194
+ - triton==2.0.0
195
+ - typing-extensions==4.7.1
196
+ - urllib3==2.0.4
197
+ - wandb==0.15.8
198
+ - wcwidth==0.2.6
199
+ - webencodings==0.5.1
200
+ - websocket-client==1.6.1
201
+ - widgetsnbextension==4.0.8
202
+ - zarr==2.16.1
203
+ - zipp==3.16.2
204
+ - git+https://github.com/Farama-Foundation/d4rl.git
configs/encoder/identity.yaml ADDED
@@ -0,0 +1,2 @@
1
+ _target_: torch.nn.Identity
2
+ output_dim: ${env.obs_dim}
configs/encoder/resnet18_random.yaml ADDED
@@ -0,0 +1,4 @@
1
+ _target_: models.encoder.resnet.resnet18
2
+ pretrained: False
3
+ output_dim: 512
4
+ unit_norm: False
configs/env/block_push_multiview.yaml ADDED
@@ -0,0 +1,12 @@
1
+ views: 2
2
+ action_dim: 2
3
+
4
+ workspace:
5
+ _target_: workspaces.block_push_multiview.BlockPushMultiviewWorkspace
6
+
7
+ dataset:
8
+ _target_: datasets.block_pushing.PushMultiviewTrajectoryDataset
9
+ data_directory: ${env_vars.datasets.block_push}
10
+ onehot_goals: False
11
+ subset_fraction: ${subset_fraction}
12
+ prefetch: True
configs/env/libero_goal.yaml ADDED
@@ -0,0 +1,9 @@
1
+ views: 2
2
+ action_dim: 7
3
+
4
+ workspace:
5
+ _target_: workspaces.libero_goal.LiberoGoalWorkspace
6
+
7
+ dataset:
8
+ _target_: datasets.libero.LiberoGoalDataset
9
+ data_directory: ${env_vars.datasets.libero}
configs/env/pusht.yaml ADDED
@@ -0,0 +1,11 @@
1
+ views: 1
2
+ action_dim: 2
3
+
4
+ workspace:
5
+ _target_: workspaces.pusht.PushTWorkspace
6
+
7
+ dataset:
8
+ _target_: datasets.pusht.PushTDataset
9
+ data_directory: ${env_vars.datasets.pusht}
10
+ subset_fraction: ${subset_fraction}
11
+ relative: ${relative}
configs/env/sim_kitchen.yaml ADDED
@@ -0,0 +1,11 @@
1
+ views: 1
2
+ action_dim: 9
3
+
4
+ workspace:
5
+ _target_: workspaces.sim_kitchen.SimKitchenWorkspace
6
+
7
+ dataset:
8
+ _target_: datasets.sim_kitchen.SimKitchenTrajectoryDataset
9
+ data_directory: ${env_vars.datasets.sim_kitchen}
10
+ onehot_goals: False
11
+ prefetch: True
configs/env/your_dataset.yaml ADDED
@@ -0,0 +1,8 @@
1
+ views: NUM_VIEWS
2
+
3
+ workspace:
4
+ _target_: workspaces.your_workspace.YourWorkspace
5
+
6
+ dataset:
7
+ _target_: datasets.your_dataset.YourTrajectoryDataset
8
+ data_directory: ${env_vars.datasets.your_trajectory_dataset}
configs/env_vars/env_vars.yaml ADDED
@@ -0,0 +1,8 @@
1
+ dataset_root: /PATH/TO/DATASET/ROOT # e.g. set this to the unzipped directory of all datasets
2
+
3
+ datasets:
4
+ pusht: ${env_vars.dataset_root}/pusht_dataset
5
+ sim_kitchen: ${env_vars.dataset_root}/sim_kitchen_dataset
6
+ libero: ${env_vars.dataset_root}/libero_dataset
7
+ block_push: ${env_vars.dataset_root}/block_push_dataset
8
+ your_trajectory_dataset: YOUR_DATASET_PATH
configs/projector/inverse_dynamics_blockpush.yaml ADDED
@@ -0,0 +1,8 @@
1
+ _target_: models.projector.inverse_dynamics.InverseDynamicsProjector
2
+ window_size: ${window_size}
3
+ input_dim: ${encoder.output_dim}
4
+ n_layer: 4
5
+ n_head: 4
6
+ n_embd: 72
7
+ output_dim: 16
8
+ dropout: 0.0
configs/projector/inverse_dynamics_libero.yaml ADDED
@@ -0,0 +1,8 @@
1
+ _target_: models.projector.inverse_dynamics.InverseDynamicsProjector
2
+ window_size: ${window_size}
3
+ input_dim: ${encoder.output_dim}
4
+ n_layer: 6
5
+ n_head: 6
6
+ n_embd: 120
7
+ output_dim: 32
8
+ dropout: 0.0
configs/projector/inverse_dynamics_pusht.yaml ADDED
@@ -0,0 +1,8 @@
1
+ _target_: models.projector.inverse_dynamics.InverseDynamicsProjector
2
+ window_size: ${window_size}
3
+ input_dim: ${encoder.output_dim}
4
+ n_layer: 6
5
+ n_head: 6
6
+ n_embd: 120
7
+ output_dim: 8
8
+ dropout: 0.0
configs/projector/inverse_dynamics_sim_kitchen.yaml ADDED
@@ -0,0 +1,8 @@
1
+ _target_: models.projector.inverse_dynamics.InverseDynamicsProjector
2
+ window_size: ${window_size}
3
+ input_dim: ${encoder.output_dim}
4
+ n_layer: 6
5
+ n_head: 6
6
+ n_embd: 120
7
+ output_dim: 64
8
+ dropout: 0.0
configs/projector/inverse_dynamics_your_dataset.yaml ADDED
@@ -0,0 +1,11 @@
1
+ _target_: models.projector.inverse_dynamics.InverseDynamicsProjector
2
+ window_size: ${window_size}
3
+ input_dim: ${encoder.output_dim}
4
+ n_layer: 6
5
+ n_head: 6
6
+ n_embd: 120
7
+ # output_dim: for sim environments, set it to the state-based observation dimension.
8
+ # for real environments, set it to the estimated underlying environment state dimension.
9
+ # (e.g. if we have a 7DoF robot arm and a free rigid object, 16 should work fine)
10
+ output_dim: OUTPUT_DIM
11
+ dropout: 0.0
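To make the `output_dim` guidance above concrete, here is the rough count behind the 7-DoF example mentioned in the README (illustrative arithmetic only):

```python
# Estimating the underlying state dimension for a 7-DoF arm with a 1-D gripper
# manipulating one free rigid object (3-D position + 3-D orientation).
arm_dofs, gripper_dofs, object_pose_dims = 7, 1, 6
state_dim_estimate = arm_dofs + gripper_dofs + object_pose_dims  # = 14
# Rounding up a little (e.g. output_dim: 16) works fine in practice.
```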
configs/ssl/dynamo_blockpush.yaml ADDED
@@ -0,0 +1,20 @@
1
+ _target_: models.ssl.dynamo.DynaMoSSL
2
+ window_size: ${window_size}
3
+ feature_dim: ${encoder.output_dim}
4
+ projection_dim: ${projector.output_dim}
5
+ n_layer: 4
6
+ n_head: 4
7
+ n_embd: 72
8
+
9
+ dropout: 0.3 # dropout on the forward dynamics model
10
+ covariance_reg_coef: 0.04
11
+ dynamics_loss_coef: 1.0
12
+
13
+ ema_beta: 0.99
14
+ beta_scheduling: True
15
+ projector_use_ema: True
16
+
17
+ lr: ${ssl_lr}
18
+ weight_decay: ${ssl_weight_decay}
19
+ betas: ${betas}
20
+ separate_single_views: True
configs/ssl/dynamo_libero.yaml ADDED
@@ -0,0 +1,20 @@
1
+ _target_: models.ssl.dynamo.DynaMoSSL
2
+ window_size: ${window_size}
3
+ feature_dim: ${encoder.output_dim}
4
+ projection_dim: ${projector.output_dim}
5
+ n_layer: 6
6
+ n_head: 6
7
+ n_embd: 120
8
+
9
+ dropout: 0.0 # dropout on the forward dynamics model
10
+ covariance_reg_coef: 0.04
11
+ dynamics_loss_coef: 1.0
12
+
13
+ ema_beta: null
14
+ beta_scheduling: True
15
+ projector_use_ema: True
16
+
17
+ lr: ${ssl_lr}
18
+ weight_decay: ${ssl_weight_decay}
19
+ betas: ${betas}
20
+ separate_single_views: True
configs/ssl/dynamo_pusht.yaml ADDED
@@ -0,0 +1,20 @@
1
+ _target_: models.ssl.dynamo.DynaMoSSL
2
+ window_size: ${window_size}
3
+ feature_dim: ${encoder.output_dim}
4
+ projection_dim: ${projector.output_dim}
5
+ n_layer: 6
6
+ n_head: 6
7
+ n_embd: 120
8
+
9
+ dropout: 0.0 # dropout on the forward dynamics model
10
+ covariance_reg_coef: 0.04
11
+ dynamics_loss_coef: 1.0
12
+
13
+ ema_beta: null
14
+ beta_scheduling: True
15
+ projector_use_ema: True
16
+
17
+ lr: ${ssl_lr}
18
+ weight_decay: ${ssl_weight_decay}
19
+ betas: ${betas}
20
+ separate_single_views: True
configs/ssl/dynamo_sim_kitchen.yaml ADDED
@@ -0,0 +1,20 @@
1
+ _target_: models.ssl.dynamo.DynaMoSSL
2
+ window_size: ${window_size}
3
+ feature_dim: ${encoder.output_dim}
4
+ projection_dim: ${projector.output_dim}
5
+ n_layer: 6
6
+ n_head: 6
7
+ n_embd: 120
8
+
9
+ dropout: 0.0 # dropout on the forward dynamics model
10
+ covariance_reg_coef: 0.04
11
+ dynamics_loss_coef: 1.0
12
+
13
+ ema_beta: null
14
+ beta_scheduling: True
15
+ projector_use_ema: True
16
+
17
+ lr: ${ssl_lr}
18
+ weight_decay: ${ssl_weight_decay}
19
+ betas: ${betas}
20
+ separate_single_views: True
configs/ssl/dynamo_your_dataset.yaml ADDED
@@ -0,0 +1,20 @@
1
+ _target_: models.ssl.dynamo.DynaMoSSL
2
+ window_size: ${window_size}
3
+ feature_dim: ${encoder.output_dim}
4
+ projection_dim: ${projector.output_dim}
5
+ n_layer: 6
6
+ n_head: 6
7
+ n_embd: 120
8
+
9
+ dropout: 0.0 # dropout on the forward dynamics model
10
+ covariance_reg_coef: 0.04
11
+ dynamics_loss_coef: 1.0
12
+
13
+ ema_beta: 0.99 # set to null for SimSiam instead of EMA
14
+ beta_scheduling: True
15
+ projector_use_ema: True
16
+
17
+ lr: ${ssl_lr}
18
+ weight_decay: ${ssl_weight_decay}
19
+ betas: ${betas}
20
+ separate_single_views: True
configs/train_blockpush.yaml ADDED
@@ -0,0 +1,53 @@
1
+ defaults:
2
+ - _self_
3
+ - encoder: resnet18_random
4
+ - projector: inverse_dynamics_blockpush
5
+ - ssl: dynamo_blockpush
6
+ - env: block_push_multiview
7
+ - env_vars: env_vars
8
+
9
+ # Dataset details
10
+ subset_fraction: null
11
+ train_fraction: 0.95
12
+ batch_size: 64 # across all processes
13
+ num_workers: 15 # per process
14
+ window_size: 5
15
+ goal_conditional: null
16
+ goal_seq_len: 0
17
+ min_future_sep: 0
18
+ num_extra_predicted_actions: 0
19
+
20
+ # Training details
21
+ ssl_lr: 1e-4
22
+ ssl_weight_decay: 0.0
23
+ betas:
24
+ - 0.9
25
+ - 0.999
26
+ clip_grad_norm: 0.1
27
+ seed: 42
28
+ timeout_seconds: 18000
29
+
30
+ sync_bn: True
31
+ use_lr_scheduling: True
32
+ warmup_epochs: 5
33
+ num_epochs: 40
34
+
35
+ save_every_epochs: 10
36
+
37
+ # Eval config
38
+ eval_offline: True
39
+
40
+ # Wandb config
41
+ project: dynamo-repro
42
+ experiment: train_blockpush_dynamo
43
+
44
+ # hydra config
45
+ hydra:
46
+ job:
47
+ override_dirname: ${experiment}
48
+ chdir: False
49
+ run:
50
+ dir: ./exp_local/${now:%Y.%m.%d}/${now:%H%M%S}_${experiment}
51
+ sweep:
52
+ dir: ./exp_local/${now:%Y.%m.%d}/sweep_${now:%H%M%S}_${experiment}
53
+ subdir: ${hydra.job.num}
configs/train_libero_goal.yaml ADDED
@@ -0,0 +1,53 @@
1
+ defaults:
2
+ - _self_
3
+ - encoder: resnet18_random
4
+ - projector: inverse_dynamics_libero
5
+ - ssl: dynamo_libero
6
+ - env: libero_goal
7
+ - env_vars: env_vars
8
+
9
+ # Dataset details
10
+ subset_fraction: null
11
+ train_fraction: 0.95
12
+ batch_size: 64 # across all processes
13
+ num_workers: 15 # per process
14
+ window_size: 5
15
+ goal_conditional: null
16
+ goal_seq_len: 0
17
+ min_future_sep: 0
18
+ num_extra_predicted_actions: 0
19
+
20
+ # Training details
21
+ ssl_lr: 1e-4
22
+ ssl_weight_decay: 0.0
23
+ betas:
24
+ - 0.9
25
+ - 0.999
26
+ clip_grad_norm: 0.1
27
+ seed: 42
28
+ timeout_seconds: 18000
29
+
30
+ sync_bn: True
31
+ use_lr_scheduling: False
32
+ warmup_epochs: 5
33
+ num_epochs: 40
34
+
35
+ save_every_epochs: 10
36
+
37
+ # Eval config
38
+ eval_offline: True
39
+
40
+ # Wandb config
41
+ project: dynamo-repro
42
+ experiment: train_libero_goal_dynamo
43
+
44
+ # hydra config
45
+ hydra:
46
+ job:
47
+ override_dirname: ${experiment}
48
+ chdir: False
49
+ run:
50
+ dir: ./exp_local/${now:%Y.%m.%d}/${now:%H%M%S}_${experiment}
51
+ sweep:
52
+ dir: ./exp_local/${now:%Y.%m.%d}/sweep_${now:%H%M%S}_${experiment}
53
+ subdir: ${hydra.job.num}
configs/train_pusht.yaml ADDED
@@ -0,0 +1,54 @@
1
+ defaults:
2
+ - _self_
3
+ - encoder: resnet18_random
4
+ - projector: inverse_dynamics_pusht
5
+ - ssl: dynamo_pusht
6
+ - env: pusht
7
+ - env_vars: env_vars
8
+
9
+ # Dataset details
10
+ subset_fraction: null
11
+ train_fraction: 0.95
12
+ batch_size: 64
13
+ num_workers: 15
14
+ window_size: 5
15
+ goal_conditional: null
16
+ goal_seq_len: 0
17
+ min_future_sep: 0
18
+ num_extra_predicted_actions: 5
19
+ relative: False
20
+
21
+ # Training details
22
+ ssl_lr: 1e-4
23
+ ssl_weight_decay: 1e-6
24
+ betas:
25
+ - 0.9
26
+ - 0.999
27
+ clip_grad_norm: 0.1
28
+ seed: 42
29
+ timeout_seconds: 18000
30
+
31
+ sync_bn: True
32
+ use_lr_scheduling: True
33
+ warmup_epochs: 5
34
+ num_epochs: 40
35
+
36
+ save_every_epochs: 10
37
+
38
+ # Eval config
39
+ eval_offline: True
40
+
41
+ # Wandb config
42
+ project: dynamo-repro
43
+ experiment: train_pusht_dynamo
44
+
45
+ # hydra config
46
+ hydra:
47
+ job:
48
+ override_dirname: ${experiment}
49
+ chdir: False
50
+ run:
51
+ dir: ./exp_local/${now:%Y.%m.%d}/${now:%H%M%S}_${experiment}
52
+ sweep:
53
+ dir: ./exp_local/${now:%Y.%m.%d}/sweep_${now:%H%M%S}_${experiment}
54
+ subdir: ${hydra.job.num}
configs/train_sim_kitchen.yaml ADDED
@@ -0,0 +1,53 @@
1
+ defaults:
2
+ - _self_
3
+ - encoder: resnet18_random
4
+ - projector: inverse_dynamics_sim_kitchen
5
+ - ssl: dynamo_sim_kitchen
6
+ - env: sim_kitchen
7
+ - env_vars: env_vars
8
+
9
+ # Dataset details
10
+ subset_fraction: null
11
+ train_fraction: 0.95
12
+ batch_size: 64 # across all processes
13
+ num_workers: 15 # per process
14
+ window_size: 2
15
+ goal_conditional: null
16
+ goal_seq_len: 3
17
+ min_future_sep: 10
18
+ num_extra_predicted_actions: 0
19
+
20
+ # Training details
21
+ ssl_lr: 1e-4
22
+ ssl_weight_decay: 0.0
23
+ betas:
24
+ - 0.9
25
+ - 0.999
26
+ clip_grad_norm: 0.1
27
+ seed: 42
28
+ timeout_seconds: 18000
29
+
30
+ sync_bn: True
31
+ use_lr_scheduling: True
32
+ warmup_epochs: 5
33
+ num_epochs: 40
34
+
35
+ save_every_epochs: 10
36
+
37
+ # Eval config
38
+ eval_offline: True
39
+
40
+ # Wandb config
41
+ project: dynamo-repro
42
+ experiment: train_sim_kitchen_dynamo
43
+
44
+ # hydra config
45
+ hydra:
46
+ job:
47
+ override_dirname: ${experiment}
48
+ chdir: False
49
+ run:
50
+ dir: ./exp_local/${now:%Y.%m.%d}/${now:%H%M%S}_${experiment}
51
+ sweep:
52
+ dir: ./exp_local/${now:%Y.%m.%d}/sweep_${now:%H%M%S}_${experiment}
53
+ subdir: ${hydra.job.num}
configs/train_your_dataset.yaml ADDED
@@ -0,0 +1,53 @@
1
+ defaults:
2
+ - _self_
3
+ - encoder: resnet18_random
4
+ - projector: inverse_dynamics_your_dataset
5
+ - ssl: dynamo_your_dataset
6
+ - env: your_dataset
7
+ - env_vars: env_vars
8
+
9
+ # Dataset details
10
+ subset_fraction: null
11
+ train_fraction: 0.95
12
+ batch_size: 64 # across all processes
13
+ num_workers: 15 # per process
14
+ window_size: 5
15
+ goal_conditional: null
16
+ goal_seq_len: 0
17
+ min_future_sep: 0
18
+ num_extra_predicted_actions: 0
19
+
20
+ # Training details
21
+ ssl_lr: 1e-4
22
+ ssl_weight_decay: 0.0
23
+ betas:
24
+ - 0.9
25
+ - 0.999
26
+ clip_grad_norm: 0.1
27
+ seed: 42
28
+ timeout_seconds: 18000
29
+
30
+ sync_bn: True
31
+ use_lr_scheduling: True
32
+ warmup_epochs: 5
33
+ num_epochs: 40
34
+
35
+ save_every_epochs: 10
36
+
37
+ # Eval config
38
+ eval_offline: True
39
+
40
+ # Wandb config
41
+ project: dynamo-repro
42
+ experiment: train_your_dataset_dynamo
43
+
44
+ # hydra config
45
+ hydra:
46
+ job:
47
+ override_dirname: ${experiment}
48
+ chdir: False
49
+ run:
50
+ dir: ./exp_local/${now:%Y.%m.%d}/${now:%H%M%S}_${experiment}
51
+ sweep:
52
+ dir: ./exp_local/${now:%Y.%m.%d}/sweep_${now:%H%M%S}_${experiment}
53
+ subdir: ${hydra.job.num}
datasets/__init__.py ADDED
@@ -0,0 +1,5 @@
1
+ from . import core
2
+ from . import block_pushing
3
+ from . import libero
4
+ from . import sim_kitchen
5
+ from . import pusht
datasets/block_pushing.py ADDED
@@ -0,0 +1,79 @@
1
+ import os
2
+ import torch
3
+ import einops
4
+ import numpy as np
5
+ from pathlib import Path
6
+ from typing import Optional
7
+ from datasets.core import TrajectoryDataset
8
+
9
+
10
+ class PushMultiviewTrajectoryDataset(TrajectoryDataset):
11
+ def __init__(
12
+ self,
13
+ data_directory: os.PathLike,
14
+ onehot_goals=False,
15
+ subset_fraction: Optional[float] = None,
16
+ prefetch: bool = False,
17
+ ):
18
+ self.data_directory = Path(data_directory)
19
+ self.states = np.load(self.data_directory / "multimodal_push_observations.npy")
20
+ self.actions = np.load(self.data_directory / "multimodal_push_actions.npy")
21
+ self.masks = np.load(self.data_directory / "multimodal_push_masks.npy")
22
+
23
+ self.subset_fraction = subset_fraction
24
+ if self.subset_fraction:
25
+ assert self.subset_fraction > 0 and self.subset_fraction <= 1
26
+ n = int(len(self.states) * self.subset_fraction)
27
+ else:
28
+ n = len(self.states)
29
+ self.states = self.states[:n]
30
+ self.actions = self.actions[:n]
31
+ self.masks = self.masks[:n]
32
+
33
+ self.states = torch.from_numpy(self.states).float()
34
+ self.actions = torch.from_numpy(self.actions).float() / 0.03
35
+ self.masks = torch.from_numpy(self.masks).bool()
36
+ self.prefetch = prefetch
37
+ if self.prefetch:
38
+ self.obses = []
39
+ for i in range(n):
40
+ vid_path = self.data_directory / "obs_multiview" / f"{i:03d}.pth"
41
+ self.obses.append(torch.load(vid_path))
42
+ self.onehot_goals = onehot_goals
43
+ if self.onehot_goals:
44
+ self.goals = torch.load(self.data_directory / "onehot_goals.pth").float()
45
+ self.goals = self.goals[:n]
46
+
47
+ def get_seq_length(self, idx):
48
+ return int(self.masks[idx].sum().item())
49
+
50
+ def get_all_actions(self):
51
+ result = []
52
+ # mask out invalid actions
53
+ for i in range(len(self.masks)):
54
+ T = int(self.masks[i].sum().item())
55
+ result.append(self.actions[i, :T, :])
56
+ return torch.cat(result, dim=0)
57
+
58
+ def get_frames(self, idx, frames):
59
+ if self.prefetch:
60
+ obs = self.obses[idx][frames]
61
+ else:
62
+ obs = torch.load(self.data_directory / "obs_multiview" / f"{idx:03d}.pth")[
63
+ frames
64
+ ]
65
+ obs = einops.rearrange(obs, "T V H W C -> T V C H W") / 255.0
66
+ act = self.actions[idx, frames]
67
+ mask = self.masks[idx, frames]
68
+ if self.onehot_goals:
69
+ goal = self.goals[idx, frames]
70
+ return obs, act, mask, goal
71
+ else:
72
+ return obs, act, mask
73
+
74
+ def __getitem__(self, idx):
75
+ T = self.masks[idx].sum().int().item()
76
+ return self.get_frames(idx, range(T))
77
+
78
+ def __len__(self):
79
+ return len(self.states)
datasets/core.py ADDED
@@ -0,0 +1,345 @@
1
+ import abc
2
+ import utils
3
+ import torch
4
+ import numpy as np
5
+ from torch import default_generator, randperm
6
+ from torch.utils.data import Dataset, Subset
7
+ from typing import Callable, Optional, Sequence, List, Any
8
+ from torch.nn.utils.rnn import pad_sequence
9
+
10
+
11
+ # Taken from python 3.5 docs
12
+ def _accumulate(iterable, fn=lambda x, y: x + y):
13
+ "Return running totals"
14
+ # _accumulate([1,2,3,4,5]) --> 1 3 6 10 15
15
+ # _accumulate([1,2,3,4,5], operator.mul) --> 1 2 6 24 120
16
+ it = iter(iterable)
17
+ try:
18
+ total = next(it)
19
+ except StopIteration:
20
+ return
21
+ yield total
22
+ for element in it:
23
+ total = fn(total, element)
24
+ yield total
25
+
26
+
27
+ class TrajectoryDataset(Dataset, abc.ABC):
28
+ """
29
+ A dataset containing trajectories.
30
+ TrajectoryDataset[i] returns: (observations, actions, mask)
31
+ observations: Tensor[T, ...], T frames of observations
32
+ actions: Tensor[T, ...], T frames of actions
33
+ mask: Tensor[T]: False: invalid; True: valid
34
+ """
35
+
36
+ @abc.abstractmethod
37
+ def get_seq_length(self, idx):
38
+ """
39
+ Returns the length of the idx-th trajectory.
40
+ """
41
+ raise NotImplementedError
42
+
43
+ @abc.abstractmethod
44
+ def get_frames(self, idx, frames):
45
+ """
46
+ Returns the frames from the idx-th trajectory at the specified frames.
47
+ Used to speed up slicing.
48
+ """
49
+ raise NotImplementedError
50
+
51
+
52
+ class TrajectorySubset(TrajectoryDataset, Subset):
53
+ """
54
+ Subset of a trajectory dataset at specified indices.
55
+
56
+ Args:
57
+ dataset (TrajectoryDataset): The whole Dataset
58
+ indices (sequence): Indices in the whole set selected for subset
59
+ """
60
+
61
+ def __init__(self, dataset: TrajectoryDataset, indices: Sequence[int]):
62
+ Subset.__init__(self, dataset, indices)
63
+
64
+ def get_seq_length(self, idx):
65
+ return self.dataset.get_seq_length(self.indices[idx])
66
+
67
+ def get_all_actions(self):
68
+ return self.dataset.get_all_actions()
69
+
70
+ def get_frames(self, idx, frames):
71
+ return self.dataset.get_frames(self.indices[idx], frames)
72
+
73
+
74
+ class TrajectorySlicerDataset:
75
+ def __init__(
76
+ self,
77
+ dataset: TrajectoryDataset,
78
+ window: int,
79
+ future_conditional: bool = False,
80
+ min_future_sep: int = 0,
81
+ future_seq_len: Optional[int] = None,
82
+ only_sample_tail: bool = False,
83
+ transform: Optional[Callable] = None,
84
+ num_extra_predicted_actions: Optional[int] = None,
85
+ frame_step: int = 1,
86
+ repeat_first_frame: bool = False,
87
+ ):
88
+ """
89
+ Slice a trajectory dataset into unique (but overlapping) sequences of length `window`.
90
+
91
+ dataset: a trajectory dataset that satisfies:
92
+ dataset.get_seq_length(i) is implemented to return the length of sequence i
93
+ dataset[i] = (observations, actions, mask)
94
+ observations: Tensor[T, ...]
95
+ actions: Tensor[T, ...]
96
+ mask: Tensor[T]
97
+ False: invalid
98
+ True: valid
99
+ window: int
100
+ number of timesteps to include in each slice
101
+ future_conditional: bool = False
102
+ if True, observations will be augmented with future observations sampled from the same trajectory
103
+ min_future_sep: int = 0
104
+ minimum number of timesteps between the end of the current sequence and the start of the future sequence
105
+ for the future conditional
106
+ future_seq_len: Optional[int] = None
107
+ the length of the future conditional sequence;
108
+ required if future_conditional is True
109
+ only_sample_tail: bool = False
110
+ if True, only sample future sequences from the tail of the trajectory
111
+ transform: function (observations, actions, mask[, goal]) -> (observations, actions, mask[, goal])
112
+ """
113
+ if future_conditional:
114
+ assert future_seq_len is not None, "must specify a future_seq_len"
115
+ self.dataset = dataset
116
+ self.window = window
117
+ self.future_conditional = future_conditional
118
+ self.min_future_sep = min_future_sep
119
+ self.future_seq_len = future_seq_len
120
+ self.only_sample_tail = only_sample_tail
121
+ self.transform = transform
122
+ self.num_extra_predicted_actions = num_extra_predicted_actions or 0
123
+ self.slices = []
124
+ self.frame_step = frame_step
125
+ min_seq_length = np.inf
126
+ if num_extra_predicted_actions:
127
+ window = window + num_extra_predicted_actions
128
+ for i in range(len(self.dataset)): # type: ignore
129
+ T = self.dataset.get_seq_length(i) # avoid reading actual seq (slow)
130
+ min_seq_length = min(T, min_seq_length)
131
+ if T - window < 0:
132
+ print(f"Ignored short sequence #{i}: len={T}, window={window}")
133
+ else:
134
+ if repeat_first_frame:
135
+ self.slices += [(i, 0, end + 1) for end in range(window - 1)]
136
+ window_len_with_step = (window - 1) * frame_step + 1
137
+ last_start = T - window_len_with_step
138
+ self.slices += [
139
+ (i, start, start + window_len_with_step)
140
+ for start in range(last_start)
141
+ ] # slice indices follow convention [start, end)
142
+
143
+ if min_seq_length < window:
144
+ print(
145
+ f"Ignored short sequences. To include all, set window <= {min_seq_length}."
146
+ )
147
+
148
+ def get_seq_length(self, idx: int) -> int:
149
+ if self.future_conditional:
150
+ return self.future_seq_len + self.window
151
+ else:
152
+ return self.window
153
+
154
+ def get_all_actions(self) -> torch.Tensor:
155
+ return self.dataset.get_all_actions()
156
+
157
+ def __len__(self):
158
+ return len(self.slices)
159
+
160
+ def __getitem__(self, idx):
161
+ i, start, end = self.slices[idx]
162
+ T = self.dataset.get_seq_length(i)
163
+
164
+ if (
165
+ self.num_extra_predicted_actions is not None
166
+ and self.num_extra_predicted_actions != 0
167
+ ):
168
+ assert self.frame_step == 1, "NOT TESTED"
169
+ if self.future_conditional:
170
+ raise NotImplementedError(
171
+ "num_extra_predicted_actions with future_conditional not implemented"
172
+ )
173
+ assert end <= T, f"end={end} > T={T}"
174
+ observations, actions, mask = self.dataset.get_frames(i, range(start, end))
175
+ observations = observations[: self.window]
176
+
177
+ values = [observations, actions, mask.bool()]
178
+ else:
179
+ if self.future_conditional:
180
+ assert self.frame_step == 1, "NOT TESTED"
181
+ valid_start_range = (
182
+ end + self.min_future_sep,
183
+ self.dataset.get_seq_length(i) - self.future_seq_len,
184
+ )
185
+ if valid_start_range[0] < valid_start_range[1]:
186
+ if self.only_sample_tail:
187
+ future_obs_range = range(T - self.future_seq_len, T)
188
+ else:
189
+ future_start = np.random.randint(*valid_start_range)
190
+ future_end = future_start + self.future_seq_len
191
+ future_obs_range = range(future_start, future_end)
192
+ obs, actions, mask = self.dataset.get_frames(
193
+ i, list(range(start, end)) + list(future_obs_range)
194
+ )
195
+ future_obs = obs[end - start :]
196
+ obs = obs[: end - start]
197
+ actions = actions[: end - start]
198
+ mask = mask[: end - start]
199
+ else:
200
+ # zeros placeholder T x obs_dim
201
+ obs, actions, mask = self.dataset.get_frames(i, range(start, end))
202
+ obs_dims = obs.shape[1:]
203
+ future_obs = torch.zeros((self.future_seq_len, *obs_dims))
204
+
205
+ # [observations, actions, mask, future_obs (goal conditional)]
206
+ values = [obs, actions, mask.bool(), future_obs]
207
+ else:
208
+ observations, actions, mask = self.dataset.get_frames(
209
+ i, range(start, end, self.frame_step)
210
+ )
211
+ values = [observations, actions, mask.bool()]
212
+
213
+ if end - start < self.window + self.num_extra_predicted_actions:
214
+ # this only happens for repeating the very first frames
215
+ values = [
216
+ utils.inference.repeat_start_to_length(
217
+ x, self.window + self.num_extra_predicted_actions, dim=0
218
+ )
219
+ for x in values
220
+ ]
221
+ values[0] = values[0][: self.window]
222
+
223
+ # optionally apply transform
224
+ if self.transform is not None:
225
+ values = self.transform(values)
226
+ return tuple(values)
227
+
228
+
229
+ class TrajectoryEmbeddingDataset(TrajectoryDataset):
230
+ def __init__(
231
+ self,
232
+ model,
233
+ dataset: TrajectoryDataset,
234
+ device="cpu",
235
+ embed_goal=False,
236
+ ):
237
+ self.data = utils.inference.embed_trajectory_dataset(
238
+ model,
239
+ dataset,
240
+ obs_only=False,
241
+ device=device,
242
+ embed_goal=embed_goal,
243
+ )
244
+ assert len(self.data) == len(dataset)
245
+
246
+ self.seq_lengths = [len(x[0]) for x in self.data]
247
+ self.on_device_data = []
248
+ n_tensors = len(self.data[0])
249
+ for i in range(n_tensors):
250
+ self.on_device_data.append(
251
+ pad_sequence([x[i] for x in self.data], batch_first=True).to(device)
252
+ )
253
+ self.data = self.on_device_data
254
+
255
+ def get_seq_length(self, idx):
256
+ return self.seq_lengths[idx]
257
+
258
+ def get_all_actions(self):
259
+ return torch.cat([x[1] for x in self.data], dim=0)
260
+
261
+ def get_frames(self, idx, frames):
262
+ return [x[idx, frames] for x in self.data]
263
+
264
+ def __getitem__(self, idx):
265
+ return self.get_frames(idx, range(self.get_seq_length(idx)))
266
+
267
+ def __len__(self):
268
+ return len(self.seq_lengths)
269
+
270
+
271
+ def get_train_val_sliced(
272
+ traj_dataset: TrajectoryDataset,
273
+ train_fraction: float = 0.9,
274
+ random_seed: int = 42,
275
+ window_size: int = 10,
276
+ future_conditional: bool = False,
277
+ min_future_sep: int = 0,
278
+ future_seq_len: Optional[int] = None,
279
+ only_sample_tail: bool = False,
280
+ transform: Optional[Callable[[Any], Any]] = None,
281
+ num_extra_predicted_actions: Optional[int] = None,
282
+ frame_step: int = 1,
283
+ ):
284
+ train, val = split_traj_datasets(
285
+ traj_dataset,
286
+ train_fraction=train_fraction,
287
+ random_seed=random_seed,
288
+ )
289
+ traj_slicer_kwargs = {
290
+ "window": window_size,
291
+ "future_conditional": future_conditional,
292
+ "min_future_sep": min_future_sep,
293
+ "future_seq_len": future_seq_len,
294
+ "only_sample_tail": only_sample_tail,
295
+ "transform": transform,
296
+ "num_extra_predicted_actions": num_extra_predicted_actions,
297
+ "frame_step": frame_step,
298
+ }
299
+
300
+ train_slices = TrajectorySlicerDataset(train, **traj_slicer_kwargs)
301
+ val_slices = TrajectorySlicerDataset(val, **traj_slicer_kwargs)
302
+ return train_slices, val_slices
303
+
304
+
305
+ def random_split_traj(
306
+ dataset: TrajectoryDataset,
307
+ lengths: Sequence[int],
308
+ generator: Optional[torch.Generator] = default_generator,
309
+ ) -> List[TrajectorySubset]:
310
+ """
311
+ (Modified from torch.utils.data.dataset.random_split)
312
+
313
+ Randomly split a trajectory dataset into non-overlapping new datasets of given lengths.
314
+ Optionally fix the generator for reproducible results, e.g.:
315
+
316
+ >>> random_split_traj(range(10), [3, 7], generator=torch.Generator().manual_seed(42))
317
+
318
+ Args:
319
+ dataset (TrajectoryDataset): TrajectoryDataset to be split
320
+ lengths (sequence): lengths of splits to be produced
321
+ generator (Generator): Generator used for the random permutation.
322
+ """
323
+ # Cannot verify that dataset is Sized
324
+ if sum(lengths) != len(dataset): # type: ignore[arg-type]
325
+ raise ValueError(
326
+ "Sum of input lengths does not equal the length of the input dataset!"
327
+ )
328
+
329
+ indices = randperm(sum(lengths), generator=generator).tolist()
330
+ return [
331
+ TrajectorySubset(dataset, indices[offset - length : offset])
332
+ for offset, length in zip(_accumulate(lengths), lengths)
333
+ ]
334
+
335
+
336
+ def split_traj_datasets(dataset, train_fraction=0.95, random_seed=42):
337
+ dataset_length = len(dataset)
338
+ lengths = [
339
+ int(train_fraction * dataset_length),
340
+ dataset_length - int(train_fraction * dataset_length),
341
+ ]
342
+ train_set, val_set = random_split_traj(
343
+ dataset, lengths, generator=torch.Generator().manual_seed(random_seed)
344
+ )
345
+ return train_set, val_set
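The slicing utilities above are typically consumed through `get_train_val_sliced`, which splits a `TrajectoryDataset` into train/val subsets and wraps each in a `TrajectorySlicerDataset`. A short usage sketch follows; the dataset path and hyperparameter values are illustrative placeholders (the real ones come from the Hydra `train_*` configs).

```python
# Minimal sketch: split a TrajectoryDataset into train/val slices and batch them.
from torch.utils.data import DataLoader

from datasets.core import get_train_val_sliced
from datasets.pusht import PushTDataset

dataset = PushTDataset(data_directory="/PATH/TO/DATASET/ROOT/pusht_dataset")
train_slices, val_slices = get_train_val_sliced(
    dataset,
    train_fraction=0.95,  # matches the train_* configs
    random_seed=42,
    window_size=5,        # frames per training slice
)
# Each slice is (observations, actions, mask) with a fixed window length,
# so the default collate function works.
train_loader = DataLoader(train_slices, batch_size=64, shuffle=True, num_workers=4)
obs, act, mask = next(iter(train_loader))  # obs: (B, T, V, C, H, W); V = 1 for Push-T
```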
datasets/libero.py ADDED
@@ -0,0 +1,120 @@
1
+ import torch
2
+ import einops
3
+ import numpy as np
4
+ from pathlib import Path
5
+ from typing import Optional
6
+ from torch.nn.utils.rnn import pad_sequence
7
+ from datasets.core import TrajectoryDataset
8
+
9
+
10
+ class LiberoGoalDataset(TrajectoryDataset):
11
+ # data structure:
12
+ # libero_goal
13
+ # task_name
14
+ # demo_{i}
15
+ # agentview_image.mp4
16
+ # robot0_eye_in_hand_image.mp4
17
+ # robot0_joint_pos.npy
18
+ # robot0_eef.npy
19
+ # robot0_gripper_qpos.npy
20
+ # object_states.npy
21
+ # actions.npy
22
+ def __init__(self, data_directory, subset_fraction: Optional[float] = None):
23
+ self.dir = Path(data_directory) / "libero_goal"
24
+ self.task_names = list(self.dir.iterdir())
25
+ self.task_names.sort()
26
+ self.demos = []
27
+ for task_name in self.task_names:
28
+ self.demos += list(task_name.iterdir())
29
+
30
+ self.subset_fraction = subset_fraction
31
+ if self.subset_fraction:
32
+ assert 0 < self.subset_fraction <= 1
33
+ n = int(len(self.demos) * self.subset_fraction)
34
+ self.demos = self.demos[:n]
35
+
36
+ # prefetch all npy data
37
+ self.joint_pos = []
38
+ self.eef = []
39
+ self.gripper_qpos = []
40
+ self.object_states = []
41
+ self.states = []
42
+ self.actions = []
43
+ for demo in self.demos:
44
+ self.joint_pos.append(np.load(demo / "robot0_joint_pos.npy"))
45
+ self.eef.append(np.load(demo / "robot0_eef.npy"))
46
+ self.gripper_qpos.append(np.load(demo / "robot0_gripper_pos.npy"))
47
+ self.object_states.append(np.load(demo / "object_states.npy"))
48
+ state = np.concatenate(
49
+ [
50
+ self.joint_pos[-1],
51
+ self.eef[-1],
52
+ self.gripper_qpos[-1],
53
+ self.object_states[-1],
54
+ ],
55
+ axis=1,
56
+ )
57
+ act = np.load(demo / "actions.npy")
58
+ self.states.append(torch.from_numpy(state))
59
+ self.actions.append(torch.from_numpy(act))
60
+
61
+ # pad state dimension to same length for linear probe diagnostics
62
+ MAX_DIM = 128
63
+ for i in range(len(self.states)):
64
+ self.states[i] = torch.cat(
65
+ [
66
+ self.states[i],
67
+ torch.zeros(
68
+ self.states[i].shape[0], MAX_DIM - self.states[i].shape[1]
69
+ ),
70
+ ],
71
+ dim=1,
72
+ )
73
+ # pad states and actions to the same time length
74
+ self.states = pad_sequence(self.states, batch_first=True).float()
75
+ self.actions = pad_sequence(self.actions, batch_first=True).float()
76
+
77
+ # last frame goal
78
+ self.goals = None
79
+ goals = []
80
+ for i in range(0, 500, 50):
81
+ last_obs, _, _ = self.get_frames(i, [-1]) # 1 V C H W
82
+ goals.append(last_obs)
83
+ self.goals = goals
84
+
85
+ def __len__(self):
86
+ return len(self.demos)
87
+
88
+ def get_frames(self, idx, frames):
89
+ demo = self.demos[idx]
90
+ agentview_obs = torch.load(
91
+ str(demo / "agentview_image.pth"),
92
+ )
93
+ robotview_obs = torch.load(
94
+ str(demo / "robot0_eye_in_hand_image.pth"),
95
+ )
96
+ agentview = agentview_obs[frames]
97
+ robotview = robotview_obs[frames]
98
+ obs = torch.stack([agentview, robotview], dim=1)
99
+ obs = einops.rearrange(obs, "T V H W C -> T V C H W") / 255.0
100
+ act = self.actions[idx][frames]
101
+
102
+ if self.goals is not None:
103
+ task_idx = idx // 50
104
+ goal = self.goals[task_idx].repeat(len(frames), 1, 1, 1, 1)
105
+ return obs, act, goal
106
+ else:
107
+ return obs, act, None
108
+
109
+ def __getitem__(self, idx):
110
+ return self.get_frames(idx, range(len(self.joint_pos[idx])))
111
+
112
+ def get_seq_length(self, idx):
113
+ return len(self.joint_pos[idx])
114
+
115
+ def get_all_actions(self):
116
+ actions = []
117
+ for i in range(len(self.demos)):
118
+ T = self.get_seq_length(i)
119
+ actions.append(self.actions[i][:T])
120
+ return torch.cat(actions, dim=0)
datasets/pusht.py ADDED
@@ -0,0 +1,63 @@
1
+ import torch
2
+ import einops
3
+ import pickle
4
+ from pathlib import Path
5
+ from typing import Optional
6
+ from datasets.core import TrajectoryDataset
7
+
8
+
9
+ class PushTDataset(TrajectoryDataset):
10
+ def __init__(
11
+ self,
12
+ data_directory,
13
+ subset_fraction: Optional[float] = None,
14
+ relative=False,
15
+ ):
16
+ self.data_directory = Path(data_directory)
17
+ self.relative = relative
18
+ self.states = torch.load(self.data_directory / "states.pth")
19
+ if relative:
20
+ self.actions = torch.load(self.data_directory / "rel_actions.pth")
21
+ else:
22
+ self.actions = torch.load(self.data_directory / "abs_actions.pth")
23
+ with open(self.data_directory / "seq_lengths.pkl", "rb") as f:
24
+ self.seq_lengths = pickle.load(f)
25
+
26
+ self.subset_fraction = subset_fraction
27
+ if self.subset_fraction:
28
+ assert self.subset_fraction > 0 and self.subset_fraction <= 1
29
+ n = int(len(self.states) * self.subset_fraction)
30
+ else:
31
+ n = len(self.states)
32
+ self.states = self.states[:n]
33
+ self.actions = self.actions[:n]
34
+ self.seq_lengths = self.seq_lengths[:n]
35
+
36
+ for i in range(n):
37
+ T = self.seq_lengths[i]
38
+ self.actions[i, T:] = 0 # redo zero padding
39
+
40
+ def get_seq_length(self, idx):
41
+ return self.seq_lengths[idx]
42
+
43
+ def get_all_actions(self):
44
+ result = []
45
+ for i in range(len(self.seq_lengths)):
46
+ T = self.seq_lengths[i]
47
+ result.append(self.actions[i, :T, :])
48
+ return torch.cat(result, dim=0)
49
+
50
+ def get_frames(self, idx, frames):
51
+ vid_dir = self.data_directory / "obses"
52
+ obs = torch.load(str(vid_dir / f"episode_{idx:03d}.pth"))
53
+ obs = obs[frames] # THWC
54
+ obs = einops.rearrange(obs, "T H W C -> T 1 C H W") / 255.0 # T V C H W, 1 view
55
+ act = self.actions[idx, frames]
56
+ mask = torch.ones(len(act)).bool()
57
+ return obs, act, mask
58
+
59
+ def __getitem__(self, idx):
60
+ return self.get_frames(idx, range(self.get_seq_length(idx)))
61
+
62
+ def __len__(self):
63
+ return len(self.seq_lengths)
datasets/sim_kitchen.py ADDED
@@ -0,0 +1,58 @@
1
+ import utils
2
+ import torch
3
+ import numpy as np
4
+ from pathlib import Path
5
+ from datasets.core import TrajectoryDataset
6
+
7
+
8
+ class SimKitchenTrajectoryDataset(TrajectoryDataset):
9
+ def __init__(self, data_directory, prefetch=True, onehot_goals=False):
10
+ self.data_directory = Path(data_directory)
11
+ states = torch.from_numpy(np.load(self.data_directory / "observations_seq.npy"))
12
+ actions = torch.from_numpy(np.load(self.data_directory / "actions_seq.npy"))
13
+ goals = torch.load(self.data_directory / "onehot_goals.pth")
14
+ # The current values are in shape T x N x Dim, move to N x T x Dim
15
+ self.states, self.actions, self.goals = utils.transpose_batch_timestep(
16
+ states, actions, goals
17
+ )
18
+ self.Ts = np.load(self.data_directory / "existence_mask.npy").sum(axis=0).astype(int).tolist()
19
+
20
+ self.prefetch = prefetch
21
+ if self.prefetch:
22
+ self.obses = []
23
+ for i in range(len(self.Ts)):
24
+ self.obses.append(torch.load(self.data_directory / "obses" / f"{i:03d}.pth"))
25
+ self.onehot_goals = onehot_goals
26
+
27
+ def get_seq_length(self, idx):
28
+ return self.Ts[idx]
29
+
30
+ def get_all_actions(self):
31
+ result = []
32
+ # mask out invalid actions
33
+ for i in range(len(self.Ts)):
34
+ T = self.Ts[i]
35
+ result.append(self.actions[i, :T, :])
36
+ return torch.cat(result, dim=0)
37
+
38
+ def get_frames(self, idx, frames):
39
+ # obs, act, mask / obs, act, mask, goal
40
+ if self.prefetch:
41
+ obs = self.obses[idx][frames]
42
+ else:
43
+ obs = torch.load(self.data_directory / "obses" / f"{idx:03d}.pth")[frames]
44
+ obs = obs / 255.0
45
+ act = self.actions[idx, frames]
46
+ mask = torch.ones((len(frames)))
47
+ if self.onehot_goals:
48
+ goal = self.goals[idx, frames]
49
+ return obs, act, mask, goal
50
+ else:
51
+ return obs, act, mask
52
+
53
+ def __getitem__(self, idx):
54
+ T = self.Ts[idx]
55
+ return self.get_frames(idx, range(T))
56
+
57
+ def __len__(self):
58
+ return len(self.Ts)
datasets/vqbet_repro.py ADDED
@@ -0,0 +1,120 @@
+ import abc
+ import utils
+ import torch
+ import numpy as np
+ from torch.utils.data import Dataset
+ from typing import Optional, Callable
+
+
+ class TrajectoryDataset(Dataset, abc.ABC):
+     """
+     A dataset containing trajectories.
+     TrajectoryDataset[i] returns: (observations, actions, mask)
+         observations: Tensor[T, ...], T frames of observations
+         actions: Tensor[T, ...], T frames of actions
+         mask: Tensor[T]: 0: invalid; 1: valid
+     """
+
+     @abc.abstractmethod
+     def get_seq_length(self, idx):
+         """
+         Returns the length of the idx-th trajectory.
+         """
+         raise NotImplementedError
+
+
+ class TrajectorySlicerDataset(TrajectoryDataset):
+     def __init__(
+         self,
+         dataset: TrajectoryDataset,
+         window: int,
+         action_window: int,
+         vqbet_get_future_action_chunk: bool = True,
+         future_conditional: bool = False,
+         min_future_sep: int = 0,
+         future_seq_len: Optional[int] = None,
+         only_sample_tail: bool = False,
+         transform: Optional[Callable] = None,
+         use_libero_goal: bool = False,
+     ):
+         if future_conditional:
+             assert future_seq_len is not None, "must specify a future_seq_len"
+         self.dataset = dataset
+         self.window = window
+         self.action_window = action_window
+         self.vqbet_get_future_action_chunk = vqbet_get_future_action_chunk
+         self.future_conditional = future_conditional
+         self.min_future_sep = min_future_sep
+         self.future_seq_len = future_seq_len
+         self.only_sample_tail = only_sample_tail
+         self.transform = transform
+         self.slices = []
+         self.use_libero_goal = use_libero_goal
+         min_seq_length = np.inf
+         if vqbet_get_future_action_chunk:
+             min_window_required = window + action_window
+         else:
+             min_window_required = max(window, action_window)
+         for i in range(len(self.dataset)):  # type: ignore
+             T = self.dataset.get_seq_length(i)  # avoid reading actual seq (slow)
+             min_seq_length = min(T, min_seq_length)
+             if T - min_window_required < 0:
+                 print(
+                     f"Ignored short sequence #{i}: len={T}, window={min_window_required}"
+                 )
+             else:
+                 self.slices += [
+                     (i, 0, end + 1) for end in range(window - 1)
+                 ]  # slice indices follow convention [start, end)
+                 self.slices += [
+                     (i, start, start + window)
+                     for start in range(T - min_window_required)
+                 ]  # slice indices follow convention [start, end)
+
+         if min_seq_length < min_window_required:
+             print(
+                 f"Ignored short sequences. To include all, set window <= {min_seq_length}."
+             )
+
+     def get_seq_length(self, idx: int) -> int:
+         if self.future_conditional:
+             return self.future_seq_len + self.window
+         else:
+             return self.window
+
+     def __len__(self):
+         return len(self.slices)
+
+     def __getitem__(self, idx):
+         i, start, end = self.slices[idx]
+         if end - start < self.window:
+             obs, act, *others = self.dataset[i]
+             obs = utils.inference.repeat_start_to_length(
+                 obs[start:end], self.window, dim=0
+             )
+             act = utils.inference.repeat_start_to_length(
+                 act[start : end - 1 + self.action_window],
+                 self.window + self.action_window - 1,
+                 dim=0,
+             )
+             values = [obs, act]
+         else:
+             values = [
+                 self.dataset[i][0][start:end],
+                 self.dataset[i][1][start : end - 1 + self.action_window],
+             ]
+
+         if self.use_libero_goal:
+             goals = self.dataset[i][2][start:end]
+             if end - start < self.window:
+                 goals = utils.inference.repeat_start_to_length(
+                     goals, self.window, dim=0
+                 )
+             values.append(goals)
+
+         # optionally apply transform
+         if self.transform is not None:
+             values = self.transform(values)
+         if len(values) == 2:  # placeholder goal
+             values.append(torch.ones([1, 1, 1]))
+         return tuple(values)
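
A hedged sketch of how a `TrajectoryDataset` might be wrapped by the `TrajectorySlicerDataset` above to produce fixed-length training windows; the window sizes and batch size are illustrative, not the repository's training configuration:

```python
# Sketch only: slicing whole trajectories into (window, action_window) chunks.
from torch.utils.data import DataLoader

from datasets.sim_kitchen import SimKitchenTrajectoryDataset
from datasets.vqbet_repro import TrajectorySlicerDataset

trajs = SimKitchenTrajectoryDataset("path/to/sim_kitchen_data", prefetch=False)
slices = TrajectorySlicerDataset(trajs, window=10, action_window=5)

loader = DataLoader(slices, batch_size=64, shuffle=True)
obs, act, goal = next(iter(loader))
# obs:  [B, window, ...]                            observation windows
# act:  [B, window + action_window - 1, action_dim] action chunk per slice
# goal: [B, 1, 1, 1]                                placeholder appended when no goal is given
```
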
datasets/your_dataset.py ADDED
@@ -0,0 +1,22 @@
+ import utils
+ import torch
+ import numpy as np
+ from pathlib import Path
+ from torch.utils.data import TensorDataset
+ from datasets.core import TrajectoryDataset
+
+
+ class YourTrajectoryDataset(TensorDataset, TrajectoryDataset):
+     def __init__(self, data_directory):
+         data_directory = Path(data_directory)
+
+     def get_seq_length(self, idx):
+         raise NotImplementedError
+
+     def get_frames(self, idx, frames):
+         raise NotImplementedError
+         # return obs / 255.0, actions, masks
+
+     def __getitem__(self, idx):
+         T = self.get_seq_length(idx)
+         return self.get_frames(idx, range(T))
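
One hedged way the template above could be filled in, assuming trajectories stored as padded tensors `observations.pth`, `actions.pth`, and `masks.pth` of shape `[N, T, ...]` (the file names and tensor layout are illustrative, and `datasets.core.TrajectoryDataset` is assumed to expose the same interface as the `TrajectoryDataset` shown in `vqbet_repro.py` above):

```python
# Sketch only: a concrete TrajectoryDataset built on the template above.
# File names and tensor layout are assumptions for illustration.
import torch
from pathlib import Path

from datasets.core import TrajectoryDataset


class MyTrajectoryDataset(TrajectoryDataset):
    def __init__(self, data_directory):
        data_directory = Path(data_directory)
        self.observations = torch.load(data_directory / "observations.pth")  # [N, T, ...] uint8
        self.actions = torch.load(data_directory / "actions.pth")            # [N, T, action_dim]
        self.masks = torch.load(data_directory / "masks.pth")                # [N, T], 1 = valid

    def __len__(self):
        return len(self.masks)

    def get_seq_length(self, idx):
        return int(self.masks[idx].sum().item())

    def get_all_actions(self):
        # concatenate only the valid (unmasked) actions of every trajectory
        return torch.cat(
            [self.actions[i, : self.get_seq_length(i)] for i in range(len(self))], dim=0
        )

    def get_frames(self, idx, frames):
        obs = self.observations[idx, frames] / 255.0
        act = self.actions[idx, frames]
        mask = self.masks[idx, frames]
        return obs, act, mask

    def __getitem__(self, idx):
        return self.get_frames(idx, range(self.get_seq_length(idx)))
```
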
envs/assets/block.urdf ADDED
@@ -0,0 +1,31 @@
+ <?xml version="0.0" ?>
+ <robot name="box.urdf">
+   <link name="baseLink">
+     <contact>
+       <lateral_friction value="1.0"/>
+       <rolling_friction value="0.0001"/>
+       <inertia_scaling value="3.0"/>
+     </contact>
+     <inertial>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <mass value=".01"/>
+       <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
+     </inertial>
+     <visual>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <geometry>
+         <box size="0.04 0.04 0.04"/>
+       </geometry>
+       <material name="red">
+         <color rgba="1 0.3412 0.3490 1"/>
+       </material>
+     </visual>
+     <collision>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <geometry>
+         <box size="0.04 0.04 0.04"/>
+       </geometry>
+     </collision>
+   </link>
+ </robot>
+
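
A hedged sketch for sanity-checking the block asset in PyBullet (the relative path assumes the repository root as the working directory):

```python
# Sketch only: load the block URDF into a headless PyBullet instance.
import pybullet as p

p.connect(p.DIRECT)                               # headless physics server
block_id = p.loadURDF(
    "envs/assets/block.urdf",
    basePosition=[0.0, 0.0, 0.02],                # drop it just above the ground plane
)
print(p.getBasePositionAndOrientation(block_id))  # pose of the free-floating base link
p.disconnect()
```
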
envs/assets/block2.urdf ADDED
@@ -0,0 +1,31 @@
+ <?xml version="0.0" ?>
+ <robot name="box2.urdf">
+   <link name="baseLink">
+     <contact>
+       <lateral_friction value="1.0"/>
+       <rolling_friction value="0.0001"/>
+       <inertia_scaling value="3.0"/>
+     </contact>
+     <inertial>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <mass value=".01"/>
+       <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
+     </inertial>
+     <visual>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <geometry>
+         <box size="0.04 0.04 0.04"/>
+       </geometry>
+       <material name="red">
+         <color rgba="0.3412 1 0.3490 1"/>
+       </material>
+     </visual>
+     <collision>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <geometry>
+         <box size="0.04 0.04 0.04"/>
+       </geometry>
+     </collision>
+   </link>
+ </robot>
+
envs/assets/blocks/blue_cube.urdf ADDED
@@ -0,0 +1,30 @@
+ <?xml version="1.0" ?>
+ <robot name="blue_cube.urdf">
+   <link name="baseLink">
+     <contact>
+       <lateral_friction value="0.5"/>
+       <rolling_friction value="0.0001"/>
+       <inertia_scaling value="1.0"/>
+     </contact>
+     <inertial>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <mass value=".01"/>
+       <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
+     </inertial>
+     <visual>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <geometry>
+         <mesh filename="cube.obj" scale="1.0 1.0 1.0"/>
+       </geometry>
+       <material name="blue">
+         <color rgba="0.4 0.4 1.0 1"/>
+       </material>
+     </visual>
+     <collision>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <geometry>
+         <mesh filename="cube.obj" scale="1.0 1.0 1.0"/>
+       </geometry>
+     </collision>
+   </link>
+ </robot>
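
The mesh-backed block URDFs reference their `.obj` geometry by bare filename (`cube.obj` above), so the loader has to be able to resolve that relative path; a hedged sketch, assuming PyBullet and the repository root as the working directory:

```python
# Sketch only: point PyBullet's search path at the blocks directory so that
# both the URDF and its relative mesh reference ("cube.obj") can be found.
import pybullet as p

p.connect(p.DIRECT)
p.setAdditionalSearchPath("envs/assets/blocks")
cube_id = p.loadURDF("blue_cube.urdf", basePosition=[0.0, 0.0, 0.02])
p.disconnect()
```
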
envs/assets/blocks/cube.obj ADDED
@@ -0,0 +1,446 @@
1
+ # Blender v2.92.0 OBJ File: ''
2
+ # www.blender.org
3
+ mtllib cube.mtl
4
+ o square_blue_block_Cube.001
5
+ v 0.000000 0.000000 -0.000000
6
+ v 0.000000 0.038100 -0.000000
7
+ v -0.014865 0.000000 0.018106
8
+ v -0.016277 0.002032 0.019826
9
+ v -0.015863 0.000595 0.019322
10
+ v -0.019826 0.002032 0.016277
11
+ v -0.018106 0.000000 0.014865
12
+ v -0.019322 0.000595 0.015863
13
+ v -0.018052 0.002032 0.019351
14
+ v -0.016494 0.000000 0.017681
15
+ v -0.017595 0.000595 0.018862
16
+ v -0.019351 0.002032 0.018052
17
+ v -0.017681 0.000000 0.016494
18
+ v -0.018862 0.000595 0.017595
19
+ v -0.018106 0.038100 0.014865
20
+ v -0.019826 0.036068 0.016277
21
+ v -0.019322 0.037505 0.015863
22
+ v -0.016277 0.036068 0.019826
23
+ v -0.014865 0.038100 0.018106
24
+ v -0.015863 0.037505 0.019322
25
+ v -0.017681 0.038100 0.016494
26
+ v -0.019351 0.036068 0.018052
27
+ v -0.018862 0.037505 0.017595
28
+ v -0.016494 0.038100 0.017681
29
+ v -0.018052 0.036068 0.019351
30
+ v -0.017595 0.037505 0.018862
31
+ v -0.018106 0.000000 -0.014865
32
+ v -0.019826 0.002032 -0.016277
33
+ v -0.019322 0.000595 -0.015863
34
+ v -0.016277 0.002032 -0.019826
35
+ v -0.014865 0.000000 -0.018106
36
+ v -0.015863 0.000595 -0.019322
37
+ v -0.017681 0.000000 -0.016494
38
+ v -0.019351 0.002032 -0.018052
39
+ v -0.018862 0.000595 -0.017595
40
+ v -0.016494 0.000000 -0.017681
41
+ v -0.018052 0.002032 -0.019351
42
+ v -0.017595 0.000595 -0.018862
43
+ v -0.014865 0.038100 -0.018106
44
+ v -0.016277 0.036068 -0.019826
45
+ v -0.015863 0.037505 -0.019322
46
+ v -0.019826 0.036068 -0.016277
47
+ v -0.018106 0.038100 -0.014865
48
+ v -0.019322 0.037505 -0.015863
49
+ v -0.018052 0.036068 -0.019351
50
+ v -0.016494 0.038100 -0.017681
51
+ v -0.017595 0.037505 -0.018862
52
+ v -0.019351 0.036068 -0.018052
53
+ v -0.017681 0.038100 -0.016494
54
+ v -0.018862 0.037505 -0.017595
55
+ v 0.018106 0.000000 0.014865
56
+ v 0.019826 0.002032 0.016277
57
+ v 0.019322 0.000595 0.015863
58
+ v 0.016277 0.002032 0.019826
59
+ v 0.014865 0.000000 0.018106
60
+ v 0.015863 0.000595 0.019322
61
+ v 0.017681 0.000000 0.016494
62
+ v 0.019351 0.002032 0.018052
63
+ v 0.018862 0.000595 0.017595
64
+ v 0.016494 0.000000 0.017681
65
+ v 0.018052 0.002032 0.019351
66
+ v 0.017595 0.000595 0.018862
67
+ v 0.014865 0.038100 0.018106
68
+ v 0.016277 0.036068 0.019826
69
+ v 0.015863 0.037505 0.019322
70
+ v 0.019826 0.036068 0.016277
71
+ v 0.018106 0.038100 0.014865
72
+ v 0.019322 0.037505 0.015863
73
+ v 0.018052 0.036068 0.019351
74
+ v 0.016494 0.038100 0.017681
75
+ v 0.017595 0.037505 0.018862
76
+ v 0.019351 0.036068 0.018052
77
+ v 0.017681 0.038100 0.016494
78
+ v 0.018862 0.037505 0.017595
79
+ v 0.014865 0.000000 -0.018106
80
+ v 0.016277 0.002032 -0.019826
81
+ v 0.015863 0.000595 -0.019322
82
+ v 0.019826 0.002032 -0.016277
83
+ v 0.018106 0.000000 -0.014865
84
+ v 0.019322 0.000595 -0.015863
85
+ v 0.016494 0.000000 -0.017681
86
+ v 0.018052 0.002032 -0.019351
87
+ v 0.017595 0.000595 -0.018862
88
+ v 0.017681 0.000000 -0.016494
89
+ v 0.019351 0.002032 -0.018052
90
+ v 0.018862 0.000595 -0.017595
91
+ v 0.018106 0.038100 -0.014865
92
+ v 0.019826 0.036068 -0.016277
93
+ v 0.019322 0.037505 -0.015863
94
+ v 0.016277 0.036068 -0.019826
95
+ v 0.014865 0.038100 -0.018106
96
+ v 0.015863 0.037505 -0.019322
97
+ v 0.019351 0.036068 -0.018052
98
+ v 0.017681 0.038100 -0.016494
99
+ v 0.018862 0.037505 -0.017595
100
+ v 0.018052 0.036068 -0.019351
101
+ v 0.016494 0.038100 -0.017681
102
+ v 0.017595 0.037505 -0.018862
103
+ vt 0.811987 0.285513
104
+ vt 0.811986 0.268118
105
+ vt 0.822561 0.276819
106
+ vt 0.831255 0.287393
107
+ vt 0.813860 0.287394
108
+ vt 0.836073 0.319870
109
+ vt 0.856069 0.319870
110
+ vt 0.856069 0.320950
111
+ vt 0.836073 0.320950
112
+ vt 0.813867 0.266245
113
+ vt 0.831262 0.266244
114
+ vt 0.832202 0.266496
115
+ vt 0.832884 0.267184
116
+ vt 0.856069 0.297507
117
+ vt 0.836074 0.297507
118
+ vt 0.836074 0.278381
119
+ vt 0.856070 0.278381
120
+ vt 0.812238 0.267178
121
+ vt 0.812926 0.266496
122
+ vt 0.856070 0.275143
123
+ vt 0.836074 0.275143
124
+ vt 0.836074 0.256018
125
+ vt 0.856070 0.256018
126
+ vt 0.833135 0.268125
127
+ vt 0.833136 0.285520
128
+ vt 0.833136 0.260613
129
+ vt 0.832884 0.261553
130
+ vt 0.822561 0.251912
131
+ vt 0.836074 0.342234
132
+ vt 0.856070 0.342234
133
+ vt 0.856070 0.343313
134
+ vt 0.836074 0.343313
135
+ vt 0.832196 0.287142
136
+ vt 0.831262 0.241337
137
+ vt 0.832202 0.241589
138
+ vt 0.832884 0.286460
139
+ vt 0.812238 0.286454
140
+ vt 0.811986 0.243211
141
+ vt 0.812238 0.242271
142
+ vt 0.836073 0.300745
143
+ vt 0.856069 0.300745
144
+ vt 0.836073 0.298586
145
+ vt 0.856069 0.298586
146
+ vt 0.856069 0.299665
147
+ vt 0.836073 0.299665
148
+ vt 0.856070 0.276222
149
+ vt 0.836074 0.276222
150
+ vt 0.836074 0.277302
151
+ vt 0.856070 0.277302
152
+ vt 0.836074 0.253859
153
+ vt 0.856070 0.253859
154
+ vt 0.856070 0.254938
155
+ vt 0.836074 0.254938
156
+ vt 0.812920 0.287142
157
+ vt 0.856069 0.322029
158
+ vt 0.836073 0.322029
159
+ vt 0.813860 0.262487
160
+ vt 0.812920 0.262235
161
+ vt 0.836073 0.323108
162
+ vt 0.856069 0.323108
163
+ vt 0.832884 0.242277
164
+ vt 0.833135 0.243218
165
+ vt 0.812238 0.261547
166
+ vt 0.811987 0.260606
167
+ vt 0.813868 0.241338
168
+ vt 0.812926 0.241589
169
+ vt 0.831255 0.262486
170
+ vt 0.832196 0.262235
171
+ vt 0.811196 0.267507
172
+ vt 0.811506 0.266500
173
+ vt 0.810258 0.267085
174
+ vt 0.810726 0.265896
175
+ vt 0.811523 0.262225
176
+ vt 0.811189 0.261224
177
+ vt 0.810732 0.262868
178
+ vt 0.810258 0.261658
179
+ vt 0.812248 0.265781
180
+ vt 0.811605 0.264990
181
+ vt 0.812242 0.262967
182
+ vt 0.811638 0.263747
183
+ vt 0.813249 0.265447
184
+ vt 0.812815 0.264516
185
+ vt 0.813249 0.263277
186
+ vt 0.812827 0.264215
187
+ vt 0.833926 0.261224
188
+ vt 0.833616 0.262231
189
+ vt 0.834864 0.261646
190
+ vt 0.834396 0.262835
191
+ vt 0.833599 0.266506
192
+ vt 0.833933 0.267507
193
+ vt 0.834390 0.265863
194
+ vt 0.834864 0.267073
195
+ vt 0.832874 0.262950
196
+ vt 0.833517 0.263741
197
+ vt 0.832880 0.265764
198
+ vt 0.833484 0.264984
199
+ vt 0.831873 0.263284
200
+ vt 0.832307 0.264215
201
+ vt 0.831873 0.265453
202
+ vt 0.832295 0.264516
203
+ vt 0.831873 0.240547
204
+ vt 0.832880 0.240857
205
+ vt 0.832295 0.239609
206
+ vt 0.833484 0.240077
207
+ vt 0.832874 0.287857
208
+ vt 0.831873 0.288191
209
+ vt 0.833517 0.288648
210
+ vt 0.832307 0.289122
211
+ vt 0.833599 0.241599
212
+ vt 0.834390 0.240956
213
+ vt 0.833616 0.287138
214
+ vt 0.834396 0.287742
215
+ vt 0.833933 0.242600
216
+ vt 0.834864 0.242166
217
+ vt 0.833926 0.286131
218
+ vt 0.834864 0.286553
219
+ vt 0.811196 0.242600
220
+ vt 0.811506 0.241593
221
+ vt 0.810258 0.242178
222
+ vt 0.810726 0.240989
223
+ vt 0.811523 0.287132
224
+ vt 0.811189 0.286131
225
+ vt 0.810732 0.287775
226
+ vt 0.810258 0.286565
227
+ vt 0.812248 0.240874
228
+ vt 0.811605 0.240083
229
+ vt 0.812242 0.287874
230
+ vt 0.811638 0.288654
231
+ vt 0.813249 0.240540
232
+ vt 0.812815 0.239609
233
+ vt 0.813249 0.288184
234
+ vt 0.812827 0.289122
235
+ vn 0.0000 -1.0000 0.0000
236
+ vn 0.9739 0.1816 -0.1363
237
+ vn 0.9739 -0.1816 -0.1363
238
+ vn 0.8444 -0.2052 -0.4948
239
+ vn 0.8444 0.2052 -0.4948
240
+ vn 0.1363 -0.1816 0.9739
241
+ vn 0.1363 0.1816 0.9739
242
+ vn -0.1363 0.1816 0.9739
243
+ vn -0.1363 -0.1816 0.9739
244
+ vn -0.9739 -0.1816 0.1362
245
+ vn -0.9739 0.1816 0.1362
246
+ vn -0.9739 0.1816 -0.1362
247
+ vn -0.9739 -0.1816 -0.1362
248
+ vn -0.0000 1.0000 -0.0000
249
+ vn -0.1363 0.1816 -0.9739
250
+ vn -0.1363 -0.1816 -0.9739
251
+ vn -0.4948 -0.2052 -0.8444
252
+ vn -0.4948 0.2052 -0.8444
253
+ vn 0.9739 0.1816 0.1363
254
+ vn 0.9739 -0.1816 0.1363
255
+ vn 0.4948 0.2052 0.8444
256
+ vn 0.4948 -0.2052 0.8444
257
+ vn 0.8444 -0.2052 0.4948
258
+ vn 0.8444 0.2052 0.4948
259
+ vn -0.8444 -0.2052 0.4948
260
+ vn -0.8444 0.2052 0.4948
261
+ vn -0.4948 0.2052 0.8444
262
+ vn -0.4948 -0.2052 0.8444
263
+ vn -0.8444 -0.2052 -0.4948
264
+ vn -0.8444 0.2052 -0.4948
265
+ vn 0.4948 -0.2052 -0.8444
266
+ vn 0.4948 0.2052 -0.8444
267
+ vn 0.1363 0.1816 -0.9739
268
+ vn 0.1363 -0.1816 -0.9739
269
+ vn -0.0965 -0.9775 0.1874
270
+ vn -0.0187 -0.9731 0.2298
271
+ vn -0.0935 -0.6743 0.7325
272
+ vn -0.3529 -0.6994 0.6215
273
+ vn -0.0187 0.9731 0.2298
274
+ vn -0.0965 0.9775 0.1874
275
+ vn -0.3529 0.6994 0.6215
276
+ vn -0.0935 0.6743 0.7325
277
+ vn -0.1874 -0.9775 0.0965
278
+ vn -0.6215 -0.6994 0.3529
279
+ vn -0.1874 0.9775 0.0965
280
+ vn -0.6215 0.6994 0.3529
281
+ vn -0.2298 -0.9731 0.0187
282
+ vn -0.7325 -0.6743 0.0935
283
+ vn -0.2298 0.9731 0.0187
284
+ vn -0.7325 0.6743 0.0935
285
+ vn -0.0965 0.9775 -0.1874
286
+ vn -0.0187 0.9731 -0.2298
287
+ vn -0.0935 0.6743 -0.7325
288
+ vn -0.3529 0.6994 -0.6215
289
+ vn -0.0187 -0.9731 -0.2298
290
+ vn -0.0965 -0.9775 -0.1874
291
+ vn -0.3529 -0.6994 -0.6215
292
+ vn -0.0935 -0.6743 -0.7325
293
+ vn -0.1874 0.9775 -0.0965
294
+ vn -0.6215 0.6994 -0.3529
295
+ vn -0.1874 -0.9775 -0.0965
296
+ vn -0.6215 -0.6994 -0.3529
297
+ vn -0.2298 0.9731 -0.0187
298
+ vn -0.7325 0.6743 -0.0935
299
+ vn -0.2298 -0.9731 -0.0187
300
+ vn -0.7325 -0.6743 -0.0935
301
+ vn 0.1874 0.9775 -0.0965
302
+ vn 0.2298 0.9731 -0.0187
303
+ vn 0.7325 0.6743 -0.0935
304
+ vn 0.6215 0.6994 -0.3529
305
+ vn 0.2298 -0.9731 -0.0187
306
+ vn 0.1874 -0.9775 -0.0965
307
+ vn 0.6215 -0.6994 -0.3529
308
+ vn 0.7325 -0.6743 -0.0935
309
+ vn 0.0965 0.9775 -0.1874
310
+ vn 0.3529 0.6994 -0.6215
311
+ vn 0.0965 -0.9775 -0.1874
312
+ vn 0.3529 -0.6994 -0.6215
313
+ vn 0.0187 0.9731 -0.2298
314
+ vn 0.0935 0.6743 -0.7325
315
+ vn 0.0187 -0.9731 -0.2298
316
+ vn 0.0935 -0.6743 -0.7325
317
+ vn 0.0965 0.9775 0.1874
318
+ vn 0.0187 0.9731 0.2298
319
+ vn 0.0935 0.6743 0.7325
320
+ vn 0.3529 0.6994 0.6215
321
+ vn 0.0187 -0.9731 0.2298
322
+ vn 0.0965 -0.9775 0.1874
323
+ vn 0.3529 -0.6994 0.6215
324
+ vn 0.0935 -0.6743 0.7325
325
+ vn 0.1874 0.9775 0.0965
326
+ vn 0.6215 0.6994 0.3529
327
+ vn 0.1874 -0.9775 0.0965
328
+ vn 0.6215 -0.6994 0.3529
329
+ vn 0.2298 0.9731 0.0187
330
+ vn 0.7325 0.6743 0.0935
331
+ vn 0.2298 -0.9731 0.0187
332
+ vn 0.7325 -0.6743 0.0935
333
+ usemtl toybox.001
334
+ s 1
335
+ f 55/1/1 3/2/1 1/3/1
336
+ f 79/4/1 51/5/1 1/3/1
337
+ f 88/6/2 78/7/3 85/8/4 93/9/5
338
+ f 7/10/1 27/11/1 1/3/1
339
+ f 33/12/1 36/13/1 1/3/1
340
+ f 54/14/6 64/15/7 18/16/8 4/17/9
341
+ f 10/18/1 13/19/1 1/3/1
342
+ f 6/20/10 16/21/11 42/22/12 28/23/13
343
+ f 31/24/1 75/25/1 1/3/1
344
+ f 39/26/14 46/27/14 2/28/14
345
+ f 40/29/15 30/30/16 37/31/17 45/32/18
346
+ f 84/33/1 79/4/1 1/3/1
347
+ f 87/34/14 94/35/14 2/28/14
348
+ f 75/25/1 81/36/1 1/3/1
349
+ f 60/37/1 55/1/1 1/3/1
350
+ f 63/38/14 70/39/14 2/28/14
351
+ f 78/7/3 88/6/2 66/40/19 52/41/20
352
+ f 69/42/21 61/43/22 58/44/23 72/45/24
353
+ f 13/19/1 7/10/1 1/3/1
354
+ f 12/46/25 22/47/26 16/21/11 6/20/10
355
+ f 81/36/1 84/33/1 1/3/1
356
+ f 4/17/9 18/16/8 25/48/27 9/49/28
357
+ f 45/50/18 37/51/17 34/52/29 48/53/30
358
+ f 72/45/24 58/44/23 52/41/20 66/40/19
359
+ f 57/54/1 60/37/1 1/3/1
360
+ f 93/9/5 85/8/4 82/55/31 96/56/32
361
+ f 9/49/28 25/48/27 22/47/26 12/46/25
362
+ f 48/53/30 34/52/29 28/23/13 42/22/12
363
+ f 36/13/1 31/24/1 1/3/1
364
+ f 15/57/14 21/58/14 2/28/14
365
+ f 30/30/16 40/29/15 90/59/33 76/60/34
366
+ f 64/15/7 54/14/6 61/43/22 69/42/21
367
+ f 97/61/14 91/62/14 2/28/14
368
+ f 51/5/1 57/54/1 1/3/1
369
+ f 24/63/14 19/64/14 2/28/14
370
+ f 67/65/14 87/34/14 2/28/14
371
+ f 73/66/14 67/65/14 2/28/14
372
+ f 21/58/14 24/63/14 2/28/14
373
+ f 43/67/14 15/57/14 2/28/14
374
+ f 70/39/14 73/66/14 2/28/14
375
+ f 19/64/14 63/38/14 2/28/14
376
+ f 46/27/14 49/68/14 2/28/14
377
+ f 49/68/14 43/67/14 2/28/14
378
+ f 91/62/14 39/26/14 2/28/14
379
+ f 96/56/32 82/55/31 76/60/34 90/59/33
380
+ f 94/35/14 97/61/14 2/28/14
381
+ f 27/11/1 33/12/1 1/3/1
382
+ f 10/18/35 3/2/36 5/69/37 11/70/38
383
+ f 11/70/38 5/69/37 4/71/9 9/72/28
384
+ f 19/64/39 24/63/40 26/73/41 20/74/42
385
+ f 20/74/42 26/73/41 25/75/27 18/76/8
386
+ f 13/19/43 10/18/35 11/70/38 14/77/44
387
+ f 14/77/44 11/70/38 9/72/28 12/78/25
388
+ f 24/63/40 21/58/45 23/79/46 26/73/41
389
+ f 26/73/41 23/79/46 22/80/26 25/75/27
390
+ f 7/10/47 13/19/43 14/77/44 8/81/48
391
+ f 8/81/48 14/77/44 12/78/25 6/82/10
392
+ f 21/58/45 15/57/49 17/83/50 23/79/46
393
+ f 23/79/46 17/83/50 16/84/11 22/80/26
394
+ f 46/27/51 39/26/52 41/85/53 47/86/54
395
+ f 47/86/54 41/85/53 40/87/15 45/88/18
396
+ f 31/24/55 36/13/56 38/89/57 32/90/58
397
+ f 32/90/58 38/89/57 37/91/17 30/92/16
398
+ f 49/68/59 46/27/51 47/86/54 50/93/60
399
+ f 50/93/60 47/86/54 45/88/18 48/94/30
400
+ f 36/13/56 33/12/61 35/95/62 38/89/57
401
+ f 38/89/57 35/95/62 34/96/29 37/91/17
402
+ f 43/67/63 49/68/59 50/93/60 44/97/64
403
+ f 44/97/64 50/93/60 48/94/30 42/98/12
404
+ f 33/12/61 27/11/65 29/99/66 35/95/62
405
+ f 35/95/62 29/99/66 28/100/13 34/96/29
406
+ f 94/35/67 87/34/68 89/101/69 95/102/70
407
+ f 95/102/70 89/101/69 88/103/2 93/104/5
408
+ f 79/4/71 84/33/72 86/105/73 80/106/74
409
+ f 80/106/74 86/105/73 85/107/4 78/108/3
410
+ f 97/61/75 94/35/67 95/102/70 98/109/76
411
+ f 98/109/76 95/102/70 93/104/5 96/110/32
412
+ f 84/33/72 81/36/77 83/111/78 86/105/73
413
+ f 86/105/73 83/111/78 82/112/31 85/107/4
414
+ f 91/62/79 97/61/75 98/109/76 92/113/80
415
+ f 92/113/80 98/109/76 96/110/32 90/114/33
416
+ f 81/36/77 75/25/81 77/115/82 83/111/78
417
+ f 83/111/78 77/115/82 76/116/34 82/112/31
418
+ f 70/39/83 63/38/84 65/117/85 71/118/86
419
+ f 71/118/86 65/117/85 64/119/7 69/120/21
420
+ f 55/1/87 60/37/88 62/121/89 56/122/90
421
+ f 56/122/90 62/121/89 61/123/22 54/124/6
422
+ f 73/66/91 70/39/83 71/118/86 74/125/92
423
+ f 74/125/92 71/118/86 69/120/21 72/126/24
424
+ f 60/37/88 57/54/93 59/127/94 62/121/89
425
+ f 62/121/89 59/127/94 58/128/23 61/123/22
426
+ f 67/65/95 73/66/91 74/125/92 68/129/96
427
+ f 68/129/96 74/125/92 72/126/24 66/130/19
428
+ f 57/54/93 51/5/97 53/131/98 59/127/94
429
+ f 59/127/94 53/131/98 52/132/20 58/128/23
430
+ f 6/82/10 28/100/13 29/99/66 8/81/48
431
+ f 8/81/48 29/99/66 27/11/65 7/10/47
432
+ f 30/92/16 76/116/34 77/115/82 32/90/58
433
+ f 32/90/58 77/115/82 75/25/81 31/24/55
434
+ f 78/108/3 52/132/20 53/131/98 80/106/74
435
+ f 80/106/74 53/131/98 51/5/97 79/4/71
436
+ f 54/124/6 4/71/9 5/69/37 56/122/90
437
+ f 56/122/90 5/69/37 3/2/36 55/1/87
438
+ f 15/57/49 43/67/63 44/97/64 17/83/50
439
+ f 17/83/50 44/97/64 42/98/12 16/84/11
440
+ f 63/38/84 19/64/39 20/74/42 65/117/85
441
+ f 65/117/85 20/74/42 18/76/8 64/119/7
442
+ f 66/130/19 88/103/2 89/101/69 68/129/96
443
+ f 68/129/96 89/101/69 87/34/68 67/65/95
444
+ f 90/114/33 40/87/15 41/85/53 92/113/80
445
+ f 92/113/80 41/85/53 39/26/52 91/62/79
446
+ f 3/2/1 10/18/1 1/3/1
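
A hedged sketch for inspecting the exported mesh with the third-party `trimesh` library (not a dependency of this repository), e.g. to confirm its physical scale in meters:

```python
# Sketch only: load the cube mesh and print its bounding-box extents.
import trimesh

mesh = trimesh.load("envs/assets/blocks/cube.obj", force="mesh")
print(mesh.extents)  # axis-aligned size in meters, roughly 0.04 x 0.038 x 0.04
```
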
envs/assets/blocks/green_star.urdf ADDED
@@ -0,0 +1,30 @@
+ <?xml version="1.0" ?>
+ <robot name="green_star.urdf">
+   <link name="baseLink">
+     <contact>
+       <lateral_friction value="0.5"/>
+       <rolling_friction value="0.0001"/>
+       <inertia_scaling value="1.0"/>
+     </contact>
+     <inertial>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <mass value=".01"/>
+       <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
+     </inertial>
+     <visual>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <geometry>
+         <mesh filename="star.obj" scale="1.0 1.0 1.0"/>
+       </geometry>
+       <material name="green">
+         <color rgba="0.4 1.0 0.4 1"/>
+       </material>
+     </visual>
+     <collision>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <geometry>
+         <mesh filename="star.obj" scale="1.0 1.0 1.0"/>
+       </geometry>
+     </collision>
+   </link>
+ </robot>
envs/assets/blocks/moon.obj ADDED
@@ -0,0 +1,446 @@
1
+ # Blender v2.92.0 OBJ File: ''
2
+ # www.blender.org
3
+ mtllib block.mtl
4
+ o moon_red_block_Cylinder.012
5
+ v -0.009181 0.000000 -0.006901
6
+ v -0.012205 0.002032 -0.009549
7
+ v -0.011319 0.000595 -0.008773
8
+ v -0.021856 0.000000 -0.011560
9
+ v -0.021140 0.002032 -0.012614
10
+ v -0.021633 0.000595 -0.012305
11
+ v -0.021140 0.036068 -0.012614
12
+ v -0.021856 0.038100 -0.011560
13
+ v -0.021633 0.037505 -0.012305
14
+ v -0.014039 0.002032 0.009151
15
+ v -0.012620 0.000000 0.008341
16
+ v -0.013624 0.000595 0.008914
17
+ v -0.012620 0.038100 0.008341
18
+ v -0.014039 0.036068 0.009151
19
+ v -0.013624 0.037505 0.008914
20
+ v -0.024317 0.002032 -0.000986
21
+ v -0.021966 0.000000 -0.000780
22
+ v -0.023628 0.000595 -0.000925
23
+ v -0.012205 0.036068 -0.009549
24
+ v -0.009181 0.038100 -0.006901
25
+ v -0.011319 0.037505 -0.008773
26
+ v -0.021966 0.038100 -0.000780
27
+ v -0.024317 0.036068 -0.000986
28
+ v -0.023628 0.037505 -0.000925
29
+ v -0.023768 0.000000 -0.007207
30
+ v -0.026243 0.002032 -0.008076
31
+ v -0.025518 0.000595 -0.007821
32
+ v -0.023815 0.000000 -0.010019
33
+ v -0.026267 0.002032 -0.011168
34
+ v -0.025549 0.000595 -0.010832
35
+ v -0.024498 0.002032 -0.012861
36
+ v -0.023166 0.000000 -0.011548
37
+ v -0.023825 0.000595 -0.012476
38
+ v -0.026243 0.036068 -0.008076
39
+ v -0.023768 0.038100 -0.007207
40
+ v -0.025518 0.037505 -0.007821
41
+ v -0.023166 0.038100 -0.011548
42
+ v -0.024498 0.036068 -0.012861
43
+ v -0.023825 0.037505 -0.012476
44
+ v -0.026267 0.036068 -0.011168
45
+ v -0.023815 0.038100 -0.010019
46
+ v -0.025549 0.037505 -0.010832
47
+ v -0.000000 0.038100 0.001138
48
+ v -0.000000 0.000000 0.001138
49
+ v -0.000000 0.000000 -0.006070
50
+ v -0.000000 0.002032 -0.008427
51
+ v -0.000000 0.000595 -0.007737
52
+ v -0.000000 0.036068 -0.008427
53
+ v -0.000000 0.038100 -0.006070
54
+ v -0.000000 0.037505 -0.007737
55
+ v 0.009181 0.000000 -0.006901
56
+ v 0.012205 0.002032 -0.009549
57
+ v 0.011319 0.000595 -0.008773
58
+ v 0.021856 0.000000 -0.011560
59
+ v 0.021140 0.002032 -0.012614
60
+ v 0.021633 0.000595 -0.012305
61
+ v 0.021140 0.036068 -0.012614
62
+ v 0.021856 0.038100 -0.011560
63
+ v 0.021633 0.037505 -0.012305
64
+ v -0.000000 0.002032 0.012861
65
+ v -0.000000 0.000000 0.011641
66
+ v -0.000000 0.000595 0.012504
67
+ v -0.000000 0.038100 0.011640
68
+ v -0.000000 0.036068 0.012861
69
+ v -0.000000 0.037505 0.012503
70
+ v 0.014039 0.002032 0.009151
71
+ v 0.012620 0.000000 0.008341
72
+ v 0.013624 0.000595 0.008914
73
+ v 0.012620 0.038100 0.008341
74
+ v 0.014039 0.036068 0.009151
75
+ v 0.013624 0.037505 0.008914
76
+ v 0.024317 0.002032 -0.000986
77
+ v 0.021966 0.000000 -0.000780
78
+ v 0.023628 0.000595 -0.000925
79
+ v 0.012205 0.036068 -0.009549
80
+ v 0.009181 0.038100 -0.006901
81
+ v 0.011319 0.037505 -0.008773
82
+ v 0.021966 0.038100 -0.000780
83
+ v 0.024317 0.036068 -0.000986
84
+ v 0.023628 0.037505 -0.000925
85
+ v 0.023768 0.000000 -0.007207
86
+ v 0.026243 0.002032 -0.008076
87
+ v 0.025518 0.000595 -0.007821
88
+ v 0.023815 0.000000 -0.010019
89
+ v 0.026267 0.002032 -0.011168
90
+ v 0.025549 0.000595 -0.010832
91
+ v 0.024498 0.002032 -0.012861
92
+ v 0.023166 0.000000 -0.011548
93
+ v 0.023825 0.000595 -0.012476
94
+ v 0.026243 0.036068 -0.008076
95
+ v 0.023768 0.038100 -0.007207
96
+ v 0.025518 0.037505 -0.007821
97
+ v 0.023166 0.038100 -0.011548
98
+ v 0.024498 0.036068 -0.012861
99
+ v 0.023825 0.037505 -0.012476
100
+ v 0.026267 0.036068 -0.011168
101
+ v 0.023815 0.038100 -0.010019
102
+ v 0.025549 0.037505 -0.010832
103
+ vt 0.710579 0.167405
104
+ vt 0.710864 0.161788
105
+ vt 0.714864 0.161788
106
+ vt 0.686288 0.163021
107
+ vt 0.706306 0.163021
108
+ vt 0.706306 0.165002
109
+ vt 0.686288 0.165002
110
+ vt 0.708556 0.175784
111
+ vt 0.708475 0.175088
112
+ vt 0.721180 0.161788
113
+ vt 0.719512 0.169043
114
+ vt 0.714650 0.174731
115
+ vt 0.711007 0.176048
116
+ vt 0.720186 0.145021
117
+ vt 0.718589 0.144931
118
+ vt 0.714732 0.130671
119
+ vt 0.709411 0.176139
120
+ vt 0.721121 0.143971
121
+ vt 0.721041 0.144667
122
+ vt 0.686288 0.138411
123
+ vt 0.706306 0.138411
124
+ vt 0.706306 0.146951
125
+ vt 0.686288 0.146951
126
+ vt 0.706306 0.155441
127
+ vt 0.686288 0.155441
128
+ vt 0.706306 0.159762
129
+ vt 0.686288 0.159762
130
+ vt 0.714946 0.143614
131
+ vt 0.686288 0.177766
132
+ vt 0.686288 0.170557
133
+ vt 0.706306 0.170557
134
+ vt 0.706306 0.177766
135
+ vt 0.686288 0.161581
136
+ vt 0.706306 0.161581
137
+ vt 0.718732 0.130671
138
+ vt 0.719018 0.136287
139
+ vt 0.710084 0.137925
140
+ vt 0.708416 0.130671
141
+ vt 0.709447 0.168778
142
+ vt 0.709791 0.161788
143
+ vt 0.708408 0.169256
144
+ vt 0.708838 0.161788
145
+ vt 0.707932 0.175020
146
+ vt 0.707041 0.174762
147
+ vt 0.722722 0.161788
148
+ vt 0.720733 0.170191
149
+ vt 0.719979 0.169645
150
+ vt 0.721789 0.161788
151
+ vt 0.708863 0.139073
152
+ vt 0.706874 0.130671
153
+ vt 0.707807 0.130671
154
+ vt 0.709618 0.138527
155
+ vt 0.715051 0.176717
156
+ vt 0.714702 0.175778
157
+ vt 0.719806 0.130670
158
+ vt 0.720149 0.137661
159
+ vt 0.720758 0.130670
160
+ vt 0.721188 0.138138
161
+ vt 0.721665 0.143903
162
+ vt 0.722555 0.143645
163
+ vt 0.714546 0.145599
164
+ vt 0.714894 0.144661
165
+ vt 0.721564 0.145099
166
+ vt 0.720661 0.146046
167
+ vt 0.722312 0.145803
168
+ vt 0.721098 0.146965
169
+ vt 0.708935 0.177163
170
+ vt 0.708032 0.176217
171
+ vt 0.708498 0.178082
172
+ vt 0.707284 0.176921
173
+ vt 0.718902 0.146070
174
+ vt 0.719002 0.147136
175
+ vt 0.710695 0.177187
176
+ vt 0.710594 0.178254
177
+ vt 0.710579 0.156171
178
+ vt 0.686288 0.192510
179
+ vt 0.686288 0.190530
180
+ vt 0.706306 0.190530
181
+ vt 0.706306 0.192510
182
+ vt 0.708556 0.147792
183
+ vt 0.708475 0.148488
184
+ vt 0.719512 0.154534
185
+ vt 0.714650 0.148845
186
+ vt 0.711007 0.147528
187
+ vt 0.720186 0.116320
188
+ vt 0.718589 0.116410
189
+ vt 0.709411 0.147437
190
+ vt 0.721121 0.117370
191
+ vt 0.721041 0.116674
192
+ vt 0.686288 0.129870
193
+ vt 0.706306 0.129870
194
+ vt 0.686288 0.121380
195
+ vt 0.706306 0.121380
196
+ vt 0.686288 0.117059
197
+ vt 0.706306 0.117059
198
+ vt 0.714946 0.117727
199
+ vt 0.706306 0.184974
200
+ vt 0.686288 0.184974
201
+ vt 0.706306 0.193950
202
+ vt 0.686288 0.193950
203
+ vt 0.719018 0.125054
204
+ vt 0.710084 0.123416
205
+ vt 0.706306 0.115240
206
+ vt 0.686288 0.115240
207
+ vt 0.709447 0.154798
208
+ vt 0.708408 0.154321
209
+ vt 0.707932 0.148556
210
+ vt 0.707041 0.148814
211
+ vt 0.719979 0.153931
212
+ vt 0.720733 0.153385
213
+ vt 0.708863 0.122268
214
+ vt 0.709618 0.122814
215
+ vt 0.714702 0.147798
216
+ vt 0.715051 0.146860
217
+ vt 0.720149 0.123680
218
+ vt 0.721188 0.123203
219
+ vt 0.721665 0.117438
220
+ vt 0.722555 0.117696
221
+ vt 0.714546 0.115742
222
+ vt 0.714894 0.116680
223
+ vt 0.720661 0.115295
224
+ vt 0.721564 0.116241
225
+ vt 0.721098 0.114376
226
+ vt 0.722312 0.115538
227
+ vt 0.708032 0.147359
228
+ vt 0.708935 0.146413
229
+ vt 0.707284 0.146655
230
+ vt 0.708498 0.145494
231
+ vt 0.718901 0.115271
232
+ vt 0.719002 0.114205
233
+ vt 0.710695 0.146389
234
+ vt 0.710595 0.145323
235
+ vn 0.0000 -1.0000 -0.0000
236
+ vn -0.3609 -0.1941 -0.9122
237
+ vn -0.3609 0.1941 -0.9122
238
+ vn 0.2092 0.1359 -0.9684
239
+ vn 0.2092 -0.1359 -0.9684
240
+ vn 0.0000 1.0000 -0.0000
241
+ vn -0.0000 -0.1176 0.9931
242
+ vn -0.0000 0.1176 0.9931
243
+ vn -0.5010 0.1369 0.8546
244
+ vn -0.5010 -0.1369 0.8546
245
+ vn -0.8572 0.1847 0.4806
246
+ vn -0.8572 -0.1847 0.4806
247
+ vn -0.9678 0.2183 0.1257
248
+ vn -0.9678 -0.2183 0.1257
249
+ vn 0.0000 -0.2255 -0.9742
250
+ vn 0.2279 -0.1771 -0.9575
251
+ vn 0.2279 0.1771 -0.9575
252
+ vn 0.0000 0.2255 -0.9742
253
+ vn -0.8945 -0.2361 -0.3797
254
+ vn -0.8945 0.2361 -0.3797
255
+ vn 0.0000 -0.9870 -0.1606
256
+ vn 0.0247 -0.9852 -0.1694
257
+ vn 0.1624 -0.6740 -0.7206
258
+ vn 0.0000 -0.7551 -0.6556
259
+ vn 0.0583 -0.9460 -0.3190
260
+ vn 0.1517 -0.5680 -0.8089
261
+ vn -0.4024 -0.5836 0.7053
262
+ vn 0.0000 -0.5393 0.8421
263
+ vn -0.1318 -0.9470 0.2929
264
+ vn 0.0000 -0.9390 0.3440
265
+ vn 0.0000 0.5393 0.8421
266
+ vn -0.4024 0.5836 0.7053
267
+ vn 0.0000 0.9390 0.3440
268
+ vn -0.1318 0.9470 0.2929
269
+ vn -0.6387 -0.6655 0.3863
270
+ vn -0.2030 -0.9669 0.1548
271
+ vn 0.0247 0.9852 -0.1694
272
+ vn -0.0000 0.9870 -0.1606
273
+ vn 0.0000 0.7551 -0.6556
274
+ vn 0.1624 0.6740 -0.7206
275
+ vn 0.0583 0.9460 -0.3190
276
+ vn 0.1517 0.5680 -0.8089
277
+ vn -0.6387 0.6655 0.3863
278
+ vn -0.2030 0.9669 0.1548
279
+ vn -0.1804 0.9821 -0.0543
280
+ vn -0.0951 0.9531 -0.2874
281
+ vn -0.2234 0.6362 -0.7384
282
+ vn -0.6431 0.7203 -0.2600
283
+ vn -0.0951 -0.9531 -0.2874
284
+ vn -0.1804 -0.9821 -0.0543
285
+ vn -0.6431 -0.7203 -0.2600
286
+ vn -0.2234 -0.6362 -0.7384
287
+ vn -0.1858 0.9820 0.0343
288
+ vn -0.6801 0.7265 0.0987
289
+ vn -0.1858 -0.9820 0.0343
290
+ vn -0.6801 -0.7265 0.0987
291
+ vn 0.3609 -0.1941 -0.9122
292
+ vn -0.2092 -0.1359 -0.9684
293
+ vn -0.2092 0.1359 -0.9684
294
+ vn 0.3609 0.1941 -0.9122
295
+ vn 0.5010 -0.1369 0.8546
296
+ vn 0.5010 0.1369 0.8546
297
+ vn 0.8572 -0.1847 0.4806
298
+ vn 0.8572 0.1847 0.4806
299
+ vn 0.9678 -0.2183 0.1257
300
+ vn 0.9678 0.2183 0.1257
301
+ vn -0.2279 0.1771 -0.9575
302
+ vn -0.2279 -0.1771 -0.9575
303
+ vn 0.8945 0.2361 -0.3797
304
+ vn 0.8945 -0.2361 -0.3797
305
+ vn -0.1624 -0.6740 -0.7206
306
+ vn -0.0247 -0.9852 -0.1694
307
+ vn -0.1517 -0.5680 -0.8089
308
+ vn -0.0583 -0.9460 -0.3190
309
+ vn 0.4024 -0.5836 0.7053
310
+ vn 0.1318 -0.9470 0.2929
311
+ vn 0.4024 0.5836 0.7053
312
+ vn 0.1318 0.9470 0.2929
313
+ vn 0.6387 -0.6655 0.3863
314
+ vn 0.2030 -0.9669 0.1548
315
+ vn -0.0247 0.9852 -0.1694
316
+ vn -0.1624 0.6740 -0.7206
317
+ vn -0.0583 0.9460 -0.3190
318
+ vn -0.1517 0.5680 -0.8089
319
+ vn 0.6387 0.6655 0.3863
320
+ vn 0.2030 0.9669 0.1548
321
+ vn 0.1804 0.9821 -0.0543
322
+ vn 0.6431 0.7203 -0.2600
323
+ vn 0.2234 0.6362 -0.7384
324
+ vn 0.0951 0.9531 -0.2874
325
+ vn 0.0951 -0.9531 -0.2874
326
+ vn 0.2234 -0.6362 -0.7384
327
+ vn 0.6431 -0.7203 -0.2600
328
+ vn 0.1804 -0.9821 -0.0543
329
+ vn 0.1858 0.9820 0.0343
330
+ vn 0.6801 0.7265 0.0987
331
+ vn 0.6801 -0.7265 0.0987
332
+ vn 0.1858 -0.9820 0.0343
333
+ usemtl toybox
334
+ s 1
335
+ f 1/1/1 45/2/1 44/3/1
336
+ f 31/4/2 38/5/3 7/6/4 5/7/5
337
+ f 32/8/1 4/9/1 44/3/1
338
+ f 61/10/1 11/11/1 44/3/1
339
+ f 11/11/1 17/12/1 44/3/1
340
+ f 17/12/1 25/13/1 44/3/1
341
+ f 41/14/6 35/15/6 43/16/6
342
+ f 28/17/1 32/8/1 44/3/1
343
+ f 8/18/6 37/19/6 43/16/6
344
+ f 37/19/6 41/14/6 43/16/6
345
+ f 25/13/1 28/17/1 44/3/1
346
+ f 60/20/7 64/21/8 14/22/9 10/23/10
347
+ f 10/23/10 14/22/9 23/24/11 16/25/12
348
+ f 16/25/12 23/24/11 34/26/13 26/27/14
349
+ f 35/15/6 22/28/6 43/16/6
350
+ f 4/9/1 1/1/1 44/3/1
351
+ f 46/29/15 2/30/16 19/31/17 48/32/18
352
+ f 38/5/3 31/4/2 29/33/19 40/34/20
353
+ f 49/35/6 20/36/6 43/16/6
354
+ f 13/37/6 63/38/6 43/16/6
355
+ f 2/30/16 5/7/5 7/6/4 19/31/17
356
+ f 40/34/20 29/33/19 26/27/14 34/26/13
357
+ f 20/36/6 8/18/6 43/16/6
358
+ f 22/28/6 13/37/6 43/16/6
359
+ f 45/2/21 1/1/22 3/39/23 47/40/24
360
+ f 47/40/24 3/39/23 2/41/16 46/42/15
361
+ f 1/1/22 4/9/25 6/43/26 3/39/23
362
+ f 3/39/23 6/43/26 5/44/5 2/41/16
363
+ f 60/45/7 10/46/10 12/47/27 62/48/28
364
+ f 62/48/28 12/47/27 11/11/29 61/10/30
365
+ f 14/49/9 64/50/8 65/51/31 15/52/32
366
+ f 15/52/32 65/51/31 63/38/33 13/37/34
367
+ f 10/46/10 16/53/12 18/54/35 12/47/27
368
+ f 12/47/27 18/54/35 17/12/36 11/11/29
369
+ f 20/36/37 49/35/38 50/55/39 21/56/40
370
+ f 21/56/40 50/55/39 48/57/18 19/58/17
371
+ f 8/18/41 20/36/37 21/56/40 9/59/42
372
+ f 9/59/42 21/56/40 19/58/17 7/60/4
373
+ f 23/61/11 14/49/9 15/52/32 24/62/43
374
+ f 24/62/43 15/52/32 13/37/34 22/28/44
375
+ f 41/14/45 37/19/46 39/63/47 42/64/48
376
+ f 42/64/48 39/63/47 38/65/3 40/66/20
377
+ f 32/8/49 28/17/50 30/67/51 33/68/52
378
+ f 33/68/52 30/67/51 29/69/19 31/70/2
379
+ f 35/15/53 41/14/45 42/64/48 36/71/54
380
+ f 36/71/54 42/64/48 40/66/20 34/72/13
381
+ f 28/17/50 25/13/55 27/73/56 30/67/51
382
+ f 30/67/51 27/73/56 26/74/14 29/69/19
383
+ f 25/13/55 17/12/36 18/54/35 27/73/56
384
+ f 27/73/56 18/54/35 16/53/12 26/74/14
385
+ f 22/28/44 35/15/53 36/71/54 24/62/43
386
+ f 24/62/43 36/71/54 34/72/13 23/61/11
387
+ f 4/9/25 32/8/49 33/68/52 6/43/26
388
+ f 6/43/26 33/68/52 31/70/2 5/44/5
389
+ f 37/19/46 8/18/41 9/59/42 39/63/47
390
+ f 39/63/47 9/59/42 7/60/4 38/65/3
391
+ f 51/75/1 44/3/1 45/2/1
392
+ f 87/76/57 55/77/58 57/78/59 94/79/60
393
+ f 88/80/1 44/3/1 54/81/1
394
+ f 61/10/1 44/3/1 67/82/1
395
+ f 67/82/1 44/3/1 73/83/1
396
+ f 73/83/1 44/3/1 81/84/1
397
+ f 97/85/6 43/16/6 91/86/6
398
+ f 84/87/1 44/3/1 88/80/1
399
+ f 58/88/6 43/16/6 93/89/6
400
+ f 93/89/6 43/16/6 97/85/6
401
+ f 81/84/1 44/3/1 84/87/1
402
+ f 60/20/7 66/90/61 70/91/62 64/21/8
403
+ f 66/90/61 72/92/63 79/93/64 70/91/62
404
+ f 72/92/63 82/94/65 90/95/66 79/93/64
405
+ f 91/86/6 43/16/6 78/96/6
406
+ f 54/81/1 44/3/1 51/75/1
407
+ f 46/29/15 48/32/18 75/97/67 52/98/68
408
+ f 94/79/60 96/99/69 85/100/70 87/76/57
409
+ f 49/35/6 43/16/6 76/101/6
410
+ f 69/102/6 43/16/6 63/38/6
411
+ f 52/98/68 75/97/67 57/78/59 55/77/58
412
+ f 96/103/69 90/95/66 82/94/65 85/104/70
413
+ f 76/101/6 43/16/6 58/88/6
414
+ f 78/96/6 43/16/6 69/102/6
415
+ f 45/2/21 47/40/24 53/105/71 51/75/72
416
+ f 47/40/24 46/42/15 52/106/68 53/105/71
417
+ f 51/75/72 53/105/71 56/107/73 54/81/74
418
+ f 53/105/71 52/106/68 55/108/58 56/107/73
419
+ f 60/45/7 62/48/28 68/109/75 66/110/61
420
+ f 62/48/28 61/10/30 67/82/76 68/109/75
421
+ f 70/111/62 71/112/77 65/51/31 64/50/8
422
+ f 71/112/77 69/102/78 63/38/33 65/51/31
423
+ f 66/110/61 68/109/75 74/113/79 72/114/63
424
+ f 68/109/75 67/82/76 73/83/80 74/113/79
425
+ f 76/101/81 77/115/82 50/55/39 49/35/38
426
+ f 77/115/82 75/116/67 48/57/18 50/55/39
427
+ f 58/88/83 59/117/84 77/115/82 76/101/81
428
+ f 59/117/84 57/118/59 75/116/67 77/115/82
429
+ f 79/119/64 80/120/85 71/112/77 70/111/62
430
+ f 80/120/85 78/96/86 69/102/78 71/112/77
431
+ f 97/85/87 98/121/88 95/122/89 93/89/90
432
+ f 98/121/88 96/123/69 94/124/60 95/122/89
433
+ f 88/80/91 89/125/92 86/126/93 84/87/94
434
+ f 89/125/92 87/127/57 85/128/70 86/126/93
435
+ f 91/86/95 92/129/96 98/121/88 97/85/87
436
+ f 92/129/96 90/130/66 96/123/69 98/121/88
437
+ f 84/87/94 86/126/93 83/131/97 81/84/98
438
+ f 86/126/93 85/128/70 82/132/65 83/131/97
439
+ f 81/84/98 83/131/97 74/113/79 73/83/80
440
+ f 83/131/97 82/132/65 72/114/63 74/113/79
441
+ f 78/96/86 80/120/85 92/129/96 91/86/95
442
+ f 80/120/85 79/119/64 90/130/66 92/129/96
443
+ f 54/81/74 56/107/73 89/125/92 88/80/91
444
+ f 56/107/73 55/108/58 87/127/57 89/125/92
445
+ f 93/89/90 95/122/89 59/117/84 58/88/83
446
+ f 95/122/89 94/124/60 57/118/59 59/117/84
envs/assets/blocks/pentagon.obj ADDED
@@ -0,0 +1,419 @@
1
+ # Blender v2.92.0 OBJ File: ''
2
+ # www.blender.org
3
+ mtllib pentagon.mtl
4
+ o pentagon_yellow_block_Cube.003
5
+ v -0.000000 0.000000 -0.001329
6
+ v -0.000000 0.038100 -0.001329
7
+ v -0.010789 0.000000 0.015873
8
+ v -0.011940 0.002032 0.017708
9
+ v -0.011603 0.000595 0.017170
10
+ v -0.014201 0.002032 0.016092
11
+ v -0.012833 0.000000 0.014413
12
+ v -0.013800 0.000595 0.015600
13
+ v -0.013310 0.002032 0.017235
14
+ v -0.012029 0.000000 0.015448
15
+ v -0.012935 0.000595 0.016711
16
+ v -0.012833 0.038100 0.014413
17
+ v -0.014201 0.036068 0.016092
18
+ v -0.013800 0.037505 0.015600
19
+ v -0.011940 0.036068 0.017708
20
+ v -0.010789 0.038100 0.015873
21
+ v -0.011603 0.037505 0.017170
22
+ v -0.012029 0.038100 0.015448
23
+ v -0.013310 0.036068 0.017235
24
+ v -0.012935 0.037505 0.016711
25
+ v -0.020074 0.000000 -0.006755
26
+ v -0.022213 0.002032 -0.007333
27
+ v -0.021586 0.000595 -0.007164
28
+ v -0.021345 0.002032 -0.009898
29
+ v -0.019288 0.000000 -0.009072
30
+ v -0.020743 0.000595 -0.009656
31
+ v -0.022188 0.002032 -0.008754
32
+ v -0.020054 0.000000 -0.008040
33
+ v -0.021563 0.000595 -0.008545
34
+ v -0.019288 0.038100 -0.009072
35
+ v -0.021345 0.036068 -0.009898
36
+ v -0.020743 0.037505 -0.009656
37
+ v -0.022213 0.036068 -0.007333
38
+ v -0.020074 0.038100 -0.006755
39
+ v -0.021586 0.037505 -0.007164
40
+ v -0.022188 0.036068 -0.008754
41
+ v -0.020054 0.038100 -0.008040
42
+ v -0.021563 0.037505 -0.008545
43
+ v 0.012833 0.000000 0.014413
44
+ v 0.014201 0.002032 0.016092
45
+ v 0.013800 0.000595 0.015600
46
+ v 0.011940 0.002032 0.017708
47
+ v 0.010789 0.000000 0.015873
48
+ v 0.011603 0.000595 0.017170
49
+ v 0.012029 0.000000 0.015448
50
+ v 0.013310 0.002032 0.017235
51
+ v 0.012935 0.000595 0.016711
52
+ v 0.010789 0.038100 0.015873
53
+ v 0.011940 0.036068 0.017708
54
+ v 0.011603 0.037505 0.017170
55
+ v 0.014201 0.036068 0.016092
56
+ v 0.012833 0.038100 0.014413
57
+ v 0.013800 0.037505 0.015600
58
+ v 0.013310 0.036068 0.017235
59
+ v 0.012029 0.038100 0.015448
60
+ v 0.012935 0.037505 0.016711
61
+ v 0.019288 0.000000 -0.009072
62
+ v 0.021345 0.002032 -0.009898
63
+ v 0.020743 0.000595 -0.009656
64
+ v 0.022213 0.002032 -0.007333
65
+ v 0.020074 0.000000 -0.006755
66
+ v 0.021586 0.000595 -0.007164
67
+ v 0.022188 0.002032 -0.008754
68
+ v 0.020054 0.000000 -0.008040
69
+ v 0.021563 0.000595 -0.008545
70
+ v 0.020074 0.038100 -0.006755
71
+ v 0.022213 0.036068 -0.007333
72
+ v 0.021586 0.037505 -0.007164
73
+ v 0.021345 0.036068 -0.009898
74
+ v 0.019288 0.038100 -0.009072
75
+ v 0.020743 0.037505 -0.009656
76
+ v 0.022188 0.036068 -0.008754
77
+ v 0.020054 0.038100 -0.008040
78
+ v 0.021563 0.037505 -0.008545
79
+ v 0.001283 0.038100 -0.021099
80
+ v 0.001420 0.036068 -0.023214
81
+ v 0.001380 0.037505 -0.022595
82
+ v -0.001420 0.036068 -0.023214
83
+ v -0.001283 0.038100 -0.021099
84
+ v -0.001380 0.037505 -0.022595
85
+ v 0.000000 0.036068 -0.023607
86
+ v -0.000000 0.038100 -0.021456
87
+ v -0.000000 0.037505 -0.022977
88
+ v -0.001283 0.000000 -0.021099
89
+ v -0.001420 0.002032 -0.023214
90
+ v -0.001380 0.000595 -0.022595
91
+ v 0.001420 0.002032 -0.023214
92
+ v 0.001283 0.000000 -0.021099
93
+ v 0.001380 0.000595 -0.022595
94
+ v -0.000000 0.000000 -0.021456
95
+ v 0.000000 0.002032 -0.023607
96
+ v -0.000000 0.000595 -0.022977
97
+ vt 0.718822 0.029303
98
+ vt 0.730852 0.033346
99
+ vt 0.721614 0.040891
100
+ vt 0.709480 0.040139
101
+ vt 0.717419 0.029729
102
+ vt 0.717181 0.051685
103
+ vt 0.709481 0.041565
104
+ vt 0.686374 0.033371
105
+ vt 0.706477 0.033371
106
+ vt 0.706477 0.034211
107
+ vt 0.686374 0.034211
108
+ vt 0.731711 0.034535
109
+ vt 0.731731 0.047644
110
+ vt 0.732155 0.013538
111
+ vt 0.732386 0.014251
112
+ vt 0.720023 0.014212
113
+ vt 0.724218 0.025374
114
+ vt 0.723581 0.025796
115
+ vt 0.731459 0.033813
116
+ vt 0.686374 0.065488
117
+ vt 0.706477 0.065488
118
+ vt 0.706477 0.066344
119
+ vt 0.686374 0.066344
120
+ vt 0.730865 0.048779
121
+ vt 0.718604 0.052165
122
+ vt 0.717826 0.052123
123
+ vt 0.710784 0.021757
124
+ vt 0.710178 0.021290
125
+ vt 0.706477 0.050528
126
+ vt 0.686374 0.050528
127
+ vt 0.686374 0.049672
128
+ vt 0.706477 0.049672
129
+ vt 0.709250 0.040852
130
+ vt 0.718055 0.029306
131
+ vt 0.686374 0.051384
132
+ vt 0.706477 0.051384
133
+ vt 0.686374 0.035050
134
+ vt 0.706477 0.035050
135
+ vt 0.686374 0.019217
136
+ vt 0.706477 0.019217
137
+ vt 0.731482 0.048351
138
+ vt 0.709906 0.007458
139
+ vt 0.710155 0.006751
140
+ vt 0.686374 0.003322
141
+ vt 0.706477 0.003322
142
+ vt 0.706477 0.017476
143
+ vt 0.686374 0.017476
144
+ vt 0.706477 0.081821
145
+ vt 0.686374 0.081821
146
+ vt 0.686374 0.067199
147
+ vt 0.706477 0.067199
148
+ vt 0.723811 0.002980
149
+ vt 0.724455 0.003418
150
+ vt 0.706477 0.082661
151
+ vt 0.686374 0.082661
152
+ vt 0.686374 0.002483
153
+ vt 0.706477 0.002483
154
+ vt 0.732157 0.014964
155
+ vt 0.723033 0.002938
156
+ vt 0.722814 0.025800
157
+ vt 0.686374 0.018347
158
+ vt 0.706477 0.018347
159
+ vt 0.709926 0.020568
160
+ vt 0.710771 0.006324
161
+ vt 0.733509 0.048629
162
+ vt 0.733072 0.049555
163
+ vt 0.732273 0.048949
164
+ vt 0.732572 0.048190
165
+ vt 0.732296 0.050225
166
+ vt 0.731620 0.049441
167
+ vt 0.734168 0.013254
168
+ vt 0.734379 0.014256
169
+ vt 0.733377 0.014254
170
+ vt 0.733156 0.013468
171
+ vt 0.734166 0.015257
172
+ vt 0.733156 0.015039
173
+ vt 0.707468 0.041849
174
+ vt 0.707257 0.040847
175
+ vt 0.708259 0.040849
176
+ vt 0.708480 0.041634
177
+ vt 0.707471 0.039846
178
+ vt 0.708480 0.040063
179
+ vt 0.708128 0.006474
180
+ vt 0.708564 0.005547
181
+ vt 0.709364 0.006153
182
+ vt 0.709064 0.006913
183
+ vt 0.709341 0.004877
184
+ vt 0.710016 0.005662
185
+ vt 0.731599 0.032714
186
+ vt 0.732251 0.033244
187
+ vt 0.732301 0.031933
188
+ vt 0.733073 0.032652
189
+ vt 0.723869 0.026728
190
+ vt 0.723033 0.026764
191
+ vt 0.724166 0.027687
192
+ vt 0.723129 0.027790
193
+ vt 0.732546 0.034029
194
+ vt 0.733509 0.033612
195
+ vt 0.724576 0.026281
196
+ vt 0.725089 0.027190
197
+ vt 0.710038 0.022388
198
+ vt 0.709386 0.021859
199
+ vt 0.709336 0.023170
200
+ vt 0.708563 0.022451
201
+ vt 0.717767 0.028375
202
+ vt 0.718604 0.028339
203
+ vt 0.717470 0.027416
204
+ vt 0.718508 0.027313
205
+ vt 0.709090 0.021074
206
+ vt 0.708128 0.021490
207
+ vt 0.717060 0.028822
208
+ vt 0.716547 0.027913
209
+ vt 0.723265 0.002005
210
+ vt 0.724118 0.002071
211
+ vt 0.723385 0.000970
212
+ vt 0.724440 0.001117
213
+ vt 0.717519 0.053032
214
+ vt 0.716801 0.052568
215
+ vt 0.717197 0.053986
216
+ vt 0.716269 0.053464
217
+ vt 0.724836 0.002535
218
+ vt 0.725367 0.001639
219
+ vt 0.718372 0.053098
220
+ vt 0.718251 0.054133
221
+ vn 0.0000 -1.0000 0.0000
222
+ vn -0.6839 0.1913 -0.7040
223
+ vn -0.6839 -0.1913 -0.7040
224
+ vn -0.9271 -0.2054 -0.3134
225
+ vn -0.9271 0.2054 -0.3134
226
+ vn 0.0000 1.0000 -0.0000
227
+ vn 0.1696 0.1897 0.9671
228
+ vn 0.1696 -0.1897 0.9671
229
+ vn 0.5696 -0.2019 0.7968
230
+ vn 0.5696 0.2019 0.7968
231
+ vn -0.5696 -0.2019 0.7968
232
+ vn -0.5696 0.2019 0.7968
233
+ vn -0.8602 0.1898 0.4734
234
+ vn -0.8602 -0.1898 0.4734
235
+ vn -0.1696 0.1897 0.9671
236
+ vn -0.1696 -0.1897 0.9671
237
+ vn -0.9708 0.1916 0.1446
238
+ vn -0.9708 -0.1916 0.1446
239
+ vn -0.4034 0.1878 -0.8955
240
+ vn -0.4034 -0.1878 -0.8955
241
+ vn 0.6839 0.1913 -0.7040
242
+ vn 0.6839 -0.1913 -0.7040
243
+ vn 0.4034 -0.1878 -0.8955
244
+ vn 0.4034 0.1878 -0.8955
245
+ vn 0.9708 -0.1916 0.1446
246
+ vn 0.9708 0.1916 0.1446
247
+ vn 0.8602 0.1898 0.4734
248
+ vn 0.8602 -0.1898 0.4734
249
+ vn 0.9271 -0.2054 -0.3134
250
+ vn 0.9271 0.2054 -0.3134
251
+ vn 0.0000 0.1985 -0.9801
252
+ vn 0.0000 -0.1985 -0.9801
253
+ vn -0.6793 -0.6969 -0.2300
254
+ vn -0.7180 -0.6862 0.1166
255
+ vn -0.2003 -0.9774 -0.0681
256
+ vn -0.2163 -0.9753 0.0454
257
+ vn -0.5001 -0.6857 -0.5289
258
+ vn -0.1444 -0.9752 -0.1679
259
+ vn -0.6793 0.6969 -0.2300
260
+ vn -0.5001 0.6857 -0.5289
261
+ vn -0.2003 0.9774 -0.0681
262
+ vn -0.1444 0.9752 -0.1679
263
+ vn -0.7180 0.6862 0.1166
264
+ vn -0.2163 0.9753 0.0454
265
+ vn 0.6793 -0.6969 -0.2300
266
+ vn 0.5001 -0.6857 -0.5289
267
+ vn 0.2003 -0.9774 -0.0681
268
+ vn 0.1444 -0.9752 -0.1679
269
+ vn 0.7180 -0.6862 0.1166
270
+ vn 0.2163 -0.9753 0.0454
271
+ vn 0.6793 0.6969 -0.2300
272
+ vn 0.7180 0.6862 0.1166
273
+ vn 0.2003 0.9774 -0.0681
274
+ vn 0.2163 0.9753 0.0454
275
+ vn 0.5001 0.6857 -0.5289
276
+ vn 0.1444 0.9752 -0.1679
277
+ vn -0.1241 -0.9769 0.1739
278
+ vn -0.0267 -0.9752 0.2199
279
+ vn -0.1178 -0.6841 0.7198
280
+ vn -0.4189 -0.6935 0.5862
281
+ vn -0.0267 0.9752 0.2199
282
+ vn -0.1242 0.9769 0.1739
283
+ vn -0.4189 0.6935 0.5862
284
+ vn -0.1178 0.6841 0.7198
285
+ vn -0.1992 -0.9752 0.0965
286
+ vn -0.6427 -0.6843 0.3444
287
+ vn -0.1992 0.9752 0.0965
288
+ vn -0.6427 0.6843 0.3444
289
+ vn 0.1242 0.9769 0.1739
290
+ vn 0.0267 0.9752 0.2199
291
+ vn 0.1178 0.6841 0.7198
292
+ vn 0.4189 0.6935 0.5862
293
+ vn 0.0267 -0.9752 0.2199
294
+ vn 0.1241 -0.9769 0.1739
295
+ vn 0.4189 -0.6935 0.5862
296
+ vn 0.1178 -0.6841 0.7198
297
+ vn 0.1992 0.9752 0.0965
298
+ vn 0.6427 0.6843 0.3444
299
+ vn 0.1992 -0.9752 0.0965
300
+ vn 0.6427 -0.6843 0.3444
301
+ vn -0.0000 0.9764 -0.2159
302
+ vn 0.1005 0.9750 -0.1981
303
+ vn 0.3067 0.6821 -0.6638
304
+ vn -0.0000 0.6900 -0.7238
305
+ vn 0.1005 -0.9750 -0.1981
306
+ vn 0.0000 -0.9764 -0.2159
307
+ vn 0.0000 -0.6900 -0.7238
308
+ vn 0.3067 -0.6821 -0.6638
309
+ vn -0.1005 0.9750 -0.1981
310
+ vn -0.3067 0.6821 -0.6638
311
+ vn -0.1005 -0.9750 -0.1981
312
+ vn -0.3067 -0.6821 -0.6638
313
+ usemtl toybox
314
+ s 1
315
+ f 43/1/1 3/2/1 1/3/1
316
+ f 61/4/1 39/5/1 1/3/1
317
+ f 88/6/1 57/7/1 1/3/1
318
+ f 31/8/2 24/9/3 27/10/4 36/11/5
319
+ f 7/12/1 21/13/1 1/3/1
320
+ f 30/14/6 37/15/6 2/16/6
321
+ f 12/17/6 18/18/6 2/16/6
322
+ f 10/19/1 7/12/1 1/3/1
323
+ f 49/20/7 42/21/8 46/22/9 54/23/10
324
+ f 25/24/1 84/25/1 1/3/1
325
+ f 90/26/1 88/6/1 1/3/1
326
+ f 3/2/1 10/19/1 1/3/1
327
+ f 48/27/6 55/28/6 2/16/6
328
+ f 9/29/11 19/30/12 13/31/13 6/32/14
329
+ f 64/33/1 61/4/1 1/3/1
330
+ f 39/5/1 45/34/1 1/3/1
331
+ f 42/21/8 49/20/7 15/35/15 4/36/16
332
+ f 6/32/14 13/31/13 33/37/17 22/38/18
333
+ f 24/9/3 31/8/2 78/39/19 85/40/20
334
+ f 21/13/1 28/41/1 1/3/1
335
+ f 66/42/6 73/43/6 2/16/6
336
+ f 69/44/21 58/45/22 87/46/23 76/47/24
337
+ f 45/34/1 43/1/1 1/3/1
338
+ f 60/48/25 67/49/26 51/50/27 40/51/28
339
+ f 82/52/6 79/53/6 2/16/6
340
+ f 84/25/1 90/26/1 1/3/1
341
+ f 67/49/26 60/48/25 63/54/29 72/55/30
342
+ f 28/41/1 25/24/1 1/3/1
343
+ f 72/56/30 63/57/29 58/45/22 69/44/21
344
+ f 37/15/6 34/58/6 2/16/6
345
+ f 79/53/6 30/14/6 2/16/6
346
+ f 75/59/6 82/52/6 2/16/6
347
+ f 18/18/6 16/60/6 2/16/6
348
+ f 54/23/10 46/22/9 40/51/28 51/50/27
349
+ f 81/61/31 91/62/32 85/40/20 78/39/19
350
+ f 16/60/6 48/27/6 2/16/6
351
+ f 52/63/6 66/42/6 2/16/6
352
+ f 36/11/5 27/10/4 22/38/18 33/37/17
353
+ f 73/43/6 70/64/6 2/16/6
354
+ f 57/7/1 64/33/1 1/3/1
355
+ f 70/64/6 75/59/6 2/16/6
356
+ f 34/58/6 12/17/6 2/16/6
357
+ f 76/47/24 87/46/23 91/62/32 81/61/31
358
+ f 55/28/6 52/63/6 2/16/6
359
+ f 22/65/18 27/66/4 29/67/33 23/68/34
360
+ f 23/68/34 29/67/33 28/41/35 21/13/36
361
+ f 27/66/4 24/69/3 26/70/37 29/67/33
362
+ f 29/67/33 26/70/37 25/24/38 28/41/35
363
+ f 31/71/2 36/72/5 38/73/39 32/74/40
364
+ f 32/74/40 38/73/39 37/15/41 30/14/42
365
+ f 36/72/5 33/75/17 35/76/43 38/73/39
366
+ f 38/73/39 35/76/43 34/58/44 37/15/41
367
+ f 58/77/22 63/78/29 65/79/45 59/80/46
368
+ f 59/80/46 65/79/45 64/33/47 57/7/48
369
+ f 63/78/29 60/81/25 62/82/49 65/79/45
370
+ f 65/79/45 62/82/49 61/4/50 64/33/47
371
+ f 67/83/26 72/84/30 74/85/51 68/86/52
372
+ f 68/86/52 74/85/51 73/43/53 66/42/54
373
+ f 72/84/30 69/87/21 71/88/55 74/85/51
374
+ f 74/85/51 71/88/55 70/64/56 73/43/53
375
+ f 10/19/57 3/2/58 5/89/59 11/90/60
376
+ f 11/90/60 5/89/59 4/91/16 9/92/11
377
+ f 16/60/61 18/18/62 20/93/63 17/94/64
378
+ f 17/94/64 20/93/63 19/95/12 15/96/15
379
+ f 7/12/65 10/19/57 11/90/60 8/97/66
380
+ f 8/97/66 11/90/60 9/92/11 6/98/14
381
+ f 18/18/62 12/17/67 14/99/68 20/93/63
382
+ f 20/93/63 14/99/68 13/100/13 19/95/12
383
+ f 55/28/69 48/27/70 50/101/71 56/102/72
384
+ f 56/102/72 50/101/71 49/103/7 54/104/10
385
+ f 43/1/73 45/34/74 47/105/75 44/106/76
386
+ f 44/106/76 47/105/75 46/107/9 42/108/8
387
+ f 52/63/77 55/28/69 56/102/72 53/109/78
388
+ f 53/109/78 56/102/72 54/104/10 51/110/27
389
+ f 45/34/74 39/5/79 41/111/80 47/105/75
390
+ f 47/105/75 41/111/80 40/112/28 46/107/9
391
+ f 82/52/81 75/59/82 77/113/83 83/114/84
392
+ f 83/114/84 77/113/83 76/115/24 81/116/31
393
+ f 88/6/85 90/26/86 92/117/87 89/118/88
394
+ f 89/118/88 92/117/87 91/119/32 87/120/23
395
+ f 79/53/89 82/52/81 83/114/84 80/121/90
396
+ f 80/121/90 83/114/84 81/116/31 78/122/19
397
+ f 90/26/86 84/25/91 86/123/92 92/117/87
398
+ f 92/117/87 86/123/92 85/124/20 91/119/32
399
+ f 6/98/14 22/65/18 23/68/34 8/97/66
400
+ f 8/97/66 23/68/34 21/13/36 7/12/65
401
+ f 60/81/25 40/112/28 41/111/80 62/82/49
402
+ f 62/82/49 41/111/80 39/5/79 61/4/50
403
+ f 42/108/8 4/91/16 5/89/59 44/106/76
404
+ f 44/106/76 5/89/59 3/2/58 43/1/73
405
+ f 12/17/67 34/58/44 35/76/43 14/99/68
406
+ f 14/99/68 35/76/43 33/75/17 13/100/13
407
+ f 48/27/70 16/60/61 17/94/64 50/101/71
408
+ f 50/101/71 17/94/64 15/96/15 49/103/7
409
+ f 51/110/27 67/83/26 68/86/52 53/109/78
410
+ f 53/109/78 68/86/52 66/42/54 52/63/77
411
+ f 84/25/91 25/24/38 26/70/37 86/123/92
412
+ f 86/123/92 26/70/37 24/69/3 85/124/20
413
+ f 30/14/42 79/53/89 80/121/90 32/74/40
414
+ f 32/74/40 80/121/90 78/122/19 31/71/2
415
+ f 87/120/23 58/77/22 59/80/46 89/118/88
416
+ f 89/118/88 59/80/46 57/7/48 88/6/85
417
+ f 69/87/21 76/115/24 77/113/83 71/88/55
418
+ f 71/88/55 77/113/83 75/59/82 70/64/56
419
+ f 4/36/16 15/35/15 19/30/12 9/29/11
envs/assets/blocks/red_moon.urdf ADDED
@@ -0,0 +1,30 @@
+ <?xml version="1.0" ?>
+ <robot name="red_moon.urdf">
+   <link name="baseLink">
+     <contact>
+       <lateral_friction value="0.5"/>
+       <rolling_friction value="0.0001"/>
+       <inertia_scaling value="1.0"/>
+     </contact>
+     <inertial>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <mass value=".01"/>
+       <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
+     </inertial>
+     <visual>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <geometry>
+         <mesh filename="moon.obj" scale="1.0 1.0 1.0"/>
+       </geometry>
+       <material name="red">
+         <color rgba="1 0.4 0.4 1"/>
+       </material>
+     </visual>
+     <collision>
+       <origin rpy="0 0 0" xyz="0 0 0"/>
+       <geometry>
+         <mesh filename="moon.obj" scale="1.0 1.0 1.0"/>
+       </geometry>
+     </collision>
+   </link>
+ </robot>
envs/assets/blocks/star.obj ADDED
@@ -0,0 +1,689 @@
1
+ # Blender v2.92.0 OBJ File: ''
2
+ # www.blender.org
3
+ mtllib star.mtl
4
+ o star_green_block_star0_block
5
+ v -0.000030 0.000000 0.001549
6
+ v -0.000030 0.038100 0.001549
7
+ v 0.006429 0.000000 -0.009092
8
+ v 0.007380 0.002032 -0.010659
9
+ v 0.007101 0.000595 -0.010200
10
+ v 0.009909 0.002032 -0.008940
11
+ v 0.008636 0.000000 -0.007597
12
+ v 0.009536 0.000595 -0.008547
13
+ v 0.008636 0.038100 -0.007597
14
+ v 0.009909 0.036068 -0.008940
15
+ v 0.009536 0.037505 -0.008547
16
+ v 0.007380 0.036068 -0.010659
17
+ v 0.006429 0.038100 -0.009092
18
+ v 0.007101 0.037505 -0.010200
19
+ v -0.006436 0.038100 -0.009091
20
+ v -0.007380 0.036068 -0.010659
21
+ v -0.007103 0.037505 -0.010200
22
+ v -0.009909 0.036068 -0.008940
23
+ v -0.008642 0.038100 -0.007595
24
+ v -0.009538 0.037505 -0.008546
25
+ v -0.008642 0.000000 -0.007595
26
+ v -0.009909 0.002032 -0.008940
27
+ v -0.009538 0.000595 -0.008546
28
+ v -0.007380 0.002032 -0.010659
29
+ v -0.006436 0.000000 -0.009091
30
+ v -0.007103 0.000595 -0.010200
31
+ v -0.011672 0.000000 0.006854
32
+ v -0.013354 0.002032 0.007620
33
+ v -0.012861 0.000595 0.007396
34
+ v -0.014336 0.002032 0.004714
35
+ v -0.012526 0.000000 0.004313
36
+ v -0.013806 0.000595 0.004596
37
+ v -0.012526 0.038100 0.004313
38
+ v -0.014336 0.036068 0.004714
39
+ v -0.013806 0.037505 0.004596
40
+ v -0.013354 0.036068 0.007620
41
+ v -0.011672 0.038100 0.006854
42
+ v -0.012861 0.037505 0.007396
43
+ v 0.012521 0.000000 0.004314
44
+ v 0.014336 0.002032 0.004714
45
+ v 0.013805 0.000595 0.004597
46
+ v 0.013354 0.002032 0.007620
47
+ v 0.011668 0.000000 0.006855
48
+ v 0.012860 0.000595 0.007396
49
+ v 0.011668 0.038100 0.006855
50
+ v 0.013354 0.036068 0.007620
51
+ v 0.012860 0.037505 0.007396
52
+ v 0.014336 0.036068 0.004714
53
+ v 0.012521 0.038100 0.004314
54
+ v 0.013805 0.037505 0.004597
55
+ v -0.010002 0.000000 0.018088
56
+ v -0.011405 0.002032 0.020414
57
+ v -0.010994 0.000595 0.019733
58
+ v -0.013985 0.002032 0.018583
59
+ v -0.012256 0.000000 0.016473
60
+ v -0.013478 0.000595 0.017965
61
+ v -0.013300 0.002032 0.020351
62
+ v -0.011669 0.000000 0.018040
63
+ v -0.012822 0.000595 0.019674
64
+ v -0.012256 0.038100 0.016473
65
+ v -0.013985 0.036068 0.018583
66
+ v -0.013478 0.037505 0.017965
67
+ v -0.011405 0.036068 0.020414
68
+ v -0.010002 0.038100 0.018088
69
+ v -0.010994 0.037505 0.019733
70
+ v -0.011669 0.038100 0.018040
71
+ v -0.013300 0.036068 0.020351
72
+ v -0.012822 0.037505 0.019674
73
+ v -0.018964 0.000000 -0.003158
74
+ v -0.021672 0.002032 -0.003831
75
+ v -0.020879 0.000595 -0.003634
76
+ v -0.020696 0.002032 -0.006698
77
+ v -0.018097 0.000000 -0.005661
78
+ v -0.019935 0.000595 -0.006394
79
+ v -0.019426 0.000000 -0.004715
80
+ v -0.022190 0.002032 -0.005607
81
+ v -0.021380 0.000595 -0.005346
82
+ v -0.018097 0.038100 -0.005661
83
+ v -0.020696 0.036068 -0.006698
84
+ v -0.019935 0.037505 -0.006394
85
+ v -0.021672 0.036068 -0.003831
86
+ v -0.018964 0.038100 -0.003158
87
+ v -0.020879 0.037505 -0.003634
88
+ v -0.022190 0.036068 -0.005607
89
+ v -0.019426 0.038100 -0.004715
90
+ v -0.021380 0.037505 -0.005346
91
+ v 0.012251 0.000000 0.016476
92
+ v 0.013985 0.002032 0.018583
93
+ v 0.013477 0.000595 0.017966
94
+ v 0.011405 0.002032 0.020414
95
+ v 0.009996 0.000000 0.018089
96
+ v 0.010992 0.000595 0.019733
97
+ v 0.011663 0.000000 0.018043
98
+ v 0.013300 0.002032 0.020351
99
+ v 0.012821 0.000595 0.019675
100
+ v 0.009996 0.038100 0.018089
101
+ v 0.011405 0.036068 0.020414
102
+ v 0.010992 0.037505 0.019733
103
+ v 0.013985 0.036068 0.018583
104
+ v 0.012251 0.038100 0.016476
105
+ v 0.013477 0.037505 0.017966
106
+ v 0.013300 0.036068 0.020351
107
+ v 0.011663 0.038100 0.018043
108
+ v 0.012821 0.037505 0.019675
109
+ v 0.018094 0.000000 -0.005663
110
+ v 0.020696 0.002032 -0.006698
111
+ v 0.019934 0.000595 -0.006395
112
+ v 0.021672 0.002032 -0.003831
113
+ v 0.018961 0.000000 -0.003159
114
+ v 0.020878 0.000595 -0.003635
115
+ v 0.019423 0.000000 -0.004716
116
+ v 0.022190 0.002032 -0.005607
117
+ v 0.021380 0.000595 -0.005346
118
+ v 0.018961 0.038100 -0.003159
119
+ v 0.021672 0.036068 -0.003831
120
+ v 0.020878 0.037505 -0.003635
121
+ v 0.020696 0.036068 -0.006698
122
+ v 0.018094 0.038100 -0.005663
123
+ v 0.019934 0.037505 -0.006395
124
+ v 0.022190 0.036068 -0.005607
125
+ v 0.019423 0.038100 -0.004716
126
+ v 0.021380 0.037505 -0.005346
127
+ v 0.001426 0.038100 -0.016730
128
+ v 0.001638 0.036068 -0.019383
129
+ v 0.001576 0.037505 -0.018606
130
+ v -0.001638 0.036068 -0.019383
131
+ v -0.001434 0.038100 -0.016729
132
+ v -0.001578 0.037505 -0.018606
133
+ v 0.000000 0.036068 -0.020414
134
+ v -0.000004 0.038100 -0.017644
135
+ v -0.000001 0.037505 -0.019603
136
+ v -0.001434 0.000000 -0.016729
137
+ v -0.001638 0.002032 -0.019383
138
+ v -0.001578 0.000595 -0.018606
139
+ v 0.001638 0.002032 -0.019383
140
+ v 0.001426 0.000000 -0.016730
141
+ v 0.001576 0.000595 -0.018606
142
+ v -0.000004 0.000000 -0.017644
143
+ v 0.000000 0.002032 -0.020414
144
+ v -0.000001 0.000595 -0.019603
145
+ v 0.001733 0.000000 0.015081
146
+ v 0.001984 0.002032 0.017005
147
+ v 0.001910 0.000595 0.016441
148
+ v -0.001984 0.002032 0.017005
149
+ v -0.001741 0.000000 0.015080
150
+ v -0.001912 0.000595 0.016441
151
+ v -0.001741 0.038100 0.015080
152
+ v -0.001984 0.036068 0.017005
153
+ v -0.001912 0.037505 0.016441
154
+ v 0.001984 0.036068 0.017005
155
+ v 0.001733 0.038100 0.015081
156
+ v 0.001910 0.037505 0.016441
157
+ vt 0.884326 0.143054
158
+ vt 0.881637 0.147199
159
+ vt 0.874485 0.145636
160
+ vt 0.870501 0.139220
161
+ vt 0.873534 0.134807
162
+ vt 0.837942 0.112414
163
+ vt 0.837942 0.131533
164
+ vt 0.832314 0.131533
165
+ vt 0.832314 0.112414
166
+ vt 0.883572 0.123603
167
+ vt 0.883611 0.122735
168
+ vt 0.892897 0.129050
169
+ vt 0.881240 0.148682
170
+ vt 0.881539 0.153913
171
+ vt 0.898370 0.134009
172
+ vt 0.897030 0.139070
173
+ vt 0.873770 0.153188
174
+ vt 0.868788 0.154800
175
+ vt 0.903052 0.129620
176
+ vt 0.899208 0.132723
177
+ vt 0.862147 0.112414
178
+ vt 0.862147 0.131533
179
+ vt 0.855821 0.131533
180
+ vt 0.855821 0.112414
181
+ vt 0.830184 0.112414
182
+ vt 0.830184 0.131533
183
+ vt 0.824015 0.131533
184
+ vt 0.824015 0.112414
185
+ vt 0.874942 0.134807
186
+ vt 0.877932 0.139104
187
+ vt 0.880430 0.154782
188
+ vt 0.875317 0.153178
189
+ vt 0.866663 0.147297
190
+ vt 0.863946 0.143378
191
+ vt 0.884646 0.135960
192
+ vt 0.883837 0.135640
193
+ vt 0.874236 0.134337
194
+ vt 0.813888 0.131533
195
+ vt 0.813888 0.112414
196
+ vt 0.814927 0.112414
197
+ vt 0.814927 0.131533
198
+ vt 0.881273 0.154719
199
+ vt 0.863186 0.112414
200
+ vt 0.863186 0.131533
201
+ vt 0.867919 0.154747
202
+ vt 0.867585 0.153944
203
+ vt 0.845799 0.112414
204
+ vt 0.845799 0.131533
205
+ vt 0.840171 0.131533
206
+ vt 0.840171 0.112414
207
+ vt 0.903484 0.128858
208
+ vt 0.883832 0.141627
209
+ vt 0.884500 0.142196
210
+ vt 0.864368 0.141965
211
+ vt 0.869278 0.140166
212
+ vt 0.854097 0.112414
213
+ vt 0.854097 0.131533
214
+ vt 0.847929 0.131533
215
+ vt 0.847929 0.112414
216
+ vt 0.896527 0.139753
217
+ vt 0.867312 0.149178
218
+ vt 0.822292 0.112414
219
+ vt 0.822292 0.131533
220
+ vt 0.872132 0.131533
221
+ vt 0.872132 0.112414
222
+ vt 0.877999 0.112414
223
+ vt 0.877999 0.131533
224
+ vt 0.864225 0.131533
225
+ vt 0.864225 0.112414
226
+ vt 0.870414 0.112414
227
+ vt 0.870414 0.131533
228
+ vt 0.815966 0.112414
229
+ vt 0.815966 0.131533
230
+ vt 0.891348 0.136441
231
+ vt 0.889875 0.135971
232
+ vt 0.846864 0.131533
233
+ vt 0.846864 0.112414
234
+ vt 0.831249 0.131533
235
+ vt 0.831249 0.112414
236
+ vt 0.879159 0.140026
237
+ vt 0.896695 0.118860
238
+ vt 0.898202 0.123877
239
+ vt 0.895355 0.118422
240
+ vt 0.896169 0.118193
241
+ vt 0.863717 0.142542
242
+ vt 0.889642 0.122199
243
+ vt 0.891100 0.121679
244
+ vt 0.899083 0.125135
245
+ vt 0.805981 0.112414
246
+ vt 0.805981 0.131533
247
+ vt 0.800114 0.131533
248
+ vt 0.800114 0.112414
249
+ vt 0.807699 0.112414
250
+ vt 0.807699 0.131533
251
+ vt 0.884410 0.122387
252
+ vt 0.895707 0.139551
253
+ vt 0.879086 0.112414
254
+ vt 0.879086 0.131533
255
+ vt 0.883766 0.134776
256
+ vt 0.884979 0.130164
257
+ vt 0.903027 0.128111
258
+ vt 0.799027 0.131533
259
+ vt 0.799027 0.112414
260
+ vt 0.884945 0.128175
261
+ vt 0.882437 0.147351
262
+ vt 0.881976 0.149022
263
+ vt 0.883261 0.147612
264
+ vt 0.882837 0.149234
265
+ vt 0.898778 0.123306
266
+ vt 0.899783 0.124719
267
+ vt 0.899515 0.122813
268
+ vt 0.900468 0.124192
269
+ vt 0.899922 0.133113
270
+ vt 0.898965 0.134558
271
+ vt 0.900624 0.133616
272
+ vt 0.899718 0.135026
273
+ vt 0.878303 0.138385
274
+ vt 0.879696 0.139416
275
+ vt 0.878850 0.137688
276
+ vt 0.880184 0.138702
277
+ vt 0.889274 0.121462
278
+ vt 0.890920 0.120885
279
+ vt 0.889004 0.120636
280
+ vt 0.890592 0.120060
281
+ vt 0.875420 0.153985
282
+ vt 0.873676 0.154006
283
+ vt 0.875394 0.154873
284
+ vt 0.873704 0.154874
285
+ vt 0.891195 0.137238
286
+ vt 0.889532 0.136718
287
+ vt 0.890894 0.138073
288
+ vt 0.889289 0.137551
289
+ vt 0.868700 0.139582
290
+ vt 0.870085 0.138524
291
+ vt 0.868187 0.138882
292
+ vt 0.869517 0.137842
293
+ vt 0.862942 0.143128
294
+ vt 0.862743 0.142252
295
+ vt 0.862000 0.143160
296
+ vt 0.861880 0.141993
297
+ vt 0.883020 0.136246
298
+ vt 0.882908 0.135355
299
+ vt 0.882298 0.136785
300
+ vt 0.882012 0.135647
301
+ vt 0.863391 0.141636
302
+ vt 0.862626 0.141085
303
+ vt 0.883840 0.136603
304
+ vt 0.883310 0.137383
305
+ vt 0.897417 0.140057
306
+ vt 0.896867 0.140740
307
+ vt 0.898025 0.140801
308
+ vt 0.897174 0.141610
309
+ vt 0.874217 0.133293
310
+ vt 0.874968 0.133747
311
+ vt 0.874208 0.132371
312
+ vt 0.875284 0.132840
313
+ vt 0.896006 0.140567
314
+ vt 0.896002 0.141529
315
+ vt 0.873468 0.133750
316
+ vt 0.873134 0.132848
317
+ vt 0.895621 0.117396
318
+ vt 0.896476 0.117194
319
+ vt 0.895584 0.116434
320
+ vt 0.896753 0.116313
321
+ vt 0.881903 0.155553
322
+ vt 0.881031 0.155656
323
+ vt 0.882465 0.156286
324
+ vt 0.881325 0.156572
325
+ vt 0.897048 0.117859
326
+ vt 0.897631 0.117095
327
+ vt 0.882214 0.154732
328
+ vt 0.883023 0.155251
329
+ vt 0.882691 0.123051
330
+ vt 0.882773 0.122155
331
+ vt 0.881783 0.122789
332
+ vt 0.882031 0.121638
333
+ vt 0.867329 0.155578
334
+ vt 0.866946 0.154764
335
+ vt 0.866810 0.156317
336
+ vt 0.866182 0.155321
337
+ vt 0.883582 0.121769
338
+ vt 0.883025 0.121005
339
+ vt 0.868222 0.155664
340
+ vt 0.867960 0.156572
341
+ vt 0.904030 0.128049
342
+ vt 0.904478 0.128840
343
+ vt 0.904906 0.127731
344
+ vt 0.905368 0.128825
345
+ vt 0.885439 0.141872
346
+ vt 0.885289 0.142769
347
+ vt 0.886281 0.141582
348
+ vt 0.886221 0.142768
349
+ vt 0.904056 0.129646
350
+ vt 0.904943 0.129934
351
+ vt 0.884767 0.141260
352
+ vt 0.885502 0.140686
353
+ vt 0.865721 0.149838
354
+ vt 0.865007 0.147749
355
+ vt 0.865827 0.147457
356
+ vt 0.866565 0.149562
357
+ vt 0.883268 0.130306
358
+ vt 0.883225 0.128099
359
+ vt 0.884112 0.128070
360
+ vt 0.884138 0.130300
361
+ vn 0.0000 -1.0000 0.0000
362
+ vn 0.1760 -0.1821 0.9674
363
+ vn 0.1760 0.1821 0.9674
364
+ vn 0.1373 0.1995 0.9702
365
+ vn 0.1373 -0.1995 0.9702
366
+ vn -0.0000 1.0000 -0.0000
367
+ vn 0.8706 -0.1992 0.4499
368
+ vn 0.8706 0.1992 0.4499
369
+ vn 0.8522 0.1751 0.4930
370
+ vn 0.8522 -0.1751 0.4930
371
+ vn -0.9656 -0.1964 0.1706
372
+ vn -0.9656 0.1964 0.1706
373
+ vn -0.9767 0.1745 0.1252
374
+ vn -0.9767 -0.1745 0.1252
375
+ vn -0.4160 0.1947 -0.8883
376
+ vn -0.4160 -0.1947 -0.8883
377
+ vn -0.9185 -0.2428 -0.3119
378
+ vn -0.9185 0.2428 -0.3119
379
+ vn 0.9185 -0.2429 -0.3119
380
+ vn 0.9185 0.2429 -0.3119
381
+ vn -0.1372 -0.1993 0.9703
382
+ vn -0.1372 0.1993 0.9703
383
+ vn -0.1761 0.1819 0.9674
384
+ vn -0.1761 -0.1819 0.9674
385
+ vn 0.9766 -0.1749 0.1252
386
+ vn 0.9766 0.1749 0.1252
387
+ vn 0.9655 0.1968 0.1705
388
+ vn 0.9655 -0.1968 0.1705
389
+ vn -0.8523 -0.1748 0.4930
390
+ vn -0.8523 0.1748 0.4930
391
+ vn 0.7051 0.1756 -0.6870
392
+ vn 0.7051 -0.1756 -0.6870
393
+ vn 0.6736 -0.1964 -0.7125
394
+ vn 0.6736 0.1964 -0.7125
395
+ vn 0.4161 0.1947 -0.8883
396
+ vn 0.4161 -0.1947 -0.8883
397
+ vn 0.3770 -0.1709 -0.9103
398
+ vn 0.3770 0.1709 -0.9103
399
+ vn -0.8706 -0.1990 0.4499
400
+ vn -0.8706 0.1990 0.4499
401
+ vn 0.5621 0.2389 0.7918
402
+ vn 0.5621 -0.2389 0.7918
403
+ vn -0.5620 0.2388 0.7919
404
+ vn -0.5620 -0.2388 0.7919
405
+ vn -0.7052 -0.1752 -0.6870
406
+ vn -0.7052 0.1752 -0.6870
407
+ vn -0.6736 0.1961 -0.7126
408
+ vn -0.6736 -0.1961 -0.7126
409
+ vn -0.3770 -0.1709 -0.9103
410
+ vn -0.3770 0.1709 -0.9103
411
+ vn 0.0001 -0.2357 -0.9718
412
+ vn 0.0001 0.2357 -0.9718
413
+ vn 0.0822 -0.9807 -0.1776
414
+ vn 0.1339 -0.9813 -0.1382
415
+ vn 0.5148 -0.6952 -0.5016
416
+ vn 0.2776 -0.6869 -0.6716
417
+ vn 0.1339 0.9813 -0.1382
418
+ vn 0.0822 0.9807 -0.1776
419
+ vn 0.2776 0.6869 -0.6716
420
+ vn 0.5148 0.6952 -0.5016
421
+ vn -0.0822 0.9807 -0.1777
422
+ vn -0.1341 0.9812 -0.1385
423
+ vn -0.5153 0.6946 -0.5020
424
+ vn -0.2777 0.6868 -0.6717
425
+ vn -0.1341 -0.9812 -0.1385
426
+ vn -0.0822 -0.9807 -0.1777
427
+ vn -0.2777 -0.6868 -0.6717
428
+ vn -0.5153 -0.6946 -0.5020
429
+ vn 0.1701 0.9813 0.0905
430
+ vn 0.1906 0.9812 0.0307
431
+ vn 0.7146 0.6935 0.0914
432
+ vn 0.6235 0.6939 0.3603
433
+ vn 0.1906 -0.9812 0.0307
434
+ vn 0.1701 -0.9813 0.0905
435
+ vn 0.6235 -0.6939 0.3603
436
+ vn 0.7146 -0.6935 0.0914
437
+ vn -0.1911 0.9811 0.0308
438
+ vn -0.1703 0.9812 0.0907
439
+ vn -0.6239 0.6934 0.3605
440
+ vn -0.7153 0.6928 0.0914
441
+ vn -0.1703 -0.9812 0.0907
442
+ vn -0.1911 -0.9811 0.0308
443
+ vn -0.7153 -0.6928 0.0914
444
+ vn -0.6239 -0.6934 0.3605
445
+ vn -0.1140 -0.9807 0.1590
446
+ vn 0.0574 -0.9732 0.2225
447
+ vn 0.1230 -0.6920 0.7113
448
+ vn -0.3997 -0.7242 0.5619
449
+ vn 0.0574 0.9732 0.2225
450
+ vn -0.1140 0.9807 0.1590
451
+ vn -0.3997 0.7242 0.5619
452
+ vn 0.1230 0.6920 0.7113
453
+ vn -0.2413 -0.9704 0.0099
454
+ vn -0.7222 -0.6844 0.1004
455
+ vn -0.2413 0.9704 0.0099
456
+ vn -0.7222 0.6844 0.1004
457
+ vn -0.1821 0.9811 -0.0651
458
+ vn -0.0726 0.9686 -0.2377
459
+ vn -0.2844 0.6806 -0.6752
460
+ vn -0.6483 0.7279 -0.2233
461
+ vn -0.0726 -0.9686 -0.2377
462
+ vn -0.1821 -0.9811 -0.0651
463
+ vn -0.6483 -0.7279 -0.2233
464
+ vn -0.2844 -0.6806 -0.6752
465
+ vn -0.1966 0.9707 0.1379
466
+ vn -0.6321 0.6875 0.3576
467
+ vn -0.1966 -0.9707 0.1379
468
+ vn -0.6321 -0.6875 0.3576
469
+ vn 0.1820 0.9811 -0.0652
470
+ vn 0.1964 0.9708 0.1377
471
+ vn 0.6318 0.6878 0.3574
472
+ vn 0.6481 0.7280 -0.2234
473
+ vn 0.1964 -0.9708 0.1377
474
+ vn 0.1820 -0.9811 -0.0652
475
+ vn 0.6481 -0.7280 -0.2234
476
+ vn 0.6318 -0.6878 0.3574
477
+ vn 0.0726 0.9686 -0.2378
478
+ vn 0.2843 0.6806 -0.6752
479
+ vn 0.0726 -0.9686 -0.2378
480
+ vn 0.2843 -0.6806 -0.6752
481
+ vn 0.1137 0.9807 0.1592
482
+ vn -0.0576 0.9731 0.2229
483
+ vn -0.1231 0.6917 0.7116
484
+ vn 0.3994 0.7243 0.5620
485
+ vn -0.0576 -0.9731 0.2229
486
+ vn 0.1137 -0.9807 0.1592
487
+ vn 0.3994 -0.7243 0.5620
488
+ vn -0.1231 -0.6917 0.7116
489
+ vn 0.2406 0.9706 0.0100
490
+ vn 0.7217 0.6849 0.1003
491
+ vn 0.2407 -0.9706 0.0100
492
+ vn 0.7217 -0.6849 0.1003
493
+ vn -0.0002 0.9803 -0.1975
494
+ vn 0.1845 0.9711 -0.1512
495
+ vn 0.5181 0.6852 -0.5119
496
+ vn -0.0002 0.7214 -0.6926
497
+ vn 0.1845 -0.9711 -0.1512
498
+ vn -0.0002 -0.9803 -0.1975
499
+ vn -0.0002 -0.7214 -0.6926
500
+ vn 0.5181 -0.6852 -0.5119
501
+ vn -0.1850 0.9710 -0.1515
502
+ vn -0.5185 0.6847 -0.5122
503
+ vn -0.1850 -0.9710 -0.1515
504
+ vn -0.5185 -0.6847 -0.5122
505
+ vn 0.1263 -0.7034 0.6995
506
+ vn -0.1265 -0.7031 0.6997
507
+ vn 0.0277 -0.9822 0.1859
508
+ vn -0.0277 -0.9821 0.1861
509
+ vn -0.1265 0.7031 0.6997
510
+ vn 0.1263 0.7034 0.6995
511
+ vn -0.0277 0.9821 0.1861
512
+ vn 0.0277 0.9822 0.1859
513
+ usemtl toybox.002
514
+ s 1
515
+ f 136/1/1 3/2/1 1/3/1
516
+ f 31/4/1 69/5/1 1/3/1
517
+ f 144/6/2 148/7/3 63/8/4 52/9/5
518
+ f 96/10/6 103/11/6 2/12/6
519
+ f 7/13/1 105/14/1 1/3/1
520
+ f 19/15/6 78/16/6 2/12/6
521
+ f 43/17/1 87/18/1 1/3/1
522
+ f 127/19/6 15/20/6 2/12/6
523
+ f 15/20/6 19/15/6 2/12/6
524
+ f 108/21/7 115/22/8 48/23/9 40/24/10
525
+ f 54/25/11 61/26/12 36/27/13 28/28/14
526
+ f 73/29/1 21/30/1 1/3/1
527
+ f 109/31/1 39/32/1 1/3/1
528
+ f 1/3/1 145/33/1 51/34/1
529
+ f 60/35/6 66/36/6 2/12/6
530
+ f 75/37/1 73/29/1 1/3/1
531
+ f 79/38/15 72/39/16 76/40/17 84/41/18
532
+ f 111/42/1 109/31/1 1/3/1
533
+ f 115/22/8 108/21/7 112/43/19 120/44/20
534
+ f 93/45/1 91/46/1 1/3/1
535
+ f 90/47/21 97/48/22 150/49/23 142/50/24
536
+ f 130/51/6 127/19/6 2/12/6
537
+ f 132/52/1 138/53/1 1/3/1
538
+ f 55/54/1 27/55/1 1/3/1
539
+ f 42/56/25 46/57/26 99/58/27 88/59/28
540
+ f 78/16/6 85/60/6 2/12/6
541
+ f 91/46/1 141/61/1 1/3/1
542
+ f 69/5/1 75/37/1 1/3/1
543
+ f 30/62/29 28/28/14 36/27/13 34/63/30
544
+ f 12/64/31 4/65/32 135/66/33 124/67/34
545
+ f 117/68/35 106/69/36 6/70/37 10/71/38
546
+ f 84/41/18 76/40/17 70/72/39 81/73/40
547
+ f 33/74/6 37/75/6 2/12/6
548
+ f 42/56/25 40/24/10 48/23/9 46/57/26
549
+ f 102/76/41 94/77/42 88/59/28 99/58/27
550
+ f 31/4/1 1/3/1 27/55/1
551
+ f 52/9/5 63/8/4 67/78/43 57/79/44
552
+ f 25/80/1 132/52/1 1/3/1
553
+ f 118/81/6 9/82/6 2/12/6
554
+ f 114/83/6 121/84/6 2/12/6
555
+ f 58/85/1 55/54/1 1/3/1
556
+ f 148/7/3 144/6/2 142/50/24 150/49/23
557
+ f 57/79/44 67/78/43 61/26/12 54/25/11
558
+ f 45/86/6 49/87/6 2/12/6
559
+ f 120/44/20 112/43/19 106/69/36 117/68/35
560
+ f 138/53/1 136/1/1 1/3/1
561
+ f 9/82/6 13/88/6 2/12/6
562
+ f 30/62/29 34/63/30 81/73/40 70/72/39
563
+ f 39/32/1 43/17/1 1/3/1
564
+ f 51/34/1 58/85/1 1/3/1
565
+ f 24/89/45 16/90/46 126/91/47 133/92/48
566
+ f 24/89/45 22/93/49 18/94/50 16/90/46
567
+ f 87/18/1 93/45/1 1/3/1
568
+ f 103/11/6 100/95/6 2/12/6
569
+ f 105/14/1 111/42/1 1/3/1
570
+ f 82/96/6 33/74/6 2/12/6
571
+ f 37/75/6 60/35/6 2/12/6
572
+ f 72/39/16 79/38/15 18/94/50 22/93/49
573
+ f 121/84/6 118/81/6 2/12/6
574
+ f 100/95/6 45/86/6 2/12/6
575
+ f 124/67/34 135/66/33 139/97/51 129/98/52
576
+ f 25/80/1 1/3/1 21/30/1
577
+ f 64/99/6 147/100/6 2/12/6
578
+ f 123/101/6 130/51/6 2/12/6
579
+ f 66/36/6 64/99/6 2/12/6
580
+ f 129/102/52 139/103/51 133/92/48 126/91/47
581
+ f 49/87/6 114/83/6 2/12/6
582
+ f 13/88/6 123/101/6 2/12/6
583
+ f 85/60/6 82/96/6 2/12/6
584
+ f 2/12/6 151/104/6 96/10/6
585
+ f 3/2/1 7/13/1 1/3/1
586
+ f 147/100/6 151/104/6 2/12/6
587
+ f 141/61/1 145/33/1 1/3/1
588
+ f 97/48/22 90/47/21 94/77/42 102/76/41
589
+ f 7/13/53 3/2/54 5/105/55 8/106/56
590
+ f 8/106/56 5/105/55 4/107/32 6/108/37
591
+ f 13/88/57 9/82/58 11/109/59 14/110/60
592
+ f 14/110/60 11/109/59 10/111/38 12/112/31
593
+ f 19/15/61 15/20/62 17/113/63 20/114/64
594
+ f 20/114/64 17/113/63 16/115/46 18/116/50
595
+ f 25/80/65 21/30/66 23/117/67 26/118/68
596
+ f 26/118/68 23/117/67 22/119/49 24/120/45
597
+ f 49/87/69 45/86/70 47/121/71 50/122/72
598
+ f 50/122/72 47/121/71 46/123/26 48/124/9
599
+ f 43/17/73 39/32/74 41/125/75 44/126/76
600
+ f 44/126/76 41/125/75 40/127/10 42/128/25
601
+ f 37/75/77 33/74/78 35/129/79 38/130/80
602
+ f 38/130/80 35/129/79 34/131/30 36/132/13
603
+ f 31/4/81 27/55/82 29/133/83 32/134/84
604
+ f 32/134/84 29/133/83 28/135/14 30/136/29
605
+ f 58/85/85 51/34/86 53/137/87 59/138/88
606
+ f 59/138/88 53/137/87 52/139/5 57/140/44
607
+ f 64/99/89 66/36/90 68/141/91 65/142/92
608
+ f 65/142/92 68/141/91 67/143/43 63/144/4
609
+ f 55/54/93 58/85/85 59/138/88 56/145/94
610
+ f 56/145/94 59/138/88 57/140/44 54/146/11
611
+ f 66/36/90 60/35/95 62/147/96 68/141/91
612
+ f 68/141/91 62/147/96 61/148/12 67/143/43
613
+ f 85/60/97 78/16/98 80/149/99 86/150/100
614
+ f 86/150/100 80/149/99 79/151/15 84/152/18
615
+ f 73/29/101 75/37/102 77/153/103 74/154/104
616
+ f 74/154/104 77/153/103 76/155/17 72/156/16
617
+ f 82/96/105 85/60/97 86/150/100 83/157/106
618
+ f 83/157/106 86/150/100 84/152/18 81/158/40
619
+ f 75/37/102 69/5/107 71/159/108 77/153/103
620
+ f 77/153/103 71/159/108 70/160/39 76/155/17
621
+ f 121/84/109 114/83/110 116/161/111 122/162/112
622
+ f 122/162/112 116/161/111 115/163/8 120/164/20
623
+ f 109/31/113 111/42/114 113/165/115 110/166/116
624
+ f 110/166/116 113/165/115 112/167/19 108/168/7
625
+ f 118/81/117 121/84/109 122/162/112 119/169/118
626
+ f 119/169/118 122/162/112 120/164/20 117/170/35
627
+ f 111/42/114 105/14/119 107/171/120 113/165/115
628
+ f 113/165/115 107/171/120 106/172/36 112/167/19
629
+ f 103/11/121 96/10/122 98/173/123 104/174/124
630
+ f 104/174/124 98/173/123 97/175/22 102/176/41
631
+ f 91/46/125 93/45/126 95/177/127 92/178/128
632
+ f 92/178/128 95/177/127 94/179/42 90/180/21
633
+ f 100/95/129 103/11/121 104/174/124 101/181/130
634
+ f 101/181/130 104/174/124 102/176/41 99/182/27
635
+ f 93/45/126 87/18/131 89/183/132 95/177/127
636
+ f 95/177/127 89/183/132 88/184/28 94/179/42
637
+ f 130/51/133 123/101/134 125/185/135 131/186/136
638
+ f 131/186/136 125/185/135 124/187/34 129/188/52
639
+ f 136/1/137 138/53/138 140/189/139 137/190/140
640
+ f 137/190/140 140/189/139 139/191/51 135/192/33
641
+ f 127/19/141 130/51/133 131/186/136 128/193/142
642
+ f 128/193/142 131/186/136 129/188/52 126/194/47
643
+ f 138/53/138 132/52/143 134/195/144 140/189/139
644
+ f 140/189/139 134/195/144 133/196/48 139/191/51
645
+ f 27/55/82 55/54/93 56/145/94 29/133/83
646
+ f 29/133/83 56/145/94 54/146/11 28/135/14
647
+ f 60/35/95 37/75/77 38/130/80 62/147/96
648
+ f 62/147/96 38/130/80 36/132/13 61/148/12
649
+ f 69/5/107 31/4/81 32/134/84 71/159/108
650
+ f 71/159/108 32/134/84 30/136/29 70/160/39
651
+ f 33/74/78 82/96/105 83/157/106 35/129/79
652
+ f 35/129/79 83/157/106 81/158/40 34/131/30
653
+ f 21/30/66 73/29/101 74/154/104 23/117/67
654
+ f 23/117/67 74/154/104 72/156/16 22/119/49
655
+ f 78/16/98 19/15/61 20/114/64 80/149/99
656
+ f 80/149/99 20/114/64 18/116/50 79/151/15
657
+ f 87/18/131 43/17/73 44/126/76 89/183/132
658
+ f 89/183/132 44/126/76 42/128/25 88/184/28
659
+ f 45/86/70 100/95/129 101/181/130 47/121/71
660
+ f 47/121/71 101/181/130 99/182/27 46/123/26
661
+ f 9/82/58 118/81/117 119/169/118 11/109/59
662
+ f 11/109/59 119/169/118 117/170/35 10/111/38
663
+ f 105/14/119 7/13/53 8/106/56 107/171/120
664
+ f 107/171/120 8/106/56 6/108/37 106/172/36
665
+ f 39/32/74 109/31/113 110/166/116 41/125/75
666
+ f 41/125/75 110/166/116 108/168/7 40/127/10
667
+ f 114/83/110 49/87/69 50/122/72 116/161/111
668
+ f 116/161/111 50/122/72 48/124/9 115/163/8
669
+ f 123/101/134 13/88/57 14/110/60 125/185/135
670
+ f 125/185/135 14/110/60 12/112/31 124/187/34
671
+ f 3/2/54 136/1/137 137/190/140 5/105/55
672
+ f 5/105/55 137/190/140 135/192/33 4/107/32
673
+ f 132/52/143 25/80/65 26/118/68 134/195/144
674
+ f 134/195/144 26/118/68 24/120/45 133/196/48
675
+ f 15/20/62 127/19/141 128/193/142 17/113/63
676
+ f 17/113/63 128/193/142 126/194/47 16/115/46
677
+ f 142/197/24 144/198/2 146/199/145 143/200/146
678
+ f 143/200/146 146/199/145 145/33/147 141/61/148
679
+ f 148/201/3 150/202/23 152/203/149 149/204/150
680
+ f 149/204/150 152/203/149 151/104/151 147/100/152
681
+ f 141/61/148 91/46/125 92/178/128 143/200/146
682
+ f 143/200/146 92/178/128 90/180/21 142/197/24
683
+ f 96/10/122 151/104/151 152/203/149 98/173/123
684
+ f 98/173/123 152/203/149 150/202/23 97/175/22
685
+ f 51/34/86 145/33/147 146/199/145 53/137/87
686
+ f 53/137/87 146/199/145 144/198/2 52/139/5
687
+ f 147/100/152 64/99/89 65/142/92 149/204/150
688
+ f 149/204/150 65/142/92 63/144/4 148/201/3
689
+ f 12/64/31 10/71/38 6/70/37 4/65/32
envs/assets/blocks/yellow_pentagon.urdf ADDED
@@ -0,0 +1,30 @@
1
+ <?xml version="1.0" ?>
2
+ <robot name="yellow_pentagon.urdf">
3
+ <link name="baseLink">
4
+ <contact>
5
+ <lateral_friction value="0.5"/>
6
+ <rolling_friction value="0.0001"/>
7
+ <inertia_scaling value="1.0"/>
8
+ </contact>
9
+ <inertial>
10
+ <origin rpy="0 0 0" xyz="0 0 0"/>
11
+ <mass value=".01"/>
12
+ <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
13
+ </inertial>
14
+ <visual>
15
+ <origin rpy="0 0 0" xyz="0 0 0"/>
16
+ <geometry>
17
+ <mesh filename="pentagon.obj" scale="1.0 1.0 1.0"/>
18
+ </geometry>
19
+ <material name="yellow">
20
+ <color rgba="1 1 0.4 1"/>
21
+ </material>
22
+ </visual>
23
+ <collision>
24
+ <origin rpy="0 0 0" xyz="0 0 0"/>
25
+ <geometry>
26
+ <mesh filename="pentagon.obj" scale="1.0 1.0 1.0"/>
27
+ </geometry>
28
+ </collision>
29
+ </link>
30
+ </robot>
envs/assets/insert.urdf ADDED
@@ -0,0 +1,66 @@
1
+ <?xml version="1.0" ?>
2
+ <robot name="ell.urdf">
3
+ <link name="baseLink">
4
+ <contact>
5
+ <lateral_friction value="0.3"/>
6
+ <rolling_friction value="0.0001"/>
7
+ <inertia_scaling value="3.0"/>
8
+ </contact>
9
+ <inertial>
10
+ <origin rpy="0 0 0" xyz="0 0 0"/>
11
+ <mass value=".1"/>
12
+ <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
13
+ </inertial>
14
+
15
+ <visual>
16
+ <origin rpy="0 0 0" xyz="0.025 0 0"/>
17
+ <geometry>
18
+ <box size=".08 .03 .04"/>
19
+ </geometry>
20
+ <material name="red">
21
+ <color rgba="0. 0.3412 0.3490 1"/>
22
+ </material>
23
+ </visual>
24
+ <collision>
25
+ <origin rpy="0 0 0" xyz="0.025 0 0"/>
26
+ <geometry>
27
+ <box size=".08 .03 .04"/>
28
+ </geometry>
29
+ </collision>
30
+
31
+ <visual>
32
+ <origin rpy="0 0 0" xyz="0 0.025 0"/>
33
+ <geometry>
34
+ <box size=".03 .08 .0399"/>
35
+ </geometry>
36
+ <material name="dark">
37
+ <color rgba="0. 0.3412 0.3490 1"/>
38
+ </material>
39
+ </visual>
40
+ <collision>
41
+ <origin rpy="0 0 0" xyz="0 0.025 0"/>
42
+ <geometry>
43
+ <box size=".03 .08 .0399"/>
44
+ </geometry>
45
+ </collision>
46
+
47
+
48
+ <visual>
49
+ <origin rpy="0 0 0" xyz="0.075 0.025 0"/>
50
+ <geometry>
51
+ <box size=".03 .08 .0399"/>
52
+ </geometry>
53
+ <material name="dark">
54
+ <color rgba="0. 0.3412 0.3490 1"/>
55
+ </material>
56
+ </visual>
57
+ <collision>
58
+ <origin rpy="0 0 0" xyz="0.075 0.025 0"/>
59
+ <geometry>
60
+ <box size=".03 .08 .0399"/>
61
+ </geometry>
62
+ </collision>
63
+
64
+
65
+ </link>
66
+ </robot>
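The `insert.urdf` fixture above composes an ell shape from three axis-aligned boxes on a single link. A small sketch (illustrative only, values copied from the URDF above) that recomputes the fixture's overall bounding box from those origins and sizes:

```python
# Hedged sketch: recompute the ell fixture's axis-aligned bounding box from the
# three <box> origins and sizes declared in insert.urdf.
boxes = [  # (origin xyz, size xyz) as written in the URDF
    ((0.025, 0.0, 0.0), (0.08, 0.03, 0.04)),
    ((0.0, 0.025, 0.0), (0.03, 0.08, 0.0399)),
    ((0.075, 0.025, 0.0), (0.03, 0.08, 0.0399)),
]
aabb_min = [min(o[i] - s[i] / 2 for o, s in boxes) for i in range(3)]
aabb_max = [max(o[i] + s[i] / 2 for o, s in boxes) for i in range(3)]
print("AABB min:", aabb_min)  # ~[-0.015, -0.015, -0.02]
print("AABB max:", aabb_max)  # ~[ 0.09,   0.065,  0.02]
```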
envs/assets/plane.obj ADDED
@@ -0,0 +1,18 @@
1
+ # Blender v2.66 (sub 1) OBJ File: ''
2
+ # www.blender.org
3
+ mtllib plane.mtl
4
+ o Plane
5
+ v 15.000000 -15.000000 0.000000
6
+ v 15.000000 15.000000 0.000000
7
+ v -15.000000 15.000000 0.000000
8
+ v -15.000000 -15.000000 0.000000
9
+
10
+ vt 15.000000 0.000000
11
+ vt 15.000000 15.000000
12
+ vt 0.000000 15.000000
13
+ vt 0.000000 0.000000
14
+
15
+ usemtl Material
16
+ s off
17
+ f 1/1 2/2 3/3
18
+ f 1/1 3/3 4/4
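`plane.obj` is small enough to read by eye: four vertices, four texture coordinates, and two triangles spanning a 30 m square ground plane. For the larger meshes in this commit (e.g. `star.obj`, `base.obj`), a minimal hedged OBJ reader such as the one below can be used to sanity-check vertex and face counts; the path is an assumption.

```python
# Hedged sketch: tiny Wavefront OBJ reader covering only what these assets use
# (v / f records), e.g. to confirm plane.obj holds 4 vertices and 2 faces.
def read_obj(path):
    vertices, faces = [], []
    with open(path) as fh:
        for line in fh:
            parts = line.split()
            if not parts:
                continue
            if parts[0] == "v":                       # geometric vertex: x y z
                vertices.append(tuple(map(float, parts[1:4])))
            elif parts[0] == "f":                     # face: v, v/vt, or v/vt/vn indices
                faces.append([int(tok.split("/")[0]) for tok in parts[1:]])
    return vertices, faces

verts, faces = read_obj("envs/assets/plane.obj")      # assumed relative path
print(len(verts), "vertices,", len(faces), "faces")   # expected: 4 vertices, 2 faces
```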
envs/assets/suction/base.obj ADDED
@@ -0,0 +1,396 @@
1
+ # Object Export From Tinkercad Server 2015
2
+
3
+ mtllib obj.mtl
4
+
5
+ o obj_0
6
+ v 7.413 37.27 25
7
+ v 7.413 37.27 0
8
+ v 33.513 17.913 25
9
+ v 35.107 14.542 25
10
+ v 35.107 14.542 0
11
+ v 33.513 17.913 0
12
+ v -31.596 21.112 25
13
+ v -31.596 21.112 0
14
+ v -33.513 17.913 0
15
+ v -33.513 17.913 25
16
+ v 3.725 37.817 0
17
+ v 3.725 37.817 25
18
+ v -29.374 24.107 25
19
+ v -29.374 24.107 0
20
+ v 0 38 0
21
+ v 11.031 36.364 25
22
+ v 11.031 36.364 0
23
+ v 14.542 35.107 25
24
+ v 14.542 35.107 0
25
+ v -37.27 -7.413 25
26
+ v 17.913 33.513 25
27
+ v -37.27 -7.413 0
28
+ v -11.031 -36.364 25
29
+ v 17.913 33.513 0
30
+ v -36.364 -11.031 0
31
+ v -14.542 -35.107 25
32
+ v -26.87 26.87 25
33
+ v -36.364 -11.031 25
34
+ v -14.542 -35.107 0
35
+ v -26.87 26.87 0
36
+ v -3.725 37.817 0
37
+ v -11.031 -36.364 0
38
+ v 24.107 -29.374 25
39
+ v -17.913 -33.513 25
40
+ v -17.913 -33.513 0
41
+ v 21.112 -31.596 25
42
+ v 21.112 -31.596 0
43
+ v 24.107 -29.374 0
44
+ v -3.725 37.817 25
45
+ v 0 38 25
46
+ v 17.913 -33.513 25
47
+ v 17.913 -33.513 0
48
+ v 37.817 -3.725 25
49
+ v -37.817 -3.725 25
50
+ v -37.817 -3.725 0
51
+ v 37.817 -3.725 0
52
+ v 38 0 0
53
+ v -21.112 -31.596 25
54
+ v 38 0 25
55
+ v 14.542 -35.107 25
56
+ v -21.112 -31.596 0
57
+ v 36.364 11.031 25
58
+ v 14.542 -35.107 0
59
+ v 36.364 11.031 0
60
+ v 37.27 -7.413 25
61
+ v 37.27 -7.413 0
62
+ v -24.107 29.374 25
63
+ v 21.112 31.596 25
64
+ v -24.107 29.374 0
65
+ v 36.364 -11.031 25
66
+ v 21.112 31.596 0
67
+ v 36.364 -11.031 0
68
+ v -38 0 25
69
+ v 11.031 -36.364 25
70
+ v 11.031 -36.364 0
71
+ v -21.112 31.596 25
72
+ v -21.112 31.596 0
73
+ v 7.413 -37.27 25
74
+ v 7.413 -37.27 0
75
+ v -17.913 33.513 25
76
+ v -17.913 33.513 0
77
+ v 24.107 29.374 0
78
+ v -38 0 0
79
+ v 26.87 26.87 0
80
+ v 35.107 -14.542 25
81
+ v 29.374 24.107 0
82
+ v 35.107 -14.542 0
83
+ v 31.596 21.112 0
84
+ v 3.725 -37.817 25
85
+ v 37.27 7.413 25
86
+ v 3.725 -37.817 0
87
+ v 24.107 29.374 25
88
+ v -24.107 -29.374 25
89
+ v -14.542 35.107 25
90
+ v -24.107 -29.374 0
91
+ v -14.542 35.107 0
92
+ v 37.27 7.413 0
93
+ v 33.513 -17.913 25
94
+ v 37.817 3.725 0
95
+ v 33.513 -17.913 0
96
+ v -11.031 36.364 25
97
+ v -11.031 36.364 0
98
+ v -7.413 37.27 25
99
+ v 26.87 26.87 25
100
+ v -7.413 37.27 0
101
+ v -26.87 -26.87 25
102
+ v -26.87 -26.87 0
103
+ v 37.817 3.725 25
104
+ v 0 -38 25
105
+ v 31.596 -21.112 25
106
+ v 0 -38 0
107
+ v 31.596 -21.112 0
108
+ v 29.374 24.107 25
109
+ v -29.374 -24.107 25
110
+ v -29.374 -24.107 0
111
+ v 29.374 -24.107 25
112
+ v 31.596 21.112 25
113
+ v 29.374 -24.107 0
114
+ v -3.725 -37.817 25
115
+ v -31.596 -21.112 25
116
+ v -3.725 -37.817 0
117
+ v -31.596 -21.112 0
118
+ v -37.817 3.725 25
119
+ v -37.817 3.725 0
120
+ v 26.87 -26.87 25
121
+ v 26.87 -26.87 0
122
+ v -37.27 7.413 0
123
+ v -37.27 7.413 25
124
+ v -7.413 -37.27 25
125
+ v -33.513 -17.913 25
126
+ v -33.513 -17.913 0
127
+ v -7.413 -37.27 0
128
+ v -36.364 11.031 25
129
+ v -36.364 11.031 0
130
+ v -35.107 -14.542 25
131
+ v -35.107 -14.542 0
132
+ v -35.107 14.542 25
133
+ v -35.107 14.542 0
134
+ # 128 vertices
135
+
136
+ g group_0_2829873
137
+
138
+ usemtl color_2829873
139
+ s 0
140
+
141
+ f 3 4 5
142
+ f 3 5 6
143
+ f 7 8 9
144
+ f 7 9 10
145
+ f 1 2 11
146
+ f 1 11 12
147
+ f 16 17 2
148
+ f 16 2 1
149
+ f 13 14 8
150
+ f 13 8 7
151
+ f 18 19 17
152
+ f 18 17 16
153
+ f 23 26 29
154
+ f 23 29 32
155
+ f 26 34 35
156
+ f 26 35 29
157
+ f 33 36 37
158
+ f 33 37 38
159
+ f 15 31 39
160
+ f 15 39 40
161
+ f 20 22 25
162
+ f 20 25 28
163
+ f 36 41 42
164
+ f 36 42 37
165
+ f 44 45 22
166
+ f 44 22 20
167
+ f 21 24 19
168
+ f 21 19 18
169
+ f 43 46 47
170
+ f 43 47 49
171
+ f 27 30 14
172
+ f 27 14 13
173
+ f 41 50 53
174
+ f 41 53 42
175
+ f 43 55 56
176
+ f 43 56 46
177
+ f 34 48 51
178
+ f 34 51 35
179
+ f 4 52 54
180
+ f 4 54 5
181
+ f 50 64 65
182
+ f 50 65 53
183
+ f 57 59 30
184
+ f 57 30 27
185
+ f 55 60 62
186
+ f 55 62 56
187
+ f 66 67 59
188
+ f 66 59 57
189
+ f 64 68 69
190
+ f 64 69 65
191
+ f 58 61 24
192
+ f 58 24 21
193
+ f 63 73 45
194
+ f 63 45 44
195
+ f 70 71 67
196
+ f 70 67 66
197
+ f 60 75 77
198
+ f 60 77 62
199
+ f 82 72 61
200
+ f 82 61 58
201
+ f 68 79 81
202
+ f 68 81 69
203
+ f 48 83 85
204
+ f 48 85 51
205
+ f 84 86 71
206
+ f 84 71 70
207
+ f 75 88 90
208
+ f 75 90 77
209
+ f 91 92 86
210
+ f 91 86 84
211
+ f 15 11 2
212
+ f 15 2 17
213
+ f 15 17 19
214
+ f 15 19 24
215
+ f 15 24 61
216
+ f 15 61 72
217
+ f 15 72 74
218
+ f 15 74 76
219
+ f 15 76 78
220
+ f 15 78 6
221
+ f 15 6 5
222
+ f 15 5 54
223
+ f 15 54 87
224
+ f 15 87 89
225
+ f 15 89 47
226
+ f 52 80 87
227
+ f 52 87 54
228
+ f 94 74 72
229
+ f 94 72 82
230
+ f 93 95 92
231
+ f 93 92 91
232
+ f 83 96 97
233
+ f 83 97 85
234
+ f 80 98 89
235
+ f 80 89 87
236
+ f 79 99 101
237
+ f 79 101 81
238
+ f 88 100 102
239
+ f 88 102 90
240
+ f 94 103 76
241
+ f 94 76 74
242
+ f 39 31 95
243
+ f 39 95 93
244
+ f 104 105 97
245
+ f 104 97 96
246
+ f 98 49 47
247
+ f 98 47 89
248
+ f 100 106 108
249
+ f 100 108 102
250
+ f 103 107 78
251
+ f 103 78 76
252
+ f 101 99 109
253
+ f 101 109 111
254
+ f 110 112 105
255
+ f 110 105 104
256
+ f 113 114 73
257
+ f 113 73 63
258
+ f 106 115 116
259
+ f 106 116 108
260
+ f 107 3 6
261
+ f 107 6 78
262
+ f 118 117 114
263
+ f 118 114 113
264
+ f 120 121 112
265
+ f 120 112 110
266
+ f 109 119 122
267
+ f 109 122 111
268
+ f 123 124 117
269
+ f 123 117 118
270
+ f 125 126 121
271
+ f 125 121 120
272
+ f 127 128 124
273
+ f 127 124 123
274
+ f 115 33 38
275
+ f 115 38 116
276
+ f 10 9 128
277
+ f 10 128 127
278
+ f 28 25 126
279
+ f 28 126 125
280
+ f 119 23 32
281
+ f 119 32 122
282
+ f 73 114 117
283
+ f 73 117 124
284
+ f 73 124 128
285
+ f 73 128 9
286
+ f 73 9 8
287
+ f 73 8 14
288
+ f 73 14 30
289
+ f 73 30 59
290
+ f 73 59 67
291
+ f 73 67 71
292
+ f 73 71 86
293
+ f 73 86 92
294
+ f 73 92 95
295
+ f 73 95 31
296
+ f 73 31 15
297
+ f 81 101 15
298
+ f 47 46 15
299
+ f 46 56 15
300
+ f 56 62 15
301
+ f 62 77 15
302
+ f 77 90 15
303
+ f 90 102 15
304
+ f 102 108 15
305
+ f 108 116 15
306
+ f 116 38 15
307
+ f 38 37 15
308
+ f 37 42 15
309
+ f 42 53 15
310
+ f 53 65 15
311
+ f 69 15 65
312
+ f 81 15 69
313
+ f 45 73 15
314
+ f 101 111 15
315
+ f 111 122 15
316
+ f 122 32 15
317
+ f 32 29 15
318
+ f 29 35 15
319
+ f 35 51 15
320
+ f 51 85 15
321
+ f 85 97 15
322
+ f 97 105 15
323
+ f 105 112 15
324
+ f 112 121 15
325
+ f 121 126 15
326
+ f 126 25 15
327
+ f 22 15 25
328
+ f 45 15 22
329
+ f 49 98 80
330
+ f 49 80 52
331
+ f 49 52 4
332
+ f 49 4 3
333
+ f 49 3 107
334
+ f 49 107 103
335
+ f 49 103 94
336
+ f 49 94 82
337
+ f 49 82 58
338
+ f 49 58 21
339
+ f 49 21 18
340
+ f 49 18 16
341
+ f 49 16 1
342
+ f 49 1 12
343
+ f 49 12 40
344
+ f 40 39 93
345
+ f 40 93 91
346
+ f 40 91 84
347
+ f 40 84 70
348
+ f 40 70 66
349
+ f 40 66 57
350
+ f 40 57 27
351
+ f 40 27 13
352
+ f 40 13 7
353
+ f 40 7 10
354
+ f 40 10 127
355
+ f 40 127 123
356
+ f 40 123 118
357
+ f 40 118 113
358
+ f 63 40 113
359
+ f 68 40 79
360
+ f 64 40 68
361
+ f 50 40 64
362
+ f 41 40 50
363
+ f 36 40 41
364
+ f 33 40 36
365
+ f 115 40 33
366
+ f 106 40 115
367
+ f 100 40 106
368
+ f 88 40 100
369
+ f 75 40 88
370
+ f 60 40 75
371
+ f 55 40 60
372
+ f 43 40 55
373
+ f 49 40 43
374
+ f 99 79 40
375
+ f 20 40 44
376
+ f 28 40 20
377
+ f 125 40 28
378
+ f 120 40 125
379
+ f 110 40 120
380
+ f 104 40 110
381
+ f 96 40 104
382
+ f 83 40 96
383
+ f 48 40 83
384
+ f 34 40 48
385
+ f 26 40 34
386
+ f 23 40 26
387
+ f 119 40 23
388
+ f 109 40 119
389
+ f 99 40 109
390
+ f 63 44 40
391
+ f 12 11 15
392
+ f 12 15 40
393
+ # 252 faces
394
+
395
+ #end of obj_0
396
+
envs/assets/suction/cylinder.urdf ADDED
@@ -0,0 +1,98 @@
1
+ <?xml version="1.0" ?>
2
+ <robot name="cylinder.urdf">
3
+ <link name="headLink">
4
+ <contact>
5
+ <lateral_friction value="1.0"/>
6
+ <rolling_friction value="0.0001"/>
7
+ <inertia_scaling value="3.0"/>
8
+ </contact>
9
+ <inertial>
10
+ <origin rpy="0 0 0" xyz="0 0 0"/>
11
+ <mass value=".1"/>
12
+ <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
13
+ </inertial>
14
+ <visual>
15
+ <origin rpy="0 0 0" xyz="0 0 0"/>
16
+ <geometry>
17
+ <mesh filename="head.obj" scale="0.001 0.001 0.001"/>
18
+ </geometry>
19
+ <material name="darkgrey">
20
+ <color rgba="0.2 0.2 0.2 1"/>
21
+ </material>
22
+ </visual>
23
+ <collision>
24
+ <origin rpy="0 0 0" xyz="0 0 0"/>
25
+ <geometry>
26
+ <mesh filename="head.obj" scale="0.001 0.001 0.001"/>
27
+ </geometry>
28
+ </collision>
29
+ </link>
30
+
31
+ <joint name="tipJoint" type="fixed">
32
+ <parent link="headLink"/>
33
+ <child link="tipLink"/>
34
+ <origin rpy="0.0 0.0 0.0" xyz="0.0 0.0 0.029"/>
35
+ <axis xyz="0 0 1"/>
36
+ <limit effort="150.0" lower="-6.28318530718" upper="6.28318530718" velocity="3.15"/>
37
+ <dynamics damping="10.0" friction="0.0"/>
38
+ </joint>
39
+
40
+ <link name="tipLink">
41
+ <contact>
42
+ <lateral_friction value="1.0"/>
43
+ <rolling_friction value="0.0001"/>
44
+ <inertia_scaling value="3.0"/>
45
+ </contact>
46
+ <inertial>
47
+ <origin rpy="0 0 0" xyz="0 0 0"/>
48
+ <mass value=".1"/>
49
+ <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
50
+ </inertial>
51
+ <visual>
52
+ <origin rpy="0 0 0" xyz="0 0 0"/>
53
+ <geometry>
54
+ <cylinder length="0.05" radius="0.005"/>
55
+ </geometry>
56
+ <material name="blue">
57
+ <color rgba="0.18039216 0.50588235 0.77254902 1"/>
58
+ </material>
59
+ </visual>
60
+ <collision>
61
+ <origin rpy="0 0 0" xyz="0 0 0"/>
62
+ <geometry>
63
+ <cylinder length="0.05" radius="0.005"/>
64
+ </geometry>
65
+ </collision>
66
+ </link>
67
+
68
+ <!-- <link name="asdfLink">
69
+ <contact>
70
+ <lateral_friction value="1.0"/>
71
+ <rolling_friction value="0.0001"/>
72
+ <inertia_scaling value="3.0"/>
73
+ </contact>
74
+ <inertial>
75
+ <origin rpy="0 0 0" xyz="0 0 0"/>
76
+ <mass value=".1"/>
77
+ <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
78
+ </inertial>
79
+ <visual>
80
+ <origin rpy="0 0 0" xyz="0 0 0"/>
81
+ <geometry>
82
+ <cylinder length="0.028" radius="0.001"/>
83
+ </geometry>
84
+ </visual>
85
+ </link>
86
+
87
+ <joint name="asdfoint" type="fixed">
88
+ <parent link="tipLink"/>
89
+ <child link="asdfLink"/>
90
+ <origin rpy="0.0 0.0 0.0" xyz="0.0 0.0 0.0"/>
91
+ <axis xyz="0 0 1"/>
92
+ <limit effort="150.0" lower="-6.28318530718" upper="6.28318530718" velocity="3.15"/>
93
+ <dynamics damping="10.0" friction="0.0"/>
94
+ </joint> -->
95
+
96
+
97
+ </robot>
98
+
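`cylinder.urdf` above defines the suction tool as two links: the `head.obj` mesh on `headLink`, rigidly attached via the fixed `tipJoint` to a thin `tipLink` cylinder offset 29 mm along z (the trailing `asdfLink` block is commented out). A hedged sketch, with an assumed path, for confirming that structure after loading:

```python
# Hedged sketch (not part of this repo): load the suction tool and list its
# joints to confirm the fixed headLink -> tipLink chain.
import pybullet as p

p.connect(p.DIRECT)
tool = p.loadURDF("envs/assets/suction/cylinder.urdf")  # assumed relative path
for j in range(p.getNumJoints(tool)):
    info = p.getJointInfo(tool, j)
    # info[1] = joint name, info[2] = joint type (4 == JOINT_FIXED), info[12] = child link name
    print(info[1].decode(), "type:", info[2], "child:", info[12].decode())
p.disconnect()
```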
envs/assets/suction/cylinder_real.urdf ADDED
@@ -0,0 +1,98 @@
1
+ <?xml version="1.0" ?>
2
+ <robot name="cylinder_real.urdf">
3
+ <link name="headLink">
4
+ <contact>
5
+ <lateral_friction value="1.0"/>
6
+ <rolling_friction value="0.0001"/>
7
+ <inertia_scaling value="3.0"/>
8
+ </contact>
9
+ <inertial>
10
+ <origin rpy="0 0 0" xyz="0 0 0"/>
11
+ <mass value=".1"/>
12
+ <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
13
+ </inertial>
14
+ <visual>
15
+ <origin rpy="0 0 0" xyz="0 0 0"/>
16
+ <geometry>
17
+ <mesh filename="head.obj" scale="0.001 0.001 0.001"/>
18
+ </geometry>
19
+ <material name="darkgrey">
20
+ <color rgba="0.2 0.2 0.2 1"/>
21
+ </material>
22
+ </visual>
23
+ <collision>
24
+ <origin rpy="0 0 0" xyz="0 0 0"/>
25
+ <geometry>
26
+ <mesh filename="head.obj" scale="0.001 0.001 0.001"/>
27
+ </geometry>
28
+ </collision>
29
+ </link>
30
+
31
+ <joint name="tipJoint" type="fixed">
32
+ <parent link="headLink"/>
33
+ <child link="tipLink"/>
34
+ <origin rpy="0.0 0.0 0.0" xyz="0.0 0.0 0.029"/>
35
+ <axis xyz="0 0 1"/>
36
+ <limit effort="150.0" lower="-6.28318530718" upper="6.28318530718" velocity="3.15"/>
37
+ <dynamics damping="10.0" friction="0.0"/>
38
+ </joint>
39
+
40
+ <link name="tipLink">
41
+ <contact>
42
+ <lateral_friction value="1.0"/>
43
+ <rolling_friction value="0.0001"/>
44
+ <inertia_scaling value="3.0"/>
45
+ </contact>
46
+ <inertial>
47
+ <origin rpy="0 0 0" xyz="0 0 0"/>
48
+ <mass value=".1"/>
49
+ <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
50
+ </inertial>
51
+ <visual>
52
+ <origin rpy="0 0 0" xyz="0 0 0"/>
53
+ <geometry>
54
+ <cylinder length="0.135" radius="0.0127"/>
55
+ </geometry>
56
+ <material name="blue">
57
+ <color rgba="0.5 0.5 0.5 1"/>
58
+ </material>
59
+ </visual>
60
+ <collision>
61
+ <origin rpy="0 0 0" xyz="0 0 0"/>
62
+ <geometry>
63
+ <cylinder length="0.135" radius="0.0127"/>
64
+ </geometry>
65
+ </collision>
66
+ </link>
67
+
68
+ <!-- <link name="asdfLink">
69
+ <contact>
70
+ <lateral_friction value="1.0"/>
71
+ <rolling_friction value="0.0001"/>
72
+ <inertia_scaling value="3.0"/>
73
+ </contact>
74
+ <inertial>
75
+ <origin rpy="0 0 0" xyz="0 0 0"/>
76
+ <mass value=".1"/>
77
+ <inertia ixx="1" ixy="0" ixz="0" iyy="1" iyz="0" izz="1"/>
78
+ </inertial>
79
+ <visual>
80
+ <origin rpy="0 0 0" xyz="0 0 0"/>
81
+ <geometry>
82
+ <cylinder length="0.028" radius="0.001"/>
83
+ </geometry>
84
+ </visual>
85
+ </link>
86
+
87
+ <joint name="asdfoint" type="fixed">
88
+ <parent link="tipLink"/>
89
+ <child link="asdfLink"/>
90
+ <origin rpy="0.0 0.0 0.0" xyz="0.0 0.0 0.0"/>
91
+ <axis xyz="0 0 1"/>
92
+ <limit effort="150.0" lower="-6.28318530718" upper="6.28318530718" velocity="3.15"/>
93
+ <dynamics damping="10.0" friction="0.0"/>
94
+ </joint> -->
95
+
96
+
97
+ </robot>
98
+