helper2424 committed
Commit a23f79c · verified · 1 parent: 999924a

Upload folder using huggingface_hub

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. checkpoints/000100/pretrained_model/config.json +72 -0
  2. checkpoints/000100/pretrained_model/model.safetensors +3 -0
  3. checkpoints/000100/pretrained_model/train_config.json +184 -0
  4. checkpoints/000100/training_state/optimizer_param_groups.json +803 -0
  5. checkpoints/000100/training_state/optimizer_state.safetensors +3 -0
  6. checkpoints/000100/training_state/rng_state.safetensors +3 -0
  7. checkpoints/000100/training_state/scheduler_state.json +15 -0
  8. checkpoints/000100/training_state/training_step.json +3 -0
  9. checkpoints/000200/pretrained_model/config.json +72 -0
  10. checkpoints/000200/pretrained_model/model.safetensors +3 -0
  11. checkpoints/000200/pretrained_model/train_config.json +184 -0
  12. checkpoints/000200/training_state/optimizer_param_groups.json +803 -0
  13. checkpoints/000200/training_state/optimizer_state.safetensors +3 -0
  14. checkpoints/000200/training_state/rng_state.safetensors +3 -0
  15. checkpoints/000200/training_state/scheduler_state.json +15 -0
  16. checkpoints/000200/training_state/training_step.json +3 -0
  17. checkpoints/000300/pretrained_model/config.json +72 -0
  18. checkpoints/000300/pretrained_model/model.safetensors +3 -0
  19. checkpoints/000300/pretrained_model/train_config.json +184 -0
  20. checkpoints/000300/training_state/optimizer_param_groups.json +803 -0
  21. checkpoints/000300/training_state/optimizer_state.safetensors +3 -0
  22. checkpoints/000300/training_state/rng_state.safetensors +3 -0
  23. checkpoints/000300/training_state/scheduler_state.json +15 -0
  24. checkpoints/000300/training_state/training_step.json +3 -0
  25. checkpoints/000400/pretrained_model/config.json +72 -0
  26. checkpoints/000400/pretrained_model/model.safetensors +3 -0
  27. checkpoints/000400/pretrained_model/train_config.json +184 -0
  28. checkpoints/000400/training_state/optimizer_param_groups.json +803 -0
  29. checkpoints/000400/training_state/optimizer_state.safetensors +3 -0
  30. checkpoints/000400/training_state/rng_state.safetensors +3 -0
  31. checkpoints/000400/training_state/scheduler_state.json +15 -0
  32. checkpoints/000400/training_state/training_step.json +3 -0
  33. checkpoints/000500/pretrained_model/config.json +72 -0
  34. checkpoints/000500/pretrained_model/model.safetensors +3 -0
  35. checkpoints/000500/pretrained_model/train_config.json +184 -0
  36. checkpoints/000500/training_state/optimizer_param_groups.json +803 -0
  37. checkpoints/000500/training_state/optimizer_state.safetensors +3 -0
  38. checkpoints/000500/training_state/rng_state.safetensors +3 -0
  39. checkpoints/000500/training_state/scheduler_state.json +15 -0
  40. checkpoints/000500/training_state/training_step.json +3 -0
  41. checkpoints/000600/pretrained_model/config.json +72 -0
  42. checkpoints/000600/pretrained_model/model.safetensors +3 -0
  43. checkpoints/000600/pretrained_model/train_config.json +184 -0
  44. checkpoints/000600/training_state/rng_state.safetensors +3 -0
  45. checkpoints/000600/training_state/training_step.json +3 -0
  46. wandb/debug-internal.log +7 -0
  47. wandb/debug.log +23 -0
  48. wandb/run-20250412_213619-shity_version/files/config.yaml +175 -0
  49. wandb/run-20250412_213619-shity_version/files/output.log +75 -0
  50. wandb/run-20250412_213619-shity_version/files/requirements.txt +245 -0
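
The listing above follows the usual LeRobot-style checkpoint layout: each checkpoints/<step>/ folder pairs a deployable pretrained_model/ (config plus weights) with a resumable training_state/ (optimizer, RNG, scheduler, step counter). As a quick way to inspect a local clone, here is a minimal sketch; the clone path is a placeholder and no LeRobot APIs are assumed, only the file names visible in this commit.

```python
import json
from pathlib import Path

# Placeholder path to a local clone of this repository (assumption).
REPO_ROOT = Path("./pi_shity_version")

def list_checkpoints(root: Path):
    """Yield (step, checkpoint_dir) pairs found under checkpoints/."""
    for ckpt_dir in sorted((root / "checkpoints").iterdir()):
        step_file = ckpt_dir / "training_state" / "training_step.json"
        if step_file.is_file():
            yield json.loads(step_file.read_text())["step"], ckpt_dir

if __name__ == "__main__":
    for step, ckpt_dir in list_checkpoints(REPO_ROOT):
        print(f"checkpoint {ckpt_dir.name}: saved at training step {step}")
```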
checkpoints/000100/pretrained_model/config.json ADDED
@@ -0,0 +1,72 @@
+ {
+ "type": "pi0",
+ "n_obs_steps": 1,
+ "normalization_mapping": {
+ "VISUAL": "IDENTITY",
+ "STATE": "MEAN_STD",
+ "ACTION": "MEAN_STD"
+ },
+ "input_features": {
+ "observation.state": {
+ "type": "STATE",
+ "shape": [
+ 6
+ ]
+ },
+ "observation.images.gripper": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ },
+ "observation.images.webcam": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ }
+ },
+ "output_features": {
+ "action": {
+ "type": "ACTION",
+ "shape": [
+ 6
+ ]
+ }
+ },
+ "device": "cuda",
+ "use_amp": false,
+ "chunk_size": 50,
+ "n_action_steps": 50,
+ "max_state_dim": 32,
+ "max_action_dim": 32,
+ "resize_imgs_with_padding": [
+ 224,
+ 224
+ ],
+ "empty_cameras": 0,
+ "adapt_to_pi_aloha": false,
+ "use_delta_joint_actions_aloha": false,
+ "tokenizer_max_length": 48,
+ "proj_width": 1024,
+ "num_steps": 10,
+ "use_cache": true,
+ "attention_implementation": "eager",
+ "freeze_vision_encoder": true,
+ "train_expert_only": false,
+ "train_state_proj": true,
+ "optimizer_lr": 2.5e-05,
+ "optimizer_betas": [
+ 0.9,
+ 0.95
+ ],
+ "optimizer_eps": 1e-08,
+ "optimizer_weight_decay": 1e-10,
+ "scheduler_warmup_steps": 1000,
+ "scheduler_decay_steps": 30000,
+ "scheduler_decay_lr": 2.5e-06
+ }
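
The config above is plain JSON, so it can be sanity-checked without any policy code. A minimal sketch (the path mirrors the layout in this commit; the checks only restate what the file itself says: two 3×480×640 cameras and a 6-dim state in, a 6-dim action out in chunks of 50):

```python
import json
from pathlib import Path

cfg = json.loads(Path("checkpoints/000100/pretrained_model/config.json").read_text())

assert cfg["type"] == "pi0"
for name, feat in cfg["input_features"].items():
    # observation.state -> STATE [6]; the two cameras -> VISUAL [3, 480, 640]
    print(name, feat["type"], feat["shape"])
action = cfg["output_features"]["action"]
print("action dim:", action["shape"], "chunk:", cfg["chunk_size"], "exec:", cfg["n_action_steps"])
```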
checkpoints/000100/pretrained_model/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1caa4a8ce1a72347a568aee5be82b8a9de987b56d8a76bde783310d8daf6f21e
+ size 7536022544
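
As with all .safetensors entries in this commit, what Git stores is a three-line LFS pointer (spec version, SHA-256 of the real blob, and its size in bytes — here roughly 7.5 GB), not the weights themselves. A small sketch of reading such a pointer:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:1caa4a8ce1a72347a568aee5be82b8a9de987b56d8a76bde783310d8daf6f21e\n"
    "size 7536022544\n"
)
print(pointer["oid"], f"{int(pointer['size']) / 1e9:.2f} GB")
```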
checkpoints/000100/pretrained_model/train_config.json ADDED
@@ -0,0 +1,184 @@
+ {
+ "dataset": {
+ "repo_id": "maelic/hackathon7",
+ "root": null,
+ "episodes": null,
+ "image_transforms": {
+ "enable": false,
+ "max_num_transforms": 3,
+ "random_order": false,
+ "tfs": {
+ "brightness": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "brightness": [
+ 0.8,
+ 1.2
+ ]
+ }
+ },
+ "contrast": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "contrast": [
+ 0.8,
+ 1.2
+ ]
+ }
+ },
+ "saturation": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "saturation": [
+ 0.5,
+ 1.5
+ ]
+ }
+ },
+ "hue": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "hue": [
+ -0.05,
+ 0.05
+ ]
+ }
+ },
+ "sharpness": {
+ "weight": 1.0,
+ "type": "SharpnessJitter",
+ "kwargs": {
+ "sharpness": [
+ 0.5,
+ 1.5
+ ]
+ }
+ }
+ }
+ },
+ "revision": null,
+ "use_imagenet_stats": true,
+ "video_backend": "torchcodec"
+ },
+ "env": null,
+ "policy": {
+ "type": "pi0",
+ "n_obs_steps": 1,
+ "normalization_mapping": {
+ "VISUAL": "IDENTITY",
+ "STATE": "MEAN_STD",
+ "ACTION": "MEAN_STD"
+ },
+ "input_features": {
+ "observation.state": {
+ "type": "STATE",
+ "shape": [
+ 6
+ ]
+ },
+ "observation.images.gripper": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ },
+ "observation.images.webcam": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ }
+ },
+ "output_features": {
+ "action": {
+ "type": "ACTION",
+ "shape": [
+ 6
+ ]
+ }
+ },
+ "device": "cuda",
+ "use_amp": false,
+ "chunk_size": 50,
+ "n_action_steps": 50,
+ "max_state_dim": 32,
+ "max_action_dim": 32,
+ "resize_imgs_with_padding": [
+ 224,
+ 224
+ ],
+ "empty_cameras": 0,
+ "adapt_to_pi_aloha": false,
+ "use_delta_joint_actions_aloha": false,
+ "tokenizer_max_length": 48,
+ "proj_width": 1024,
+ "num_steps": 10,
+ "use_cache": true,
+ "attention_implementation": "eager",
+ "freeze_vision_encoder": true,
+ "train_expert_only": false,
+ "train_state_proj": true,
+ "optimizer_lr": 2.5e-05,
+ "optimizer_betas": [
+ 0.9,
+ 0.95
+ ],
+ "optimizer_eps": 1e-08,
+ "optimizer_weight_decay": 1e-10,
+ "scheduler_warmup_steps": 1000,
+ "scheduler_decay_steps": 30000,
+ "scheduler_decay_lr": 2.5e-06
+ },
+ "output_dir": "/scratch/train/pi_shity_version",
+ "job_name": "pi0",
+ "resume": false,
+ "seed": 1000,
+ "num_workers": 4,
+ "batch_size": 20,
+ "steps": 100000,
+ "eval_freq": 100,
+ "log_freq": 200,
+ "save_checkpoint": true,
+ "save_freq": 100,
+ "use_policy_training_preset": true,
+ "optimizer": {
+ "type": "adamw",
+ "lr": 2.5e-05,
+ "weight_decay": 1e-10,
+ "grad_clip_norm": 10.0,
+ "betas": [
+ 0.9,
+ 0.95
+ ],
+ "eps": 1e-08
+ },
+ "scheduler": {
+ "type": "cosine_decay_with_warmup",
+ "num_warmup_steps": 1000,
+ "num_decay_steps": 30000,
+ "peak_lr": 2.5e-05,
+ "decay_lr": 2.5e-06
+ },
+ "eval": {
+ "n_episodes": 50,
+ "batch_size": 50,
+ "use_async_envs": false
+ },
+ "wandb": {
+ "enable": true,
+ "disable_artifact": false,
+ "project": "pi0_mistral_hackathon",
+ "entity": null,
+ "notes": null,
+ "run_id": "shity_version",
+ "mode": null
+ }
+ }
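
The optimizer and scheduler presets above pin down the learning-rate trajectory: linear warmup over 1,000 steps to a peak of 2.5e-5, then cosine decay over 30,000 steps down to 2.5e-6. The sketch below is one plausible way to write that schedule, not necessarily the exact code used for this run; the warmup form is an assumption, chosen because it reproduces the rates recorded in the scheduler_state.json files later in this commit (≈2.52e-6 at step 100, ≈5.02e-6 at step 200).

```python
import math

PEAK_LR = 2.5e-05
DECAY_LR = 2.5e-06
NUM_WARMUP_STEPS = 1000
NUM_DECAY_STEPS = 30000

def lr_at(step: int) -> float:
    """Assumed cosine-decay-with-warmup schedule for this run."""
    if step < NUM_WARMUP_STEPS:
        # Linear warmup; the (step + 1) / (warmup + 1) form matches the
        # _last_lr values checkpointed at steps 100, 200 and 300.
        return PEAK_LR * (step + 1) / (NUM_WARMUP_STEPS + 1)
    progress = min((step - NUM_WARMUP_STEPS) / NUM_DECAY_STEPS, 1.0)
    return DECAY_LR + (PEAK_LR - DECAY_LR) * 0.5 * (1.0 + math.cos(math.pi * progress))

print(lr_at(100))  # ~2.52e-06, cf. checkpoints/000100/training_state/scheduler_state.json
print(lr_at(200))  # ~5.02e-06, cf. checkpoints/000200/training_state/scheduler_state.json
```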
checkpoints/000100/training_state/optimizer_param_groups.json ADDED
@@ -0,0 +1,803 @@
+ [
+ {
+ "lr": 2.5224775224775203e-06,
+ "betas": [
+ 0.9,
+ 0.95
+ ],
+ "eps": 1e-08,
+ "weight_decay": 1e-10,
+ "amsgrad": false,
+ "foreach": null,
+ "maximize": false,
+ "capturable": false,
+ "differentiable": false,
+ "fused": null,
+ "initial_lr": 2.5e-05,
+ "params": [
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
+ 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
+ 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339,
+ 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359,
+ 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379,
+ 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399,
+ 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419,
+ 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
+ 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459,
+ 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479,
+ 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499,
+ 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519,
+ 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539,
+ 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559,
+ 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579,
+ 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599,
+ 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619,
+ 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639,
+ 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659,
+ 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679,
+ 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699,
+ 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719,
+ 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739,
+ 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759,
+ 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779,
+ 780, 781, 782
+ ]
+ }
+ ]
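
The param-group dump above is mostly optimizer bookkeeping: a single AdamW group covering parameter indices 0–782, carrying the warmed-up learning rate at step 100. A small consistency check one could run against it (paths follow the layout in this commit):

```python
import json
from pathlib import Path

state_dir = Path("checkpoints/000100/training_state")
groups = json.loads((state_dir / "optimizer_param_groups.json").read_text())

assert len(groups) == 1
group = groups[0]
assert group["params"] == list(range(783))   # 783 tensors, indices 0..782
sched = json.loads((state_dir / "scheduler_state.json").read_text())
assert group["lr"] == sched["_last_lr"][0]   # group lr matches the scheduler's last lr
print(f"{len(group['params'])} params, lr={group['lr']:.3e}")
```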
checkpoints/000100/training_state/optimizer_state.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d5f93cd450bd32b7259e359a31b76b796830dcfa4edb6e15c94be9e35c4f540
+ size 10896063516
checkpoints/000100/training_state/rng_state.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca8021aba17e321480ce6967b04e6ba9bc485bd9b6ab1e626759e196a9c20e73
+ size 15708
checkpoints/000100/training_state/scheduler_state.json ADDED
@@ -0,0 +1,15 @@
+ {
+ "base_lrs": [
+ 2.5e-05
+ ],
+ "last_epoch": 100,
+ "verbose": false,
+ "_step_count": 101,
+ "_get_lr_called_within_step": false,
+ "_last_lr": [
+ 2.5224775224775203e-06
+ ],
+ "lr_lambdas": [
+ null
+ ]
+ }
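
The fields above look like a stock torch LambdaLR state_dict: base_lrs holds the peak 2.5e-5, last_epoch counts optimizer steps, and _last_lr is the rate actually in effect. A quick arithmetic check, using the same assumed warmup form as the schedule sketch earlier:

```python
import math

base_lr = 2.5e-05          # base_lrs[0]
last_epoch = 100           # optimizer steps taken so far
num_warmup_steps = 1000    # from the training config

expected = base_lr * (last_epoch + 1) / (num_warmup_steps + 1)
stored = 2.5224775224775203e-06  # _last_lr[0] above
assert math.isclose(expected, stored, rel_tol=1e-9)
print(f"expected {expected:.6e}  stored {stored:.6e}")
```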
checkpoints/000100/training_state/training_step.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "step": 100
+ }
checkpoints/000200/pretrained_model/config.json ADDED
@@ -0,0 +1,72 @@
+ {
+ "type": "pi0",
+ "n_obs_steps": 1,
+ "normalization_mapping": {
+ "VISUAL": "IDENTITY",
+ "STATE": "MEAN_STD",
+ "ACTION": "MEAN_STD"
+ },
+ "input_features": {
+ "observation.state": {
+ "type": "STATE",
+ "shape": [
+ 6
+ ]
+ },
+ "observation.images.gripper": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ },
+ "observation.images.webcam": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ }
+ },
+ "output_features": {
+ "action": {
+ "type": "ACTION",
+ "shape": [
+ 6
+ ]
+ }
+ },
+ "device": "cuda",
+ "use_amp": false,
+ "chunk_size": 50,
+ "n_action_steps": 50,
+ "max_state_dim": 32,
+ "max_action_dim": 32,
+ "resize_imgs_with_padding": [
+ 224,
+ 224
+ ],
+ "empty_cameras": 0,
+ "adapt_to_pi_aloha": false,
+ "use_delta_joint_actions_aloha": false,
+ "tokenizer_max_length": 48,
+ "proj_width": 1024,
+ "num_steps": 10,
+ "use_cache": true,
+ "attention_implementation": "eager",
+ "freeze_vision_encoder": true,
+ "train_expert_only": false,
+ "train_state_proj": true,
+ "optimizer_lr": 2.5e-05,
+ "optimizer_betas": [
+ 0.9,
+ 0.95
+ ],
+ "optimizer_eps": 1e-08,
+ "optimizer_weight_decay": 1e-10,
+ "scheduler_warmup_steps": 1000,
+ "scheduler_decay_steps": 30000,
+ "scheduler_decay_lr": 2.5e-06
+ }
checkpoints/000200/pretrained_model/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba1226f37c19f01ce308a286bd5daf7ba1760bd1030913455df96c63c152d069
+ size 7536022544
checkpoints/000200/pretrained_model/train_config.json ADDED
@@ -0,0 +1,184 @@
+ {
+ "dataset": {
+ "repo_id": "maelic/hackathon7",
+ "root": null,
+ "episodes": null,
+ "image_transforms": {
+ "enable": false,
+ "max_num_transforms": 3,
+ "random_order": false,
+ "tfs": {
+ "brightness": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "brightness": [
+ 0.8,
+ 1.2
+ ]
+ }
+ },
+ "contrast": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "contrast": [
+ 0.8,
+ 1.2
+ ]
+ }
+ },
+ "saturation": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "saturation": [
+ 0.5,
+ 1.5
+ ]
+ }
+ },
+ "hue": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "hue": [
+ -0.05,
+ 0.05
+ ]
+ }
+ },
+ "sharpness": {
+ "weight": 1.0,
+ "type": "SharpnessJitter",
+ "kwargs": {
+ "sharpness": [
+ 0.5,
+ 1.5
+ ]
+ }
+ }
+ }
+ },
+ "revision": null,
+ "use_imagenet_stats": true,
+ "video_backend": "torchcodec"
+ },
+ "env": null,
+ "policy": {
+ "type": "pi0",
+ "n_obs_steps": 1,
+ "normalization_mapping": {
+ "VISUAL": "IDENTITY",
+ "STATE": "MEAN_STD",
+ "ACTION": "MEAN_STD"
+ },
+ "input_features": {
+ "observation.state": {
+ "type": "STATE",
+ "shape": [
+ 6
+ ]
+ },
+ "observation.images.gripper": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ },
+ "observation.images.webcam": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ }
+ },
+ "output_features": {
+ "action": {
+ "type": "ACTION",
+ "shape": [
+ 6
+ ]
+ }
+ },
+ "device": "cuda",
+ "use_amp": false,
+ "chunk_size": 50,
+ "n_action_steps": 50,
+ "max_state_dim": 32,
+ "max_action_dim": 32,
+ "resize_imgs_with_padding": [
+ 224,
+ 224
+ ],
+ "empty_cameras": 0,
+ "adapt_to_pi_aloha": false,
+ "use_delta_joint_actions_aloha": false,
+ "tokenizer_max_length": 48,
+ "proj_width": 1024,
+ "num_steps": 10,
+ "use_cache": true,
+ "attention_implementation": "eager",
+ "freeze_vision_encoder": true,
+ "train_expert_only": false,
+ "train_state_proj": true,
+ "optimizer_lr": 2.5e-05,
+ "optimizer_betas": [
+ 0.9,
+ 0.95
+ ],
+ "optimizer_eps": 1e-08,
+ "optimizer_weight_decay": 1e-10,
+ "scheduler_warmup_steps": 1000,
+ "scheduler_decay_steps": 30000,
+ "scheduler_decay_lr": 2.5e-06
+ },
+ "output_dir": "/scratch/train/pi_shity_version",
+ "job_name": "pi0",
+ "resume": false,
+ "seed": 1000,
+ "num_workers": 4,
+ "batch_size": 20,
+ "steps": 100000,
+ "eval_freq": 100,
+ "log_freq": 200,
+ "save_checkpoint": true,
+ "save_freq": 100,
+ "use_policy_training_preset": true,
+ "optimizer": {
+ "type": "adamw",
+ "lr": 2.5e-05,
+ "weight_decay": 1e-10,
+ "grad_clip_norm": 10.0,
+ "betas": [
+ 0.9,
+ 0.95
+ ],
+ "eps": 1e-08
+ },
+ "scheduler": {
+ "type": "cosine_decay_with_warmup",
+ "num_warmup_steps": 1000,
+ "num_decay_steps": 30000,
+ "peak_lr": 2.5e-05,
+ "decay_lr": 2.5e-06
+ },
+ "eval": {
+ "n_episodes": 50,
+ "batch_size": 50,
+ "use_async_envs": false
+ },
+ "wandb": {
+ "enable": true,
+ "disable_artifact": false,
+ "project": "pi0_mistral_hackathon",
+ "entity": null,
+ "notes": null,
+ "run_id": "shity_version",
+ "mode": null
+ }
+ }
checkpoints/000200/training_state/optimizer_param_groups.json ADDED
@@ -0,0 +1,803 @@
+ [
+ {
+ "lr": 5.0199800199800195e-06,
+ "betas": [
+ 0.9,
+ 0.95
+ ],
+ "eps": 1e-08,
+ "weight_decay": 1e-10,
+ "amsgrad": false,
+ "foreach": null,
+ "maximize": false,
+ "capturable": false,
+ "differentiable": false,
+ "fused": null,
+ "initial_lr": 2.5e-05,
+ "params": [
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
+ 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
+ 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339,
+ 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359,
+ 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379,
+ 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399,
+ 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419,
+ 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
+ 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459,
+ 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479,
+ 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499,
+ 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519,
+ 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539,
+ 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559,
+ 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579,
+ 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599,
+ 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619,
+ 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639,
+ 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659,
+ 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679,
+ 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699,
+ 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719,
+ 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739,
+ 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759,
+ 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779,
+ 780, 781, 782
+ ]
+ }
+ ]
checkpoints/000200/training_state/optimizer_state.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fdd7946065073cfc15fcadfdb18a6c52823ace248e63f04898e07bb4a504de19
+ size 10896063516
checkpoints/000200/training_state/rng_state.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:184b214b2feea731bbad0502189e04546bee7137c89d7cf3edb66f6e065222ce
+ size 15708
checkpoints/000200/training_state/scheduler_state.json ADDED
@@ -0,0 +1,15 @@
+ {
+ "base_lrs": [
+ 2.5e-05
+ ],
+ "last_epoch": 200,
+ "verbose": false,
+ "_step_count": 201,
+ "_get_lr_called_within_step": false,
+ "_last_lr": [
+ 5.0199800199800195e-06
+ ],
+ "lr_lambdas": [
+ null
+ ]
+ }
checkpoints/000200/training_state/training_step.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "step": 200
+ }
checkpoints/000300/pretrained_model/config.json ADDED
@@ -0,0 +1,72 @@
+ {
+ "type": "pi0",
+ "n_obs_steps": 1,
+ "normalization_mapping": {
+ "VISUAL": "IDENTITY",
+ "STATE": "MEAN_STD",
+ "ACTION": "MEAN_STD"
+ },
+ "input_features": {
+ "observation.state": {
+ "type": "STATE",
+ "shape": [
+ 6
+ ]
+ },
+ "observation.images.gripper": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ },
+ "observation.images.webcam": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ }
+ },
+ "output_features": {
+ "action": {
+ "type": "ACTION",
+ "shape": [
+ 6
+ ]
+ }
+ },
+ "device": "cuda",
+ "use_amp": false,
+ "chunk_size": 50,
+ "n_action_steps": 50,
+ "max_state_dim": 32,
+ "max_action_dim": 32,
+ "resize_imgs_with_padding": [
+ 224,
+ 224
+ ],
+ "empty_cameras": 0,
+ "adapt_to_pi_aloha": false,
+ "use_delta_joint_actions_aloha": false,
+ "tokenizer_max_length": 48,
+ "proj_width": 1024,
+ "num_steps": 10,
+ "use_cache": true,
+ "attention_implementation": "eager",
+ "freeze_vision_encoder": true,
+ "train_expert_only": false,
+ "train_state_proj": true,
+ "optimizer_lr": 2.5e-05,
+ "optimizer_betas": [
+ 0.9,
+ 0.95
+ ],
+ "optimizer_eps": 1e-08,
+ "optimizer_weight_decay": 1e-10,
+ "scheduler_warmup_steps": 1000,
+ "scheduler_decay_steps": 30000,
+ "scheduler_decay_lr": 2.5e-06
+ }
checkpoints/000300/pretrained_model/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c9c8be964aa757b69ece2c4c67b5972e36d6b0bd1dacf0ee3af709c0baa71846
+ size 7536022544
checkpoints/000300/pretrained_model/train_config.json ADDED
@@ -0,0 +1,184 @@
+ {
+ "dataset": {
+ "repo_id": "maelic/hackathon7",
+ "root": null,
+ "episodes": null,
+ "image_transforms": {
+ "enable": false,
+ "max_num_transforms": 3,
+ "random_order": false,
+ "tfs": {
+ "brightness": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "brightness": [
+ 0.8,
+ 1.2
+ ]
+ }
+ },
+ "contrast": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "contrast": [
+ 0.8,
+ 1.2
+ ]
+ }
+ },
+ "saturation": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "saturation": [
+ 0.5,
+ 1.5
+ ]
+ }
+ },
+ "hue": {
+ "weight": 1.0,
+ "type": "ColorJitter",
+ "kwargs": {
+ "hue": [
+ -0.05,
+ 0.05
+ ]
+ }
+ },
+ "sharpness": {
+ "weight": 1.0,
+ "type": "SharpnessJitter",
+ "kwargs": {
+ "sharpness": [
+ 0.5,
+ 1.5
+ ]
+ }
+ }
+ }
+ },
+ "revision": null,
+ "use_imagenet_stats": true,
+ "video_backend": "torchcodec"
+ },
+ "env": null,
+ "policy": {
+ "type": "pi0",
+ "n_obs_steps": 1,
+ "normalization_mapping": {
+ "VISUAL": "IDENTITY",
+ "STATE": "MEAN_STD",
+ "ACTION": "MEAN_STD"
+ },
+ "input_features": {
+ "observation.state": {
+ "type": "STATE",
+ "shape": [
+ 6
+ ]
+ },
+ "observation.images.gripper": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ },
+ "observation.images.webcam": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ }
+ },
+ "output_features": {
+ "action": {
+ "type": "ACTION",
+ "shape": [
+ 6
+ ]
+ }
+ },
+ "device": "cuda",
+ "use_amp": false,
+ "chunk_size": 50,
+ "n_action_steps": 50,
+ "max_state_dim": 32,
+ "max_action_dim": 32,
+ "resize_imgs_with_padding": [
+ 224,
+ 224
+ ],
+ "empty_cameras": 0,
+ "adapt_to_pi_aloha": false,
+ "use_delta_joint_actions_aloha": false,
+ "tokenizer_max_length": 48,
+ "proj_width": 1024,
+ "num_steps": 10,
+ "use_cache": true,
+ "attention_implementation": "eager",
+ "freeze_vision_encoder": true,
+ "train_expert_only": false,
+ "train_state_proj": true,
+ "optimizer_lr": 2.5e-05,
+ "optimizer_betas": [
+ 0.9,
+ 0.95
+ ],
+ "optimizer_eps": 1e-08,
+ "optimizer_weight_decay": 1e-10,
+ "scheduler_warmup_steps": 1000,
+ "scheduler_decay_steps": 30000,
+ "scheduler_decay_lr": 2.5e-06
+ },
+ "output_dir": "/scratch/train/pi_shity_version",
+ "job_name": "pi0",
+ "resume": false,
+ "seed": 1000,
+ "num_workers": 4,
+ "batch_size": 20,
+ "steps": 100000,
+ "eval_freq": 100,
+ "log_freq": 200,
+ "save_checkpoint": true,
+ "save_freq": 100,
+ "use_policy_training_preset": true,
+ "optimizer": {
+ "type": "adamw",
+ "lr": 2.5e-05,
+ "weight_decay": 1e-10,
+ "grad_clip_norm": 10.0,
+ "betas": [
+ 0.9,
+ 0.95
+ ],
+ "eps": 1e-08
+ },
+ "scheduler": {
+ "type": "cosine_decay_with_warmup",
+ "num_warmup_steps": 1000,
+ "num_decay_steps": 30000,
+ "peak_lr": 2.5e-05,
+ "decay_lr": 2.5e-06
+ },
+ "eval": {
+ "n_episodes": 50,
+ "batch_size": 50,
+ "use_async_envs": false
+ },
+ "wandb": {
+ "enable": true,
+ "disable_artifact": false,
+ "project": "pi0_mistral_hackathon",
+ "entity": null,
+ "notes": null,
+ "run_id": "shity_version",
+ "mode": null
+ }
+ }
checkpoints/000300/training_state/optimizer_param_groups.json ADDED
@@ -0,0 +1,803 @@
+ [
+ {
+ "lr": 7.517482517482519e-06,
+ "betas": [
+ 0.9,
+ 0.95
+ ],
+ "eps": 1e-08,
+ "weight_decay": 1e-10,
+ "amsgrad": false,
+ "foreach": null,
+ "maximize": false,
+ "capturable": false,
+ "differentiable": false,
+ "fused": null,
+ "initial_lr": 2.5e-05,
+ "params": [
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
+ 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
+ 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339,
+ 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359,
+ 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379,
+ 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399,
+ 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419,
+ 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439,
+ 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459,
+ 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479,
+ 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499,
+ 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519,
+ 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539,
+ 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559,
+ 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579,
+ 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599,
+ 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619,
+ 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639,
+ 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659,
+ 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679,
+ 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699,
+ 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719,
+ 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739,
+ 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759,
+ 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779,
+ 780, 781, 782
+ ]
+ }
+ ]
checkpoints/000300/training_state/optimizer_state.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5771d887785d5a9a9efae7b05f9f98c8bc6d28424ca17ee67b661fbff4cea8e
+ size 10896063516
checkpoints/000300/training_state/rng_state.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5aaa23448022e37fd9f49f7682563e654bfc12e356201d47e8d5e8a07904a6b8
+ size 15708
checkpoints/000300/training_state/scheduler_state.json ADDED
@@ -0,0 +1,15 @@
+ {
+ "base_lrs": [
+ 2.5e-05
+ ],
+ "last_epoch": 300,
+ "verbose": false,
+ "_step_count": 301,
+ "_get_lr_called_within_step": false,
+ "_last_lr": [
+ 7.517482517482519e-06
+ ],
+ "lr_lambdas": [
+ null
+ ]
+ }
checkpoints/000300/training_state/training_step.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "step": 300
+ }
checkpoints/000400/pretrained_model/config.json ADDED
@@ -0,0 +1,72 @@
+ {
+ "type": "pi0",
+ "n_obs_steps": 1,
+ "normalization_mapping": {
+ "VISUAL": "IDENTITY",
+ "STATE": "MEAN_STD",
+ "ACTION": "MEAN_STD"
+ },
+ "input_features": {
+ "observation.state": {
+ "type": "STATE",
+ "shape": [
+ 6
+ ]
+ },
+ "observation.images.gripper": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ },
+ "observation.images.webcam": {
+ "type": "VISUAL",
+ "shape": [
+ 3,
+ 480,
+ 640
+ ]
+ }
+ },
+ "output_features": {
+ "action": {
+ "type": "ACTION",
+ "shape": [
+ 6
+ ]
+ }
+ },
+ "device": "cuda",
+ "use_amp": false,
+ "chunk_size": 50,
+ "n_action_steps": 50,
+ "max_state_dim": 32,
+ "max_action_dim": 32,
+ "resize_imgs_with_padding": [
+ 224,
+ 224
+ ],
+ "empty_cameras": 0,
+ "adapt_to_pi_aloha": false,
+ "use_delta_joint_actions_aloha": false,
+ "tokenizer_max_length": 48,
+ "proj_width": 1024,
+ "num_steps": 10,
+ "use_cache": true,
+ "attention_implementation": "eager",
+ "freeze_vision_encoder": true,
+ "train_expert_only": false,
+ "train_state_proj": true,
+ "optimizer_lr": 2.5e-05,
+ "optimizer_betas": [
+ 0.9,
+ 0.95
+ ],
+ "optimizer_eps": 1e-08,
+ "optimizer_weight_decay": 1e-10,
+ "scheduler_warmup_steps": 1000,
+ "scheduler_decay_steps": 30000,
+ "scheduler_decay_lr": 2.5e-06
+ }
checkpoints/000400/pretrained_model/model.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ce98db53ec22353c410f32ec1e332eb008aa222ab73292c4697b026927e55d44
3
+ size 7536022544
checkpoints/000400/pretrained_model/train_config.json ADDED
@@ -0,0 +1,184 @@
1
+ {
2
+ "dataset": {
3
+ "repo_id": "maelic/hackathon7",
4
+ "root": null,
5
+ "episodes": null,
6
+ "image_transforms": {
7
+ "enable": false,
8
+ "max_num_transforms": 3,
9
+ "random_order": false,
10
+ "tfs": {
11
+ "brightness": {
12
+ "weight": 1.0,
13
+ "type": "ColorJitter",
14
+ "kwargs": {
15
+ "brightness": [
16
+ 0.8,
17
+ 1.2
18
+ ]
19
+ }
20
+ },
21
+ "contrast": {
22
+ "weight": 1.0,
23
+ "type": "ColorJitter",
24
+ "kwargs": {
25
+ "contrast": [
26
+ 0.8,
27
+ 1.2
28
+ ]
29
+ }
30
+ },
31
+ "saturation": {
32
+ "weight": 1.0,
33
+ "type": "ColorJitter",
34
+ "kwargs": {
35
+ "saturation": [
36
+ 0.5,
37
+ 1.5
38
+ ]
39
+ }
40
+ },
41
+ "hue": {
42
+ "weight": 1.0,
43
+ "type": "ColorJitter",
44
+ "kwargs": {
45
+ "hue": [
46
+ -0.05,
47
+ 0.05
48
+ ]
49
+ }
50
+ },
51
+ "sharpness": {
52
+ "weight": 1.0,
53
+ "type": "SharpnessJitter",
54
+ "kwargs": {
55
+ "sharpness": [
56
+ 0.5,
57
+ 1.5
58
+ ]
59
+ }
60
+ }
61
+ }
62
+ },
63
+ "revision": null,
64
+ "use_imagenet_stats": true,
65
+ "video_backend": "torchcodec"
66
+ },
67
+ "env": null,
68
+ "policy": {
69
+ "type": "pi0",
70
+ "n_obs_steps": 1,
71
+ "normalization_mapping": {
72
+ "VISUAL": "IDENTITY",
73
+ "STATE": "MEAN_STD",
74
+ "ACTION": "MEAN_STD"
75
+ },
76
+ "input_features": {
77
+ "observation.state": {
78
+ "type": "STATE",
79
+ "shape": [
80
+ 6
81
+ ]
82
+ },
83
+ "observation.images.gripper": {
84
+ "type": "VISUAL",
85
+ "shape": [
86
+ 3,
87
+ 480,
88
+ 640
89
+ ]
90
+ },
91
+ "observation.images.webcam": {
92
+ "type": "VISUAL",
93
+ "shape": [
94
+ 3,
95
+ 480,
96
+ 640
97
+ ]
98
+ }
99
+ },
100
+ "output_features": {
101
+ "action": {
102
+ "type": "ACTION",
103
+ "shape": [
104
+ 6
105
+ ]
106
+ }
107
+ },
108
+ "device": "cuda",
109
+ "use_amp": false,
110
+ "chunk_size": 50,
111
+ "n_action_steps": 50,
112
+ "max_state_dim": 32,
113
+ "max_action_dim": 32,
114
+ "resize_imgs_with_padding": [
115
+ 224,
116
+ 224
117
+ ],
118
+ "empty_cameras": 0,
119
+ "adapt_to_pi_aloha": false,
120
+ "use_delta_joint_actions_aloha": false,
121
+ "tokenizer_max_length": 48,
122
+ "proj_width": 1024,
123
+ "num_steps": 10,
124
+ "use_cache": true,
125
+ "attention_implementation": "eager",
126
+ "freeze_vision_encoder": true,
127
+ "train_expert_only": false,
128
+ "train_state_proj": true,
129
+ "optimizer_lr": 2.5e-05,
130
+ "optimizer_betas": [
131
+ 0.9,
132
+ 0.95
133
+ ],
134
+ "optimizer_eps": 1e-08,
135
+ "optimizer_weight_decay": 1e-10,
136
+ "scheduler_warmup_steps": 1000,
137
+ "scheduler_decay_steps": 30000,
138
+ "scheduler_decay_lr": 2.5e-06
139
+ },
140
+ "output_dir": "/scratch/train/pi_shity_version",
141
+ "job_name": "pi0",
142
+ "resume": false,
143
+ "seed": 1000,
144
+ "num_workers": 4,
145
+ "batch_size": 20,
146
+ "steps": 100000,
147
+ "eval_freq": 100,
148
+ "log_freq": 200,
149
+ "save_checkpoint": true,
150
+ "save_freq": 100,
151
+ "use_policy_training_preset": true,
152
+ "optimizer": {
153
+ "type": "adamw",
154
+ "lr": 2.5e-05,
155
+ "weight_decay": 1e-10,
156
+ "grad_clip_norm": 10.0,
157
+ "betas": [
158
+ 0.9,
159
+ 0.95
160
+ ],
161
+ "eps": 1e-08
162
+ },
163
+ "scheduler": {
164
+ "type": "cosine_decay_with_warmup",
165
+ "num_warmup_steps": 1000,
166
+ "num_decay_steps": 30000,
167
+ "peak_lr": 2.5e-05,
168
+ "decay_lr": 2.5e-06
169
+ },
170
+ "eval": {
171
+ "n_episodes": 50,
172
+ "batch_size": 50,
173
+ "use_async_envs": false
174
+ },
175
+ "wandb": {
176
+ "enable": true,
177
+ "disable_artifact": false,
178
+ "project": "pi0_mistral_hackathon",
179
+ "entity": null,
180
+ "notes": null,
181
+ "run_id": "shity_version",
182
+ "mode": null
183
+ }
184
+ }
checkpoints/000400/training_state/optimizer_param_groups.json ADDED
@@ -0,0 +1,803 @@
1
+ [
2
+ {
3
+ "lr": 1.0014985014985015e-05,
4
+ "betas": [
5
+ 0.9,
6
+ 0.95
7
+ ],
8
+ "eps": 1e-08,
9
+ "weight_decay": 1e-10,
10
+ "amsgrad": false,
11
+ "foreach": null,
12
+ "maximize": false,
13
+ "capturable": false,
14
+ "differentiable": false,
15
+ "fused": null,
16
+ "initial_lr": 2.5e-05,
17
+ "params": [
18
+ 0,
19
+ 1,
20
+ 2,
21
+ 3,
22
+ 4,
23
+ 5,
24
+ 6,
25
+ 7,
26
+ 8,
27
+ 9,
28
+ 10,
29
+ 11,
30
+ 12,
31
+ 13,
32
+ 14,
33
+ 15,
34
+ 16,
35
+ 17,
36
+ 18,
37
+ 19,
38
+ 20,
39
+ 21,
40
+ 22,
41
+ 23,
42
+ 24,
43
+ 25,
44
+ 26,
45
+ 27,
46
+ 28,
47
+ 29,
48
+ 30,
49
+ 31,
50
+ 32,
51
+ 33,
52
+ 34,
53
+ 35,
54
+ 36,
55
+ 37,
56
+ 38,
57
+ 39,
58
+ 40,
59
+ 41,
60
+ 42,
61
+ 43,
62
+ 44,
63
+ 45,
64
+ 46,
65
+ 47,
66
+ 48,
67
+ 49,
68
+ 50,
69
+ 51,
70
+ 52,
71
+ 53,
72
+ 54,
73
+ 55,
74
+ 56,
75
+ 57,
76
+ 58,
77
+ 59,
78
+ 60,
79
+ 61,
80
+ 62,
81
+ 63,
82
+ 64,
83
+ 65,
84
+ 66,
85
+ 67,
86
+ 68,
87
+ 69,
88
+ 70,
89
+ 71,
90
+ 72,
91
+ 73,
92
+ 74,
93
+ 75,
94
+ 76,
95
+ 77,
96
+ 78,
97
+ 79,
98
+ 80,
99
+ 81,
100
+ 82,
101
+ 83,
102
+ 84,
103
+ 85,
104
+ 86,
105
+ 87,
106
+ 88,
107
+ 89,
108
+ 90,
109
+ 91,
110
+ 92,
111
+ 93,
112
+ 94,
113
+ 95,
114
+ 96,
115
+ 97,
116
+ 98,
117
+ 99,
118
+ 100,
119
+ 101,
120
+ 102,
121
+ 103,
122
+ 104,
123
+ 105,
124
+ 106,
125
+ 107,
126
+ 108,
127
+ 109,
128
+ 110,
129
+ 111,
130
+ 112,
131
+ 113,
132
+ 114,
133
+ 115,
134
+ 116,
135
+ 117,
136
+ 118,
137
+ 119,
138
+ 120,
139
+ 121,
140
+ 122,
141
+ 123,
142
+ 124,
143
+ 125,
144
+ 126,
145
+ 127,
146
+ 128,
147
+ 129,
148
+ 130,
149
+ 131,
150
+ 132,
151
+ 133,
152
+ 134,
153
+ 135,
154
+ 136,
155
+ 137,
156
+ 138,
157
+ 139,
158
+ 140,
159
+ 141,
160
+ 142,
161
+ 143,
162
+ 144,
163
+ 145,
164
+ 146,
165
+ 147,
166
+ 148,
167
+ 149,
168
+ 150,
169
+ 151,
170
+ 152,
171
+ 153,
172
+ 154,
173
+ 155,
174
+ 156,
175
+ 157,
176
+ 158,
177
+ 159,
178
+ 160,
179
+ 161,
180
+ 162,
181
+ 163,
182
+ 164,
183
+ 165,
184
+ 166,
185
+ 167,
186
+ 168,
187
+ 169,
188
+ 170,
189
+ 171,
190
+ 172,
191
+ 173,
192
+ 174,
193
+ 175,
194
+ 176,
195
+ 177,
196
+ 178,
197
+ 179,
198
+ 180,
199
+ 181,
200
+ 182,
201
+ 183,
202
+ 184,
203
+ 185,
204
+ 186,
205
+ 187,
206
+ 188,
207
+ 189,
208
+ 190,
209
+ 191,
210
+ 192,
211
+ 193,
212
+ 194,
213
+ 195,
214
+ 196,
215
+ 197,
216
+ 198,
217
+ 199,
218
+ 200,
219
+ 201,
220
+ 202,
221
+ 203,
222
+ 204,
223
+ 205,
224
+ 206,
225
+ 207,
226
+ 208,
227
+ 209,
228
+ 210,
229
+ 211,
230
+ 212,
231
+ 213,
232
+ 214,
233
+ 215,
234
+ 216,
235
+ 217,
236
+ 218,
237
+ 219,
238
+ 220,
239
+ 221,
240
+ 222,
241
+ 223,
242
+ 224,
243
+ 225,
244
+ 226,
245
+ 227,
246
+ 228,
247
+ 229,
248
+ 230,
249
+ 231,
250
+ 232,
251
+ 233,
252
+ 234,
253
+ 235,
254
+ 236,
255
+ 237,
256
+ 238,
257
+ 239,
258
+ 240,
259
+ 241,
260
+ 242,
261
+ 243,
262
+ 244,
263
+ 245,
264
+ 246,
265
+ 247,
266
+ 248,
267
+ 249,
268
+ 250,
269
+ 251,
270
+ 252,
271
+ 253,
272
+ 254,
273
+ 255,
274
+ 256,
275
+ 257,
276
+ 258,
277
+ 259,
278
+ 260,
279
+ 261,
280
+ 262,
281
+ 263,
282
+ 264,
283
+ 265,
284
+ 266,
285
+ 267,
286
+ 268,
287
+ 269,
288
+ 270,
289
+ 271,
290
+ 272,
291
+ 273,
292
+ 274,
293
+ 275,
294
+ 276,
295
+ 277,
296
+ 278,
297
+ 279,
298
+ 280,
299
+ 281,
300
+ 282,
301
+ 283,
302
+ 284,
303
+ 285,
304
+ 286,
305
+ 287,
306
+ 288,
307
+ 289,
308
+ 290,
309
+ 291,
310
+ 292,
311
+ 293,
312
+ 294,
313
+ 295,
314
+ 296,
315
+ 297,
316
+ 298,
317
+ 299,
318
+ 300,
319
+ 301,
320
+ 302,
321
+ 303,
322
+ 304,
323
+ 305,
324
+ 306,
325
+ 307,
326
+ 308,
327
+ 309,
328
+ 310,
329
+ 311,
330
+ 312,
331
+ 313,
332
+ 314,
333
+ 315,
334
+ 316,
335
+ 317,
336
+ 318,
337
+ 319,
338
+ 320,
339
+ 321,
340
+ 322,
341
+ 323,
342
+ 324,
343
+ 325,
344
+ 326,
345
+ 327,
346
+ 328,
347
+ 329,
348
+ 330,
349
+ 331,
350
+ 332,
351
+ 333,
352
+ 334,
353
+ 335,
354
+ 336,
355
+ 337,
356
+ 338,
357
+ 339,
358
+ 340,
359
+ 341,
360
+ 342,
361
+ 343,
362
+ 344,
363
+ 345,
364
+ 346,
365
+ 347,
366
+ 348,
367
+ 349,
368
+ 350,
369
+ 351,
370
+ 352,
371
+ 353,
372
+ 354,
373
+ 355,
374
+ 356,
375
+ 357,
376
+ 358,
377
+ 359,
378
+ 360,
379
+ 361,
380
+ 362,
381
+ 363,
382
+ 364,
383
+ 365,
384
+ 366,
385
+ 367,
386
+ 368,
387
+ 369,
388
+ 370,
389
+ 371,
390
+ 372,
391
+ 373,
392
+ 374,
393
+ 375,
394
+ 376,
395
+ 377,
396
+ 378,
397
+ 379,
398
+ 380,
399
+ 381,
400
+ 382,
401
+ 383,
402
+ 384,
403
+ 385,
404
+ 386,
405
+ 387,
406
+ 388,
407
+ 389,
408
+ 390,
409
+ 391,
410
+ 392,
411
+ 393,
412
+ 394,
413
+ 395,
414
+ 396,
415
+ 397,
416
+ 398,
417
+ 399,
418
+ 400,
419
+ 401,
420
+ 402,
421
+ 403,
422
+ 404,
423
+ 405,
424
+ 406,
425
+ 407,
426
+ 408,
427
+ 409,
428
+ 410,
429
+ 411,
430
+ 412,
431
+ 413,
432
+ 414,
433
+ 415,
434
+ 416,
435
+ 417,
436
+ 418,
437
+ 419,
438
+ 420,
439
+ 421,
440
+ 422,
441
+ 423,
442
+ 424,
443
+ 425,
444
+ 426,
445
+ 427,
446
+ 428,
447
+ 429,
448
+ 430,
449
+ 431,
450
+ 432,
451
+ 433,
452
+ 434,
453
+ 435,
454
+ 436,
455
+ 437,
456
+ 438,
457
+ 439,
458
+ 440,
459
+ 441,
460
+ 442,
461
+ 443,
462
+ 444,
463
+ 445,
464
+ 446,
465
+ 447,
466
+ 448,
467
+ 449,
468
+ 450,
469
+ 451,
470
+ 452,
471
+ 453,
472
+ 454,
473
+ 455,
474
+ 456,
475
+ 457,
476
+ 458,
477
+ 459,
478
+ 460,
479
+ 461,
480
+ 462,
481
+ 463,
482
+ 464,
483
+ 465,
484
+ 466,
485
+ 467,
486
+ 468,
487
+ 469,
488
+ 470,
489
+ 471,
490
+ 472,
491
+ 473,
492
+ 474,
493
+ 475,
494
+ 476,
495
+ 477,
496
+ 478,
497
+ 479,
498
+ 480,
499
+ 481,
500
+ 482,
501
+ 483,
502
+ 484,
503
+ 485,
504
+ 486,
505
+ 487,
506
+ 488,
507
+ 489,
508
+ 490,
509
+ 491,
510
+ 492,
511
+ 493,
512
+ 494,
513
+ 495,
514
+ 496,
515
+ 497,
516
+ 498,
517
+ 499,
518
+ 500,
519
+ 501,
520
+ 502,
521
+ 503,
522
+ 504,
523
+ 505,
524
+ 506,
525
+ 507,
526
+ 508,
527
+ 509,
528
+ 510,
529
+ 511,
530
+ 512,
531
+ 513,
532
+ 514,
533
+ 515,
534
+ 516,
535
+ 517,
536
+ 518,
537
+ 519,
538
+ 520,
539
+ 521,
540
+ 522,
541
+ 523,
542
+ 524,
543
+ 525,
544
+ 526,
545
+ 527,
546
+ 528,
547
+ 529,
548
+ 530,
549
+ 531,
550
+ 532,
551
+ 533,
552
+ 534,
553
+ 535,
554
+ 536,
555
+ 537,
556
+ 538,
557
+ 539,
558
+ 540,
559
+ 541,
560
+ 542,
561
+ 543,
562
+ 544,
563
+ 545,
564
+ 546,
565
+ 547,
566
+ 548,
567
+ 549,
568
+ 550,
569
+ 551,
570
+ 552,
571
+ 553,
572
+ 554,
573
+ 555,
574
+ 556,
575
+ 557,
576
+ 558,
577
+ 559,
578
+ 560,
579
+ 561,
580
+ 562,
581
+ 563,
582
+ 564,
583
+ 565,
584
+ 566,
585
+ 567,
586
+ 568,
587
+ 569,
588
+ 570,
589
+ 571,
590
+ 572,
591
+ 573,
592
+ 574,
593
+ 575,
594
+ 576,
595
+ 577,
596
+ 578,
597
+ 579,
598
+ 580,
599
+ 581,
600
+ 582,
601
+ 583,
602
+ 584,
603
+ 585,
604
+ 586,
605
+ 587,
606
+ 588,
607
+ 589,
608
+ 590,
609
+ 591,
610
+ 592,
611
+ 593,
612
+ 594,
613
+ 595,
614
+ 596,
615
+ 597,
616
+ 598,
617
+ 599,
618
+ 600,
619
+ 601,
620
+ 602,
621
+ 603,
622
+ 604,
623
+ 605,
624
+ 606,
625
+ 607,
626
+ 608,
627
+ 609,
628
+ 610,
629
+ 611,
630
+ 612,
631
+ 613,
632
+ 614,
633
+ 615,
634
+ 616,
635
+ 617,
636
+ 618,
637
+ 619,
638
+ 620,
639
+ 621,
640
+ 622,
641
+ 623,
642
+ 624,
643
+ 625,
644
+ 626,
645
+ 627,
646
+ 628,
647
+ 629,
648
+ 630,
649
+ 631,
650
+ 632,
651
+ 633,
652
+ 634,
653
+ 635,
654
+ 636,
655
+ 637,
656
+ 638,
657
+ 639,
658
+ 640,
659
+ 641,
660
+ 642,
661
+ 643,
662
+ 644,
663
+ 645,
664
+ 646,
665
+ 647,
666
+ 648,
667
+ 649,
668
+ 650,
669
+ 651,
670
+ 652,
671
+ 653,
672
+ 654,
673
+ 655,
674
+ 656,
675
+ 657,
676
+ 658,
677
+ 659,
678
+ 660,
679
+ 661,
680
+ 662,
681
+ 663,
682
+ 664,
683
+ 665,
684
+ 666,
685
+ 667,
686
+ 668,
687
+ 669,
688
+ 670,
689
+ 671,
690
+ 672,
691
+ 673,
692
+ 674,
693
+ 675,
694
+ 676,
695
+ 677,
696
+ 678,
697
+ 679,
698
+ 680,
699
+ 681,
700
+ 682,
701
+ 683,
702
+ 684,
703
+ 685,
704
+ 686,
705
+ 687,
706
+ 688,
707
+ 689,
708
+ 690,
709
+ 691,
710
+ 692,
711
+ 693,
712
+ 694,
713
+ 695,
714
+ 696,
715
+ 697,
716
+ 698,
717
+ 699,
718
+ 700,
719
+ 701,
720
+ 702,
721
+ 703,
722
+ 704,
723
+ 705,
724
+ 706,
725
+ 707,
726
+ 708,
727
+ 709,
728
+ 710,
729
+ 711,
730
+ 712,
731
+ 713,
732
+ 714,
733
+ 715,
734
+ 716,
735
+ 717,
736
+ 718,
737
+ 719,
738
+ 720,
739
+ 721,
740
+ 722,
741
+ 723,
742
+ 724,
743
+ 725,
744
+ 726,
745
+ 727,
746
+ 728,
747
+ 729,
748
+ 730,
749
+ 731,
750
+ 732,
751
+ 733,
752
+ 734,
753
+ 735,
754
+ 736,
755
+ 737,
756
+ 738,
757
+ 739,
758
+ 740,
759
+ 741,
760
+ 742,
761
+ 743,
762
+ 744,
763
+ 745,
764
+ 746,
765
+ 747,
766
+ 748,
767
+ 749,
768
+ 750,
769
+ 751,
770
+ 752,
771
+ 753,
772
+ 754,
773
+ 755,
774
+ 756,
775
+ 757,
776
+ 758,
777
+ 759,
778
+ 760,
779
+ 761,
780
+ 762,
781
+ 763,
782
+ 764,
783
+ 765,
784
+ 766,
785
+ 767,
786
+ 768,
787
+ 769,
788
+ 770,
789
+ 771,
790
+ 772,
791
+ 773,
792
+ 774,
793
+ 775,
794
+ 776,
795
+ 777,
796
+ 778,
797
+ 779,
798
+ 780,
799
+ 781,
800
+ 782
801
+ ]
802
+ }
803
+ ]
checkpoints/000400/training_state/optimizer_state.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:53cd76132357b389f0deb59640ffcdd70879e9c82bfb0de11f2669eaa5f68993
3
+ size 10896063516
checkpoints/000400/training_state/rng_state.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b56e833e6eea05811578c3ab02d4aa80f853a9a2aa2921cd1638912496d0193e
3
+ size 15708
checkpoints/000400/training_state/scheduler_state.json ADDED
@@ -0,0 +1,15 @@
1
+ {
2
+ "base_lrs": [
3
+ 2.5e-05
4
+ ],
5
+ "last_epoch": 400,
6
+ "verbose": false,
7
+ "_step_count": 401,
8
+ "_get_lr_called_within_step": false,
9
+ "_last_lr": [
10
+ 1.0014985014985015e-05
11
+ ],
12
+ "lr_lambdas": [
13
+ null
14
+ ]
15
+ }
checkpoints/000400/training_state/training_step.json ADDED
@@ -0,0 +1,3 @@
1
+ {
2
+ "step": 400
3
+ }
checkpoints/000500/pretrained_model/config.json ADDED
@@ -0,0 +1,72 @@
1
+ {
2
+ "type": "pi0",
3
+ "n_obs_steps": 1,
4
+ "normalization_mapping": {
5
+ "VISUAL": "IDENTITY",
6
+ "STATE": "MEAN_STD",
7
+ "ACTION": "MEAN_STD"
8
+ },
9
+ "input_features": {
10
+ "observation.state": {
11
+ "type": "STATE",
12
+ "shape": [
13
+ 6
14
+ ]
15
+ },
16
+ "observation.images.gripper": {
17
+ "type": "VISUAL",
18
+ "shape": [
19
+ 3,
20
+ 480,
21
+ 640
22
+ ]
23
+ },
24
+ "observation.images.webcam": {
25
+ "type": "VISUAL",
26
+ "shape": [
27
+ 3,
28
+ 480,
29
+ 640
30
+ ]
31
+ }
32
+ },
33
+ "output_features": {
34
+ "action": {
35
+ "type": "ACTION",
36
+ "shape": [
37
+ 6
38
+ ]
39
+ }
40
+ },
41
+ "device": "cuda",
42
+ "use_amp": false,
43
+ "chunk_size": 50,
44
+ "n_action_steps": 50,
45
+ "max_state_dim": 32,
46
+ "max_action_dim": 32,
47
+ "resize_imgs_with_padding": [
48
+ 224,
49
+ 224
50
+ ],
51
+ "empty_cameras": 0,
52
+ "adapt_to_pi_aloha": false,
53
+ "use_delta_joint_actions_aloha": false,
54
+ "tokenizer_max_length": 48,
55
+ "proj_width": 1024,
56
+ "num_steps": 10,
57
+ "use_cache": true,
58
+ "attention_implementation": "eager",
59
+ "freeze_vision_encoder": true,
60
+ "train_expert_only": false,
61
+ "train_state_proj": true,
62
+ "optimizer_lr": 2.5e-05,
63
+ "optimizer_betas": [
64
+ 0.9,
65
+ 0.95
66
+ ],
67
+ "optimizer_eps": 1e-08,
68
+ "optimizer_weight_decay": 1e-10,
69
+ "scheduler_warmup_steps": 1000,
70
+ "scheduler_decay_steps": 30000,
71
+ "scheduler_decay_lr": 2.5e-06
72
+ }
checkpoints/000500/pretrained_model/model.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5234e087c1cb0af3d9e7960a6766315f21d0344a5f7212e5aaaa5e7684aa8c8a
3
+ size 7536022544
checkpoints/000500/pretrained_model/train_config.json ADDED
@@ -0,0 +1,184 @@
1
+ {
2
+ "dataset": {
3
+ "repo_id": "maelic/hackathon7",
4
+ "root": null,
5
+ "episodes": null,
6
+ "image_transforms": {
7
+ "enable": false,
8
+ "max_num_transforms": 3,
9
+ "random_order": false,
10
+ "tfs": {
11
+ "brightness": {
12
+ "weight": 1.0,
13
+ "type": "ColorJitter",
14
+ "kwargs": {
15
+ "brightness": [
16
+ 0.8,
17
+ 1.2
18
+ ]
19
+ }
20
+ },
21
+ "contrast": {
22
+ "weight": 1.0,
23
+ "type": "ColorJitter",
24
+ "kwargs": {
25
+ "contrast": [
26
+ 0.8,
27
+ 1.2
28
+ ]
29
+ }
30
+ },
31
+ "saturation": {
32
+ "weight": 1.0,
33
+ "type": "ColorJitter",
34
+ "kwargs": {
35
+ "saturation": [
36
+ 0.5,
37
+ 1.5
38
+ ]
39
+ }
40
+ },
41
+ "hue": {
42
+ "weight": 1.0,
43
+ "type": "ColorJitter",
44
+ "kwargs": {
45
+ "hue": [
46
+ -0.05,
47
+ 0.05
48
+ ]
49
+ }
50
+ },
51
+ "sharpness": {
52
+ "weight": 1.0,
53
+ "type": "SharpnessJitter",
54
+ "kwargs": {
55
+ "sharpness": [
56
+ 0.5,
57
+ 1.5
58
+ ]
59
+ }
60
+ }
61
+ }
62
+ },
63
+ "revision": null,
64
+ "use_imagenet_stats": true,
65
+ "video_backend": "torchcodec"
66
+ },
67
+ "env": null,
68
+ "policy": {
69
+ "type": "pi0",
70
+ "n_obs_steps": 1,
71
+ "normalization_mapping": {
72
+ "VISUAL": "IDENTITY",
73
+ "STATE": "MEAN_STD",
74
+ "ACTION": "MEAN_STD"
75
+ },
76
+ "input_features": {
77
+ "observation.state": {
78
+ "type": "STATE",
79
+ "shape": [
80
+ 6
81
+ ]
82
+ },
83
+ "observation.images.gripper": {
84
+ "type": "VISUAL",
85
+ "shape": [
86
+ 3,
87
+ 480,
88
+ 640
89
+ ]
90
+ },
91
+ "observation.images.webcam": {
92
+ "type": "VISUAL",
93
+ "shape": [
94
+ 3,
95
+ 480,
96
+ 640
97
+ ]
98
+ }
99
+ },
100
+ "output_features": {
101
+ "action": {
102
+ "type": "ACTION",
103
+ "shape": [
104
+ 6
105
+ ]
106
+ }
107
+ },
108
+ "device": "cuda",
109
+ "use_amp": false,
110
+ "chunk_size": 50,
111
+ "n_action_steps": 50,
112
+ "max_state_dim": 32,
113
+ "max_action_dim": 32,
114
+ "resize_imgs_with_padding": [
115
+ 224,
116
+ 224
117
+ ],
118
+ "empty_cameras": 0,
119
+ "adapt_to_pi_aloha": false,
120
+ "use_delta_joint_actions_aloha": false,
121
+ "tokenizer_max_length": 48,
122
+ "proj_width": 1024,
123
+ "num_steps": 10,
124
+ "use_cache": true,
125
+ "attention_implementation": "eager",
126
+ "freeze_vision_encoder": true,
127
+ "train_expert_only": false,
128
+ "train_state_proj": true,
129
+ "optimizer_lr": 2.5e-05,
130
+ "optimizer_betas": [
131
+ 0.9,
132
+ 0.95
133
+ ],
134
+ "optimizer_eps": 1e-08,
135
+ "optimizer_weight_decay": 1e-10,
136
+ "scheduler_warmup_steps": 1000,
137
+ "scheduler_decay_steps": 30000,
138
+ "scheduler_decay_lr": 2.5e-06
139
+ },
140
+ "output_dir": "/scratch/train/pi_shity_version",
141
+ "job_name": "pi0",
142
+ "resume": false,
143
+ "seed": 1000,
144
+ "num_workers": 4,
145
+ "batch_size": 20,
146
+ "steps": 100000,
147
+ "eval_freq": 100,
148
+ "log_freq": 200,
149
+ "save_checkpoint": true,
150
+ "save_freq": 100,
151
+ "use_policy_training_preset": true,
152
+ "optimizer": {
153
+ "type": "adamw",
154
+ "lr": 2.5e-05,
155
+ "weight_decay": 1e-10,
156
+ "grad_clip_norm": 10.0,
157
+ "betas": [
158
+ 0.9,
159
+ 0.95
160
+ ],
161
+ "eps": 1e-08
162
+ },
163
+ "scheduler": {
164
+ "type": "cosine_decay_with_warmup",
165
+ "num_warmup_steps": 1000,
166
+ "num_decay_steps": 30000,
167
+ "peak_lr": 2.5e-05,
168
+ "decay_lr": 2.5e-06
169
+ },
170
+ "eval": {
171
+ "n_episodes": 50,
172
+ "batch_size": 50,
173
+ "use_async_envs": false
174
+ },
175
+ "wandb": {
176
+ "enable": true,
177
+ "disable_artifact": false,
178
+ "project": "pi0_mistral_hackathon",
179
+ "entity": null,
180
+ "notes": null,
181
+ "run_id": "shity_version",
182
+ "mode": null
183
+ }
184
+ }
checkpoints/000500/training_state/optimizer_param_groups.json ADDED
@@ -0,0 +1,803 @@
1
+ [
2
+ {
3
+ "lr": 1.2512487512487514e-05,
4
+ "betas": [
5
+ 0.9,
6
+ 0.95
7
+ ],
8
+ "eps": 1e-08,
9
+ "weight_decay": 1e-10,
10
+ "amsgrad": false,
11
+ "foreach": null,
12
+ "maximize": false,
13
+ "capturable": false,
14
+ "differentiable": false,
15
+ "fused": null,
16
+ "initial_lr": 2.5e-05,
17
+ "params": [
18
+ 0,
19
+ 1,
20
+ 2,
21
+ 3,
22
+ 4,
23
+ 5,
24
+ 6,
25
+ 7,
26
+ 8,
27
+ 9,
28
+ 10,
29
+ 11,
30
+ 12,
31
+ 13,
32
+ 14,
33
+ 15,
34
+ 16,
35
+ 17,
36
+ 18,
37
+ 19,
38
+ 20,
39
+ 21,
40
+ 22,
41
+ 23,
42
+ 24,
43
+ 25,
44
+ 26,
45
+ 27,
46
+ 28,
47
+ 29,
48
+ 30,
49
+ 31,
50
+ 32,
51
+ 33,
52
+ 34,
53
+ 35,
54
+ 36,
55
+ 37,
56
+ 38,
57
+ 39,
58
+ 40,
59
+ 41,
60
+ 42,
61
+ 43,
62
+ 44,
63
+ 45,
64
+ 46,
65
+ 47,
66
+ 48,
67
+ 49,
68
+ 50,
69
+ 51,
70
+ 52,
71
+ 53,
72
+ 54,
73
+ 55,
74
+ 56,
75
+ 57,
76
+ 58,
77
+ 59,
78
+ 60,
79
+ 61,
80
+ 62,
81
+ 63,
82
+ 64,
83
+ 65,
84
+ 66,
85
+ 67,
86
+ 68,
87
+ 69,
88
+ 70,
89
+ 71,
90
+ 72,
91
+ 73,
92
+ 74,
93
+ 75,
94
+ 76,
95
+ 77,
96
+ 78,
97
+ 79,
98
+ 80,
99
+ 81,
100
+ 82,
101
+ 83,
102
+ 84,
103
+ 85,
104
+ 86,
105
+ 87,
106
+ 88,
107
+ 89,
108
+ 90,
109
+ 91,
110
+ 92,
111
+ 93,
112
+ 94,
113
+ 95,
114
+ 96,
115
+ 97,
116
+ 98,
117
+ 99,
118
+ 100,
119
+ 101,
120
+ 102,
121
+ 103,
122
+ 104,
123
+ 105,
124
+ 106,
125
+ 107,
126
+ 108,
127
+ 109,
128
+ 110,
129
+ 111,
130
+ 112,
131
+ 113,
132
+ 114,
133
+ 115,
134
+ 116,
135
+ 117,
136
+ 118,
137
+ 119,
138
+ 120,
139
+ 121,
140
+ 122,
141
+ 123,
142
+ 124,
143
+ 125,
144
+ 126,
145
+ 127,
146
+ 128,
147
+ 129,
148
+ 130,
149
+ 131,
150
+ 132,
151
+ 133,
152
+ 134,
153
+ 135,
154
+ 136,
155
+ 137,
156
+ 138,
157
+ 139,
158
+ 140,
159
+ 141,
160
+ 142,
161
+ 143,
162
+ 144,
163
+ 145,
164
+ 146,
165
+ 147,
166
+ 148,
167
+ 149,
168
+ 150,
169
+ 151,
170
+ 152,
171
+ 153,
172
+ 154,
173
+ 155,
174
+ 156,
175
+ 157,
176
+ 158,
177
+ 159,
178
+ 160,
179
+ 161,
180
+ 162,
181
+ 163,
182
+ 164,
183
+ 165,
184
+ 166,
185
+ 167,
186
+ 168,
187
+ 169,
188
+ 170,
189
+ 171,
190
+ 172,
191
+ 173,
192
+ 174,
193
+ 175,
194
+ 176,
195
+ 177,
196
+ 178,
197
+ 179,
198
+ 180,
199
+ 181,
200
+ 182,
201
+ 183,
202
+ 184,
203
+ 185,
204
+ 186,
205
+ 187,
206
+ 188,
207
+ 189,
208
+ 190,
209
+ 191,
210
+ 192,
211
+ 193,
212
+ 194,
213
+ 195,
214
+ 196,
215
+ 197,
216
+ 198,
217
+ 199,
218
+ 200,
219
+ 201,
220
+ 202,
221
+ 203,
222
+ 204,
223
+ 205,
224
+ 206,
225
+ 207,
226
+ 208,
227
+ 209,
228
+ 210,
229
+ 211,
230
+ 212,
231
+ 213,
232
+ 214,
233
+ 215,
234
+ 216,
235
+ 217,
236
+ 218,
237
+ 219,
238
+ 220,
239
+ 221,
240
+ 222,
241
+ 223,
242
+ 224,
243
+ 225,
244
+ 226,
245
+ 227,
246
+ 228,
247
+ 229,
248
+ 230,
249
+ 231,
250
+ 232,
251
+ 233,
252
+ 234,
253
+ 235,
254
+ 236,
255
+ 237,
256
+ 238,
257
+ 239,
258
+ 240,
259
+ 241,
260
+ 242,
261
+ 243,
262
+ 244,
263
+ 245,
264
+ 246,
265
+ 247,
266
+ 248,
267
+ 249,
268
+ 250,
269
+ 251,
270
+ 252,
271
+ 253,
272
+ 254,
273
+ 255,
274
+ 256,
275
+ 257,
276
+ 258,
277
+ 259,
278
+ 260,
279
+ 261,
280
+ 262,
281
+ 263,
282
+ 264,
283
+ 265,
284
+ 266,
285
+ 267,
286
+ 268,
287
+ 269,
288
+ 270,
289
+ 271,
290
+ 272,
291
+ 273,
292
+ 274,
293
+ 275,
294
+ 276,
295
+ 277,
296
+ 278,
297
+ 279,
298
+ 280,
299
+ 281,
300
+ 282,
301
+ 283,
302
+ 284,
303
+ 285,
304
+ 286,
305
+ 287,
306
+ 288,
307
+ 289,
308
+ 290,
309
+ 291,
310
+ 292,
311
+ 293,
312
+ 294,
313
+ 295,
314
+ 296,
315
+ 297,
316
+ 298,
317
+ 299,
318
+ 300,
319
+ 301,
320
+ 302,
321
+ 303,
322
+ 304,
323
+ 305,
324
+ 306,
325
+ 307,
326
+ 308,
327
+ 309,
328
+ 310,
329
+ 311,
330
+ 312,
331
+ 313,
332
+ 314,
333
+ 315,
334
+ 316,
335
+ 317,
336
+ 318,
337
+ 319,
338
+ 320,
339
+ 321,
340
+ 322,
341
+ 323,
342
+ 324,
343
+ 325,
344
+ 326,
345
+ 327,
346
+ 328,
347
+ 329,
348
+ 330,
349
+ 331,
350
+ 332,
351
+ 333,
352
+ 334,
353
+ 335,
354
+ 336,
355
+ 337,
356
+ 338,
357
+ 339,
358
+ 340,
359
+ 341,
360
+ 342,
361
+ 343,
362
+ 344,
363
+ 345,
364
+ 346,
365
+ 347,
366
+ 348,
367
+ 349,
368
+ 350,
369
+ 351,
370
+ 352,
371
+ 353,
372
+ 354,
373
+ 355,
374
+ 356,
375
+ 357,
376
+ 358,
377
+ 359,
378
+ 360,
379
+ 361,
380
+ 362,
381
+ 363,
382
+ 364,
383
+ 365,
384
+ 366,
385
+ 367,
386
+ 368,
387
+ 369,
388
+ 370,
389
+ 371,
390
+ 372,
391
+ 373,
392
+ 374,
393
+ 375,
394
+ 376,
395
+ 377,
396
+ 378,
397
+ 379,
398
+ 380,
399
+ 381,
400
+ 382,
401
+ 383,
402
+ 384,
403
+ 385,
404
+ 386,
405
+ 387,
406
+ 388,
407
+ 389,
408
+ 390,
409
+ 391,
410
+ 392,
411
+ 393,
412
+ 394,
413
+ 395,
414
+ 396,
415
+ 397,
416
+ 398,
417
+ 399,
418
+ 400,
419
+ 401,
420
+ 402,
421
+ 403,
422
+ 404,
423
+ 405,
424
+ 406,
425
+ 407,
426
+ 408,
427
+ 409,
428
+ 410,
429
+ 411,
430
+ 412,
431
+ 413,
432
+ 414,
433
+ 415,
434
+ 416,
435
+ 417,
436
+ 418,
437
+ 419,
438
+ 420,
439
+ 421,
440
+ 422,
441
+ 423,
442
+ 424,
443
+ 425,
444
+ 426,
445
+ 427,
446
+ 428,
447
+ 429,
448
+ 430,
449
+ 431,
450
+ 432,
451
+ 433,
452
+ 434,
453
+ 435,
454
+ 436,
455
+ 437,
456
+ 438,
457
+ 439,
458
+ 440,
459
+ 441,
460
+ 442,
461
+ 443,
462
+ 444,
463
+ 445,
464
+ 446,
465
+ 447,
466
+ 448,
467
+ 449,
468
+ 450,
469
+ 451,
470
+ 452,
471
+ 453,
472
+ 454,
473
+ 455,
474
+ 456,
475
+ 457,
476
+ 458,
477
+ 459,
478
+ 460,
479
+ 461,
480
+ 462,
481
+ 463,
482
+ 464,
483
+ 465,
484
+ 466,
485
+ 467,
486
+ 468,
487
+ 469,
488
+ 470,
489
+ 471,
490
+ 472,
491
+ 473,
492
+ 474,
493
+ 475,
494
+ 476,
495
+ 477,
496
+ 478,
497
+ 479,
498
+ 480,
499
+ 481,
500
+ 482,
501
+ 483,
502
+ 484,
503
+ 485,
504
+ 486,
505
+ 487,
506
+ 488,
507
+ 489,
508
+ 490,
509
+ 491,
510
+ 492,
511
+ 493,
512
+ 494,
513
+ 495,
514
+ 496,
515
+ 497,
516
+ 498,
517
+ 499,
518
+ 500,
519
+ 501,
520
+ 502,
521
+ 503,
522
+ 504,
523
+ 505,
524
+ 506,
525
+ 507,
526
+ 508,
527
+ 509,
528
+ 510,
529
+ 511,
530
+ 512,
531
+ 513,
532
+ 514,
533
+ 515,
534
+ 516,
535
+ 517,
536
+ 518,
537
+ 519,
538
+ 520,
539
+ 521,
540
+ 522,
541
+ 523,
542
+ 524,
543
+ 525,
544
+ 526,
545
+ 527,
546
+ 528,
547
+ 529,
548
+ 530,
549
+ 531,
550
+ 532,
551
+ 533,
552
+ 534,
553
+ 535,
554
+ 536,
555
+ 537,
556
+ 538,
557
+ 539,
558
+ 540,
559
+ 541,
560
+ 542,
561
+ 543,
562
+ 544,
563
+ 545,
564
+ 546,
565
+ 547,
566
+ 548,
567
+ 549,
568
+ 550,
569
+ 551,
570
+ 552,
571
+ 553,
572
+ 554,
573
+ 555,
574
+ 556,
575
+ 557,
576
+ 558,
577
+ 559,
578
+ 560,
579
+ 561,
580
+ 562,
581
+ 563,
582
+ 564,
583
+ 565,
584
+ 566,
585
+ 567,
586
+ 568,
587
+ 569,
588
+ 570,
589
+ 571,
590
+ 572,
591
+ 573,
592
+ 574,
593
+ 575,
594
+ 576,
595
+ 577,
596
+ 578,
597
+ 579,
598
+ 580,
599
+ 581,
600
+ 582,
601
+ 583,
602
+ 584,
603
+ 585,
604
+ 586,
605
+ 587,
606
+ 588,
607
+ 589,
608
+ 590,
609
+ 591,
610
+ 592,
611
+ 593,
612
+ 594,
613
+ 595,
614
+ 596,
615
+ 597,
616
+ 598,
617
+ 599,
618
+ 600,
619
+ 601,
620
+ 602,
621
+ 603,
622
+ 604,
623
+ 605,
624
+ 606,
625
+ 607,
626
+ 608,
627
+ 609,
628
+ 610,
629
+ 611,
630
+ 612,
631
+ 613,
632
+ 614,
633
+ 615,
634
+ 616,
635
+ 617,
636
+ 618,
637
+ 619,
638
+ 620,
639
+ 621,
640
+ 622,
641
+ 623,
642
+ 624,
643
+ 625,
644
+ 626,
645
+ 627,
646
+ 628,
647
+ 629,
648
+ 630,
649
+ 631,
650
+ 632,
651
+ 633,
652
+ 634,
653
+ 635,
654
+ 636,
655
+ 637,
656
+ 638,
657
+ 639,
658
+ 640,
659
+ 641,
660
+ 642,
661
+ 643,
662
+ 644,
663
+ 645,
664
+ 646,
665
+ 647,
666
+ 648,
667
+ 649,
668
+ 650,
669
+ 651,
670
+ 652,
671
+ 653,
672
+ 654,
673
+ 655,
674
+ 656,
675
+ 657,
676
+ 658,
677
+ 659,
678
+ 660,
679
+ 661,
680
+ 662,
681
+ 663,
682
+ 664,
683
+ 665,
684
+ 666,
685
+ 667,
686
+ 668,
687
+ 669,
688
+ 670,
689
+ 671,
690
+ 672,
691
+ 673,
692
+ 674,
693
+ 675,
694
+ 676,
695
+ 677,
696
+ 678,
697
+ 679,
698
+ 680,
699
+ 681,
700
+ 682,
701
+ 683,
702
+ 684,
703
+ 685,
704
+ 686,
705
+ 687,
706
+ 688,
707
+ 689,
708
+ 690,
709
+ 691,
710
+ 692,
711
+ 693,
712
+ 694,
713
+ 695,
714
+ 696,
715
+ 697,
716
+ 698,
717
+ 699,
718
+ 700,
719
+ 701,
720
+ 702,
721
+ 703,
722
+ 704,
723
+ 705,
724
+ 706,
725
+ 707,
726
+ 708,
727
+ 709,
728
+ 710,
729
+ 711,
730
+ 712,
731
+ 713,
732
+ 714,
733
+ 715,
734
+ 716,
735
+ 717,
736
+ 718,
737
+ 719,
738
+ 720,
739
+ 721,
740
+ 722,
741
+ 723,
742
+ 724,
743
+ 725,
744
+ 726,
745
+ 727,
746
+ 728,
747
+ 729,
748
+ 730,
749
+ 731,
750
+ 732,
751
+ 733,
752
+ 734,
753
+ 735,
754
+ 736,
755
+ 737,
756
+ 738,
757
+ 739,
758
+ 740,
759
+ 741,
760
+ 742,
761
+ 743,
762
+ 744,
763
+ 745,
764
+ 746,
765
+ 747,
766
+ 748,
767
+ 749,
768
+ 750,
769
+ 751,
770
+ 752,
771
+ 753,
772
+ 754,
773
+ 755,
774
+ 756,
775
+ 757,
776
+ 758,
777
+ 759,
778
+ 760,
779
+ 761,
780
+ 762,
781
+ 763,
782
+ 764,
783
+ 765,
784
+ 766,
785
+ 767,
786
+ 768,
787
+ 769,
788
+ 770,
789
+ 771,
790
+ 772,
791
+ 773,
792
+ 774,
793
+ 775,
794
+ 776,
795
+ 777,
796
+ 778,
797
+ 779,
798
+ 780,
799
+ 781,
800
+ 782
801
+ ]
802
+ }
803
+ ]
checkpoints/000500/training_state/optimizer_state.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2818383622ba294aa20d4dea9301559ea08a137ca79ec835802d1122afaf7c61
3
+ size 10896063516
checkpoints/000500/training_state/rng_state.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:908dcf320b9f7841453ea8d3ee5603979dd20285f641c0fe37229eb9b3c41074
3
+ size 15708
checkpoints/000500/training_state/scheduler_state.json ADDED
@@ -0,0 +1,15 @@
1
+ {
2
+ "base_lrs": [
3
+ 2.5e-05
4
+ ],
5
+ "last_epoch": 500,
6
+ "verbose": false,
7
+ "_step_count": 501,
8
+ "_get_lr_called_within_step": false,
9
+ "_last_lr": [
10
+ 1.2512487512487514e-05
11
+ ],
12
+ "lr_lambdas": [
13
+ null
14
+ ]
15
+ }
checkpoints/000500/training_state/training_step.json ADDED
@@ -0,0 +1,3 @@
1
+ {
2
+ "step": 500
3
+ }
checkpoints/000600/pretrained_model/config.json ADDED
@@ -0,0 +1,72 @@
1
+ {
2
+ "type": "pi0",
3
+ "n_obs_steps": 1,
4
+ "normalization_mapping": {
5
+ "VISUAL": "IDENTITY",
6
+ "STATE": "MEAN_STD",
7
+ "ACTION": "MEAN_STD"
8
+ },
9
+ "input_features": {
10
+ "observation.state": {
11
+ "type": "STATE",
12
+ "shape": [
13
+ 6
14
+ ]
15
+ },
16
+ "observation.images.gripper": {
17
+ "type": "VISUAL",
18
+ "shape": [
19
+ 3,
20
+ 480,
21
+ 640
22
+ ]
23
+ },
24
+ "observation.images.webcam": {
25
+ "type": "VISUAL",
26
+ "shape": [
27
+ 3,
28
+ 480,
29
+ 640
30
+ ]
31
+ }
32
+ },
33
+ "output_features": {
34
+ "action": {
35
+ "type": "ACTION",
36
+ "shape": [
37
+ 6
38
+ ]
39
+ }
40
+ },
41
+ "device": "cuda",
42
+ "use_amp": false,
43
+ "chunk_size": 50,
44
+ "n_action_steps": 50,
45
+ "max_state_dim": 32,
46
+ "max_action_dim": 32,
47
+ "resize_imgs_with_padding": [
48
+ 224,
49
+ 224
50
+ ],
51
+ "empty_cameras": 0,
52
+ "adapt_to_pi_aloha": false,
53
+ "use_delta_joint_actions_aloha": false,
54
+ "tokenizer_max_length": 48,
55
+ "proj_width": 1024,
56
+ "num_steps": 10,
57
+ "use_cache": true,
58
+ "attention_implementation": "eager",
59
+ "freeze_vision_encoder": true,
60
+ "train_expert_only": false,
61
+ "train_state_proj": true,
62
+ "optimizer_lr": 2.5e-05,
63
+ "optimizer_betas": [
64
+ 0.9,
65
+ 0.95
66
+ ],
67
+ "optimizer_eps": 1e-08,
68
+ "optimizer_weight_decay": 1e-10,
69
+ "scheduler_warmup_steps": 1000,
70
+ "scheduler_decay_steps": 30000,
71
+ "scheduler_decay_lr": 2.5e-06
72
+ }
checkpoints/000600/pretrained_model/model.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0e4e6e3f950fdde0460bdd66685b7e7b43cd38d01258624d7252f1bd26d5c54d
3
+ size 7536022544
checkpoints/000600/pretrained_model/train_config.json ADDED
@@ -0,0 +1,184 @@
1
+ {
2
+ "dataset": {
3
+ "repo_id": "maelic/hackathon7",
4
+ "root": null,
5
+ "episodes": null,
6
+ "image_transforms": {
7
+ "enable": false,
8
+ "max_num_transforms": 3,
9
+ "random_order": false,
10
+ "tfs": {
11
+ "brightness": {
12
+ "weight": 1.0,
13
+ "type": "ColorJitter",
14
+ "kwargs": {
15
+ "brightness": [
16
+ 0.8,
17
+ 1.2
18
+ ]
19
+ }
20
+ },
21
+ "contrast": {
22
+ "weight": 1.0,
23
+ "type": "ColorJitter",
24
+ "kwargs": {
25
+ "contrast": [
26
+ 0.8,
27
+ 1.2
28
+ ]
29
+ }
30
+ },
31
+ "saturation": {
32
+ "weight": 1.0,
33
+ "type": "ColorJitter",
34
+ "kwargs": {
35
+ "saturation": [
36
+ 0.5,
37
+ 1.5
38
+ ]
39
+ }
40
+ },
41
+ "hue": {
42
+ "weight": 1.0,
43
+ "type": "ColorJitter",
44
+ "kwargs": {
45
+ "hue": [
46
+ -0.05,
47
+ 0.05
48
+ ]
49
+ }
50
+ },
51
+ "sharpness": {
52
+ "weight": 1.0,
53
+ "type": "SharpnessJitter",
54
+ "kwargs": {
55
+ "sharpness": [
56
+ 0.5,
57
+ 1.5
58
+ ]
59
+ }
60
+ }
61
+ }
62
+ },
63
+ "revision": null,
64
+ "use_imagenet_stats": true,
65
+ "video_backend": "torchcodec"
66
+ },
67
+ "env": null,
68
+ "policy": {
69
+ "type": "pi0",
70
+ "n_obs_steps": 1,
71
+ "normalization_mapping": {
72
+ "VISUAL": "IDENTITY",
73
+ "STATE": "MEAN_STD",
74
+ "ACTION": "MEAN_STD"
75
+ },
76
+ "input_features": {
77
+ "observation.state": {
78
+ "type": "STATE",
79
+ "shape": [
80
+ 6
81
+ ]
82
+ },
83
+ "observation.images.gripper": {
84
+ "type": "VISUAL",
85
+ "shape": [
86
+ 3,
87
+ 480,
88
+ 640
89
+ ]
90
+ },
91
+ "observation.images.webcam": {
92
+ "type": "VISUAL",
93
+ "shape": [
94
+ 3,
95
+ 480,
96
+ 640
97
+ ]
98
+ }
99
+ },
100
+ "output_features": {
101
+ "action": {
102
+ "type": "ACTION",
103
+ "shape": [
104
+ 6
105
+ ]
106
+ }
107
+ },
108
+ "device": "cuda",
109
+ "use_amp": false,
110
+ "chunk_size": 50,
111
+ "n_action_steps": 50,
112
+ "max_state_dim": 32,
113
+ "max_action_dim": 32,
114
+ "resize_imgs_with_padding": [
115
+ 224,
116
+ 224
117
+ ],
118
+ "empty_cameras": 0,
119
+ "adapt_to_pi_aloha": false,
120
+ "use_delta_joint_actions_aloha": false,
121
+ "tokenizer_max_length": 48,
122
+ "proj_width": 1024,
123
+ "num_steps": 10,
124
+ "use_cache": true,
125
+ "attention_implementation": "eager",
126
+ "freeze_vision_encoder": true,
127
+ "train_expert_only": false,
128
+ "train_state_proj": true,
129
+ "optimizer_lr": 2.5e-05,
130
+ "optimizer_betas": [
131
+ 0.9,
132
+ 0.95
133
+ ],
134
+ "optimizer_eps": 1e-08,
135
+ "optimizer_weight_decay": 1e-10,
136
+ "scheduler_warmup_steps": 1000,
137
+ "scheduler_decay_steps": 30000,
138
+ "scheduler_decay_lr": 2.5e-06
139
+ },
140
+ "output_dir": "/scratch/train/pi_shity_version",
141
+ "job_name": "pi0",
142
+ "resume": false,
143
+ "seed": 1000,
144
+ "num_workers": 4,
145
+ "batch_size": 20,
146
+ "steps": 100000,
147
+ "eval_freq": 100,
148
+ "log_freq": 200,
149
+ "save_checkpoint": true,
150
+ "save_freq": 100,
151
+ "use_policy_training_preset": true,
152
+ "optimizer": {
153
+ "type": "adamw",
154
+ "lr": 2.5e-05,
155
+ "weight_decay": 1e-10,
156
+ "grad_clip_norm": 10.0,
157
+ "betas": [
158
+ 0.9,
159
+ 0.95
160
+ ],
161
+ "eps": 1e-08
162
+ },
163
+ "scheduler": {
164
+ "type": "cosine_decay_with_warmup",
165
+ "num_warmup_steps": 1000,
166
+ "num_decay_steps": 30000,
167
+ "peak_lr": 2.5e-05,
168
+ "decay_lr": 2.5e-06
169
+ },
170
+ "eval": {
171
+ "n_episodes": 50,
172
+ "batch_size": 50,
173
+ "use_async_envs": false
174
+ },
175
+ "wandb": {
176
+ "enable": true,
177
+ "disable_artifact": false,
178
+ "project": "pi0_mistral_hackathon",
179
+ "entity": null,
180
+ "notes": null,
181
+ "run_id": "shity_version",
182
+ "mode": null
183
+ }
184
+ }
checkpoints/000600/training_state/rng_state.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:947a8b9f49f0bd12461173ac9c0ddfa6a97daf7a1f13fc015d1e1148ce86ba4b
3
+ size 15708
checkpoints/000600/training_state/training_step.json ADDED
@@ -0,0 +1,3 @@
1
+ {
2
+ "step": 600
3
+ }
wandb/debug-internal.log ADDED
@@ -0,0 +1,7 @@
1
+ {"time":"2025-04-12T22:01:36.520670877Z","level":"INFO","msg":"stream: starting","core version":"0.19.9","symlink path":"/scratch/train/pi_shity_version/wandb/run-20250412_220136-shity_version/logs/debug-core.log"}
2
+ {"time":"2025-04-12T22:01:36.769914227Z","level":"INFO","msg":"created new stream","id":"shity_version"}
3
+ {"time":"2025-04-12T22:01:36.769954097Z","level":"INFO","msg":"stream: started","id":"shity_version"}
4
+ {"time":"2025-04-12T22:01:36.769978883Z","level":"INFO","msg":"writer: Do: started","stream_id":"shity_version"}
5
+ {"time":"2025-04-12T22:01:36.769995438Z","level":"INFO","msg":"handler: started","stream_id":"shity_version"}
6
+ {"time":"2025-04-12T22:01:36.770025753Z","level":"INFO","msg":"sender: started","stream_id":"shity_version"}
7
+ {"time":"2025-04-12T22:01:37.196484495Z","level":"INFO","msg":"Starting system monitor"}
wandb/debug.log ADDED
@@ -0,0 +1,23 @@
1
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_setup.py:_flush():67] Current SDK version is 0.19.9
2
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_setup.py:_flush():67] Configure stats pid to 27554
3
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_setup.py:_flush():67] Loading settings from /root/.config/wandb/settings
4
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_setup.py:_flush():67] Loading settings from /root/lerobot/wandb/settings
5
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_setup.py:_flush():67] Loading settings from environment variables
6
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_init.py:setup_run_log_directory():662] Logging user logs to /scratch/train/pi_shity_version/wandb/run-20250412_220136-shity_version/logs/debug.log
7
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_init.py:setup_run_log_directory():663] Logging internal logs to /scratch/train/pi_shity_version/wandb/run-20250412_220136-shity_version/logs/debug-internal.log
8
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_init.py:init():781] calling init triggers
9
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_init.py:init():786] wandb.init called with sweep_config: {}
10
+ config: {'dataset': {'repo_id': 'maelic/hackathon7', 'root': None, 'episodes': None, 'image_transforms': {'enable': False, 'max_num_transforms': 3, 'random_order': False, 'tfs': {'brightness': {'weight': 1.0, 'type': 'ColorJitter', 'kwargs': {'brightness': [0.8, 1.2]}}, 'contrast': {'weight': 1.0, 'type': 'ColorJitter', 'kwargs': {'contrast': [0.8, 1.2]}}, 'saturation': {'weight': 1.0, 'type': 'ColorJitter', 'kwargs': {'saturation': [0.5, 1.5]}}, 'hue': {'weight': 1.0, 'type': 'ColorJitter', 'kwargs': {'hue': [-0.05, 0.05]}}, 'sharpness': {'weight': 1.0, 'type': 'SharpnessJitter', 'kwargs': {'sharpness': [0.5, 1.5]}}}}, 'revision': None, 'use_imagenet_stats': True, 'video_backend': 'torchcodec'}, 'env': None, 'policy': {'type': 'pi0', 'n_obs_steps': 1, 'normalization_mapping': {'VISUAL': <NormalizationMode.IDENTITY: 'IDENTITY'>, 'STATE': <NormalizationMode.MEAN_STD: 'MEAN_STD'>, 'ACTION': <NormalizationMode.MEAN_STD: 'MEAN_STD'>}, 'input_features': {'observation.state': {'type': <FeatureType.STATE: 'STATE'>, 'shape': [6]}, 'observation.images.gripper': {'type': <FeatureType.VISUAL: 'VISUAL'>, 'shape': [3, 480, 640]}, 'observation.images.webcam': {'type': <FeatureType.VISUAL: 'VISUAL'>, 'shape': [3, 480, 640]}}, 'output_features': {'action': {'type': <FeatureType.ACTION: 'ACTION'>, 'shape': [6]}}, 'device': 'cuda', 'use_amp': False, 'chunk_size': 50, 'n_action_steps': 50, 'max_state_dim': 32, 'max_action_dim': 32, 'resize_imgs_with_padding': [224, 224], 'empty_cameras': 0, 'adapt_to_pi_aloha': False, 'use_delta_joint_actions_aloha': False, 'tokenizer_max_length': 48, 'proj_width': 1024, 'num_steps': 10, 'use_cache': True, 'attention_implementation': 'eager', 'freeze_vision_encoder': True, 'train_expert_only': False, 'train_state_proj': True, 'optimizer_lr': 2.5e-05, 'optimizer_betas': [0.9, 0.95], 'optimizer_eps': 1e-08, 'optimizer_weight_decay': 1e-10, 'scheduler_warmup_steps': 1000, 'scheduler_decay_steps': 30000, 'scheduler_decay_lr': 2.5e-06}, 'output_dir': '/scratch/train/pi_shity_version', 'job_name': 'pi0', 'resume': True, 'seed': 1000, 'num_workers': 4, 'batch_size': 20, 'steps': 100000, 'eval_freq': 100, 'log_freq': 200, 'save_checkpoint': True, 'save_freq': 100, 'use_policy_training_preset': True, 'optimizer': {'type': 'adamw', 'lr': 2.5e-05, 'weight_decay': 1e-10, 'grad_clip_norm': 10.0, 'betas': [0.9, 0.95], 'eps': 1e-08}, 'scheduler': {'type': 'cosine_decay_with_warmup', 'num_warmup_steps': 1000, 'num_decay_steps': 30000, 'peak_lr': 2.5e-05, 'decay_lr': 2.5e-06}, 'eval': {'n_episodes': 50, 'batch_size': 50, 'use_async_envs': False}, 'wandb': {'enable': True, 'disable_artifact': False, 'project': 'pi0_mistral_hackathon', 'entity': None, 'notes': None, 'run_id': 'shity_version', 'mode': None}, '_wandb': {}}
11
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_init.py:init():809] starting backend
12
+ 2025-04-12 22:01:36,516 INFO MainThread:27554 [wandb_init.py:init():813] sending inform_init request
13
+ 2025-04-12 22:01:36,519 INFO MainThread:27554 [backend.py:_multiprocessing_setup():101] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
14
+ 2025-04-12 22:01:36,519 INFO MainThread:27554 [wandb_init.py:init():823] backend started and connected
15
+ 2025-04-12 22:01:36,520 INFO MainThread:27554 [wandb_init.py:init():915] updated telemetry
16
+ 2025-04-12 22:01:36,524 INFO MainThread:27554 [wandb_init.py:init():939] communicating run to backend with 90.0 second timeout
17
+ 2025-04-12 22:01:37,193 INFO MainThread:27554 [wandb_init.py:init():1009] run resumed
18
+ 2025-04-12 22:01:37,195 INFO MainThread:27554 [wandb_init.py:init():1014] starting run threads in backend
19
+ 2025-04-12 22:01:37,293 INFO MainThread:27554 [wandb_run.py:_console_start():2454] atexit reg
20
+ 2025-04-12 22:01:37,294 INFO MainThread:27554 [wandb_run.py:_redirect():2306] redirect: wrap_raw
21
+ 2025-04-12 22:01:37,294 INFO MainThread:27554 [wandb_run.py:_redirect():2371] Wrapping output streams.
22
+ 2025-04-12 22:01:37,294 INFO MainThread:27554 [wandb_run.py:_redirect():2394] Redirects installed.
23
+ 2025-04-12 22:01:37,295 INFO MainThread:27554 [wandb_init.py:init():1056] run started, returning control to user process
wandb/run-20250412_213619-shity_version/files/config.yaml ADDED
@@ -0,0 +1,175 @@
1
+ _wandb:
2
+ value:
3
+ cli_version: 0.19.9
4
+ m: []
5
+ python_version: 3.10.13
6
+ t:
7
+ "1":
8
+ - 1
9
+ - 41
10
+ - 49
11
+ - 51
12
+ - 55
13
+ "2":
14
+ - 1
15
+ - 11
16
+ - 41
17
+ - 49
18
+ - 51
19
+ - 55
20
+ "3":
21
+ - 13
22
+ - 14
23
+ - 15
24
+ - 16
25
+ - 23
26
+ - 55
27
+ - 61
28
+ "4": 3.10.13
29
+ "5": 0.19.9
30
+ "8":
31
+ - 5
32
+ "12": 0.19.9
33
+ "13": linux-x86_64
34
+ batch_size:
35
+ value: 20
36
+ dataset:
37
+ value:
38
+ episodes: null
39
+ image_transforms:
40
+ enable: false
41
+ max_num_transforms: 3
42
+ random_order: false
43
+ tfs:
44
+ brightness:
45
+ kwargs:
46
+ brightness:
47
+ - 0.8
48
+ - 1.2
49
+ type: ColorJitter
50
+ weight: 1
51
+ contrast:
52
+ kwargs:
53
+ contrast:
54
+ - 0.8
55
+ - 1.2
56
+ type: ColorJitter
57
+ weight: 1
58
+ hue:
59
+ kwargs:
60
+ hue:
61
+ - -0.05
62
+ - 0.05
63
+ type: ColorJitter
64
+ weight: 1
65
+ saturation:
66
+ kwargs:
67
+ saturation:
68
+ - 0.5
69
+ - 1.5
70
+ type: ColorJitter
71
+ weight: 1
72
+ sharpness:
73
+ kwargs:
74
+ sharpness:
75
+ - 0.5
76
+ - 1.5
77
+ type: SharpnessJitter
78
+ weight: 1
79
+ repo_id: maelic/hackathon7
80
+ revision: null
81
+ root: null
82
+ use_imagenet_stats: true
83
+ video_backend: torchcodec
84
+ env:
85
+ value: null
86
+ eval:
87
+ value:
88
+ batch_size: 50
89
+ n_episodes: 50
90
+ use_async_envs: false
91
+ eval_freq:
92
+ value: 100
93
+ job_name:
94
+ value: pi0
95
+ log_freq:
96
+ value: 200
97
+ num_workers:
98
+ value: 4
99
+ optimizer:
100
+ value:
101
+ betas:
102
+ - 0.9
103
+ - 0.95
104
+ eps: 1e-08
105
+ grad_clip_norm: 10
106
+ lr: 2.5e-05
107
+ type: adamw
108
+ weight_decay: 1e-10
109
+ output_dir:
110
+ value: /scratch/train/pi_shity_version
111
+ policy:
112
+ value:
113
+ adapt_to_pi_aloha: false
114
+ attention_implementation: eager
115
+ chunk_size: 50
116
+ device: cuda
117
+ empty_cameras: 0
118
+ freeze_vision_encoder: true
119
+ max_action_dim: 32
120
+ max_state_dim: 32
121
+ n_action_steps: 50
122
+ n_obs_steps: 1
123
+ normalization_mapping:
124
+ ACTION: MEAN_STD
125
+ STATE: MEAN_STD
126
+ VISUAL: IDENTITY
127
+ num_steps: 10
128
+ optimizer_betas:
129
+ - 0.9
130
+ - 0.95
131
+ optimizer_eps: 1e-08
132
+ optimizer_lr: 2.5e-05
133
+ optimizer_weight_decay: 1e-10
134
+ proj_width: 1024
135
+ resize_imgs_with_padding:
136
+ - 224
137
+ - 224
138
+ scheduler_decay_lr: 2.5e-06
139
+ scheduler_decay_steps: 30000
140
+ scheduler_warmup_steps: 1000
141
+ tokenizer_max_length: 48
142
+ train_expert_only: false
143
+ train_state_proj: true
144
+ type: pi0
145
+ use_amp: false
146
+ use_cache: true
147
+ use_delta_joint_actions_aloha: false
148
+ resume:
149
+ value: false
150
+ save_checkpoint:
151
+ value: true
152
+ save_freq:
153
+ value: 100
154
+ scheduler:
155
+ value:
156
+ decay_lr: 2.5e-06
157
+ num_decay_steps: 30000
158
+ num_warmup_steps: 1000
159
+ peak_lr: 2.5e-05
160
+ type: cosine_decay_with_warmup
161
+ seed:
162
+ value: 1000
163
+ steps:
164
+ value: 100000
165
+ use_policy_training_preset:
166
+ value: true
167
+ wandb:
168
+ value:
169
+ disable_artifact: false
170
+ enable: true
171
+ entity: null
172
+ mode: null
173
+ notes: null
174
+ project: pi0_mistral_hackathon
175
+ run_id: shity_version
wandb/run-20250412_213619-shity_version/files/output.log ADDED
@@ -0,0 +1,75 @@
1
+ Logs will be synced with wandb.
2
+ INFO 2025-04-12 21:36:19 ndb_utils.py:96 Track this run --> https://wandb.ai/helper2424-3commas/pi0_mistral_hackathon/runs/shity_version
3
+ INFO 2025-04-12 21:36:19 ts/train.py:127 Creating dataset
4
+ Resolving data files: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 30/30 [00:00<00:00, 205603.14it/s]
5
+ INFO 2025-04-12 21:36:20 ts/train.py:138 Creating policy
6
+ INFO 2025-04-12 21:37:06 ts/train.py:144 Creating optimizer and scheduler
7
+ INFO 2025-04-12 21:37:06 ts/train.py:156 Output dir: /scratch/train/pi_shity_version
8
+ INFO 2025-04-12 21:37:06 ts/train.py:159 cfg.steps=100000 (100K)
9
+ INFO 2025-04-12 21:37:06 ts/train.py:160 dataset.num_frames=11312 (11K)
10
+ INFO 2025-04-12 21:37:06 ts/train.py:161 dataset.num_episodes=30
11
+ INFO 2025-04-12 21:37:06 ts/train.py:162 num_learnable_params=3088929824 (3B)
12
+ INFO 2025-04-12 21:37:06 ts/train.py:163 num_total_params=3501372212 (4B)
13
+ INFO 2025-04-12 21:37:06 ts/train.py:202 Start offline training on a fixed dataset
14
+ INFO 2025-04-12 21:38:23 ts/train.py:241 Checkpoint policy after step 100
15
+ INFO 2025-04-12 21:40:25 ts/train.py:232 step:200 smpl:4K ep:11 epch:0.35 loss:0.065 grdn:1.385 lr:2.5e-06 updt_s:0.749 data_s:0.007
16
+ WARNING 2025-04-12 21:40:25 db_utils.py:117 WandB logging of key "losses_after_forward" was ignored as its type is not handled by this wrapper.
17
+ WARNING 2025-04-12 21:40:25 db_utils.py:117 WandB logging of key "losses_after_in_ep_bound" was ignored as its type is not handled by this wrapper.
18
+ WARNING 2025-04-12 21:40:25 db_utils.py:117 WandB logging of key "losses_after_rm_padding" was ignored as its type is not handled by this wrapper.
19
+ INFO 2025-04-12 21:40:25 ts/train.py:241 Checkpoint policy after step 200
20
+ INFO 2025-04-12 21:42:23 ts/train.py:241 Checkpoint policy after step 300
21
+ INFO 2025-04-12 21:44:25 ts/train.py:232 step:400 smpl:8K ep:21 epch:0.71 loss:0.033 grdn:0.748 lr:7.5e-06 updt_s:0.746 data_s:0.000
22
+ WARNING 2025-04-12 21:44:25 db_utils.py:117 WandB logging of key "losses_after_forward" was ignored as its type is not handled by this wrapper.
23
+ WARNING 2025-04-12 21:44:25 db_utils.py:117 WandB logging of key "losses_after_in_ep_bound" was ignored as its type is not handled by this wrapper.
24
+ WARNING 2025-04-12 21:44:25 db_utils.py:117 WandB logging of key "losses_after_rm_padding" was ignored as its type is not handled by this wrapper.
25
+ INFO 2025-04-12 21:44:25 ts/train.py:241 Checkpoint policy after step 400
26
+ INFO 2025-04-12 21:46:23 ts/train.py:241 Checkpoint policy after step 500
27
+ INFO 2025-04-12 21:48:23 ts/train.py:232 step:600 smpl:12K ep:32 epch:1.06 loss:0.022 grdn:0.634 lr:1.3e-05 updt_s:0.746 data_s:0.005
28
+ WARNING 2025-04-12 21:48:23 db_utils.py:117 WandB logging of key "losses_after_forward" was ignored as its type is not handled by this wrapper.
29
+ WARNING 2025-04-12 21:48:23 db_utils.py:117 WandB logging of key "losses_after_in_ep_bound" was ignored as its type is not handled by this wrapper.
30
+ WARNING 2025-04-12 21:48:23 db_utils.py:117 WandB logging of key "losses_after_rm_padding" was ignored as its type is not handled by this wrapper.
31
+ INFO 2025-04-12 21:48:23 ts/train.py:241 Checkpoint policy after step 600
32
+ Traceback (most recent call last):
33
+ File "/root/lerobot/lerobot/scripts/train.py", line 288, in <module>
34
+ train()
35
+ File "/root/lerobot/lerobot/configs/parser.py", line 227, in wrapper_inner
36
+ response = fn(cfg, *args, **kwargs)
37
+ File "/root/lerobot/lerobot/scripts/train.py", line 243, in train
38
+ save_checkpoint(checkpoint_dir, step, cfg, policy, optimizer, lr_scheduler)
39
+ File "/root/lerobot/lerobot/common/utils/train_utils.py", line 102, in save_checkpoint
40
+ save_training_state(checkpoint_dir, step, optimizer, scheduler)
41
+ File "/root/lerobot/lerobot/common/utils/train_utils.py", line 127, in save_training_state
42
+ save_optimizer_state(optimizer, save_dir)
43
+ File "/root/lerobot/lerobot/common/optim/optimizers.py", line 101, in save_optimizer_state
44
+ save_file(flat_state, save_dir / OPTIMIZER_STATE)
45
+ File "/root/miniconda3/envs/lerobot/lib/python3.10/site-packages/safetensors/torch.py", line 286, in save_file
46
+ serialize_file(_flatten(tensors), filename, metadata=metadata)
47
+ File "/root/miniconda3/envs/lerobot/lib/python3.10/site-packages/safetensors/torch.py", line 496, in _flatten
48
+ return {
49
+ File "/root/miniconda3/envs/lerobot/lib/python3.10/site-packages/safetensors/torch.py", line 500, in <dictcomp>
50
+ "data": _tobytes(v, k),
51
+ File "/root/miniconda3/envs/lerobot/lib/python3.10/site-packages/safetensors/torch.py", line 422, in _tobytes
52
+ tensor = tensor.to("cpu")
53
+ KeyboardInterrupt
54
+ Traceback (most recent call last):
55
+ File "/root/lerobot/lerobot/scripts/train.py", line 288, in <module>
56
+ train()
57
+ File "/root/lerobot/lerobot/configs/parser.py", line 227, in wrapper_inner
58
+ response = fn(cfg, *args, **kwargs)
59
+ File "/root/lerobot/lerobot/scripts/train.py", line 243, in train
60
+ save_checkpoint(checkpoint_dir, step, cfg, policy, optimizer, lr_scheduler)
61
+ File "/root/lerobot/lerobot/common/utils/train_utils.py", line 102, in save_checkpoint
62
+ save_training_state(checkpoint_dir, step, optimizer, scheduler)
63
+ File "/root/lerobot/lerobot/common/utils/train_utils.py", line 127, in save_training_state
64
+ save_optimizer_state(optimizer, save_dir)
65
+ File "/root/lerobot/lerobot/common/optim/optimizers.py", line 101, in save_optimizer_state
66
+ save_file(flat_state, save_dir / OPTIMIZER_STATE)
67
+ File "/root/miniconda3/envs/lerobot/lib/python3.10/site-packages/safetensors/torch.py", line 286, in save_file
68
+ serialize_file(_flatten(tensors), filename, metadata=metadata)
69
+ File "/root/miniconda3/envs/lerobot/lib/python3.10/site-packages/safetensors/torch.py", line 496, in _flatten
70
+ return {
71
+ File "/root/miniconda3/envs/lerobot/lib/python3.10/site-packages/safetensors/torch.py", line 500, in <dictcomp>
72
+ "data": _tobytes(v, k),
73
+ File "/root/miniconda3/envs/lerobot/lib/python3.10/site-packages/safetensors/torch.py", line 422, in _tobytes
74
+ tensor = tensor.to("cpu")
75
+ KeyboardInterrupt
wandb/run-20250412_213619-shity_version/files/requirements.txt ADDED
@@ -0,0 +1,245 @@
+ tokenizers==0.21.1
+ frozenlist==1.5.0
+ numcodecs==0.13.1
+ terminado==0.17.1
+ Farama-Notifications==0.0.4
+ labmaze==1.0.6
+ Flask==3.1.0
+ nvidia-cusparse-cu12==12.3.1.170
+ jsonschema==4.23.0
+ pycparser==2.21
+ pycparser==2.22
+ argon2-cffi==21.3.0
+ multidict==6.4.3
+ safetensors==0.5.3
+ setuptools==75.8.0
+ multiprocess==0.70.16
+ urllib3==2.4.0
+ urllib3==2.3.0
+ gdown==5.2.0
+ pytz==2025.2
+ beautifulsoup4==4.12.3
+ beautifulsoup4==4.13.3
+ importlib_metadata==8.6.1
+ babel==2.16.0
+ cmake==4.0.0
+ pyyaml-include==1.4.1
+ asciitree==0.3.3
+ h5py==3.13.0
+ antlr4-python3-runtime==4.9.3
+ tzdata==2025.2
+ jupyterlab==4.3.4
+ datasets==3.5.0
+ mergedeep==1.3.4
+ nvidia-cusparselt-cu12==0.6.2
+ comm==0.2.1
+ blinker==1.9.0
+ referencing==0.30.2
+ nvidia-cuda-cupti-cu12==12.4.127
+ nbconvert==7.16.6
+ iniconfig==2.1.0
+ click==8.1.8
+ nest-asyncio==1.6.0
+ rerun-sdk==0.22.1
+ wcwidth==0.2.5
+ wcwidth==0.2.13
+ wandb==0.19.9
+ argon2-cffi-bindings==21.2.0
+ nvidia-cusolver-cu12==11.6.1.9
+ ptyprocess==0.7.0
+ triton==3.2.0
+ parso==0.8.4
+ cffi==1.17.1
+ executing==0.8.3
+ soupsieve==2.6
+ soupsieve==2.5
+ lerobot==0.1.0
+ nvidia-cuda-runtime-cu12==12.4.127
+ python-dateutil==2.9.0.post0
+ pure-eval==0.2.2
+ aiosignal==1.3.2
+ hf_transfer==0.1.9
+ pandas==2.2.3
+ dm-env==1.6
+ pygame==2.6.1
+ opencv-python-headless==4.11.0.86
+ PyOpenGL==3.1.9
+ matplotlib-inline==0.1.6
+ requests==2.32.3
+ propcache==0.3.1
+ json5==0.9.25
+ Pygments==2.15.1
+ zipp==3.21.0
+ bleach==6.2.0
+ prompt-toolkit==3.0.43
+ prompt_toolkit==3.0.50
+ scipy==1.15.2
+ gitdb==4.0.12
+ notebook_shim==0.2.4
+ zarr==2.18.3
+ pyparsing==3.2.3
+ pydantic==2.11.3
+ tqdm==4.67.1
+ nvidia-cufft-cu12==11.2.1.3
+ sniffio==1.3.0
+ diffusers==0.33.1
+ typing-inspection==0.4.0
+ jupyter_client==8.6.3
+ gym-aloha==0.1.1
+ Jinja2==3.1.6
+ exceptiongroup==1.2.0
+ stack-data==0.2.0
+ TorchCodec==0.2.1
+ numba==0.61.2
+ smmap==5.0.2
+ opencv-python==4.11.0.86
+ debugpy==1.8.11
+ tifffile==2025.3.30
+ setproctitle==1.3.5
+ imageio-ffmpeg==0.6.0
+ overrides==7.4.0
+ fasteners==0.19
+ fsspec==2024.12.0
+ jedi==0.19.2
+ regex==2024.11.6
+ jupyterlab_pygments==0.3.0
+ jupyter_server==2.15.0
+ anyio==4.6.2
+ scikit-image==0.25.2
+ pluggy==1.5.0
+ xxhash==3.5.0
+ lxml==5.3.2
+ async-lru==2.0.4
+ PyYAML==6.0.2
+ tomli==2.0.1
+ platformdirs==4.3.7
+ nvidia-cuda-nvrtc-cu12==12.4.127
+ psutil==7.0.0
+ psutil==5.9.0
+ mypy-extensions==1.0.0
+ idna==3.7
+ idna==3.10
+ ipykernel==6.29.5
+ nbformat==5.10.4
+ charset-normalizer==3.3.2
+ charset-normalizer==3.4.1
+ pynput==1.8.1
+ jupyterlab_server==2.27.3
+ nvidia-nvtx-cu12==12.4.127
+ sentry-sdk==2.25.1
+ pyserial==3.5
+ nvidia-curand-cu12==10.3.5.147
+ attrs==25.3.0
+ attrs==24.3.0
+ GitPython==3.1.44
+ rfc3986-validator==0.1.1
+ jsonlines==4.0.0
+ omegaconf==2.3.0
+ typing_extensions==4.13.2
+ typing_extensions==4.12.2
+ cloudpickle==3.1.1
+ tinycss2==1.4.0
+ av==14.3.0
+ jupyter-events==0.12.0
+ imageio==2.37.0
+ torchvision==0.21.0
+ Brotli==1.0.9
+ nvidia-cudnn-cu12==9.1.0.70
+ traitlets==5.14.3
+ huggingface-hub==0.30.2
+ pydantic_core==2.33.1
+ pyarrow==19.0.1
+ jupyter_core==5.7.2
+ deepdiff==8.4.2
+ python-json-logger==3.2.1
+ orderly-set==5.4.0
+ pip==25.0
+ jsonschema-specifications==2023.7.1
+ ipython==8.30.0
+ Send2Trash==1.8.2
+ wheel==0.45.1
+ prometheus_client==0.21.1
+ absl-py==2.2.2
+ mpmath==1.3.0
+ torch==2.6.0
+ dill==0.3.8
+ termcolor==3.0.1
+ gymnasium==0.29.1
+ wrapt==1.17.2
+ pandocfilters==1.5.0
+ pillow==11.2.1
+ h11==0.14.0
+ gym-pusht==0.1.5
+ nvidia-nvjitlink-cu12==12.4.127
+ pymunk==6.11.1
+ defusedxml==0.7.1
+ nbclient==0.10.2
+ fastjsonschema==2.20.0
+ jupyter_server_terminals==0.4.4
+ pyzmq==26.4.0
+ nvidia-cublas-cu12==12.4.5.8
+ networkx==3.4.2
+ numpy==2.2.4
+ toml==0.10.2
+ filelock==3.18.0
+ annotated-types==0.7.0
+ PySocks==1.7.1
+ draccus==0.10.0
+ tornado==6.4.2
+ mistune==3.1.2
+ httpcore==1.0.2
+ yarl==1.19.0
+ mujoco==2.3.7
+ aiohttp==3.11.16
+ lazy_loader==0.4
+ itsdangerous==2.2.0
+ shapely==2.1.0
+ protobuf==5.29.4
+ httpx==0.27.0
+ dm-control==1.0.14
+ rpds-py==0.22.3
+ aiohappyeyeballs==2.6.1
+ Werkzeug==3.1.3
+ typing-inspect==0.9.0
+ decorator==5.1.1
+ webencodings==0.5.1
+ nvidia-nccl-cu12==2.21.5
+ asttokens==3.0.0
+ certifi==2025.1.31
+ sympy==1.13.1
+ dm-tree==0.1.9
+ einops==0.8.1
+ pexpect==4.8.0
+ python-xlib==0.33
+ pfzy==0.3.4
+ inquirerpy==0.3.4
+ MarkupSafe==3.0.2
+ packaging==24.2
+ transformers==4.51.2
+ websocket-client==1.8.0
+ docker-pycreds==0.4.0
+ async-timeout==5.0.1
+ rfc3339-validator==0.1.4
+ jupyter-lsp==2.2.0
+ six==1.17.0
+ llvmlite==0.44.0
+ pytest==8.3.5
+ evdev==1.9.1
+ feetech-servo-sdk==1.0.0
+ glfw==2.8.0
+ backports.tarfile==1.2.0
+ inflect==7.3.1
+ zipp==3.19.2
+ jaraco.text==3.12.1
+ autocommand==2.2.2
+ typeguard==4.3.0
+ wheel==0.43.0
+ jaraco.collections==5.1.0
+ tomli==2.0.1
+ jaraco.context==5.3.0
+ platformdirs==4.2.2
+ more-itertools==10.3.0
+ importlib_metadata==8.0.0
+ jaraco.functools==4.0.1
+ typing_extensions==4.12.2
+ packaging==24.2
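
Note: this requirements.txt is wandb's snapshot of every distribution visible in the run's environment, so some packages appear twice with different pins (for example pycparser==2.21 and pycparser==2.22, urllib3==2.4.0 and urllib3==2.3.0), and pip rejects conflicting duplicate requirements in a single file. Below is a small, hypothetical clean-up sketch (not part of this repository) that keeps the first pin seen per package before reinstalling.

# Hypothetical helper: deduplicate the captured requirements.txt so that
# `pip install -r requirements.dedup.txt` does not fail on double requirements.
from pathlib import Path

src = Path("requirements.txt")          # the file captured by this wandb run
dst = Path("requirements.dedup.txt")

seen = set()
kept = []
for line in src.read_text().splitlines():
    line = line.strip()
    if not line or line.startswith("#"):
        continue
    # Normalize the distribution name so pycparser==2.21 / pycparser==2.22
    # are recognized as the same package.
    name = line.split("==")[0].lower().replace("_", "-")
    if name in seen:
        continue  # drop the second, conflicting pin
    seen.add(name)
    kept.append(line)

dst.write_text("\n".join(kept) + "\n")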