easyminnn committed on
Commit 4a1c884 · verified · 1 Parent(s): 703951d

Upload folder using huggingface_hub

config.json ADDED
@@ -0,0 +1,64 @@
+ {
+   "action_dim": 32,
+   "action_head_cfg": {
+     "action_dim": 32,
+     "action_horizon": 16,
+     "add_pos_embed": true,
+     "backbone_embedding_dim": 2048,
+     "diffusion_model_cfg": {
+       "attention_head_dim": 48,
+       "cross_attention_dim": 2048,
+       "dropout": 0.2,
+       "final_dropout": true,
+       "interleave_self_attention": true,
+       "norm_type": "ada_norm",
+       "num_attention_heads": 32,
+       "num_layers": 16,
+       "output_dim": 1024,
+       "positional_embeddings": null
+     },
+     "hidden_size": 1024,
+     "input_embedding_dim": 1536,
+     "max_action_dim": 32,
+     "max_state_dim": 64,
+     "model_dtype": "float32",
+     "noise_beta_alpha": 1.5,
+     "noise_beta_beta": 1.0,
+     "noise_s": 0.999,
+     "num_inference_timesteps": 4,
+     "num_target_vision_tokens": 32,
+     "num_timestep_buckets": 1000,
+     "tune_diffusion_model": true,
+     "tune_projector": true,
+     "use_vlln": true,
+     "vl_self_attention_cfg": {
+       "attention_head_dim": 64,
+       "dropout": 0.2,
+       "final_dropout": true,
+       "num_attention_heads": 32,
+       "num_layers": 4,
+       "positional_embeddings": null
+     }
+   },
+   "action_horizon": 16,
+   "architectures": [
+     "GR00T_N1_5_Tactile"
+   ],
+   "attn_implementation": null,
+   "backbone_cfg": {
+     "eagle_path": "NVEagle/eagle_er-qwen3_1_7B-Siglip2_400M_stage1_5_128gpu_er_v7_1mlp_nops",
+     "load_bf16": false,
+     "project_to_dim": null,
+     "reproject_vision": false,
+     "select_layer": 12,
+     "tune_llm": false,
+     "tune_visual": true,
+     "use_flash_attention": true
+   },
+   "compute_dtype": "bfloat16",
+   "hidden_size": 2048,
+   "model_dtype": "float32",
+   "model_type": "gr00t_n1_5",
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.51.3"
+ }
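
For orientation, the sketch below reads this config.json with nothing but the standard library and prints the fields most relevant to inference; the interpretive comments (for example, treating action_horizon as the number of predicted action steps) are assumptions, not statements from this repository.

# Minimal sketch: inspect the uploaded config.json with the standard library.
# Interpretation comments are assumptions about GR00T-style configs.
import json

with open("config.json") as f:
    cfg = json.load(f)

head = cfg["action_head_cfg"]
print(cfg["model_type"])                            # "gr00t_n1_5"
print(cfg["action_dim"], cfg["action_horizon"])     # 32 action dims, 16-step horizon (assumed meaning)
print(head["num_inference_timesteps"])              # 4 denoising steps at inference (assumed meaning)
print(cfg["backbone_cfg"]["eagle_path"])            # vision-language backbone reference
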
experiment_cfg/metadata.json ADDED
@@ -0,0 +1,219 @@
+ {
+   "new_embodiment": {
+     "statistics": {
+       "state": {
+         "joint_pos_abs": {
+           "max": [
+             0.8468377590179443,
+             0.7539956569671631,
+             0.5745469927787781,
+             -1.7240889072418213,
+             0.9919779300689697,
+             3.4747636318206787,
+             0.9461642503738403
+           ],
+           "min": [
+             -0.5892641544342041,
+             -0.9039609432220459,
+             -0.5897979140281677,
+             -2.797158718109131,
+             -0.6585066318511963,
+             1.902068853378296,
+             -0.8625808358192444
+           ],
+           "mean": [
+             0.07377757132053375,
+             0.18003661930561066,
+             0.0016148989088833332,
+             -2.5599350929260254,
+             -0.0436888225376606,
+             2.6136817932128906,
+             0.1067916601896286
+           ],
+           "std": [
+             0.11827199906110764,
+             0.2992362380027771,
+             0.1049984022974968,
+             0.13403171300888062,
+             0.17177742719650269,
+             0.2794305682182312,
+             0.17959704995155334
+           ],
+           "q01": [
+             -0.1457512293756008,
+             -0.5576333731412888,
+             -0.27967362582683564,
+             -2.774756534099579,
+             -0.4874495455622673,
+             1.947138786315918,
+             -0.40577536284923554
+           ],
+           "q99": [
+             0.4589115133881569,
+             0.5267531245946884,
+             0.2608844107389447,
+             -2.201024322509766,
+             0.45143646210432054,
+             3.1559039568901053,
+             0.5651241511106466
+           ]
+         },
+         "gripper_close": {
+           "max": [
+             0.7621145248413086
+           ],
+           "min": [
+             0.0
+           ],
+           "mean": [
+             0.1114826574921608
+           ],
+           "std": [
+             0.11908694356679916
+           ],
+           "q01": [
+             0.0
+           ],
+           "q99": [
+             0.2819383442401886
+           ]
+         }
+       },
+       "action": {
+         "joint_pos_abs": {
+           "max": [
+             0.8729159235954285,
+             0.7510752081871033,
+             0.6012352705001831,
+             -1.6904202699661255,
+             1.0410722494125366,
+             3.4794368743896484,
+             0.9732310175895691
+           ],
+           "min": [
+             -0.6261364817619324,
+             -0.9543799161911011,
+             -0.634001612663269,
+             -2.846489429473877,
+             -0.6983137726783752,
+             1.8832087516784668,
+             -0.8915459513664246
+           ],
+           "mean": [
+             0.07321646809577942,
+             0.16576333343982697,
+             0.003148175310343504,
+             -2.553807258605957,
+             -0.04427529498934746,
+             2.610917329788208,
+             0.10387522727251053
+           ],
+           "std": [
+             0.12389503419399261,
+             0.290284663438797,
+             0.10854695737361908,
+             0.1324664056301117,
+             0.1771559715270996,
+             0.2768799364566803,
+             0.18244117498397827
+           ],
+           "q01": [
+             -0.16883401975035667,
+             -0.5613366097211838,
+             -0.2914287367463112,
+             -2.7689431953430175,
+             -0.48490726947784424,
+             1.9312198102474212,
+             -0.41215672731399533
+           ],
+           "q99": [
+             0.467352289557456,
+             0.5477241206169126,
+             0.2789128881692882,
+             -2.1965090298652665,
+             0.4599714136123652,
+             3.148701553344726,
+             0.5720321047306058
+           ]
+         },
+         "gripper_close": {
+           "max": [
+             1.0
+           ],
+           "min": [
+             0.0
+           ],
+           "mean": [
+             0.2008967101573944
+           ],
+           "std": [
+             0.22869807481765747
+           ],
+           "q01": [
+             0.0
+           ],
+           "q99": [
+             0.5319383144378662
+           ]
+         }
+       }
+     },
+     "modalities": {
+       "video": {
+         "exterior_image_1_left": {
+           "resolution": [
+             1280,
+             720
+           ],
+           "channels": 3,
+           "fps": 10.0
+         },
+         "wrist_image_left": {
+           "resolution": [
+             1280,
+             720
+           ],
+           "channels": 3,
+           "fps": 10.0
+         }
+       },
+       "state": {
+         "joint_pos_abs": {
+           "absolute": true,
+           "rotation_type": null,
+           "shape": [
+             7
+           ],
+           "continuous": true
+         },
+         "gripper_close": {
+           "absolute": true,
+           "rotation_type": null,
+           "shape": [
+             1
+           ],
+           "continuous": true
+         }
+       },
+       "action": {
+         "joint_pos_abs": {
+           "absolute": true,
+           "rotation_type": null,
+           "shape": [
+             7
+           ],
+           "continuous": true
+         },
+         "gripper_close": {
+           "absolute": true,
+           "rotation_type": null,
+           "shape": [
+             1
+           ],
+           "continuous": true
+         }
+       }
+     },
+     "embodiment_tag": "new_embodiment"
+   }
+ }
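
The statistics above are per-dimension summaries of the recorded states and actions. As a hedged illustration of how such q01/q99 values are commonly used (this commit does not show the exact normalization GR00T applies), the sketch below clips a 7-DoF joint state to its [q01, q99] range and rescales it to [-1, 1].

# Illustrative only: q01/q99-based normalization of a 7-DoF joint state,
# assuming (not confirmed by this repo) that the policy expects values in [-1, 1].
import json
import numpy as np

with open("experiment_cfg/metadata.json") as f:
    meta = json.load(f)

stats = meta["new_embodiment"]["statistics"]["state"]["joint_pos_abs"]
q01 = np.array(stats["q01"])
q99 = np.array(stats["q99"])

def normalize(joint_pos_abs: np.ndarray) -> np.ndarray:
    # Clip to the 1st/99th percentile band, then map linearly to [-1, 1].
    clipped = np.clip(joint_pos_abs, q01, q99)
    return 2.0 * (clipped - q01) / (q99 - q01) - 1.0

print(normalize(np.array(stats["mean"])))  # mean state mapped into [-1, 1]
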
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9ee28e372804f43320c4c2a49a6241a04f60bf0346cae54ab10c99c044831b0d
+ size 4999367032
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa66a88755f8007016a57c1eb48aab77f5d17414ff315287c97a3c53e8e199f2
+ size 2792292200
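
The two entries above are Git LFS pointer files, not the weights themselves; each records the SHA-256 digest and byte size of the actual shard. A generic check like the one below, which assumes the shards have already been downloaded next to the script, can confirm a local copy matches its pointer.

# Generic integrity check against the Git LFS pointers (assumes local downloads).
import hashlib
import os

EXPECTED = {
    "model-00001-of-00002.safetensors": (
        "9ee28e372804f43320c4c2a49a6241a04f60bf0346cae54ab10c99c044831b0d", 4999367032),
    "model-00002-of-00002.safetensors": (
        "fa66a88755f8007016a57c1eb48aab77f5d17414ff315287c97a3c53e8e199f2", 2792292200),
}

for name, (oid, size) in EXPECTED.items():
    h = hashlib.sha256()
    with open(name, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    ok = h.hexdigest() == oid and os.path.getsize(name) == size
    print(name, "OK" if ok else "MISMATCH")
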
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0afb7bfa8685d609263499ad2661ec598ea00a434a6bff18bfafb98587837f79
+ size 8961896450
rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d39821f95a153a96ecc065ea854c2ce5ba96c50814691ee4d94c588c93485a32
+ size 14512
rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:582076d5b78ffa4248f51f85c0030c3076d35335bceffeff5149e4862b6ba06c
+ size 14512
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb677b69d5815e7e1f5f341ea75c4bd0f7789a7a85a36e672ade3f744edffa5f
+ size 1064
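
optimizer.pt, the rng_state_*.pth files, and scheduler.pt round out a Hugging Face Trainer checkpoint (optimizer moments, per-process RNG state, and LR-scheduler state). A cautious way to peek at the small scheduler file, assuming it is an ordinary torch.save artifact, is sketched below.

# Hedged sketch: assumes scheduler.pt is an LR-scheduler state_dict saved with
# torch.save; weights_only=True restricts unpickling to plain tensors/containers.
import torch

sched_state = torch.load("scheduler.pt", map_location="cpu", weights_only=True)
print(sorted(sched_state))  # state_dict keys, e.g. last_epoch / _step_count if present
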
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff