Add files using upload-large-folder tool
- .gitattributes +1 -0
- checkpoints/Qwen2.5-14B/babylm_hop_control_10M_seed0/runs/checkpoint-1682/model-00002-of-00006.safetensors +3 -0
- checkpoints/Qwen2.5-14B/babylm_hop_control_10M_seed0/runs/checkpoint-1682/model-00004-of-00006.safetensors +3 -0
- checkpoints/Qwen2.5-14B/babylm_hop_control_10M_seed0/runs/checkpoint-1682/model-00006-of-00006.safetensors +3 -0
- checkpoints/Qwen2.5-14B/babylm_reverse_control_10M_seed0/runs/checkpoint-2072/model-00006-of-00006.safetensors +3 -0
- checkpoints/Qwen2.5-14B/babylm_reverse_partial_10M_seed0/runs/checkpoint-2072/model-00002-of-00006.safetensors +3 -0
- checkpoints/Qwen2.5-14B/babylm_reverse_partial_10M_seed0/runs/checkpoint-2072/model-00004-of-00006.safetensors +3 -0
- checkpoints/Qwen2.5-14B/babylm_reverse_partial_10M_seed0/runs/checkpoint-500/model-00005-of-00006.safetensors +3 -0
- checkpoints/Qwen2.5-14B/babylm_reverse_partial_10M_seed0/runs/checkpoint-500/model-00006-of-00006.safetensors +3 -0
- checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/rng_state_0.pth +3 -0
- checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/rng_state_1.pth +3 -0
- checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/scheduler.pt +3 -0
- checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/tokenizer.json +3 -0
- checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/training_args.bin +3 -0
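The commit title refers to the `upload-large-folder` tool from `huggingface_hub`. For reference, a commit like this one could be produced with a short Python call along the following lines; this is a minimal sketch, and the repo ID and local path are placeholders rather than values taken from this repository.

```python
# Minimal sketch (assumes huggingface_hub >= 0.24, which ships upload_large_folder;
# repo_id and folder_path are placeholders, not values from this commit).
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or HF_TOKEN
api.upload_large_folder(
    repo_id="your-org/your-checkpoints-repo",  # placeholder
    folder_path="checkpoints/",                # local folder to mirror into the repo
    repo_type="model",
)
```

The tool is designed to upload the folder in resumable chunks, with large binaries such as the `.safetensors` shards listed above stored through Git LFS, which is why the entries below appear as LFS pointer files.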
.gitattributes
CHANGED
@@ -109,3 +109,4 @@ checkpoints/Qwen2.5-14B/babylm_shuffle_deterministic84_10M_seed0/artifacts/model
 checkpoints/Qwen2.5-14B/babylm_shuffle_deterministic84_10M_seed0/artifacts/models--Qwen--Qwen2.5-14B/blobs/e015e2bc9a26b4e46d77913d8c667608ae7e48aa1eca04af5786c2408f4bc0fa filter=lfs diff=lfs merge=lfs -text
 checkpoints/Qwen2.5-14B/babylm_shuffle_deterministic84_10M_seed0/runs/checkpoint-1934/tokenizer.json filter=lfs diff=lfs merge=lfs -text
 checkpoints/Qwen2.5-14B/babylm_shuffle_deterministic84_10M_seed0/runs/checkpoint-500/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/tokenizer.json filter=lfs diff=lfs merge=lfs -text
checkpoints/Qwen2.5-14B/babylm_hop_control_10M_seed0/runs/checkpoint-1682/model-00002-of-00006.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:930c57804442b163c8b620f9b31bcf6bfe840ddfff88a063770ab3f843765d4e
+size 4954847240
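Each file marked ADDED in this commit is a Git LFS pointer rather than the binary itself: three lines giving the pointer spec version, the `sha256` object ID, and the blob size in bytes. A small illustrative Python helper (hypothetical, not part of this repository) that reads those fields from a checked-out pointer:

```python
# Minimal sketch: parse a Git LFS pointer file of the form shown in this commit
# (version / oid sha256:<hex> / size <bytes>). The path below is one of the added files;
# this only works while the working copy still holds the pointer (before `git lfs pull`).
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = parse_lfs_pointer(
    "checkpoints/Qwen2.5-14B/babylm_hop_control_10M_seed0/runs/"
    "checkpoint-1682/model-00002-of-00006.safetensors"  # the pointer, not the ~4.9 GB shard
)
print(pointer["oid"], pointer["size"])  # sha256:930c5780... 4954847240
```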
checkpoints/Qwen2.5-14B/babylm_hop_control_10M_seed0/runs/checkpoint-1682/model-00004-of-00006.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f6e0402115f0d8d0c7a091dda9fc006e125e2f4928bb5c6ec52e44372630fcd6
+size 4954847280
checkpoints/Qwen2.5-14B/babylm_hop_control_10M_seed0/runs/checkpoint-1682/model-00006-of-00006.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3ddaf8ad2cbda2d8f6730b2f78b3341f55d33c4e6dafb23c9fabe5b38e67c33b
+size 4734533096
checkpoints/Qwen2.5-14B/babylm_reverse_control_10M_seed0/runs/checkpoint-2072/model-00006-of-00006.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a130cf5563321da98e9731ad1ebc27336fc6cc55ba0cfb8e312c3ca9c95440a3
+size 4734533096
checkpoints/Qwen2.5-14B/babylm_reverse_partial_10M_seed0/runs/checkpoint-2072/model-00002-of-00006.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:82ec48d89f26fd3fff9a61c6c537de0628511bbd5e468cfce0638e2d0a1a00b2
+size 4954847240
checkpoints/Qwen2.5-14B/babylm_reverse_partial_10M_seed0/runs/checkpoint-2072/model-00004-of-00006.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:270b79b3a0a9194c73d14197af94e8387baf532b3dc2114432b4318f104854fe
+size 4954847280
checkpoints/Qwen2.5-14B/babylm_reverse_partial_10M_seed0/runs/checkpoint-500/model-00005-of-00006.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1c2808cea829939b2f5b485eed07d1127b9f672028ddb1516c1de12da8493c58
+size 4954847280
checkpoints/Qwen2.5-14B/babylm_reverse_partial_10M_seed0/runs/checkpoint-500/model-00006-of-00006.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5550d5419abaf1bab9b25250b7e4f1faf27d8fdaa242cca3c8ef6a24961a8b38
+size 4734533096
checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/rng_state_0.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a4586e2577df5c7df99f7f22a9eb9c344aa93bfea80555b0225f947467a9d86
+size 14448
checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/rng_state_1.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1624ef9770b8f455f1fa79fd5fa6ffca4c49ba7f89fc032dc21b0a014579023b
+size 14448
checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f6386f45ee4d0a26e2e48aff4efd621bf67fd10de0e30f2fdf9b1c0e4306aeea
+size 1064
checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ecd83fc7e66c5df56d1de2cf94904a61d23ed07d056f5f3fb227cdd17f187772
+size 11422536
checkpoints/Qwen2.5-7B/babylm_hop_words4_10M_seed0/runs/checkpoint-1122/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4fd2c3e58ff6ea91bdf3a1c1c09aea30d650086b26e5b6839eec99c6e3606359
+size 6520
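Once a checkpoint directory such as `checkpoint-1682` has been fully materialized locally (all six `.safetensors` shards plus the shard index, config, and tokenizer files resolved from their LFS pointers), it could be loaded with `transformers`. A minimal sketch under those assumptions:

```python
# Minimal sketch (assumes transformers + safetensors are installed and the directory
# contains every shard plus the *.safetensors.index.json, config, and tokenizer files).
from transformers import AutoModelForCausalLM, AutoTokenizer

ckpt_dir = "checkpoints/Qwen2.5-14B/babylm_hop_control_10M_seed0/runs/checkpoint-1682"
tokenizer = AutoTokenizer.from_pretrained(ckpt_dir)
model = AutoModelForCausalLM.from_pretrained(ckpt_dir, torch_dtype="auto")  # loads the sharded weights
```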