Training in progress, epoch 1
adapter_config.json
CHANGED
@@ -23,16 +23,16 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "fc1",
-    "v_proj",
-    "down_proj",
     "k_proj",
-    "q_proj",
+    "gate_proj",
+    "o_proj",
     "up_proj",
     "out_proj",
+    "fc1",
+    "q_proj",
+    "v_proj",
+    "down_proj",
     "lm_head",
-    "o_proj",
-    "gate_proj",
     "fc2"
   ],
   "task_type": "CAUSAL_LM",
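The change above only reorders the adapter's target_modules list. For reference, a module list like this is normally declared through peft's LoraConfig; the sketch below is a minimal, hypothetical example, not the configuration used for this run: the rank and alpha values are placeholders, and the module names assume the base model exposes layers with exactly these names.

from peft import LoraConfig

# Minimal sketch of a LoRA config matching the target_modules list above.
# r and lora_alpha are placeholder values, not read from this repository.
lora_config = LoraConfig(
    r=16,
    lora_alpha=32,
    target_modules=[
        "k_proj", "gate_proj", "o_proj", "up_proj", "out_proj",
        "fc1", "q_proj", "v_proj", "down_proj", "lm_head", "fc2",
    ],
    task_type="CAUSAL_LM",
)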
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3d7beff11cfb171af1c51593a811829b6d5b8bd0b5b4b0b11efd092e50933348
 size 2839124552
runs/Jun05_09-15-09_gandalf/events.out.tfevents.1749129314.gandalf
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b9a9ee31659a34633f560c36e8369d69cc67a08988537af6b9c5fd372b43e527
+size 31127
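The new events.out.tfevents pointer is the TensorBoard log for this epoch. Once the actual file is pulled from LFS, it can be inspected with TensorBoard's event-accumulator API; the sketch below assumes the run directory has been downloaded locally, and the "train/loss" tag name is a guess, not something confirmed by this repository.

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Load the downloaded run directory and list what was logged.
acc = EventAccumulator("runs/Jun05_09-15-09_gandalf")
acc.Reload()
print(acc.Tags())

# Iterate a scalar series; "train/loss" is a hypothetical tag name.
for event in acc.Scalars("train/loss"):
    print(event.step, event.value)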
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:12425648e22dcc58fc7703edb39adaf9b90bdff3034a84861f993b90ac7741c4
 size 5688
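The binary files in this commit are stored as Git LFS pointers, so each oid line is the SHA-256 of the real file content. A quick way to check that a downloaded file matches its pointer, assuming training_args.bin has been fetched locally, is to hash it and compare against the oid shown above.

import hashlib

# SHA-256 taken from the training_args.bin pointer above.
expected = "12425648e22dcc58fc7703edb39adaf9b90bdff3034a84861f993b90ac7741c4"

h = hashlib.sha256()
with open("training_args.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print(h.hexdigest() == expected)  # True if the download is intact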