Upload folder using huggingface_hub
- .gitattributes +4 -0
- config.json +10 -3
- onnx/model.onnx +3 -0
- onnx/model.onnx_data +3 -0
- onnx/model_fp16.onnx +3 -0
- onnx/model_fp16.onnx_data +3 -0
- onnx/model_q4.onnx +3 -0
- onnx/model_q4.onnx_data +3 -0
- onnx/model_q4f16.onnx +3 -0
- onnx/model_q4f16.onnx_data +3 -0
- tokenizer.json +0 -0
- tokenizer_config.json +3 -2
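
For reference, a commit like this is typically produced with the `upload_folder` helper from the `huggingface_hub` Python library, which the commit title names. A minimal sketch, assuming the exported files already sit in a local working directory and that the repository id below is a placeholder:

```python
from huggingface_hub import upload_folder

# Placeholder repo id; large binary files such as onnx/*.onnx_data are
# uploaded as LFS objects rather than plain Git blobs.
upload_folder(
    folder_path=".",                     # local folder with config.json, onnx/, tokenizer files
    repo_id="your-username/your-model",  # hypothetical target repository
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```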
.gitattributes
CHANGED
@@ -33,3 +33,7 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+onnx/model.onnx_data filter=lfs diff=lfs merge=lfs -text
+onnx/model_fp16.onnx_data filter=lfs diff=lfs merge=lfs -text
+onnx/model_q4.onnx_data filter=lfs diff=lfs merge=lfs -text
+onnx/model_q4f16.onnx_data filter=lfs diff=lfs merge=lfs -text
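
The four new rules track the ONNX external-data files with Git LFS. These `.onnx_data` files hold the raw tensor weights when a model is saved in ONNX external-data format; a hedged sketch with the `onnx` Python package, assuming the file names match the ones added above:

```python
import onnx

# Re-save an existing export with its weights moved to a side file.
# The .onnx file keeps the graph; "model.onnx_data" (relative to it)
# receives the tensors, which is what the new LFS rules track.
model = onnx.load("onnx/model.onnx")
onnx.save_model(
    model,
    "onnx/model.onnx",
    save_as_external_data=True,
    all_tensors_to_one_file=True,
    location="model.onnx_data",
    size_threshold=1024,
)
```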
config.json
CHANGED
@@ -21,7 +21,7 @@
   "eos_token_id": 7,
   "hidden_size": 32,
   "initializer_range": 0.02,
-  "intermediate_size":
+  "intermediate_size": 256,
   "layer_types": [
     "full_attention",
     "conv"
@@ -39,5 +39,12 @@
   "transformers_version": "4.54.0.dev0",
   "use_cache": true,
   "use_pos_enc": true,
-  "vocab_size": 65536
-}
+  "vocab_size": 65536,
+  "transformers.js_config": {
+    "kv_cache_dtype": {
+      "q4f16": "float16",
+      "fp16": "float16"
+    },
+    "use_external_data_format": true
+  }
+}
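
The new `transformers.js_config` block is read by Transformers.js: `kv_cache_dtype` selects the KV-cache precision per quantization variant, and `use_external_data_format` tells the loader to fetch the matching `.onnx_data` file alongside each graph. Outside the browser the split files load transparently as well; a minimal sketch with `onnxruntime`, assuming the repository files have been downloaded locally:

```python
import onnxruntime as ort

# onnxruntime resolves the external-data file (onnx/model.onnx_data)
# automatically, provided it sits next to the .onnx graph it belongs to.
session = ort.InferenceSession("onnx/model.onnx")
print([inp.name for inp in session.get_inputs()])
```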
onnx/model.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3153b565008d69db4c17d69c7085c1c201cc3d035b4e5a11876bf9e105538274
+size 22935
onnx/model.onnx_data
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6fc57fe8512eb2d33e79bfa15272921a99db97a05487d00ea0ae8a235f629bff
+size 12845056
onnx/model_fp16.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:000f9bb76f06d9dd59fd6c70cc6ade5dacf954f52fe5709cd2683e536c98b947
+size 23198
onnx/model_fp16.onnx_data
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c347a0906feb282a182a3ca61f6a045d43975f98b54b72582a1fbd1002898336
+size 6356992
onnx/model_q4.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8586910623ac41844751cd63b5f3a1c18539545857949f721a3bc3a7564a1e05
+size 27175
onnx/model_q4.onnx_data
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b49e6af9a2ec163175946388477f2b3c131be92ee6c82967c083e30252aec280
+size 12648448
onnx/model_q4f16.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fc4e1f11cf46a472b558817d63777d670482ef334b3e8fe9dfba274eb81eca0c
+size 27419
onnx/model_q4f16.onnx_data
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:11b3d5608904a3e4886b31cda6529efa0a4d3681182489cb22b6ad2cc40dee70
+size 6291456
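
Each ADDED file above is stored as a Git LFS pointer: the `oid` is the SHA-256 of the actual payload and `size` is its byte count, so the multi-megabyte weight files never enter the Git history directly. Consumers do not handle the pointers themselves; a hedged sketch with `huggingface_hub`, using a placeholder repository id:

```python
from huggingface_hub import snapshot_download

# Downloads the resolved files (actual ONNX graphs and weight payloads),
# not the LFS pointer stubs shown in the diff above.
local_dir = snapshot_download(repo_id="your-username/your-model")
print(local_dir)
```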
tokenizer.json
CHANGED
The diff for this file is too large to render.
tokenizer_config.json
CHANGED
@@ -4070,5 +4070,6 @@
   "spaces_between_special_tokens": false,
   "tokenizer_class": "PreTrainedTokenizerFast",
   "use_default_system_prompt": false,
-  "use_fast": true
-}
+  "use_fast": true,
+  "chat_template": "{{bos_token}}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"
+}
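
The added `chat_template` is a ChatML-style Jinja template: it prepends `bos_token`, wraps every message in `<|im_start|>role ... <|im_end|>` markers, and optionally appends an assistant header. A minimal sketch of rendering it with `transformers`, assuming a placeholder repository id:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-username/your-model")  # hypothetical repo id
messages = [
    {"role": "user", "content": "Hello!"},
]
# add_generation_prompt=True appends the trailing '<|im_start|>assistant\n'
# header defined in the template above.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```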