Column schema (28 columns; numeric columns list their min to max values, string columns their min to max character lengths):

Unnamed: 0           int64    0 to 245k
repo_id              string   lengths 4 to 122
author               string   lengths 2 to 42
model_type           string   lengths 2 to 34
files_per_repo       int64    0 to 77k
downloads_30d        int64    0 to 55.9M
library              string   lengths 2 to 37
likes                int64    0 to 8.48k
pipeline             string   lengths 5 to 30
pytorch              bool     2 classes
tensorflow           bool     2 classes
jax                  bool     2 classes
license              string   lengths 2 to 33
languages            string   lengths 2 to 1.63k
datasets             string   lengths 2 to 5.05k
co2                  string   lengths 3 to 342
prs_count            int64    0 to 168
prs_open             int64    0 to 121
prs_merged           int64    0 to 167
prs_closed           int64    0 to 35
discussions_count    int64    0 to 226
discussions_open     int64    0 to 155
discussions_closed   int64    0 to 76
tags                 string   lengths 2 to 7.26k
has_model_index      bool     2 classes
has_metadata         bool     2 classes
has_text             bool     2 classes
text_length          int64    0 to 849k
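The list-valued columns (languages, datasets, tags) are stored as stringified Python lists, and missing values appear as the literal null. A minimal loading sketch in Python under those assumptions; the file name model_repos_stats.csv is a placeholder, not something given by the data itself:

import ast
import pandas as pd

# Placeholder file name: assume the CSV rows below have been saved to disk.
df = pd.read_csv("model_repos_stats.csv", na_values=["null"])

# languages, datasets and tags are stringified Python lists; parse them, mapping missing cells to [].
for col in ["languages", "datasets", "tags"]:
    df[col] = df[col].apply(lambda v: ast.literal_eval(v) if isinstance(v, str) else [])

# Flag columns are stored as lowercase true/false; normalise them to real booleans.
for col in ["pytorch", "tensorflow", "jax", "has_model_index", "has_metadata", "has_text"]:
    df[col] = df[col].astype(str).str.lower() == "true"

# Example query: HooshvareLab repos with more than 1,000 downloads in the last 30 days.
busy = df[(df["author"] == "HooshvareLab") & (df["downloads_30d"] > 1000)]
print(busy[["repo_id", "pipeline", "downloads_30d", "likes"]])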
Rows 4300 to 4399, one CSV record per line using the 28 columns above:

Unnamed: 0,repo_id,author,model_type,files_per_repo,downloads_30d,library,likes,pipeline,pytorch,tensorflow,jax,license,languages,datasets,co2,prs_count,prs_open,prs_merged,prs_closed,discussions_count,discussions_open,discussions_closed,tags,has_model_index,has_metadata,has_text,text_length
4300,HooshvareLab/albert-fa-zwnj-base-v2,HooshvareLab,albert,11,370,transformers,2,fill-mask,true,true,false,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'albert', 'fill-mask', 'fa', 'transformers', 'license:apache-2.0', 'autotrain_compatible']",false,true,true,698
4301,HooshvareLab/bert-base-parsbert-armanner-uncased,HooshvareLab,bert,13,876,transformers,2,token-classification,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'token-classification', 'fa', 'arxiv:2005.12515', 'transformers', 'license:apache-2.0', 'autotrain_compatible']",false,true,true,4673
4302,HooshvareLab/bert-base-parsbert-ner-uncased,HooshvareLab,bert,13,237275,transformers,3,token-classification,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'token-classification', 'fa', 'arxiv:2005.12515', 'transformers', 'license:apache-2.0', 'autotrain_compatible']",false,true,true,5521
4303,HooshvareLab/bert-base-parsbert-peymaner-uncased,HooshvareLab,bert,13,91,transformers,0,token-classification,true,true,true,apache-2.0,"['fa']",null,null,0,0,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'token-classification', 'fa', 'arxiv:2005.12515', 'transformers', 'license:apache-2.0', 'autotrain_compatible']",false,true,true,4776
4304,HooshvareLab/bert-base-parsbert-uncased,HooshvareLab,bert,7,40489,transformers,12,fill-mask,true,true,true,null,null,null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'arxiv:2005.12515', 'transformers', 'autotrain_compatible', 'has_space']",false,false,true,5818
4305,HooshvareLab/bert-fa-base-uncased-clf-digimag,HooshvareLab,bert,12,46,transformers,0,text-classification,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'text-classification', 'fa', 'transformers', 'license:apache-2.0']",false,true,true,2687
4306,HooshvareLab/bert-fa-base-uncased-clf-persiannews,HooshvareLab,bert,12,661,transformers,3,text-classification,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'text-classification', 'fa', 'transformers', 'license:apache-2.0', 'has_space']",false,true,true,2726
4307,HooshvareLab/bert-fa-base-uncased-ner-arman,HooshvareLab,bert,12,45,transformers,0,token-classification,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'token-classification', 'fa', 'transformers', 'license:apache-2.0', 'autotrain_compatible']",false,true,true,3073
4308,HooshvareLab/bert-fa-base-uncased-ner-peyma,HooshvareLab,bert,12,134,transformers,2,token-classification,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'token-classification', 'fa', 'transformers', 'license:apache-2.0', 'autotrain_compatible']",false,true,true,3176
4309,HooshvareLab/bert-fa-base-uncased-sentiment-deepsentipers-binary,HooshvareLab,bert,12,805,transformers,3,text-classification,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'text-classification', 'fa', 'transformers', 'license:apache-2.0']",false,true,true,3226
4310,HooshvareLab/bert-fa-base-uncased-sentiment-deepsentipers-multi,HooshvareLab,bert,12,154,transformers,0,text-classification,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'text-classification', 'fa', 'transformers', 'license:apache-2.0']",false,true,true,3227
4311,HooshvareLab/bert-fa-base-uncased-sentiment-digikala,HooshvareLab,bert,12,224,transformers,1,text-classification,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'text-classification', 'fa', 'transformers', 'license:apache-2.0']",false,true,true,2633
4312,HooshvareLab/bert-fa-base-uncased-sentiment-snappfood,HooshvareLab,bert,12,268,transformers,1,text-classification,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'text-classification', 'fa', 'transformers', 'license:apache-2.0']",false,true,true,2609
4313,HooshvareLab/bert-fa-base-uncased,HooshvareLab,bert,7,7964,transformers,4,fill-mask,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'fa', 'arxiv:2005.12515', 'transformers', 'bert-fa', 'bert-persian', 'persian-lm', 'license:apache-2.0', 'autotrain_compatible', 'has_space']",false,true,true,6869
4314,HooshvareLab/bert-fa-zwnj-base-ner,HooshvareLab,bert,10,80,transformers,3,token-classification,true,true,true,null,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'token-classification', 'fa', 'transformers', 'autotrain_compatible']",false,true,true,3629
4315,HooshvareLab/bert-fa-zwnj-base,HooshvareLab,bert,10,3564,transformers,4,fill-mask,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'bert', 'fill-mask', 'fa', 'arxiv:2005.12515', 'transformers', 'license:apache-2.0', 'autotrain_compatible']",false,true,true,1083
4316,HooshvareLab/distilbert-fa-zwnj-base-ner,HooshvareLab,distilbert,9,414934,transformers,4,token-classification,true,true,false,null,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'distilbert', 'token-classification', 'fa', 'transformers', 'autotrain_compatible']",false,true,true,3641
4317,HooshvareLab/distilbert-fa-zwnj-base,HooshvareLab,distilbert,9,199,transformers,1,fill-mask,true,true,false,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'distilbert', 'fill-mask', 'fa', 'transformers', 'license:apache-2.0', 'autotrain_compatible']",false,true,true,291
4318,HooshvareLab/gpt2-fa-comment,HooshvareLab,gpt2,12,74,transformers,0,text-generation,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'gpt2', 'text-generation', 'fa', 'transformers', 'license:apache-2.0']",false,true,true,16752
4319,HooshvareLab/gpt2-fa-poetry,HooshvareLab,gpt2,12,64,transformers,0,text-generation,true,true,true,apache-2.0,"['fa']",null,null,2,2,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'gpt2', 'text-generation', 'fa', 'transformers', 'license:apache-2.0']",false,true,true,2446
4320,HooshvareLab/gpt2-fa,HooshvareLab,gpt2,12,566,transformers,4,text-generation,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'gpt2', 'text-generation', 'fa', 'transformers', 'license:apache-2.0']",false,true,true,455
4321,HooshvareLab/roberta-fa-zwnj-base-ner,HooshvareLab,roberta,11,86,transformers,0,token-classification,true,true,true,null,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'roberta', 'token-classification', 'fa', 'transformers', 'autotrain_compatible']",false,true,true,3635
4322,HooshvareLab/roberta-fa-zwnj-base,HooshvareLab,roberta,11,879,transformers,0,fill-mask,true,true,true,apache-2.0,"['fa']",null,null,1,1,0,0,0,0,0,"['pytorch', 'tf', 'jax', 'roberta', 'fill-mask', 'fa', 'transformers', 'license:apache-2.0', 'autotrain_compatible']",false,true,true,291
4323,Hormigo/roberta-base-bne-finetuned-amazon_reviews_multi,Hormigo,roberta,13,16,transformers,0,text-classification,true,false,false,cc-by-4.0,null,"['amazon_reviews_multi']",null,0,0,0,0,0,0,0,"['pytorch', 'tensorboard', 'roberta', 'text-classification', 'dataset:amazon_reviews_multi', 'transformers', 'generated_from_trainer', 'license:cc-by-4.0']",false,true,true,1317
4324,Hotaa/Roronohota,Hotaa,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4325,Hoya/jjmodel,Hoya,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4326,Hshsannsb/Oussama,Hshsannsb,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4327,Htenn/DialoGPT-small-spongebob,Htenn,gpt2,71,28,transformers,0,conversational,true,false,false,null,null,null,null,1,1,0,0,0,0,0,"['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational']",false,true,true,27
4328,Htenn/DialoGPT-small-spongebobv2,Htenn,gpt2,171,120,transformers,0,conversational,true,false,false,null,null,null,null,1,1,0,0,0,0,0,"['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational']",false,true,true,27
4329,Huangg/distilgpt2-finetuned-wikitext2,Huangg,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4330,HueJanus/DialoGPT-medium-ricksanchez,HueJanus,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4331,HueJanus/DialoGPT-small-ricksanchez,HueJanus,gpt2,11,13,transformers,0,conversational,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'gpt2', 'text-generation', 'transformers', 'conversational']",false,true,true,30
4332,Huertas97/en_roberta_base_leetspeak_ner,Huertas97,null,17,0,spacy,1,token-classification,false,false,false,null,null,null,null,0,0,0,0,0,0,0,"['en', 'spacy', 'token-classification', 'license:apache-2.0', 'model-index']",false,false,false,0
4333,Huertas97/es_roberta_base_bne_leetspeak_ner,Huertas97,null,18,0,spacy,1,token-classification,false,false,false,null,null,null,null,0,0,0,0,0,0,0,"['es', 'spacy', 'token-classification', 'license:apache-2.0', 'model-index']",false,false,false,0
4334,HueyNemud/berties,HueyNemud,camembert,8,63,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'camembert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4335,HueyNemud/das22-10-camembert_pretrained,HueyNemud,camembert,13,12,transformers,0,fill-mask,true,false,false,null,null,null,null,1,1,0,0,0,0,0,"['pytorch', 'camembert', 'fill-mask', 'transformers', 'generated_from_trainer', 'autotrain_compatible']",true,true,true,2525
4336,Huffon/klue-roberta-base-nli,Huffon,roberta,8,836,transformers,4,text-classification,true,false,false,null,"['ko']","['klue']",null,1,1,0,0,0,0,0,"['pytorch', 'roberta', 'text-classification', 'ko', 'dataset:klue', 'transformers', 'nli']",false,true,false,0
4337,Huffon/qnli,Huffon,electra,7,10,transformers,0,text-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'electra', 'text-classification', 'transformers']",false,false,false,0
4338,Huffon/sentence-klue-roberta-base,Huffon,roberta,8,1438,sentence-transformers,5,null,true,false,false,null,"['ko']","['klue']",null,0,0,0,0,0,0,0,"['pytorch', 'roberta', 'ko', 'dataset:klue', 'arxiv:1908.10084', 'sentence-transformers', 'has_space']",false,true,true,1943
4339,HuggingLeg/t5-small-finetuned-xsum,HuggingLeg,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4340,HugoZhu/t5-small-finetuned-xsum,HugoZhu,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4341,Huit/test,Huit,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4342,Humair/all-mpnet-base-v2-finetuned-v2,Humair,mpnet,12,9,sentence-transformers,0,sentence-similarity,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'mpnet', 'sentence-transformers', 'feature-extraction', 'sentence-similarity', 'transformers']",false,true,true,3792
4343,HungChau/bert_concept_extraction,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4344,HungChau/bert_concept_extraction_iir_from_kp20k_v1.1,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4345,HungChau/bert_concept_extraction_kp20k_from_iir_v1.1,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4346,HungChau/distilbert-base-cased-concept-extraction-iir-v1.0-concept-extraction-kp20k-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4347,HungChau/distilbert-base-cased-concept-extraction-iir-v1.0-concept-extraction-kp20k-v1.4,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4348,HungChau/distilbert-base-cased-concept-extraction-iir-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4349,HungChau/distilbert-base-cased-concept-extraction-iir-v1.2-concept-extraction-kp20k-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4350,HungChau/distilbert-base-cased-concept-extraction-iir-v1.2-concept-extraction-kp20k-v1.5,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4351,HungChau/distilbert-base-cased-concept-extraction-iir-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4352,HungChau/distilbert-base-cased-concept-extraction-iir-v1.3,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4353,HungChau/distilbert-base-cased-concept-extraction-kp20k-v1.0-concept-extraction-wikipedia-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4354,HungChau/distilbert-base-cased-concept-extraction-kp20k-v1.0-concept-extraction-wikipedia-v1.3,HungChau,distilbert,7,10,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4355,HungChau/distilbert-base-cased-concept-extraction-kp20k-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4356,HungChau/distilbert-base-cased-concept-extraction-kp20k-v1.2-concept-extraction-allwikipedia-v1.0,HungChau,distilbert,7,10,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4357,HungChau/distilbert-base-cased-concept-extraction-kp20k-v1.2-concept-extraction-wikipedia-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4358,HungChau/distilbert-base-cased-concept-extraction-kp20k-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4359,HungChau/distilbert-base-cased-concept-extraction-wikipedia-v1.0-concept-extraction-iir-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4360,HungChau/distilbert-base-cased-concept-extraction-wikipedia-v1.0-concept-extraction-iir-v1.3,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,1,1,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4361,HungChau/distilbert-base-cased-concept-extraction-wikipedia-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4362,HungChau/distilbert-base-cased-concept-extraction-wikipedia-v1.2-concept-extraction-iir-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4363,HungChau/distilbert-base-cased-concept-extraction-wikipedia-v1.2,HungChau,distilbert,7,14,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4364,HungChau/distilbert-base-uncased-concept-extraction-iir-v1.0-concept-extraction-truncated-3edbbc,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4365,HungChau/distilbert-base-uncased-concept-extraction-iir-v1.0-concept-extraction-kp20k-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4366,HungChau/distilbert-base-uncased-concept-extraction-iir-v1.0-concept-extraction-kp20k-v1.3,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4367,HungChau/distilbert-base-uncased-concept-extraction-iir-v1.0-concept-extraction-kp20k-v1.4,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4368,HungChau/distilbert-base-uncased-concept-extraction-iir-v1.0-concept-extraction-wikipedia-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4369,HungChau/distilbert-base-uncased-concept-extraction-iir-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4370,HungChau/distilbert-base-uncased-concept-extraction-iir-v1.2-concept-extraction-kp20k-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4371,HungChau/distilbert-base-uncased-concept-extraction-iir-v1.2-concept-extraction-kp20k-v1.5,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4372,HungChau/distilbert-base-uncased-concept-extraction-iir-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4373,HungChau/distilbert-base-uncased-concept-extraction-iir-v1.3,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4374,HungChau/distilbert-base-uncased-concept-extraction-kp20k-v1.0-concept-extracti-truncated-435523,HungChau,distilbert,7,9,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4375,HungChau/distilbert-base-uncased-concept-extraction-kp20k-v1.0-concept-extraction-iir-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4376,HungChau/distilbert-base-uncased-concept-extraction-kp20k-v1.0-concept-extracti-truncated-7d1e33,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4377,HungChau/distilbert-base-uncased-concept-extraction-kp20k-v1.0-concept-extraction-wikipedia-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4378,HungChau/distilbert-base-uncased-concept-extraction-kp20k-v1.0-concept-extraction-wikipedia-v1.1,HungChau,distilbert,7,11,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4379,HungChau/distilbert-base-uncased-concept-extraction-kp20k-v1.0-concept-extraction-wikipedia-v1.3,HungChau,distilbert,7,13,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4380,HungChau/distilbert-base-uncased-concept-extraction-kp20k-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4381,HungChau/distilbert-base-uncased-concept-extraction-kp20k-v1.2-concept-extraction-allwikipedia-v1.0,HungChau,distilbert,7,11,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4382,HungChau/distilbert-base-uncased-concept-extraction-kp20k-v1.2-concept-extraction-wikipedia-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4383,HungChau/distilbert-base-uncased-concept-extraction-kp20k-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4384,HungChau/distilbert-base-uncased-concept-extraction-wikipedia-v1.0-concept-extraction-iir-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4385,HungChau/distilbert-base-uncased-concept-extraction-wikipedia-v1.0-concept-extraction-iir-v1.3,HungChau,distilbert,7,10,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4386,HungChau/distilbert-base-uncased-concept-extraction-wikipedia-v1.0-concept-extraction-kp20k-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4387,HungChau/distilbert-base-uncased-concept-extraction-wikipedia-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4388,HungChau/distilbert-base-uncased-concept-extraction-wikipedia-v1.1-concept-extraction-iir-v1.0,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4389,HungChau/distilbert-base-uncased-concept-extraction-wikipedia-v1.1,HungChau,distilbert,7,17,transformers,0,token-classification,true,false,false,null,null,null,null,1,1,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4390,HungChau/distilbert-base-uncased-concept-extraction-wikipedia-v1.2-concept-extraction-iir-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4391,HungChau/distilbert-base-uncased-concept-extraction-wikipedia-v1.2,HungChau,distilbert,7,8,transformers,0,token-classification,true,false,false,null,null,null,null,0,0,0,0,0,0,0,"['pytorch', 'distilbert', 'token-classification', 'transformers', 'autotrain_compatible']",false,false,false,0
4392,HungChau/distilbert-base-uncased-finetuned-ner,HungChau,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4393,HungVo/mt-dnn-ev-mrpc,HungVo,null,4,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,true,132
4394,Huntersx/cola_model,Huntersx,bert,6,31,transformers,0,text-classification,true,false,true,null,null,null,null,1,1,0,0,0,0,0,"['pytorch', 'jax', 'bert', 'text-classification', 'transformers']",false,false,false,0
4395,Hursh/cg-finetuned-fintech,Hursh,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4396,Husk0440/Husk,Husk0440,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4397,Hwasin/BKP,Hwasin,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4398,Hwasin/bart-finetuned-kp,Hwasin,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
4399,Hwasin/bart-finetuned-kp20k,Hwasin,null,1,0,null,0,null,false,false,false,null,null,null,null,0,0,0,0,0,0,0,[],false,false,false,0
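Continuing the loading sketch above (still an illustration, operating only on whatever rows have been loaded into df), a quick per-pipeline and per-author summary of records like these:

# Count records per pipeline, keeping rows with no pipeline visible.
print(df["pipeline"].value_counts(dropna=False))

# Total 30-day downloads per author, largest first.
print(df.groupby("author")["downloads_30d"].sum().sort_values(ascending=False).head(10))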