dylanebert (HF Staff) committed
Commit c770c2e · 0 Parent(s)

initial commit

Files changed (5)
  1. .gitattributes +35 -0
  2. .gitignore +11 -0
  3. LICENSE.md +20 -0
  4. README.md +13 -0
  5. app.py +220 -0
.gitattributes ADDED
@@ -0,0 +1,35 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,11 @@
+ # Python
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # Virtual environment
+ venv/
+ .venv/
+
+ # Distribution
+ dist/
LICENSE.md ADDED
@@ -0,0 +1,20 @@
+ The MIT License (MIT)
+
+ Copyright (c) Hugging Face Inc.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
+ this software and associated documentation files (the "Software"), to deal in
+ the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+ the Software, and to permit persons to whom the Software is furnished to do so,
+ subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
README.md ADDED
@@ -0,0 +1,13 @@
+ ---
+ title: Huggingface Mcp
+ emoji: 📈
+ colorFrom: green
+ colorTo: yellow
+ sdk: gradio
+ sdk_version: 5.30.0
+ app_file: app.py
+ pinned: false
+ license: mit
+ ---
+
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,220 @@
+ import json
+ from typing import Literal
+
+ import gradio as gr
+ from huggingface_hub import list_models, model_info, hf_hub_download
+
+
+ def search_models(
+     search: str = None,
+     library: str = None,
+     tags: str = None,
+     pipeline_tag: str = None,
+     sort: Literal[
+         "trending_score", "last_modified", "created_at", "downloads", "likes"
+     ] = "trending_score",
+     direction: Literal["descending", "ascending"] = "descending",
+     limit: int = 20,
+ ) -> str:
+     """
+     Search models on the Hugging Face Hub.
+
+     Use this tool to search for models by name, tags, or other filters, and to get a list of model IDs.
+     This is the first step when you need to find a specific model before retrieving its details.
+
+     Parameters:
+         search (str, optional): A string to search for in model IDs or names (e.g., "deepseek").
+         library (str, optional): Comma-separated list of libraries the models use (e.g., "pytorch,tensorflow").
+         tags (str, optional): Comma-separated list of tags to filter models by (e.g., "text-generation,llama").
+         pipeline_tag (str, optional): Filter by pipeline tag (e.g., "text-generation").
+         sort (Literal["trending_score", "last_modified", "created_at", "downloads", "likes"], default="trending_score"): Sort models by the specified key.
+         direction (Literal["descending", "ascending"], default="descending"): Sort direction.
+         limit (int, default=20): Maximum number of models to return.
+
+     Returns:
+         str: A JSON-encoded list of model IDs matching the search criteria.
+
+     Examples:
+         - To find trending models: search_models(sort="trending_score", limit=10)
+         - To search for models related to "deepseek": search_models(search="deepseek", sort="likes", limit=5)
+         - To filter by tag: search_models(tags="text-generation", pipeline_tag="text-generation")
+     """
+     try:
+         library = library.split(",") if library else None
+         tags = tags.split(",") if tags else None
+         direction = -1 if direction == "descending" else 1
+         models = list_models(
+             library=library,
+             tags=tags,
+             search=search,
+             pipeline_tag=pipeline_tag,
+             sort=sort,
+             direction=direction,
+             limit=limit,
+         )
+         return json.dumps([model.modelId for model in models])
+     except Exception as e:
+         return f"Error: {e}"
+
+
+ def get_model_info(model_id: str) -> str:
+     """
+     Get structured metadata about a model on the Hugging Face Hub.
+
+     Use this when you need specific fields like downloads, tags, or other metadata.
+     For comprehensive model information, use `get_model_card`.
+
+     This tool requires the exact model ID, which can be obtained using `search_models`.
+     If you have a partial name or tag, use `search_models` first to find the exact ID.
+
+     Parameters:
+         model_id (str): The exact model ID in the format "organization/model-name" (e.g., "DeepSeek/DeepSeek-R1").
+
+     Returns:
+         str: A JSON-encoded dictionary of model information including available fields such as:
+             - id: The model ID
+             - author: The author of the model
+             - created_at: The creation date
+             - last_modified: The last modified date
+             - downloads: Number of downloads
+             - likes: Number of likes
+             - tags: List of tags
+             - pipeline_tag: The pipeline tag
+             - library_name: The library name
+             - license: The model license
+             - base_model: The base model (if available)
+             - siblings: List of repository file paths (if available)
+             - datasets: Datasets used to train the model (if available)
+             - spaces: List of Spaces using this model (if available)
+             - xet_enabled: Whether XET is enabled (if available)
+
+     Raises:
+         Exception: If the model_id is invalid or not found. Use `search_models` to find the correct ID.
+
+     Example:
+         - First, find the model ID: search_models(search="deepseek", sort="likes", limit=1)
+         - Then, get the model info: get_model_info("DeepSeek/DeepSeek-R1")
+     """
+     try:
+         model = model_info(model_id)
+         result = {}
+
+         if hasattr(model, "id") and model.id is not None:
+             result["id"] = model.id
+
+         if hasattr(model, "author") and model.author is not None:
+             result["author"] = model.author
+
+         if hasattr(model, "created_at") and model.created_at is not None:
+             result["created_at"] = model.created_at
+
+         if hasattr(model, "last_modified") and model.last_modified is not None:
+             result["last_modified"] = model.last_modified
+
+         if hasattr(model, "downloads") and model.downloads is not None:
+             result["downloads"] = model.downloads
+
+         if hasattr(model, "likes") and model.likes is not None:
+             result["likes"] = model.likes
+
+         if hasattr(model, "tags") and model.tags is not None:
+             result["tags"] = model.tags
+
+         if hasattr(model, "pipeline_tag") and model.pipeline_tag is not None:
+             result["pipeline_tag"] = model.pipeline_tag
+
+         if hasattr(model, "library_name") and model.library_name is not None:
+             result["library_name"] = model.library_name
+
+         if hasattr(model, "card_data") and model.card_data is not None:
+             if (
+                 hasattr(model.card_data, "license")
+                 and model.card_data.license is not None
+             ):
+                 result["license"] = model.card_data.license
+
+             if (
+                 hasattr(model.card_data, "base_model")
+                 and model.card_data.base_model is not None
+             ):
+                 result["base_model"] = model.card_data.base_model
+
+             if (
+                 hasattr(model.card_data, "datasets")
+                 and model.card_data.datasets is not None
+             ):
+                 result["datasets"] = model.card_data.datasets
+
+         if hasattr(model, "siblings") and model.siblings is not None:
+             result["siblings"] = [sibling.rfilename for sibling in model.siblings]  # file paths only, so the result stays JSON-serializable
+
+         if hasattr(model, "spaces") and model.spaces is not None:
+             result["spaces"] = model.spaces
+
+         if hasattr(model, "xet_enabled") and model.xet_enabled is not None:
+             result["xet_enabled"] = model.xet_enabled
+
+         return json.dumps(result, default=str)  # default=str handles non-JSON types such as datetimes
+     except Exception as e:
+         return f"Error: {e}"
+
+
+ def get_model_card(model_id: str) -> str:
+     """
+     Get the complete model card (README.md) for a specific model on the Hugging Face Hub.
+
+     Use this when you need comprehensive model documentation, including usage examples, model limitations, etc.
+     If you only need structured metadata, use `get_model_info` instead.
+
+     This tool requires the exact model ID, which can be obtained using `search_models`.
+     If you have a partial name or tag, use `search_models` first to find the exact ID.
+
+     Args:
+         model_id (str): The model ID in the format "organization/model-name" (e.g., "DeepSeek/DeepSeek-R1").
+
+     Returns:
+         str: The markdown content of the model card.
+
+     Example:
+         - First, find the model ID: search_models(search="deepseek", sort="likes", limit=1)
+         - Then, get the model card: get_model_card("DeepSeek/DeepSeek-R1")
+     """
+     try:
+         filepath = hf_hub_download(model_id, "README.md")
+         with open(filepath, "r", encoding="utf-8") as f:
+             content = f.read()
+
+         return content
+     except Exception as e:
+         return f"Error: {e}"
+
+
+ search_models = gr.Interface(
+     fn=search_models,
+     inputs=[
+         gr.Textbox(label="search", value=""),
+         gr.Textbox(label="library", value=""),
+         gr.Textbox(label="tags", value=""),
+         gr.Textbox(label="pipeline_tag", value=""),
+         gr.Radio(label="sort", choices=["trending_score", "last_modified", "created_at", "downloads", "likes"], value="trending_score"),
+         gr.Radio(label="direction", choices=["descending", "ascending"], value="descending"),
+         gr.Number(label="limit", value=20, precision=0),
+     ],
+     outputs="text")
+
+ get_model_info = gr.Interface(
+     fn=get_model_info,
+     inputs=[
+         gr.Textbox(label="model_id", value=""),
+     ],
+     outputs="text")
+
+ get_model_card = gr.Interface(
+     fn=get_model_card,
+     inputs=[
+         gr.Textbox(label="model_id", value=""),
+     ],
+     outputs="text")
+
+ demo = gr.TabbedInterface([search_models, get_model_info, get_model_card], ["search_models", "get_model_info", "get_model_card"])
+ demo.launch(mcp_server=True)
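
For reference, a minimal client-side sketch of how the tools defined in app.py could be called once the Space is running. It is not part of the commit; the Space id and the api_name value below are assumptions, so run client.view_api() to confirm the actual endpoint names before relying on them.

from gradio_client import Client

# Hypothetical Space id; substitute the real repo id of this Space.
client = Client("dylanebert/huggingface-mcp")

# Print the auto-generated endpoints and their parameters.
client.view_api()

# Call the search_models tab; the endpoint name here is an assumption.
result = client.predict(
    "deepseek",         # search
    "",                 # library (comma-separated)
    "",                 # tags (comma-separated)
    "text-generation",  # pipeline_tag
    "likes",            # sort
    "descending",       # direction
    5,                  # limit
    api_name="/search_models",
)
print(result)  # JSON-encoded list of model IDs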