aldigobbler committed
Commit 4b13f1a · verified · 1 parent: ff4be00

Upload 2 files

Files changed (2):
  1. app.py +162 -0
  2. requirements.txt +3 -0
app.py ADDED
@@ -0,0 +1,162 @@
+ import os
+
+ import gradio as gr
+ import requests
+
+ base_url = os.getenv("base_url", "http://localhost:8001")
+ API_KEY = os.getenv("API_KEY")
+
+ # Shared auth header for every request to the backend API
+ headers = {
+     "X-API-Key": API_KEY
+ }
+
+
+ def search_endpoint(
+     q: str,
+     num: int = 5,
+     start: int = 1,
+     site: str | None = None,
+     date_restrict: str | None = None,
+ ) -> dict:
+     """
+     Search the internet as if it were Google.
+
+     Args:
+         q: Search query
+         num: Number of results to return (default 5; 5 is the maximum)
+         start: Starting index for results (default 1)
+         site: Restrict the search to a specific site (optional)
+         date_restrict: Date restriction, e.g. 'd1', 'w1', 'm1' (optional)
+
+     Returns:
+         Search results as a dictionary
+     """
+     params = {
+         "q": q,
+         "num": num,
+         "start": start,
+         "site": site,
+         "date_restrict": date_restrict,
+     }
+
+     try:
+         # requests omits params whose value is None, so optional filters drop out
+         response = requests.get(f"{base_url}/search", params=params, headers=headers)
+         response.raise_for_status()
+         return response.json()
+     except Exception as e:
+         return {"error": str(e), "query": q, "results": [], "count": 0}
+
+
+ def search_news_endpoint(
+     q: str,
+     num: int = 10,
+ ) -> dict:
+     """
+     Search news articles, similar to Google News.
+
+     Args:
+         q: Search query
+         num: Number of results to return (default 10)
+
+     Returns:
+         News search results as a dictionary
+     """
+     params = {
+         "q": q,
+         "num": num,
+     }
+
+     try:
+         response = requests.get(f"{base_url}/search/news", params=params, headers=headers)
+         response.raise_for_status()
+         return response.json()
+     except Exception as e:
+         return {"error": str(e), "query": q, "results": [], "count": 0}
+
+
+ def scrape_endpoint(url: str) -> dict:
+     """
+     Scrape the content of a given URL and return it as markdown in a dictionary.
+
+     Args:
+         url: The URL to scrape
+
+     Returns:
+         Scraped markdown as a dictionary
+     """
+     try:
+         response = requests.get(f"{base_url}/scrape", params={"url": url}, headers=headers)
+         response.raise_for_status()
+         return response.json()
+     except Exception as e:
+         return {"error": str(e), "url": url, "content": ""}
+
+
+ with gr.Blocks(title="Uplink") as demo:
+     gr.Markdown(
+         """
+         # 🌐 Uplink
+         **A unified search interface for the web and news.**
+         """
+     )
+
+     with gr.Tab("General Web Search"):
+         with gr.Row():
+             with gr.Column():
+                 query = gr.Textbox(label="Search Query", placeholder="Type your search here...")
+                 num_results = gr.Slider(minimum=1, maximum=5, value=5, step=1, label="Number of Results")
+                 start = gr.Slider(minimum=1, maximum=100, value=1, step=1, label="Starting Index")
+                 site = gr.Textbox(label="Restrict to Site (optional)", placeholder="e.g. wikipedia.org")
+                 date_restrict = gr.Dropdown(
+                     choices=[None, "d1", "w1", "m1"],
+                     value=None,
+                     label="Date Restriction (optional)",
+                     info="d1 = past day, w1 = past week, m1 = past month"
+                 )
+                 search_btn = gr.Button("🔍 Search")
+             with gr.Column():
+                 output = gr.JSON(label="Results")
+
+         search_btn.click(
+             fn=search_endpoint,
+             inputs=[query, num_results, start, site, date_restrict],
+             outputs=output
+         )
+
+     with gr.Tab("Web Scrape"):
+         with gr.Row():
+             with gr.Column():
+                 scrape_url = gr.Textbox(label="URL to Scrape", placeholder="https://example.com")
+                 scrape_btn = gr.Button("🌐 Scrape")
+             with gr.Column():
+                 scrape_output = gr.JSON(label="Scraped Content")
+
+         scrape_btn.click(
+             fn=scrape_endpoint,
+             inputs=[scrape_url],
+             outputs=scrape_output
+         )
+
+     with gr.Tab("News Search"):
+         with gr.Row():
+             with gr.Column():
+                 news_query = gr.Textbox(label="News Search Query", placeholder="Type your news topic...")
+                 news_num_results = gr.Slider(minimum=1, maximum=10, value=5, step=1, label="Number of Results")
+                 news_search_btn = gr.Button("📰 Search News")
+             with gr.Column():
+                 news_output = gr.JSON(label="News Results")
+
+         news_search_btn.click(
+             fn=search_news_endpoint,
+             inputs=[news_query, news_num_results],
+             outputs=news_output
+         )
+
+     gr.Markdown(
+         """
+         ---
+         <div align="center">
+         <sub>Powered by Uplink &middot; Built with FastAPI + Gradio</sub>
+         </div>
+         """
+     )
+
+ if __name__ == "__main__":
+     demo.launch(mcp_server=True)
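
For reference, a minimal smoke test of the backend that app.py wraps — a sketch only, which assumes the service behind base_url (with its /search, /search/news, and /scrape routes) is reachable and API_KEY is valid; the query string is just an illustration:

import os

import requests

base_url = os.getenv("base_url", "http://localhost:8001")
headers = {"X-API-Key": os.getenv("API_KEY")}

# Call the /search route directly, mirroring what search_endpoint sends
response = requests.get(
    f"{base_url}/search",
    params={"q": "gradio mcp", "num": 5, "start": 1},  # num is capped at 5 upstream
    headers=headers,
    timeout=30,
)
response.raise_for_status()
print(response.json())

Because the app launches with mcp_server=True, Gradio also exposes the three functions as MCP tools; per the Gradio MCP documentation the server lives at /gradio_api/mcp/sse on the running app, so an MCP-capable client can call search_endpoint, search_news_endpoint, and scrape_endpoint without the UI.
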
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ gradio[mcp]
+ requests
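
To run this locally (a sketch, assuming the defaults above): pip install -r requirements.txt, set the base_url and API_KEY environment variables to point at the backend, then python app.py; the Gradio UI comes up on port 7860 by default with the MCP server enabled alongside it.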