import os

import gradio as gr
import requests

# Backend configuration: the Uplink API base URL and key are read from the
# environment; the default points at a local instance on port 8001.
base_url = os.getenv("base_url", "http://localhost:8001")
API_KEY = os.getenv("API_KEY")

headers = {
    "X-API-Key": API_KEY
}


def search_endpoint(
        q: str,
        num: int = 5,
        start: int = 1,
        site: str | None = None,
        date_restrict: str | None = None
) -> dict:
    """
    Search the internet as if it were Google.

    Args:
        q: Search query
        num: Number of results to return (default 5; 5 is also the maximum)
        start: Starting index for results (default 1)
        site: Restrict search to specific site (optional)
        date_restrict: Date restriction (e.g., 'd1', 'w1', 'm1') (optional)

    Returns:
        Search results as a dictionary
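
    Example (assumes a reachable Uplink backend at base_url):
        search_endpoint("open source llms", num=3, site="github.com")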
    """
    params = {
        "q": q,
        "num": num,
        "start": start,
        "site": site,
        "date_restrict": date_restrict
    }
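    # requests drops parameters whose value is None, so unset optional
    # filters are simply omitted from the query string.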
    
    try:
        # A timeout keeps the UI responsive if the backend stalls.
        response = requests.get(f"{base_url}/search", params=params, headers=headers, timeout=30)
        response.raise_for_status()
        return response.json()
    except Exception as e:
        return {"error": str(e), "query": q, "results": [], "count": 0}

def search_news_endpoint(
        q: str,
        num: int = 5,
) -> dict:
    """
    Search news articles, similar to Google News.

    Args:
        q: Search query
        num: Number of results to return (default 5; 5 is also the maximum)

    Returns:
        News search results as a dictionary
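
    Example (assumes a reachable Uplink backend at base_url):
        search_news_endpoint("AI regulation", num=3)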
    """
    params = {
        "q": q,
        "num": num,
    }
    
    try:
        response = requests.get(f"{base_url}/search/news", params=params, headers=headers, timeout=30)
        response.raise_for_status()
        return response.json()
    except Exception as e:
        return {"error": str(e), "query": q, "results": [], "count": 0}
    
def scrape_endpoint(url: str) -> dict:
    """
    Scrape the content of a given URL and return it as Markdown, wrapped in a dictionary.

    Args:
        url: The URL to scrape

    Returns:
        Scraped markdown as a dictionary
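
    Example (assumes a reachable Uplink backend at base_url):
        scrape_endpoint("https://example.com")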
    """
    try:
        # Scraping a page can take longer than a search, so allow more time.
        response = requests.get(f"{base_url}/scrape", params={"url": url}, headers=headers, timeout=60)
        response.raise_for_status()
        return response.json()
    except Exception as e:
        return {"error": str(e), "url": url, "content": ""}
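
# Gradio UI: three tabs wiring the endpoint functions above to simple forms.
# Their docstrings and type hints also describe the MCP tools exposed when
# the app is launched with mcp_server=True.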

with gr.Blocks(title="Uplink") as demo:
    gr.Markdown(
        """
        # 🌐 Uplink
        **A unified search interface for the web and news.**
        GitHub repository: [cappuch/uplink-mcp-hackathon](https://github.com/cappuch/uplink-mcp-hackathon)
        """
    )

    with gr.Tab("General Web Search"):
        with gr.Row():
            with gr.Column():
                query = gr.Textbox(label="Search Query", placeholder="Type your search here...")
                num_results = gr.Slider(minimum=1, maximum=5, value=5, step=1, label="Number of Results")
                start = gr.Slider(minimum=1, maximum=100, value=1, step=1, label="Starting Index")
                site = gr.Textbox(label="Restrict to Site (optional)", placeholder="e.g. wikipedia.org")
                date_restrict = gr.Dropdown(
                    choices=[None, "d1", "w1", "m1"],
                    value=None,
                    label="Date Restriction (optional)",
                    info="d1 = past day, w1 = past week, m1 = past month"
                )
                search_btn = gr.Button("🔍 Search")
            with gr.Column():
                output = gr.JSON(label="Results")

        search_btn.click(
            fn=search_endpoint,
            inputs=[query, num_results, start, site, date_restrict],
            outputs=output
        )

    with gr.Tab("Web Scrape"):
        with gr.Row():
            with gr.Column():
                scrape_url = gr.Textbox(label="URL to Scrape", placeholder="https://example.com")
                scrape_btn = gr.Button("🌐 Scrape")
            with gr.Column():
                scrape_output = gr.JSON(label="Scraped Content")

        scrape_btn.click(
            fn=scrape_endpoint,
            inputs=[scrape_url],
            outputs=scrape_output
        )

    with gr.Tab("News Search"):
        with gr.Row():
            with gr.Column():
                news_query = gr.Textbox(label="News Search Query", placeholder="Type your news topic...")
                news_num_results = gr.Slider(minimum=1, maximum=5, value=5, step=1, label="Number of Results")
                news_search_btn = gr.Button("📰 Search News")
            with gr.Column():
                news_output = gr.JSON(label="News Results")

        news_search_btn.click(
            fn=search_news_endpoint,
            inputs=[news_query, news_num_results],
            outputs=news_output
        )

    gr.Markdown(
        """
        ---
        <div align="center">
        <sub>Powered by Uplink &middot; Built with FastAPI + Gradio</sub>
        </div>
        """
    )

if __name__ == "__main__":
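    # mcp_server=True (requires the gradio[mcp] extra) also exposes the
    # documented functions above as MCP tools alongside the web UI.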
    demo.launch(mcp_server=True)