Trial and error.
app.py CHANGED
@@ -18,11 +18,7 @@ class BasicAgent:
         print("BasicAgent initialized.")
 
         print("Loading mistralai/Mistral-7B-Instruct-v0.1 model...")
-        self.
-        self.client = InferenceClient(model=self.model_name)
-        self.tokenizer = None
-        self.model = None
-
+        self.client = InferenceClient(token=os.getenv("HF_TOKEN"))
 
     def __call__(self, question: str) -> str:
         print(f"Agent received question (first 50 chars): {question[:50]}...")

@@ -173,7 +169,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
 
 # --- Build Gradio Interface using Blocks ---
 with gr.Blocks() as demo:
-    gr.Markdown("# Basic Agent Evaluation Runner #
+    gr.Markdown("# Basic Agent Evaluation Runner #6")
     gr.Markdown(
         """
         **Instructions:**