import gradio as gr
import requests
import os
import json

# --- Configuration ---
# API configuration; the real backend model is kept hidden from end users
API_URL = "https://router.huggingface.co/nebius/v1/chat/completions"
MODEL_ID = "google/gemma-3-27b-it-fast"  # Real model hidden from users
PUBLIC_MODEL_NAME = "AgriAssist_LLM"     # What users will see
HF_TOKEN = os.getenv("HF_TOKEN")
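# The token is expected to come from the runtime environment, e.g. a Space
# secret named HF_TOKEN or, locally, a (hypothetical) value such as:
#   export HF_TOKEN=hf_xxxxxxxxxxxx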

if not HF_TOKEN:
    raise ValueError("Hugging Face token not found. Please set the HF_TOKEN environment variable or secret.")

HEADERS = {"Authorization": f"Bearer {HF_TOKEN}", "Content-Type": "application/json"}

# --- Logging that doesn't expose the real model name ---
print(f"Application configured to use {PUBLIC_MODEL_NAME}.")
print("API endpoint configured.")

# --- Inference Function (Using Hugging Face API) ---
def generate_response(prompt, max_new_tokens=512):
    print(f"Received prompt: {prompt}")
    print(f"Preparing payload for API...")

    payload = {
        "messages": [
            {
                "role": "user",
                "content": prompt
            }
        ],
        "model": MODEL_ID,  # Real model used in API call
        "max_tokens": max_new_tokens,
    }
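    # Note: this payload follows the OpenAI-compatible chat completions schema
    # (messages / model / max_tokens) expected by the router's
    # /v1/chat/completions endpoint.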

    print(f"Sending request to API for {PUBLIC_MODEL_NAME}...")
    try:
        response = requests.post(API_URL, headers=HEADERS, json=payload)
        response.raise_for_status()
        result = response.json()
        print(f"{PUBLIC_MODEL_NAME} Response Received Successfully.")

        if "choices" in result and len(result["choices"]) > 0 and "message" in result["choices"][0] and "content" in result["choices"][0]["message"]:
            api_response_content = result["choices"][0]["message"]["content"]
            print(f"{PUBLIC_MODEL_NAME} generated content")
            return api_response_content
        else:
            print(f"Unexpected API response structure")
            return f"Error: {PUBLIC_MODEL_NAME} encountered an issue processing your request. Please try again."

    except requests.exceptions.RequestException as e:
        # Capture the error body (if any) so it appears in the server-side log.
        error_detail = ""
        if e.response is not None:
            try:
                error_detail = e.response.json()
            except json.JSONDecodeError:
                error_detail = e.response.text
        print(f"Error calling API: {e} | Detail: {error_detail}")
        return f"{PUBLIC_MODEL_NAME} is currently experiencing connectivity issues. Please try again later."

    except Exception as e:
        print(f"An unexpected error occurred: {e}")
        return f"{PUBLIC_MODEL_NAME} encountered an unexpected error. Please try again later."

# --- Gradio Interface ---
iface = gr.Interface(
    fn=generate_response,
    inputs=gr.Textbox(lines=5, label="Enter your prompt", placeholder="Type your question or instruction here..."),
    outputs=gr.Textbox(lines=8, label=f"{PUBLIC_MODEL_NAME} Response:"),
    title=f"Chat with {PUBLIC_MODEL_NAME}",
    description=(f"This demo connects you with {PUBLIC_MODEL_NAME}, a specialized agricultural assistant. "
                "Submit your farming, crop management, or agricultural questions below."),
    allow_flagging="never",
    examples=[
        ["What are sustainable practices for improving soil health in organic farming?"],
        ["Explain crop rotation benefits and scheduling for small vegetable farms."],
        ["How can I identify and treat common tomato plant diseases?"],
        ["What irrigation methods are most water-efficient for drought-prone regions?"],
    ]
)

# --- Launch the App ---
iface.launch(server_name="0.0.0.0", server_port=7860)