Spaces:
Sleeping
Sleeping
File size: 1,129 Bytes
d791652 c13ddf9 d791652 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 |
# handler.py
import requests
# Replace this with your actual model URL (must be PUBLIC!)
MODEL_URL = "https://api-inference.huggingface.co/models/CLASSIFIED-HEX/X"
def generate_text(prompt, max_tokens=250, temperature=0.7, top_p=0.95, top_k=50, repetition_penalty=1.2, trim_output=False, timeout=60):
    """Query the hosted model via the HF Inference API and return generated text.

    Args:
        prompt: Text prompt sent to the model.
        max_tokens: Upper bound on newly generated tokens (sent as
            ``max_new_tokens``).
        temperature: Sampling temperature, forwarded unchanged.
        top_p: Nucleus-sampling cutoff, forwarded unchanged.
        top_k: Top-k sampling cutoff, forwarded unchanged.
        repetition_penalty: Repetition penalty, forwarded unchanged.
        trim_output: When True, strip the echoed prompt from the start
            of the generated text.
        timeout: Seconds to wait for the HTTP response before aborting.
            (Without this, ``requests.post`` would wait forever on a
            stalled endpoint.)

    Returns:
        The generated text, or a human-readable error string prefixed
        with an emoji marker — this function never raises.
    """
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": max_tokens,
            "temperature": temperature,
            "top_p": top_p,
            "top_k": top_k,
            "repetition_penalty": repetition_penalty,
        },
    }
    try:
        # Bound the request so a hung endpoint cannot block the caller forever.
        response = requests.post(MODEL_URL, json=payload, timeout=timeout)
        if response.status_code != 200:
            return f"❌ Error {response.status_code}: {response.text}"
        result = response.json()
        # The API returns either a list of generation dicts or a single dict;
        # normalize to one record before extracting the text.
        record = result[0] if isinstance(result, list) else result
        text = record.get("generated_text", "")
        if trim_output and text.startswith(prompt):
            # Drop the echoed prompt plus any whitespace that follows it.
            text = text[len(prompt):].lstrip()
        return text or "⚠️ No output returned."
    except Exception as e:
        # Broad catch is deliberate: callers always get a string back
        # (timeouts, connection errors, bad JSON, empty list) — never an exception.
        return f"🔥 Generation failed: {str(e)}"
|