# Hugging Face Space (status at capture time: Sleeping)
from ctransformers import AutoModelForCausalLM
import gradio as gr

# Quantized Zephyr-7B-beta (GGUF, Q4_K_M) loaded via ctransformers.
# model_type="mistral" because Zephyr is a Mistral-7B fine-tune.
model = AutoModelForCausalLM.from_pretrained(
    "TheBloke/zephyr-7B-beta-GGUF",
    model_file="zephyr-7b-beta.Q4_K_M.gguf",
    model_type="mistral",
    max_new_tokens=512,   # cap on generated tokens per reply
    temperature=0.7,
    top_p=0.9,
)
# System persona injected at the top of every prompt (Spanish: an empathetic
# workplace-wellbeing mentor). Runtime string — intentionally left in Spanish.
SYSTEM_PROMPT = """Eres una mentora empática y reflexiva, especializada en acompañar a empleados en su camino hacia el bienestar laboral. Las personas acudirán a ti para compartir inquietudes, dudas, bloqueos personales o logros importantes...
(Sigue con el prompt completo)
"""
def chat_fn(message, history):
    """Build a Zephyr-format prompt from the chat history and generate a reply.

    Args:
        message: The user's latest message (str).
        history: Prior turns. With ``gr.ChatInterface(type="messages")`` this is
            a list of ``{"role": ..., "content": ...}`` dicts (OpenAI style),
            NOT (user, bot) tuples — unpacking pairs here would fail.

    Returns:
        The model's reply with surrounding whitespace stripped.
    """
    full_prompt = f"<|system|>\n{SYSTEM_PROMPT}\n"
    # BUG FIX: type="messages" delivers role/content dicts; iterate them
    # instead of unpacking (user, bot) tuples.
    for msg in history:
        tag = "<|user|>" if msg["role"] == "user" else "<|assistant|>"
        full_prompt += f"{tag}\n{msg['content']}\n"
    full_prompt += f"<|user|>\n{message}\n<|assistant|>\n"
    output = model(full_prompt)
    return output.strip()
# Gradio chat UI; type="messages" makes history arrive as role/content dicts.
demo = gr.ChatInterface(fn=chat_fn, title="Mentora Mely", theme="soft", type="messages")

if __name__ == "__main__":
    demo.launch()