# Hugging Face Spaces page header captured during extraction (not code):
# "Spaces: Sleeping / Sleeping"
import os

import streamlit as st
from dotenv import load_dotenv
from langchain_community.llms import HuggingFaceEndpoint
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
# Load environment variables from .env file
load_dotenv()
# Hugging Face API token read from the environment.
# NOTE(review): spelled "HF_Token" here — confirm it matches the variable name
# actually set in the deployment (commonly "HF_TOKEN"); os.getenv is
# case-sensitive on most platforms.
api_token = os.getenv("HF_Token")
# Model repository and inference task used by the Hugging Face endpoint below
repo_id = "microsoft/phi-2"
task = "text-generation"
# App config — st.set_page_config must be the first Streamlit command executed.
# NOTE(review): "π" and "β" look like mis-encoded emoji from a copy/paste —
# confirm the intended glyphs before shipping.
st.set_page_config(page_title="Voyage.AI", page_icon="π")
st.title("Travel Planner AI Assistant: Voyage.AI β")
# Define the template outside the function so it is built once per script run.
# ChatML-style prompt for phi-2: a system contract, a canned assistant
# self-introduction, then the user's question. The ONLY template variable is
# {user_question}. NOTE(review): get_response() also passes "chat_history" to
# chain.invoke(), but this template never references it — the model sees each
# question without prior turns; add a {chat_history} slot if multi-turn memory
# is intended.
template = """
<|im_start|>system
You are Voyage.AI, a professional travel planning assistant. Your responses should be helpful, detailed, and focused on providing practical travel advice. When users mention specific locations, provide relevant information about travel options, must-see attractions, and practical tips.
Key responsibilities:
1. Provide specific travel recommendations based on user's location and destination
2. Suggest optimal travel routes and transportation options
3. Recommend attractions and activities
4. Share practical travel tips about visas, weather, and local customs
5. Help with travel planning and itinerary creation
Always maintain a professional, friendly tone and provide structured, relevant responses.
<|im_end|>
<|im_start|>assistant
I am Voyage.AI, your travel planning assistant. I can help you create the perfect travel experience with:
- Customized travel planning and itineraries
- Transportation recommendations
- Accommodation suggestions
- Local attraction information
- Travel tips and practical advice
- Cultural insights and local customs
- Visa and documentation guidance
How may I assist with your travel plans today?
<|im_end|>
<|im_start|>user
{user_question}
<|im_end|>
<|im_start|>assistant
"""
# Wrap the raw template in a LangChain prompt so it can be piped into the LLM.
prompt = ChatPromptTemplate.from_template(template)
# Function to get a response from the model
@st.cache_resource(show_spinner=False)
def _get_llm():
    """Build the Hugging Face inference endpoint once and reuse it.

    Cached with st.cache_resource: Streamlit re-runs the whole script on every
    interaction, and the original code re-created the endpoint client on every
    chat turn. The generation parameters are fixed, so one shared client is
    sufficient.
    """
    return HuggingFaceEndpoint(
        huggingfacehub_api_token=api_token,
        repo_id=repo_id,
        task=task,
        temperature=0.7,
        max_new_tokens=512,
        top_p=0.9,
        repetition_penalty=1.2,  # Slightly increased to reduce repetition
    )


def get_response(user_query, chat_history):
    """Run one prompt -> LLM -> parser round trip for a user turn.

    Args:
        user_query: The user's latest message; fills {user_question} in the
            module-level prompt template.
        chat_history: List of prior AIMessage/HumanMessage turns. Supplied to
            invoke() for forward compatibility, but the current template does
            not reference it, so it has no effect on the model output.

    Returns:
        The model's raw text response (callers strip control tokens).
    """
    chain = prompt | _get_llm() | StrOutputParser()
    return chain.invoke({
        "chat_history": chat_history,
        "user_question": user_query,
    })
# Streamlit re-executes this script on every interaction, so the conversation
# history must live in session_state. Seed it with the assistant's greeting
# the first time the session starts.
if "chat_history" not in st.session_state:
    greeting = AIMessage(
        content=(
            "Hello! I'm Voyage.AI, your personal travel planning assistant. "
            "How can I help you plan your next adventure?"
        )
    )
    st.session_state.chat_history = [greeting]
# Replay the stored conversation. Each message type maps to a chat role;
# anything that is neither an AIMessage nor a HumanMessage is skipped, as in
# the original two-branch version.
for msg in st.session_state.chat_history:
    if isinstance(msg, AIMessage):
        role = "AI"
    elif isinstance(msg, HumanMessage):
        role = "Human"
    else:
        continue
    with st.chat_message(role):
        st.write(msg.content)
def _clean_response(text):
    """Strip ChatML control tokens and role labels from raw model output.

    The original cleanup called ``replace("assistant", "")`` on the whole
    string, which deleted the word "assistant" anywhere it appeared —
    mangling legitimate answers such as "your travel assistant". This version
    removes the control tokens and known label strings, but drops the
    "assistant" role word only when it is a *leading* label.
    """
    for token in ("<|im_end|>", "<|im_start|>"):
        text = text.replace(token, "")
    for label in ("AI response:", "chat response:", "bot response:"):
        text = text.replace(label, "")
    text = text.strip()
    # Remove a leading "assistant" role label (with optional colon/newlines),
    # but leave the word intact elsewhere in the answer.
    if text.lower().startswith("assistant"):
        text = text[len("assistant"):].lstrip(": \n")
    return text.strip()


# User input: st.chat_input returns None until the user submits and may
# return an empty string; a plain truthiness check rejects both.
user_query = st.chat_input("Type your message here...")
if user_query:
    # Record and echo the user's turn immediately.
    st.session_state.chat_history.append(HumanMessage(content=user_query))
    with st.chat_message("Human"):
        st.markdown(user_query)
    # Query the model, then strip control tokens before display/storage.
    response = _clean_response(
        get_response(user_query, st.session_state.chat_history)
    )
    with st.chat_message("AI"):
        st.write(response)
    st.session_state.chat_history.append(AIMessage(content=response))