import json
import os

import faiss
import openpyxl
import pandas as pd
import requests
import streamlit as st
import torch
from reportlab.lib.pagesizes import letter
from reportlab.pdfgen import canvas
from sentence_transformers import SentenceTransformer
from transformers import AutoModelForCausalLM, AutoTokenizer
# Streamlit UI setup
st.set_page_config(page_title="AI-Powered Timetable", layout="wide")
st.markdown("<h1 style='text-align: center; color: #4CAF50;'>AI-Powered Timetable</h1>", unsafe_allow_html=True)
# API key input
st.sidebar.markdown("## Enter Hugging Face API Key")
hf_api_key = st.sidebar.text_input("API Key", type="password")

# File upload section
st.sidebar.markdown("## Upload Your Timetable Files")
uploaded_master = st.sidebar.file_uploader("Upload Master Timetable", type=["xlsx"])
uploaded_lab = st.sidebar.file_uploader("Upload Lab Timetable", type=["xlsx"])
uploaded_classroom = st.sidebar.file_uploader("Upload Classroom Timetable", type=["xlsx"])
uploaded_individual = st.sidebar.file_uploader("Upload Individual Timetable", type=["xlsx"])
uploaded_files = {
    "Master Timetable": uploaded_master,
    "Lab Timetable": uploaded_lab,
    "Classroom Timetable": uploaded_classroom,
    "Individual Timetable": uploaded_individual,
}
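# Every upload is optional: only the files actually provided are loaded, indexed
# for retrieval, and offered in the auto-schedule and preview sections below.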
# Load timetable data directly from an uploaded .xlsx file
def load_timetable(file):
    if not file:
        return None
    file.seek(0)  # rewind the uploaded file buffer before reading
    wb = openpyxl.load_workbook(file)
    sheet = wb.active
    return [row for row in sheet.iter_rows(values_only=True)]
# Initialize the sentence-transformer model used for embeddings
embedder = SentenceTransformer("all-MiniLM-L6-v2")
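# all-MiniLM-L6-v2 is a compact sentence-embedding model that maps each input
# text to a 384-dimensional vector, which keeps indexing fast even on CPU.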
# Process uploaded files and collect their rows for the RAG index
rag_data = {}
for name, file in uploaded_files.items():
    if file:
        timetable_data = load_timetable(file)
        if timetable_data:
            rag_data[name] = timetable_data

# Convert each timetable into a single text block (one row per line)
data_texts = ["\n".join(map(str, data)) for data in rag_data.values() if data]
if not data_texts:
    st.error("Error: No extracted timetable content available for AI processing.")
    st.stop()
# Generate embeddings and build the FAISS index
data_embeddings = embedder.encode(data_texts, convert_to_tensor=True)
if len(data_embeddings) == 0:
    st.error("Error: No valid embeddings created. Check your timetable files.")
    st.stop()

dimension = data_embeddings.shape[1]  # embedding dimensionality of the encoded texts
index = faiss.IndexFlatL2(dimension)
index.add(data_embeddings.cpu().numpy())
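# IndexFlatL2 performs exact (brute-force) L2 nearest-neighbour search, which is
# perfectly adequate here since at most four timetable documents are indexed.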
# Retrieve the most relevant timetable text(s) from the FAISS index
def retrieve_relevant_text(query, top_k=2):
    query_embedding = embedder.encode([query], convert_to_tensor=True).cpu().numpy()
    distances, indices = index.search(query_embedding, top_k)
    retrieved_texts = [data_texts[idx] for idx in indices[0] if idx < len(data_texts)]
    return "\n".join(retrieved_texts)
# Ask the LLaMA-3 model via the Hugging Face Inference API
def ask_llama_api(query):
    if not hf_api_key:
        return "Error: Please enter your API key."
    retrieved_text = retrieve_relevant_text(query)
    context = f"Relevant timetable data:\n{retrieved_text}\n\nUser Query: {query}"
    url = "https://api-inference.huggingface.co/v1/chat/completions"
    headers = {
        "Authorization": f"Bearer {hf_api_key}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": "meta-llama/Meta-Llama-3-8B",
        "messages": [{"role": "user", "content": context}],
        "max_tokens": 500,
    }
    response = requests.post(url, headers=headers, json=payload, timeout=60)
    if response.status_code == 200:
        return response.json()["choices"][0]["message"]["content"]
    else:
        return f"API Error: {response.status_code} - {response.text}"
# Auto-schedule missing slots by asking the model for a subject/faculty suggestion
def auto_schedule(file):
    if not file:
        return "No timetable uploaded."
    file.seek(0)  # rewind the uploaded file buffer; it was already read during indexing
    wb = openpyxl.load_workbook(file)
    sheet = wb.active
    empty_slots = []
    for row_idx, row in enumerate(sheet.iter_rows(min_row=2, values_only=True), start=2):
        if None in row or "" in row:
            empty_slots.append(row_idx)
    for row_idx in empty_slots:
        query = f"Suggest a subject and faculty for the empty slot in row {row_idx}."
        suggestion = ask_llama_api(query)
        try:
            # Expects a reply of the form "<subject>, Faculty: <faculty>";
            # columns 4 and 5 are assumed to hold subject and faculty.
            subject, faculty = suggestion.split(", Faculty: ")
            sheet.cell(row=row_idx, column=4, value=subject.strip())
            sheet.cell(row=row_idx, column=5, value=faculty.strip())
        except ValueError:
            # Skip slots where the reply does not match the expected format.
            continue
    # Note: the filled values live only in the in-memory workbook; the uploaded
    # file itself is not modified.
    return f"Auto-scheduling completed for {len(empty_slots)} slots."
# AI query section
st.markdown("## Ask LLaMA-3 AI About Your Timetable")
user_query = st.text_input("Type your question here (e.g., 'Who is free at 10 AM on Monday?')")
if st.button("Ask AI via API"):
    ai_response = ask_llama_api(user_query)
    st.write("**LLaMA-3 AI Suggests:**", ai_response)
# Auto-schedule feature
st.markdown("## Auto-Schedule Missing Timetable Slots")
selected_file = st.selectbox("Choose a timetable file to auto-fill missing slots:", list(uploaded_files.keys()))
if st.button("Auto-Schedule"):
    result = auto_schedule(uploaded_files[selected_file])
    st.write(result)
# Display uploaded timetables
st.markdown("## View Uploaded Timetables")
for name, file in uploaded_files.items():
    if file:
        file.seek(0)  # rewind before re-reading the already-consumed upload buffer
        df = pd.read_excel(file)
        st.markdown(f"### {name}")
        st.dataframe(df)
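# To run locally (assuming this script is saved as app.py):
#   streamlit run app.py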