import streamlit as st
import requests
from datetime import datetime
import uuid
import PyPDF2
from io import BytesIO
import os
from live_search import live_search_engine
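# live_search is a project-local module; this file uses live_search_engine.cloud_models
# (a list of OpenRouter model ids) and live_search_engine.search_and_generate(prompt, model).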
st.set_page_config(page_title="NGIBS", page_icon="🧠", layout="wide")
# Minimal CSS - Clean and Simple
st.markdown("""
""", unsafe_allow_html=True)
# Helper Functions
def get_api_key(service):
    """Get API key from environment variables"""
    if service == "openrouter":
        return os.getenv("OPENROUTER_API_KEY")
    return None
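# The key is expected in the environment before launching the app, e.g. (illustrative):
#   export OPENROUTER_API_KEY="sk-or-..."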
def process_uploaded_file(uploaded_file):
    """Extract text from an uploaded file (plain text or PDF); other types are referenced by name."""
    if not uploaded_file:
        return ""
    try:
        if uploaded_file.type == "text/plain":
            return uploaded_file.read().decode("utf-8")
        elif uploaded_file.type == "application/pdf":
            pdf_reader = PyPDF2.PdfReader(BytesIO(uploaded_file.read()))
            text = ""
            for page in pdf_reader.pages:
                # extract_text() can return None for image-only pages
                text += (page.extract_text() or "") + "\n"
            return text
        else:
            return f"[File: {uploaded_file.name}]"
    except Exception:
        return f"[Error reading file: {uploaded_file.name}]"
def query_cloud_model_direct(prompt: str, model: str) -> str:
    """Send a single-turn prompt to the OpenRouter chat completions API and return the reply text."""
    try:
        api_key = get_api_key("openrouter")
        if not api_key:
            return "❌ OpenRouter API key not configured"
        headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
        data = {
            "model": model,
            "messages": [{"role": "user", "content": prompt}],
            "temperature": 0.7,
            "max_tokens": 1000
        }
        response = requests.post("https://openrouter.ai/api/v1/chat/completions", headers=headers, json=data, timeout=60)
        response.raise_for_status()
        result = response.json()
        return result["choices"][0]["message"]["content"]
    except Exception as e:
        return f"❌ Cloud model error: {str(e)}"
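# Illustrative usage (assumes OPENROUTER_API_KEY is set in the environment):
#   reply = query_cloud_model_direct("Hello!", "meta-llama/llama-3.3-70b-instruct:free")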
# Session State
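# st.session_state persists across Streamlit reruns within a browser session,
# so chats survive widget interactions but not a page refresh or app restart.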
if "current_mode" not in st.session_state:
st.session_state.current_mode = "Quick Search Pro"
if "current_chat_id" not in st.session_state:
st.session_state.current_chat_id = None
if "all_chats" not in st.session_state:
st.session_state.all_chats = {}
if "selected_cloud_model" not in st.session_state:
st.session_state.selected_cloud_model = "meta-llama/llama-3.3-70b-instruct:free"
# Chat Functions
def create_new_chat():
    """Create a fresh chat record and make it the active chat."""
    chat_id = str(uuid.uuid4())[:8]
    st.session_state.current_chat_id = chat_id
    st.session_state.all_chats[chat_id] = {
        "id": chat_id,
        "title": "New Chat",
        "created_at": datetime.now(),
        "messages": []
    }
def save_message(user_msg: str, ai_msg: str):
    """Append a user/assistant message pair to the active chat, creating one if needed."""
    if not st.session_state.current_chat_id:
        create_new_chat()
    chat = st.session_state.all_chats[st.session_state.current_chat_id]
    chat["messages"].extend([
        {"role": "user", "content": user_msg, "timestamp": datetime.now()},
        {"role": "assistant", "content": ai_msg, "timestamp": datetime.now()}
    ])
    # Use the first user message as the chat title
    if len(chat["messages"]) == 2:
        title = user_msg[:30] + "..." if len(user_msg) > 30 else user_msg
        chat["title"] = title
def delete_chat(chat_id: str):
    """Remove a chat; clear the active selection if it was the deleted chat."""
    if chat_id in st.session_state.all_chats:
        del st.session_state.all_chats[chat_id]
        if st.session_state.current_chat_id == chat_id:
            st.session_state.current_chat_id = None
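# Each chat record has the shape:
#   {"id": str, "title": str, "created_at": datetime,
#    "messages": [{"role": ..., "content": ..., "timestamp": ...}, ...]}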
# Header
st.markdown("# 🧠 NGIBS")
st.caption("Next Generation Intelligent Browsing System")
# Navigation Bar (st.link_button renders a link and returns None, so no `if` wrapper is needed)
nav_col1, nav_col2, nav_col3, nav_col4 = st.columns(4)
with nav_col1:
    st.link_button("📖 About", "https://jaiho-digital.onrender.com/products/ngibs/ngibs_about.html", use_container_width=True)
with nav_col2:
    st.link_button("📚 Docs", "https://jaiho-digital.onrender.com/products/ngibs/ngibs_docs.html", use_container_width=True)
with nav_col3:
    st.link_button("📦 Products", "https://jaiho-digital.onrender.com/jaiho_products.html#products", use_container_width=True)
with nav_col4:
    st.link_button("💝 Donate", "https://jaiho-digital.onrender.com/products/ngibs/ngibs_donate.html", use_container_width=True)
# Offline Version Note
st.info("📦 Note: We are also building an offline version with local AI models for desktop use.")
# Sidebar
with st.sidebar:
    st.markdown("### 🤖 AI Model")
    cloud_models = live_search_engine.cloud_models
    selected_cloud = st.selectbox(
        "Select Model:",
        cloud_models,
        index=cloud_models.index(st.session_state.selected_cloud_model) if st.session_state.selected_cloud_model in cloud_models else 0
    )
    st.session_state.selected_cloud_model = selected_cloud
    st.markdown("---")
    st.markdown("### 💬 Chat")
    if st.button("✨ New Chat", use_container_width=True, type="primary"):
        create_new_chat()
        st.rerun()
    if st.session_state.current_chat_id:
        if st.button("🗑️ Clear Current", use_container_width=True):
            st.session_state.all_chats[st.session_state.current_chat_id]["messages"] = []
            st.rerun()
    # Chat History
    if st.session_state.all_chats:
        st.markdown("**Recent Chats**")
        sorted_chats = sorted(
            st.session_state.all_chats.items(),
            key=lambda x: x[1]['created_at'],
            reverse=True
        )[:5]  # Show only the 5 most recent chats
        for chat_id, chat_data in sorted_chats:
            col1, col2 = st.columns([3, 1])
            with col1:
                active = st.session_state.current_chat_id == chat_id
                if st.button(
                    chat_data['title'],
                    key=f"chat_{chat_id}",
                    use_container_width=True,
                    type="primary" if active else "secondary"
                ):
                    st.session_state.current_chat_id = chat_id
                    st.rerun()
            with col2:
                if st.button("🗑️", key=f"del_{chat_id}"):
                    delete_chat(chat_id)
                    st.rerun()
# Search Modes
col1, col2, col3, col4 = st.columns(4)
with col1:
    if st.button("⚡+ Quick Search Pro",
                 type="primary" if st.session_state.current_mode == "Quick Search Pro" else "secondary",
                 use_container_width=True):
        st.session_state.current_mode = "Quick Search Pro"
        st.rerun()
with col2:
    if st.button("🌐 Live Search",
                 type="primary" if st.session_state.current_mode == "Live Search" else "secondary",
                 use_container_width=True):
        st.session_state.current_mode = "Live Search"
        st.rerun()
with col3:
    if st.button("🧠 Deep Search",
                 type="primary" if st.session_state.current_mode == "Deep Search" else "secondary",
                 use_container_width=True):
        st.session_state.current_mode = "Deep Search"
        st.rerun()
with col4:
    if st.button("💬 Memory Chat",
                 type="primary" if st.session_state.current_mode == "Memory Chat" else "secondary",
                 use_container_width=True):
        st.session_state.current_mode = "Memory Chat"
        st.rerun()
# Current Mode Info
st.markdown(
    f"**Active:** {st.session_state.current_mode} | "
    f"**Model:** {st.session_state.selected_cloud_model.split('/')[-1]}"
)
# Chat Display
if st.session_state.current_chat_id and st.session_state.current_chat_id in st.session_state.all_chats:
    chat = st.session_state.all_chats[st.session_state.current_chat_id]
    if chat["messages"]:
        for msg in chat["messages"]:
            with st.chat_message(msg["role"], avatar="🙋♂️" if msg["role"] == "user" else "🤖"):
                st.write(msg["content"])
    else:
        st.info("Start a conversation by typing your message below")
else:
    st.info("Create a new chat or select an existing one to start")
# File Upload
uploaded_file = st.file_uploader(
    "📎 Upload file (optional)",
    type=['txt', 'pdf', 'png', 'jpg', 'jpeg']
)
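# Note: images (png/jpg/jpeg) are accepted by the uploader, but
# process_uploaded_file only extracts text from .txt and .pdf files;
# other types are passed through to the prompt by name only.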
# Chat Input
if prompt := st.chat_input("Ask me anything..."):
    file_content = ""
    if uploaded_file:
        file_content = process_uploaded_file(uploaded_file)
        if file_content:
            prompt = f"{prompt}\n\n[File content]:\n{file_content}"
    # Display user message
    with st.chat_message("user", avatar="🙋♂️"):
        st.write(prompt)
    # Generate response
    with st.spinner("Thinking..."):
        if st.session_state.current_mode == "Quick Search Pro":
            if not get_api_key("openrouter"):
                response = "❌ OpenRouter API key required for this feature"
            else:
                response = query_cloud_model_direct(prompt, st.session_state.selected_cloud_model)
        elif st.session_state.current_mode == "Live Search":
            if not get_api_key("openrouter"):
                response = "❌ OpenRouter API key required for this feature"
            else:
                response = live_search_engine.search_and_generate(prompt, st.session_state.selected_cloud_model)
        elif st.session_state.current_mode == "Deep Search":
            response = "🚧 Deep Search is coming soon! This will provide multi-step reasoning and comprehensive analysis."
        elif st.session_state.current_mode == "Memory Chat":
            response = "🚧 Memory Chat is coming soon! This will maintain conversation context across multiple exchanges."
        else:
            response = f"🚧 {st.session_state.current_mode} is in development"
    # Display AI response
    with st.chat_message("assistant", avatar="🤖"):
        st.write(response)
    # Save conversation
    save_message(prompt, response)
# Footer
st.markdown("---")
st.markdown("""
NGIBS v3.0 | Developed by Arshvir :)
""", unsafe_allow_html=True)