# fastAPIv2 / space_app.py
import os
import gradio as gr
from components.indexers.news_indexer import (
    build_news_index,
    load_news_index,
)
from llama_index.core.query_engine import RetrieverQueryEngine
from llama_index.core.settings import Settings
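# Assumed interface of the local components.indexers.news_indexer module, inferred
# from how it is called below:
#   build_news_index(data_dir, index_dir) -> builds the index and persists it under index_dir
#   load_news_index(index_dir)            -> reloads the persisted index from index_dir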
# 💥 Block OpenAI LLM usage
Settings.llm = None
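# With the LLM explicitly set to None, queries never call OpenAI (recent llama_index
# versions substitute a MockLLM), so the Space does not need an OPENAI_API_KEY.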
DATA_DIR = "data/raw"
INDEX_DIR = "storage/index"
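# Both paths are relative to the Space's working directory; on Hugging Face Spaces
# this storage is ephemeral unless persistent storage is enabled, so the index may
# be rebuilt on every restart.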
# Create dummy file if needed
if not os.path.exists(DATA_DIR):
    os.makedirs(DATA_DIR, exist_ok=True)
    with open(os.path.join(DATA_DIR, "sample.txt"), "w") as f:
        f.write("Sample news: India’s Prime Minister spoke today about the tech economy boom.")
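# The placeholder document guarantees the data directory is non-empty, so the first
# index build always has at least one file to ingest.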
# Build index if missing
if not os.path.exists(os.path.join(INDEX_DIR, "docstore.json")):
    print("📦 Index not found — building it now...")
    build_news_index(data_dir=DATA_DIR, index_dir=INDEX_DIR)
# Load the persisted index
print("📥 Loading index...")
index = load_news_index(INDEX_DIR)
query_engine = index.as_query_engine()
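# Default retrieval settings; as_query_engine() also accepts retriever kwargs such as
# similarity_top_k to control how many chunks feed each answer.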
def query_news(question):
    response = query_engine.query(question)
    return str(response)
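# Example: query_news("What did the Prime Minister talk about?") returns the query
# engine's response as plain text for the Gradio output box.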
iface = gr.Interface(
    fn=query_news,
    inputs=gr.Textbox(label="Ask about the news"),
    outputs=gr.Textbox(label="Answer"),
    title="LucidFeed: News Assistant",
    description="Ask questions about the latest headlines.",
)
iface.launch()
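# launch() with defaults is enough on Hugging Face Spaces; when running elsewhere
# (e.g. inside Docker), iface.launch(server_name="0.0.0.0", server_port=7860) would
# expose the app outside the container.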