File size: 2,098 Bytes
154c070
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
# app.py
#
# Minimal retrieval-augmented generation (RAG) demo: embeds a small corpus
# with SentenceTransformer, retrieves nearest documents via FAISS, and
# extracts an answer span with a Hugging Face QA pipeline, served via Gradio.
from transformers import pipeline
from sentence_transformers import SentenceTransformer
import faiss
import numpy as np
import gradio as gr

# Initialize a free question-answering model from Hugging Face.
# NOTE: this is an *extractive* QA model — answers are spans copied from the
# retrieved context, not generated text.
question_answerer = pipeline("question-answering", model="distilbert-base-cased-distilled-squad")

# Load or create data on economic and population growth trends.
# Each entry is a dict with a stable "id" and the raw "text" to embed.
documents = [
    {"id": 1, "text": "Global economic growth is projected to slow down due to inflation."},
    {"id": 2, "text": "Population growth in developing countries continues to increase."},
    {"id": 3, "text": "Economic growth in advanced economies is experiencing fluctuations due to market changes."},
    # Add more documents as needed
]

# Embed documents for retrieval using SentenceTransformer.
embedder = SentenceTransformer('all-MiniLM-L6-v2')  # A lightweight embedding model
# Batch-encode the whole corpus in one call (faster than encoding per document).
document_embeddings = embedder.encode([doc['text'] for doc in documents])

# Build a FAISS L2 index for similarity search. Derive the vector dimension
# from the embeddings themselves instead of hard-coding 384, so swapping the
# embedding model cannot silently break the index. FAISS requires a
# contiguous float32 matrix, so convert explicitly.
_embedding_matrix = np.asarray(document_embeddings, dtype=np.float32)
index = faiss.IndexFlatL2(_embedding_matrix.shape[1])
index.add(_embedding_matrix)

# Define the RAG retrieval function
# Define the RAG retrieval function.
def retrieve_documents(query, top_k=3):
    """Return the texts of the up-to-``top_k`` documents most similar to ``query``.

    Similarity is L2 distance between SentenceTransformer embeddings in the
    module-level FAISS index. May return fewer than ``top_k`` results when the
    index holds fewer vectors.
    """
    query_embedding = embedder.encode(query).reshape(1, -1)
    distances, indices = index.search(query_embedding, top_k)
    # FAISS pads the result with -1 when top_k exceeds the number of indexed
    # vectors; without this filter, documents[-1] would silently return the
    # last document as a bogus match.
    return [documents[i]['text'] for i in indices[0] if i != -1]

# Implement the question-answering function with retrieval.
def ask_question(question):
    """Answer ``question`` by retrieving relevant documents and running
    extractive QA over their concatenated text.
    """
    supporting_passages = retrieve_documents(question)
    combined_context = " ".join(supporting_passages)
    result = question_answerer(question=question, context=combined_context)
    return result['answer']

# Create Gradio Interface for the RAG app.
def rag_interface(question):
    """Gradio callback: delegate to ask_question and return the answer text."""
    return ask_question(question)

# Wire the callback into a single-textbox-in, single-textbox-out Gradio UI.
interface = gr.Interface(
    fn=rag_interface,
    inputs="text",
    outputs="text",
    title="Economic and Population Growth Advisor",
    description="Ask questions related to economic and population growth. This app uses retrieval-augmented generation to provide answers based on relevant documents."
)

# debug=True surfaces tracebacks in the browser/console while developing.
interface.launch(debug=True)