Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -1,12 +1,201 @@
 import gradio as gr
 
-# Create an app with two tabs
 with gr.Blocks() as app:
-    with gr.Tab("
-        gr.
-
-
-        gr.
 
-# Run the app
 app.launch()
+# Unified Gradio app combining the five apps provided: text translator, sentiment analysis, financial analyst, personal information identifier, and customer churn
+
+import os
 import gradio as gr
+import pandas as pd
+import numpy as np
+import joblib
+import spacy
+from transformers import pipeline
+from langchain_core.pydantic_v1 import BaseModel, Field
+from langchain.prompts import HumanMessagePromptTemplate, ChatPromptTemplate
+from langchain.output_parsers import PydanticOutputParser
+from langchain_openai import ChatOpenAI
+
+# ---------------- Text Translator ---------------- #
+
+chat = ChatOpenAI()
+
+class TextTranslator(BaseModel):
+    output: str = Field(description="Translated output text")
+
+output_parser = PydanticOutputParser(pydantic_object=TextTranslator)
+format_instructions = output_parser.get_format_instructions()
+
+def text_translator(input_text: str, language: str) -> str:
+    # Keep {format_instructions} as a template variable (filled in by format_prompt below)
+    # so the braces inside the parser's JSON schema are not parsed as prompt placeholders.
+    human_template = "Enter the text that you want to translate: {input_text}, and enter the language that you want it to translate to {language}. {format_instructions}"
+    human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)
+    chat_prompt = ChatPromptTemplate.from_messages([human_message_prompt])
+    prompt = chat_prompt.format_prompt(input_text=input_text, language=language, format_instructions=format_instructions)
+    messages = prompt.to_messages()
+    response = chat.invoke(messages)
+    output = output_parser.parse(response.content)
+    return output.output
+
+# ---------------- Sentiment Analysis ---------------- #
+
+sentiment_classifier = pipeline("sentiment-analysis", model="cardiffnlp/twitter-xlm-roberta-base-sentiment")
+
+def sentiment_analysis(message, history):
+    result = sentiment_classifier(message)
+    return f"Sentiment: {result[0]['label']} (Probability: {result[0]['score']:.2f})"
+
+# ---------------- Financial Analyst ---------------- #
+
+# Requires the spaCy model en_core_web_sm to be installed in the Space
+nlp = spacy.load('en_core_web_sm')
+nlp.add_pipe('sentencizer')
+
+def split_in_sentences(text):
+    doc = nlp(text)
+    return [str(sent).strip() for sent in doc.sents]
+
+def make_spans(text, results):
+    results_list = [res['label'] for res in results]
+    return list(zip(split_in_sentences(text), results_list))
+
+auth_token = os.environ.get("HF_Token")
+
+# Speech-to-text, meeting summarization, and financial tone pipelines
+asr = pipeline("automatic-speech-recognition", "facebook/wav2vec2-base-960h")
+def speech_to_text(speech):
+    return asr(speech)["text"]
+
+summarizer = pipeline("summarization", model="knkarthick/MEETING_SUMMARY")
+def summarize_text(text):
+    return summarizer(text)[0]['summary_text']
+
+fin_model = pipeline("sentiment-analysis", model='yiyanghkust/finbert-tone', tokenizer='yiyanghkust/finbert-tone')
+def text_to_sentiment(text):
+    return fin_model(text)[0]["label"]
+
+def fin_ner(text):
+    api = gr.Interface.load("dslim/bert-base-NER", src='models', use_auth_token=auth_token)
+    return api(text)
+
+def fin_ext(text):
+    results = fin_model(split_in_sentences(text))
+    return make_spans(text, results)
+
+def fls(text):
+    fls_model = pipeline("text-classification", model="demo-org/finbert_fls", tokenizer="demo-org/finbert_fls", use_auth_token=auth_token)
+    results = fls_model(split_in_sentences(text))
+    return make_spans(text, results)
+
+# ---------------- Personal Information Identifier ---------------- #
+
+def detect_personal_info(text):
+    pii_model = gr.Interface.load("models/iiiorg/piiranha-v1-detect-personal-information")
+    return pii_model(text)
+
+# ---------------- Customer Churn ---------------- #
+
+script_dir = os.path.dirname(os.path.abspath(__file__))
+pipeline_path = os.path.join(script_dir, 'toolkit', 'pipeline.joblib')
+model_path = os.path.join(script_dir, 'toolkit', 'Random Forest Classifier.joblib')
+
+pipeline_churn = joblib.load(pipeline_path)
+model_churn = joblib.load(model_path)
+
+def calculate_total_charges(tenure, monthly_charges):
+    return tenure * monthly_charges
+
+def predict_churn(SeniorCitizen, Partner, Dependents, tenure,
+                  InternetService, OnlineSecurity, OnlineBackup, DeviceProtection, TechSupport,
+                  StreamingTV, StreamingMovies, Contract, PaperlessBilling, PaymentMethod,
+                  MonthlyCharges):
+
+    TotalCharges = calculate_total_charges(tenure, MonthlyCharges)
+    input_df = pd.DataFrame({
+        'SeniorCitizen': [SeniorCitizen],
+        'Partner': [Partner],
+        'Dependents': [Dependents],
+        'tenure': [tenure],
+        'InternetService': [InternetService],
+        'OnlineSecurity': [OnlineSecurity],
+        'OnlineBackup': [OnlineBackup],
+        'DeviceProtection': [DeviceProtection],
+        'TechSupport': [TechSupport],
+        'StreamingTV': [StreamingTV],
+        'StreamingMovies': [StreamingMovies],
+        'Contract': [Contract],
+        'PaperlessBilling': [PaperlessBilling],
+        'PaymentMethod': [PaymentMethod],
+        'MonthlyCharges': [MonthlyCharges],
+        'TotalCharges': [TotalCharges]
+    })
+
+    cat_cols = [col for col in input_df.columns if input_df[col].dtype == 'object']
+    num_cols = [col for col in input_df.columns if input_df[col].dtype != 'object']
+
+    X_processed = pipeline_churn.transform(input_df)
+
+    cat_encoder = pipeline_churn.named_steps['preprocessor'].named_transformers_['cat'].named_steps['onehot']
+    cat_feature_names = cat_encoder.get_feature_names_out(cat_cols)
+    feature_names = num_cols + list(cat_feature_names)
+    final_df = pd.DataFrame(X_processed, columns=feature_names)
+
+    # Move the first three columns to the end, presumably to match the training feature order
+    first_three_columns = final_df.iloc[:, :3]
+    remaining_columns = final_df.iloc[:, 3:]
+    final_df = pd.concat([remaining_columns, first_three_columns], axis=1)
+
+    prediction_probs = model_churn.predict_proba(final_df)[0]
+    return {
+        "Prediction: CHURN 🔴": prediction_probs[1],
+        "Prediction: STAY ✅": prediction_probs[0]
+    }
+
+# ---------------- Interface ---------------- #
 
 with gr.Blocks() as app:
+    with gr.Tab("Text Translator"):
+        input_text = gr.Textbox(label="Enter text to translate")
+        lang = gr.Textbox(label="Target language (e.g., Hindi, French)")
+        output_text = gr.Textbox(label="Translated text")
+        gr.Button("Translate").click(fn=text_translator, inputs=[input_text, lang], outputs=output_text)
+
+    with gr.Tab("Sentiment Analysis"):
+        gr.ChatInterface(sentiment_analysis)
+
+    with gr.Tab("Financial Analyst"):
+        audio_input = gr.Audio(source="microphone", type="filepath")
+        text = gr.Textbox(label="Transcribed Text")
+        gr.Button("Transcribe").click(fn=speech_to_text, inputs=audio_input, outputs=text)
+        stext = gr.Textbox(label="Summary")
+        gr.Button("Summarize").click(fn=summarize_text, inputs=text, outputs=stext)
+        gr.Button("Financial Tone").click(fn=text_to_sentiment, inputs=stext, outputs=gr.Label())
+        gr.Button("NER").click(fn=fin_ner, inputs=text, outputs=gr.HighlightedText())
+        gr.Button("Tone per sentence").click(fn=fin_ext, inputs=text, outputs=gr.HighlightedText())
+        gr.Button("Forward-looking").click(fn=fls, inputs=text, outputs=gr.HighlightedText())
+
+    with gr.Tab("Personal Information Identifier"):
+        pii_input = gr.Textbox(label="Enter text to analyze")
+        pii_output = gr.Textbox(label="Detected Personal Info")
+        gr.Button("Detect").click(fn=detect_personal_info, inputs=pii_input, outputs=pii_output)
+
+    with gr.Tab("Customer Churn"):
+        churn_inputs = [
+            gr.Radio(['Yes', 'No'], label="SeniorCitizen"),
+            gr.Radio(['Yes', 'No'], label="Partner"),
+            gr.Radio(['No', 'Yes'], label="Dependents"),
+            gr.Slider(1, 73, step=1, label="Tenure (Months)"),
+            gr.Radio(['DSL', 'Fiber optic', 'No Internet'], label="InternetService"),
+            gr.Radio(['No', 'Yes'], label="OnlineSecurity"),
+            gr.Radio(['No', 'Yes'], label="OnlineBackup"),
+            gr.Radio(['No', 'Yes'], label="DeviceProtection"),
+            gr.Radio(['No', 'Yes'], label="TechSupport"),
+            gr.Radio(['No', 'Yes'], label="StreamingTV"),
+            gr.Radio(['No', 'Yes'], label="StreamingMovies"),
+            gr.Radio(['Month-to-month', 'One year', 'Two year'], label="Contract"),
+            gr.Radio(['Yes', 'No'], label="PaperlessBilling"),
+            gr.Radio(['Electronic check', 'Mailed check', 'Bank transfer (automatic)', 'Credit card (automatic)'], label="PaymentMethod"),
+            gr.Slider(18.40, 118.65, label="MonthlyCharges")
+        ]
+        churn_output = gr.Label(label="Churn Prediction")
+        gr.Button("Predict").click(fn=predict_churn, inputs=churn_inputs, outputs=churn_output)
 
 app.launch()
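
Note: gr.Audio(source="microphone", type="filepath") and gr.Interface.load(...) are Gradio 3.x-era calls. If the Space's SDK is pinned to Gradio 4.x (the SDK version is not visible in this diff), the rough 4.x equivalents would look like the sketch below; the variable names are illustrative:

    # Hypothetical Gradio 4.x forms of the 3.x calls used above
    audio_input = gr.Audio(sources=["microphone"], type="filepath")             # 'source' became 'sources' (a list)
    ner_remote = gr.load("models/dslim/bert-base-NER", hf_token=auth_token)     # gr.Interface.load was replaced by gr.load
    pii_remote = gr.load("models/iiiorg/piiranha-v1-detect-personal-information")

Loading the two hosted models once at module scope like this, rather than inside fin_ner and detect_personal_info, would also avoid re-creating the remote client on every button click.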
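
The hard-coded column swap in predict_churn (moving the first three columns of the transformed frame to the end) assumes a fixed training layout. If the Random Forest was fitted on a pandas DataFrame with scikit-learn 1.0 or later, the estimator records the training column order and the features can be aligned explicitly; a minimal sketch, assuming model_churn exposes feature_names_in_:

    # Sketch: align the transformed features to the order the classifier was trained on,
    # instead of hard-coding which columns to move.
    expected_order = getattr(model_churn, "feature_names_in_", None)
    if expected_order is not None:
        final_df = final_df[list(expected_order)]
    prediction_probs = model_churn.predict_proba(final_df)[0]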
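
With this commit the app imports pandas, numpy, joblib, spacy, transformers, langchain, and langchain-openai in addition to gradio, so the Space's requirements.txt has to provide them (missing dependencies are a common cause of a "Runtime error" status). An illustrative, unpinned list using the standard PyPI names; torch and scikit-learn are assumed to be needed by the transformers pipelines and the joblib'ed churn model:

    gradio
    pandas
    numpy
    joblib
    spacy
    transformers
    torch
    scikit-learn
    langchain
    langchain-openai

The spaCy model en_core_web_sm is not a regular PyPI package and must be installed separately (for example from its release wheel) before spacy.load('en_core_web_sm') can succeed, and ChatOpenAI requires an OPENAI_API_KEY secret to be configured on the Space.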