|
import gradio as gr |
|
import pandas as pd |
|
import os |
|
import time |
|
import threading |
|
import tempfile |
|
import logging |
|
import uuid |
|
import shutil |
|
import glob |
|
from datetime import datetime |
|
import sys |
|
import types |
|
|
|
|
|
# Configure process-wide logging once at import time: INFO level with a
# timestamped format, mirrored to stderr and appended to a local log file.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(),
        # mode='a': restarts append to the existing history instead of truncating.
        logging.FileHandler('category_analysis_app.log', mode='a')
    ]
)

# Module-level logger used by every function in this file.
logger = logging.getLogger(__name__)
|
|
|
|
|
def load_module_from_env(module_name, env_var_name):
    """Materialise a module at runtime from source code stored in an env var.

    Args:
        module_name: name under which the new module is registered in
            ``sys.modules``.
        env_var_name: environment variable holding the module's source code.

    Returns:
        The freshly created module object.

    Raises:
        ValueError: if the environment variable is unset or empty.
        Exception: anything raised while executing the embedded source is
            logged and re-raised.
    """
    try:
        module_code = os.getenv(env_var_name)
        if not module_code:
            raise ValueError(f"νκ²½λ³μ {env_var_name}κ° μ€μ λμ§ μμμ΅λλ€.")

        module = types.ModuleType(module_name)

        # Pre-populate the namespace with the dependencies the embedded code
        # expects. importlib.import_module in a loop replaces the original
        # repetitive __import__ chain (same modules, same names).
        import importlib
        dependency_names = (
            'os', 'time', 'logging', 'pandas', 'requests', 'tempfile',
            'threading', 're', 'random', 'uuid', 'shutil', 'glob',
            'datetime', 'types', 'collections', 'hmac', 'hashlib', 'base64',
        )
        namespace = {name: importlib.import_module(name) for name in dependency_names}
        # Convenience aliases used directly by the embedded modules.
        namespace['Counter'] = namespace['collections'].Counter
        namespace['defaultdict'] = namespace['collections'].defaultdict
        module.__dict__.update(namespace)

        # SECURITY: exec() runs arbitrary code taken from the environment.
        # Acceptable only because these env vars are set by the deployment
        # owner; never point them at untrusted input.
        exec(module_code, module.__dict__)

        # Register so later `import module_name` statements resolve to it.
        sys.modules[module_name] = module

        logger.info(f"β λͺ¨λ {module_name} λ‘λ μλ£")
        return module

    except Exception as e:
        logger.error(f"β λͺ¨λ {module_name} λ‘λ μ€ν¨: {e}")
        raise
|
|
|
|
|
# ---------------------------------------------------------------------------
# Dynamic module loading.  All business-logic modules ship as source code in
# environment variables and are materialised here at import time.  Order
# matters: later modules receive references to earlier ones via attribute
# injection (they expect these names as module-level globals).
# ---------------------------------------------------------------------------
logger.info("π λͺ¨λ λ‘λ μμ...")

try:
    api_utils = load_module_from_env('api_utils', 'API_UTILS_CODE')

    text_utils = load_module_from_env('text_utils', 'TEXT_UTILS_CODE')

    keyword_search = load_module_from_env('keyword_search', 'KEYWORD_SEARCH_CODE')

    # product_search depends on api_utils and text_utils; inject them manually.
    product_search_module = load_module_from_env('product_search', 'PRODUCT_SEARCH_CODE')
    product_search_module.api_utils = api_utils
    product_search_module.text_utils = text_utils
    product_search = product_search_module

    # keyword_processor depends on text_utils, keyword_search, product_search.
    keyword_processor_module = load_module_from_env('keyword_processor', 'KEYWORD_PROCESSOR_CODE')
    keyword_processor_module.text_utils = text_utils
    keyword_processor_module.keyword_search = keyword_search
    keyword_processor_module.product_search = product_search
    keyword_processor = keyword_processor_module

    export_utils = load_module_from_env('export_utils', 'EXPORT_UTILS_CODE')

    # category_analysis depends on text_utils, product_search, keyword_search.
    category_analysis_module = load_module_from_env('category_analysis', 'CATEGORY_ANALYSIS_CODE')
    category_analysis_module.text_utils = text_utils
    category_analysis_module.product_search = product_search
    category_analysis_module.keyword_search = keyword_search
    category_analysis = category_analysis_module

    logger.info("β λͺ¨λ  λͺ¨λ λ‘λ μλ£")

except Exception as e:
    # Fatal: the app cannot run without these modules.  List what is required
    # so a misconfigured deployment is easy to diagnose from the log.
    logger.error(f"β λͺ¨λ λ‘λ μ€ μΉλͺ μ  μ€λ₯: {e}")
    logger.error("νμν νκ²½λ³μλ€μ΄ μ€μ λμλμ§ νμΈνμΈμ:")
    logger.error("- API_UTILS_CODE")
    logger.error("- TEXT_UTILS_CODE")
    logger.error("- KEYWORD_SEARCH_CODE")
    logger.error("- PRODUCT_SEARCH_CODE")
    logger.error("- KEYWORD_PROCESSOR_CODE")
    logger.error("- EXPORT_UTILS_CODE")
    logger.error("- CATEGORY_ANALYSIS_CODE")
    raise

# Per-session registries (in-memory, process-wide; cleared on startup):
# session_temp_files: session_id -> list of temp-file paths to clean up later.
session_temp_files = {}
# session_data: session_id -> {'last_activity': unix_timestamp, ...}.
session_data = {}
|
|
|
def cleanup_huggingface_temp_folders():
    """Best-effort purge of stale export/temp files left over from earlier runs.

    Scans the usual temp locations for files matching this app's export
    naming patterns and deletes those older than one hour. Every failure is
    logged and ignored so startup can always proceed.
    """
    try:
        candidate_dirs = [
            tempfile.gettempdir(),
            "/tmp",
            "/var/tmp",
            os.path.join(os.getcwd(), "temp"),
            os.path.join(os.getcwd(), "tmp"),
            "/gradio_cached_examples",
            "/flagged"
        ]

        # File-name patterns this app (and Gradio) may have left behind.
        stale_patterns = (
            "session_*.xlsx", "session_*.csv",
            "*category*.xlsx", "*category*.csv",
            "*analysis*.xlsx", "*analysis*.csv",
            "tmp*.xlsx", "tmp*.csv",
        )

        removed_total = 0

        for candidate in candidate_dirs:
            if not os.path.exists(candidate):
                continue
            try:
                matches = []
                for pattern in stale_patterns:
                    matches.extend(glob.glob(os.path.join(candidate, pattern)))

                for stale_path in matches:
                    try:
                        # Only delete files untouched for at least one hour.
                        if os.path.getmtime(stale_path) < time.time() - 3600:
                            os.remove(stale_path)
                            removed_total += 1
                            logger.info(f"μ΄κΈ° μ 리: μ€λλ μμ νμΌ μμ - {stale_path}")
                    except Exception as e:
                        logger.warning(f"νμΌ μμ μ€ν¨ (무μλ¨): {stale_path} - {e}")

            except Exception as e:
                logger.warning(f"μμ λλ ν 리 μ 리 μ€ν¨ (무μλ¨): {candidate} - {e}")

        logger.info(f"β νκΉ νμ΄μ€ μμ ν΄λ μ΄κΈ° μ 리 μλ£ - {removed_total}κ° νμΌ μμ ")

        # Drop any Gradio example cache from a previous run as well.
        try:
            gradio_cache = os.path.join(os.getcwd(), "gradio_cached_examples")
            if os.path.exists(gradio_cache):
                shutil.rmtree(gradio_cache, ignore_errors=True)
                logger.info("Gradio μΊμ ν΄λ μ 리 μλ£")
        except Exception as e:
            logger.warning(f"Gradio μΊμ ν΄λ μ 리 μ€ν¨ (무μλ¨): {e}")

    except Exception as e:
        logger.error(f"μ΄κΈ° μμ ν΄λ μ 리 μ€ μ€λ₯ (κ³μ μ§ν): {e}")
|
|
|
def setup_clean_temp_environment():
    """Create a fresh app-specific temp directory and publish its location.

    Returns:
        The dedicated directory path, or the system temp dir on failure.
    """
    try:
        # Clear leftovers from earlier runs first.
        cleanup_huggingface_temp_folders()

        dedicated_dir = os.path.join(tempfile.gettempdir(), "category_analysis_app")

        # Recreate from scratch so no stale files survive a restart.
        if os.path.exists(dedicated_dir):
            shutil.rmtree(dedicated_dir, ignore_errors=True)
        os.makedirs(dedicated_dir, exist_ok=True)

        # Publish the location so get_app_temp_dir() can find it anywhere.
        os.environ['CATEGORY_APP_TEMP'] = dedicated_dir

        logger.info(f"β μ ν리μΌμ΄μ μ μ© μμ λλ ν 리 μ€μ : {dedicated_dir}")

        return dedicated_dir

    except Exception as e:
        # Fall back to the plain system temp directory rather than failing.
        logger.error(f"μμ νκ²½ μ€μ μ€ν¨: {e}")
        return tempfile.gettempdir()
|
|
|
def get_app_temp_dir():
    """Return the app-specific temp directory, or the system default if unset."""
    configured = os.environ.get('CATEGORY_APP_TEMP')
    return configured if configured is not None else tempfile.gettempdir()
|
|
|
def get_session_id():
    """Generate a fresh random session identifier (UUID4 in string form)."""
    return f"{uuid.uuid4()}"
|
|
|
def cleanup_session_files(session_id, delay=300):
    """Schedule deletion of a session's temp files after ``delay`` seconds.

    Spawns a daemon thread that sleeps, then removes every file registered
    for ``session_id`` in the module-level ``session_temp_files`` registry
    and drops the session's entry.
    """
    def cleanup():
        time.sleep(delay)
        if session_id in session_temp_files:
            # Snapshot the list, then unregister BEFORE deleting, so a
            # concurrent reader never sees paths that are about to vanish.
            files_to_remove = session_temp_files[session_id].copy()
            del session_temp_files[session_id]

            for file_path in files_to_remove:
                try:
                    if os.path.exists(file_path):
                        os.remove(file_path)
                        logger.info(f"μΈμ {session_id[:8]}... μμ νμΌ μμ : {file_path}")
                except Exception as e:
                    # Best-effort: log and continue with the remaining files.
                    logger.error(f"μΈμ {session_id[:8]}... νμΌ μμ μ€λ₯: {e}")

    # Daemon thread: does not block interpreter shutdown.
    threading.Thread(target=cleanup, daemon=True).start()
|
|
|
def register_session_file(session_id, file_path):
    """Track ``file_path`` as owned by ``session_id`` for later cleanup.

    Args:
        session_id: session identifier the file belongs to.
        file_path: path of the temp file to register.
    """
    # setdefault replaces the original's separate membership check + insert.
    session_temp_files.setdefault(session_id, []).append(file_path)
|
|
|
def cleanup_old_sessions():
    """Remove registry entries and temp files for sessions idle over an hour."""
    now = time.time()

    # Collect first, then delete, to avoid mutating dicts while iterating.
    expired = [
        sid for sid, data in session_data.items()
        if now - data.get('last_activity', 0) > 3600
    ]

    for sid in expired:
        # Delete any temp files still registered to the expired session.
        if sid in session_temp_files:
            for file_path in session_temp_files[sid]:
                try:
                    if os.path.exists(file_path):
                        os.remove(file_path)
                        logger.info(f"μ€λλ μΈμ {sid[:8]}... νμΌ μμ : {file_path}")
                except Exception as e:
                    logger.error(f"μ€λλ μΈμ νμΌ μμ μ€λ₯: {e}")
            del session_temp_files[sid]

        # Drop the session's bookkeeping entry.
        if sid in session_data:
            del session_data[sid]
            logger.info(f"μ€λλ μΈμ λ°μ΄ν° μμ : {sid[:8]}...")
|
|
def update_session_activity(session_id):
    """Record the current time as ``session_id``'s last-activity timestamp.

    Creates the per-session dict on first touch; existing keys are preserved.
    """
    # setdefault collapses the original's membership check + empty-dict insert.
    session_data.setdefault(session_id, {})['last_activity'] = time.time()
|
|
|
def create_session_temp_file(session_id, suffix='.xlsx'):
    """Create, register and return an empty uniquely-named temp file path.

    The name combines the session prefix, a timestamp and the last digits of
    the nanosecond clock so concurrent calls never collide.
    """
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    nonce = str(time.time_ns())[-4:]

    temp_path = os.path.join(
        get_app_temp_dir(),
        f"session_{session_id[:8]}_{stamp}_{nonce}{suffix}",
    )

    # Touch the file so downstream writers find an existing path.
    with open(temp_path, 'w'):
        pass

    register_session_file(session_id, temp_path)
    return temp_path
|
|
|
def analyze_product_terms_wrapper(product_name, main_keyword, current_state, session_id):
    """Run the product-name keyword analysis and package results for the UI.

    Returns (result_html, updated_state, excel_path, section_visibility_update).
    """
    update_session_activity(session_id)

    # Guard clause: nothing to analyse without a product name.
    if not product_name:
        return "μνλͺ μ μ λ ₯ν΄μ£ΌμΈμ.", current_state, None, gr.update(visible=False)

    result_html, keyword_results = category_analysis.analyze_product_terms(product_name, main_keyword)

    # Normalise the Gradio state object to a dict before mutating it.
    state = current_state if isinstance(current_state, dict) else {}
    state["keyword_analysis_results"] = keyword_results
    state["product_name"] = product_name
    state["main_keyword"] = main_keyword

    # Pre-generate the Excel export so the download link is ready immediately.
    excel_path = download_analysis(state, session_id)

    return result_html, state, excel_path, gr.update(visible=True)
|
|
|
def download_analysis(result, session_id):
    """Export the session's keyword-analysis results to a styled Excel file.

    Args:
        result: analysis state dict; must contain "keyword_analysis_results".
        session_id: session identifier used for temp-file bookkeeping.

    Returns:
        Path of the generated ``.xlsx`` file, or ``None`` when there is
        nothing to export or the export fails (failures are logged).
    """
    update_session_activity(session_id)

    if not result or not isinstance(result, dict):
        logger.warning(f"μΈμ {session_id[:8]}... λΆμ κ²°κ³Όκ° μμ΅λλ€.")
        return None

    try:
        # Guard clause (was a trailing else-branch in the original).
        if "keyword_analysis_results" not in result:
            logger.warning(f"μΈμ {session_id[:8]}... ν€μλ λΆμ κ²°κ³Όκ° μμ΅λλ€.")
            return None

        logger.info(f"μΈμ {session_id[:8]}... ν€μλ λΆμ κ²°κ³Ό ν¬ν¨νμ¬ λ€μ΄λ‘λ: {len(result['keyword_analysis_results'])}κ° ν€μλ")

        temp_filename = create_session_temp_file(session_id, '.xlsx')

        # Column order defines the export layout; text columns default to "",
        # volume columns to 0. Replaces six parallel append-lists.
        column_defaults = [
            ("ν€μλ", ""),
            ("PCκ²μλ", 0),
            ("λͺ¨λ°μΌκ²μλ", 0),
            ("μ΄κ²μλ", 0),
            ("κ²μλꡬκ°", ""),
            ("μΉ΄ν κ³ λ¦¬νλͺ©", ""),
        ]
        df = pd.DataFrame(
            [
                {col: kw_result.get(col, default) for col, default in column_defaults}
                for kw_result in result["keyword_analysis_results"]
            ],
            columns=[col for col, _ in column_defaults],
        )

        sheet_name = "μνλͺ κ²μ¦ κ²°κ³Ό"
        with pd.ExcelWriter(temp_filename, engine="xlsxwriter") as writer:
            df.to_excel(writer, sheet_name=sheet_name, index=False)

            # Single worksheet handle (the original looked it up twice).
            worksheet = writer.sheets[sheet_name]

            # Wrapped, top-aligned text for the long category column.
            wrap_fmt = writer.book.add_format({
                "text_wrap": True,
                "valign": "top"
            })

            # Column widths. BUGFIX: the original called set_column('F:F', 40)
            # a second time WITHOUT a format, silently clobbering wrap_fmt;
            # set each column exactly once.
            worksheet.set_column('A:A', 20)
            worksheet.set_column('B:E', 12)
            worksheet.set_column('F:F', 40, wrap_fmt)

            # Re-write the header row with the brand style.
            header_format = writer.book.add_format({
                'bold': True,
                'bg_color': '#FB7F0D',
                'color': 'white',
                'border': 1
            })
            for col_num, value in enumerate(df.columns.values):
                worksheet.write(0, col_num, value, header_format)

        logger.info(f"μΈμ {session_id[:8]}... μμ νμΌ μ μ₯ μλ£: {temp_filename}")
        return temp_filename

    except Exception as e:
        logger.error(f"μΈμ {session_id[:8]}... λ€μ΄λ‘λ μ€ μ€λ₯ λ°μ: {e}")
        import traceback
        logger.error(traceback.format_exc())
        return None
|
|
|
def reset_interface(session_id):
    """Delete the session's temp files and return blank values for every output.

    Output order matches the reset click's outputs list:
    (main_keyword, product_name, result html, download file, state, result section).
    """
    update_session_activity(session_id)

    if session_id in session_temp_files:
        # Physically remove every file generated during this session.
        for file_path in session_temp_files[session_id]:
            try:
                if os.path.exists(file_path):
                    os.remove(file_path)
                    logger.info(f"μΈμ {session_id[:8]}... 리μ μ νμΌ μμ : {file_path}")
            except Exception as e:
                logger.error(f"μΈμ {session_id[:8]}... 리μ μ νμΌ μμ μ€λ₯: {e}")
        # Keep the session registered, but with no files.
        session_temp_files[session_id] = []

    return "", "", "", None, None, gr.update(visible=False)
|
|
|
def product_analyze_with_loading(product_name, main_keyword, current_state, session_id):
    """First stage of the analyze click: just reveal the progress section.

    The input values are accepted (and ignored) so this handler can share the
    click event's input signature with process_product_analyze.
    """
    update_session_activity(session_id)
    return gr.update(visible=True)
|
|
|
def process_product_analyze(product_name, main_keyword, current_state, session_id):
    """Second stage of the analyze click: run the analysis, then hide progress."""
    update_session_activity(session_id)

    analysis_outputs = analyze_product_terms_wrapper(
        product_name, main_keyword, current_state, session_id
    )

    # Append one more update that hides the progress section again.
    return (*analysis_outputs, gr.update(visible=False))
|
|
|
|
|
def start_session_cleanup_scheduler():
    """Launch a daemon thread that purges stale sessions and temp files forever."""
    def _run_forever():
        while True:
            time.sleep(600)  # every 10 minutes
            cleanup_old_sessions()
            cleanup_huggingface_temp_folders()

    # Daemon thread: dies with the process, never blocks shutdown.
    threading.Thread(target=_run_forever, daemon=True).start()
|
|
|
def cleanup_on_startup():
    """Run the full startup cleanup and return the app's dedicated temp dir."""
    logger.info("π§Ή μΉ΄ν κ³ λ¦¬ λΆμ μ ν리μΌμ΄μ μμ - μ΄κΈ° μ 리 μμ μμ...")

    # Remove leftovers from previous runs, then build a fresh temp dir.
    cleanup_huggingface_temp_folders()
    app_temp_dir = setup_clean_temp_environment()

    # Forget any in-memory session bookkeeping from a previous (hot) start.
    # (.clear() mutates the shared dicts in place; no `global` needed.)
    session_temp_files.clear()
    session_data.clear()

    logger.info(f"β μ΄κΈ° μ 리 μμ μλ£ - μ± μ μ© λλ ν 리: {app_temp_dir}")

    return app_temp_dir
|
|
|
|
|
def create_app():
    """Build and return the Gradio Blocks UI for the category-analysis app.

    Wires two-stage click handling (show progress, then analyse) and a reset
    button onto a single-page layout. Returns the gr.Blocks instance.
    """
    # External fonts/icons injected into the page head.
    fontawesome_html = """
    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css">
    <link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/orioncactus/pretendard/dist/web/static/pretendard.css">
    <link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=Noto+Sans+KR:wght@300;400;500;700&display=swap">
    """

    # Prefer an external stylesheet; fall back to the embedded CSS below.
    try:
        with open('style.css', 'r', encoding='utf-8') as f:
            custom_css = f.read()
    # BUGFIX: was a bare `except:`, which also swallowed KeyboardInterrupt /
    # SystemExit. Only a missing/unreadable or mis-encoded file should fall
    # back to the embedded styles.
    except (OSError, UnicodeDecodeError):
        custom_css = """
        :root {
            --primary-color: #FB7F0D;
            --secondary-color: #ff9a8b;
        }
        .custom-button {
            background: linear-gradient(135deg, var(--primary-color), var(--secondary-color)) !important;
            color: white !important;
            border-radius: 30px !important;
            height: 45px !important;
            font-size: 16px !important;
            font-weight: bold !important;
            width: 100% !important;
            text-align: center !important;
            display: flex !important;
            align-items: center !important;
            justify-content: center !important;
        }
        .reset-button {
            background: linear-gradient(135deg, #6c757d, #495057) !important;
            color: white !important;
            border-radius: 30px !important;
            height: 45px !important;
            font-size: 16px !important;
            font-weight: bold !important;
            width: 100% !important;
            text-align: center !important;
            display: flex !important;
            align-items: center !important;
            justify-content: center !important;
        }
        .section-title {
            border-bottom: 2px solid #FB7F0D;
            font-weight: bold;
            padding-bottom: 5px;
            margin-bottom: 15px;
        }
        .loading-indicator {
            display: flex;
            align-items: center;
            justify-content: center;
            padding: 15px;
            background-color: #f8f9fa;
            border-radius: 5px;
            margin: 10px 0;
            border: 1px solid #ddd;
        }
        .loading-spinner {
            border: 4px solid rgba(0, 0, 0, 0.1);
            width: 24px;
            height: 24px;
            border-radius: 50%;
            border-left-color: #FB7F0D;
            animation: spin 1s linear infinite;
            margin-right: 10px;
        }
        @keyframes spin {
            0% { transform: rotate(0deg); }
            100% { transform: rotate(360deg); }
        }
        .progress-bar {
            height: 10px;
            background-color: #FB7F0D;
            border-radius: 5px;
            width: 0%;
            animation: progressAnim 2s ease-in-out infinite;
        }
        @keyframes progressAnim {
            0% { width: 10%; }
            50% { width: 70%; }
            100% { width: 10%; }
        }
        .execution-section {
            margin-top: 20px;
            background-color: #f9f9f9;
            border-radius: 8px;
            padding: 15px;
            border: 1px solid #e5e5e5;
        }
        .session-info {
            background-color: #e8f4f8;
            padding: 8px 12px;
            border-radius: 4px;
            font-size: 12px;
            color: #0c5460;
            margin-bottom: 10px;
            text-align: center;
        }
        """

    with gr.Blocks(css=custom_css, theme=gr.themes.Default(
        primary_hue="orange",
        secondary_hue="orange",
        font=[gr.themes.GoogleFont("Noto Sans KR"), "ui-sans-serif", "system-ui"]
    )) as demo:
        gr.HTML(fontawesome_html)

        # Per-browser-session random id; drives all per-session bookkeeping.
        session_id = gr.State(get_session_id)

        # --- Input section -------------------------------------------------
        with gr.Column(elem_classes="custom-frame fade-in"):
            gr.HTML('<div class="section-title"><i class="fas fa-tag"></i> μνλͺ λΆμ μ λ ₯</div>')

            with gr.Row():
                with gr.Column(scale=1):
                    main_keyword = gr.Textbox(
                        label="λ©μΈ ν€μλ",
                        placeholder="μ: μ€μ§μ΄"
                    )
                with gr.Column(scale=1):
                    product_name = gr.Textbox(
                        label="μνλͺ",
                        placeholder="μ: μμ§ μ€μ§μ΄ μ΄μ΄ν μ§λ―Έμ±"
                    )

        # --- Execution buttons ---------------------------------------------
        with gr.Column(elem_classes="execution-section"):
            gr.HTML('<div class="section-title"><i class="fas fa-play-circle"></i> μ€ν</div>')
            with gr.Row():
                with gr.Column(scale=1):
                    analyze_product_btn = gr.Button(
                        "μνλͺ λΆμ",
                        elem_classes=["execution-button", "primary-button"]
                    )
                with gr.Column(scale=1):
                    reset_btn = gr.Button(
                        "λͺ¨λ μ λ ₯ μ΄κΈ°ν",
                        elem_classes=["execution-button", "secondary-button"]
                    )

        # --- Progress indicator (hidden until an analysis runs) -------------
        with gr.Column(elem_classes="custom-frame fade-in", visible=False) as progress_section:
            gr.HTML('<div class="section-title"><i class="fas fa-spinner"></i> λΆμ μ§ν μν</div>')

            progress_html = gr.HTML("""
            <div style="padding: 15px; background-color: #f9f9f9; border-radius: 5px; margin: 10px 0; border: 1px solid #ddd;">
                <div style="margin-bottom: 10px; display: flex; align-items: center;">
                    <i class="fas fa-spinner fa-spin" style="color: #FB7F0D; margin-right: 10px;"></i>
                    <span>μνλͺ λΆμμ€μ λλ€. μ μλ§ κΈ°λ€λ €μ£ΌμΈμ...</span>
                </div>
                <div style="background-color: #e9ecef; height: 10px; border-radius: 5px; overflow: hidden;">
                    <div class="progress-bar"></div>
                </div>
            </div>
            """)

        # --- Results (hidden until an analysis completes) --------------------
        with gr.Column(elem_classes="custom-frame fade-in", visible=False) as product_analysis_section:
            gr.HTML('<div class="section-title"><i class="fas fa-table"></i> μνλͺ ν€μλ λΆμ κ²°κ³Ό</div>')

            product_analysis_result = gr.HTML(elem_classes="fade-in")

            download_file = gr.File(
                label="λΆμ κ²°κ³Ό λ€μ΄λ‘λ",
                visible=True
            )

        # Holds the latest analysis state dict between events.
        analysis_result_state = gr.State()

        # Two-stage click: show the progress section first, then run the
        # analysis and hide it again when results are ready.
        analyze_product_btn.click(
            fn=product_analyze_with_loading,
            inputs=[product_name, main_keyword, analysis_result_state, session_id],
            outputs=[progress_section]
        ).then(
            fn=process_product_analyze,
            inputs=[product_name, main_keyword, analysis_result_state, session_id],
            outputs=[
                product_analysis_result, analysis_result_state,
                download_file, product_analysis_section, progress_section
            ]
        )

        # Reset clears inputs, results, state and the session's temp files.
        reset_btn.click(
            fn=reset_interface,
            inputs=[session_id],
            outputs=[
                main_keyword, product_name, product_analysis_result,
                download_file, analysis_result_state, product_analysis_section
            ]
        )

    return demo
|
|
|
if __name__ == "__main__":
    logger.info("π μΉ΄ν κ³ λ¦¬ λΆμ μ ν리μΌμ΄μ μμ...")

    # Purge leftovers and create the app-specific temp directory.
    app_temp_dir = cleanup_on_startup()

    # Background purge of stale sessions/files every 10 minutes.
    start_session_cleanup_scheduler()

    # Initialise external API configuration (dynamically loaded module).
    api_utils.initialize_api_configs()

    # Warm up the Gemini model handle (dynamically loaded module).
    gemini_model = text_utils.get_gemini_model()

    logger.info("===== λ©ν°μ μ  μΉ΄ν κ³ λ¦¬ λΆμ Application Startup at %s =====", time.strftime("%Y-%m-%d %H:%M:%S"))
    logger.info(f"π μμ νμΌ μ μ₯ μμΉ: {app_temp_dir}")

    try:
        app = create_app()
        app.launch(
            share=False,
            server_name="0.0.0.0",   # listen on all interfaces (container deploy)
            server_port=7860,
            max_threads=40,
            auth=None,
            show_error=True,
            quiet=False,
            favicon_path=None,
            ssl_verify=False
        )
    except Exception as e:
        logger.error(f"μ ν리μΌμ΄μ μ€ν μ€ν¨: {e}")
        raise
    finally:
        # Best-effort final cleanup, even after a crash.
        logger.info("π§Ή μ ν리μΌμ΄μ μ’ λ£ - μ΅μ’ μ 리 μμ ...")
        try:
            cleanup_huggingface_temp_folders()
            if os.path.exists(app_temp_dir):
                shutil.rmtree(app_temp_dir, ignore_errors=True)
            logger.info("β μ΅μ’ μ 리 μλ£")
        except Exception as e:
            logger.error(f"μ΅μ’ μ 리 μ€ μ€λ₯: {e}")