Update app.py
app.py CHANGED
@@ -1,42 +1,67 @@
"""
-Square Theory Generator (
-
-⚙️ **Implementation notes**
-- Uses **Gradio Blocks** to compose the input/output layout freely. (cf. gradio docs)
-- Draws a simple square diagram with **Matplotlib** to aid visual understanding.
-- The word-combination logic is a simplified demo version; it can be extended with WordNet, Kkma, etc. if needed.
-
-Author: ChatGPT (OpenAI)
"""

import gradio as gr
import matplotlib.pyplot as plt
-from matplotlib import patches

    fig, ax = plt.subplots(figsize=(4, 4))
    square = patches.Rectangle((0, 0), 1, 1, fill=False, linewidth=2)
    ax.add_patch(square)

-    ax.text(-0.05, 1.05, a, ha="right", va="bottom", fontsize=14, fontweight="bold")
-    ax.text(1.05, 1.05, b, ha="left", va="bottom", fontsize=14, fontweight="bold")
-    ax.text(-0.05, -0.05, a_alt, ha="right", va="top", fontsize=14, fontweight="bold")
-    ax.text(1.05, -0.05, b_alt, ha="left", va="top", fontsize=14, fontweight="bold")

-    # Hide axes
    ax.set_xticks([])
    ax.set_yticks([])
    ax.set_xlim(-0.2, 1.2)

@@ -44,51 +69,71 @@ def draw_square(a: str, b: str, a_alt: str, b_alt: str):
    ax.set_aspect("equal")
    return fig

-    """Core callback: returns diagram + text outputs."""
-    # Basic phrases
-    top_phrase = f"{a} {b}"
-    bottom_phrase = f"{a_alt} {b_alt}"
-
-    # Simple 2-line slogan
-    slogan = f"{top_phrase}!\n{bottom_phrase}!"
-
-    # Brand suggestions (kor+eng)
-    brand_korean = f"{a_alt}{b_alt}"
-    brand_english = f"{a.capitalize()}{b.capitalize()}"
-    brand_combo = f"{brand_korean} / {brand_english}"
-
-    # Figure
-    fig = draw_square(a, b, a_alt, b_alt)

-    return fig, top_phrase, bottom_phrase, slogan, brand_combo

-    b = gr.Textbox(label="Word 2 (top right)")
-    with gr.Row():
-        a_alt = gr.Textbox(label="Paired word 1 (bottom left)")
-        b_alt = gr.Textbox(label="Paired word 2 (bottom right)")

    fig_out = gr.Plot(label="Square diagram")
    top_out = gr.Textbox(label="Top-edge phrase")
    bottom_out = gr.Textbox(label="Bottom-edge phrase")
-    slogan_out = gr.Textbox(label="
    brand_out = gr.Textbox(label="Brand name suggestion")

-        fn=generate_square,
-        inputs=[a, b, a_alt, b_alt],
-        outputs=[fig_out, top_out, bottom_out, slogan_out, brand_out],
-    )

if __name__ == "__main__":
    demo.launch()
"""
+Square Theory Generator (Korean-friendly, LLM-powered)
+-----------------------------------------------------
+Changes
+0) Auto-configure a Korean font for Matplotlib so Hangul no longer renders as broken glyphs
+1) Keep a single input prompt; the LLM generates the remaining three words
+2) Use the official OpenAI Python SDK (`openai`), following the provided `client.responses.create` example
+3) The LLM returns the square's words plus a two-line copy and a brand name as JSON, which is rendered automatically
+
+⚠️ Set the **OPENAI_API_KEY** environment variable beforehand.
"""

+import os
+import json
import gradio as gr
import matplotlib.pyplot as plt
+from matplotlib import patches, font_manager, rcParams
+from openai import OpenAI

+# -------------------------------------------------
+# 0. Auto-detect and configure a Korean font (Malgun, Nanum, etc.)
+# -------------------------------------------------

+def _set_korean_font():
+    candidates = [
+        "Malgun Gothic",  # Windows
+        "NanumGothic",    # Linux (apt install fonts-nanum)
+        "AppleGothic",    # macOS
+        "DejaVu Sans",    # fallback (default font; partial Hangul support)
+    ]
+    available = {f.name for f in font_manager.fontManager.ttflist}
+    for cand in candidates:
+        if cand in available:
+            rcParams["font.family"] = cand
+            break
+    rcParams["axes.unicode_minus"] = False  # keep minus signs from breaking

+_set_korean_font()

+# -------------------------------------------------
+# 1. Initialize the OpenAI client
+# -------------------------------------------------
+OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
+if not OPENAI_API_KEY:
+    raise EnvironmentError("Set the OPENAI_API_KEY environment variable.")

+client = OpenAI()

+# -------------------------------------------------
+# 2. Draw the Square Theory diagram
+# -------------------------------------------------

+def draw_square(words):
+    """words = dict with keys tl, tr, br, bl"""
    fig, ax = plt.subplots(figsize=(4, 4))
+    # square frame
    square = patches.Rectangle((0, 0), 1, 1, fill=False, linewidth=2)
    ax.add_patch(square)

+    ax.text(-0.05, 1.05, words["tl"], ha="right", va="bottom", fontsize=14, fontweight="bold")
+    ax.text(1.05, 1.05, words["tr"], ha="left", va="bottom", fontsize=14, fontweight="bold")
+    ax.text(1.05, -0.05, words["br"], ha="left", va="top", fontsize=14, fontweight="bold")
+    ax.text(-0.05, -0.05, words["bl"], ha="right", va="top", fontsize=14, fontweight="bold")

    ax.set_xticks([])
    ax.set_yticks([])
    ax.set_xlim(-0.2, 1.2)
    ax.set_ylim(-0.2, 1.2)
    ax.set_aspect("equal")
    return fig

+# -------------------------------------------------
+# 3. LLM prompt & response parsing
+# -------------------------------------------------

+SYSTEM_PROMPT = """You are a Korean copywriting and brand-naming expert and a Square Theory assistant.
+Based on the single word (TL) the user provides, build the square in the following JSON format:
+- "tl": the input word, unchanged
+- "tr": another word that pairs with TL in meaning or sound
+- "br": another word that pairs with TR in meaning or sound
+- "bl": another word that pairs with BR in meaning or sound and also connects to TL
+Also propose the two phrases "top_phrase" (tl+tr) and "bottom_phrase" (bl+br) as natural Korean expressions,
+and include "slogan" (a two-line copy) and "brand" (a brand-name candidate) fields as well.
+Return only JSON whose values are **Korean UTF-8** text."""

+def call_llm(seed):
+    """Returns parsed dict from LLM JSON output."""
+    response = client.responses.create(
+        model="gpt-4.1-mini",
+        input=[
+            {"role": "system", "content": [{"type": "input_text", "text": SYSTEM_PROMPT}]},
+            {"role": "user", "content": [{"type": "input_text", "text": seed}]},
+        ],
+        text={"format": {"type": "text"}},
+        temperature=0.9,
+        max_output_tokens=512,
+    )
+    raw = response.output_text  # aggregated text output of the Responses API
+    try:
+        data = json.loads(raw)
+    except json.JSONDecodeError as e:
+        raise ValueError(f"LLM response is not valid JSON: {e}\nRaw output: {raw}")
+    return data

+# -------------------------------------------------
+# 4. Gradio callback
+# -------------------------------------------------

+def generate(seed_word):
+    data = call_llm(seed_word)
+
+    words = {
+        "tl": data["tl"],
+        "tr": data["tr"],
+        "br": data["br"],
+        "bl": data["bl"],
+    }
+    fig = draw_square(words)
+    return (
+        fig,
+        data.get("top_phrase", ""),
+        data.get("bottom_phrase", ""),
+        data.get("slogan", ""),
+        data.get("brand", ""),
+    )

+# -------------------------------------------------
+# 5. Gradio UI
+# -------------------------------------------------
+with gr.Blocks(title="Square Theory Generator - LLM Powered 🇰🇷") as demo:
+    gr.Markdown("""# 🧠 Square Theory Generator\nEnter a single word and the LLM automatically proposes the square structure, copy, and brand name.""")

+    seed = gr.Textbox(label="Seed word (TL)", placeholder="e.g., golden")
+    run_btn = gr.Button("Generate square")

    fig_out = gr.Plot(label="Square diagram")
    top_out = gr.Textbox(label="Top-edge phrase")
    bottom_out = gr.Textbox(label="Bottom-edge phrase")
+    slogan_out = gr.Textbox(label="Two-line slogan")
    brand_out = gr.Textbox(label="Brand name suggestion")

+    run_btn.click(fn=generate, inputs=seed, outputs=[fig_out, top_out, bottom_out, slogan_out, brand_out])

if __name__ == "__main__":
+    # launch(): pass share=True to allow external access
    demo.launch()
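For reference, a minimal sketch of the JSON object that SYSTEM_PROMPT asks the model to return and that `call_llm()` parses. The values below are hypothetical English examples for illustration only (the prompt itself requests Korean output); they are not actual model output:

# Hypothetical shape of the payload parsed by call_llm()
# (illustrative values, not real model output):
example = {
    "tl": "fire",   # seed word
    "tr": "fly",    # fire + fly  -> firefly   (top edge)
    "br": "paper",  # fly + paper -> flypaper  (right edge)
    "bl": "wall",   # wall + paper -> wallpaper (bottom edge); fire + wall -> firewall (left edge)
    "top_phrase": "firefly",
    "bottom_phrase": "wallpaper",
    "slogan": "Firefly!\nWallpaper!",
    "brand": "Firewall",
}

# generate() keeps only the four corner words for the diagram;
# the remaining fields are shown as-is in the text outputs.
corners = {k: example[k] for k in ("tl", "tr", "br", "bl")}

With OPENAI_API_KEY exported, running `python app.py` starts the Gradio interface via `demo.launch()`.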