Update app.py
app.py
CHANGED
@@ -14,15 +14,11 @@ except Exception as e:
 def generate_code(prompt: str, backend_choice: str, max_tokens: int, temperature: float, top_p: float):
     system_message = (
         "You are an AI assistant programmed to generate website codes only. "
-        "You must not use triple backticks (```html, ```python,
+        "You must not use triple backticks (```html, ```python, etc.). "
         "If multiple files are needed, separate them clearly using:\n"
-        "
-        "
-        "
-        "The user will select backend type (Static / Flask / Node.js), and you must generate accordingly: "
-        "- For Static: simple index.html.\n"
-        "- For Flask or Node.js: include minimal backend scripts with index.html frontend.\n"
-        "If the user requests non-website code, reply with:\n"
+        "TAB.NAME={filename}\n"
+        "Only generate code. No explanations, no phrases like 'Here is the code'. "
+        "If user asks non-website code, reply:\n"
         "'hey there! am here to create websites for you unfortunately am programmed to not create codes! otherwise I would go on the naughty list :-('."
     )
     user_prompt = f"USER_PROMPT = {prompt}\nUSER_BACKEND = {backend_choice}"
@@ -45,7 +41,7 @@ def generate_code(prompt: str, backend_choice: str, max_tokens: int, temperature
             if isinstance(token, str):
                 response_stream += token
                 full_response += token
-                yield response_stream
+                yield {"raw": response_stream}
         cleaned_response = full_response.strip()
         cleaned_response = re.sub(r"^\s*```[a-z]*\s*\n?", "", cleaned_response)
         cleaned_response = re.sub(r"\n?\s*```\s*$", "", cleaned_response)
@@ -57,19 +53,36 @@ def generate_code(prompt: str, backend_choice: str, max_tokens: int, temperature
         for phrase in common_phrases:
             if cleaned_response.lower().startswith(phrase.lower()):
                 cleaned_response = cleaned_response[len(phrase):].lstrip()
-        yield cleaned_response.strip()
+        yield {"final": cleaned_response.strip()}
     except Exception as e:
-        yield f"## Error\n\nFailed to generate code.\n**Reason:** {e}"
+        yield {"error": f"## Error\n\nFailed to generate code.\n**Reason:** {e}"}
+
+def display_output(output_dict):
+    if "error" in output_dict:
+        return gr.Markdown(output_dict["error"])
+    if "raw" in output_dict:
+        return gr.Markdown("Generating...")
+    cleaned_text = output_dict["final"]
+    file_splits = re.split(r'TAB\.NAME=\{(.+?)\}', cleaned_text)
+    components = []
+    if not file_splits[0].strip():
+        file_splits = file_splits[1:]
+    for i in range(0, len(file_splits), 2):
+        filename = file_splits[i].strip()
+        content = file_splits[i+1].strip() if (i+1) < len(file_splits) else ""
+        components.append(gr.Markdown(f"### {filename}"))
+        components.append(gr.Code(value=content, label=filename, language="html" if filename.endswith(".html") else "python" if filename.endswith(".py") else "javascript" if filename.endswith(".js") else "text", interactive=False))
+    return components

 with gr.Blocks(css=".gradio-container { max-width: 90% !important; }") as demo:
     gr.Markdown("# ✨ Website Code Generator ✨")
     gr.Markdown(
-        "Describe the website you want. The AI will generate
+        "Describe the website you want. The AI will generate website code.\n\n"
         "**Rules:**\n"
         "- Backend hint (Static / Flask / Node.js).\n"
         "- Always fully SFW and minimal errors.\n"
-        "- Only
-        "-
+        "- Only websites. No other codes.\n"
+        "- Multiple files use TAB.NAME={filename}."
     )
     with gr.Row():
         with gr.Column(scale=2):
@@ -82,16 +95,10 @@ with gr.Blocks(css=".gradio-container { max-width: 90% !important; }") as demo:
                 ["Static", "Flask", "Node.js"],
                 label="Backend Context",
                 value="Static",
-                info="Hint only. Always generates only index.html."
             )
             generate_button = gr.Button("✨ Generate Website Code", variant="primary")
         with gr.Column(scale=3):
-
-                label="Generated Code",
-                language="html",
-                lines=30,
-                interactive=False,
-            )
+            output_display = gr.Group()
     with gr.Accordion("Advanced Settings", open=False):
         max_tokens_slider = gr.Slider(
             minimum=512,
@@ -101,10 +108,14 @@ with gr.Blocks(css=".gradio-container { max-width: 90% !important; }") as demo:
             label="Max New Tokens"
         )
         temperature_slider = gr.Slider(
-            minimum=0.1, maximum=1.2,
+            minimum=0.1, maximum=1.2,
+            value=0.7,
+            step=0.1,
+            label="Temperature"
         )
         top_p_slider = gr.Slider(
-            minimum=0.1,
+            minimum=0.1,
+            maximum=1.0,
             value=0.9,
             step=0.05,
             label="Top-P"
@@ -112,7 +123,11 @@ with gr.Blocks(css=".gradio-container { max-width: 90% !important; }") as demo:
     generate_button.click(
         fn=generate_code,
         inputs=[prompt_input, backend_radio, max_tokens_slider, temperature_slider, top_p_slider],
-        outputs=
+        outputs=output_display,
+    ).then(
+        fn=display_output,
+        inputs=output_display,
+        outputs=output_display,
     )

 if __name__ == "__main__":
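For reference, a minimal standalone sketch of the multi-file splitting that the new display_output() relies on: re.split() with a capturing group returns [prefix, name1, body1, name2, body2, ...], so filename/content pairs can be walked two at a time. The sample model output below is hypothetical and only illustrates the TAB.NAME={filename} convention this commit adds to the system prompt; it does not require Gradio or a model.

import re

# Hypothetical model output using the TAB.NAME={filename} separator from the new prompt.
sample = (
    "TAB.NAME={index.html}\n<!DOCTYPE html><html><body>Hello</body></html>\n"
    "TAB.NAME={app.py}\nfrom flask import Flask\napp = Flask(__name__)\n"
)

# The capturing group makes re.split() keep the filenames in the result list.
parts = re.split(r"TAB\.NAME=\{(.+?)\}", sample)
if not parts[0].strip():          # drop the empty prefix before the first marker
    parts = parts[1:]

for i in range(0, len(parts), 2):
    filename = parts[i].strip()
    content = parts[i + 1].strip() if (i + 1) < len(parts) else ""
    print(f"--- {filename} ---")
    print(content)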
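The click handler also changes shape: generate_code() now streams dictionaries instead of raw strings ({"raw": partial_text} while tokens arrive, then one {"final": cleaned_text}, or {"error": message} on failure), and a chained .then(display_output, ...) renders whichever key is present. A small sketch of that dict protocol, with a stand-in generator in place of the real model stream (the names here are illustrative, not taken from the Space):

# Stand-in for generate_code(): same dict protocol, no model or Gradio required.
def fake_generate():
    partial = ""
    for token in ["<html>", "<body>Hi</body>", "</html>"]:
        partial += token
        yield {"raw": partial}        # streamed progress updates
    yield {"final": partial.strip()}  # cleaned result is emitted last

def render(update: dict) -> str:
    # Mirrors the branching in display_output(), returning strings instead of components.
    if "error" in update:
        return update["error"]
    if "raw" in update:
        return "Generating..."
    return update["final"]

last = ""
for update in fake_generate():
    last = render(update)
print(last)  # -> "<html><body>Hi</body></html>"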