# Load environment variables from a local .env file BEFORE any project module
# is imported, so configuration (API keys, etc.) is visible to src.config and
# the agents at import time.
import os
from dotenv import load_dotenv
load_dotenv()
|
|
| import gradio as gr |
| from threading import Thread |
| import tiktoken |
| import logging |
| from pathlib import Path |
|
|
| from src.config import Config |
| from src.logger import Logger |
| from src.project import ProjectManager |
| from src.state import AgentState |
| from src.agents import Agent |
|
|
| |
# Ensure every writable directory the app relies on exists with sane
# permissions. Runs at import time so db/logging/screenshot code can assume
# its target directory is present.
base_dir = Path("/code")
for dir_name in ["db", "logs", "projects", "screenshots", "pdfs", ".gradio"]:
    dir_path = base_dir / dir_name
    # parents=True also creates /code itself on a fresh container instead of
    # raising FileNotFoundError when only the leaf is missing.
    dir_path.mkdir(parents=True, exist_ok=True)
    # rwxr-xr-x: owner full access, others read/traverse only.
    dir_path.chmod(0o755)
|
|
| |
# Module-level singletons shared by every request handler below.
manager = ProjectManager()
# NOTE(review): this rebinds the *class name* AgentState to an instance,
# shadowing the imported class — nothing later in this module can construct
# another AgentState. Works, but a distinct name (e.g. agent_state) would be
# clearer; left as-is to avoid touching callers.
AgentState = AgentState()
config = Config()
# NOTE(review): this custom src.logger.Logger instance is immediately
# overwritten by the stdlib logger a few lines down and is therefore unused.
logger = Logger()
# Shared tokenizer encoding (OpenAI cl100k_base).
TIKTOKEN_ENC = tiktoken.get_encoding("cl100k_base")


logging.basicConfig(level=logging.INFO)
# NOTE(review): clobbers the Logger() binding above; all subsequent logging
# in this module goes through the stdlib logging module only.
logger = logging.getLogger(__name__)
|
|
def process_message(message, base_model="gpt-3.5-turbo", project_name="default", search_engine="duckduckgo"):
    """Run one user message through the agent and return its latest reply.

    Args:
        message: The user's request text.
        base_model: Model identifier forwarded to ``Agent``.
        project_name: Project whose conversation state is consulted/updated.
        search_engine: Search backend name; lower-cased before use.

    Returns:
        The most recent agent message for the project, or a human-readable
        error string on failure — this function never raises.
    """
    try:
        agent = Agent(base_model=base_model, search_engine=search_engine.lower())

        state = AgentState.get_latest_state(project_name)
        # A follow-up (subsequent) execution only makes sense when a prior run
        # exists AND has completed; in every other case start a fresh run.
        # (The original code had two identical execute() branches collapsed
        # here into one condition.)
        if state and AgentState.is_agent_completed(project_name):
            agent.subsequent_execute(message, project_name)
        else:
            agent.execute(message, project_name)

        messages = manager.get_messages(project_name)
        return messages[-1]["message"] if messages else "No response generated"
    except Exception as e:
        # Top-level boundary: record the full traceback (logger.error lost it)
        # and surface a readable error to the UI instead of crashing.
        logger.exception("Error processing message: %s", e)
        return f"An error occurred: {str(e)}"
|
|
def create_gradio_interface():
    """Build and return the Gradio Blocks UI for the assistant.

    Component creation order inside the ``with`` blocks determines the
    rendered layout, so the structure below is intentionally sequential.
    """
    with gr.Blocks(
        title="Devika AI Assistant",
        theme=gr.themes.Soft(),
        analytics_enabled=False  # no telemetry
    ) as interface:
        gr.Markdown("""
        # 🤖 Devika AI Assistant
        
        Devika is an advanced AI coding assistant that helps you with:
        - Writing and debugging code
        - Creating new projects
        - Answering programming questions
        - And much more!
        
        Simply type your request below and Devika will help you out.
        """)
        
        with gr.Row():
            # Left column: user inputs (message + model/search selectors).
            with gr.Column(scale=2):
                message_input = gr.Textbox(
                    label="Your Message",
                    placeholder="Type your coding request here...",
                    lines=3
                )
                
                with gr.Row():
                    model_dropdown = gr.Dropdown(
                        choices=[
                            # OpenAI
                            "gpt-3.5-turbo",
                            "gpt-4",
                            # Anthropic
                            "claude-3-opus",
                            # Perplexity sonar family
                            "sonar-reasoning-pro",
                            "sonar-reasoning",
                            "sonar-pro",
                            "sonar",
                            # Perplexity llama-based online models
                            "llama-3.1-sonar-small-128k-online",
                            "llama-3.1-sonar-large-128k-online",
                            "llama-3.1-sonar-huge-128k-online"
                        ],
                        value="gpt-3.5-turbo",
                        label="Model"
                    )
                    search_engine_dropdown = gr.Dropdown(
                        # Display names; process_message lower-cases before use.
                        choices=["DuckDuckGo", "Bing", "Google"],
                        value="DuckDuckGo",
                        label="Search Engine"
                    )
                
                submit_btn = gr.Button("Send Message", variant="primary")
            
            # Right column: the assistant's rendered response.
            with gr.Column(scale=3):
                output_box = gr.Markdown(label="Devika's Response")
        
        # Clickable example prompts; the lambda pins project_name="default"
        # since the examples UI only supplies three values.
        examples = [
            ["Create a React component for a todo list", "gpt-3.5-turbo", "DuckDuckGo"],
            ["Help me understand how to use Python decorators", "gpt-3.5-turbo", "DuckDuckGo"],
            ["Write a Node.js API endpoint for user authentication", "gpt-3.5-turbo", "DuckDuckGo"]
        ]
        
        gr.Examples(
            examples=examples,
            inputs=[message_input, model_dropdown, search_engine_dropdown],
            outputs=output_box,
            fn=lambda x, y, z: process_message(x, y, "default", z)
        )
        
        # NOTE(review): the inline invisible Textbox is a hack to feed the
        # constant project_name="default" as the third positional input to
        # process_message; argument order here must match its signature.
        submit_btn.click(
            fn=process_message,
            inputs=[message_input, model_dropdown, gr.Textbox(value="default", visible=False), search_engine_dropdown],
            outputs=output_box
        )
    
    return interface
|
|
| |
# Build the UI at import time so hosting platforms (e.g. Hugging Face Spaces)
# that import this module can discover the `interface` object directly.
interface = create_gradio_interface()


if __name__ == "__main__":
    interface.launch(
        server_name="0.0.0.0",  # bind all interfaces — required inside containers
        server_port=7860,       # conventional Gradio/Spaces port
        share=False,            # no public gradio.live tunnel
        debug=False,
        show_error=True         # surface handler exceptions in the UI
    )