import os
import json

import requests
from dotenv import load_dotenv
from openai import OpenAI
from pypdf import PdfReader
import gradio as gr

# Load environment variables securely
load_dotenv(override=True)

# Validate critical environment variables
REQUIRED_ENV_VARS = ['DEEPSEEK_API_KEY', 'PUSHOVER_TOKEN', 'PUSHOVER_USER']
missing_vars = [var for var in REQUIRED_ENV_VARS if not os.getenv(var)]
if missing_vars:
    raise EnvironmentError(f"Missing required environment variables: {missing_vars}")

# Initialize the OpenAI-compatible DeepSeek client with error handling
try:
    deepseek_client = OpenAI(
        api_key=os.getenv('DEEPSEEK_API_KEY'),
        base_url="https://api.deepseek.com/v1"
    )
except Exception as e:
    raise RuntimeError(f"Failed to initialize DeepSeek client: {str(e)}")


def push(text: str) -> bool:
    """Send a notification via Pushover with error handling."""
    try:
        response = requests.post(
            "https://api.pushover.net/1/messages.json",
            data={
                "token": os.getenv("PUSHOVER_TOKEN"),
                "user": os.getenv("PUSHOVER_USER"),
                "message": text,
            },
            timeout=10,  # Add timeout to prevent hanging
        )
        response.raise_for_status()
        return True
    except requests.exceptions.RequestException as e:
        print(f"Push notification failed: {str(e)}")
        return False


def record_user_details(email: str, name: str = "Name not provided", notes: str = "not provided") -> dict:
    """Record user contact information."""
    push(f"Recording {name} with email {email} and notes {notes}")
    return {"recorded": "ok", "email": email}


def record_unknown_question(question: str) -> dict:
    """Record unanswered questions for follow-up."""
    push(f"Recording question: {question}")
    return {"recorded": "ok", "question": question}


# Define tool schemas as constants for better maintainability
RECORD_USER_DETAILS_JSON = {
    "name": "record_user_details",
    "description": "Record user contact information for follow-up",
    "parameters": {
        "type": "object",
        "properties": {
            "email": {
                "type": "string",
                "description": "The email address of this user"
            },
            "name": {
                "type": "string",
                "description": "The user's name, if provided"
            },
            "notes": {
                "type": "string",
                "description": "Additional context about the conversation"
            }
        },
        "required": ["email"],
        "additionalProperties": False
    }
}

RECORD_UNKNOWN_QUESTION_JSON = {
    "name": "record_unknown_question",
    "description": "Record questions that couldn't be answered",
    "parameters": {
        "type": "object",
        "properties": {
            "question": {
                "type": "string",
                "description": "The question or request that needs forwarding"
            }
        },
        "required": ["question"],
        "additionalProperties": False
    }
}

TOOLS = [
    {"type": "function", "function": RECORD_USER_DETAILS_JSON},
    {"type": "function", "function": RECORD_UNKNOWN_QUESTION_JSON},
]


class ProfessionalAssistant:
    """Assistant for handling professional inquiries and qualifying prospects."""

    def __init__(self):
        self.deepseek = deepseek_client
        self.name = "Pagaebinyo Lucky Ben (Pagi)"
        self.linkedin = self._extract_linkedin_data()
        self.summary = self._load_summary()

    def _extract_linkedin_data(self) -> str:
        """Extract text from the LinkedIn PDF with error handling."""
        try:
            reader = PdfReader("me/linkedin.pdf")
            linkedin_text = ""
            for page in reader.pages:
                text = page.extract_text()
                if text:
                    linkedin_text += text + "\n"
            return linkedin_text
        except Exception as e:
            print(f"Error reading LinkedIn PDF: {str(e)}")
            return "LinkedIn information currently unavailable"

    def _load_summary(self) -> str:
        """Load the professional summary from file."""
        try:
            with open("me/summary.txt", "r", encoding="utf-8") as f:
                return f.read()
        except FileNotFoundError:
            print("Summary file not found")
            return "Professional summary unavailable"

    def _handle_tool_call(self, tool_calls) -> list:
        """Process function tool calls from the API."""
        results = []
        for tool_call in tool_calls:
            tool_name = tool_call.function.name
            try:
                arguments = json.loads(tool_call.function.arguments)
                print(f"Tool called: {tool_name}", flush=True)

                # Safely look up and call the tool function
                tool_func = globals().get(tool_name)
                if tool_func and callable(tool_func):
                    result = tool_func(**arguments)
                else:
                    result = {"error": f"Unknown tool: {tool_name}"}
            except json.JSONDecodeError:
                print(f"Error decoding arguments for {tool_name}")
                result = {"error": "Invalid tool arguments"}
            except Exception as e:
                print(f"Error executing tool {tool_name}: {str(e)}")
                result = {"error": str(e)}

            # Always append a tool message so every tool_call_id gets a response
            results.append({
                "role": "tool",
                "content": json.dumps(result),
                "tool_call_id": tool_call.id
            })
        return results

    def system_prompt(self) -> str:
        """Generate the system prompt for the assistant."""
        # LinkedIn text is truncated to avoid token limits.
        return f"""
You are a professional intake assistant for {self.name}, a Marine Engineer and Software Engineer.
Your ONLY job is to qualify prospects and collect contact information.

STRICT RESPONSE RULES:
1. ONLY provide basic professional background from the summary/LinkedIn data
2. For ANY specific technical questions: Use record_unknown_question tool
3. For ANY requests outside basic background info: redirect to contact form
4. Keep ALL responses under 2 sentences
5. Always end with directing them to the contact form

PROFESSIONAL BACKGROUND:
Summary: {self.summary}
LinkedIn: {self.linkedin[:1000]}...

INITIAL MESSAGE:
Hello, I'm the intake assistant for {self.name}. I can share basic professional background, but for specific project discussions, please use the contact form to connect directly.
"""

    def chat(self, message: str, history: list) -> str:
        """Process a chat message and return the assistant's response."""
        messages = [
            {"role": "system", "content": self.system_prompt()}
        ] + history + [
            {"role": "user", "content": message}
        ]
        try:
            response = self.deepseek.chat.completions.create(
                model="deepseek-chat",
                messages=messages,
                tools=TOOLS
            )
            if response.choices[0].finish_reason == "tool_calls":
                message_obj = response.choices[0].message
                tool_calls = message_obj.tool_calls
                results = self._handle_tool_call(tool_calls)

                # Add the tool responses and request a final completion
                messages.append(message_obj)
                messages.extend(results)
                final_response = self.deepseek.chat.completions.create(
                    model="deepseek-chat",
                    messages=messages
                )
                return final_response.choices[0].message.content
            else:
                return response.choices[0].message.content
        except Exception as e:
            print(f"Chat error: {str(e)}")
            return "I apologize, but I'm experiencing technical difficulties. Please try again later or use the contact form."


def extract_initial_message(system_text: str) -> str:
    """Extract the initial greeting from the system prompt."""
    lines = system_text.split('\n')
    for i, line in enumerate(lines):
        if "INITIAL MESSAGE:" in line:
            return lines[i + 1].strip() if i + 1 < len(lines) else "Hello, how can I help you today?"
    return "Hello, I'm the intake assistant. How can I help you?"


def ui_send(assistant: ProfessionalAssistant, user_msg: str, chat_state: list) -> tuple:
    """Process a user message and update the chat state."""
    if not user_msg.strip():
        return chat_state, chat_state  # Prevent empty messages
    try:
        reply = assistant.chat(user_msg, chat_state)
        updated_history = chat_state + [
            {"role": "user", "content": user_msg},
            {"role": "assistant", "content": reply},
        ]
        return updated_history, updated_history
    except Exception as e:
        print(f"UI send error: {str(e)}")
        error_message = "I apologize, but I'm experiencing technical difficulties. Please try again later."
        updated_history = chat_state + [
            {"role": "user", "content": user_msg},
            {"role": "assistant", "content": error_message},
        ]
        return updated_history, updated_history
def save_contact(name: str, email: str, notes: str) -> str:
    """Save contact information with validation."""
    if not name.strip():
        return "❌ Please provide your full name."
    if not email.strip() or "@" not in email:
        return "❌ Please provide a valid email address."
    if not notes.strip():
        return "❌ Please describe your project needs."
    try:
        record_user_details(
            email=email.strip(),
            name=name.strip(),
            notes=notes.strip(),
        )
        return "✅ Inquiry submitted successfully! Lt. Ben will respond within 24 hours."
    except Exception as e:
        return f"❌ Error submitting inquiry: {str(e)}"
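# Illustrative expectations for save_contact()'s validation rules -- a sketch
# for quick manual checks; the inputs below are hypothetical, and the success
# path is omitted because it would send a real Pushover notification.
#
#   save_contact("", "ada@example.com", "ERP rollout")  -> "❌ Please provide your full name."
#   save_contact("Ada", "not-an-email", "ERP rollout")  -> "❌ Please provide a valid email address."
#   save_contact("Ada", "ada@example.com", "")          -> "❌ Please describe your project needs."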
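# A minimal console harness for exercising ProfessionalAssistant.chat() without
# the Gradio UI -- a sketch only; the helper name `run_console_chat` and the
# quit keywords are assumptions, not part of the original app. It relies on the
# same DeepSeek/Pushover environment variables validated at import time.
def run_console_chat() -> None:
    assistant = ProfessionalAssistant()
    history: list = []
    while True:
        user_msg = input("You: ").strip()
        if not user_msg or user_msg.lower() in {"quit", "exit"}:
            break
        reply = assistant.chat(user_msg, history)
        history += [
            {"role": "user", "content": user_msg},
            {"role": "assistant", "content": reply},
        ]
        print(f"Assistant: {reply}")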
def create_ui():
    """Create and configure the Gradio UI."""
    assistant = ProfessionalAssistant()

    with gr.Blocks(
        theme=gr.themes.Soft(),
        title="Lt. Pagaebinyo Lucky Ben - Professional Assistant",
        css="""
        .gradio-container { max-width: 1400px !important; margin: 0 auto !important; background: #f8fafc; }
        .hero { text-align: center; margin-bottom: 2rem; background: linear-gradient(135deg, #1e40af 0%, #3730a3 100%); color: white; padding: 2.5rem; border-radius: 16px; box-shadow: 0 10px 25px rgba(30, 64, 175, 0.2); }
        .hero h1 { font-size: 2.5rem; margin-bottom: 0.5rem; font-weight: 700; color: white; }
        .hero p { font-size: 1.2rem; color: rgba(255, 255, 255, 0.95); font-weight: 400; }
        .contact-form { background: linear-gradient(135deg, #1e40af 0%, #3730a3 100%); border-radius: 16px; padding: 2rem; color: white; box-shadow: 0 10px 25px rgba(30, 64, 175, 0.2); }
        .contact-form h3 { color: white !important; margin-bottom: 1rem !important; font-size: 1.5rem !important; font-weight: 600 !important; }
        .contact-form p { color: rgba(255, 255, 255, 0.9) !important; margin-bottom: 1.5rem; font-weight: 400; }
        .expertise-section { margin-top: 3rem; padding: 2rem 0; background: #f8fafc; }
        .expertise-title { text-align: center; font-size: 2rem; font-weight: 700; margin-bottom: 2rem; color: #1f2937; }
        .expertise-grid { display: grid; grid-template-columns: repeat(auto-fit, minmax(320px, 1fr)); gap: 2rem; margin-top: 2rem; }
        .expertise-card { background: white; padding: 2rem; border-radius: 16px; border-top: 4px solid #1e40af; box-shadow: 0 4px 20px rgba(0, 0, 0, 0.08); transition: all 0.3s ease; position: relative; }
        .expertise-card:hover { transform: translateY(-8px); box-shadow: 0 12px 30px rgba(0, 0, 0, 0.15); border-top-color: #3730a3; }
        .expertise-card .icon { width: 48px; height: 48px; background: linear-gradient(135deg, #1e40af 0%, #3730a3 100%); border-radius: 12px; display: flex; align-items: center; justify-content: center; margin-bottom: 1.5rem; font-size: 24px; color: white; font-weight: 600; }
        .expertise-card h4 { color: #1f2937; font-size: 1.3rem; font-weight: 700; margin-bottom: 1rem; line-height: 1.3; }
        .expertise-card p { color: #4b5563; line-height: 1.7; font-size: 0.95rem; }
        .chat-container { background: white; border-radius: 16px; box-shadow: 0 4px 20px rgba(0, 0, 0, 0.08); padding: 1.5rem; border: 1px solid #e5e7eb; }
        .chat-title { color: #1f2937 !important; font-size: 1.4rem !important; font-weight: 600 !important; margin-bottom: 1rem !important; }
        /* Gradio component overrides */
        .gr-button-primary { background: linear-gradient(135deg, #1e40af 0%, #3730a3 100%) !important; border: none !important; font-weight: 600 !important; }
        .gr-button-primary:hover { background: linear-gradient(135deg, #1d4ed8 0%, #4338ca 100%) !important; transform: translateY(-1px); box-shadow: 0 4px 12px rgba(30, 64, 175, 0.3) !important; }
        /* Input field styling */
        .gr-textbox textarea, .gr-textbox input { border: 2px solid #e5e7eb !important; border-radius: 8px !important; }
        .gr-textbox textarea:focus, .gr-textbox input:focus { border-color: #1e40af !important; box-shadow: 0 0 0 3px rgba(30, 64, 175, 0.1) !important; }
        @media (max-width: 768px) {
            .gradio-container { padding: 1rem !important; }
            .hero h1 { font-size: 2rem !important; }
            .hero p { font-size: 1rem !important; }
            .expertise-grid { grid-template-columns: 1fr; gap: 1.5rem; }
            .contact-form, .chat-container { padding: 1.5rem; }
            .expertise-card { padding: 1.5rem; }
        }
        """
    ) as demo:
        # Header
        gr.HTML("""
            <div class="hero">
                <h1>Lt. Pagaebinyo Lucky Ben (Pagi)</h1>
                <p>Marine Engineer • Software Engineer • AI Workflow Orchestration Specialist</p>
            </div>
        """)

        with gr.Row():
            # Chat interface
            with gr.Column(scale=3, elem_classes="chat-container"):
                gr.HTML('<h3 class="chat-title">Professional Inquiry Assistant</h3>')
                chatbot = gr.Chatbot(
                    type="messages",
                    height=450,
                    show_copy_button=True,
                    show_label=False
                )
                user_input = gr.Textbox(
                    placeholder="Ask about professional background or describe your project needs...",
                    label="Your Message",
                    max_lines=3
                )
                submit_btn = gr.Button("Send Message", variant="primary", size="lg")

            # Contact form
            with gr.Column(scale=2, elem_classes="contact-form"):

                gr.HTML("<h3>Direct Contact</h3>")
                gr.HTML("<p>For detailed project discussions or technical consultations:</p>")
                lead_name = gr.Textbox(
                    label="Full Name",
                    max_lines=1,
                    placeholder="Enter your full name"
                )
                lead_email = gr.Textbox(
                    label="Email",
                    max_lines=1,
                    placeholder="your.email@company.com"
                )
                lead_notes = gr.Textbox(
                    label="Project Details",
                    placeholder="Describe your needs, timeline, budget, and specific requirements...",
                    lines=6
                )
                save_btn = gr.Button("Submit Inquiry", variant="primary", size="lg")
                save_status = gr.Markdown()

        # Expertise section
        gr.HTML("""
            <div class="expertise-section">
                <h2 class="expertise-title">Areas of Expertise</h2>
                <div class="expertise-grid">
                    <div class="expertise-card">
                        <h4>Marine Engineering</h4>
                        <p>Naval systems design and optimization, generator management systems, preventive maintenance protocols, fleet operations, and marine power plant efficiency. Extensive experience with diesel engines, electrical systems, and shipboard automation.</p>
                    </div>
                    <div class="expertise-card">
                        <h4>Software Development</h4>
                        <p>Full-stack development with Python/FastAPI, database design and optimization, ERP system implementation, custom web applications, API development, and system integration. Strong focus on scalable, maintainable solutions.</p>
                    </div>
                    <div class="expertise-card">
                        <div class="icon">🔧</div>
                        <h4>AI Implementation</h4>
                        <p>AI workflow orchestration, process automation, predictive maintenance systems, machine learning integration, and intelligent decision support systems. Specialized in bridging AI capabilities with real-world engineering applications.</p>
                    </div>
                </div>
            </div>
        """)

        # Initialize chat state
        chat_state = gr.State([])

        # Event handlers
        def handle_submit(user_msg, state):
            if not user_msg.strip():
                return state, state, user_msg
            new_state, _ = ui_send(assistant, user_msg, state)
            return new_state, new_state, ""

        submit_btn.click(
            fn=handle_submit,
            inputs=[user_input, chat_state],
            outputs=[chatbot, chat_state, user_input]
        )
        user_input.submit(
            fn=handle_submit,
            inputs=[user_input, chat_state],
            outputs=[chatbot, chat_state, user_input]
        )
        save_btn.click(
            fn=save_contact,
            inputs=[lead_name, lead_email, lead_notes],
            outputs=[save_status]
        )

        # Initialize with welcome message
        def init_chat():
            initial_msg = extract_initial_message(assistant.system_prompt())
            initial_state = [{"role": "assistant", "content": initial_msg}]
            return initial_state, initial_state

        demo.load(
            fn=init_chat,
            outputs=[chatbot, chat_state]
        )

    return demo


if __name__ == "__main__":
    demo = create_ui()
    demo.launch(share=True)