import gradio as gr
import pandas as pd
import plotly.graph_objects as go
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.messages import HumanMessage
import os
import random
import json
# --- Configuration ---
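# GEMINI_API_KEY is expected as a Space secret; analyze_resonance() returns an error dict
# (surfaced in the report pane) when it is missing.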
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY")
# --- Stage 1: Data Ingestion (Simulation) ---
def fetch_market_chatter(niche_description: str) -> list[str]:
    """Simulates scraping Reddit, Hacker News, etc., for a given niche."""
    print(f"Simulating scraping for: {niche_description}")
    base_comments = [
        "Ugh, another project management tool that charges per user. I'm a solo founder, this kills me.",
        "I love the idea of Notion but it's just too slow and bloated now. I need something faster.",
        "Why can't any of these tools just have a simple, reliable integration with Google Calendar?",
        "I'm a writer, not a project manager. I just need a clean way to organize my chapters and research.",
        "Asana is too complex. Trello is too simple. Is there anything in between?",
        "The real magic would be a tool that automatically generates a weekly summary of my progress.",
        "Their customer support is a joke. Took three days to get a reply on a critical bug.",
        "The 'all-in-one' promise is a lie. It does 10 things poorly instead of one thing well.",
        "If someone built a beautiful, minimalist PM tool for visual artists, I'd pay $50/month in a heartbeat.",
    ]
    # Return a random subset each run to mimic the variability of a real scrape.
    return random.sample(base_comments, k=random.randint(5, len(base_comments)))
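# Optional sketch (not wired into the app): one way to pull real comments from the public
# Algolia Hacker News Search API (https://hn.algolia.com/api/v1/search) instead of the
# simulated sample above. Assumes the `requests` package is available; the field names
# ("hits", "comment_text") follow that public API.
def fetch_hn_chatter(query: str, limit: int = 20) -> list[str]:
    import requests  # local import so the demo keeps running without this dependency
    resp = requests.get(
        "https://hn.algolia.com/api/v1/search",
        params={"query": query, "tags": "comment", "hitsPerPage": limit},
        timeout=10,
    )
    resp.raise_for_status()
    # Comment bodies come back as HTML in `comment_text`; keep only non-empty ones.
    return [hit["comment_text"] for hit in resp.json().get("hits", []) if hit.get("comment_text")]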
# --- Stage 2 & 3: AI Analysis Pipeline ---
def analyze_resonance(niche_description: str, comments: list[str]) -> dict:
    """Uses an LLM to perform topic modeling, pain point extraction, and sentiment analysis."""
    if not GEMINI_API_KEY:
        return {"error": "GEMINI_API_KEY not set in Space secrets."}
    try:
        llm = ChatGoogleGenerativeAI(model="gemini-1.5-pro-latest", google_api_key=GEMINI_API_KEY)
        comments_formatted = "\n".join([f"- \"{c}\"" for c in comments])
        prompt = f"""
You are a market research analyst with superhuman insight. Analyze the following raw market chatter (comments from Reddit, Hacker News, etc.) for a proposed product idea.

**Proposed Product Idea:** "{niche_description}"

**Raw Market Chatter:**
{comments_formatted}

**Your Task:**
Analyze the chatter and return a valid JSON object with the following structure. Do not include any text, code block markers, or explanations outside the single JSON object.
{{
    "pain_points": [
        "A summary of the most common complaint or frustration.",
        "A summary of the second most common complaint.",
        "A summary of a third, more niche complaint."
    ],
    "magic_words": [
        "A positive, benefit-oriented word or phrase people use.",
        "Another evocative word people use to describe their ideal solution.",
        "A third powerful, emotional word."
    ],
    "target_villain": "The name of a competitor or a type of product that people frequently complain about.",
    "unserved_tribe": "A description of a specific user subgroup whose needs are not being met.",
    "resonance_score": <a number between 1 and 100 representing how well the proposed product idea fits the market chatter>,
    "resonance_justification": "A one-sentence explanation for the score you gave."
}}
"""
        response = llm.invoke([HumanMessage(content=prompt)])
        # Strip any markdown code fences the model may add so the payload parses as JSON.
        json_str = response.content.strip().replace("```json", "").replace("```", "")
        return json.loads(json_str)
    except Exception as e:
        # Covers both API failures and JSON decoding errors.
        return {"error": f"Failed to analyze resonance. The AI model may have returned an invalid format. Details: {e}"}
# --- Main Orchestrator ---
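# run_precog_analysis is a generator: each `yield` emits a 4-tuple that Gradio maps, in
# order, onto [report_output, strategy_output, status_output, submit_btn], so the UI
# updates progressively while the pipeline runs.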
def run_precog_analysis(niche_description: str):
    """Orchestrates the entire pipeline, streaming progress updates to the UI."""
    yield "Initializing Pre-Cog Engine...", None, None, gr.Button("Analyzing...", interactive=False)

    # Stage 1: gather (simulated) market chatter
    comments = fetch_market_chatter(niche_description)
    yield f"Scanning Market Chatter... ({len(comments)} data points found)", None, None, gr.Button("Analyzing...", interactive=False)

    # Stages 2 & 3: LLM analysis
    analysis_result = analyze_resonance(niche_description, comments)
    if "error" in analysis_result:
        # Display the error in the main report area for visibility
        yield f"## Analysis Error\n\n**Details:** {analysis_result['error']}", "", "Error", gr.Button("Analyze Again", interactive=True)
        return

    # --- Prepare Outputs ---
    # Pad the lists so the report still renders if the model returns fewer than three items.
    pain_points = (analysis_result.get("pain_points") or []) + ["N/A"] * 3
    magic_words = (analysis_result.get("magic_words") or []) + ["N/A"] * 3
    score = analysis_result.get("resonance_score", 0)
    color = "green" if score > 65 else "orange" if score > 40 else "red"

    report_md = f"""
<div style="text-align:center; border: 2px solid {color}; border-radius:10px; padding:20px;">
    <h2 style="margin:0;">Market Resonance Score</h2>
    <p style="font-size: 80px; font-weight:bold; margin:0; color:{color};">{score}/100</p>
    <p style="margin:0; font-style:italic;">"{analysis_result.get('resonance_justification', 'No justification provided.')}"</p>
</div>

### 🔥 Top 3 Unspoken Pain Points
1. **{pain_points[0]}**
2. **{pain_points[1]}**
3. **{pain_points[2]}**

### ✨ Your "Magic Words" for Marketing
Use these exact words in your landing page copy. This is how your customers talk.
- `{magic_words[0]}`
- `{magic_words[1]}`
- `{magic_words[2]}`
"""

    strategy_md = f"""
### 🎯 Your Go-to-Market Strategy

**Your "Villain":**
Position your product as the direct antidote to **{analysis_result.get('target_villain', 'existing complex tools')}**. Your marketing should say, "Tired of [Villain's problem]? We fixed it."

**Your Unserved "Tribe":**
Focus your initial launch on this niche group: **{analysis_result.get('unserved_tribe', 'no specific tribe identified')}**. They are desperately looking for a solution and will become your first evangelists.
"""

    yield report_md, strategy_md, "Analysis Complete", gr.Button("Analyze Resonance", interactive=True)
# --- Gradio UI ---
with gr.Blocks(theme=gr.themes.Glass(), css=".gradio-container{max-width: 800px !important}") as demo:
    gr.Markdown("# 🔮 Pre-Cog: The Market Resonance Engine")
    gr.Markdown("Stop guessing. Analyze real-time market chatter to see if your idea will succeed *before* you build it.")

    with gr.Column():
        niche_input = gr.Textbox(label="Describe Your Product Idea or Niche", placeholder="e.g., 'An AI-powered project management tool for solopreneurs' or 'A better way to learn guitar online'")
        submit_btn = gr.Button("Analyze Resonance", variant="primary")

    gr.Markdown("---")

    with gr.Row():
        with gr.Column(scale=2):
            gr.Markdown("## Resonance Report")
            report_output = gr.Markdown()
        with gr.Column(scale=1):
            gr.Markdown("## Strategy")
            strategy_output = gr.Markdown()

    status_output = gr.Markdown()

    submit_btn.click(
        fn=run_precog_analysis,
        inputs=[niche_input],
        outputs=[report_output, strategy_output, status_output, submit_btn]
    )

    gr.Markdown("---")
    gr.Markdown("### This is a demo of the Pre-Cog Engine. \n The Pro version provides access to live data streams, deeper analysis, and continuous market monitoring. \n **[Inquire About Early Access on Gumroad](https://gumroad.com/)**")
if __name__ == "__main__":
    demo.launch()