import gradio as gr
from detector import CustomDetector
import logging
# Set up logging to surface progress messages and detailed errors
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Initialize the detector
try:
    detector = CustomDetector()
    logger.info("Detector initialized successfully")
except Exception as e:
    logger.error(f"Failed to initialize detector: {str(e)}")
    raise RuntimeError(f"Failed to initialize detector: {str(e)}")
def detect_text(text):
    """
    Run the detector on the input text and return a verdict.
    Returns 'Most likely AI-generated' when the score exceeds the 0.3 threshold,
    otherwise 'Most likely human-generated'.
    """
    if not text.strip():
        return "Please enter some text."
    try:
        logger.info(f"Processing text: {text[:50]}...")
        score = detector.my_detector([text])[0]  # Detector expects a list; take the single score
        result = "Most likely AI-generated" if score > 0.3 else "Most likely human-generated"
        logger.info(f"Result: {result}")
        return result
    except Exception as e:
        error_msg = f"Error: {str(e)}"
        logger.error(error_msg)
        return error_msg
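# Hypothetical local sanity check (requires the detector weights to be available):
#     print(detect_text("The quick brown fox jumps over the lazy dog."))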
# Minimal CSS for a clean, professional look
custom_css = """
/* General styling */
body {
font-family: -apple-system, BlinkMacSystemFont, sans-serif;
background: #f5f7fa;
color: #1a1a1a;
line-height: 1.6;
}
/* Flexible container */
.gradio-container {
max-width: 95%;
width: 600px;
margin: 1rem auto;
padding: 1rem;
background: #ffffff;
border-radius: 8px;
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1);
}
/* Title */
h1 {
font-size: 2rem;
font-weight: 600;
text-align: center;
color: #2c3e50;
margin-bottom: 1rem;
}
/* Input textbox */
textarea {
border: 1px solid #d1d5db !important;
border-radius: 6px !important;
padding: 0.75rem !important;
font-size: 1rem !important;
}
/* Output textbox */
.output-text {
background: #f9fafb !important;
border-radius: 6px !important;
padding: 1rem !important;
font-size: 1rem !important;
border: 1px solid #d1d5db !important;
}
/* Button */
button {
background: #4b5563 !important;
color: white !important;
border: none !important;
padding: 0.5rem 1rem !important;
border-radius: 6px !important;
font-weight: 500 !important;
}
/* Accordion */
.gr-accordion {
margin-top: 1rem;
border: 1px solid #d1d5db;
border-radius: 6px;
}
/* Responsive design */
@media (max-width: 600px) {
.gradio-container {
margin: 0.5rem;
padding: 0.5rem;
width: 98%;
}
h1 {
font-size: 1.6rem;
}
}
"""
# Citation for the expandable tab
citation_markdown = """
## Citation
Please cite our work as:
**Zero-Shot Statistical Tests for LLM-Generated Text Detection using Finite Sample Concentration Inequalities**
Tara Radvand, Mojtaba Abdolmaleki, Mohamed Mostagir, Ambuj Tewari
[arXiv:2501.02406](https://arxiv.org/abs/2501.02406)
Year: 2025
"""
# Set up the Gradio interface
with gr.Blocks(css=custom_css, theme=None) as iface:
    gr.Markdown("# AI-Generated Text Detector")
    gr.Markdown(
        "Enter text to check whether it was generated by an AI model. "
        "Powered by a custom detector built on tiiuae/falcon-rw-1b."
    )
    input_text = gr.Textbox(
        lines=5,
        placeholder="Enter text here to check if it's AI-generated...",
        label="Input Text",
    )
    output = gr.Textbox(label="Detection Result")
    gr.Button("Detect").click(
        fn=detect_text,
        inputs=input_text,
        outputs=output,
    )
    with gr.Accordion("Citation", open=False):
        gr.Markdown(citation_markdown)
# Launch the app
if __name__ == "__main__":
iface.launch() |