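"""Gradio demo: classify input text as gibberish vs. proper English using the
Devishetty100/redmoon-gibberishdetective text-classification model."""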
import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification
import torch

MODEL_NAME = "Devishetty100/redmoon-gibberishdetective"

try:
    # Try loading with pipeline first
    classifier = pipeline(
        "text-classification",
        model=MODEL_NAME
    )
except Exception:
    try:
        # Fallback: Load components separately
        tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")
        model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)
        classifier = pipeline(
            "text-classification",
            model=model,
            tokenizer=tokenizer,
            device=0 if torch.cuda.is_available() else -1
        )
    except Exception as e:
        # Both loading strategies failed; abort startup with a clear message.
        raise RuntimeError(f"Failed to load model: {e}\n\nPlease ensure all model files exist in the repository.") from e

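# Run the classifier on the user's text and return the predicted label with its confidence.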
def predict(text):
    try:
        if not text.strip():
            return {"Error": "Please enter some text"}
        
        result = classifier(text)[0]
        return {
            "Prediction": result['label'],
            "Confidence": f"{result['score']:.2%}"
        }
    except Exception as e:
        return {"Error": str(e)}

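# Build the Gradio UI: text input, JSON results panel, example prompts, and an Analyze button.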
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown(f"""
    # 🔍 Gibberish Detective
    *Using model: {MODEL_NAME}*
    """)
    
    with gr.Row():
        input_text = gr.Textbox(label="Input Text", placeholder="Enter text here...", lines=3)
        output_json = gr.JSON(label="Results")
    
    gr.Examples(
        examples=[
            ["This is proper English"],
            ["Xkjsd hfkjshdf 9834 kjsdhf!"],
            ["Hello world"],
            ["Hfjsd kjsadf lkjsdf 1234!"]
        ],
        inputs=input_text
    )
    
    input_text.submit(predict, inputs=input_text, outputs=output_json)
    gr.Button("Analyze").click(predict, inputs=input_text, outputs=output_json)

if __name__ == "__main__":
    demo.launch()