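"""Codette Terminal (Hugging Face Edition).

A small command-line chat loop: user input is fed to a Hugging Face
text-generation pipeline and the reply is printed with a typewriter effect.
Assumes `transformers` plus a backend such as PyTorch are installed
(e.g. `pip install transformers torch`).
"""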
import time
import sys
import os
from transformers import pipeline, set_seed

# Initialize the Hugging Face text-generation pipeline.
# The model must be a text-generation checkpoint; "gpt2" is used here as a
# small example, and you can swap it for any other HF text-generation model.
generator = pipeline("text-generation", model="gpt2")
set_seed(42)

# Simulate typing output
def type_out(text, speed=0.02):
    for char in text:
        sys.stdout.write(char)
        sys.stdout.flush()
        time.sleep(speed)
    print()

# Clear terminal screen
def clear_screen():
    os.system('cls' if os.name == 'nt' else 'clear')

# Generate an AI response using the HF pipeline
def codette_generate(prompt: str) -> str:
    # max_new_tokens caps only the generated continuation, and
    # return_full_text=False keeps the echoed prompt out of the reply.
    output = generator(prompt, max_new_tokens=100, num_return_sequences=1, return_full_text=False)
    return output[0]['generated_text']

# Launch the terminal interface
def launch_codette_terminal():
    clear_screen()
    type_out("🧬 Welcome to Codette Terminal (Hugging Face Edition) 🧬", 0.03)
    type_out("Ask anything. Type 'exit' to quit.\n", 0.02)

    while True:
        try:
            user_input = input("🖋️ You > ").strip()
            if user_input.lower() in ['exit', 'quit']:
                type_out("\n🧠 Codette signing off... Stay recursive 🌀\n", 0.04)
                break
            elif not user_input:
                continue
            type_out("πŸ’­ Thinking...", 0.04)
            time.sleep(1)
            response = codette_generate(user_input)
            type_out(response.strip(), 0.01)
        except KeyboardInterrupt:
            print("\n[Interrupted] Use 'exit' to quit.\n")
        except Exception as e:
            type_out(f"[Error] {e}", 0.01)

if __name__ == "__main__":
    launch_codette_terminal()