<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
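<!-- Keep the full-height flex layout usable on small screens. -->
<meta name="viewport" content="width=device-width, initial-scale=1" />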
<title>On-Device AI Chat</title>
<style>
body {
margin: 0;
font-family: sans-serif;
background: #111;
color: #eee;
display: flex;
flex-direction: column;
height: 100vh;
}
header {
padding: 10px;
background: #222;
display: flex;
gap: 10px;
align-items: center;
}
#chat {
flex: 1;
overflow-y: auto;
padding: 10px;
display: flex;
flex-direction: column;
gap: 8px;
}
.msg {
display: flex;
gap: 8px;
align-items: flex-start;
}
.avatar {
width: 24px;
height: 24px;
border-radius: 50%;
background: #444;
color: #ccc;
font-size: 12px;
font-weight: bold;
display: flex;
align-items: center;
justify-content: center;
}
.bubble {
background: #333;
padding: 8px 10px;
border-radius: 8px;
max-width: 80%;
white-space: pre-wrap;
}
.user .bubble {
background: #2a2a2a;
}
.error .bubble {
background: #4a1a1a;
color: #f88;
}
form {
padding: 10px;
background: #222;
display: flex;
gap: 10px;
}
textarea {
flex: 1;
padding: 8px;
border-radius: 6px;
border: none;
background: #1a1a1a;
color: #eee;
}
button {
padding: 6px 10px;
background: #444;
color: #eee;
border: none;
border-radius: 6px;
}
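/* Dim the controls that setBusy() disables while a model loads or generates. */
button:disabled,
textarea:disabled,
select:disabled {
opacity: 0.5;
}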
</style>
</head>
<body>
<header>
<label for="model"><strong>Model:</strong></label>
<select id="model">
<option value="Xenova/distilgpt2">distilgpt2</option>
<option value="Xenova/phi-3-mini-4k-instruct">phi-3-mini</option>
<option value="Xenova/t5-small">t5-small</option>
</select>
<span id="status">Idle</span>
</header>
<div id="chat" aria-live="polite" aria-busy="false"></div>
<form id="composer">
<textarea id="input" placeholder="Say something…" rows="2"></textarea>
<button id="send" type="submit">Send</button>
</form>
<script type="module">
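// Everything below runs in the browser: Transformers.js downloads the selected
// model from the Hugging Face Hub on first use, caches it, and runs inference locally.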
(async function () {
const elChat = document.getElementById('chat');
const elInput = document.getElementById('input');
const elSend = document.getElementById('send');
const elForm = document.getElementById('composer');
const elModel = document.getElementById('model');
const elStatus = document.getElementById('status');
let pipeline, AutoTokenizer;
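// Import Transformers.js from the CDN at runtime; if that fails (offline, blocked
// CDN), show an error bubble and bail out before wiring up the rest of the UI.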
try {
const lib = await import('https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.7.1');
pipeline = lib.pipeline;
AutoTokenizer = lib.AutoTokenizer;
} catch (err) {
// addMessage is not defined yet, so build the error bubble by hand;
// textContent keeps the error string from being parsed as HTML.
const div = document.createElement('div');
div.className = 'msg error';
const avatar = document.createElement('div');
avatar.className = 'avatar';
avatar.textContent = '!';
const bubble = document.createElement('div');
bubble.className = 'bubble';
bubble.textContent = `❌ Failed to load Transformers.js: ${err.message}`;
div.append(avatar, bubble);
elChat.appendChild(div);
console.error('Transformers.js load error:', err);
return;
}
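// Mutable app state; `resources` caches one loaded pipeline (and optional
// tokenizer) per model id so switching back to a model is instant.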
const state = {
busy: false,
modelId: elModel.value,
history: [],
resources: new Map(),
};
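// Toggle the working state: disable the composer and model picker, flag the
// chat log as busy for assistive tech, and update the status label.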
const setBusy = (busy, label = '') => {
state.busy = busy;
[elInput, elSend, elModel].forEach(el => el.disabled = busy);
elChat.setAttribute('aria-busy', String(busy));
elStatus.textContent = busy ? label : 'Idle';
};
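// Append a chat bubble for the given role (with an optional `error` variant)
// and return the bubble element so callers can update its text in place.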
const addMessage = (role, text, variant = '') => {
const msg = document.createElement('div');
msg.className = `msg ${role} ${variant}`;
const avatar = document.createElement('div');
avatar.className = 'avatar';
avatar.textContent = role === 'user' ? 'U' : (variant === 'error' ? '!' : 'AI');
const bubble = document.createElement('div');
bubble.className = 'bubble';
bubble.textContent = text;
msg.append(avatar, bubble);
elChat.appendChild(msg);
elChat.scrollTop = elChat.scrollHeight;
return bubble;
};
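// Log an error and surface it in the chat as an error-styled assistant message.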
const pushError = (err, context = 'Error') => {
const msg = `${context}: ${err?.message || err}`;
console.error(context, err);
addMessage('assistant', msg, 'error');
state.history.push({ role: 'assistant', content: msg });
elStatus.textContent = 'Error';
};
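// Lazily create and cache the pipeline for a model id. T5 checkpoints use the
// text2text-generation task; everything else is treated as plain text-generation.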
const ensureModelLoaded = async (modelId) => {
if (state.resources.has(modelId)) return state.resources.get(modelId);
setBusy(true, 'Loading…');
try {
const task = modelId.includes('t5') ? 'text2text-generation' : 'text-generation';
const pipe = await pipeline(task, modelId);
// The tokenizer is optional; it is only needed for chat templating.
const tokenizer = await AutoTokenizer.from_pretrained(modelId).catch(() => null);
const res = { task, pipe, tokenizer };
state.resources.set(modelId, res);
return res;
} finally {
// Clear the busy state even if loading fails, so the controls are not left disabled.
setBusy(false);
}
};
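// Prefer the model's own chat template when the tokenizer provides one;
// otherwise fall back to a simple "role: content" transcript.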
const buildPrompt = (tokenizer, task, history) => {
const messages = [...history];
if (tokenizer?.apply_chat_template) {
try {
return tokenizer.apply_chat_template(messages, { add_generation_prompt: true, tokenize: false });
} catch { /* fall back to the plain transcript below */ }
}
return messages.map(m => `${m.role}: ${m.content}`).join('\n') + '\nassistant:';
};
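// Render the user message, run the pipeline over the whole conversation so far,
// and fill the placeholder assistant bubble with the generated reply.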
const generateReply = async (text) => {
addMessage('user', text);
state.history.push({ role: 'user', content: text });
const botBubble = addMessage('assistant', '…');
try {
const { task, pipe, tokenizer } = await ensureModelLoaded(state.modelId);
setBusy(true, 'Generating…');
const prompt = buildPrompt(tokenizer, task, state.history);
const output = await pipe(prompt, {
max_new_tokens: 100,
temperature: 0.7,
top_p: 0.95,
do_sample: true,
repetition_penalty: 1.1,
return_full_text: false,
});
const result = output?.[0] || {};
const textOut = result.generated_text || result.summary_text || result.translation_text || '(no output)';
botBubble.textContent = textOut.trim();
state.history.push({ role: 'assistant', content: textOut.trim() });
} catch (err) {
pushError(err, 'Generation failed');
botBubble.textContent = '(error)';
} finally {
setBusy(false);
}
};
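// Send on submit; ignore empty input and re-entrant submits while busy.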
elForm.addEventListener('submit', async (e) => {
e.preventDefault();
const text = elInput.value.trim();
if (!text || state.busy) return;
elInput.value = '';
try {
await generateReply(text);
} catch (err) {
pushError(err, 'Submit error');
}
});
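// Enter sends the message; Shift+Enter inserts a newline in the textarea.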
elInput.addEventListener('keydown', (e) => {
if (e.key === 'Enter' && !e.shiftKey) {
e.preventDefault();
elForm.requestSubmit();
}
});
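// Switching models records the new choice and eagerly loads its pipeline.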
elModel.addEventListener('change', async () => {
if (state.busy) return;
state.modelId = elModel.value;
addMessage('assistant', `Switched to model: ${state.modelId}`);
try {
await ensureModelLoaded(state.modelId);
} catch (err) {
pushError(err, 'Model load failed');
}
});
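// Surface uncaught errors and unhandled promise rejections in the chat log.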
window.addEventListener('error', e => pushError(e.error || e.message, 'Uncaught error'));
window.addEventListener('unhandledrejection', e => pushError(e.reason, 'Unhandled rejection'));
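// Greet the user, then warm up the default model shortly after startup.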
addMessage('assistant', 'Hello! Choose a model and say something.');
setTimeout(() => ensureModelLoaded(state.modelId).catch(err => pushError(err, 'Initial load')), 100);
})();
</script>
</body>
</html>