<!-- chat-with-df / src/routes/+page.svelte -->
<script lang="ts">
import Dataframe from "@gradio/dataframe";
import { onMount } from 'svelte';
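// A single chat message rendered in the chat panel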
interface Message {
text: string;
sender: "user" | "bot";
}
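// Sample employee records shown in the editable Dataframe; the system prompt is
// rebuilt from this value on every question, so edits are reflected in answers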
let value = {
data: [
["Alice", 25, "Yes", "2024-01-15", "Engineer", "San Francisco", 85000, "Active"],
["Bob", 30, "No", "2024-02-20", "Designer", "New York", 75000, "Inactive"],
["Charlie", 35, "Yes", "2024-03-10", "Manager", "Chicago", 95000, "Active"],
["Diana", 28, "No", "2024-04-05", "Developer", "Austin", 80000, "Inactive"],
["Eve", 32, "Yes", "2024-05-12", "Analyst", "Seattle", 70000, "Active"],
["Frank", 29, "Yes", "2024-06-18", "Engineer", "Boston", 82000, "Active"],
["Grace", 27, "No", "2024-07-22", "Designer", "Portland", 68000, "Inactive"],
["Henry", 33, "Yes", "2024-08-30", "Manager", "Denver", 88000, "Active"],
["Ivy", 26, "Yes", "2024-09-14", "Developer", "Miami", 72000, "Active"],
["Jack", 31, "No", "2024-10-08", "Analyst", "Atlanta", 65000, "Inactive"],
["Kate", 34, "Yes", "2024-11-25", "Engineer", "Phoenix", 90000, "Active"],
["Liam", 28, "Yes", "2024-12-01", "Designer", "Dallas", 78000, "Active"],
["Maya", 29, "No", "2025-01-15", "Manager", "Houston", 92000, "Inactive"],
["Noah", 36, "Yes", "2025-02-20", "Developer", "Philadelphia", 85000, "Active"],
["Olivia", 27, "Yes", "2025-03-10", "Analyst", "Detroit", 70000, "Active"]
],
headers: ["Name", "Age", "Active", "Join Date", "Role", "Location", "Salary", "Status"],
metadata: null
};
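// Per-column datatypes for the Dataframe, aligned with the headers above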
let datatype = ["str", "number", "str", "date", "str", "str", "number", "str"];
let messages: Message[] = [];
let newMessage = "What is Alice's salary?";
let isGenerating = false;
let generator: any = null;
let isModelReady = false;
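// Kick off model loading as soon as the component mounts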
onMount(async () => {
messages = [...messages, { text: "🤖 Model initializing...", sender: "bot" }];
await initializeModel();
});
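// Fetches transformers.js and builds the Gemma text-generation pipeline,
// posting progress messages to the chat as it goes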
async function initializeModel() {
try {
console.log('Initializing Gemma model...');
// Dynamically import transformers.js from the CDN so it doesn't block the initial page load
const transformersModule = await import('https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.7.1');
// Update loading message
messages = [...messages, { text: "📦 Loading model components...", sender: "bot" }];
// Small delay to allow UI update
await new Promise(resolve => setTimeout(resolve, 100));
const { pipeline } = transformersModule;
// Update loading message
messages = [...messages, { text: "🚀 Starting model...", sender: "bot" }];
// Small delay to allow UI update
await new Promise(resolve => setTimeout(resolve, 100));
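// Build the text-generation pipeline for Gemma 3 270M (instruct, ONNX); dtype "fp32" keeps full-precision weights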
generator = await pipeline(
'text-generation',
'onnx-community/gemma-3-270m-it-ONNX',
{ dtype: 'fp32' }
);
console.log('Model loaded successfully');
isModelReady = true;
messages = [...messages, { text: "✅ Model ready! Ask me anything about the employee data.", sender: "bot" }];
} catch (error) {
console.error('Failed to initialize model:', error);
messages = [...messages, { text: "⚠️ Failed to load AI model. Please refresh the page to try again.", sender: "bot" }];
}
}
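// Appends the user's message, inserts a "Thinking..." placeholder, then swaps the
// placeholder for the model's reply (or an error message) once generation finishes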
async function sendMessage() {
if (newMessage.trim() && !isGenerating) {
const userMessage = newMessage.trim();
messages = [...messages, { text: userMessage, sender: "user" }];
if (!isModelReady) {
messages = [...messages, { text: "⏳ Model is still initializing. Please wait...", sender: "bot" }];
newMessage = "";
return;
}
isGenerating = true;
newMessage = "";
// Add a placeholder response that will be updated
const placeholderIndex = messages.length;
messages = [...messages, { text: "🤔 Thinking...", sender: "bot" }];
// Use requestAnimationFrame to ensure UI updates before heavy processing
requestAnimationFrame(async () => {
try {
const systemPrompt = `You are a helpful AI assistant that answers questions about employee data. Here's the current data:
${value.headers.join(' | ')}
${value.data.map(row => row.join(' | ')).join('\n')}
Keep your responses concise and helpful. Answer questions about the data above.`;
// Defer generation so the "Thinking..." placeholder can render before the heavy work starts
const runGeneration = async () => {
const response = await generator([
{ role: "system", content: systemPrompt },
{ role: "user", content: userMessage }
], {
max_new_tokens: 256,
do_sample: true,
temperature: 0.7,
top_p: 0.9
});
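// With chat-style input, generated_text is the full message list; the last entry is the assistant reply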
const generatedContent = response[0].generated_text.at(-1).content;
// Update the placeholder message
messages = messages.map((msg, index) =>
index === placeholderIndex ? { text: generatedContent, sender: "bot" } : msg
);
};
// Use a small delay to allow UI to update
await new Promise(resolve => setTimeout(resolve, 50));
await runGeneration();
} catch (error) {
console.error('Generation error:', error);
// Update the placeholder message with error
messages = messages.map((msg, index) =>
index === placeholderIndex ? { text: 'Sorry, I encountered an error. Please try again.', sender: "bot" } : msg
);
} finally {
isGenerating = false;
}
});
}
}
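// Submit the message when the user presses Enter in the input field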
function handleKeyPress(event: KeyboardEvent) {
if (event.key === "Enter") {
sendMessage();
}
}
</script>
<div class="container">
<div class="dataframe-section">
<Dataframe
bind:value
{datatype}
show_search="none"
show_row_numbers={false}
show_copy_button={false}
show_fullscreen_button={false}
editable={true}
show_label={false}
/>
<div class="credits">
❤︎ &nbsp; Built with Gemma 3 270M Onnx & Gradio's Dataframe.js
</div>
</div>
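<!-- Chat panel: renders the running message history -->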
<div class="chatbot-section">
<h2>Chatbot</h2>
<div class="chat-messages">
{#each messages as message}
<div class="message {message.sender}">
<div class="message-content">{message.text}</div>
</div>
{/each}
</div>
</div>
</div>
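<!-- Input bar fixed to the bottom of the viewport -->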
<div class="chat-input-container">
<div class="chat-input">
<input
type="text"
bind:value={newMessage}
placeholder="What is Alice's salary?"
on:keypress={handleKeyPress}
autofocus
/>
<button on:click={sendMessage}>Send</button>
</div>
</div>
<style>
.container {
display: flex;
height: 100vh;
width: 100vw;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
}
.dataframe-section {
width: 80%;
padding: 20px;
border-right: 2px solid #e0e0e0;
overflow: hidden;
padding-bottom: 100px;
}
.credits {
text-align: center;
margin-top: 20px;
padding: 10px;
color: #666;
font-size: 12px;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
border-top: 1px solid #e0e0e0;
background-color: #f8f9fa;
border-radius: 0 0 8px 8px;
}
.chatbot-section {
width: 20%;
padding: 20px;
display: flex;
flex-direction: column;
background-color: #f8f9fa;
}
h2 {
margin: 0 0 20px 0;
color: #333;
font-size: 20px;
font-weight: 600;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
}
.chat-messages {
flex: 1;
overflow-y: auto;
margin-bottom: 20px;
padding: 15px;
background: white;
border-radius: 12px;
border: 1px solid #e1e5e9;
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.05);
}
.message {
margin-bottom: 12px;
}
.message.user {
text-align: right;
}
.message.bot {
text-align: left;
}
.message-content {
display: inline-block;
padding: 10px 16px;
border-radius: 20px;
max-width: 85%;
word-wrap: break-word;
font-size: 14px;
line-height: 1.4;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
}
.message.user .message-content {
background-color: #007acc;
color: white;
font-weight: 500;
}
.message.bot .message-content {
background-color: #f1f3f4;
color: #202124;
font-weight: 400;
}
.chat-input input {
flex: 1;
padding: 12px 16px;
border: 2px solid #e1e5e9;
border-radius: 8px;
font-size: 14px;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
transition: border-color 0.2s ease;
}
.chat-input input:focus {
outline: none;
border-color: #007acc;
}
.chat-input button {
padding: 12px 20px;
background-color: #007acc;
color: white;
border: none;
border-radius: 8px;
cursor: pointer;
font-size: 14px;
font-weight: 500;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
transition: background-color 0.2s ease;
}
.chat-input button:hover {
background-color: #005a9e;
}
.chat-input-container {
position: fixed;
bottom: 0;
left: 0;
right: 0;
background: white;
border-top: 1px solid #e1e5e9;
padding: 20px;
box-shadow: 0 -2px 8px rgba(0, 0, 0, 0.05);
}
.chat-input {
max-width: 1200px;
margin: 0 auto;
display: flex;
gap: 10px;
}
</style>