import React, { useState, useEffect } from 'react';
import { createRoot } from 'react-dom/client';
import html2canvas from 'html2canvas';

// NOTE(review): despite the name this is a URL path, not a data: URI —
// consider renaming (left as-is in case it is referenced later in the file).
const transformerImageDataUri = '/0_WGCl24jT_rWfgZWL.png';

const App = () => {
  // Static description of every box in the "Attention Is All You Need"
  // architecture diagram. `side` assigns a box to the encoder or decoder column.
  const components = [
    { id: 'inputs', label: 'Inputs', side: 'encoder' },
    { id: 'input-embedding', label: 'Input Embedding', side: 'encoder' },
    { id: 'positional-encoding', label: 'Positional Encoding', side: 'encoder' },
    { id: 'multi-head-attention', label: 'Multi-Head Attention', side: 'encoder' },
    { id: 'add-norm', label: 'Add & Norm', side: 'encoder' },
    { id: 'feedforward', label: 'Feedforward', side: 'encoder' },
    { id: 'outputs', label: 'Outputs (shifted right)', side: 'decoder' },
    { id: 'output-embedding', label: 'Output Embedding', side: 'decoder' },
    { id: 'decoder-positional-encoding', label: 'Positional Encoding', side: 'decoder' },
    { id: 'masked-multi-head-attention', label: 'Masked Multi-Head Attention', side: 'decoder' },
    { id: 'decoder-add-norm', label: 'Add & Norm', side: 'decoder' },
    { id: 'cross-attention', label: 'Cross Attention', side: 'decoder' },
    { id: 'decoder-feedforward', label: 'Feedforward', side: 'decoder' },
    { id: 'linear', label: 'Linear', side: 'decoder' },
    { id: 'softmax', label: 'Softmax', side: 'decoder' },
    { id: 'output-probabilities', label: 'Output Probabilities', side: 'decoder' },
  ];

  // Free-form note text keyed by component id.
  const [notes, setNotes] = useState<{ [key: string]: string }>({});
  // UI state — presumably consumed further down in this component (not visible here).
  const [showToast, setShowToast] = useState(false);
  const [name, setName] = useState('');

  // Load previously saved notes once on mount.
  // Absent or corrupt data is non-fatal: log and fall back to an empty map.
  useEffect(() => {
    try {
      const savedNotes = localStorage.getItem('transformer-notes');
      if (savedNotes) {
        setNotes(JSON.parse(savedNotes));
      }
    } catch (error) {
      console.error("Failed to load notes from localStorage", error);
    }
  }, []);

  // Update the note for one component and persist the whole map.
  // FIX: the original only updated React state — notes were read from
  // localStorage on mount but never written back, so edits were lost on
  // reload, contradicting the on-screen promise that notes are saved locally.
  // The write lives inside the functional updater so it always sees the
  // freshest state; it is idempotent, so a double invocation (e.g. React
  // StrictMode) is harmless.
  const handleNoteChange = (id: string, value: string) => {
    setNotes((prev: { [key: string]: string }) => {
      const next = { ...prev, [id]: value };
      try {
        localStorage.setItem('transformer-notes', JSON.stringify(next));
      } catch (error) {
        // Best-effort persistence: quota-exceeded / private-browsing modes
        // must not break typing in the note boxes.
        console.error('Failed to save notes to localStorage', error);
      }
      return next;
    });
  };

  const
// Capture the whole app container as a PNG via html2canvas and trigger a
// browser download of it. Errors (e.g. tainted canvas) are logged, not thrown.
handleSaveNotes = async () => {
  const appContainer = document.querySelector('.app-container') as HTMLElement;
  // Nothing to capture if the root container isn't mounted yet.
  if (!appContainer) return;
  try {
    // Explicit background color so the capture matches the dark theme
    // instead of defaulting to transparent/white.
    const canvas = await html2canvas(appContainer, { backgroundColor: '#121212' });
    const image = canvas.toDataURL('image/png');
    // Download by programmatically clicking a synthetic anchor element.
    const link = document.createElement('a');
    link.href = image;
    link.download = 'transformer-notes.png';
    link.click();
  } catch (error) {
    console.error('Failed to capture screenshot', error);
  }
};

// Split the component list into the diagram's two columns.
const encoderComponents = components.filter(c => c.side === 'encoder');
const decoderComponents = components.filter(c => c.side === 'decoder');

// Render one column (encoder or decoder) of labelled note boxes.
// NOTE(review): this definition's JSX body continues beyond this chunk.
const renderNoteColumn = (title: string, componentsToRender: { id: string; label: string; side: string }[]) => (
An interactive diagram of the Transformer model from "Attention Is All You Need". Add your notes in the text boxes to explain each part. Your notes will be saved locally in your browser.