g
File size: 23,424 Bytes
34c7121
 
 
 
d5b0fae
276a148
34c7121
6b44370
d5b0fae
 
 
 
 
 
 
 
 
 
 
 
 
8e1842a
d5b0fae
 
 
276a148
 
d5b0fae
 
276a148
d5b0fae
 
 
 
276a148
d5b0fae
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
276a148
d5b0fae
276a148
 
 
 
 
d5b0fae
 
 
 
 
276a148
 
d5b0fae
8e1842a
d5b0fae
 
 
276a148
 
 
d5b0fae
 
276a148
d5b0fae
 
 
 
 
34c7121
d5b0fae
276a148
d5b0fae
34c7121
276a148
 
d5b0fae
276a148
34c7121
276a148
6b44370
d5b0fae
 
 
 
b352433
276a148
d5b0fae
 
 
 
 
 
 
 
34c7121
276a148
d5b0fae
 
276a148
d5b0fae
276a148
 
34c7121
276a148
d5b0fae
 
 
 
34c7121
d5b0fae
 
276a148
 
 
 
 
 
 
 
d5b0fae
276a148
 
d5b0fae
34c7121
d5b0fae
4ec33a2
d5b0fae
 
 
4ec33a2
 
d5b0fae
4ec33a2
6b44370
d5b0fae
8e1842a
d5b0fae
4ec33a2
d5b0fae
 
4ec33a2
d5b0fae
8e1842a
4ec33a2
276a148
34c7121
4ec33a2
d5b0fae
34c7121
d5b0fae
 
8e1842a
d5b0fae
276a148
 
34c7121
 
276a148
 
 
 
e7706f8
d5b0fae
276a148
 
 
 
 
 
 
 
34c7121
276a148
 
 
34c7121
d5b0fae
276a148
 
d5b0fae
6b44370
276a148
6b44370
276a148
d5b0fae
276a148
d5b0fae
 
 
276a148
d5b0fae
 
34c7121
276a148
 
 
d5b0fae
8e1842a
276a148
 
 
d5b0fae
34c7121
4ec33a2
d5b0fae
276a148
 
34c7121
 
d5b0fae
276a148
d5b0fae
 
276a148
 
 
d5b0fae
 
 
 
 
 
34c7121
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
    <title>AI Assistant (Gemma 3 1B - Final HTML Attempt)</title>
    <style>
        /* CSS unchanged from the previous version */
        @import url('https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;500;700&display=swap');
        :root { /* Using the neutral blue theme */
            --primary-color: #007bff; --secondary-color: #6c757d; --text-color: #212529;
            --bg-color: #f8f9fa; --user-msg-bg: #e7f5ff; --user-msg-text: #004085;
            --bot-msg-bg: #ffffff; --bot-msg-border: #dee2e6; --system-msg-color: #6c757d;
            --error-color: #721c24; --error-bg: #f8d7da; --error-border: #f5c6cb;
            --warning-color: #856404; --warning-bg: #fff3cd; --warning-border: #ffeeba;
            --success-color: #155724; --success-bg: #d4edda; --success-border: #c3e6cb;
            --border-color: #dee2e6; --input-bg: #ffffff; --input-border: #ced4da;
            --button-bg: var(--primary-color); --button-hover-bg: #0056b3; --button-disabled-bg: #adb5bd;
            --scrollbar-thumb: var(--primary-color); --scrollbar-track: #e9ecef;
            --header-bg: #ffffff; --header-shadow: 0 1px 2px rgba(0, 0, 0, 0.05);
            --container-shadow: 0 4px 15px rgba(0, 0, 0, 0.07);
        }
        * { box-sizing: border-box; margin: 0; padding: 0; }
        html { height: 100%; }
        body { font-family: 'Roboto', sans-serif; display: flex; flex-direction: column; align-items: center; justify-content: flex-start; min-height: 100vh; background-color: var(--bg-color); color: var(--text-color); padding: 10px; overscroll-behavior: none; }
        #control-panel { background: var(--header-bg); padding: 15px; border-radius: 8px; margin-bottom: 10px; box-shadow: var(--header-shadow); width: 100%; max-width: 600px; border: 1px solid var(--border-color); text-align: center; }
        #loadModelButton { padding: 10px 20px; font-size: 1em; background-color: var(--primary-color); color: white; border: none; border-radius: 5px; cursor: pointer; transition: background-color 0.2s; margin-bottom: 10px; }
        #loadModelButton:hover:not(:disabled) { background-color: var(--button-hover-bg); }
        #loadModelButton:disabled { background-color: var(--button-disabled-bg); cursor: not-allowed; }
        #model-status { font-size: 0.9em; padding: 10px; border-radius: 4px; text-align: center; min-height: 40px; line-height: 1.4; }
        #model-status.info { background-color: #e2e3e5; border: 1px solid #d6d8db; color: #383d41; }
        #model-status.loading { background-color: var(--warning-bg); border: 1px solid var(--warning-border); color: var(--warning-color); }
        #model-status.success { background-color: var(--success-bg); border: 1px solid var(--success-border); color: var(--success-color); }
        #model-status.error { background-color: var(--error-bg); border: 1px solid var(--error-border); color: var(--error-color); }
        #chat-container { width: 100%; max-width: 600px; height: 75vh; max-height: 700px; background-color: #ffffff; border-radius: 12px; box-shadow: var(--container-shadow); display: flex; flex-direction: column; overflow: hidden; border: 1px solid var(--border-color); }
        h1 { text-align: center; color: var(--primary-color); padding: 15px; background-color: var(--header-bg); border-bottom: 1px solid var(--border-color); font-size: 1.2em; font-weight: 500; flex-shrink: 0; box-shadow: var(--header-shadow); position: relative; z-index: 10; }
        #chatbox { flex-grow: 1; overflow-y: auto; padding: 15px; display: flex; flex-direction: column; gap: 12px; scrollbar-width: thin; scrollbar-color: var(--scrollbar-thumb) var(--scrollbar-track); background-color: var(--bg-color); }
        #chatbox::-webkit-scrollbar { width: 6px; } #chatbox::-webkit-scrollbar-track { background: var(--scrollbar-track); border-radius: 3px; } #chatbox::-webkit-scrollbar-thumb { background-color: var(--scrollbar-thumb); border-radius: 3px; }
        #messages div { padding: 10px 15px; border-radius: 16px; max-width: 85%; word-wrap: break-word; line-height: 1.5; font-size: 1em; box-shadow: 0 1px 2px rgba(0,0,0,0.05); position: relative; animation: fadeIn 0.25s ease-out; }
        @keyframes fadeIn { from { opacity: 0; transform: translateY(5px); } to { opacity: 1; transform: translateY(0); } }
        .user-message { background: var(--user-msg-bg); color: var(--user-msg-text); align-self: flex-end; border-bottom-right-radius: 4px; margin-left: auto; }
        .bot-message { background-color: var(--bot-msg-bg); border: 1px solid var(--bot-msg-border); align-self: flex-start; border-bottom-left-radius: 4px; margin-right: auto; }
        .bot-message a { color: var(--primary-color); text-decoration: none; } .bot-message a:hover { text-decoration: underline; }
        .system-message { font-style: italic; color: var(--system-msg-color); text-align: center; font-size: 0.85em; background-color: transparent; box-shadow: none; align-self: center; max-width: 100%; padding: 5px 0; animation: none; }
        .error-message { color: var(--error-color); font-weight: 500; background-color: var(--error-bg); border: 1px solid var(--error-border); padding: 10px 15px; border-radius: 8px; align-self: stretch; text-align: left; }
        #input-area { display: flex; padding: 10px 12px; border-top: 1px solid var(--border-color); background-color: var(--header-bg); align-items: center; gap: 8px; flex-shrink: 0; }
        #userInput { flex-grow: 1; padding: 10px 15px; border: 1px solid var(--input-border); border-radius: 20px; outline: none; font-size: 1em; font-family: 'Roboto', sans-serif; background-color: var(--input-bg); transition: border-color 0.2s ease; min-height: 42px; resize: none; overflow-y: auto; }
        #userInput:focus { border-color: var(--primary-color); }
        .control-button { padding: 0; border: none; border-radius: 50%; cursor: pointer; background-color: var(--button-bg); color: white; width: 42px; height: 42px; font-size: 1.3em; display: flex; align-items: center; justify-content: center; flex-shrink: 0; transition: background-color 0.2s ease, transform 0.1s ease; box-shadow: 0 1px 2px rgba(0,0,0,0.08); }
        .control-button:hover:not(:disabled) { background-color: var(--button-hover-bg); transform: translateY(-1px); }
        .control-button:active:not(:disabled) { transform: scale(0.95); }
        .control-button:disabled { background-color: var(--button-disabled-bg); cursor: not-allowed; transform: none; box-shadow: none; }
        #toggleSpeakerButton.muted { background-color: #aaa; }
         @media (max-width: 600px) { /* Responsive styles */
             body { padding: 5px; justify-content: flex-start; } #control-panel { margin-bottom: 5px; padding: 12px; }
             #chat-container { width: 100%; height: auto; flex-grow: 1; border-radius: 12px; max-height: none; margin-bottom: 5px; }
             h1 { font-size: 1.1em; padding: 12px; } #chatbox { padding: 12px 8px; gap: 10px; }
             #messages div { max-width: 90%; font-size: 0.95em; padding: 9px 14px;}
             #input-area { padding: 8px; gap: 5px; } #userInput { padding: 9px 14px; min-height: 40px; }
             .control-button { width: 40px; height: 40px; font-size: 1.2em; }
         }
    </style>
    <!-- Using LATEST version of Transformers.js via CDN -->
    <script type="importmap">
    {
      "imports": {
        "@xenova/transformers": "https://cdn.jsdelivr.net/npm/@xenova/transformers@latest"
      }
    }
    </script>
</head>
<body>
    <div id="control-panel">
        <h2>Model Loader</h2>
        <button id="loadModelButton">Load Gemma 3 1B Model (Q4)</button>
        <div id="model-status" class="info">Click button to load <code>onnx-community/gemma-3-1b-it-ONNX-GQA</code> (Q4). <strong>Warning:</strong> This model is known to be incompatible with Transformers.js via CDN and loading is expected to fail.</div>
    </div>

    <div id="chat-container">
        <h1 id="chatbot-name">AI Assistant</h1>
        <div id="chatbox">
            <div id="messages">
                <!-- Chat messages appear here -->
            </div>
        </div>
        <div id="input-area">
             <textarea id="userInput" placeholder="Please attempt to load the model first..." rows="1" disabled></textarea>
            <button id="speechButton" class="control-button" title="Speak message" disabled>🎀</button>
            <button id="toggleSpeakerButton" class="control-button" title="Toggle AI speech output" disabled>πŸ”Š</button>
            <button id="sendButton" class="control-button" title="Send message" disabled>➀</button>
        </div>
    </div>

    <script type="module">
        // Import necessary functions from the loaded library.
        import { pipeline, env } from '@xenova/transformers';

        // --- Configuration ---
        const MODEL_NAME = 'onnx-community/gemma-3-1b-it-ONNX-GQA'; // Hugging Face model id requested by the user
        const TASK = 'text-generation'; // pipeline task name passed to Transformers.js
        const QUANTIZATION = 'q4'; // dtype, as specified in the model card example

        // --- Environment Setup ---
        env.allowRemoteModels = true; // allow fetching model files remotely (from the Hub)
        env.useBrowserCache = true; // cache downloaded model files in the browser
        env.backends.onnx.executionProviders = ['webgpu', 'wasm']; // prefer WebGPU, fall back to WASM
        console.log('Using Execution Providers:', env.backends.onnx.executionProviders);
        env.backends.onnx.prefer_alternative_execution_providers = true; // let ORT fall back to a working provider

        // --- DOM Elements ---
        const chatbox = document.getElementById('messages'); // message list container (bubbles are appended here)
        const userInput = document.getElementById('userInput'); // chat textarea
        const sendButton = document.getElementById('sendButton');
        const chatbotNameElement = document.getElementById('chatbot-name'); // <h1> showing the bot name
        const speechButton = document.getElementById('speechButton'); // mic (speech-to-text) button
        const toggleSpeakerButton = document.getElementById('toggleSpeakerButton'); // TTS on/off toggle
        const modelStatus = document.getElementById('model-status'); // loader status banner
        const loadModelButton = document.getElementById('loadModelButton');

        // --- State Management ---
        let generator = null; // Transformers.js pipeline instance; null until a model loads successfully
        let isLoadingModel = false; // guards against concurrent load attempts
        let conversationHistory = []; // Stores { role: 'user' | 'assistant' | 'system', content: '...' }
        let botState = { botName: "AI Assistant", userName: "User", botSettings: { useSpeechOutput: true } };
        const stateKey = 'gemma3_1b_strict_state_v2'; // localStorage key for botState
        const historyKey = 'gemma3_1b_strict_history_v2'; // localStorage key for conversationHistory

        // --- Web Speech API ---
        let recognition = null; // SpeechRecognition instance; stays null if the browser lacks support
        let synthesis = window.speechSynthesis; // may be undefined in unsupported browsers
        let targetVoice = null; // English voice chosen by findAndSetVoice()
        let isListening = false; // true while the microphone is active

        // --- Initialization ---
        // One-time page setup: restore persisted state, wire up the UI, and leave
        // chat controls disabled until the model is loaded on demand.
        window.addEventListener('load', () => {
            loadState(); // restore settings + history from localStorage
            chatbotNameElement.textContent = botState.botName;
            updateSpeakerButtonUI();
            initializeSpeechAPI();
            setupInputAutosize();
            updateChatUIState(false); // chat stays disabled until a model is loaded
            displayHistory();
            setTimeout(loadVoices, 500); // TTS voices are often not ready at load time
            loadModelButton.addEventListener('click', handleLoadModelClick);
            console.log("Using Transformers.js (latest) loaded via import map.");
            displayMessage('system', `Using latest Transformers.js. Ready to load ${MODEL_NAME}.`, false);
        });

        // --- State Persistence ---
        /**
         * Restore bot settings and conversation history from localStorage.
         * Corrupted entries are logged and discarded (previously they were
         * silently swallowed by empty catch blocks), and defaults are kept.
         */
        function loadState() {
            const savedState = localStorage.getItem(stateKey);
            if (savedState) {
                try {
                    const loaded = JSON.parse(savedState);
                    // Deep-merge botSettings so new default settings survive older saved states.
                    botState = { ...botState, ...loaded, botSettings: { ...botState.botSettings, ...(loaded.botSettings || {}) } };
                } catch (e) {
                    console.warn('Ignoring corrupted saved state:', e);
                }
            }
            const savedHistory = localStorage.getItem(historyKey);
            if (savedHistory) {
                try {
                    conversationHistory = JSON.parse(savedHistory);
                    if (!Array.isArray(conversationHistory)) conversationHistory = [];
                } catch (e) {
                    console.warn('Ignoring corrupted saved history:', e);
                    conversationHistory = [];
                }
            }
        }
         /** Persist the current bot settings and chat transcript to localStorage. */
         function saveState() {
             const snapshot = [[stateKey, botState], [historyKey, conversationHistory]];
             for (const [key, value] of snapshot) {
                 localStorage.setItem(key, JSON.stringify(value));
             }
         }
         /** Re-render the saved conversation into the chat pane without animations. */
         function displayHistory() {
             chatbox.innerHTML = '';
             for (const entry of conversationHistory) {
                 // Only user/assistant turns are rendered; system turns are skipped.
                 if (entry.role !== 'user' && entry.role !== 'assistant') continue;
                 displayMessage(entry.role === 'user' ? 'user' : 'bot', entry.content, false);
             }
         }

        // --- UI Update Functions ---
        /**
         * Append a chat bubble to the chatbox.
         * @param {'user'|'bot'|'system'} sender - selects the bubble style.
         * @param {string} text - raw message text; a small markdown subset
         *     (links, **bold**, *italic*, newlines) is converted to HTML.
         * @param {boolean} animate - fade the bubble in and smooth-scroll.
         * @param {boolean} isError - render a system message as an error banner.
         */
        function displayMessage(sender, text, animate = true, isError = false) {
            const messageDiv = document.createElement('div');
            let messageClass = sender === 'user' ? 'user-message' : sender === 'bot' ? 'bot-message' : 'system-message';
            if (sender === 'system' && isError) messageClass = 'error-message';
            messageDiv.classList.add(messageClass);
            if (!animate) messageDiv.style.animation = 'none';
            // Escape HTML metacharacters BEFORE adding our own markup; otherwise
            // model/user output is injected verbatim via innerHTML (XSS). The
            // previous replace(/</g, "<") / replace(/>/g, ">") calls were no-ops
            // (mojibake of &lt;/&gt;) and offered no protection. '&' must go first.
            text = text.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
            // Minimal markdown rendering: [label](url), **bold**, *italic*, newlines.
            text = text.replace(/\[(.*?)\]\((.*?)\)/g, '<a href="$2" target="_blank" rel="noopener noreferrer">$1</a>');
            text = text.replace(/\*\*(.*?)\*\*/g, '<strong>$1</strong>').replace(/\*(.*?)\*/g, '<em>$1</em>');
            text = text.replace(/\n/g, '<br>');
            messageDiv.innerHTML = text;
            chatbox.appendChild(messageDiv);
            chatbox.scrollTo({ top: chatbox.scrollHeight, behavior: animate ? 'smooth' : 'auto' });
        }
        /** Show a status line under the loader button and mirror it to the console. */
        function updateModelStatus(message, type = 'info') {
            modelStatus.textContent = message;
            modelStatus.className = 'model-status ' + type;
            console.log(`Model Status (${type}): ${message}`);
        }
        /** Enable/disable the chat controls to reflect the current model-load state. */
        function updateChatUIState(isModelLoadedSuccessfully) {
            const busyOrUnloaded = !isModelLoadedSuccessfully || isLoadingModel;
            userInput.disabled = busyOrUnloaded;
            sendButton.disabled = busyOrUnloaded || userInput.value.trim() === '';
            speechButton.disabled = busyOrUnloaded || isListening || !recognition;
            toggleSpeakerButton.disabled = busyOrUnloaded || !synthesis;
            // The loader button locks while loading and stays locked once loaded.
            loadModelButton.disabled = isLoadingModel || isModelLoadedSuccessfully;
            if (isModelLoadedSuccessfully) {
                userInput.placeholder = "How can I help you today?";
            } else if (isLoadingModel) {
                userInput.placeholder = "Model loading...";
            } else {
                userInput.placeholder = "Please attempt to load the model first...";
            }
        }
        /** Sync the speaker toggle button (icon, tooltip, muted style) with settings. */
        function updateSpeakerButtonUI() {
            const speechOn = botState.botSettings.useSpeechOutput;
            toggleSpeakerButton.textContent = speechOn ? 'πŸ”Š' : 'πŸ”‡';
            toggleSpeakerButton.title = speechOn ? 'Turn off AI speech' : 'Turn on AI speech';
            toggleSpeakerButton.classList.toggle('muted', !speechOn);
        }
        /** Speech-related status currently only goes to the console. */
        function showSpeechStatus(message) {
            console.log("Speech Status:", message);
        }
        /** Grow the textarea with its content and refresh send-button enablement. */
        function setupInputAutosize() {
            userInput.addEventListener('input', () => {
                // Reset to auto first so the textarea can also shrink.
                userInput.style.height = 'auto';
                userInput.style.height = userInput.scrollHeight + 'px';
                updateChatUIState(generator !== null);
            });
        }

        // --- Model & AI Logic ---
        /** Click handler for the loader button; guards against re-entry and re-loads. */
        async function handleLoadModelClick() {
            if (isLoadingModel || generator) return;
            isLoadingModel = true;
            generator = null;
            updateChatUIState(false);
            await initializeModel(MODEL_NAME);
            isLoadingModel = false;
            updateChatUIState(generator !== null);
        }

        // Initialize model EXACTLY as per the documentation example
        /**
         * Create the text-generation pipeline exactly as the model card documents
         * (dtype: q4). On failure the error is classified and surfaced in the UI.
         * @param {string} modelId - Hugging Face model id to load.
         */
        async function initializeModel(modelId) {
            updateModelStatus(`Loading ${modelId} with { dtype: "${QUANTIZATION}" }... (Strict doc example)`, 'loading');
            displayMessage('system', `Attempting to load ${modelId} using documented method (dtype: ${QUANTIZATION})...`, false);

            try {
                // Pipeline creation exactly as in the documentation example.
                generator = await pipeline(TASK, modelId, {
                    dtype: QUANTIZATION, // q4, per the model card
                    progress_callback: (progress) => {
                         const msg = `[Loading: ${progress.status}] ${progress.file ? progress.file.split('/').pop() : ''} (${Math.round(progress.progress || 0)}%)`;
                         updateModelStatus(msg, 'loading');
                     }
                });

                updateModelStatus(`${modelId} loaded successfully!`, 'success');
                displayMessage('system', `[SUCCESS] ${modelId} loaded.`, false);

            } catch (error) {
                console.error(`Model loading failed for ${modelId} (Strict Attempt):`, error);
                // Non-Error throwables (strings, objects without .message) must not
                // crash the handler: extract a message defensively before classifying.
                const reason = (error && typeof error.message === 'string') ? error.message : String(error);
                let errorMsg = `Failed to load ${modelId}: ${reason}.`;
                if (reason.includes("Unsupported model type") || reason.includes("gemma3_text")) {
                    errorMsg += " As expected, the 'gemma3_text' model type is unsupported by this library version.";
                } else if (reason.includes("split is not a function")) {
                    errorMsg += " As expected, a TypeError occurred during config parsing (incompatibility).";
                } else {
                    errorMsg += " Unknown error. Check console/network/memory.";
                }
                updateModelStatus(errorMsg, 'error');
                displayMessage('system', `[ERROR] ${errorMsg}`, true, true);
                generator = null; // never leave a half-initialized pipeline behind
            }
        }

        // Build messages array EXACTLY as per documentation example
        function buildMessages(newUserMessage) { /* No changes */
             let messages = [{ role: "system", content: "You are a helpful assistant." }]; messages = messages.concat(conversationHistory); messages.push({ role: "user", content: newUserMessage }); console.log("Input Messages for Pipeline:", messages); return messages;
         }

         // Cleanup response EXACTLY as per documentation example (with safety checks)
        /**
         * Extract the assistant's final turn from the pipeline output, stripping
         * <end_of_turn> markers. Falls back to a random canned apology when the
         * output does not match the documented messages format.
         */
        function cleanupResponse(output) {
            try {
                if (output && output.length > 0 && Array.isArray(output[0].generated_text)) {
                    const last = output[0].generated_text.at(-1);
                    const roleOk = last && (last.role === 'assistant' || last.role === 'model');
                    if (roleOk && typeof last.content === 'string') {
                        const cleaned = last.content.trim().replace(/<end_of_turn>/g, '').trim();
                        if (cleaned.length > 0) return cleaned;
                    }
                }
            } catch (e) {
                console.error("Error parsing generator output with .at(-1):", e, "Output:", output);
            }
            console.warn("Could not extract response using output[0].generated_text.at(-1).content. Output structure might differ.", output);
            const fallbacks = [
                "Sorry, response format was unexpected.",
                "My response might be garbled.",
                "Error processing the AI answer.",
            ];
            return fallbacks[Math.floor(Math.random() * fallbacks.length)];
        }


        // --- Main Interaction Logic ---
        /**
         * Send the current textarea content to the model and render the reply.
         * Pushes both turns into conversationHistory, optionally speaks the
         * answer aloud, and persists state afterwards.
         */
        async function handleUserMessage() {
            const prompt = userInput.value.trim();
            if (!prompt || !generator || isLoadingModel) return;
            userInput.value = '';
            userInput.style.height = 'auto';
            updateChatUIState(true);
            displayMessage('user', prompt);
            conversationHistory.push({ role: 'user', content: prompt });
            updateModelStatus("AI thinking...", "loading");
            const chatMessages = buildMessages(prompt);
            try {
                const rawOutput = await generator(chatMessages, { max_new_tokens: 512, do_sample: false });
                const reply = cleanupResponse(rawOutput);
                console.log("Cleaned AI Output:", reply);
                displayMessage('bot', reply);
                conversationHistory.push({ role: 'assistant', content: reply });
                if (botState.botSettings.useSpeechOutput && synthesis && targetVoice) {
                    speakText(reply);
                }
                saveState();
            } catch (error) {
                console.error("AI response generation error:", error);
                displayMessage('system', `[ERROR] Failed to generate response: ${error.message}`, true, true);
            } finally {
                if (generator) updateModelStatus(`${MODEL_NAME} ready.`, "success");
                updateChatUIState(generator !== null);
                userInput.focus();
            }
        }

        // --- Speech API Functions ---
        // Wire up the Web Speech APIs: SpeechRecognition (mic input) when the
        // browser provides it, and the speaker on/off toggle when speechSynthesis
        // exists. recognition.onresult feeds the transcript into handleUserMessage;
        // onstart/onend keep `isListening` and the control buttons in sync, and
        // onerror surfaces failures in the status banner, restoring it after 3s.
        function initializeSpeechAPI() { /* Sets up recognition handlers + speaker toggle */
              const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; if (SpeechRecognition) { recognition = new SpeechRecognition(); recognition.lang = 'en-US'; recognition.continuous = false; recognition.interimResults = false; recognition.onstart = () => { isListening = true; updateChatUIState(generator !== null); console.log('Listening...'); }; recognition.onresult = (event) => { userInput.value = event.results[0][0].transcript; userInput.dispatchEvent(new Event('input')); handleUserMessage(); }; recognition.onerror = (event) => { console.error("Speech error:", event.error); updateModelStatus(`Speech recognition error (${event.error})`, 'error'); setTimeout(() => updateModelStatus(generator ? `${MODEL_NAME} ready.` : 'Model not loaded.', generator ? 'success' : 'error'), 3000); }; recognition.onend = () => { isListening = false; updateChatUIState(generator !== null); console.log('Stopped listening.'); }; } else { console.warn("Speech Recognition not supported."); } if (!synthesis) { console.warn("Speech Synthesis not supported."); } else { toggleSpeakerButton.addEventListener('click', () => { botState.botSettings.useSpeechOutput = !botState.botSettings.useSpeechOutput; updateSpeakerButtonUI(); saveState(); if (!botState.botSettings.useSpeechOutput) synthesis.cancel(); }); } updateChatUIState(false);
          }
          /** Resolve TTS voices now, or defer until the browser fires voiceschanged. */
          function loadVoices() {
              if (!synthesis) return;
              const available = synthesis.getVoices();
              if (available.length > 0) {
                  findAndSetVoice(available);
                  return;
              }
              // Some browsers populate the voice list asynchronously.
              synthesis.onvoiceschanged = () => findAndSetVoice(synthesis.getVoices());
          }
          /** Pick an en-US voice if available, otherwise any English voice; log the outcome. */
          function findAndSetVoice(voices) {
              const exactMatch = voices.find(v => v.lang === 'en-US');
              targetVoice = exactMatch || voices.find(v => v.lang.startsWith('en-'));
              if (targetVoice) {
                  console.log("Using English voice:", targetVoice.name, targetVoice.lang);
              } else {
                  console.warn("No suitable English voice found.");
              }
          }
          /** Speak `text` aloud with the chosen voice, cancelling any utterance in progress. */
          function speakText(text) {
              if (!synthesis || !botState.botSettings.useSpeechOutput || !targetVoice) return;
              synthesis.cancel();
              const utterance = new SpeechSynthesisUtterance(text);
              utterance.voice = targetVoice;
              utterance.lang = targetVoice.lang;
              utterance.rate = 1.0;
              utterance.pitch = 1.0;
              synthesis.speak(utterance);
          }

        // --- Event Listeners ---
        // Send on button click.
        sendButton.addEventListener('click', handleUserMessage);
        // Enter sends; Shift+Enter inserts a newline in the textarea.
        userInput.addEventListener('keypress', (e) => { if (e.key === 'Enter' && !e.shiftKey) { e.preventDefault(); handleUserMessage(); } });
        // Mic button: start recognition only when a model is loaded and we are idle;
        // on start failure, show an error status and restore the prior status after 2s.
        speechButton.addEventListener('click', () => { if (recognition && !isListening && generator && !isLoadingModel) { try { recognition.start(); } catch (error) { console.error("Rec start fail:", error); updateModelStatus(`Failed to start recognition`, 'error'); setTimeout(() => updateModelStatus(generator ? `${MODEL_NAME} ready.` : 'Model not loaded.', generator ? 'success' : 'error'), 2000); isListening = false; updateChatUIState(generator !== null); } } });

    </script>
</body>
</html>