<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Mini-Omni HTML Demo</title>
    <style>
        body { font-family: Arial, sans-serif; max-width: 800px; margin: 0 auto; padding: 20px; }
        #recordButton { width: 100px; height: 100px; border-radius: 50%; background-color: #f0f0f0; border: none; cursor: pointer; }
        #recordButton:active { background-color: #ff4444; }
        #chatHistory { height: 300px; overflow-y: scroll; border: 1px solid #ccc; padding: 10px; margin-bottom: 20px; }
        .message { margin-bottom: 10px; }
        .user { color: blue; }
        .ai { color: green; }
    </style>
</head>
<body>
    <h1>Mini-Omni Chat Demo</h1>
    <div id="chatHistory"></div>
    <button id="recordButton">Hold to Speak</button>
    <audio id="audioPlayback" controls style="display:none;"></audio>

    <script>
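        // How this demo works: hold the button to record from the microphone and release to stop.
        // The recording is base64-encoded and POSTed as JSON ({ "audio": "<base64>" }) to the /chat
        // endpoint on the same origin as this page, and the audio returned by the server is added
        // to the chat history and played back.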
        const API_URL = '/chat';
        const recordButton = document.getElementById('recordButton');
        const chatHistory = document.getElementById('chatHistory');
        const audioPlayback = document.getElementById('audioPlayback');
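        // Recorder handle and buffered audio chunks, shared between the start/stop handlers.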
        let mediaRecorder;
        let audioChunks = [];

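        // Push-to-talk: start recording on press, stop on release or when the pointer leaves the button.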
        recordButton.addEventListener('mousedown', startRecording);
        recordButton.addEventListener('mouseup', stopRecording);
        recordButton.addEventListener('mouseleave', stopRecording);

        async function startRecording() {
            try {
                const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
                audioChunks = []; // start each utterance with an empty chunk buffer
                mediaRecorder = new MediaRecorder(stream);
                mediaRecorder.ondataavailable = event => {
                    audioChunks.push(event.data);
                };
                mediaRecorder.start();
                updateChatHistory('User', 'Recording...');
            } catch (error) {
                console.error('Error accessing microphone:', error);
                alert('Error accessing microphone. Please ensure you have given permission.');
            }
        }

        function stopRecording() {
            if (mediaRecorder && mediaRecorder.state === 'recording') {
                // Register the handler before calling stop() so the stop event cannot be missed.
                mediaRecorder.onstop = async () => {
                    // MediaRecorder produces webm/ogg rather than WAV; label the blob with its actual type.
                    const audioBlob = new Blob(audioChunks, { type: mediaRecorder.mimeType || 'audio/webm' });
                    audioChunks = [];
                    // Release the microphone now that recording is finished.
                    mediaRecorder.stream.getTracks().forEach(track => track.stop());
                    updateChatHistory('User', URL.createObjectURL(audioBlob));
                    await sendAudioToAPI(audioBlob);
                };
                mediaRecorder.stop();
            }
        }

        async function sendAudioToAPI(audioBlob) {
            try {
                // Wrap FileReader in a Promise so failures inside its callbacks reach the catch block below.
                const base64Audio = await new Promise((resolve, reject) => {
                    const reader = new FileReader();
                    reader.onload = () => resolve(reader.result.split(',')[1]);
                    reader.onerror = () => reject(reader.error);
                    reader.readAsDataURL(audioBlob);
                });

                const response = await fetch(API_URL, {
                    method: 'POST',
                    headers: {
                        'Content-Type': 'application/json',
                    },
                    body: JSON.stringify({ audio: base64Audio })
                });

                if (response.ok) {
                    // Buffer the audio returned by the server into a single blob.
                    const responseBlob = await response.blob();
                    const audioUrl = URL.createObjectURL(responseBlob);
                    updateChatHistory('AI', audioUrl);

                    // Play the audio response
                    const audio = new Audio(audioUrl);
                    audio.play();
                } else {
                    console.error('API response not ok:', response.status);
                    updateChatHistory('AI', 'Error in API response');
                }
            } catch (error) {
                console.error('Error sending audio to API:', error);
                if (error.name === 'TypeError' && error.message === 'Failed to fetch') {
                    updateChatHistory('AI', 'Error: Unable to connect to the server. Please ensure the server is running and accessible.');
                } else {
                    updateChatHistory('AI', 'Error communicating with the server: ' + error.message);
                }
            }
        }

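        // Appends a message to the chat log; blob:/data: URLs are rendered as inline audio players.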
        function updateChatHistory(speaker, content) {
            const messageElement = document.createElement('div');
            messageElement.className = 'message ' + (speaker === 'User' ? 'user' : 'ai');
            if (content.startsWith('blob:') || content.startsWith('data:')) {
                messageElement.innerHTML = `<strong>${speaker}:</strong> <audio src="${content}" controls></audio>`;
            } else {
                messageElement.innerHTML = `<strong>${speaker}:</strong> ${content}`;
            }
            chatHistory.appendChild(messageElement);
            chatHistory.scrollTop = chatHistory.scrollHeight;
        }
    </script>
</body>
</html>