<!doctype html>
<html lang="en">
<head>
  <!-- charset must appear within the first 1024 bytes -->
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Continuous Speech Demo</title>
  <style>
    body {
      font-family: sans-serif;
      padding: 20px;
      max-width: 800px;
      margin: 0 auto;
    }
    button {
      padding: 10px 20px;
      margin: 10px 5px;
      font-size: 16px;
    }
    #status {
      margin: 10px 0;
      padding: 10px;
      background: #e8f5e9;
      border-radius: 4px;
    }
    #output {
      white-space: pre-wrap; /* preserve the '\n' separators between phrases */
      padding: 15px;
      background: #f5f5f5;
      border-radius: 4px;
      margin: 10px 0;
      min-height: 100px;
      max-height: 400px;
      overflow-y: auto;
    }
  </style>
</head>
<body>
  <div class="controls">
    <!-- type="button" prevents implicit form submission semantics -->
    <button id="start" type="button">Start Listening</button>
    <button id="stop" type="button" disabled>Stop Listening</button>
    <button id="clear" type="button">Clear Text</button>
  </div>
  <!-- role="status" makes this a polite live region so screen readers
       announce listening-state changes -->
  <div id="status" role="status">Ready</div>
  <div id="output"></div>
  <!-- mirror of the accumulated transcript, readable by the host page -->
  <input type="hidden" id="streamlit-data" value="">
<script>
  // Streamlit custom component: continuous speech-to-text via the Web
  // Speech API. Finalized phrases accumulate in `fullTranscript` and are
  // forwarded to the Python side through Streamlit's postMessage protocol.
  //
  // Prefer the standard constructor, falling back to the WebKit-prefixed
  // name that Chrome/Safari expose.
  const SpeechRecognitionImpl = window.SpeechRecognition || window.webkitSpeechRecognition;
  if (!SpeechRecognitionImpl) {
    alert('Speech recognition not supported');
  } else {
    const recognition = new SpeechRecognitionImpl();
    const startButton = document.getElementById('start');
    const stopButton = document.getElementById('stop');
    const clearButton = document.getElementById('clear');
    const status = document.getElementById('status');
    const output = document.getElementById('output');
    let fullTranscript = '';

    recognition.continuous = true;      // keep listening across pauses
    recognition.interimResults = true;  // stream partial hypotheses too

    // start() throws InvalidStateError if recognition is already running
    // (e.g. the auto-start timer racing a manual Start click, or onend's
    // restart racing a new session) — swallow only that error.
    const safeStart = () => {
      try {
        recognition.start();
      } catch (e) {
        if (e.name !== 'InvalidStateError') throw e;
      }
    };

    const startRecognition = () => {
      safeStart();
      status.textContent = 'Listening...';
      startButton.disabled = true;
      stopButton.disabled = false;
    };

    // Auto-start shortly after load so the component works hands-free.
    window.addEventListener('load', () => setTimeout(startRecognition, 1000));
    startButton.onclick = startRecognition;

    stopButton.onclick = () => {
      recognition.stop();
      status.textContent = 'Stopped';
      startButton.disabled = false;
      stopButton.disabled = true;
    };

    clearButton.onclick = () => {
      fullTranscript = '';
      output.textContent = '';
      sendDataToPython({ value: '', dataType: "json" });
    };

    recognition.onresult = (event) => {
      let interimTranscript = '';
      let finalTranscript = '';
      // resultIndex points at the first result that changed in this event.
      for (let i = event.resultIndex; i < event.results.length; i++) {
        const transcript = event.results[i][0].transcript;
        if (event.results[i].isFinal) {
          finalTranscript += transcript + '\n';
        } else {
          interimTranscript += transcript;
        }
      }
      if (finalTranscript) {
        // Commit finalized speech and push it to Python.
        fullTranscript += finalTranscript;
        output.textContent = fullTranscript;
        output.scrollTop = output.scrollHeight; // keep newest text visible
        document.getElementById('streamlit-data').value = fullTranscript;
        sendDataToPython({ value: fullTranscript, dataType: "json" });
      } else {
        // Show in-progress speech without committing it.
        output.textContent = fullTranscript + (interimTranscript ? '... ' + interimTranscript : '');
      }
    };

    // Browsers end recognition on silence/timeouts; restart automatically
    // unless the user pressed Stop (stop button disabled means stopped).
    recognition.onend = () => {
      if (!stopButton.disabled) {
        safeStart();
      }
    };

    recognition.onerror = (event) => {
      console.error('Recognition error:', event.error);
      status.textContent = 'Error: ' + event.error;
      startButton.disabled = false;
      stopButton.disabled = true;
    };

    // --- Streamlit component protocol helpers ---
    // Envelope every message with isStreamlitMessage so the host frame
    // can distinguish component traffic.
    function sendMessageToStreamlitClient(type, data) {
      var outData = Object.assign({ isStreamlitMessage: true, type: type }, data);
      window.parent.postMessage(outData, "*");
    }
    // Handshake: tells Streamlit the component is ready to receive args.
    function init() {
      sendMessageToStreamlitClient("streamlit:componentReady", { apiVersion: 1 });
    }
    function setFrameHeight(height) {
      sendMessageToStreamlitClient("streamlit:setFrameHeight", { height: height });
    }
    // Sets the component's return value on the Python side.
    function sendDataToPython(data) {
      sendMessageToStreamlitClient("streamlit:setComponentValue", data);
    }
    // Size the iframe to fit this document once layout has settled.
    window.addEventListener("load", function() {
      window.setTimeout(function() {
        setFrameHeight(document.documentElement.clientHeight);
      }, 0);
    });
    init();
  }
</script>
</body>
</html>