Hi everyone,
Has anyone tried making webkitSpeechRecognition and webkitAudioContext work together?
The basic idea is to run speech recognition and show a mic level meter on the page at the same time.
My sample code below kind of works, but it asks for microphone access twice; what I'd like is to connect the level checker to the existing context so there's only one prompt.
Any suggestions appreciated, thanks!
Thomas
var that = this;

// Continuous recognition with interim results.
var recognition = new webkitSpeechRecognition();
recognition.continuous = true;
recognition.interimResults = true;

recognition.onstart = function()
{
    // At this point recognition has already grabbed the microphone,
    // so the getUserMedia call below triggers a second permission prompt.
    var context = new webkitAudioContext();
    navigator.webkitGetUserMedia(
        {
            audio: true
        },
        function(stream)
        {
            // Level meter: mic stream -> script processor -> destination,
            // with the level measured in the onaudioprocess callback.
            var liveSource = context.createMediaStreamSource(stream);
            var levelChecker = context.createJavaScriptNode(1024, 1, 1);
            liveSource.connect(levelChecker);
            levelChecker.connect(context.destination);
            levelChecker.onaudioprocess = that.processAudio;
        }
    );
    that.isRunning(true);
};
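
For reference, here's a rough sketch of the direction I mean: request the microphone once up front, wire the level meter from that stream, and only then start recognition. This is untested beyond the meter part, and as far as I can tell webkitSpeechRecognition can't be handed an existing MediaStream, which may be why it prompts on its own; processAudio below is just a placeholder for my level callback.

// Sketch: one getUserMedia call and one AudioContext, shared by the meter;
// recognition is started only after the meter is wired up.
var context = new webkitAudioContext();
var recognition = new webkitSpeechRecognition();
recognition.continuous = true;
recognition.interimResults = true;

navigator.webkitGetUserMedia(
    {
        audio: true
    },
    function(stream)
    {
        // Level meter: mic stream -> script processor -> destination.
        var liveSource = context.createMediaStreamSource(stream);
        var levelChecker = context.createScriptProcessor(1024, 1, 1); // newer name for createJavaScriptNode
        liveSource.connect(levelChecker);
        levelChecker.connect(context.destination);
        levelChecker.onaudioprocess = processAudio; // placeholder level callback

        // Only start recognition once the meter is already running.
        recognition.start();
    },
    function(error)
    {
        console.error('getUserMedia error', error);
    }
);

The meter part of this works on its own; the open question is whether starting recognition afterwards still triggers a second microphone prompt.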