
additional fix for recognition

style change
Branch: master · Niko committed 4 years ago · commit b56f6dfa22
3 changed files with 102 additions and 99 deletions
1. client/index.html (+11 -12)
2. client/style.css (+2 -2)
3. client/ws-client.js (+89 -85)

client/index.html (+11 -12)

 </head>

 <body>
-<h1>Speech Recognition</h1>
-<p id=info>Press the button to activate microphone.</p>
-<p id=query hidden>query</p>
-
-<button>Start new test</button>
-<button id="speechBtn">Speech</button>
-<button onclick="getElementById('query').innerHTML = 'changed'">Change query</button>
-
-<div>
-<p class="server">Server offline</p>
-<p class="output" style="font-style: italic;">... erkannte worte ...</p>
-</div>
+<h1>Digitaler Demenztest</h1>
+<p id=info>Press the button to activate microphone.</p>
+<button>Start new test</button>
+<button id="speechBtn">Test Button</button>
+<div>
+<p class="server">Server offline</p>
+<p class="quest">Question: </p>
+<p class="output" style="font-style: italic;">Erkannte worte ...</p>
+</div>

 <script src="ws-client.js"></script>
 </body>

client/style.css (+2 -2)



 html {
   height: 100%;
-  background-color: teal;
+  background-color: rgb(65, 143, 143);
 }

 body {

 div p {
   padding: 20px;
-  background-color: rgba(0,0,0,0.2);
+  background-color: rgba(51, 51, 51, 0.2);
 }

 div {

client/ws-client.js (+89 -85)

 var testBtn = document.querySelector('button');
 var testBtn2 = document.getElementById('speechBtn');
 var infoPara = document.getElementById('info');
-var userPrompt = document.getElementById('query');
+var questionNumDisplay = document.querySelector('.quest');
 // #endregion

 // websocket to communicate with the server
 ws.onmessage = function (payload) {
   var dialogflowResult = JSON.parse(payload.data);
   checkIntent(dialogflowResult);
-  document.querySelector('h1').innerHTML = dialogflowResult.intent.displayName;
+  // document.querySelector('h1').innerHTML = dialogflowResult.intent.displayName;
 };
 // #endregion
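Note: checkIntent and ws are defined elsewhere in this file. Judging from the dialogflowResult.intent.displayName access above, the server presumably relays Dialogflow's queryResult object as JSON. A minimal sketch of a handler consistent with that shape (the name checkIntentSketch and the fulfillmentText branch are assumptions, not code from this commit):

// Hypothetical handler, assuming the payload is Dialogflow's queryResult.
function checkIntentSketch (dialogflowResult) {
  var intentName = dialogflowResult.intent.displayName; // matched intent, as used above
  var reply = dialogflowResult.fulfillmentText;         // agent's text reply, if any
  console.log('Matched intent: ' + intentName);
  if (reply) {
    speak(reply); // speak() is defined later in this file
  }
}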


 function startQuestion (number) {
   question = number;
   state = 'answer';
+  questionNumDisplay.textContent = 'Question: ' + question;
   handleQuestion();
 }

 window.setTimeout(
   function () {
     recognition.stop();
-    console.log('recognition stopped');
-    handleAnswer(answerQuery);
+    window.setTimeout(
+      function () {
+        handleAnswer(answerQuery);
+        answerQuery = '';
+      }, 3000);
   }, 6000);
 recognition.start();
 console.log('reocgnition started. Question: ' + question);
 }
 // #endregion
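Note: the new nested timeout changes the flow from "stop recognition and score immediately" to "stop recognition, then score three seconds later". Final SpeechRecognition results can still arrive shortly after stop() is called, so the extra delay presumably lets late onresult events finish appending to answerQuery before it is scored and cleared. The pattern in isolation:

// Timing sketch of the hunk above: stop after 6 s, score 3 s later.
window.setTimeout(function () {
  recognition.stop(); // late final results may still fire onresult
  window.setTimeout(function () {
    handleAnswer(answerQuery); // score the full transcript
    answerQuery = '';          // reset for the next question
  }, 3000);
}, 6000);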


-// #region global functions
-function startDemenzScreening () {
-  ws.send('starte demenz test');
-  // startQuestion(2);
-  testBtn.disabled = true;
-  testBtn.textContent = 'Test in progress';
-  infoPara.textContent = 'wait...';
-  diagnosticPara.textContent = 'detecting...';
-}
-
-function speak (sentence) {
-  speechsynth.text = sentence;
-  window.speechSynthesis.speak(speechsynth);
-}
-
-function testSpeechOut () {
-  answerQuery = 'apfel wiese tisch apfel lampe pferd';
-  question = 1;
-  for (let i = 0; i < 2; i++) {
-    var tokens = answerQuery.split(new RegExp(separators.join('|'), 'g'));
-    questionPoints[question] += calculatePoints(tokens, QUESTION_ONE_ANSWERS);
-  }
-
-  console.log(questionPoints[question]);
-
-  // speechsynth.text = 'test 123';
-  // speechsynth.volume = 1;
-  // speechsynth.rate = 1;
-  // console.log(speechsynth);
-  // window.speechSynthesis.speak(speechsynth);
-  // console.log(window.speechSynthesis);
-}
-
 // function recognizeSpeech () {
 //   if (state === 'answer') {
 //     var arr;
 //     // recognition.grammars = speechRecognitionList;
 //   }


+// #region speech recognition event
 recognition.onresult = function (event) {
   var last = event.results.length - 1;
   var speechResult = event.results[last][0].transcript.toLowerCase();
   // testBtn.disabled = false
   // testBtn.textContent = 'record...'
 };

+recognition.onspeechend = function () {
+  // recognition.stop();
+  // testBtn.disabled = false;
+  // testBtn.textContent = 'Start new test';
+};
+
+recognition.onerror = function (event) {
+  testBtn.disabled = false;
+  testBtn.textContent = 'Start new test';
+  diagnosticPara.textContent = 'Error occurred in recognition: ' + event.error;
+};
+
+recognition.onaudiostart = function (event) {
+  // Fired when the user agent has started to capture audio.
+};
+
+recognition.onaudioend = function (event) {
+};
+
+recognition.onend = function (event) {
+  // Fired when the speech recognition service has disconnected.
+};
+
+recognition.onnomatch = function (event) {
+  // Fired when the speech recognition service returns a final result with no significant recognition. This may involve some degree of recognition, which doesn't meet or exceed the confidence threshold.
+  // console.log('SpeechRecognition.onnomatch')
+};
+
+recognition.onsoundstart = function (event) {
+  // Fired when any sound — recognisable speech or not — has been detected.
+};
+
+recognition.onsoundend = function (event) {
+  // Fired when any sound — recognisable speech or not — has stopped being detected.
+};
+
+recognition.onspeechstart = function (event) {
+  // Fired when sound that is recognised by the speech recognition service as speech has been detected.
+};
+
+recognition.onstart = function (event) {
+  // Fired when the speech recognition service has begun listening to incoming audio with intent to recognize grammars associated with the current SpeechRecognition.
+};
+// }
+// #endregion
+
+// #region global functions
 function processSpeech (speechResult) {
   console.log('To dialogflow: ' + speechResult);
   ws.send(speechResult);
 }
 }
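Note: the handlers added above cover the full SpeechRecognition lifecycle; for a typical utterance they fire in the order audiostart, soundstart, speechstart, result, speechend, soundend, audioend, end. The onend handler is left empty in this commit; a common pattern, shown here only as a hedged sketch, is to restart recognition there for continuous listening:

// Hypothetical continuous-listening pattern (not in this commit):
// restart the recognizer whenever the service disconnects.
var keepListening = true; // illustrative flag
recognition.onend = function () {
  if (keepListening) {
    recognition.start();
  }
};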


-// #region speech recognition event
-recognition.onspeechend = function () {
-  // recognition.stop();
-  // testBtn.disabled = false;
-  // testBtn.textContent = 'Start new test';
-};
-
-recognition.onerror = function (event) {
-  testBtn.disabled = false;
-  testBtn.textContent = 'Start new test';
-  diagnosticPara.textContent = 'Error occurred in recognition: ' + event.error;
-};
-
-recognition.onaudiostart = function (event) {
-  // Fired when the user agent has started to capture audio.
-};
-
-recognition.onaudioend = function (event) {
-};
-
-recognition.onend = function (event) {
-  // Fired when the speech recognition service has disconnected.
-};
-
-recognition.onnomatch = function (event) {
-  // Fired when the speech recognition service returns a final result with no significant recognition. This may involve some degree of recognition, which doesn't meet or exceed the confidence threshold.
-  // console.log('SpeechRecognition.onnomatch')
-};
-
-recognition.onsoundstart = function (event) {
-  // Fired when any sound — recognisable speech or not — has been detected.
-};
-
-recognition.onsoundend = function (event) {
-  // Fired when any sound — recognisable speech or not — has stopped being detected.
-};
-
-recognition.onspeechstart = function (event) {
-  // Fired when sound that is recognised by the speech recognition service as speech has been detected.
-};
-
-recognition.onstart = function (event) {
-  // Fired when the speech recognition service has begun listening to incoming audio with intent to recognize grammars associated with the current SpeechRecognition.
-};
-// }
-// #endregion
+function startDemenzScreening () {
+  // ws.send('starte demenz test');
+  startQuestion(2);
+  testBtn.disabled = true;
+  testBtn.textContent = 'Test in progress';
+  infoPara.textContent = 'wait...';
+  diagnosticPara.textContent = 'detecting...';
+}
+
+function testSpeechOut () {
+  answerQuery = 'apfel wiese tisch apfel lampe pferd';
+  question = 1;
+  for (let i = 0; i < 2; i++) {
+    var tokens = answerQuery.split(new RegExp(separators.join('|'), 'g'));
+    questionPoints[question] += calculatePoints(tokens, QUESTION_ONE_ANSWERS);
+  }
+
+  console.log(questionPoints[question]);
+
+  // speechsynth.text = 'test 123';
+  // speechsynth.volume = 1;
+  // speechsynth.rate = 1;
+  // console.log(speechsynth);
+  // window.speechSynthesis.speak(speechsynth);
+  // console.log(window.speechSynthesis);
+}
+
+function speak (sentence) {
+  speechsynth.text = sentence;
+  window.speechSynthesis.speak(speechsynth);
+}
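Note: speak() relies on a speechsynth utterance object created elsewhere in the file (the commented-out test lines in testSpeechOut set its text, volume, and rate). A minimal setup that would satisfy it, with the German locale assumed from the app's German prompts:

// Assumed initialization for the speechsynth object used by speak().
// Only the variable name comes from this file; the settings are guesses.
var speechsynth = new SpeechSynthesisUtterance();
speechsynth.lang = 'de-DE'; // prompts like 'starte demenz test' are German
speechsynth.volume = 1;
speechsynth.rate = 1;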


 function calculatePoints (tokens, d) {
   let points = 0;
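Note: the diff view cuts calculatePoints off here. From its call sites in testSpeechOut, it takes the token list and an answer dictionary d and returns points that are added to questionPoints[question]. A hedged reconstruction consistent with those call sites (the separator list, the answer set, and the one-point-per-match rule are all assumptions):

// Hypothetical reconstruction from the call sites only; the real
// implementation is truncated in this diff.
var separators = [' ']; // assumed: split the transcript on spaces
var QUESTION_ONE_ANSWERS = ['apfel', 'wiese', 'tisch', 'lampe', 'pferd']; // assumed
function calculatePointsSketch (tokens, d) {
  let points = 0;
  for (const token of tokens) {
    if (d.indexOf(token) !== -1) {
      points += 1; // one point per expected word heard
    }
  }
  return points;
}

Under this sketch the duplicated 'apfel' in the test string would score twice per pass; the real rule may deduplicate.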
