Browse Source

additional fix for recognition

style change
master
Niko 4 years ago
parent
commit
b56f6dfa22
3 changed files with 102 additions and 99 deletions
  1. 11
    12
      client/index.html
  2. 2
    2
      client/style.css
  3. 89
    85
      client/ws-client.js

+ 11
- 12
client/index.html View File

@@ -14,18 +14,17 @@
</head>

<body>
<h1>Speech Recognition</h1>
<p id=info>Press the button to activate microphone.</p>
<p id=query hidden>query</p>

<button>Start new test</button>
<button id="speechBtn">Speech</button>
<button onclick="getElementById('query').innerHTML = 'changed'">Change query</button>

<div>
<p class="server">Server offline</p>
<p class="output" style="font-style: italic;">... erkannte worte ...</p>
</div>
<h1>Digitaler Demenztest</h1>
<p id=info>Press the button to activate microphone.</p>
<button>Start new test</button>
<button id="speechBtn">Test Button</button>
<div>
<p class="server">Server offline</p>
<p class="quest">Question: </p>
<p class="output" style="font-style: italic;">Erkannte worte ...</p>
</div>

<script src="ws-client.js"></script>
</body>

+ 2
- 2
client/style.css View File

@@ -4,7 +4,7 @@ body, html {

html {
height: 100%;
background-color: teal;
background-color: rgb(65, 143, 143);
}

body {
@@ -19,7 +19,7 @@ h1, p {

div p {
padding: 20px;
background-color: rgba(0,0,0,0.2);
background-color: rgba(51, 51, 51, 0.2);
}

div {

+ 89
- 85
client/ws-client.js View File

@@ -87,7 +87,7 @@ var diagnosticPara = document.querySelector('.output');
var testBtn = document.querySelector('button');
var testBtn2 = document.getElementById('speechBtn');
var infoPara = document.getElementById('info');
var userPrompt = document.getElementById('query');
var questionNumDisplay = document.querySelector('.quest');
// #endregion

// websocket to communicate with the server
@@ -154,7 +154,7 @@ ws.onopen = function () {
ws.onmessage = function (payload) {
var dialogflowResult = JSON.parse(payload.data);
checkIntent(dialogflowResult);
document.querySelector('h1').innerHTML = dialogflowResult.intent.displayName;
// document.querySelector('h1').innerHTML = dialogflowResult.intent.displayName;
};
// #endregion

@@ -215,6 +215,7 @@ function checkIntent (result) {
// Begin the test question identified by `number`: switch the state
// machine into answering mode, record the current question, refresh the
// on-screen question counter, and kick off the question flow.
function startQuestion (number) {
  state = 'answer';
  question = number;
  questionNumDisplay.textContent = 'Question: ' + number;
  handleQuestion();
}

@@ -263,8 +264,11 @@ function readQuestionTwo () {
window.setTimeout(
function () {
recognition.stop();
console.log('recognition stopped');
handleAnswer(answerQuery);
window.setTimeout(
function () {
handleAnswer(answerQuery);
answerQuery = '';
}, 3000);
}, 6000);
recognition.start();
console.log('reocgnition started. Question: ' + question);
@@ -369,39 +373,6 @@ function handleAnswerToThirdQuestion (query) {
}
// #endregion

// #region global functions
// Kick off the dementia screening: tell the server to start, then put
// the UI into its in-progress state so a second run cannot be started.
function startDemenzScreening () {
  ws.send('starte demenz test');
  testBtn.textContent = 'Test in progress';
  testBtn.disabled = true;
  infoPara.textContent = 'wait...';
  diagnosticPara.textContent = 'detecting...';
}

// Speak `sentence` aloud by routing it through the shared
// SpeechSynthesisUtterance and handing it to the browser's TTS engine.
function speak (sentence) {
  const synth = window.speechSynthesis;
  speechsynth.text = sentence;
  synth.speak(speechsynth);
}

// Debug helper: scores a canned answer for question 1 twice (simulating
// two recognition passes) and logs the accumulated points, exercising
// the scoring path without any real speech input.
function testSpeechOut () {
  answerQuery = 'apfel wiese tisch apfel lampe pferd';
  question = 1;
  // The tokenization is loop-invariant — the original rebuilt the same
  // RegExp and re-split the same string on every iteration. Do it once.
  const tokens = answerQuery.split(new RegExp(separators.join('|'), 'g'));
  for (let i = 0; i < 2; i++) {
    questionPoints[question] += calculatePoints(tokens, QUESTION_ONE_ANSWERS);
  }
  console.log(questionPoints[question]);
}

// function recognizeSpeech () {
// if (state === 'answer') {
// var arr;
@@ -427,6 +398,7 @@ function testSpeechOut () {
// // recognition.grammars = speechRecognitionList;
// }

// #region speech recognition event
recognition.onresult = function (event) {
var last = event.results.length - 1;
var speechResult = event.results[last][0].transcript.toLowerCase();
@@ -439,7 +411,60 @@ recognition.onresult = function (event) {
// testBtn.disabled = false
// testBtn.textContent = 'record...'
};
1;
// Fired when recognisable speech stops; stopping/cleanup is currently
// handled elsewhere, so this is kept as a stub.
recognition.onspeechend = function () {
// recognition.stop();
// testBtn.disabled = false;
// testBtn.textContent = 'Start new test';
};

// Recognition failed: re-enable the start button and surface the error
// code to the user via the diagnostic paragraph.
recognition.onerror = function (event) {
testBtn.disabled = false;
testBtn.textContent = 'Start new test';
diagnosticPara.textContent = 'Error occurred in recognition: ' + event.error;
};

recognition.onaudiostart = function (event) {
// Fired when the user agent has started to capture audio.

};

// Fired when the user agent has finished capturing audio.
recognition.onaudioend = function (event) {

};

recognition.onend = function (event) {
// Fired when the speech recognition service has disconnected.

};

recognition.onnomatch = function (event) {
// Fired when the speech recognition service returns a final result with no significant recognition. This may involve some degree of recognition, which doesn't meet or exceed the confidence threshold.
// console.log('SpeechRecognition.onnomatch')
};

recognition.onsoundstart = function (event) {
// Fired when any sound — recognisable speech or not — has been detected.

};

recognition.onsoundend = function (event) {
// Fired when any sound — recognisable speech or not — has stopped being detected.

};

recognition.onspeechstart = function (event) {
// Fired when sound that is recognised by the speech recognition service as speech has been detected.

};
recognition.onstart = function (event) {
// Fired when the speech recognition service has begun listening to incoming audio with intent to recognize grammars associated with the current SpeechRecognition.

};
// }
// #endregion

// #region global functions
function processSpeech (speechResult) {
console.log('To dialogflow: ' + speechResult);
ws.send(speechResult);
@@ -489,58 +514,37 @@ function processSpeech (speechResult) {
}
}

// #region speech recognition event
recognition.onspeechend = function () {
// recognition.stop();
// testBtn.disabled = false;
// testBtn.textContent = 'Start new test';
};

recognition.onerror = function (event) {
testBtn.disabled = false;
testBtn.textContent = 'Start new test';
diagnosticPara.textContent = 'Error occurred in recognition: ' + event.error;
};

recognition.onaudiostart = function (event) {
// Fired when the user agent has started to capture audio.

};

recognition.onaudioend = function (event) {

};

recognition.onend = function (event) {
// Fired when the speech recognition service has disconnected.

};

recognition.onnomatch = function (event) {
// Fired when the speech recognition service returns a final result with no significant recognition. This may involve some degree of recognition, which doesn't meet or exceed the confidence threshold.
// console.log('SpeechRecognition.onnomatch')
};

recognition.onsoundstart = function (event) {
// Fired when any sound — recognisable speech or not — has been detected.

};

recognition.onsoundend = function (event) {
// Fired when any sound — recognisable speech or not — has stopped being detected.
// Start the screening locally (server notification is currently
// disabled): jump straight to question 2, then lock the UI so the user
// cannot trigger a second run while the test is in progress.
function startDemenzScreening () {
  // ws.send('starte demenz test');
  startQuestion(2);
  testBtn.textContent = 'Test in progress';
  testBtn.disabled = true;
  infoPara.textContent = 'wait...';
  diagnosticPara.textContent = 'detecting...';
}

};
function testSpeechOut () {
answerQuery = 'apfel wiese tisch apfel lampe pferd';
question = 1;
for (let i = 0; i < 2; i++) {
var tokens = answerQuery.split(new RegExp(separators.join('|'), 'g'));
questionPoints[question] += calculatePoints(tokens, QUESTION_ONE_ANSWERS);
}

recognition.onspeechstart = function (event) {
// Fired when sound that is recognised by the speech recognition service as speech has been detected.
console.log(questionPoints[question]);

};
recognition.onstart = function (event) {
// Fired when the speech recognition service has begun listening to incoming audio with intent to recognize grammars associated with the current SpeechRecognition.
// speechsynth.text = 'test 123';
// speechsynth.volume = 1;
// speechsynth.rate = 1;
// console.log(speechsynth);
// window.speechSynthesis.speak(speechsynth);
// console.log(window.speechSynthesis);
}

};
// }
// #endregion
// Voice the given sentence: load it into the shared utterance object,
// then queue it on the browser's speech-synthesis engine.
function speak (sentence) {
  const synth = window.speechSynthesis;
  speechsynth.text = sentence;
  synth.speak(speechsynth);
}

function calculatePoints (tokens, d) {
let points = 0;

Loading…
Cancel
Save