You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

ws-client.js 15KB

5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542
// #region web speech recognition api
// Fall back to the webkit-prefixed constructors (Chrome) when the
// unprefixed globals are missing; `var` hoisting keeps the
// `X || webkitX` pattern from throwing a ReferenceError.
var SpeechRecognition = SpeechRecognition || webkitSpeechRecognition;
var SpeechGrammarList = SpeechGrammarList || webkitSpeechGrammarList;
var SpeechRecognitionEvent = SpeechRecognitionEvent || webkitSpeechRecognitionEvent;
// #endregion
// #region state management
var state = ''; // dialog state: '', 'detect', 'answer' or 'quit'
var question = 0; // currently active question number (1..5)
var rePrompt = false; // user asked for more time at least once
var partTwo = false; // second pass of a two-part question is running
var questionThreeCount = 0; // index of the current digit sequence in question three
var strike = 0; // consecutive wrong answers in question three
// #endregion
// #region questions
// Spoken prompts (German) for the individual screening questions.
const QUESTION_ONE = 'Ich werde Ihnen jetzt langsam eine Liste mit Worten vorlesen. Danach wiederholen Sie bitte möglichst viele dieser Worte. Auf die Reihenfolge kommt es nicht an.';
const QUESTION_ONE_PT2 = 'Vielen Dank. Nun nenne ich Ihnen die gleichen 10 Worte ein zweites mal. Auch danach sollen Sie wieder möglichst viele Worte wiederholen';
const QUESTION_TWO = 'Nennen Sie mir bitte so viel Dinge wie möglich, die man im Supermarkt kaufen kann. Sie haben dafür eine Minute Zeit. Und Los';
const QUESTION_THREE = 'Ich werde Ihnen jetzt eine Zahlenreihe nennen, die Sie mir dann bitte in umgekehrter Reihenfolge wiederholen sollen. Wenn ich beispielsweise, vier - fünf sage, dann sagen Sie bitte, fünf - vier.';
// #endregion
// #region intents
// Dialogflow intent display names as configured in the agent.
const WELCOME_INTENT = 'Default Welcome Intent';
const WELCOME_FOLLOWUP_YES = 'Default Welcome Intent - yes';
const WELCOME_FOLLOWUP_NO = 'Default Welcome Intent - no';
const MORE_TIME = 'Add Time Intent';
const MORE_TIME_YES = 'Add Time Intent - yes';
const MORE_TIME_NO = 'Add Time Intent - no';
const QUIT_INTENT = 'Quit Intent';
const FALLBACK_INTENT = 'Default Fallback Intent';
const HELP_INTENT = 'Help Intent';
const CHOOSE_QUESTION = 'Frage_Starten';
const NEXT_QUESTION = 'Nächste Frage';
// #endregion
// #region questions and expected results
// Scoring table for question one: value = points awarded per recognized word.
const QUESTION_ONE_ANSWERS = { 'teller': 1, 'hund': 1, 'lampe': 1, 'brief': 1, 'apfel': 1, 'apfelwiese': 2, 'apfelbaum': 2, 'und': 1, 'hose': 1, 'tisch': 1, 'wiese': 1, 'glas': 1, 'baum': 1 };
// The 10 words read aloud to the user in question one.
const QUESTION_ONE_QUESTIONS = ['teller', 'hund', 'lampe', 'brief', 'apfel', 'hose', 'tisch', 'wiese', 'glas', 'baum'];
// Filled asynchronously from 'lebensmittel.txt' by LoadQuestionTwo().
const QUESTION_TWO_ANSWERS = {};
var QUESTION_TWO_QUESTIONS;
// Digit sequences spoken to the user (PT2 = alternate set after a miss)
// and the expected reversed answers, index-aligned with the questions.
const QUESTION_THREE_QUESTIONS_PT1 = ['7, 2', '4, 7, 9', '5, 4, 9, 6', '2, 7, 5, 3, 6', '8, 1, 3, 5, 4, 2'];
const QUESTION_THREE_QUESTIONS_PT2 = ['8, 6', '3, 1, 5', '1, 9, 7, 4', '1, 3, 5, 4, 8', '4, 1, 2, 7, 9, 5'];
const QUESTION_THREE_ANSWERS_PT1 = ['27', '974', '6945', '63572', '245318'];
const QUESTION_THREE_ANSWERS_PT2 = ['68', '513', '4791', '84531', '597214'];
  42. LoadQuestionTwo();
  43. function LoadQuestionTwo () {
  44. var xmlhttp;
  45. if (window.XMLHttpRequest) { // code for IE7+, Firefox, Chrome, Opera, Safari
  46. xmlhttp = new XMLHttpRequest();
  47. } else { // code for IE6, IE5
  48. xmlhttp = new ActiveXObject('Microsoft.XMLHTTP');
  49. }
  50. xmlhttp.onreadystatechange = function () {
  51. if (xmlhttp.readyState === 4 && xmlhttp.status === 200) {
  52. var text = xmlhttp.responseText.toLowerCase();
  53. // Now convert it into array using regex
  54. QUESTION_TWO_QUESTIONS = text.split('\r\n');
  55. for (let word of QUESTION_TWO_QUESTIONS) {
  56. QUESTION_TWO_ANSWERS[word] = 1;
  57. }
  58. }
  59. };
  60. xmlhttp.open('GET', 'lebensmittel.txt', true);
  61. xmlhttp.send();
  62. }
// #endregion
// #region points
// Per-question scores, filled in by the handleAnswerTo* functions.
var questionOnePoints = 0;
var questionTwoPoints = 0;
var questionThreePoints = 0;
var questionFourPoints = 0;
var questionFivePoints = 0;
var questionSixPoints = 0;
// #endregion
// tokenization
// Regex alternatives (pre-escaped) used to split a transcript into words.
const separators = [' ', '\\\+', '-', '\\\(', '\\\)', '\\*', '/', ':', '\\\?'];
// Timers
var timerId; // silence-timeout handle; cleared whenever new speech arrives
// #region html elements
var serverPara = document.querySelector('.server'); // server status banner
var diagnosticPara = document.querySelector('.output'); // live transcript output
var testBtn = document.querySelector('button'); // starts the screening
var testBtn2 = document.getElementById('speechBtn'); // speech-synthesis test button
var infoPara = document.getElementById('info');
var userPrompt = document.getElementById('query');
// #endregion
// websocket to communicate with the server
var ws = new WebSocket('ws://' + window.location.host + window.location.pathname + 'ws');
// #region speech recognition initialization
var recognition = new SpeechRecognition();
recognition.lang = 'de-DE';
// recognition.interimResults = false;
recognition.maxAlternatives = 1;
recognition.continuous = true;
var answerQuery = ''; // accumulated transcript for the current answer
var skipRecording = false; // suppress recognition after the next utterance ends
// #endregion
// #region speech synthesis initialization
var speechsynth = new SpeechSynthesisUtterance(); // shared utterance used by speak()
var listSpeechsynth = new SpeechSynthesisUtterance(); // slow-rate utterance for word lists
var voices; // populated in onvoiceschanged
// #endregion
  100. // #region speech events
  101. window.speechSynthesis.onvoiceschanged = function () {
  102. voices = window.speechSynthesis.getVoices();
  103. voices.forEach(element => {
  104. if (element.name === 'Google Deutsch') {
  105. speechsynth.voice = element;
  106. listSpeechsynth.voice = element;
  107. }
  108. });
  109. listSpeechsynth.rate = 0.7;
  110. };
  111. speechsynth.onend = function (event) {
  112. switch (question) {
  113. case 1:
  114. break;
  115. case 2:
  116. break;
  117. case 3:
  118. break;
  119. case 4:
  120. break;
  121. case 5:
  122. break;
  123. }
  124. if (!skipRecording) {
  125. recognizeSpeech();
  126. }
  127. skipRecording = false;
  128. diagnosticPara = '';
  129. console.log('global speech end');
  130. };
  131. // #endregion
  132. // #region websocket events
  133. ws.onopen = function () {
  134. serverPara.style.background = 'green';
  135. serverPara.innerHTML = 'Server online';
  136. };
  137. ws.onmessage = function (payload) {
  138. var dialogflowResult = JSON.parse(payload.data);
  139. checkIntent(dialogflowResult);
  140. document.querySelector('h1').innerHTML = dialogflowResult.intent.displayName;
  141. };
  142. // #endregion
  143. // INTENT HANDLING
  144. function checkIntent (result) {
  145. switch (result.intent.displayName) {
  146. case QUIT_INTENT:
  147. state = 'quit';
  148. if (timerId !== undefined) {
  149. clearTimeout(timerId);
  150. }
  151. skipRecording = true;
  152. speak('Beende die Durchführung.');
  153. break;
  154. case WELCOME_INTENT:
  155. state = 'detect';
  156. speak(result.fulfillmentText);
  157. break;
  158. case WELCOME_FOLLOWUP_YES:
  159. startQuestion(1);
  160. break;
  161. case WELCOME_FOLLOWUP_NO:
  162. skipRecording = true;
  163. speak('Okay, Danke fürs Benutzen.');
  164. break;
  165. case MORE_TIME:
  166. state = 'detect';
  167. speak('Brauchen Sie noch etwas Zeit?');
  168. break;
  169. case MORE_TIME_YES:
  170. rePrompt = true;
  171. state = 'answer';
  172. speak('Alles klar');
  173. break;
  174. case MORE_TIME_NO:
  175. skipRecording = true;
  176. state = 'answer';
  177. speak('Verstanden');
  178. recognition.stop();
  179. ws.send(answerQuery);
  180. break;
  181. case CHOOSE_QUESTION:
  182. question = result.parameters.fields.num.numberValue;
  183. state = 'answer';
  184. handleQuestion();
  185. break;
  186. case FALLBACK_INTENT:
  187. // if (state === 'answer') {
  188. // handleAnswer(result.queryText)
  189. // }
  190. break;
  191. default:
  192. break;
  193. }
  194. }
  195. // #region question handling
  196. function startQuestion (number) {
  197. question = number;
  198. state = 'answer';
  199. handleQuestion();
  200. }
  201. function handleQuestion () {
  202. switch (question) {
  203. case 1:
  204. skipRecording = true;
  205. speak(QUESTION_ONE);
  206. readQuestionOne();
  207. break;
  208. case 2:
  209. readQuestionTwo();
  210. break;
  211. case 3:
  212. readQuestionThree();
  213. break;
  214. case 4:
  215. break;
  216. case 5:
  217. break;
  218. }
  219. }
  220. function readQuestionOne () {
  221. for (let i = 0; i < QUESTION_ONE_QUESTIONS.length; i++) {
  222. let utterance = new SpeechSynthesisUtterance();
  223. utterance.voice = voices[2];
  224. utterance.rate = 0.75;
  225. utterance.text = QUESTION_ONE_QUESTIONS[i];
  226. window.speechSynthesis.speak(utterance);
  227. if (i === 9) {
  228. utterance.onend = function (event) {
  229. recognizeSpeech();
  230. };
  231. }
  232. }
  233. }
  234. function readQuestionTwo () {
  235. let utterance = new SpeechSynthesisUtterance();
  236. utterance.voice = voices[2];
  237. utterance.text = QUESTION_TWO;
  238. window.speechSynthesis.speak(utterance);
  239. utterance.onend = function (event) {
  240. window.setTimeout(
  241. function () {
  242. recognition.stop();
  243. handleAnswer(answerQuery);
  244. }, 60000);
  245. recognizeSpeech();
  246. };
  247. }
  248. function readQuestionThree () {
  249. recognition = false;
  250. speak('Dankeschön. Weiter geht es mit der nächsten Frage. ');
  251. let utterance = new SpeechSynthesisUtterance();
  252. utterance.voice = voices[2];
  253. utterance.text = QUESTION_THREE;
  254. window.speechSynthesis.speak(utterance);
  255. utterance.onend = function (event) {
  256. console.log('speach end');
  257. speak(QUESTION_THREE_QUESTIONS_PT1[questionThreeCount]);
  258. };
  259. utterance.onerror = function (event) {
  260. console.log('An error has occurred with the speech synthesis: ' + event.error);
  261. };
  262. }
  263. function handleAnswer (query) {
  264. switch (question) {
  265. case 1:
  266. handleAnswerToFirstQuestion(query);
  267. break;
  268. case 2:
  269. handleAnswerToSecondQuestion(query);
  270. break;
  271. case 3:
  272. handleAnswerToThirdQuestion(query);
  273. break;
  274. case 4:
  275. break;
  276. case 5:
  277. break;
  278. }
  279. }
  280. function handleAnswerToFirstQuestion (answer) {
  281. var tokens = answer.split(new RegExp(separators.join('|'), 'g'));
  282. questionOnePoints += calculatePoints(tokens, QUESTION_ONE_ANSWERS);
  283. if (partTwo) {
  284. partTwo = false;
  285. skipRecording = true;
  286. speak('Vielen Dank, nun geht es weiter mit der nächsten Frage');
  287. startQuestion(2);
  288. // state = 'detect'
  289. } else {
  290. rePrompt = false;
  291. skipRecording = true;
  292. speak(QUESTION_ONE_PT2);
  293. readQuestionOne(QUESTION_ONE);
  294. partTwo = true;
  295. }
  296. }
  297. function handleAnswerToSecondQuestion (answer) {
  298. var tokens = answer.split(new RegExp(separators.join('|'), 'g'));
  299. questionTwoPoints = calculatePoints(tokens, QUESTION_TWO_ANSWERS);
  300. startQuestion(3);
  301. // state = 'detect'
  302. }
  303. function handleAnswerToThirdQuestion (query) {
  304. speechsynth.rate = 0.87;
  305. query = query.replace(' ', '');
  306. let answerArray;
  307. let questionArray;
  308. if (!partTwo) {
  309. answerArray = QUESTION_THREE_ANSWERS_PT1;
  310. } else {
  311. answerArray = QUESTION_THREE_ANSWERS_PT2;
  312. }
  313. if (query === answerArray[questionThreeCount]) {
  314. strike = 0;
  315. partTwo = false;
  316. questionThreeCount++;
  317. questionThreePoints = questionThreeCount + 1;
  318. questionArray = QUESTION_THREE_QUESTIONS_PT1;
  319. } else {
  320. strike++;
  321. partTwo = true;
  322. questionArray = QUESTION_THREE_QUESTIONS_PT2;
  323. }
  324. if (strike === 2 || questionThreeCount === 5) {
  325. speechsynth.rate = 1;
  326. skipRecording = true;
  327. speak('weiter geht es mit der Nächsten Frage');
  328. startQuestion(4);
  329. return;
  330. }
  331. speak(questionArray[questionThreeCount]);
  332. console.log('count: ' + questionThreeCount + ', strike: ' + strike + ', points: ' + questionThreePoints);
  333. }
  334. // #endregion
  335. // #region global functions
  336. function startDemenzScreening () {
  337. ws.send('starte demenz test');
  338. testBtn.disabled = true;
  339. testBtn.textContent = 'Test in progress';
  340. infoPara.textContent = 'wait...';
  341. diagnosticPara.textContent = 'detecting...';
  342. }
  343. function speak (sentence) {
  344. speechsynth.text = sentence;
  345. window.speechSynthesis.speak(speechsynth);
  346. }
  347. function testSpeechOut () {
  348. console.log('click');
  349. speechsynth.text = 'test 123';
  350. speechsynth.volume = 1;
  351. speechsynth.rate = 1;
  352. console.log(speechsynth);
  353. window.speechSynthesis.speak(speechsynth);
  354. console.log(window.speechSynthesis);
  355. }
// Start the speech recognizer and wire up all of its event handlers.
// Each final transcript chunk is shown in the UI, sent to Dialogflow via
// the websocket, and buffered in answerQuery; a per-question silence
// timeout finally stops recognition and evaluates the buffered answer.
function recognizeSpeech () {
  // (Commented-out grammar-list setup retained from the original.)
  // if (state === 'answer') {
  // var arr;
  // switch (question) {
  // case 1:
  // arr = QUESTION_ONE_QUESTIONS;
  // break;
  // case 2:
  // // arr = QUESTION_TWO_QUESTIONS;
  // break;
  // case 3:
  // arr = [1, 2, 3, 4, 5, 6, 7, 8, 9];
  // break;
  // case 4:
  // break;
  // case 5:
  // break;
  // }
  // // var grammar = '#JSGF V1.0; grammar colors; public <color> = ' + arr.join(' | ') + ' ;';
  // // var speechRecognitionList = new SpeechGrammarList();
  // // speechRecognitionList.addFromString(grammar, 1);
  // // recognition.grammars = speechRecognitionList;
  // }
  recognition.start();
  console.log('reocgnition started. Question: ' + question);
  // Append the newest final transcript to the UI and process it.
  recognition.onresult = function (event) {
    var last = event.results.length - 1;
    var speechResult = event.results[last][0].transcript.toLowerCase();
    diagnosticPara.textContent += speechResult + ' ';
    // console.log('Confidence: ' + event.results[0][0].confidence)
    console.log('process: ' + speechResult);
    processSpeech(speechResult);
    // testBtn.disabled = false
    // testBtn.textContent = 'record...'
  };
  // Forward one transcript chunk to the server and (re)arm the silence
  // timer that eventually evaluates the accumulated answer.
  function processSpeech (speechResult) {
    console.log('To dialogflow: ' + speechResult);
    ws.send(speechResult);
    let timeOut;
    switch (question) {
      case 1:
        timeOut = 6500;
        break;
      case 2:
        // Question two runs on its own 60-second timer (readQuestionTwo);
        // just buffer the text and return without arming a timeout.
        answerQuery += speechResult;
        return;
      case 3:
        // The recognizer sometimes appends 'uhr' to digit sequences;
        // strip it before buffering.
        if (speechResult.includes('uhr')) {
          speechResult = speechResult.replace('uhr', '');
        }
        timeOut = 6500;
        break;
      case 4:
        // NOTE(review): timeOut stays undefined here, so the setTimeout
        // below fires immediately — presumably unfinished; verify.
        break;
      case 5:
        timeOut = 6500;
        break;
    }
    if (state === 'answer') {
      // New speech arrived: restart the silence countdown.
      if (timerId != undefined) {
        clearTimeout(timerId);
      }
      answerQuery += speechResult;
      timerId = window.setTimeout(
        function () {
          // if (!rePrompt) {
          // ws.send('ich brauche noch etwas Zeit')
          // } else {
          console.log('recording end. Evaluate: ' + answerQuery);
          handleAnswer(answerQuery);
          answerQuery = '';
          diagnosticPara.textContent = '';
          // }
          recognition.stop();
          console.log('timer fallback');
        }, timeOut);
    } else {
      // Not in answer mode: stop listening without evaluating.
      console.log('recording end.');
      recognition.stop();
    }
  }
  recognition.onspeechend = function () {
    // recognition.stop();
    // testBtn.disabled = false;
    // testBtn.textContent = 'Start new test';
  };
  // Surface recognition errors in the UI and re-enable the start button.
  recognition.onerror = function (event) {
    testBtn.disabled = false;
    testBtn.textContent = 'Start new test';
    diagnosticPara.textContent = 'Error occurred in recognition: ' + event.error;
  };
  recognition.onaudiostart = function (event) {
    // Fired when the user agent has started to capture audio.
  };
  recognition.onaudioend = function (event) {
  };
  recognition.onend = function (event) {
    // Fired when the speech recognition service has disconnected.
  };
  recognition.onnomatch = function (event) {
    // Fired when the speech recognition service returns a final result with no significant recognition. This may involve some degree of recognition, which doesn't meet or exceed the confidence threshold.
    // console.log('SpeechRecognition.onnomatch')
  };
  recognition.onsoundstart = function (event) {
    // Fired when any sound — recognisable speech or not — has been detected.
  };
  recognition.onsoundend = function (event) {
    // Fired when any sound — recognisable speech or not — has stopped being detected.
  };
  recognition.onspeechstart = function (event) {
    // Fired when sound that is recognised by the speech recognition service as speech has been detected.
  };
  recognition.onstart = function (event) {
    // Fired when the speech recognition service has begun listening to incoming audio with intent to recognize grammars associated with the current SpeechRecognition.
  };
}
  472. function calculatePoints (tokens, dict) {
  473. let points = 0;
  474. for (let word of tokens) {
  475. if (dict[word] !== undefined) {
  476. points += dict[word];
  477. }
  478. }
  479. return points;
  480. }
// #endregion
// Wire up the two buttons: main button starts the screening, the
// secondary 'speechBtn' runs the speech-synthesis smoke test.
testBtn.addEventListener('click', startDemenzScreening);
testBtn2.addEventListener('click', testSpeechOut);