1
- import { Injectable } from '@angular/core' ;
1
+ import { Injectable , NgZone } from '@angular/core' ;
2
2
import { environment } from '../../environments/environment' ;
3
+ import * as lodash from "lodash" ;
3
4
4
5
import { Observable } from 'rxjs/Observable' ;
5
6
import { BehaviorSubject } from 'rxjs/BehaviorSubject' ;
6
7
7
8
import { ApiAiClient } from 'api-ai-javascript' ;
8
9
import { Message } from '../model/message'
10
+ import { IWindow } from '../interface/iwindow'
9
11
10
12
@Injectable ( )
11
13
export class AiService {
12
14
  // Dialogflow agent access token, read from environment configuration.
  readonly token = environment.dialogflow.angularAIBot;

  // api-ai-javascript client authenticated with the token above.
  readonly client = new ApiAiClient({ accessToken: this.token });

  // Active webkitSpeechRecognition instance; assigned in voiceConversation()
  // and stopped in destroyVoiceConversation(). Typed 'any' (vendor browser API).
  speechRecognition: any;

  // Stream of conversation messages; each emission is a single-element
  // array containing the latest message (see update()).
  conversation = new BehaviorSubject<Message[]>([]);
16
19
17
- constructor ( ) {
20
+ constructor ( private zone : NgZone ) {
18
21
}
19
22
20
- converse ( msg : string ) {
23
+ textConversation ( msg : string ) {
21
24
const userMessage = new Message ( msg , 'user' ) ;
22
25
this . update ( userMessage ) ;
23
26
return this . client . textRequest ( msg )
@@ -31,4 +34,51 @@ export class AiService {
31
34
update ( msg : Message ) {
32
35
this . conversation . next ( [ msg ] ) ;
33
36
}
37
+
38
+ voiceConversation ( ) : Observable < string > {
39
+ return Observable . create ( observer => {
40
+ const { webkitSpeechRecognition } : IWindow = < IWindow > window ;
41
+ this . speechRecognition = new webkitSpeechRecognition ( ) ;
42
+ this . speechRecognition . continuous = false ;
43
+ this . speechRecognition . interimResults = false ;
44
+ this . speechRecognition . lang = 'en-us' ;
45
+ this . speechRecognition . maxAlternatives = 0 ;
46
+
47
+ this . speechRecognition . onresult = speech => {
48
+ let sentence : string = "" ;
49
+ if ( speech . results ) {
50
+ var result = speech . results [ speech . resultIndex ] ;
51
+ var transcript = result [ 0 ] . transcript ;
52
+ if ( result . isFinal ) {
53
+ if ( result [ 0 ] . confidence < 0.1 ) {
54
+ console . log ( "Unrecognized result - Please try again" ) ;
55
+ }
56
+ else {
57
+ sentence = lodash . trim ( transcript ) ;
58
+ console . log ( "Did you said? -> " + sentence + " , If not then say something else..." ) ;
59
+ }
60
+ }
61
+ }
62
+ this . zone . run ( ( ) => {
63
+ observer . next ( sentence ) ;
64
+ } ) ;
65
+ } ;
66
+
67
+ this . speechRecognition . onerror = error => {
68
+ observer . error ( error ) ;
69
+ } ;
70
+
71
+ this . speechRecognition . onend = ( ) => {
72
+ observer . complete ( ) ;
73
+ } ;
74
+
75
+ this . speechRecognition . start ( ) ;
76
+ // console.log("I'm listening...");
77
+ } ) ;
78
+ }
79
+
80
+ destroyVoiceConversation ( ) {
81
+ if ( this . speechRecognition )
82
+ this . speechRecognition . stop ( ) ;
83
+ }
34
84
}
// (extraction artifact: GitHub "0 commit comments" page footer — not part of the source file)