-
Notifications
You must be signed in to change notification settings - Fork 4
/
index.js
181 lines (143 loc) · 5.46 KB
/
index.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
'use strict';
// Project-local wrapper around the Google Assistant gRPC client.
const GoogleAssistant = require('./googleassistant.js');
const net = require('net');
/* [DEBUG] Save audio chunks
const FileWriter = require('wav').FileWriter;
var stream = require('stream');
*/
// --- TCP server configuration ---
const HOST = '0.0.0.0';
const PORT = 4000;
// --- Google Assistant audio configuration ---
// NOTE(review): encoding value 1 presumably maps to LINEAR16 in the
// Assist API enums — confirm against embedded_assistant_pb.
const inputSampleRate = 16000;
const inputEncoding = 1;
const outputSampleRate = 16000;
const outputEncoding = 1;
const assistant = GoogleAssistant.assistant;
const embedded_assistant_pb = GoogleAssistant.embedded_assistant_pb;
// Handle one TCP client: stream its raw audio to a Google Assistant
// gRPC `assist()` conversation and relay transcripts, device actions
// and the final response JSON back over the same TCP socket.
const server = net.createServer(function (client) {
  console.log('Client connected:' + client.remoteAddress + ':' + client.remotePort);
  client.setTimeout(3000);
  client.setKeepAlive(true);

  // --- Build the AssistConfig sent as the first gRPC message ---
  const config = new embedded_assistant_pb.AssistConfig();
  // Input audio format
  config.setAudioInConfig(new embedded_assistant_pb.AudioInConfig());
  config.getAudioInConfig().setEncoding(inputEncoding);
  config.getAudioInConfig().setSampleRateHertz(inputSampleRate);
  // Output audio format
  config.setAudioOutConfig(new embedded_assistant_pb.AudioOutConfig());
  config.getAudioOutConfig().setEncoding(outputEncoding);
  config.getAudioOutConfig().setSampleRateHertz(outputSampleRate);
  config.getAudioOutConfig().setVolumePercentage(100);
  config.setDialogStateIn(new embedded_assistant_pb.DialogStateIn());
  config.setDeviceConfig(new embedded_assistant_pb.DeviceConfig());
  config.getDialogStateIn().setLanguageCode(assistant.locale);
  config.getDeviceConfig().setDeviceId(assistant.deviceInstanceId);
  config.getDeviceConfig().setDeviceModelId(assistant.deviceModelId);

  const request = new embedded_assistant_pb.AssistRequest();
  request.setConfig(config);
  // The first message must carry config only — make sure no audio field rides along.
  delete request.audio_in;

  const conversation = assistant.client.assist();
  // Send config to GA
  conversation.write(request);

  // Accumulates the pieces of the assistant's answer for this connection.
  const response = {};

  /* GA data start */
  conversation.on('data', (data) => {
    if (data.device_action) {
      // Device action payload (JSON string from the Assistant)
      response.deviceAction = JSON.parse(data.device_action.device_request_json);
    } else if (data.dialog_state_out && data.dialog_state_out.supplemental_display_text) {
      // BUGFIX: original tested `dialog_state_out !== null`, which passes
      // for `undefined` and then crashed dereferencing it.
      response.text = data.dialog_state_out.supplemental_display_text;
      response.microphone_mode = data.dialog_state_out.microphone_mode;
      response.conversation_state = data.dialog_state_out.conversation_state;
    } else if (Array.isArray(data.speech_results)
        && data.speech_results.length === 1
        && data.speech_results[0].stability === 1) {
      // BUGFIX: original compared `data.speech_results != []`, which is
      // always true (object identity), and crashed reading `.length`
      // when the field was absent. Stability 1 means a final transcript.
      response.transcript = data.speech_results[0].transcript;
    } else if (data.event_type === "END_OF_UTTERANCE") {
      // End of utterance: tell the client to stop streaming audio.
      client.write(Buffer.from("0END_OF_UTTERANCE"));
      console.log("END_OF_UTTERANCE");
    }
    //[DEBUG] AssistResponse
    console.log(data);
  });

  conversation.on('end', () => {
    // Send the accumulated response JSON to the client, framed with markers
    // so the client can find it inside the byte stream.
    client.write(Buffer.concat([
      Buffer.from("START_JSON"),
      Buffer.from(JSON.stringify(response)),
      Buffer.from("STOP_JSON"),
    ]));
  });

  conversation.on('error', (error) => {
    console.log(error);
  });
  /* GA data end */

  /* Server start */
  client.on('data', function (data) {
    // Forward incoming audio to GA in chunks of at most 1024 bytes.
    // BUGFIX: the original `while (true)` loop sent one empty audio
    // request when a zero-length buffer arrived; this loop sends nothing.
    const CHUNK_SIZE = 1024;
    for (let start = 0; start < data.length; start += CHUNK_SIZE) {
      const audio = new embedded_assistant_pb.AssistRequest();
      audio.setAudioIn(data.slice(start, start + CHUNK_SIZE));
      conversation.write(audio);
    }
  });

  client.on('error', (error) => {
    console.log(error);
  });

  client.on('end', function () {
    console.log('Client disconnect.');
    // Close the gRPC stream so GA finalizes the conversation.
    conversation.end();
    server.getConnections(function (err, count) {
      if (err) {
        console.error(JSON.stringify(err));
      } else {
        console.log('There are ' + count + ' connections.');
      }
    });
  });

  client.on('timeout', function () {
    console.log('Client request time out. ');
  });
  /* Server end */
});
// Register lifecycle handlers BEFORE listen(): the original attached
// them inside the listen callback, which never fires on a bind failure,
// so errors such as EADDRINUSE went unhandled and crashed the process.
server.on('close', function () {
  console.log('TCP server socket is closed.');
});
server.on('error', function (error) {
  console.error(JSON.stringify(error));
});
// TCP server listening
server.listen(PORT, HOST, function () {
  console.log('TCP server listen on address : ' + HOST + ':' + PORT);
});