diff --git a/example/app/alloy.js b/example/app/alloy.js
index a2c3d34..e69de29 100644
--- a/example/app/alloy.js
+++ b/example/app/alloy.js
@@ -1,17 +0,0 @@
-// The contents of this file will be executed before any of
-// your view controllers are ever executed, including the index.
-// You have access to all functionality on the `Alloy` namespace.
-//
-// This is a great place to do any initialization for your app
-// or create any global variables/functions that you'd like to
-// make available throughout your app. You can easily make things
-// accessible globally by attaching them to the `Alloy.Globals`
-// object. For example:
-//
-// Alloy.Globals.someGlobalFunction = function(){};
-
-(function(){
-
-
-})();
-
diff --git a/example/app/controllers/index.js b/example/app/controllers/index.js
index fc58cc7..fa752a9 100644
--- a/example/app/controllers/index.js
+++ b/example/app/controllers/index.js
@@ -1,3 +1,5 @@
+Ti.Media.audioSessionCategory = Titanium.Media.AUDIO_SESSION_CATEGORY_PLAY_AND_RECORD;
+
var TiSpeech = require('ti.speech');
TiSpeech.initialize();
@@ -5,24 +7,17 @@ var canRecordAudio = false;
var canUseSpeechRecognition = false;
var isRunning = false;
-if (!TiSpeech.isAvailable()) {
- alert('Speech recognition is not available on this device!');
-} else {
- TiSpeech.requestSpeechRecognizerAuthorization(function(e) {
- canUseSpeechRecognition = !!e.success;
- if (!e.success) {
- alert("Speech recognition was not authorized!");
- } else {
- TiSpeech.requestMicrophoneAuthorization(function(e) {
- canRecordAudio = !!e.success;
- if (!e.success) {
- alert("Permission to record audio was not authorized!");
- }
- enableButtons();
- });
- }
- });
-}
+
+Ti.Media.requestAudioRecorderPermissions(function(e){
+ if (e.success) {
+ if (!TiSpeech.isAvailable()) {
+ alert('Speech recognition is not available on this device!');
+ } else {
+ enableButtons();
+ }
+ }
+});
+
/**
* @function enableButtons
@@ -30,9 +25,9 @@ if (!TiSpeech.isAvailable()) {
* @since 1.0.0
*/
function enableButtons() {
- canUseSpeechRecognition && canRecordAudio && $.toggleLiveRecognitionButton.setEnabled(true);
- canUseSpeechRecognition && $.toggleAudioRecognitionButton.setEnabled(true);
- canUseSpeechRecognition && $.toggleVideoRecognitionButton.setEnabled(true);
+ $.toggleLiveRecognitionButton.enabled = true;
+ $.toggleAudioRecognitionButton.enabled = true;
+ $.toggleVideoRecognitionButton.enabled = true;
}
/**
@@ -52,9 +47,9 @@ function stopRecognition() {
$.toggleAudioRecognitionButton.title = 'Start Listening to Audio File';
$.toggleVideoRecognitionButton.title = 'Start Listening to Video File';
- $.toggleLiveRecognitionButton.setEnabled(true);
- $.toggleAudioRecognitionButton.setEnabled(true);
- $.toggleVideoRecognitionButton.setEnabled(true);
+ $.toggleLiveRecognitionButton.enabled = true;
+ $.toggleAudioRecognitionButton.enabled = true;
+ $.toggleVideoRecognitionButton.enabled = true;
}
/**
@@ -78,7 +73,7 @@ function progressCallback(result) {
stopRecognition();
return;
} else {
- $.results.setText(result.value);
+ $.results.text = result.value;
}
if (result.finished) {
isRunning = false;
@@ -92,7 +87,7 @@ function toggleLiveRecognition(e) {
if (isRunning) {
stopRecognition();
} else {
- $.results.setText('Listening...');
+ $.results.text = 'Listening...';
var success = TiSpeech.startRecognition({
progress: progressCallback,
@@ -112,7 +107,7 @@ function toggleAudioRecognition(e) {
if (isRunning) {
stopRecognition();
} else {
- $.results.setText('Loading Audio File...');
+ $.results.text = 'Loading Audio File...';
var success = TiSpeech.startRecognition({
type: TiSpeech.SOURCE_TYPE_URL,
url: 'one_more_thing.mp3',
@@ -132,7 +127,7 @@ function toggleVideoRecognition(e) {
if (isRunning) {
stopRecognition();
} else {
- $.results.setText('Loading Video File...');
+ $.results.text = 'Loading Video File...';
var success = TiSpeech.startRecognition({
type: TiSpeech.SOURCE_TYPE_URL,
diff --git a/example/app/lib/ti.speech.js b/example/app/lib/ti.speech.js
index 1f7bc19..d2d0d18 100644
--- a/example/app/lib/ti.speech.js
+++ b/example/app/lib/ti.speech.js
@@ -9,21 +9,11 @@
* @version 1.0.0
* @since 1.0.0
*/
-
-var AVAudioEngine = require('AVFoundation/AVAudioEngine');
-var AVAudioSession = require('AVFoundation/AVAudioSession');
-var AVFoundation = require('AVFoundation');
-var NSBundle = require('Foundation/NSBundle');
-var NSError = require('Foundation/NSError');
-var NSLocale = require('Foundation/NSLocale');
-var NSURL = require('Foundation/NSURL');
-var SFSpeechAudioBufferRecognitionRequest = require('Speech/SFSpeechAudioBufferRecognitionRequest');
-var SFSpeechRecognitionRequest = require('Speech/SFSpeechRecognitionRequest');
-var SFSpeechRecognitionResult = require('Speech/SFSpeechRecognitionResult');
-var SFSpeechRecognitionTask = require('Speech/SFSpeechRecognitionTask');
-var SFSpeechRecognizer = require('Speech/SFSpeechRecognizer');
-var SFSpeechURLRecognitionRequest = require('Speech/SFSpeechURLRecognitionRequest');
-var Speech = require('Speech');
+import { AVAudioEngine } from 'AVFAudio';
+import { AVAudioSession, AVFoundation } from 'AVFoundation';
+import { NSBundle, NSError, NSLocale, NSURL } from 'Foundation';
+import { SFSpeechAudioBufferRecognitionRequest, SFSpeechRecognitionRequest,SFSpeechRecognitionResult,
+SFSpeechRecognitionTask, SFSpeechRecognizer, SFSpeechURLRecognitionRequest, Speech } from 'Speech';
var audioEngine;
var request;
@@ -64,7 +54,7 @@ exports.initialize = function(locale) {
/**
* @function requestSpeechRecognizerAuthorization
* @summary Asks the user to grant your app permission to perform speech recognition.
- * @param {permissionCallback} callback - A function that is called when the authorization request has been approved or denied.
+ * @param {permissionCallback} callback - A function that is called when the authorization request has been approved or denied.
* @since 1.0.0
*/
exports.requestSpeechRecognizerAuthorization = function(callback) {
@@ -114,7 +104,7 @@ exports.requestSpeechRecognizerAuthorization = function(callback) {
/**
* @function requestMicrophoneAuthorization
* @summary Asks the user to grant your app permission to record audio using microphone.
- * @param {permissionCallback} callback - A function that is called when the authorization request has been approved or denied.
+ * @param {permissionCallback} callback - A function that is called when the authorization request has been approved or denied.
* @since 1.0.0
*/
exports.requestMicrophoneAuthorization = function(callback) {
@@ -161,7 +151,7 @@ exports.requestMicrophoneAuthorization = function(callback) {
/**
* Indicates whether the speech recognizer is available.
- * Even though a speech recognizer is supported for a specific locale,
+ * Even though a speech recognizer is supported for a specific locale,
* it might be unavailable for reasons such as a nonfunctioning Internet connection.
* @function isAvailable
* @summary Indicates whether the speech recognizer is available.
@@ -196,13 +186,13 @@ exports.isAvailable = function() {
exports.startRecognition = function(args) {
var progressCallback = args.progress || null;
var type = args.type;
-
+
if (!type && args.url) {
type = SOURCE_TYPE_URL;
} else if (!type) {
type = SOURCE_TYPE_MICROPHONE;
}
-
+
if (!progressCallback) {
Ti.API.error('No "progress" callback supplied - You will not be notified about transcription updates');
}
diff --git a/example/app/views/index.xml b/example/app/views/index.xml
index dd4a815..5b9cc5e 100644
--- a/example/app/views/index.xml
+++ b/example/app/views/index.xml
@@ -8,10 +8,9 @@
-
+
-
\ No newline at end of file
+
diff --git a/example/plugins/ti.alloy/hooks/alloy.js b/example/plugins/ti.alloy/hooks/alloy.js
index 04a2d33..71f7df8 100644
--- a/example/plugins/ti.alloy/hooks/alloy.js
+++ b/example/plugins/ti.alloy/hooks/alloy.js
@@ -1,11 +1,11 @@
/**
* Alloy
- * Copyright (c) 2012 by Appcelerator, Inc. All Rights Reserved.
+ * Copyright TiDev, Inc. 04/07/2022-Present
* See LICENSE for more information on licensing.
*/
exports.cliVersion = '>=3.X';
-exports.version = '1.0.0';
+exports.version = '1.0.1';
var SILENT = true;
exports.init = function (logger, config, cli, appc) {
@@ -20,9 +20,9 @@ exports.init = function (logger, config, cli, appc) {
spawn = require('child_process').spawn,
parallel = appc.async.parallel;
- if(!process.env.sdk) {
- process.env.sdk = cli.sdk.name;
- }
+ if (!process.env.sdk) {
+ process.env.sdk = cli.sdk.name;
+ }
function run(deviceFamily, deployType, target, finished, silent) {
var appDir = path.join(cli.argv['project-dir'], 'app');
@@ -50,12 +50,16 @@ exports.init = function (logger, config, cli, appc) {
deploytype: deployType || cli.argv['deploy-type'] || 'development',
target: target
};
- if(silent) {
+ if (silent) {
// turn off all logging output for code analyzer build hook
config.noBanner = 'true';
config.logLevel = '-1';
}
+ if (cli.argv.theme) {
+ config.theme = cli.argv.theme;
+ }
+
config = Object.keys(config).map(function (c) {
return c + '=' + config[c];
}).join(',');
@@ -119,6 +123,12 @@ exports.init = function (logger, config, cli, appc) {
};
}), function () {
+ if (!paths.alloy) {
+ logger.error('The alloy CLI is not installed');
+ logger.error('Please install it with [sudo] npm i alloy -g');
+ process.exit(1);
+ }
+
// compose alloy command execution
var cmd = [paths.node, paths.alloy, 'compile', appDir, '--config', config];
if (cli.argv['no-colors'] || cli.argv['color'] === false) { cmd.push('--no-colors'); }
@@ -145,17 +155,16 @@ exports.init = function (logger, config, cli, appc) {
if (process.platform === 'win32' && paths.alloy === 'alloy.cmd') {
cmd.shift();
logger.info(__('Executing Alloy compile: %s',
- ['cmd','/s','/c'].concat(cmd).join(' ').cyan));
+ ['cmd', '/s', '/c'].concat(cmd).join(' ').cyan));
// arg processing from https://github.com/MarcDiethelm/superspawn
child = spawn('cmd', [['/s', '/c', '"' +
cmd.map(function(a) {
- if (/^[^"].* .*[^"]/.test(a)) return '"'+a+'"'; return a;
- }).join(" ") + '"'].join(" ")], {
- stdio: 'inherit',
- windowsVerbatimArguments: true
- }
- );
+ if (/^[^"].* .*[^"]/.test(a)) return '"' + a + '"'; return a;
+ }).join(' ') + '"'].join(' ')], {
+ stdio: 'inherit',
+ windowsVerbatimArguments: true
+ });
} else {
logger.info(__('Executing Alloy compile: %s', cmd.join(' ').cyan));
child = spawn(cmd.shift(), cmd);
@@ -175,8 +184,8 @@ exports.init = function (logger, config, cli, appc) {
} else {
logger.info(__('Alloy compiler completed successfully'));
- afs.exists(path.join(cli.argv["project-dir"], 'build', 'i18n')) && process.argv.push('--i18n-dir', 'build');
- afs.exists(path.join(cli.argv["project-dir"], 'build', 'platform')) && (cli.argv['platform-dir'] = 'build/platform');
+ afs.exists(path.join(cli.argv['project-dir'], 'build', 'i18n')) && process.argv.push('--i18n-dir', 'build');
+ afs.exists(path.join(cli.argv['project-dir'], 'build', 'platform')) && (cli.argv['platform-dir'] = 'build/platform');
}
finished();
});
@@ -191,8 +200,4 @@ exports.init = function (logger, config, cli, appc) {
run(build.deviceFamily, deployType, target, finished);
});
-
- cli.addHook('codeprocessor.pre.run', function (build, finished) {
- run('none', 'development', undefined, finished, SILENT);
- });
};
diff --git a/example/plugins/ti.alloy/hooks/deepclean.js b/example/plugins/ti.alloy/hooks/deepclean.js
index ed742a5..f6bfc2d 100644
--- a/example/plugins/ti.alloy/hooks/deepclean.js
+++ b/example/plugins/ti.alloy/hooks/deepclean.js
@@ -1,6 +1,6 @@
/**
* Alloy
- * Copyright (c) 2014 by Appcelerator, Inc. All Rights Reserved.
+ * Copyright TiDev, Inc. 04/07/2022-Present
* See LICENSE for more information on licensing.
*/
@@ -13,7 +13,7 @@ exports.init = function (logger, config, cli, appc) {
afs = appc.fs;
function run(finished) {
- if(cli.argv['shallow'] === '') {
+ if (cli.argv['shallow'] === '') {
logger.info('Not cleaning the Resources directory');
finished();
return;
@@ -46,8 +46,7 @@ function rmdir(dirPath, fs, path, logger, removeSelf) {
var files;
try {
files = fs.readdirSync(dirPath);
- }
- catch(e) {
+ } catch (e) {
return;
}
if (files.length > 0) {
diff --git a/example/tiapp.xml b/example/tiapp.xml
index cc331c2..c730c57 100644
--- a/example/tiapp.xml
+++ b/example/tiapp.xml
@@ -50,9 +50,7 @@
- hyperloop
- ti.cloud
- com.appcelerator.apm
+ hyperloop
false
@@ -60,9 +58,8 @@
true
false
- 6.0.3.GA
+ 12.0.0.GA
ti.alloy
- hyperloop
-
\ No newline at end of file
+
diff --git a/ti.speech.js b/ti.speech.js
deleted file mode 120000
index e182a2d..0000000
--- a/ti.speech.js
+++ /dev/null
@@ -1 +0,0 @@
-./example/app/lib/ti.speech.js
\ No newline at end of file
diff --git a/ti.speech.js b/ti.speech.js
new file mode 100755
index 0000000..d2d0d18
--- /dev/null
+++ b/ti.speech.js
@@ -0,0 +1,350 @@
+'use strict';
+/***
+ * @file Use Speech Recognition functionality in iOS 10
+ * @module ti.speech
+ * @author Hans Knöchel
+ * @author Brenton House
+ * @requires Hyperloop
+ * @requires Speech
+ * @version 1.0.0
+ * @since 1.0.0
+ */
+import { AVAudioEngine } from 'AVFAudio';
+import { AVAudioSession, AVFoundation } from 'AVFoundation';
+import { NSBundle, NSError, NSLocale, NSURL } from 'Foundation';
+import { SFSpeechAudioBufferRecognitionRequest, SFSpeechRecognitionRequest,SFSpeechRecognitionResult,
+SFSpeechRecognitionTask, SFSpeechRecognizer, SFSpeechURLRecognitionRequest, Speech } from 'Speech';
+
+var audioEngine;
+var request;
+var recognitionTask;
+var speechRecognizer;
+var SOURCE_TYPE_URL = 'url';
+var SOURCE_TYPE_MICROPHONE = 'microphone';
+
+/**
+ * @function initialize
+ * @summary Creates a speech recognizer for the specified locale, if supported.
+ * @param {string} locale - Locale to use for initializing speech recognizer
+ * @since 1.0.0
+ */
+exports.initialize = function(locale) {
+ if (speechRecognizer) {
+ speechRecognizer = null;
+ // Can't delete local variable in strict mode
+ // delete speechRecognizer;
+ }
+
+ if (locale) {
+ speechRecognizer = SFSpeechRecognizer.alloc().initWithLocale(NSLocale.alloc().initWithLocaleIdentifier(locale));
+ } else {
+ speechRecognizer = new SFSpeechRecognizer();
+ }
+};
+
+/**
+ * Callback used for reporting success of requesting permissions for features
+ * @callback permissionCallback
+ * @param {object} param - Object that contains info about the success of the request
+ * @param {string} param.message - Friendly message regarding the success or failure of request
+ * @param {number} param.status - Status of the permission request as returned from the OS
+ * @param {boolean} param.success - Value is true, if request was successful, otherwise false
+ */
+
+/**
+ * @function requestSpeechRecognizerAuthorization
+ * @summary Asks the user to grant your app permission to perform speech recognition.
+ * @param {permissionCallback} callback - A function that is called when the authorization request has been approved or denied.
+ * @since 1.0.0
+ */
+exports.requestSpeechRecognizerAuthorization = function(callback) {
+ SFSpeechRecognizer.requestAuthorization(function(status) {
+ var success = false;
+ var message = '';
+
+ switch (status) {
+ case Speech.SFSpeechRecognizerAuthorizationStatusAuthorized:
+ // User gave access to speech recognition
+ message = 'User gave access to speech recognition';
+ success = true;
+ break;
+
+ case Speech.SFSpeechRecognizerAuthorizationStatusDenied:
+ // User denied access to speech recognition
+ message = 'User denied access to speech recognition';
+ break;
+
+ case Speech.SFSpeechRecognizerAuthorizationStatusRestricted:
+ // Speech recognition restricted on this device
+ message = 'Speech recognition restricted on this device';
+ break;
+
+ case Speech.SFSpeechRecognizerAuthorizationStatusNotDetermined:
+ // Speech recognition not yet authorized
+ message = 'Speech recognition not yet authorized';
+ break;
+
+ default:
+ // Should not be here. Issue should be resolved in Hyperloop 2.0.2.
+ message = 'Something has gone wrong requesting Speech Recogniction authorization';
+ break;
+ }
+
+ // TODO: Temporarily setting success to true until Hyperloop 2.0.2, https://jira.appcelerator.org/browse/TIMOB-23902
+ success = true;
+
+ callback({
+ success: success,
+ message: message,
+ status: status,
+ });
+ });
+};
+
+/**
+ * @function requestMicrophoneAuthorization
+ * @summary Asks the user to grant your app permission to record audio using microphone.
+ * @param {permissionCallback} callback - A function that is called when the authorization request has been approved or denied.
+ * @since 1.0.0
+ */
+exports.requestMicrophoneAuthorization = function(callback) {
+ var audioSession = new AVAudioSession();
+
+ audioSession.requestRecordPermission(function(status) {
+ var success = false;
+ var message = '';
+
+ switch (status) {
+ case AVFoundation.AVAudioSessionRecordPermissionGranted:
+ // Recording permission has been granted.
+ message = 'Recording permission has been granted.';
+ success = true;
+ break;
+
+ case AVFoundation.AVAudioSessionRecordPermissionDenied:
+ // Recording permission has been denied.
+ message = 'Recording permission has been denied.';
+ break;
+
+ case AVFoundation.SFSpeechRecognizerAuthorizationStatusRestricted:
+ // Recording permission has not been granted or denied. This typically means that permission has yet to be requested, or is in the process of being requested.
+ message = 'Recording permission has not been granted or denied. This typically means that permission has yet to be requested, or is in the process of being requested.';
+ break;
+
+ default:
+ // Should not be here. Issue should be resolved in Hyperloop 2.0.2.
+ message = 'Something has gone wrong while requesting authorization to record';
+ break;
+ }
+
+ // TODO: Temporarily setting success to true until Hyperloop 2.0.2, https://jira.appcelerator.org/browse/TIMOB-23902
+ success = true;
+
+ callback({
+ success: success,
+ message: message,
+ status: status,
+ });
+ });
+};
+
+
+/**
+ * Indicates whether the speech recognizer is available.
+ * Even though a speech recognizer is supported for a specific locale,
+ * it might be unavailable for reasons such as a nonfunctioning Internet connection.
+ * @function isAvailable
+ * @summary Indicates whether the speech recognizer is available.
+ * @since 1.0.0
+ * @returns {boolean} - A Boolean value that indicates whether the speech recognizer is available.
+ */
+exports.isAvailable = function() {
+ return speechRecognizer && speechRecognizer.isAvailable();
+};
+
+/**
+ * This callback is used to report progress on speech Recognition
+ * @callback progressCallback
+ * @param {object} param - Object that contains info about the state of the speech recognition
+ * @param {string} param.value - Text transcription of speech recognition
+ * @param {object} param.error - Contains any error returned from the speech recognition engine
+ * @param {number} param.state - Represents the state of the speech recognition engine
+ * @param {boolean} param.finished - Value is true, if recognition is finished, otherwise false
+ */
+
+
+/**
+ * @function startRecognition
+ * @summary Starts the speech recognition engine and begins processing
+ * @param {object} args - Parameters used to start speech recognition
+ * @param {string} [args.type=SOURCE_TYPE_MICROPHONE] - Indicates source for speech recognition (microphone or url)
+ * @param {string} [args.url] - Url for audio file to apply speech recognition to.
+ * @param {progressCallback} args.progress - Callback function used to report progress of speech recognition
+ * @since 1.0.0
+ * @returns {boolean} - Returns true if started successfully, otherwise false.
+ */
+exports.startRecognition = function(args) {
+ var progressCallback = args.progress || null;
+ var type = args.type;
+
+ if (!type && args.url) {
+ type = SOURCE_TYPE_URL;
+ } else if (!type) {
+ type = SOURCE_TYPE_MICROPHONE;
+ }
+
+ if (!progressCallback) {
+ Ti.API.error('No "progress" callback supplied - You will not be notified about transcription updates');
+ }
+
+ if (recognitionTask) {
+ recognitionTask.cancel();
+ recognitionTask = null;
+ // Can't delete local variable in strict mode
+ // delete recognitionTask;
+ }
+
+ if (request) {
+ request = null;
+ }
+
+ if (type == SOURCE_TYPE_URL) {
+ var url = args.url.split('.');
+ var ext = url.pop();
+ var soundPath = NSBundle.mainBundle.pathForResourceOfType(url.join('.'), ext);
+ var soundURL = NSURL.fileURLWithPath(soundPath);
+
+ request = SFSpeechURLRecognitionRequest.alloc().initWithURL(soundURL);
+ if (!request) {
+ console.error('Unable to created a SFSpeechURLRecognitionRequest object');
+ return false;
+ }
+
+ request.shouldReportPartialResults = true;
+
+ if (!speechRecognizer) {
+ exports.initialize();
+ }
+
+ recognitionTask = speechRecognizer.recognitionTaskWithRequestResultHandler(request, function(result, error) {
+
+ if (!recognitionTask) {
+ // The recognitionTask has already been cancelled.
+ return;
+ }
+
+ if (recognitionTask.state === Speech.SFSpeechRecognitionTaskStateCanceling) {
+ // The recognitionTask is being cancelled so no progress should be reported after this.
+ console.info('The speech recognition task has been cancelled.');
+ progressCallback &&
+ progressCallback({
+ error: error,
+ value: result && result.bestTranscription.formattedString,
+ state: recognitionTask.state,
+ finished: true,
+ });
+
+ progressCallback = null;
+ request = null;
+ recognitionTask = null;
+ return;
+ }
+
+ progressCallback &&
+ progressCallback({
+ error: error,
+ value: result && result.bestTranscription.formattedString,
+ state: recognitionTask.state,
+ finished: result && result.isFinal(),
+ });
+
+ if (error || (result && result.isFinal())) {
+ recognitionTask = null;
+ request = null;
+ return;
+ }
+ });
+
+ return true;
+ } else if (type == SOURCE_TYPE_MICROPHONE) {
+
+ if (!audioEngine) {
+ audioEngine = new AVAudioEngine();
+ }
+
+ if (!audioEngine.inputNode) {
+ console.error('Audio engine has no input node');
+ return false;
+ }
+
+ request = new SFSpeechAudioBufferRecognitionRequest();
+ request.shouldReportPartialResults = true;
+
+ // Create recognition task that will listen to live speech and send progress to callback
+ recognitionTask = speechRecognizer.recognitionTaskWithRequestResultHandler(request, function(result, error) {
+
+ progressCallback({
+ error: error,
+ value: result && result.bestTranscription.formattedString,
+ state: recognitionTask.state,
+ finished: result && result.isFinal(),
+ });
+
+ if (error || (result && result.isFinal())) {
+ if (audioEngine.isRunning()) {
+ audioEngine.stop();
+ }
+ if (request) {
+ request.endAudio();
+ }
+ audioEngine.inputNode.removeTapOnBus(0);
+ recognitionTask = null;
+ request = null;
+
+ return;
+ }
+ });
+
+ audioEngine.inputNode.installTapOnBusBufferSizeFormatBlock(0, 1024, audioEngine.inputNode.outputFormatForBus(0), function(buffer, when) {
+ request && request.appendAudioPCMBuffer(buffer);
+ });
+
+ audioEngine.prepare();
+ var audioEngineStartError = new NSError();
+ var audioEngineStartSuccess = audioEngine.startAndReturnError(audioEngineStartError);
+ if (!audioEngineStartSuccess) {
+ //TODO: Do something with audioEngineStartError
+ return false;
+ }
+
+ return true;
+ } else {
+ console.error('Unhandled type supplied:' + type);
+ return false;
+ }
+};
+
+/**
+ * @function stopRecognition
+ * @summary Forces speech recognition components to stop processing
+ * @since 1.0.0
+ */
+exports.stopRecognition = function() {
+ if (audioEngine && audioEngine.isRunning()) {
+ // if we are using the audioEngine for real-time audio, we need to stop components
+ audioEngine.stop();
+ request && request.endAudio();
+ audioEngine.inputNode.removeTapOnBus(0);
+ } else if (recognitionTask) {
+		// If we are using a file for audio recognition, we need to cancel the recognition task
+ recognitionTask.cancel();
+ }
+};
+
+exports.SOURCE_TYPE_URL = SOURCE_TYPE_URL;
+exports.SOURCE_TYPE_MICROPHONE = SOURCE_TYPE_MICROPHONE;
+exports.RECOGNITION_STATE_STARTING = Speech.SFSpeechRecognitionTaskStateStarting;
+exports.RECOGNITION_STATE_RUNNING = Speech.SFSpeechRecognitionTaskStateRunning;
+exports.RECOGNITION_STATE_FINISHING = Speech.SFSpeechRecognitionTaskStateFinishing;
+exports.RECOGNITION_STATE_COMPLETED = Speech.SFSpeechRecognitionTaskStateCompleted;
+exports.RECOGNITION_STATE_CANCELING = Speech.SFSpeechRecognitionTaskStateCanceling;