diff --git a/.github/workflows/cloud-language.yaml b/.github/workflows/cloud-language.yaml new file mode 100644 index 00000000000..685190dbd10 --- /dev/null +++ b/.github/workflows/cloud-language.yaml @@ -0,0 +1,67 @@ +name: cloud-language +on: + push: + branches: + - main + paths: + - 'cloud-language/**' + pull_request: + paths: + - 'cloud-language/**' + pull_request_target: + types: [labeled] + schedule: + - cron: '0 0 * * 0' +jobs: + test: + if: ${{ github.event.action != 'labeled' || github.event.label.name == 'actions:force-run' }} + runs-on: ubuntu-latest + timeout-minutes: 60 + permissions: + contents: 'write' + pull-requests: 'write' + id-token: 'write' + steps: + - uses: actions/checkout@v3.1.0 + with: + ref: ${{github.event.pull_request.head.ref}} + repository: ${{github.event.pull_request.head.repo.full_name}} + - uses: 'google-github-actions/auth@v0.8.3' + with: + workload_identity_provider: 'projects/1046198160504/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider' + service_account: 'kokoro-system-test@long-door-651.iam.gserviceaccount.com' + create_credentials_file: 'true' + access_token_lifetime: 600s + - uses: actions/setup-node@v3.5.1 + with: + node-version: 16 + - run: npm install + working-directory: cloud-language + - run: npm test + working-directory: cloud-language + env: + MOCHA_REPORTER_SUITENAME: cloud_language + MOCHA_REPORTER_OUTPUT: cloud_language_sponge_log.xml + MOCHA_REPORTER: xunit + - if: ${{ github.event.action == 'labeled' && github.event.label.name == 'actions:force-run' }} + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + try { + await github.rest.issues.removeLabel({ + name: 'actions:force-run', + owner: 'GoogleCloudPlatform', + repo: 'nodejs-docs-samples', + issue_number: context.payload.pull_request.number + }); + } catch (e) { + if (!e.message.includes('Label does not exist')) { + throw e; + } + } + - if: ${{ github.event_name == 
'schedule'}} + run: | + curl https://github.com/googleapis/repo-automation-bots/releases/download/flakybot-1.1.0/flakybot -o flakybot -s -L + chmod +x ./flakybot + ./flakybot --repo GoogleCloudPlatform/nodejs-docs-samples --commit_hash ${{github.sha}} --build_url https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} diff --git a/.github/workflows/workflows.json b/.github/workflows/workflows.json index 9fa4b05ddce..58eccff15a9 100644 --- a/.github/workflows/workflows.json +++ b/.github/workflows/workflows.json @@ -17,6 +17,7 @@ "appengine/typescript", "appengine/websockets", "appengine/twilio", + "cloud-language", "cloud-tasks/tutorial-gcf/app", "cloud-tasks/tutorial-gcf/function", "composer", diff --git a/cloud-language/analyze.v1.js b/cloud-language/analyze.v1.js new file mode 100644 index 00000000000..8c734e53d1f --- /dev/null +++ b/cloud-language/analyze.v1.js @@ -0,0 +1,459 @@ +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// sample-metadata: +// title: Analyze v1 +async function analyzeSentimentOfText(text) { + // [START language_sentiment_text] + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Creates a client + const client = new language.LanguageServiceClient(); + + /** + * TODO(developer): Uncomment the following line to run this code. + */ + // const text = 'Your text to analyze, e.g. 
Hello, world!'; + + // Prepares a document, representing the provided text + const document = { + content: text, + type: 'PLAIN_TEXT', + }; + + // Detects the sentiment of the document + const [result] = await client.analyzeSentiment({document}); + + const sentiment = result.documentSentiment; + console.log('Document sentiment:'); + console.log(` Score: ${sentiment.score}`); + console.log(` Magnitude: ${sentiment.magnitude}`); + + const sentences = result.sentences; + sentences.forEach(sentence => { + console.log(`Sentence: ${sentence.text.content}`); + console.log(` Score: ${sentence.sentiment.score}`); + console.log(` Magnitude: ${sentence.sentiment.magnitude}`); + }); + + // [END language_sentiment_text] +} + +async function analyzeSentimentInFile(bucketName, fileName) { + // [START language_sentiment_gcs] + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Creates a client + const client = new language.LanguageServiceClient(); + + /** + * TODO(developer): Uncomment the following lines to run this code + */ + // const bucketName = 'Your bucket name, e.g. my-bucket'; + // const fileName = 'Your file name, e.g. 
my-file.txt'; + + // Prepares a document, representing a text file in Cloud Storage + const document = { + gcsContentUri: `gs://${bucketName}/${fileName}`, + type: 'PLAIN_TEXT', + }; + + // Detects the sentiment of the document + const [result] = await client.analyzeSentiment({document}); + + const sentiment = result.documentSentiment; + console.log('Document sentiment:'); + console.log(` Score: ${sentiment.score}`); + console.log(` Magnitude: ${sentiment.magnitude}`); + + const sentences = result.sentences; + sentences.forEach(sentence => { + console.log(`Sentence: ${sentence.text.content}`); + console.log(` Score: ${sentence.sentiment.score}`); + console.log(` Magnitude: ${sentence.sentiment.magnitude}`); + }); + // [END language_sentiment_gcs] +} + +async function analyzeEntitiesOfText(text) { + // [START language_entities_text] + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Creates a client + const client = new language.LanguageServiceClient(); + + /** + * TODO(developer): Uncomment the following line to run this code. + */ + // const text = 'Your text to analyze, e.g. 
Hello, world!'; + + // Prepares a document, representing the provided text + const document = { + content: text, + type: 'PLAIN_TEXT', + }; + + // Detects entities in the document + const [result] = await client.analyzeEntities({document}); + + const entities = result.entities; + + console.log('Entities:'); + entities.forEach(entity => { + console.log(entity.name); + console.log(` - Type: ${entity.type}, Salience: ${entity.salience}`); + if (entity.metadata && entity.metadata.wikipedia_url) { + console.log(` - Wikipedia URL: ${entity.metadata.wikipedia_url}`); + } + }); + // [END language_entities_text] +} + +async function analyzeEntitiesInFile(bucketName, fileName) { + // [START language_entities_gcs] + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Creates a client + const client = new language.LanguageServiceClient(); + + /** + * TODO(developer): Uncomment the following lines to run this code + */ + // const bucketName = 'Your bucket name, e.g. my-bucket'; + // const fileName = 'Your file name, e.g. 
my-file.txt'; + + // Prepares a document, representing a text file in Cloud Storage + const document = { + gcsContentUri: `gs://${bucketName}/${fileName}`, + type: 'PLAIN_TEXT', + }; + + // Detects entities in the document + const [result] = await client.analyzeEntities({document}); + const entities = result.entities; + + console.log('Entities:'); + entities.forEach(entity => { + console.log(entity.name); + console.log(` - Type: ${entity.type}, Salience: ${entity.salience}`); + if (entity.metadata && entity.metadata.wikipedia_url) { + console.log(` - Wikipedia URL: ${entity.metadata.wikipedia_url}`); + } + }); + + // [END language_entities_gcs] +} + +async function analyzeSyntaxOfText(text) { + // [START language_syntax_text] + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Creates a client + const client = new language.LanguageServiceClient(); + + /** + * TODO(developer): Uncomment the following line to run this code. + */ + // const text = 'Your text to analyze, e.g. 
Hello, world!'; + + // Prepares a document, representing the provided text + const document = { + content: text, + type: 'PLAIN_TEXT', + }; + + // Need to specify an encodingType to receive word offsets + const encodingType = 'UTF8'; + + // Detects the sentiment of the document + const [syntax] = await client.analyzeSyntax({document, encodingType}); + + console.log('Tokens:'); + syntax.tokens.forEach(part => { + console.log(`${part.partOfSpeech.tag}: ${part.text.content}`); + console.log('Morphology:', part.partOfSpeech); + }); + // [END language_syntax_text] +} + +async function analyzeSyntaxInFile(bucketName, fileName) { + // [START language_syntax_gcs] + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Creates a client + const client = new language.LanguageServiceClient(); + + /** + * TODO(developer): Uncomment the following lines to run this code + */ + // const bucketName = 'Your bucket name, e.g. my-bucket'; + // const fileName = 'Your file name, e.g. my-file.txt'; + + // Prepares a document, representing a text file in Cloud Storage + const document = { + gcsContentUri: `gs://${bucketName}/${fileName}`, + type: 'PLAIN_TEXT', + }; + + // Need to specify an encodingType to receive word offsets + const encodingType = 'UTF8'; + + // Detects the sentiment of the document + const [syntax] = await client.analyzeSyntax({document, encodingType}); + + console.log('Parts of speech:'); + syntax.tokens.forEach(part => { + console.log(`${part.partOfSpeech.tag}: ${part.text.content}`); + console.log('Morphology:', part.partOfSpeech); + }); + // [END language_syntax_gcs] +} + +async function analyzeEntitySentimentOfText(text) { + // [START language_entity_sentiment_text] + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Creates a client + const client = new language.LanguageServiceClient(); + + /** + * TODO(developer): Uncomment the following line to run this code. 
+ */ + // const text = 'Your text to analyze, e.g. Hello, world!'; + + // Prepares a document, representing the provided text + const document = { + content: text, + type: 'PLAIN_TEXT', + }; + + // Detects sentiment of entities in the document + const [result] = await client.analyzeEntitySentiment({document}); + const entities = result.entities; + + console.log('Entities and sentiments:'); + entities.forEach(entity => { + console.log(` Name: ${entity.name}`); + console.log(` Type: ${entity.type}`); + console.log(` Score: ${entity.sentiment.score}`); + console.log(` Magnitude: ${entity.sentiment.magnitude}`); + }); + // [END language_entity_sentiment_text] +} + +async function analyzeEntitySentimentInFile(bucketName, fileName) { + // [START language_entity_sentiment_gcs] + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Creates a client + const client = new language.LanguageServiceClient(); + + /** + * TODO(developer): Uncomment the following lines to run this code + */ + // const bucketName = 'Your bucket name, e.g. my-bucket'; + // const fileName = 'Your file name, e.g. 
my-file.txt'; + + // Prepares a document, representing a text file in Cloud Storage + const document = { + gcsContentUri: `gs://${bucketName}/${fileName}`, + type: 'PLAIN_TEXT', + }; + + // Detects sentiment of entities in the document + const [result] = await client.analyzeEntitySentiment({document}); + const entities = result.entities; + + console.log('Entities and sentiments:'); + entities.forEach(entity => { + console.log(` Name: ${entity.name}`); + console.log(` Type: ${entity.type}`); + console.log(` Score: ${entity.sentiment.score}`); + console.log(` Magnitude: ${entity.sentiment.magnitude}`); + }); + // [END language_entity_sentiment_gcs] +} + +async function classifyTextOfText(text) { + // [START language_classify_text] + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Creates a client + const client = new language.LanguageServiceClient(); + + /** + * TODO(developer): Uncomment the following line to run this code. + */ + // const text = 'Your text to analyze, e.g. Hello, world!'; + + // Prepares a document, representing the provided text + const document = { + content: text, + type: 'PLAIN_TEXT', + }; + + const classificationModelOptions = { + v2Model: { + contentCategoriesVersion: 'V2', + }, + }; + + // Classifies text in the document + const [classification] = await client.classifyText({ + document, + classificationModelOptions, + }); + console.log('Categories:'); + classification.categories.forEach(category => { + console.log(`Name: ${category.name}, Confidence: ${category.confidence}`); + }); + // [END language_classify_text] +} + +async function classifyTextInFile(bucketName, fileName) { + // [START language_classify_gcs] + // Imports the Google Cloud client library. + const language = require('@google-cloud/language'); + + // Creates a client. 
+ const client = new language.LanguageServiceClient(); + + /** + * TODO(developer): Uncomment the following lines to run this code + */ + // const bucketName = 'Your bucket name, e.g. my-bucket'; + // const fileName = 'Your file name, e.g. my-file.txt'; + + // Prepares a document, representing a text file in Cloud Storage + const document = { + gcsContentUri: `gs://${bucketName}/${fileName}`, + type: 'PLAIN_TEXT', + }; + + // Classifies text in the document + const [classification] = await client.classifyText({document}); + + console.log('Categories:'); + classification.categories.forEach(category => { + console.log(`Name: ${category.name}, Confidence: ${category.confidence}`); + }); + // [END language_classify_gcs] +} + +require('yargs') + .demand(1) + .command( + 'sentiment-text ', + 'Detects sentiment of a string.', + {}, + opts => analyzeSentimentOfText(opts.text) + ) + .command( + 'sentiment-file ', + 'Detects sentiment in a file in Google Cloud Storage.', + {}, + opts => analyzeSentimentInFile(opts.bucketName, opts.fileName) + ) + .command('entities-text ', 'Detects entities in a string.', {}, opts => + analyzeEntitiesOfText(opts.text) + ) + .command( + 'entities-file ', + 'Detects entities in a file in Google Cloud Storage.', + {}, + opts => analyzeEntitiesInFile(opts.bucketName, opts.fileName) + ) + .command('syntax-text ', 'Detects syntax of a string.', {}, opts => + analyzeSyntaxOfText(opts.text) + ) + .command( + 'syntax-file ', + 'Detects syntax in a file in Google Cloud Storage.', + {}, + opts => analyzeSyntaxInFile(opts.bucketName, opts.fileName) + ) + .command( + 'entity-sentiment-text ', + 'Detects sentiment of the entities in a string.', + {}, + opts => analyzeEntitySentimentOfText(opts.text) + ) + .command( + 'entity-sentiment-file ', + 'Detects sentiment of the entities in a file in Google Cloud Storage.', + {}, + opts => analyzeEntitySentimentInFile(opts.bucketName, opts.fileName) + ) + .command('classify-text ', 'Classifies text of a string.', 
{}, opts => + classifyTextOfText(opts.text) + ) + .command( + 'classify-file ', + 'Classifies text in a file in Google Cloud Storage.', + {}, + opts => classifyTextInFile(opts.bucketName, opts.fileName) + ) + .example( + 'node $0 sentiment-text "President Obama is speaking at the White House."' + ) + .example( + 'node $0 sentiment-file my-bucket file.txt', + 'Detects sentiment in gs://my-bucket/file.txt' + ) + .example( + 'node $0 entities-text "President Obama is speaking at the White House."' + ) + .example( + 'node $0 entities-file my-bucket file.txt', + 'Detects entities in gs://my-bucket/file.txt' + ) + .example( + 'node $0 syntax-text "President Obama is speaking at the White House."' + ) + .example( + 'node $0 syntax-file my-bucket file.txt', + 'Detects syntax in gs://my-bucket/file.txt' + ) + .example( + 'node $0 entity-sentiment-text "President Obama is speaking at the White House."' + ) + .example( + 'node $0 entity-sentiment-file my-bucket file.txt', + 'Detects sentiment of entities in gs://my-bucket/file.txt' + ) + .example( + 'node $0 classify-text "Android is a mobile operating system developed by Google, based on the Linux kernel and designed primarily for touchscreen mobile devices such as smartphones and tablets."' + ) + .example( + 'node $0 classify-file my-bucket android_text.txt', + 'Detects syntax in gs://my-bucket/android_text.txt' + ) + .wrap(120) + .recommendCommands() + .epilogue( + 'For more information, see https://cloud.google.com/natural-language/docs' + ) + .help() + .strict().argv; diff --git a/cloud-language/package.json b/cloud-language/package.json new file mode 100644 index 00000000000..1db523db50c --- /dev/null +++ b/cloud-language/package.json @@ -0,0 +1,29 @@ +{ + "name": "nodejs-docs-samples-language", + "license": "Apache-2.0", + "author": "Google Inc.", + "engines": { + "node": ">=12.0.0" + }, + "repository": "googleapis/nodejs-language", + "private": true, + "files": [ + "*.js", + "resources" + ], + "scripts": { + "test": 
"mocha --timeout 60000" + }, + "dependencies": { + "@google-cloud/automl": "^3.0.0", + "mathjs": "^11.0.0", + "@google-cloud/language": "^5.1.0", + "@google-cloud/storage": "^6.0.0", + "yargs": "^16.0.0" + }, + "devDependencies": { + "chai": "^4.2.0", + "mocha": "^8.0.0", + "uuid": "^9.0.0" + } +} \ No newline at end of file diff --git a/cloud-language/quickstart.js b/cloud-language/quickstart.js new file mode 100644 index 00000000000..7eb7b5a4a29 --- /dev/null +++ b/cloud-language/quickstart.js @@ -0,0 +1,43 @@ +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +// [START language_quickstart] +async function quickstart() { + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Instantiates a client + const client = new language.LanguageServiceClient(); + + // The text to analyze + const text = 'Hello, world!'; + + const document = { + content: text, + type: 'PLAIN_TEXT', + }; + + // Detects the sentiment of the text + const [result] = await client.analyzeSentiment({document: document}); + const sentiment = result.documentSentiment; + + console.log(`Text: ${text}`); + console.log(`Sentiment score: ${sentiment.score}`); + console.log(`Sentiment magnitude: ${sentiment.magnitude}`); +} +// [END language_quickstart] + +quickstart().catch(console.error); diff --git a/cloud-language/resources/android_text.txt b/cloud-language/resources/android_text.txt new file mode 100644 index 00000000000..5afe2895f31 --- /dev/null +++ b/cloud-language/resources/android_text.txt @@ -0,0 +1 @@ +Android is a mobile operating system developed by Google, based on the Linux kernel and designed primarily for touchscreen mobile devices such as smartphones and tablets. \ No newline at end of file diff --git a/cloud-language/resources/text.txt b/cloud-language/resources/text.txt new file mode 100644 index 00000000000..97a1cea02b7 --- /dev/null +++ b/cloud-language/resources/text.txt @@ -0,0 +1 @@ +President Obama is speaking at the White House. \ No newline at end of file diff --git a/cloud-language/setEndpoint.js b/cloud-language/setEndpoint.js new file mode 100644 index 00000000000..b1f5fa7439c --- /dev/null +++ b/cloud-language/setEndpoint.js @@ -0,0 +1,46 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +async function setEndpoint() { + // [START language_set_endpoint] + // Imports the Google Cloud client library + const language = require('@google-cloud/language'); + + // Specifies the location of the api endpoint + const clientOptions = {apiEndpoint: 'eu-language.googleapis.com'}; + + // Instantiates a client + const client = new language.LanguageServiceClient(clientOptions); + // [END language_set_endpoint] + + // The text to analyze + const text = 'Hello, world!'; + + const document = { + content: text, + type: 'PLAIN_TEXT', + }; + + // Detects the sentiment of the text + const [result] = await client.analyzeSentiment({document: document}); + const sentiment = result.documentSentiment; + + console.log(`Text: ${text}`); + console.log(`Sentiment score: ${sentiment.score}`); + console.log(`Sentiment magnitude: ${sentiment.magnitude}`); +} + +setEndpoint().catch(console.error); diff --git a/cloud-language/snippets/.eslintrc.yml b/cloud-language/snippets/.eslintrc.yml new file mode 100644 index 00000000000..0aa37ac630e --- /dev/null +++ b/cloud-language/snippets/.eslintrc.yml @@ -0,0 +1,4 @@ +--- +rules: + no-console: off + node/no-missing-require: off diff --git a/cloud-language/test/analyze.v1.test.js b/cloud-language/test/analyze.v1.test.js new file mode 100644 index 00000000000..4dc992f8ea2 --- /dev/null +++ b/cloud-language/test/analyze.v1.test.js @@ -0,0 +1,134 @@ +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the 
License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const {Storage} = require('@google-cloud/storage'); +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); +const cp = require('child_process'); +const uuid = require('uuid'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +describe('analyze.v1', () => { + const storage = new Storage(); + const cmd = 'node analyze.v1.js'; + const bucketName = `nodejs-docs-samples-test-${uuid.v4()}`; + const fileName = 'text.txt'; + const fileName2 = 'android_text.txt'; + const localFilePath = path.join(__dirname, `../resources/${fileName}`); + const localFilePath2 = path.join(__dirname, `../resources/${fileName2}`); + const text = fs.readFileSync(localFilePath, 'utf-8'); + const text2 = fs.readFileSync(localFilePath2, 'utf-8'); + + before(async () => { + const [bucket] = await storage.createBucket(bucketName); + await bucket.upload(localFilePath); + await bucket.upload(localFilePath2); + }); + + after(async () => { + const bucket = storage.bucket(bucketName); + await bucket.deleteFiles({force: true}); + await bucket.deleteFiles({force: true}); // Try a second time... 
+ await bucket.delete(); + }); + + it('should analyze sentiment in text', async () => { + const output = execSync(`${cmd} sentiment-text "${text}"`); + assert.match(output, /Document sentiment:/); + assert.match(output, new RegExp(`Sentence: ${text}`)); + assert.match(output, /Score: 0/); + assert.match(output, /Magnitude: 0/); + }); + + it('should analyze sentiment in a file', async () => { + const output = execSync(`${cmd} sentiment-file ${bucketName} ${fileName}`); + assert.match(output, /Document sentiment:/); + assert.match(output, new RegExp(`Sentence: ${text}`)); + assert.match(output, /Score: 0/); + assert.match(output, /Magnitude: 0/); + }); + + it('should analyze entities in text', async () => { + const output = execSync(`${cmd} entities-text "${text}"`); + assert.match(output, /Obama/); + assert.match(output, /Type: PERSON/); + assert.match(output, /White House/); + assert.match(output, /Type: LOCATION/); + }); + + it('should analyze entities in a file', async () => { + const output = execSync(`${cmd} entities-file ${bucketName} ${fileName}`); + assert.match(output, /Entities:/); + assert.match(output, /Obama/); + assert.match(output, /Type: PERSON/); + assert.match(output, /White House/); + assert.match(output, /Type: LOCATION/); + }); + + it('should analyze syntax in text', async () => { + const output = execSync(`${cmd} syntax-text "${text}"`); + assert.match(output, /Tokens:/); + assert.match(output, /NOUN:/); + assert.match(output, /President/); + assert.match(output, /Obama/); + assert.match(output, /Morphology:/); + assert.match(output, /tag: 'NOUN'/); + }); + + it('should analyze syntax in a file', async () => { + const output = execSync(`${cmd} syntax-file ${bucketName} ${fileName}`); + assert.match(output, /NOUN:/); + assert.match(output, /President/); + assert.match(output, /Obama/); + assert.match(output, /Morphology:/); + assert.match(output, /tag: 'NOUN'/); + }); + + it('should analyze entity sentiment in text', async () => { + const output = 
execSync(`${cmd} entity-sentiment-text "${text}"`); + assert.match(output, /Entities and sentiments:/); + assert.match(output, /Obama/); + assert.match(output, /PERSON/); + assert.match(output, /Score: 0/); + assert.match(output, /Magnitude: 0/); + }); + + it('should analyze entity sentiment in a file', async () => { + const output = execSync( + `${cmd} entity-sentiment-file ${bucketName} ${fileName}` + ); + assert.match(output, /Entities and sentiments:/); + assert.match(output, /Obama/); + assert.match(output, /PERSON/); + assert.match(output, /Score: 0/); + assert.match(output, /Magnitude: 0/); + }); + + it('should classify text in a file', async () => { + const output = execSync(`${cmd} classify-file ${bucketName} ${fileName2}`); + assert.match(output, /Name:/); + assert.match(output, /Computers & Electronics/); + }); + + it('should classify text in text', async () => { + const output = execSync(`${cmd} classify-text "${text2}"`); + assert.match(output, /Name:/); + assert.match(output, /Computers & Electronics/); + }); +}); diff --git a/cloud-language/test/automlNaturalLanguage.test.js b/cloud-language/test/automlNaturalLanguage.test.js new file mode 100644 index 00000000000..e230cd9aad2 --- /dev/null +++ b/cloud-language/test/automlNaturalLanguage.test.js @@ -0,0 +1,133 @@ +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +const {assert} = require('chai'); +const {describe, it} = require('mocha'); +const cp = require('child_process'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const cmdDataset = 'node automl/automlNaturalLanguageDataset.js'; +const cmdModel = 'node automl/automlNaturalLanguageModel.js'; +const cmdPredict = 'node automl/automlNaturalLanguagePredict.js'; + +const testDataSetName = 'testDataset'; +const dummyDataSet = 'dummyDataset'; +const testModelName = 'dummyModel'; +const sampleText = './resources/test.txt'; +const projectId = process.env.GCLOUD_PROJECT; + +describe.skip('automl', () => { + // Skipped because it's been taking too long to delete datasets + it('should create a create, list, and delete a dataset', async () => { + // Check to see that this dataset does not yet exist + let output = execSync(`${cmdDataset} list-datasets`); + //t.false(output.includes(testDataSetName)); + assert.notMatch(output, /testDataset/); + + // Create dataset + output = execSync(`${cmdDataset} create-dataset -n "${testDataSetName}"`); + const parsedOut = output.split('\n'); + const dataSetId = parsedOut[1].split(':')[1].trim(); + assert.match(output, /Dataset display name: {2}testDataset/); + + // Delete dataset + output = execSync(`${cmdDataset} delete-dataset -i "${dataSetId}"`); + assert.match(output, /Dataset deleted./); + }); + + // See : https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/NaturalLanguage/automl/model_test.py + // We make two models running this test, see hard-coded workaround below + it('should create a dataset, import data, and start making a model', async () => { + // Check to see that this dataset does not yet exist + let output = execSync(`${cmdDataset} list-datasets`); + assert.notMatch(output, /dummyDataset/); + + // Create dataset + output = execSync(`${cmdDataset} create-dataset -n "${dummyDataSet}"`); + + const dataSetId = output.split('\n')[1].split(':')[1].trim(); + 
assert.match(output, /Dataset display name: {2}dummyDataset/); + + // Import Data + output = execSync( + `${cmdDataset} import-data -i "${dataSetId}" -p "gs://nodejs-docs-samples-vcm/happiness.csv"` + ); + assert.match(output, /Data imported./); + + // Check to make sure model doesn't already exist + output = execSync(`${cmdModel} list-models`); + assert.notMatch(output, /dummyModel/); + + // Begin training dataset, getting operation ID for next operation + output = execSync( + `${cmdModel} create-model -i "${dataSetId}" -m "${testModelName}" -t "2"` + ); + const operationName = output.split('\n')[0].split(':')[1].trim(); + assert.match(output, /Training started.../); + + // Poll operation status, here confirming that operation is not complete yet + output = execSync( + `${cmdModel} get-operation-status -i "${dataSetId}" -o "${operationName}"` + ); + assert.match(output, /done: false/); + }); + + it('should display evaluation from prexisting model', async () => { + const donotdeleteModelId = 'TCN4740161257642267869'; + + // Confirm dataset exists + let output = execSync(`${cmdDataset} list-datasets`); + assert.match(output, /dummyDb/); + + // List model evaluations, confirm model exists + output = execSync( + `${cmdModel} list-model-evaluations -a "${donotdeleteModelId}"` + ); + + // Display evaluation + output = execSync( + `${cmdModel} display-evaluation -a "${donotdeleteModelId}"` + ); + assert.match(output, /Model Precision:/); + }); + + it('should run Prediction from prexisting model', async () => { + const donotdeleteModelId = 'TCN4740161257642267869'; + + // Confirm dataset exists + let output = execSync(`${cmdDataset} list-datasets`); + assert.match(output, /do_not_delete_me/); + + // List model evaluations, confirm model exists + output = execSync( + `${cmdModel} list-model-evaluations -a "${donotdeleteModelId}"` + ); + assert.match(output, /classificationEvaluationMetrics:/); + + // Run prediction on 'test.txt' in resources folder + output = execSync( + 
`${cmdPredict} predict -i "${donotdeleteModelId}" -f "${sampleText}" -s "0.5"` + ); + assert.match(output, /Firm_Cheese/); + }); + + // List datasets + it('should list datasets', async () => { + const output = execSync(`${cmdDataset} list-datasets ${projectId}`); + assert.match(output, /List of datasets:/); + }); +}); diff --git a/cloud-language/test/quickstart.test.js b/cloud-language/test/quickstart.test.js new file mode 100644 index 00000000000..43470453dca --- /dev/null +++ b/cloud-language/test/quickstart.test.js @@ -0,0 +1,30 @@ +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +const {assert} = require('chai'); +const {describe, it} = require('mocha'); +const cp = require('child_process'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +describe('quickstart', () => { + it('should analyze sentiment in text', async () => { + const stdout = execSync('node quickstart.js'); + assert.match(stdout, /Text: Hello, world!/); + assert.match(stdout, /Sentiment score: /); + assert.match(stdout, /Sentiment magnitude: /); + }); +}); diff --git a/cloud-language/test/setEndpoint.test.js b/cloud-language/test/setEndpoint.test.js new file mode 100644 index 00000000000..7e279281364 --- /dev/null +++ b/cloud-language/test/setEndpoint.test.js @@ -0,0 +1,30 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const {assert} = require('chai'); +const {describe, it} = require('mocha'); +const cp = require('child_process'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +describe('set endpoint for language api call', () => { + it('should analyze sentiment in text at a specific api endpoint', async () => { + const stdout = execSync('node setEndpoint.js'); + assert.match(stdout, /Text: Hello, world!/); + assert.match(stdout, /Sentiment score: /); + assert.match(stdout, /Sentiment magnitude: /); + }); +});