diff --git a/.github/.metadata.json b/.github/.metadata.json index 40d69c54..d12aa16a 100644 --- a/.github/.metadata.json +++ b/.github/.metadata.json @@ -1,25 +1,25 @@ { - "templateVersion": "0.2", - "product": { - "name": "API Mesh", - "description": "Aio cli plugin api mesh. CLI plugins for api-mesh installed with aio." - }, - "contacts": { - "team": { - "name": "Star-Griffins", - "DL": "Grp-Star-Griffins", - "slackChannel": "star-griffins" - } - }, - "ticketTracker": { - "functionalJiraQueue": { - "projectKey": "CEXT", - "component": "" - }, - "securityJiraQueue": { - "projectKey": "MAGREQ", - "component": "MAGREQ/Magento Extensibility" - } - }, - "productionCodeBranches": ["main"] -} \ No newline at end of file + "templateVersion": "0.2", + "product": { + "name": "API Mesh", + "description": "Aio cli plugin api mesh. CLI plugins for api-mesh installed with aio." + }, + "contacts": { + "team": { + "name": "Star-Griffins", + "DL": "Grp-Star-Griffins", + "slackChannel": "star-griffins" + } + }, + "ticketTracker": { + "functionalJiraQueue": { + "projectKey": "CEXT", + "component": "" + }, + "securityJiraQueue": { + "projectKey": "MAGREQ", + "component": "MAGREQ/Magento Extensibility" + } + }, + "productionCodeBranches": ["main"] +} diff --git a/README.md b/README.md index 9a7fc44c..82501da4 100644 --- a/README.md +++ b/README.md @@ -2,9 +2,9 @@ ## Prerequisites -* [aio-cli](https://github.com/adobe/aio-cli). -* [node.js](https://nodejs.org/en/) -* [yarn](https://classic.yarnpkg.com/lang/en/docs/install). +- [aio-cli](https://github.com/adobe/aio-cli). +- [node.js](https://nodejs.org/en/) +- [yarn](https://classic.yarnpkg.com/lang/en/docs/install). 
To install a revision from this repository: diff --git a/package.json b/package.json index 1ee307fb..86dc6e7b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@adobe/aio-cli-plugin-api-mesh", - "version": "3.7.0", + "version": "3.8.0", "description": "Adobe I/O CLI plugin to develop and manage API mesh sources", "keywords": [ "oclif-plugin" @@ -86,6 +86,7 @@ "jsmin": "1.0.1", "json-interpolate": "^1.0.3", "lru-cache": "^7.14.1", + "newrelic": "^11.0.0", "node-clipboardy": "^1.0.3", "node-fetch": "2.6.1", "pino": "^7.9.2", @@ -94,8 +95,7 @@ "source-registry-storage-adapter": "github:devx-services/source-registry-storage-adapter#main", "util": "^0.12.5", "uuid": "^8.3.2", - "yaml": "^2.4.2", - "newrelic": "^11.0.0" + "yaml": "^2.4.2" }, "devDependencies": { "@babel/eslint-parser": "^7.15.8", diff --git a/src/commands/__fixtures__/files/requestParams.json b/src/commands/__fixtures__/files/requestParams.json index de6b1e17..7c65f61a 100644 --- a/src/commands/__fixtures__/files/requestParams.json +++ b/src/commands/__fixtures__/files/requestParams.json @@ -1,3 +1,3 @@ { - "type": "updatedContent" -} \ No newline at end of file + "type": "updatedContent" +} diff --git a/src/commands/__fixtures__/openapi-schema.json b/src/commands/__fixtures__/openapi-schema.json index 5c9f2ef9..d5885652 100644 --- a/src/commands/__fixtures__/openapi-schema.json +++ b/src/commands/__fixtures__/openapi-schema.json @@ -1,4 +1,4 @@ { - "$schema": "http://json-schema.org/draft-04/schema", - "id": "2" -} \ No newline at end of file + "$schema": "http://json-schema.org/draft-04/schema", + "id": "2" +} diff --git a/src/commands/__fixtures__/requestParams.json b/src/commands/__fixtures__/requestParams.json index de6b1e17..7c65f61a 100644 --- a/src/commands/__fixtures__/requestParams.json +++ b/src/commands/__fixtures__/requestParams.json @@ -1,3 +1,3 @@ { - "type": "updatedContent" -} \ No newline at end of file + "type": "updatedContent" +} diff --git 
a/src/commands/__fixtures__/sample_fully_qualified_mesh.json b/src/commands/__fixtures__/sample_fully_qualified_mesh.json index 0542d8e5..b3898612 100644 --- a/src/commands/__fixtures__/sample_fully_qualified_mesh.json +++ b/src/commands/__fixtures__/sample_fully_qualified_mesh.json @@ -1,29 +1,29 @@ { - "meshConfig": { - "sources": [ - { - "name": "", - "handler": { - "JsonSchema": { - "baseUrl": "", - "operations": [ - { - "type": "Query", - "field": "", - "path": "", - "method": "POST", - "requestSchema": "./schemaBody.json" - } - ] - } - } - } - ], - "files": [ - { - "path": "./schemaBody.json", - "content": "{\"type\":\"dummyContent\"}" - } - ] - } - } \ No newline at end of file + "meshConfig": { + "sources": [ + { + "name": "", + "handler": { + "JsonSchema": { + "baseUrl": "", + "operations": [ + { + "type": "Query", + "field": "", + "path": "", + "method": "POST", + "requestSchema": "./schemaBody.json" + } + ] + } + } + } + ], + "files": [ + { + "path": "./schemaBody.json", + "content": "{\"type\":\"dummyContent\"}" + } + ] + } +} diff --git a/src/commands/__fixtures__/sample_mesh_files.json b/src/commands/__fixtures__/sample_mesh_files.json index 1afa4292..6b841d42 100644 --- a/src/commands/__fixtures__/sample_mesh_files.json +++ b/src/commands/__fixtures__/sample_mesh_files.json @@ -1,23 +1,23 @@ { - "meshConfig": { - "sources": [ - { - "name": "", - "handler": { - "JsonSchema": { - "baseUrl": "", - "operations": [ - { - "type": "Query", - "field": "", - "path": "", - "method": "POST", - "requestSchema": "./requestParams.json" - } - ] - } - } - } - ] - } -} \ No newline at end of file + "meshConfig": { + "sources": [ + { + "name": "", + "handler": { + "JsonSchema": { + "baseUrl": "", + "operations": [ + { + "type": "Query", + "field": "", + "path": "", + "method": "POST", + "requestSchema": "./requestParams.json" + } + ] + } + } + } + ] + } +} diff --git a/src/commands/__fixtures__/sample_mesh_invalid_file_content.json 
b/src/commands/__fixtures__/sample_mesh_invalid_file_content.json index 9ac5f8e8..db0c9eec 100644 --- a/src/commands/__fixtures__/sample_mesh_invalid_file_content.json +++ b/src/commands/__fixtures__/sample_mesh_invalid_file_content.json @@ -1,14 +1,14 @@ { - "meshConfig":{ - "sources":[ - { - "name":"CurrencyAPI", - "handler":{ - "openapi":{ - "source":"./openapi-schema.json" - } - } - } - ] - } - } \ No newline at end of file + "meshConfig": { + "sources": [ + { + "name": "CurrencyAPI", + "handler": { + "openapi": { + "source": "./openapi-schema.json" + } + } + } + ] + } +} diff --git a/src/commands/__fixtures__/sample_mesh_invalid_file_name.json b/src/commands/__fixtures__/sample_mesh_invalid_file_name.json index cb745a51..5254b0a2 100644 --- a/src/commands/__fixtures__/sample_mesh_invalid_file_name.json +++ b/src/commands/__fixtures__/sample_mesh_invalid_file_name.json @@ -1,27 +1,25 @@ { - "meshConfig":{ - "sources":[ - { - "name":"", - "handler":{ - "JsonSchema":{ - "baseUrl":"", - "operations":[ - { - "type":"Query", - "field":"", - "path":"", - "method":"POST", - "requestSchema":"./requestJSONParameters.json" - } - ] - } - } - } - ], - "additionalTypeDefs":"", - "additionalResolvers":[ - "./additional-resolvers.js" - ] - } - } \ No newline at end of file + "meshConfig": { + "sources": [ + { + "name": "", + "handler": { + "JsonSchema": { + "baseUrl": "", + "operations": [ + { + "type": "Query", + "field": "", + "path": "", + "method": "POST", + "requestSchema": "./requestJSONParameters.json" + } + ] + } + } + } + ], + "additionalTypeDefs": "", + "additionalResolvers": ["./additional-resolvers.js"] + } +} diff --git a/src/commands/__fixtures__/sample_mesh_invalid_paths.json b/src/commands/__fixtures__/sample_mesh_invalid_paths.json index fec93f3a..01443bab 100644 --- a/src/commands/__fixtures__/sample_mesh_invalid_paths.json +++ b/src/commands/__fixtures__/sample_mesh_invalid_paths.json @@ -1,23 +1,23 @@ { - "meshConfig": { - "sources": [ - { - "name": "", - 
"handler": { - "JsonSchema": { - "baseUrl": "", - "operations": [ - { - "type": "Query", - "field": "", - "path": "", - "method": "POST", - "requestSchema": "./schemaBody.json" - } - ] - } - } - } - ] - } - } \ No newline at end of file + "meshConfig": { + "sources": [ + { + "name": "", + "handler": { + "JsonSchema": { + "baseUrl": "", + "operations": [ + { + "type": "Query", + "field": "", + "path": "", + "method": "POST", + "requestSchema": "./schemaBody.json" + } + ] + } + } + } + ] + } +} diff --git a/src/commands/__fixtures__/sample_mesh_invalid_type.json b/src/commands/__fixtures__/sample_mesh_invalid_type.json index d3b5a10a..3599832e 100644 --- a/src/commands/__fixtures__/sample_mesh_invalid_type.json +++ b/src/commands/__fixtures__/sample_mesh_invalid_type.json @@ -1,27 +1,25 @@ { - "meshConfig":{ - "sources":[ - { - "name":"", - "handler":{ - "JsonSchema":{ - "baseUrl":"", - "operations":[ - { - "type":"Query", - "field":"", - "path":"", - "method":"POST", - "requestSchema":"./requestParams.txt" - } - ] - } - } - } - ], - "additionalTypeDefs":"", - "additionalResolvers":[ - "./additional-resolvers.js" - ] - } - } \ No newline at end of file + "meshConfig": { + "sources": [ + { + "name": "", + "handler": { + "JsonSchema": { + "baseUrl": "", + "operations": [ + { + "type": "Query", + "field": "", + "path": "", + "method": "POST", + "requestSchema": "./requestParams.txt" + } + ] + } + } + } + ], + "additionalTypeDefs": "", + "additionalResolvers": ["./additional-resolvers.js"] + } +} diff --git a/src/commands/__fixtures__/sample_mesh_mismatching_path.json b/src/commands/__fixtures__/sample_mesh_mismatching_path.json index c9fc4cb1..4cabc7f1 100644 --- a/src/commands/__fixtures__/sample_mesh_mismatching_path.json +++ b/src/commands/__fixtures__/sample_mesh_mismatching_path.json @@ -1,29 +1,29 @@ { - "meshConfig":{ - "sources":[ - { - "name":"", - "handler":{ - "JsonSchema":{ - "baseUrl":"", - "operations":[ - { - "type":"Query", - "field":"", - "path":"", - 
"method":"POST", - "requestSchema":"./requestParams.json" - } - ] - } - } - } - ], - "files": [ - { - "path": "./requestPARAMS.json", - "content": "{\"type\":\"dummyObject\"}" - } - ] - } - } \ No newline at end of file + "meshConfig": { + "sources": [ + { + "name": "", + "handler": { + "JsonSchema": { + "baseUrl": "", + "operations": [ + { + "type": "Query", + "field": "", + "path": "", + "method": "POST", + "requestSchema": "./requestParams.json" + } + ] + } + } + } + ], + "files": [ + { + "path": "./requestPARAMS.json", + "content": "{\"type\":\"dummyObject\"}" + } + ] + } +} diff --git a/src/commands/__fixtures__/sample_mesh_outside_workspace_dir.json b/src/commands/__fixtures__/sample_mesh_outside_workspace_dir.json index 98300cf5..df90faba 100644 --- a/src/commands/__fixtures__/sample_mesh_outside_workspace_dir.json +++ b/src/commands/__fixtures__/sample_mesh_outside_workspace_dir.json @@ -1,23 +1,23 @@ { - "meshConfig": { - "sources": [ - { - "name": "", - "handler": { - "JsonSchema": { - "baseUrl": "", - "operations": [ - { - "type": "Query", - "field": "", - "path": "", - "method": "POST", - "requestSchema": "../requestParams.json" - } - ] - } - } - } - ] - } - } \ No newline at end of file + "meshConfig": { + "sources": [ + { + "name": "", + "handler": { + "JsonSchema": { + "baseUrl": "", + "operations": [ + { + "type": "Query", + "field": "", + "path": "", + "method": "POST", + "requestSchema": "../requestParams.json" + } + ] + } + } + } + ] + } +} diff --git a/src/commands/__fixtures__/sample_mesh_path_from_home.json b/src/commands/__fixtures__/sample_mesh_path_from_home.json index cad626ea..a95e58a6 100644 --- a/src/commands/__fixtures__/sample_mesh_path_from_home.json +++ b/src/commands/__fixtures__/sample_mesh_path_from_home.json @@ -1,14 +1,14 @@ { - "meshConfig":{ - "sources":[ - { - "name": "CurrencyAPI", - "handler": { - "openapi": { - "source": "~/venia-openapi-schema.json" - } - } - } - ] - } - } \ No newline at end of file + "meshConfig": { + 
"sources": [ + { + "name": "CurrencyAPI", + "handler": { + "openapi": { + "source": "~/venia-openapi-schema.json" + } + } + } + ] + } +} diff --git a/src/commands/__fixtures__/sample_mesh_subdirectory.json b/src/commands/__fixtures__/sample_mesh_subdirectory.json index e60b18b4..e57e2f7e 100644 --- a/src/commands/__fixtures__/sample_mesh_subdirectory.json +++ b/src/commands/__fixtures__/sample_mesh_subdirectory.json @@ -1,23 +1,23 @@ { - "meshConfig": { - "sources": [ - { - "name": "", - "handler": { - "JsonSchema": { - "baseUrl": "", - "operations": [ - { - "type": "Query", - "field": "", - "path": "", - "method": "POST", - "requestSchema": "./files/requestParams.json" - } - ] - } - } - } - ] - } - } \ No newline at end of file + "meshConfig": { + "sources": [ + { + "name": "", + "handler": { + "JsonSchema": { + "baseUrl": "", + "operations": [ + { + "type": "Query", + "field": "", + "path": "", + "method": "POST", + "requestSchema": "./files/requestParams.json" + } + ] + } + } + } + ] + } +} diff --git a/src/commands/__fixtures__/sample_mesh_with_files_array.json b/src/commands/__fixtures__/sample_mesh_with_files_array.json index 9ab6b3ee..67e55666 100644 --- a/src/commands/__fixtures__/sample_mesh_with_files_array.json +++ b/src/commands/__fixtures__/sample_mesh_with_files_array.json @@ -1,29 +1,29 @@ { - "meshConfig": { - "sources": [ - { - "name": "", - "handler": { - "JsonSchema": { - "baseUrl": "", - "operations": [ - { - "type": "Query", - "field": "", - "path": "", - "method": "POST", - "requestSchema": "./requestParams.json" - } - ] - } - } - } - ], - "files": [ - { - "path": "./requestParams.json", - "content": "{\"type\":\"dummyContent\"}" - } - ] - } - } \ No newline at end of file + "meshConfig": { + "sources": [ + { + "name": "", + "handler": { + "JsonSchema": { + "baseUrl": "", + "operations": [ + { + "type": "Query", + "field": "", + "path": "", + "method": "POST", + "requestSchema": "./requestParams.json" + } + ] + } + } + } + ], + "files": [ + { 
+ "path": "./requestParams.json", + "content": "{\"type\":\"dummyContent\"}" + } + ] + } +} diff --git a/src/commands/__fixtures__/sample_secrets_mesh.json b/src/commands/__fixtures__/sample_secrets_mesh.json index 8a573425..3bffb852 100644 --- a/src/commands/__fixtures__/sample_secrets_mesh.json +++ b/src/commands/__fixtures__/sample_secrets_mesh.json @@ -15,4 +15,4 @@ } ] } -} \ No newline at end of file +} diff --git a/src/commands/api-mesh/__tests__/log-get-bulk.test.js b/src/commands/api-mesh/__tests__/log-get-bulk.test.js new file mode 100644 index 00000000..e8bdc00e --- /dev/null +++ b/src/commands/api-mesh/__tests__/log-get-bulk.test.js @@ -0,0 +1,284 @@ +const fs = require('fs'); +const path = require('path'); +const GetBulkLogCommand = require('../log-get-bulk'); +const { initRequestId, initSdk, promptConfirm } = require('../../../helpers'); +const { getMeshId, getPresignedUrls } = require('../../../lib/devConsole'); +const { suggestCorrectedDateFormat } = require('../../../utils'); + +jest.mock('fs'); +jest.mock('axios'); +jest.mock('../../../helpers', () => ({ + initSdk: jest.fn().mockResolvedValue({}), + initRequestId: jest.fn().mockResolvedValue({}), + promptConfirm: jest.fn().mockResolvedValue(true), +})); +jest.mock('../../../lib/devConsole'); +jest.mock('../../../classes/logger'); + +describe('GetBulkLogCommand', () => { + let parseSpy; + + beforeEach(() => { + // Setup spies and mock functions + parseSpy = jest.spyOn(GetBulkLogCommand.prototype, 'parse').mockResolvedValue({ + flags: { + startTime: '2024-08-29T12:00:00Z', + endTime: '2024-08-29T12:30:00Z', + filename: 'test.csv', + ignoreCache: false, + }, + }); + + // initRequestId.mockResolvedValue(); + initSdk.mockResolvedValue({ + imsOrgId: 'orgId', + imsOrgCode: 'orgCode', + projectId: 'projectId', + workspaceId: 'workspaceId', + workspaceName: 'workspaceName', + }); + getMeshId.mockResolvedValue('meshId'); + getPresignedUrls.mockResolvedValue({ + presignedUrls: [{ key: 'log1.csv', url: 
'http://example.com/someHash' }], + totalSize: 2048, + }); + promptConfirm.mockResolvedValue(true); + global.requestId = 'dummy_request_id'; + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + test('throws an error if the time difference between startTime and endTime is greater than 30 minutes', async () => { + // Mock the file system checks even if they are not the focus of this test + fs.existsSync.mockReturnValue(true); // Assume the file exists + fs.statSync.mockReturnValue({ size: 0 }); // Assume the file is empty + + // Get the current date and time + const now = new Date(); + + // Create dynamic startTime and endTime + const startTime = new Date(now); + const endTime = new Date(now); + startTime.setMinutes(startTime.getMinutes() - 45); // Set endTime to 45 minutes after startTime + + const formattedStartTime = startTime.toISOString().slice(0, 19) + 'Z'; + const formattedEndTime = endTime.toISOString().slice(0, 19) + 'Z'; + + parseSpy.mockResolvedValueOnce({ + flags: { + startTime: formattedStartTime, + endTime: formattedEndTime, + filename: 'test.csv', + ignoreCache: false, + }, + }); + + const command = new GetBulkLogCommand([], {}); + await expect(command.run()).rejects.toThrow( + 'Max duration between startTime and endTime should be 30 minutes. 
Current duration is 0 hours 45 minutes and 0 seconds.', + ); + }); + + test('throws an error if the endTime is greater than current time(now)', async () => { + // Mock the file system checks even if they are not the focus of this test + fs.existsSync.mockReturnValue(true); // Assume the file exists + fs.statSync.mockReturnValue({ size: 0 }); // Assume the file is empty + + // Get the current date and time + const now = new Date(); + + // Create dynamic startTime and endTime + const startTime = new Date(now); + const endTime = new Date(now); + endTime.setMinutes(startTime.getMinutes() + 45); // Set endTime to 45 minutes after startTime + + const formattedStartTime = startTime.toISOString().slice(0, 19) + 'Z'; + const formattedEndTime = endTime.toISOString().slice(0, 19) + 'Z'; + + parseSpy.mockResolvedValueOnce({ + flags: { + startTime: formattedStartTime, + endTime: formattedEndTime, + filename: 'test.csv', + ignoreCache: false, + }, + }); + + const command = new GetBulkLogCommand([], {}); + await expect(command.run()).rejects.toThrow( + 'endTime cannot be in the future. Provide a valid endTime.', + ); + }); + + test('throws an error if startTime format is invalid', async () => { + parseSpy.mockResolvedValueOnce({ + flags: { + // startTime: '20240809123456', + startTime: '20241213223832', + endTime: '2024-08-29T12:30:00Z', + filename: 'test.csv', + }, + }); + + const command = new GetBulkLogCommand([], {}); + + // Assuming your suggestCorrectedDateFormat function corrects the format to "2024-08-09T09:08:33Z" + const correctedStartTime = '2024-12-13T22:38:32Z'; // Use an appropriate correction - 2024-08-09T12:34:56Z + + await expect(command.run()).rejects.toThrow( + `Use the format YYYY-MM-DDTHH:MM:SSZ for startTime. 
Did you mean ${correctedStartTime}?`, + ); + }); + + test('throws an error if endTime format is invalid', async () => { + parseSpy.mockResolvedValueOnce({ + flags: { + startTime: '2024-08-29T12:00:00Z', + endTime: '2024-08-29:23:45:56Z', + filename: 'test.csv', + }, + }); + + const command = new GetBulkLogCommand([], {}); + + // Assuming your suggestCorrectedDateFormat function corrects the format to "2024-08-09T09:08:33Z" + const correctedStartTime = '2024-08-29T23:45:56Z'; // Use an appropriate correction + await expect(command.run()).rejects.toThrow( + `Use the format YYYY-MM-DDTHH:MM:SSZ for endTime. Did you mean ${correctedStartTime}?`, + ); + }); + + // Test for totalSize being 0 + test('throws an error if totalSize is 0', async () => { + // Mock the file system checks even if they are not the focus of this test + fs.existsSync.mockReturnValue(true); // Assume the file exists + fs.statSync.mockReturnValue({ size: 0 }); // Assume the file is empty + // Mock getPresignedUrls to return totalSize as 0 + getPresignedUrls.mockResolvedValueOnce({ + presignedUrls: [{ key: 'log1', url: 'http://example.com/log1' }], + totalSize: 0, // totalSize is 0 + }); + + const command = new GetBulkLogCommand([], {}); + await expect(command.run()).rejects.toThrow('No logs available to download'); + }); + + test('throws an error if logs are requested for a date older than 30 days', async () => { + const today = new Date(); + const thirtyDaysAgo = new Date(today); + thirtyDaysAgo.setUTCDate(today.getUTCDate() - 30); + + const startTime = new Date(thirtyDaysAgo); + startTime.setUTCDate(thirtyDaysAgo.getUTCDate() - 1); + const formattedStartTime = startTime.toISOString().slice(0, 19) + 'Z'; + + parseSpy.mockResolvedValueOnce({ + flags: { + startTime: formattedStartTime, + endTime: '2024-08-30T12:30:00Z', + filename: 'test.csv', + }, + }); + + const command = new GetBulkLogCommand([], {}); + await expect(command.run()).rejects.toThrow( + 'Cannot get logs more than 30 days old. 
Adjust your time range.', + ); + }); + + // Test for file creation and emptiness check + test('creates file if it does not exist and checks if file is empty before proceeding', async () => { + fs.existsSync.mockReturnValue(false); // Mock file does not exist + fs.statSync.mockReturnValue({ size: 0 }); // Mock file is empty + + const mockWriteStream = { + write: jest.fn(), + end: jest.fn(), + on: jest.fn((event, callback) => { + if (event === 'finish') { + callback(); + } + }), + }; + fs.createWriteStream.mockReturnValue(mockWriteStream); + + const command = new GetBulkLogCommand([], {}); + await command.run(); + + expect(fs.existsSync).toHaveBeenCalledWith(path.resolve(process.cwd(), 'test.csv')); + expect(fs.writeFileSync).toHaveBeenCalledWith(path.resolve(process.cwd(), 'test.csv'), ''); // Ensures file is created if not exists + expect(mockWriteStream.write).toHaveBeenCalled(); // Writes content to file + }); + + test('throws an error if the file is not empty', async () => { + fs.existsSync.mockReturnValue(true); + fs.statSync.mockReturnValue({ size: 1024 }); + + const command = new GetBulkLogCommand([], {}); + await expect(command.run()).rejects.toThrow('Make sure the file: test.csv is empty'); + }); + + test('downloads logs if all conditions are met', async () => { + fs.existsSync.mockReturnValue(true); + fs.statSync.mockReturnValue({ size: 0 }); + + const mockWriteStream = { + write: jest.fn(), + end: jest.fn(), + on: jest.fn((event, callback) => { + if (event === 'finish') { + callback(); + } + }), + }; + fs.createWriteStream.mockReturnValue(mockWriteStream); + + const command = new GetBulkLogCommand([], {}); + await command.run(); + + expect(initRequestId).toHaveBeenCalled(); + expect(initSdk).toHaveBeenCalled(); + expect(getMeshId).toHaveBeenCalledWith('orgId', 'projectId', 'workspaceId', 'workspaceName'); + expect(getPresignedUrls).toHaveBeenCalledWith( + 'orgCode', + 'projectId', + 'workspaceId', + 'meshId', + expect.any(String), + expect.any(String), + 
); + expect(fs.createWriteStream).toHaveBeenCalledWith(path.resolve(process.cwd(), 'test.csv'), { + flags: 'a', + }); + expect(mockWriteStream.write).toHaveBeenCalled(); + expect(mockWriteStream.end).toHaveBeenCalled(); + }); +}); +describe('GetBulkLogCommand startTime and endTime validation', () => { + // Define the test cases as an array of [inputDate, expectedOutput] pairs + const testCases = [ + // Invalid formats that should be corrected + ['2024-0812T123445', '2024-08-12T12:34:45Z'], + ['2024:08:12-09-08-36Z', '2024-08-12T09:08:36Z'], + ['20241223T234556Z', '2024-12-23T23:45:56Z'], + ['20241213223832', '2024-12-13T22:38:32Z'], + ['2024-12-13T223832Z', '2024-12-13T22:38:32Z'], + ['2024-11-23T21:34:45', '2024-11-23T21:34:45Z'], + + // Invalid date components that should not be corrected, but return null which lets the user know the date is invalid + ['20240834123456Z', null], // Invalid day (34) + ['2024-12-34T23:34:45Z', null], // Invalid day (34) + ['2024-13-23:21:34:45', null], // Invalid month (13) + ['2024-11-63T21:34:45', null], // Invalid day (13) and missing Z + ]; + + test.each(testCases)( + 'suggestCorrectedDateFormat("%s") should return "%s"', + (inputDate, expectedOutput) => { + const correctedDate = suggestCorrectedDateFormat(inputDate); + expect(correctedDate).toBe(expectedOutput); + }, + ); +}); diff --git a/src/commands/api-mesh/__tests__/log-get.test.js b/src/commands/api-mesh/__tests__/log-get.test.js new file mode 100644 index 00000000..d853e42c --- /dev/null +++ b/src/commands/api-mesh/__tests__/log-get.test.js @@ -0,0 +1,234 @@ +/* +Copyright 2021 Adobe. All rights reserved. +This file is licensed to you under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
You may obtain a copy +of the License at http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed under +the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +OF ANY KIND, either express or implied. See the License for the specific language +governing permissions and limitations under the License. +*/ + +jest.mock('axios'); +jest.mock('@adobe/aio-lib-env'); +jest.mock('@adobe/aio-cli-lib-console'); +jest.mock('fs/promises'); +jest.mock('@adobe/aio-cli-lib-console', () => ({ + init: jest.fn().mockResolvedValue(mockConsoleCLIInstance), + cleanStdOut: jest.fn(), +})); +jest.mock('@adobe/aio-lib-ims'); +jest.mock('../../../helpers', () => ({ + initSdk: jest.fn().mockResolvedValue({}), + initRequestId: jest.fn().mockResolvedValue({}), +})); + +jest.mock('../../../lib/devConsole'); + +const mockConsoleCLIInstance = {}; +const selectedOrg = { id: '1234', code: 'CODE1234@AdobeOrg', name: 'ORG01', type: 'entp' }; +const selectedProject = { id: '5678', title: 'Project01' }; +const selectedWorkspace = { id: '123456789', title: 'Workspace01' }; + +const { writeFile } = require('fs/promises'); +const { initSdk } = require('../../../helpers'); +const FetchLogsCommand = require('../log-get'); +const { getLogsByRayId, getMeshId } = require('../../../lib/devConsole'); +const os = require('os'); +let logSpy = null; +let errorLogSpy = null; +let parseSpy = null; +let platformSpy = null; + +const mockIgnoreCacheFlag = Promise.resolve(true); + +describe('FetchLogsCommand tests', () => { + beforeEach(() => { + initSdk.mockResolvedValue({ + imsOrgId: selectedOrg.id, + projectId: selectedProject.id, + workspaceId: selectedWorkspace.id, + workspaceName: selectedWorkspace.title, + }); + + global.requestId = 'dummy_request_id'; + + logSpy = jest.spyOn(FetchLogsCommand.prototype, 'log'); + errorLogSpy = jest.spyOn(FetchLogsCommand.prototype, 'error'); + platformSpy = jest.spyOn(os, 
'platform'); + + writeFile.mockResolvedValue(true); + + parseSpy = jest.spyOn(FetchLogsCommand.prototype, 'parse'); + parseSpy.mockResolvedValue({ + args: { rayId: 'ray1' }, + flags: { + ignoreCache: mockIgnoreCacheFlag, + }, + }); + getMeshId.mockResolvedValue('12345'); + getLogsByRayId.mockResolvedValue({ + 'EventTimestampMs': 123456789, + 'Exceptions': 'None', + 'Logs': 'Log data', + 'Outcome': 'Success', + 'meshId': 'mesh1', + 'rayId': 'ray1', + 'URL': 'http://example.com', + 'Request Method': 'GET', + 'Response Status': 200, + }); + }); + + afterEach(() => { + platformSpy.mockRestore(); + }); + + test('snapshot FetchLogsCommand', () => { + expect(FetchLogsCommand.description).toMatchInlineSnapshot( + `"Get the Log of a given mesh by RayId"`, + ); + expect(FetchLogsCommand.args).toMatchInlineSnapshot(` + [ + { + "description": "Fetch a single log by rayID", + "name": "rayId", + "required": true, + }, + ] + `); + expect(FetchLogsCommand.flags).toMatchInlineSnapshot(` + { + "ignoreCache": { + "allowNo": false, + "char": "i", + "default": false, + "description": "Ignore cache and force manual org -> project -> workspace selection", + "parse": [Function], + "type": "boolean", + }, + } + `); + expect(FetchLogsCommand.aliases).toMatchInlineSnapshot(`[]`); + }); + + test('should handle log not found error', async () => { + getLogsByRayId.mockRejectedValue(new Error('LogNotFound')); + + const runResult = FetchLogsCommand.run(); + + return runResult.catch(err => { + expect(err.message).toMatchInlineSnapshot(` + "No logs found for RayID ray1. Check the RayID and try again. RequestId: dummy_request_id. Alternatively, you can use the following command to get all logs for a 30 minute time period: + aio api-mesh log-get-bulk --startTime YYYY-MM-DDTHH:MM:SSZ --endTime YYYY-MM-DDTHH:MM:SSZ --filename mesh_logs.csv" + `); + expect(logSpy.mock.calls).toMatchInlineSnapshot(`[]`); + expect(errorLogSpy.mock.calls).toMatchInlineSnapshot(` + [ + [ + "No logs found for RayID ray1. 
Check the RayID and try again. RequestId: dummy_request_id. Alternatively, you can use the following command to get all logs for a 30 minute time period: + aio api-mesh log-get-bulk --startTime YYYY-MM-DDTHH:MM:SSZ --endTime YYYY-MM-DDTHH:MM:SSZ --filename mesh_logs.csv", + ], + ] + `); + }); + }); + + test('should handle server error', async () => { + getLogsByRayId.mockRejectedValue(new Error('ServerError')); + + const runResult = FetchLogsCommand.run(); + + return runResult.catch(err => { + expect(err.message).toMatchInlineSnapshot( + `"Server error while fetching logs for RayId ray1. Please try again later. RequestId: dummy_request_id"`, + ); + expect(logSpy.mock.calls).toMatchInlineSnapshot(`[]`); + expect(errorLogSpy.mock.calls).toMatchInlineSnapshot(` + [ + [ + "Server error while fetching logs for RayId ray1. Please try again later. RequestId: dummy_request_id", + ], + ] + `); + }); + }); + + test('should handle generic error', async () => { + getLogsByRayId.mockRejectedValue({ + response: { status: 503, statusText: 'Service Unavailable' }, + }); + + const runResult = FetchLogsCommand.run(); + + return runResult.catch(err => { + expect(err.message).toMatchInlineSnapshot( + `"Unable to get mesh logs. Please check the details and try again. If the error persists please contact support. RequestId: dummy_request_id"`, + ); + expect(logSpy.mock.calls).toMatchInlineSnapshot(`[]`); + expect(errorLogSpy.mock.calls).toMatchInlineSnapshot(` + [ + [ + "Unable to get mesh logs. Please check the details and try again. If the error persists please contact support. RequestId: dummy_request_id", + ], + ] + `); + }); + }); + + test('should handle mesh ID not found error', async () => { + getMeshId.mockResolvedValue(null); + + const runResult = FetchLogsCommand.run(); + + return runResult.catch(err => { + expect(err.message).toMatchInlineSnapshot( + `"Unable to get mesh ID. Please check the details and try again. 
RequestId: dummy_request_id"`, + ); + expect(logSpy.mock.calls).toMatchInlineSnapshot(`[]`); + expect(errorLogSpy.mock.calls).toMatchInlineSnapshot(` + [ + [ + "Unable to get mesh ID. Please check the details and try again. RequestId: dummy_request_id", + ], + ] + `); + }); + }); + test('should fetch logs successfully', async () => { + getMeshId.mockResolvedValue('mesh1'); + getLogsByRayId.mockResolvedValue({ + eventTimestampMs: 1724660420904, + exceptions: '[]', + logs: + '[{\'Level\': \'log\', \'Message\': [\'[object Object]\'], \'TimestampMs\': 1724660422580}, {\'Level\': \'log\', \'Message\': [\'{"sources":[{"name":"venia","handler":{"graphql":{"useGETForQueries":true,"endpoint":"https://venia.magento.com/graphql","operationHeaders":{"x-test-header":"{context.headers[\\\'x-test-header\\\']}"}}}}],"responseConfig":{"includeHTTPDetails":true},"additionalResolvers":[],"plugins":[{"httpDetailsExtensions":{}}]}\'], \'TimestampMs\': 1724660422580}]', + outcome: 'ok', + meshId: 'mesh1', + rayId: 'ray1', + url: 'https://edge-dev1-graph.adobe.io/api/REDACTED/graphql', + requestMethod: 'POST', + responseStatus: 200, + level: 'log', + }); + + const command = new FetchLogsCommand(['ray1']); + await command.run(); + + expect(logSpy).toHaveBeenCalledWith('Event Timestamp : %s', 1724660420904); + expect(logSpy).toHaveBeenCalledWith('Exceptions : %s', '[]'); + expect(logSpy).toHaveBeenCalledWith( + 'Logs : %s', + '[{\'Level\': \'log\', \'Message\': [\'[object Object]\'], \'TimestampMs\': 1724660422580}, {\'Level\': \'log\', \'Message\': [\'{"sources":[{"name":"venia","handler":{"graphql":{"useGETForQueries":true,"endpoint":"https://venia.magento.com/graphql","operationHeaders":{"x-test-header":"{context.headers[\\\'x-test-header\\\']}"}}}}],"responseConfig":{"includeHTTPDetails":true},"additionalResolvers":[],"plugins":[{"httpDetailsExtensions":{}}]}\'], \'TimestampMs\': 1724660422580}]', + ); + expect(logSpy).toHaveBeenCalledWith('Outcome : %s', 'ok'); + 
expect(logSpy).toHaveBeenCalledWith('Mesh ID : %s', 'mesh1'); + expect(logSpy).toHaveBeenCalledWith('RayId : %s', 'ray1'); + expect(logSpy).toHaveBeenCalledWith( + 'Mesh URL : %s', + 'https://edge-dev1-graph.adobe.io/api/REDACTED/graphql', + ); + expect(logSpy).toHaveBeenCalledWith('Request Method : %s', 'POST'); + expect(logSpy).toHaveBeenCalledWith('Request Status : %s', 200); + }); +}); diff --git a/src/commands/api-mesh/__tests__/log-list.test.js b/src/commands/api-mesh/__tests__/log-list.test.js new file mode 100644 index 00000000..612abac1 --- /dev/null +++ b/src/commands/api-mesh/__tests__/log-list.test.js @@ -0,0 +1,162 @@ +const fs = require('fs'); +const ListLogsCommand = require('../log-list'); +const { initSdk, promptConfirm } = require('../../../helpers'); +const { getMeshId, listLogs } = require('../../../lib/devConsole'); + +jest.mock('fs'); +jest.mock('axios'); +jest.mock('../../../helpers', () => ({ + initSdk: jest.fn().mockResolvedValue({}), + initRequestId: jest.fn().mockResolvedValue({}), + promptConfirm: jest.fn().mockResolvedValue(true), +})); +jest.mock('../../../lib/devConsole'); +jest.mock('../../../classes/logger'); + +describe('List Logs Command', () => { + let parseSpy; + let logSpy; + + beforeEach(() => { + // Setup spies and mock functions + parseSpy = jest.spyOn(ListLogsCommand.prototype, 'parse').mockResolvedValue({ + flags: { + filename: 'test.csv', + ignoreCache: false, + }, + }); + + logSpy = jest.spyOn(ListLogsCommand.prototype, 'log'); + + // initRequestId.mockResolvedValue(); + initSdk.mockResolvedValue({ + imsOrgId: 'orgId', + imsOrgCode: 'orgCode', + projectId: 'projectId', + workspaceId: 'workspaceId', + workspaceName: 'workspaceName', + }); + getMeshId.mockResolvedValue('meshId'); + listLogs.mockResolvedValue([ + { + rayId: '8c171e8a9a47c16d', + timestamp: 1726052061861, + responseStatus: 200, + level: 'info', + }, + { + rayId: '8c171dd35860c16d', + timestamp: 1726052032540, + responseStatus: 200, + level: 'info', + }, + { 
+ rayId: '8c171dd22f00c16d', + timestamp: 1726052032348, + responseStatus: 200, + level: 'info', + }, + { + rayId: '8c171dd10df2c16d', + timestamp: 1726052032167, + responseStatus: 200, + level: 'info', + }, + ]); + + fs.existsSync.mockReturnValue(false); + fs.appendFileSync.mockReturnValue(); + promptConfirm.mockResolvedValue(true); + global.requestId = 'dummy_request_id'; + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + test('Throws an error if filename is not of csv extension', async () => { + // Mock the file system checks even if they are not the focus of this test + parseSpy.mockResolvedValue({ + flags: { + filename: 'test.txt', + ignoreCache: false, + }, + }); + + const command = new ListLogsCommand([], {}); + await expect(command.run()).rejects.toThrow( + 'Invalid file type. Provide a filename with a .csv extension.', + ); + }); + + test('Throws an error if file already exists', async () => { + fs.existsSync.mockReturnValue(true); + + const command = new ListLogsCommand([], {}); + await expect(command.run()).rejects.toThrow( + 'File test.csv already exists. Provide a new file name.', + ); + }); + + test('Throws an error is meshId is not found', async () => { + getMeshId.mockResolvedValue(null); + + const command = new ListLogsCommand([], {}); + const result = command.run(); + result.catch(err => { + expect(err.message).toMatchInlineSnapshot( + `"Unable to get mesh config. No mesh found for Org(orgId) -> Project(projectId) -> Workspace(workspaceId). Check the details and try again. 
RequestId: dummy_request_id"`, + ); + }); + }); + + test('Logs are listed successfully with file as output', async () => { + const command = new ListLogsCommand([], {}); + await command.run(); + expect(fs.appendFileSync).toHaveBeenCalled(); + }); + + test('Logs are listed successfully', async () => { + parseSpy.mockResolvedValue({ + flags: { + ignoreCache: false, + }, + }); + const command = new ListLogsCommand([], {}); + await command.run(); + expect(fs.appendFileSync).not.toHaveBeenCalled(); + }); + + test('No logs found message displayed when sms returns empty array with file as output', async () => { + listLogs.mockResolvedValue([]); + const command = new ListLogsCommand([], {}); + await command.run(); + expect(fs.appendFileSync).not.toHaveBeenCalled(); + expect(logSpy).toHaveBeenCalledWith( + `No recent logs found. Alternatively, you can use the following command to get all logs for a 30 minute time period: \naio api-mesh log-get-bulk --startTime YYYY-MM-DDTHH:MM:SSZ --endTime YYYY-MM-DDTHH:MM:SSZ --filename mesh_logs.csv`, + ); + }); + + test('No logs found message displayed when sms returns empty array', async () => { + parseSpy.mockResolvedValue({ + flags: { + ignoreCache: false, + }, + }); + listLogs.mockResolvedValue([]); + const command = new ListLogsCommand([], {}); + await command.run(); + expect(logSpy).toHaveBeenCalledWith( + `No recent logs found. 
Alternatively, you can use the following command to get all logs for a 30 minute time period: \naio api-mesh log-get-bulk --startTime YYYY-MM-DDTHH:MM:SSZ --endTime YYYY-MM-DDTHH:MM:SSZ --filename mesh_logs.csv`, + ); + expect(fs.appendFileSync).not.toHaveBeenCalled(); + }); + + test('Throw an error if SMS call fails', async () => { + listLogs.mockRejectedValue(new Error('SMS call failed')); + const command = new ListLogsCommand([], {}); + await expect(command.run()).rejects.toThrow( + 'Failed to list recent logs, RequestId: dummy_request_id', + ); + }); +}); diff --git a/src/commands/api-mesh/log-get-bulk.js b/src/commands/api-mesh/log-get-bulk.js new file mode 100644 index 00000000..e60a95ce --- /dev/null +++ b/src/commands/api-mesh/log-get-bulk.js @@ -0,0 +1,247 @@ +const { Command } = require('@oclif/core'); +const path = require('path'); +const fs = require('fs'); +const { initRequestId, initSdk, promptConfirm } = require('../../helpers'); +const { getMeshId, getPresignedUrls } = require('../../lib/devConsole'); +const logger = require('../../classes/logger'); +const axios = require('axios'); +const { + ignoreCacheFlag, + startTimeFlag, + endTimeFlag, + logFilenameFlag, + suggestCorrectedDateFormat, +} = require('../../utils'); + +require('dotenv').config(); + +class GetBulkLogCommand extends Command { + static flags = { + ignoreCache: ignoreCacheFlag, + startTime: startTimeFlag, + endTime: endTimeFlag, + filename: logFilenameFlag, + }; + + async run() { + // Column headers to be written as the first row in the output file + const columnHeaders = + 'EventTimestampMs,Exceptions,Logs,Outcome,MeshId,RayID,URL,Request Method,Response Status,Level'; + + await initRequestId(); + logger.info(`RequestId: ${global.requestId}`); + const { flags } = await this.parse(GetBulkLogCommand); + const ignoreCache = await flags.ignoreCache; + + const filename = await flags.filename; + + // Only supports files that end with .csv + if (!filename || 
path.extname(filename).toLowerCase() !== '.csv') { + this.error('Invalid file type. Provide a filename with a .csv extension.'); + return; + } + // Regular expression to validate the input date format YYYY-MM-DDTHH:MM:SSZ + const dateTimeRegex = /^(?:(\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01])T(0[0-9]|1[0-9]|2[0-3]):([0-5]\d):([0-5]\d)Z)$/; + + // Validate user provided startTime format + if (!dateTimeRegex.test(flags.startTime)) { + const correctedStartTime = suggestCorrectedDateFormat(flags.startTime); + if (!correctedStartTime) { + this.error('Found invalid date components for startTime. Check and correct the date.'); + } else { + this.error( + `Use the format YYYY-MM-DDTHH:MM:SSZ for startTime. Did you mean ${correctedStartTime}?`, + ); + } + + return; + } + + // Validate user provided endTime format + if (!dateTimeRegex.test(flags.endTime)) { + const correctedEndTime = suggestCorrectedDateFormat(flags.endTime); + //check for incorrect date components + if (!correctedEndTime) { + this.error('Found invalid date components for endTime. Check and correct the date.'); + } else { + this.error( + `Use the format YYYY-MM-DDTHH:MM:SSZ for endTime. 
Did you mean ${correctedEndTime}?`, + ); + } + return; + } + + // Properly format startTime and endTime strings before handing it over to SMS + const formattedStartTime = flags.startTime.replace(/-|:|Z/g, '').replace('T', 'T'); + const formattedEndTime = flags.endTime.replace(/-|:|Z/g, '').replace('T', 'T'); + + // Convert formatted times to Date objects for comparison + const startTime = new Date(flags.startTime); + const endTime = new Date(flags.endTime); + const now = new Date(); // Current time + + // Require both startTime and endTime + if (!startTime || !endTime) { + this.error('Provide both startTime and endTime.'); + return; + } + + // Get the current date and calculate the date 30 days ago, both in UTC + const today = new Date(); + const thirtyDaysAgo = new Date(today); + thirtyDaysAgo.setUTCDate(today.getUTCDate() - 30); + // Validate that logs from beyond 30 days from today are not available + if (startTime < thirtyDaysAgo || endTime < thirtyDaysAgo) { + this.error('Cannot get logs more than 30 days old. Adjust your time range.'); + return; + } + + // Validate required filename flag + if (!filename) { + this.error('Missing filename. 
Provide a valid file in the current working directory.'); +			return; +		} + +		// Check if the file exists +		const outputFile = path.resolve(process.cwd(), filename); + +		// Check if file exists and if doesn't, create one in the cwd and continue +		if (!fs.existsSync(outputFile)) { +			fs.writeFileSync(outputFile, ''); +		} + +		//check if the file is empty before proceeding +		const stats = fs.statSync(outputFile); +		if (stats.size > 0) { +			throw new Error(`Make sure the file: ${filename} is empty`); +		} +		// truncate milliseconds to ensure comparison is only done up to seconds +		startTime.setMilliseconds(0); +		endTime.setMilliseconds(0); + +		// Validate startTime < endTime +		if (startTime > endTime) { +			this.error('endTime must be greater than startTime'); +		} +		// Validate that endTime is not greater than the current time (now) +		if (endTime > now) { +			this.error('endTime cannot be in the future. Provide a valid endTime.'); +			return; +		} + +		// 4. Check if the duration between start and end times is greater than 30 minutes (1800 seconds) +		const timeDifferenceInSeconds = (endTime.getTime() - startTime.getTime()) / 1000; + +		if (timeDifferenceInSeconds > 1800) { +			const hours = Math.floor(timeDifferenceInSeconds / 3600); //hours calculation +			const minutes = Math.floor((timeDifferenceInSeconds % 3600) / 60); //minutes calculation +			const seconds = timeDifferenceInSeconds % 60; //seconds calculation + +			this.error( +				`Max duration between startTime and endTime should be 30 minutes. Current duration is ${hours} hour${ +					hours !== 1 ? 's' : '' +				} ${minutes} minute${minutes !== 1 ? 's' : ''} and ${seconds} second${ +					seconds !== 1 ? 
's' : '' +				}.`, +			); +			return; +		} +		logger.info('Calling initSdk...'); +		const { imsOrgId, imsOrgCode, projectId, workspaceId, workspaceName } = await initSdk({ +			ignoreCache, +		}); + +		// Retrieve meshId +		let meshId = null; +		try { +			meshId = await getMeshId(imsOrgId, projectId, workspaceId, workspaceName); +		} catch (err) { +			this.error(`Unable to get mesh ID: ${err.message}.`); +		} + +		if (!meshId) { +			this.error('Mesh ID not found.'); +		} + +		// 5. Call downloadFiles +		const { presignedUrls, totalSize } = await getPresignedUrls( +			imsOrgCode, +			projectId, +			workspaceId, +			meshId, +			formattedStartTime, +			formattedEndTime, +		); +		//If presigned URLs are not found, throw error saying that no logs are found +		if (!presignedUrls || presignedUrls.length === 0) { +			this.error('No logs found for the given time range.'); +		} + +		let shouldDownload = false; +		if (totalSize > 0) { +			const totalSizeKB = (totalSize / 1024).toFixed(2); // Convert bytes to KB +			// 7. Get user confirmation +			shouldDownload = await promptConfirm( +				`The expected file size is ${totalSizeKB} KB. Confirm ${filename} download? 
(y/n)`, +			); +			if (shouldDownload) { +				//create a writer and proceed with download +				const writer = fs.createWriteStream(outputFile, { flags: 'a' }); + +				// Write the column headers before appending the log content +				writer.write(`${columnHeaders}\n`); + +				// Stream the data from the signed URLs +				for (const urlObj of presignedUrls) { +					const { key, url } = urlObj; +					logger.info(`Downloading ${key} and appending to ${outputFile}...`); + +					try { +						const fileContentStream = await this.downloadFileContent(url); +						fileContentStream.pipe(writer, { end: false }); + +						await new Promise((resolve, reject) => { +							fileContentStream.on('end', resolve); +							fileContentStream.on('error', reject); +						}); + +						logger.info(`${key} content appended successfully.`); +					} catch (error) { +						logger.error(`Error downloading or appending content of ${key}:`, error); +					} +				} +				// Ensure the stream is closed +				writer.end(); + +				this.log(`Successfully downloaded the logs to ${filename}.`); +			} else { +				this.log('Log files not downloaded.'); +			} +		} else { +			this.error('No logs available to download'); +		} +	} +	/** +	 * Downloads the content of a file from the provided presigned URL. +	 * +	 * @param {string} url - presigned URL to download the log from +	 * @returns {Promise} - A promise that resolves to a readable stream of the file content +	 */ + +	async downloadFileContent(url) { +		return axios({ +			method: 'get', +			url: url, +			responseType: 'stream', +		}) +			.then(response => response.data) +			.catch(error => { +				logger.error('Error downloading log content:', error.message); +				throw error; +			}); +	} +} + +GetBulkLogCommand.description = 'Download all mesh logs for a selected time period.'; + +module.exports = GetBulkLogCommand; diff --git a/src/commands/api-mesh/log-get.js b/src/commands/api-mesh/log-get.js new file mode 100644 index 00000000..47004192 --- /dev/null +++ b/src/commands/api-mesh/log-get.js @@ -0,0 +1,83 @@ +/* +Copyright 2021 Adobe. All rights reserved. 
+This file is licensed to you under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. You may obtain a copy +of the License at http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software distributed under +the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +OF ANY KIND, either express or implied. See the License for the specific language +governing permissions and limitations under the License. +*/ +const { Command } = require('@oclif/core'); +const logger = require('../../classes/logger'); +const { initSdk, initRequestId } = require('../../helpers'); +const { ignoreCacheFlag } = require('../../utils'); +const { getMeshId, getLogsByRayId } = require('../../lib/devConsole'); +require('dotenv').config(); + +class FetchLogsCommand extends Command { +	static args = [{ name: 'rayId', required: true, description: 'Fetch a single log by rayID' }]; +	static flags = { +		ignoreCache: ignoreCacheFlag, +	}; + +	async run() { +		await initRequestId(); + +		logger.info(`RequestId: ${global.requestId}`); + +		const { args, flags } = await this.parse(FetchLogsCommand); + +		const ignoreCache = flags.ignoreCache; +		const rayId = args.rayId; + +		const { imsOrgId, imsOrgCode, projectId, workspaceId, workspaceName } = await initSdk({ +			ignoreCache, +		}); + +		let meshId = null; +		try { +			meshId = await getMeshId(imsOrgId, projectId, workspaceId, workspaceName); +			if (!meshId) { +				throw new Error('MeshIdNotFound'); +			} +		} catch (err) { +			this.error( +				`Unable to get mesh ID. Please check the details and try again. 
RequestId: ${global.requestId}`, + ); + } + + try { + const meshLog = await getLogsByRayId(imsOrgCode, projectId, workspaceId, meshId, rayId); + if (meshLog) { + this.log('Event Timestamp : %s', meshLog.eventTimestampMs); + this.log('Exceptions : %s', meshLog.exceptions); + this.log('Logs : %s', meshLog.logs); + this.log('Outcome : %s', meshLog.outcome); + this.log('Mesh ID : %s', meshLog.meshId); + this.log('RayId : %s', meshLog.rayId); + this.log('Mesh URL : %s', meshLog.url); + this.log('Request Method : %s', meshLog.requestMethod); + this.log('Request Status : %s', meshLog.responseStatus); + } + } catch (error) { + if (error.message === 'LogNotFound') { + this.error( + `No logs found for RayID ${rayId}. Check the RayID and try again. RequestId: ${global.requestId}. Alternatively, you can use the following command to get all logs for a 30 minute time period: \naio api-mesh log-get-bulk --startTime YYYY-MM-DDTHH:MM:SSZ --endTime YYYY-MM-DDTHH:MM:SSZ --filename mesh_logs.csv`, + ); + } else if (error.message === 'ServerError') { + this.error( + `Server error while fetching logs for RayId ${rayId}. Please try again later. RequestId: ${global.requestId}`, + ); + } else { + this.error( + `Unable to get mesh logs. Please check the details and try again. If the error persists please contact support. 
RequestId: ${global.requestId}`, +			); +		} +	} +} + +FetchLogsCommand.description = 'Get the Log of a given mesh by RayId'; + +module.exports = FetchLogsCommand; diff --git a/src/commands/api-mesh/log-list.js b/src/commands/api-mesh/log-list.js new file mode 100644 index 00000000..20cf92cf --- /dev/null +++ b/src/commands/api-mesh/log-list.js @@ -0,0 +1,105 @@ +const { Command } = require('@oclif/core'); + +const logger = require('../../classes/logger'); +const { initSdk, initRequestId } = require('../../helpers'); +const { ignoreCacheFlag, fileNameFlag } = require('../../utils'); +const { getMeshId, listLogs } = require('../../lib/devConsole'); +const { appendFileSync, existsSync } = require('fs'); +const { ux } = require('@oclif/core/lib/cli-ux'); +const path = require('path'); + +require('dotenv').config(); +class ListLogsCommand extends Command { +	static flags = { +		ignoreCache: ignoreCacheFlag, +		filename: fileNameFlag, +	}; + +	static enableJsonFlag = true; + +	async run() { +		await initRequestId(); + +		logger.info(`RequestId: ${global.requestId}`); + +		const { flags } = await this.parse(ListLogsCommand); + +		const { ignoreCache, filename } = await flags; + +		if (filename) { +			if (path.extname(filename).toLowerCase() !== '.csv') { +				this.error('Invalid file type. Provide a filename with a .csv extension.'); +			} +			const file = path.resolve(process.cwd(), filename); +			if (existsSync(file)) { +				this.error(`File ${filename} already exists. Provide a new file name.`); +			} +		} + +		const { imsOrgId, imsOrgCode, projectId, workspaceId, workspaceName } = await initSdk({ +			ignoreCache, +		}); + +		let meshId = null; + +		try { +			meshId = await getMeshId(imsOrgId, projectId, workspaceId, workspaceName); +		} catch (err) { +			this.error( +				`Unable to get mesh ID. Check the details and try again. 
RequestId: ${global.requestId}`, + ); + } + if (meshId) { + try { + const logs = await listLogs(imsOrgCode, projectId, workspaceId, meshId, filename); + + if (logs && logs.length > 0) { + // add a new line + this.log(); + ux.table( + logs, + { + rayId: { + header: 'Ray ID', + minWidth: 15, + }, + timestamp: { + header: 'Timestamp', + minWidth: 15, + }, + responseStatus: { + header: 'Response Status', + minWidth: 15, + }, + level: { + header: 'Level', + minWidth: 15, + }, + }, + { + printLine: filename + ? line => appendFileSync(filename, line + '\n') + : line => this.log(line), + csv: filename, + ...flags, + }, + ); + } else { + this.log( + 'No recent logs found. Alternatively, you can use the following command to get all logs for a 30 minute time period: \naio api-mesh log-get-bulk --startTime YYYY-MM-DDTHH:MM:SSZ --endTime YYYY-MM-DDTHH:MM:SSZ --filename mesh_logs.csv', + ); + } + } catch (error) { + this.error(`Failed to list recent logs, RequestId: ${global.requestId}`); + } + } else { + this.error( + `Unable to get mesh config. No mesh found for Org(${imsOrgId}) -> Project(${projectId}) -> Workspace(${workspaceId}). Check the details and try again. 
RequestId: ${global.requestId}`, + ); + } + } +} + +ListLogsCommand.description = 'Get recent logs of requests made to the API Mesh.'; + +module.exports = ListLogsCommand; diff --git a/src/lib/devConsole.js b/src/lib/devConsole.js index 577e265c..76240bef 100644 --- a/src/lib/devConsole.js +++ b/src/lib/devConsole.js @@ -112,6 +112,45 @@ const describeMesh = async (organizationId, projectId, workspaceId, workspaceNam } }; +/** + * List Recent Logs + * + * @param {*} organizationId + * @param {*} projectId + * @param {*} workspaceId + * @param {*} workspaceName + * @param {*} meshId + * @returns + */ +const listLogs = async (organizationCode, projectId, workspaceId, meshId, fileName) => { + const { accessToken, apiKey } = await getDevConsoleConfig(); + const url = `${SMS_BASE_URL}/organizations/${organizationCode}/projects/${projectId}/workspaces/${workspaceId}/meshes/${meshId}/logs/list`; + const config = { + method: 'get', + url: fileName ? url + `?filename=${fileName}` : url, + headers: { + 'Authorization': `Bearer ${accessToken}`, + 'x-request-id': global.requestId, + 'x-api-key': apiKey, + }, + }; + + logger.info('Initiating GET %s', url); + + try { + const response = await axios(config); + + logger.info('Response from GET %s', response.status); + + if (response?.status === 200) { + return response.data; + } + } catch (error) { + logger.error(`Error fetching recent logs: ${error}`); + throw error; + } +}; + const getMesh = async (organizationId, projectId, workspaceId, workspaceName, meshId) => { const { baseUrl: devConsoleUrl, accessToken, apiKey } = await getDevConsoleConfig(); const config = { @@ -1015,6 +1054,103 @@ const getPublicEncryptionKey = async organizationCode => { } }; +const getPresignedUrls = async ( + organizationCode, + projectId, + workspaceId, + meshId, + startTime, + endTime, +) => { + const { accessToken, apiKey } = await getDevConsoleConfig(); + const config = { + method: 'get', + url: 
`${SMS_BASE_URL}/organizations/${organizationCode}/projects/${projectId}/workspaces/${workspaceId}/meshes/${meshId}/logs?startDateTime=${startTime}&endDateTime=${endTime}`, +		headers: { +			'Authorization': `Bearer ${accessToken}`, +			'x-request-id': global.requestId, +			'x-api-key': apiKey, +		}, +	}; + +	logger.info( +		'Initiating GET %s', +		`${SMS_BASE_URL}/organizations/${organizationCode}/projects/${projectId}/workspaces/${workspaceId}/meshes/${meshId}/logs?startDateTime=${startTime}&endDateTime=${endTime}`, +	); + +	try { +		const response = await axios(config); + +		logger.info('Response from GET %s', response.status); + +		if (response?.status === 200) { +			logger.info(`Presigned urls : ${objToString(response, ['data'])}`); +			const { presignedUrls, totalSize } = response.data; +			return { +				presignedUrls, +				totalSize, +			}; +		} +	} catch (error) { +		logger.error(`Error fetching presigned urls: ${error}`); +		return { +			presignedUrls: [], +			totalSize: 0, +		}; +	} +}; + +const getLogsByRayId = async (organizationCode, projectId, workspaceId, meshId, rayId) => { +	const { accessToken, apiKey } = await getDevConsoleConfig(); +	const config = { +		method: 'get', +		url: `${SMS_BASE_URL}/organizations/${organizationCode}/projects/${projectId}/workspaces/${workspaceId}/meshes/${meshId}/logs/${rayId}`, +		headers: { +			'Authorization': `Bearer ${accessToken}`, +			'x-request-id': global.requestId, +			'x-api-key': apiKey, +		}, +	}; + +	logger.info( +		'Initiating GET %s', +		`${SMS_BASE_URL}/organizations/${organizationCode}/projects/${projectId}/workspaces/${workspaceId}/meshes/${meshId}/logs/${rayId}`, +	); + +	try { +		const response = await axios(config); + +		logger.info('Response from GET log %s', response.status); +		if (response?.status === 200) { +			logger.info(`Fetched log: ${objToString(response, ['data'])}`); +			return response.data; +		} else { +			let errorMessage = `Unexpected response status: ${response.status}`; +			logger.error(errorMessage); +			throw new Error(errorMessage); +		} +	} 
catch (error) { + logger.info('Response from GET Logs %s', error.response.status); + if (error.response.status === 404) { + // The request was made and the server responded with a 404 status code + logger.error('Logs not found for the given Ray ID'); + + let errorMessage = `LogNotFound`; + logger.error(`${errorMessage}. Received ${error.response.status}, expected 200`); + throw new Error(errorMessage); + } else if (error.response.status === 500) { + // Handle 500 Internal Server Error + let errorMessage = `ServerError`; + logger.error(errorMessage); + throw new Error(errorMessage); + } else { + let errorMessage = `Something went wrong while getting logs. Received ${error.response.status}`; + logger.error(errorMessage); + throw new Error(errorMessage); + } + } +}; + module.exports = { getApiKeyCredential, describeMesh, @@ -1023,6 +1159,7 @@ module.exports = { updateMesh, deleteMesh, getMeshId, + listLogs, createAPIMeshCredentials, getListOfCurrentServices, subscribeCredentialToServices, @@ -1032,4 +1169,6 @@ module.exports = { getTenantFeatures, getMeshDeployments, getPublicEncryptionKey, + getPresignedUrls, + getLogsByRayId, }; diff --git a/src/utils.js b/src/utils.js index 44eb7687..5dee0f49 100644 --- a/src/utils.js +++ b/src/utils.js @@ -67,6 +67,25 @@ const selectFlag = Flags.boolean({ default: false, }); +const fileNameFlag = Flags.string({ + description: 'Name of CSV file to export the recent logs to', +}); + +const startTimeFlag = Flags.string({ + description: 'Start time for the logs in UTC', + required: true, +}); + +const endTimeFlag = Flags.string({ + description: 'End time for the logs in UTC', + required: true, +}); + +const logFilenameFlag = Flags.string({ + description: 'Path to the output file for logs', + required: true, +}); + /** * Parse the meshConfig and get the list of (local) files to be imported * @@ -534,6 +553,52 @@ function reduceConsecutiveBackslashes(str) { return result; } +/** + * Helper function to suggest a corrected format for the 
user provided input date + * @param {string} inputDate + */ +function suggestCorrectedDateFormat(inputDate) { + // Remove any non-numeric characters except 'T' and 'Z' + let correctedDate = inputDate.replace(/[^\dTZ]/g, ''); + + // If "T" is missing, insert it between the date and time + if (!/T/.test(correctedDate) && correctedDate.length >= 14) { + correctedDate = correctedDate.slice(0, 8) + 'T' + correctedDate.slice(8); + } + + // Extract date components for validation + const month = parseInt(correctedDate.slice(4, 6), 10); + const day = parseInt(correctedDate.slice(6, 8), 10); + const hour = parseInt(correctedDate.slice(9, 11), 10); + const minute = parseInt(correctedDate.slice(11, 13), 10); + const second = parseInt(correctedDate.slice(13, 15), 10); + + // Check for invalid month, day, hour, minute, second + const isValidDate = + month >= 1 && + month <= 12 && + day >= 1 && + day <= 31 && // Note: Can be further validated by month and year + hour >= 0 && + hour <= 23 && + minute >= 0 && + minute <= 59 && + second >= 0 && + second <= 59; + + if (!isValidDate) { + return null; // Or return an error-specific message for better UX + } + + // Add missing characters to match the correct format + correctedDate = correctedDate.replace( + /(\d{4})(\d{2})(\d{2})T?(\d{2})(\d{2})(\d{2})Z?/, + '$1-$2-$3T$4:$5:$6Z', + ); + + return correctedDate; +} + module.exports = { ignoreCacheFlag, autoConfirmActionFlag, @@ -548,7 +613,12 @@ module.exports = { debugFlag, selectFlag, secretsFlag, + fileNameFlag, interpolateSecrets, validateSecretsFile, encryptSecrets, + startTimeFlag, + endTimeFlag, + logFilenameFlag, + suggestCorrectedDateFormat, };