diff --git a/.borp.int.yaml b/.borp.int.yaml
index 74626df765..17789c23d5 100644
--- a/.borp.int.yaml
+++ b/.borp.int.yaml
@@ -1,7 +1,6 @@
 files:
   - 'test/integration/**/*.tap.js'
-  # We can't do a simple '**/*.test.js' because the "uninstrumented" suite
-  # includes a `node_modules` directory that includes several `.test.js` files.
-  - 'test/integration/*.test.js'
-  - 'test/integration/cat/*.test.js'
-  - 'test/integration/config/*.test.js'
+  - 'test/integration/**/*.test.js'
+  # We need to ignore any `*.test.js` files within nested `node_modules`
+  # directories. We only want to target the tests we have written ourselves.
+  - '!test/integration/**/node_modules/**/*'
diff --git a/THIRD_PARTY_NOTICES.md b/THIRD_PARTY_NOTICES.md
index c89397551d..904e3d606a 100644
--- a/THIRD_PARTY_NOTICES.md
+++ b/THIRD_PARTY_NOTICES.md
@@ -510,7 +510,7 @@ This product includes source derived from [@grpc/proto-loader](https://github.co

 ### @newrelic/security-agent

-This product includes source derived from [@newrelic/security-agent](https://github.com/newrelic/csec-node-agent) ([v2.0.0](https://github.com/newrelic/csec-node-agent/tree/v2.0.0)), distributed under the [UNKNOWN License](https://github.com/newrelic/csec-node-agent/blob/v2.0.0/LICENSE):
+This product includes source derived from [@newrelic/security-agent](https://github.com/newrelic/csec-node-agent) ([v2.1.1](https://github.com/newrelic/csec-node-agent/tree/v2.1.1)), distributed under the [UNKNOWN License](https://github.com/newrelic/csec-node-agent/blob/v2.1.1/LICENSE):

 ```
 ## New Relic Software License v1.0
@@ -1042,7 +1042,7 @@ IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

 ### winston-transport

-This product includes source derived from [winston-transport](https://github.com/winstonjs/winston-transport) ([v4.8.0](https://github.com/winstonjs/winston-transport/tree/v4.8.0)), distributed under the [MIT License](https://github.com/winstonjs/winston-transport/blob/v4.8.0/LICENSE):
+This product includes source derived from [winston-transport](https://github.com/winstonjs/winston-transport) ([v4.9.0](https://github.com/winstonjs/winston-transport/tree/v4.9.0)), distributed under the [MIT License](https://github.com/winstonjs/winston-transport/blob/v4.9.0/LICENSE):

 ```
 The MIT License (MIT)
@@ -1075,7 +1075,7 @@

 ### @aws-sdk/client-s3

-This product includes source derived from [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3) ([v3.676.0](https://github.com/aws/aws-sdk-js-v3/tree/v3.676.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js-v3/blob/v3.676.0/LICENSE):
+This product includes source derived from [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3) ([v3.698.0](https://github.com/aws/aws-sdk-js-v3/tree/v3.698.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js-v3/blob/v3.698.0/LICENSE):

 ```
 Apache License
@@ -1284,7 +1284,7 @@ This product includes source derived from [@aws-sdk/client-s3](https://github.co

 ### @aws-sdk/s3-request-presigner

-This product includes source derived from [@aws-sdk/s3-request-presigner](https://github.com/aws/aws-sdk-js-v3) ([v3.676.0](https://github.com/aws/aws-sdk-js-v3/tree/v3.676.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js-v3/blob/v3.676.0/LICENSE):
+This product includes source derived from [@aws-sdk/s3-request-presigner](https://github.com/aws/aws-sdk-js-v3) ([v3.698.0](https://github.com/aws/aws-sdk-js-v3/tree/v3.698.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js-v3/blob/v3.698.0/LICENSE):

 ```
 Apache License
@@ -2711,7 +2711,7 @@ THE SOFTWARE.

 ### aws-sdk

-This product includes source derived from [aws-sdk](https://github.com/aws/aws-sdk-js) ([v2.1691.0](https://github.com/aws/aws-sdk-js/tree/v2.1691.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js/blob/v2.1691.0/LICENSE.txt):
+This product includes source derived from [aws-sdk](https://github.com/aws/aws-sdk-js) ([v2.1692.0](https://github.com/aws/aws-sdk-js/tree/v2.1692.0)), distributed under the [Apache-2.0 License](https://github.com/aws/aws-sdk-js/blob/v2.1692.0/LICENSE.txt):

 ```

@@ -2921,7 +2921,7 @@ This product includes source derived from [aws-sdk](https://github.com/aws/aws-s

 ### borp

-This product includes source derived from [borp](https://github.com/mcollina/borp) ([v0.18.0](https://github.com/mcollina/borp/tree/v0.18.0)), distributed under the [MIT License](https://github.com/mcollina/borp/blob/v0.18.0/LICENSE):
+This product includes source derived from [borp](https://github.com/mcollina/borp) ([v0.19.0](https://github.com/mcollina/borp/tree/v0.19.0)), distributed under the [MIT License](https://github.com/mcollina/borp/blob/v0.19.0/LICENSE):

 ```
 MIT License
diff --git a/package.json b/package.json
index 157a13e730..895104f8f4 100644
--- a/package.json
+++ b/package.json
@@ -229,7 +229,7 @@
     "ajv": "^6.12.6",
     "async": "^3.2.4",
     "aws-sdk": "^2.1604.0",
-    "borp": "^0.18.0",
+    "borp": "^0.19.0",
     "c8": "^8.0.1",
     "clean-jsdoc-theme": "^4.2.18",
     "commander": "^7.0.0",
diff --git a/test/integration/agent/multiple.tap.js b/test/integration/agent/multiple.test.js
similarity index 68%
rename from test/integration/agent/multiple.tap.js
rename to test/integration/agent/multiple.test.js
index 42a2521206..39374fa2d4 100644
--- a/test/integration/agent/multiple.tap.js
+++ b/test/integration/agent/multiple.test.js
@@ -5,10 +5,12 @@
 'use strict'

-const test = require('tap').test
+const test = require('node:test')
+const assert = require('node:assert')
+
 const metricPrefix = require('../../../lib/metrics/names').SUPPORTABILITY.PREFIX

-test('Multiple require("newrelic")', function (t) {
+test('Multiple require("newrelic")', () => {
   process.env.NEW_RELIC_ENABLED = true
   process.env.NEW_RELIC_APP_NAME = 'agent test'

@@ -19,8 +21,7 @@ test('Multiple require("newrelic")', function (t) {
   const second = require(path)
-  t.equal(first, second)
+  assert.equal(first, second)

   const doubleLoadMetric = second.agent.metrics.getOrCreateMetric(`${metricPrefix}Agent/DoubleLoad`)
-  t.equal(doubleLoadMetric.callCount, 1, 'should have tried to double-load the agent once')
-  t.end()
+  assert.equal(doubleLoadMetric.callCount, 1, 'should have tried to double-load the agent once')
 })
diff --git a/test/integration/agent/serverless-harvest.tap.js b/test/integration/agent/serverless-harvest.tap.js
deleted file mode 100644
index 0e1767f219..0000000000
--- a/test/integration/agent/serverless-harvest.tap.js
+++ /dev/null
@@ -1,411 +0,0 @@
-/*
- * Copyright 2020 New Relic Corporation. All rights reserved.
- * SPDX-License-Identifier: Apache-2.0
- */
-
-'use strict'
-const fs = require('fs')
-
-const helper = require('../../lib/agent_helper')
-const tap = require('tap')
-const sinon = require('sinon')
-const API = require('../../../api')
-const util = require('util')
-
-const DESTS = require('../../../lib/config/attribute-filter').DESTINATIONS
-const TEST_ARN = 'test:arn'
-const TEST_FUNC_VERSION = '$LATEST'
-const TEST_EX_ENV = 'test-AWS_Lambda_nodejs8.10'
-const PROTOCOL_VERSION = 16
-
-tap.test('Serverless mode harvest', (t) => {
-  t.autoend()
-
-  let agent = null
-  let logSpy = null
-
-  process.env.AWS_EXECUTION_ENV = TEST_EX_ENV
-
-  t.beforeEach(async () => {
-    logSpy = sinon.spy(fs, 'writeSync')
-    agent = helper.instrumentMockedAgent({
-      serverless_mode: {
-        enabled: true
-      },
-      app_name: 'serverless mode tests',
-      license_key: '' // serverless mode doesn't require license key
-    })
-    agent.setLambdaArn(TEST_ARN)
-    agent.setLambdaFunctionVersion(TEST_FUNC_VERSION)
-
-    const agentStart = util.promisify(agent.start).bind(agent)
-    await agentStart()
-  })
-
-  t.afterEach(async () => {
-    logSpy && logSpy.restore()
-    helper.unloadAgent(agent)
-
-    const agentStop = util.promisify(agent.stop).bind(agent)
-    await agentStop()
-  })
-
-  t.test('simple harvest', (t) => {
-    t.plan(5)
-    let transaction
-    const proxy = agent.tracer.transactionProxy(() => {
-      transaction = agent.getTransaction()
-      transaction.finalizeNameFromUri('/nonexistent', 501)
-    })
-    proxy()
-
-    // ensure it's slow enough to get traced
-    transaction.trace.setDurationInMillis(5001)
-    transaction.end()
-    agent.once('harvestFinished', () => {
-      const payload = JSON.parse(logSpy.args[0][1])
-
-      t.equal(payload[0], 1, 'payload has expected version')
-      t.equal(payload[1], 'NR_LAMBDA_MONITORING', 'payload has expected marker')
-
-      helper.decodeServerlessPayload(t, payload[2], (err, decoded) => {
-        if (err) {
-          return t.fail(err, 'decompression failed')
-        }
-
-        t.ok(decoded.metadata, 'decoded payload has metadata object')
-        t.same(
-          decoded.metadata,
-          {
-            arn: TEST_ARN,
-            function_version: TEST_FUNC_VERSION,
-            execution_environment: TEST_EX_ENV,
-            protocol_version: PROTOCOL_VERSION,
-            agent_version: agent.version,
-            agent_language: 'nodejs'
-          },
-          'metadata object has expected data'
-        )
-        t.ok(decoded.data, 'decoded payload has data object')
-        t.end()
-      })
-    })
-    agent.harvestSync()
-  })
-
-  t.test('sending metrics', (t) => {
-    t.plan(5)
-    agent.metrics.measureMilliseconds('TEST/discard', null, 101)
-
-    const metrics = agent.metrics._metrics.toJSON()
-    t.ok(findMetric(metrics, 'TEST/discard'), 'the test metric should be present')
-
-    let error
-    try {
-      agent.harvestSync()
-    } catch (err) {
-      error = err
-    }
-    t.error(error, 'should send metrics without error')
-
-    checkCompressedPayload(
-      t,
-      findPayload(logSpy.args[0])[2],
-      'metric_data',
-      function checkData(payload) {
-        t.ok(payload, 'should have a payload')
-        t.same(payload[3][0][0], { name: 'TEST/discard' }, 'should have test metric')
-        t.end()
-      }
-    )
-  })
-
-  t.test('sending error traces', (t) => {
-    t.plan(4)
-
-    helper.runInTransaction(agent, (tx) => {
-      tx.finalizeNameFromUri('/nonexistent', 501)
-      tx.trace.attributes.addAttribute(DESTS.ERROR_EVENT, 'foo', 'bar')
-      tx.trace.attributes.addAttribute(DESTS.ERROR_EVENT, 'request.uri', '/nonexistent')
-      agent.errors.add(tx, new Error('test error'))
-      const spanId = agent.tracer.getSegment().id
-
-      tx.end()
-      agent.once('harvestFinished', () => {
-        checkCompressedPayload(
-          t,
-          findPayload(logSpy.args[0])[2],
-          'error_data',
-          function checkData(payload) {
-            t.ok(payload, 'should have a payload')
-            const errData = payload[1][0][4]
-            t.ok(errData, 'should contain error information')
-            const attrs = errData.agentAttributes
-            t.same(
-              attrs,
-              { 'foo': 'bar', 'request.uri': '/nonexistent', spanId },
-              'should have the correct attributes'
-            )
-            t.end()
-          }
-        )
-      })
-      agent.harvestSync()
-    })
-  })
-
-  t.test('sending traces', (t) => {
-    t.plan(4)
-
-    let transaction
-    const proxy = agent.tracer.transactionProxy(() => {
-      transaction = agent.getTransaction()
-      transaction.finalizeNameFromUri('/nonexistent', 200)
-    })
-    proxy()
-
-    // ensure it's slow enough to get traced
-    transaction.trace.setDurationInMillis(5001)
-    transaction.end()
-    agent.once('harvestFinished', () => {
-      checkCompressedPayload(
-        t,
-        findPayload(logSpy.args[0])[2],
-        'transaction_sample_data',
-        function checkData(payload) {
-          t.ok(payload, 'should have trace payload')
-          t.type(payload[1][0], 'Array', 'should have trace')
-          t.type(payload[1][0][4], 'string', 'should have encoded trace')
-          t.end()
-        }
-      )
-    })
-    agent.harvestSync()
-  })
-
-  t.test('serverless_mode harvest should disregard sampling limits', (t) => {
-    t.plan(4)
-
-    agent.config.transaction_events.max_samples_stored = 0
-
-    let transaction
-    const proxy = agent.tracer.transactionProxy(() => {
-      transaction = agent.getTransaction()
-      transaction.finalizeNameFromUri('/nonexistent', 200)
-    })
-    proxy()
-
-    // ensure it's slow enough to get traced
-    transaction.trace.setDurationInMillis(5001)
-    transaction.end()
-    agent.once('harvestFinished', () => {
-      checkCompressedPayload(
-        t,
-        findPayload(logSpy.args[0])[2],
-        'transaction_sample_data',
-        function checkData(payload) {
-          t.ok(payload, 'should have trace payload')
-          t.type(payload[1][0], 'Array', 'should have trace')
-          t.type(payload[1][0][4], 'string', 'should have encoded trace')
-          t.end()
-        }
-      )
-    })
-    agent.harvestSync()
-  })
-
-  t.test('sending span events', (t) => {
-    t.plan(4)
-
-    agent.config.distributed_tracing.enabled = true
-    agent.config.span_events.enabled = true
-
-    helper.runInTransaction(agent, (tx) => {
-      setTimeout(() => {
-        // Just to create an extra span.
-        tx.finalizeNameFromUri('/some/path', 200)
-        tx.end()
-        agent.once('harvestFinished', end)
-        agent.harvestSync()
-      }, 100)
-    })
-
-    function end() {
-      checkCompressedPayload(
-        t,
-        findPayload(logSpy.args[0])[2],
-        'span_event_data',
-        function checkData(payload) {
-          t.ok(payload, 'should have trace payload')
-          t.type(payload[2], 'Array', 'should have spans')
-          t.equal(payload[2].length, 2, 'should have all spans')
-          t.end()
-        }
-      )
-    }
-  })
-
-  t.test('sending error events', (t) => {
-    helper.runInTransaction(agent, (tx) => {
-      tx.finalizeNameFromUri('/nonexistent', 501)
-      tx.trace.attributes.addAttribute(DESTS.ERROR_EVENT, 'foo', 'bar')
-      tx.trace.attributes.addAttribute(DESTS.ERROR_EVENT, 'request.uri', '/nonexistent')
-      agent.errors.add(tx, new Error('test error'))
-      const spanId = agent.tracer.getSegment().id
-
-      tx.end()
-      agent.once('harvestFinished', () => {
-        const rawPayload = findPayload(logSpy.args[0])
-        const encodedData = rawPayload[2]
-
-        checkCompressedPayload(t, encodedData, 'error_event_data', function checkData(payload) {
-          t.ok(payload, 'should have a payload')
-
-          const [runId, eventMetrics, eventData] = payload
-
-          // runid should be null/undefined
-          t.notOk(runId)
-
-          t.equal(eventMetrics.events_seen, 1)
-
-          const expectedSize = agent.config.error_collector.max_event_samples_stored
-          t.equal(eventMetrics.reservoir_size, expectedSize)
-
-          const errorEvent = eventData[0]
-          const [intrinsicAttr /* skip user */, , agentAttr] = errorEvent
-
-          t.equal(intrinsicAttr.type, 'TransactionError')
-
-          t.same(
-            agentAttr,
-            { 'foo': 'bar', 'request.uri': '/nonexistent', spanId },
-            'should have the correct attributes'
-          )
-          t.end()
-        })
-      })
-      agent.harvestSync()
-    })
-  })
-
-  t.test('sending custom events', (t) => {
-    helper.runInTransaction(agent, (tx) => {
-      tx.finalizeNameFromUri('/nonexistent', 501)
-
-      const expectedEventType = 'myEvent'
-      const expectedAttributes = { foo: 'bar' }
-
-      const api = new API(agent)
-      api.recordCustomEvent(expectedEventType, expectedAttributes)
-
-      tx.end()
-      agent.once('harvestFinished', () => {
-        const rawPayload = findPayload(logSpy.args[0])
-        const encodedData = rawPayload[2]
-
-        checkCompressedPayload(t, encodedData, 'custom_event_data', function checkData(payload) {
-          t.ok(payload, 'should have a payload')
-
-          const [runId, eventData] = payload
-
-          // runid should be null/undefined
-          t.notOk(runId)
-
-          const customEvent = eventData[0]
-          const [intrinsicAttr, userAttr] = customEvent
-
-          t.equal(intrinsicAttr.type, expectedEventType)
-
-          t.same(userAttr, expectedAttributes, 'should have the correct attributes')
-          t.end()
-        })
-      })
-      agent.harvestSync()
-    })
-  })
-
-  t.test('sending sql traces', (t) => {
-    helper.runInTransaction(agent, (tx) => {
-      const expectedUrl = '/nonexistent'
-
-      tx.finalizeNameFromUri(expectedUrl, 501)
-
-      agent.config.transaction_tracer.record_sql = 'raw'
-      agent.config.transaction_tracer.explain_threshold = 0
-      agent.config.slow_sql.enabled = true
-
-      const expectedSql = 'select pg_sleep(1)'
-
-      agent.queries.add(tx.trace.root, 'postgres', expectedSql, 'FAKE STACK')
-
-      tx.end()
-      agent.once('harvestFinished', () => {
-        const rawPayload = findPayload(logSpy.args[0])
-        const encodedData = rawPayload[2]
-
-        checkCompressedPayload(t, encodedData, 'sql_trace_data', function checkData(payload) {
-          t.ok(payload, 'should have a payload')
-
-          const [runId, samples] = payload
-
-          // runid should be null/undefined
-          t.notOk(runId)
-
-          const sample = samples[0]
-
-          const transactionUrl = sample[1]
-          const sql = sample[3]
-          const count = sample[5]
-          const encodedParams = sample[9]
-
-          t.equal(transactionUrl, expectedUrl)
-          t.equal(sql, expectedSql)
-          t.equal(count, 1)
-
-          // won't have anything interesting added this way
-          t.ok(encodedParams)
-
-          t.end()
-        })
-      })
-      agent.harvestSync()
-    })
-  })
-})
-
-function findMetric(metrics, name) {
-  for (let i = 0; i < metrics.length; i++) {
-    const metric = metrics[i]
-    if (metric[0].name === name) {
-      return metric
-    }
-  }
-}
-
-function checkCompressedPayload(t, payload, prop, cb) {
-  helper.decodeServerlessPayload(t, payload, (err, decoded) => {
-    if (err) {
-      return t.error(err)
-    }
-
-    const data = decoded.data[prop]
-    t.ok(data, `compressed payload includes ${prop} prop`)
-
-    for (const key in decoded.data) {
-      if (!decoded.data[key].length) {
-        t.fail(`payload data.${key} property is empty`)
-      }
-    }
-
-    cb(decoded.data[prop])
-  })
-}
-
-function findPayload(args) {
-  for (let i = 0; i < args.length; ++i) {
-    const arg = args[i]
-    if (typeof arg === 'string') {
-      return JSON.parse(arg)
-    }
-  }
-}
diff --git a/test/integration/agent/serverless-harvest.test.js b/test/integration/agent/serverless-harvest.test.js
new file mode 100644
index 0000000000..9b025ccfdf
--- /dev/null
+++ b/test/integration/agent/serverless-harvest.test.js
@@ -0,0 +1,435 @@
+/*
+ * Copyright 2024 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+'use strict'
+
+const test = require('node:test')
+const util = require('node:util')
+const fs = require('node:fs')
+const tspl = require('@matteo.collina/tspl')
+
+const helper = require('../../lib/agent_helper')
+const API = require('../../../api')
+
+const DESTS = require('../../../lib/config/attribute-filter').DESTINATIONS
+const TEST_ARN = 'test:arn'
+const TEST_FUNC_VERSION = '$LATEST'
+const TEST_EX_ENV = 'test-AWS_Lambda_nodejs8.10'
+const PROTOCOL_VERSION = 16
+
+test.beforeEach(async (ctx) => {
+  process.env.AWS_EXECUTION_ENV = TEST_EX_ENV
+
+  ctx.nr = {}
+
+  // The ServerlessCollector ultimately performs a `fs.writeSync` to the
+  // `stdout.fd` in order to issue logs for CloudWatch to pick up. We want
+  // to verify those logs in these tests, and also suppress their output
+  // during the tests. So we monkey patch the function.
+  ctx.nr.fsWriteSync = fs.writeSync
+  ctx.nr.writeLogs = []
+  fs.writeSync = (target, data) => {
+    ctx.nr.writeLogs.push(data)
+  }
+
+  ctx.nr.agent = helper.instrumentMockedAgent({
+    serverless_mode: { enabled: true },
+    app_name: 'serverless mode tests',
+    license_key: '' // serverless mode doesn't require license key
+  })
+  ctx.nr.agent.setLambdaArn(TEST_ARN)
+  ctx.nr.agent.setLambdaFunctionVersion(TEST_FUNC_VERSION)
+
+  const agentStart = util.promisify(ctx.nr.agent.start).bind(ctx.nr.agent)
+  await agentStart()
+})
+
+test.afterEach(async (ctx) => {
+  delete process.env.AWS_EXECUTION_ENV
+
+  fs.writeSync = ctx.nr.fsWriteSync
+  helper.unloadAgent(ctx.nr.agent)
+
+  const agentStop = util.promisify(ctx.nr.agent.stop).bind(ctx.nr.agent)
+  await agentStop()
+})
+
+test('simple harvest', async (t) => {
+  const { agent } = t.nr
+  const plan = tspl(t, { plan: 6 })
+
+  let transaction
+  const proxy = agent.tracer.transactionProxy(() => {
+    transaction = agent.getTransaction()
+    transaction.finalizeNameFromUri('/nonexistent', 501)
+  })
+  proxy()
+
+  // Ensure it's slow enough to get traced.
+  transaction.trace.setDurationInMillis(5_001)
+  transaction.end()
+  agent.once('harvestFinished', () => {
+    const payload = JSON.parse(t.nr.writeLogs.shift())
+
+    plan.equal(payload[0], 1, 'payload has expected version')
+    plan.equal(payload[1], 'NR_LAMBDA_MONITORING', 'payload has expected marker')
+
+    helper.decodeServerlessPayload(payload[2], (error, decoded) => {
+      plan.ifError(error, 'decompression should not fail')
+      plan.ok(decoded.metadata, 'decoded payload has metadata object')
+      plan.deepStrictEqual(
+        decoded.metadata,
+        {
+          arn: TEST_ARN,
+          function_version: TEST_FUNC_VERSION,
+          execution_environment: TEST_EX_ENV,
+          protocol_version: PROTOCOL_VERSION,
+          agent_version: agent.version,
+          agent_language: 'nodejs'
+        },
+        'metadata object has expected data'
+      )
+      plan.ok(decoded.data, 'decoded payload has data object')
+    })
+  })
+  agent.harvestSync()
+
+  await plan.completed
+})
+
+test('sending metrics', async (t) => {
+  const plan = tspl(t, { plan: 6 })
+  const { agent } = t.nr
+  agent.metrics.measureMilliseconds('TEST/discard', null, 101)
+
+  const metrics = agent.metrics._metrics.toJSON()
+  plan.ok(findMetric(metrics, 'TEST/discard'), 'the test metric should be present')
+
+  let error
+  try {
+    agent.harvestSync()
+  } catch (err) {
+    error = err
+  }
+  plan.ifError(error, 'should send metrics without error')
+
+  checkCompressedPayload(
+    plan,
+    findPayload(t.nr.writeLogs)[2],
+    'metric_data',
+    function checkData(payload) {
+      plan.ok(payload, 'should have a payload')
+      plan.deepStrictEqual(payload[3][0][0], { name: 'TEST/discard' }, 'should have test metric')
+    }
+  )
+
+  await plan.completed
+})
+
+test('sending error traces', async (t) => {
+  const plan = tspl(t, { plan: 5 })
+  const { agent } = t.nr
+
+  helper.runInTransaction(agent, (tx) => {
+    tx.finalizeNameFromUri('/nonexistent', 501)
+    tx.trace.attributes.addAttribute(DESTS.ERROR_EVENT, 'foo', 'bar')
+    tx.trace.attributes.addAttribute(DESTS.ERROR_EVENT, 'request.uri', '/nonexistent')
+    agent.errors.add(tx, new Error('test error'))
+    const spanId = agent.tracer.getSegment().id
+
+    tx.end()
+    agent.once('harvestFinished', () => {
+      checkCompressedPayload(
+        plan,
+        findPayload(t.nr.writeLogs)[2],
+        'error_data',
+        function checkData(payload) {
+          plan.ok(payload, 'should have a payload')
+          const errData = payload[1][0][4]
+          plan.ok(errData, 'should contain error information')
+          const attrs = errData.agentAttributes
+          plan.deepStrictEqual(
+            attrs,
+            { 'foo': 'bar', 'request.uri': '/nonexistent', spanId },
+            'should have the correct attributes'
+          )
+        }
+      )
+    })
+    agent.harvestSync()
+  })
+
+  await plan.completed
+})
+
+test('sending traces', async (t) => {
+  const plan = tspl(t, { plan: 5 })
+  const { agent } = t.nr
+
+  let transaction
+  const proxy = agent.tracer.transactionProxy(() => {
+    transaction = agent.getTransaction()
+    transaction.finalizeNameFromUri('/nonexistent', 200)
+  })
+  proxy()
+
+  // ensure it's slow enough to get traced
+  transaction.trace.setDurationInMillis(5001)
+  transaction.end()
+  agent.once('harvestFinished', () => {
+    checkCompressedPayload(
+      plan,
+      findPayload(t.nr.writeLogs)[2],
+      'transaction_sample_data',
+      function checkData(payload) {
+        plan.ok(payload, 'should have trace payload')
+        plan.equal(Array.isArray(payload[1][0]), true, 'should have trace')
+        plan.equal(typeof payload[1][0][4] === 'string', true, 'should have encoded trace')
+      }
+    )
+  })
+  agent.harvestSync()
+
+  await plan.completed
+})
+
+test('serverless_mode harvest should disregard sampling limits', async (t) => {
+  const plan = tspl(t, { plan: 5 })
+  const { agent } = t.nr
+
+  agent.config.transaction_events.max_samples_stored = 0
+
+  let transaction
+  const proxy = agent.tracer.transactionProxy(() => {
+    transaction = agent.getTransaction()
+    transaction.finalizeNameFromUri('/nonexistent', 200)
+  })
+  proxy()
+
+  // ensure it's slow enough to get traced
+  transaction.trace.setDurationInMillis(5001)
+  transaction.end()
+  agent.once('harvestFinished', () => {
+    checkCompressedPayload(
+      plan,
+      findPayload(t.nr.writeLogs)[2],
+      'transaction_sample_data',
+      function checkData(payload) {
+        plan.ok(payload, 'should have trace payload')
+        plan.equal(Array.isArray(payload[1][0]), true, 'should have trace')
+        plan.equal(typeof payload[1][0][4] === 'string', true, 'should have encoded trace')
+      }
+    )
+  })
+  agent.harvestSync()
+
+  await plan.completed
+})
+
+test('sending span events', async (t) => {
+  const plan = tspl(t, { plan: 5 })
+  const { agent } = t.nr
+
+  agent.config.distributed_tracing.enabled = true
+  agent.config.span_events.enabled = true
+
+  helper.runInTransaction(agent, (tx) => {
+    setTimeout(() => {
+      // Just to create an extra span.
+      tx.finalizeNameFromUri('/some/path', 200)
+      tx.end()
+      agent.once('harvestFinished', end)
+      agent.harvestSync()
+    }, 100)
+  })
+
+  await plan.completed
+
+  function end() {
+    checkCompressedPayload(
+      plan,
+      findPayload(t.nr.writeLogs)[2],
+      'span_event_data',
+      function checkData(payload) {
+        plan.ok(payload, 'should have trace payload')
+        plan.equal(Array.isArray(payload[2]), true, 'should have spans')
+        plan.equal(payload[2].length, 2, 'should have all spans')
+      }
+    )
+  }
+})
+
+test('sending error events', async (t) => {
+  const plan = tspl(t, { plan: 8 })
+  const { agent } = t.nr
+
+  helper.runInTransaction(agent, (tx) => {
+    tx.finalizeNameFromUri('/nonexistent', 501)
+    tx.trace.attributes.addAttribute(DESTS.ERROR_EVENT, 'foo', 'bar')
+    tx.trace.attributes.addAttribute(DESTS.ERROR_EVENT, 'request.uri', '/nonexistent')
+    agent.errors.add(tx, new Error('test error'))
+    const spanId = agent.tracer.getSegment().id
+
+    tx.end()
+    agent.once('harvestFinished', () => {
+      const rawPayload = findPayload(t.nr.writeLogs)
+      const encodedData = rawPayload[2]
+
+      checkCompressedPayload(plan, encodedData, 'error_event_data', function checkData(payload) {
+        plan.ok(payload, 'should have a payload')
+
+        const [runId, eventMetrics, eventData] = payload
+
+        // runid should be null/undefined
+        plan.equal(runId, undefined)
+
+        plan.equal(eventMetrics.events_seen, 1)
+
+        const expectedSize = agent.config.error_collector.max_event_samples_stored
+        plan.equal(eventMetrics.reservoir_size, expectedSize)
+
+        const errorEvent = eventData[0]
+        const [intrinsicAttr /* skip user */, , agentAttr] = errorEvent
+
+        plan.equal(intrinsicAttr.type, 'TransactionError')
+
+        plan.deepStrictEqual(
+          agentAttr,
+          { 'foo': 'bar', 'request.uri': '/nonexistent', spanId },
+          'should have the correct attributes'
+        )
+      })
+    })
+    agent.harvestSync()
+  })
+
+  await plan.completed
+})
+
+test('sending custom events', async (t) => {
+  const plan = tspl(t, { plan: 6 })
+  const { agent } = t.nr
+
+  helper.runInTransaction(agent, (tx) => {
+    tx.finalizeNameFromUri('/nonexistent', 501)
+
+    const expectedEventType = 'myEvent'
+    const expectedAttributes = { foo: 'bar' }
+
+    const api = new API(agent)
+    api.recordCustomEvent(expectedEventType, expectedAttributes)
+
+    tx.end()
+    agent.once('harvestFinished', () => {
+      const rawPayload = findPayload(t.nr.writeLogs)
+      const encodedData = rawPayload[2]
+
+      checkCompressedPayload(plan, encodedData, 'custom_event_data', function checkData(payload) {
+        plan.ok(payload, 'should have a payload')
+
+        const [runId, eventData] = payload
+
+        // runid should be null/undefined
+        plan.equal(runId, undefined)
+
+        const customEvent = eventData[0]
+        const [intrinsicAttr, userAttr] = customEvent
+
+        plan.equal(intrinsicAttr.type, expectedEventType)
+
+        plan.deepStrictEqual(userAttr, expectedAttributes, 'should have the correct attributes')
+      })
+    })
+    agent.harvestSync()
+  })
+
+  await plan.completed
+})
+
+test('sending sql traces', async (t) => {
+  const plan = tspl(t, { plan: 8 })
+  const { agent } = t.nr
+
+  helper.runInTransaction(agent, (tx) => {
+    const expectedUrl = '/nonexistent'
+
+    tx.finalizeNameFromUri(expectedUrl, 501)
+
+    agent.config.transaction_tracer.record_sql = 'raw'
+    agent.config.transaction_tracer.explain_threshold = 0
+    agent.config.slow_sql.enabled = true
+
+    const expectedSql = 'select pg_sleep(1)'
+
+    agent.queries.add(tx.trace.root, 'postgres', expectedSql, 'FAKE STACK')
+
+    tx.end()
+    agent.once('harvestFinished', () => {
+      const rawPayload = findPayload(t.nr.writeLogs)
+      const encodedData = rawPayload[2]
+
+      checkCompressedPayload(plan, encodedData, 'sql_trace_data', function checkData(payload) {
+        plan.ok(payload, 'should have a payload')
+
+        const [runId, samples] = payload
+
+        // runid should be null/undefined
+        plan.equal(runId, undefined)
+
+        const sample = samples[0]
+
+        const transactionUrl = sample[1]
+        const sql = sample[3]
+        const count = sample[5]
+        const encodedParams = sample[9]
+
+        plan.equal(transactionUrl, expectedUrl)
+        plan.equal(sql, expectedSql)
+        plan.equal(count, 1)
+
+        // won't have anything interesting added this way
+        plan.ok(encodedParams)
+      })
+    })
+    agent.harvestSync()
+  })
+
+  await plan.completed
+})
+
+function findMetric(metrics, name) {
+  for (let i = 0; i < metrics.length; i++) {
+    const metric = metrics[i]
+    if (metric[0].name === name) {
+      return metric
+    }
+  }
+}
+
+function checkCompressedPayload(plan, payload, prop, cb) {
+  helper.decodeServerlessPayload(payload, (err, decoded) => {
+    plan.ifError(err)
+
+    const data = decoded.data[prop]
+    plan.ok(data, `compressed payload includes ${prop} prop`)
+
+    for (const key in decoded.data) {
+      if (!decoded.data[key].length) {
+        plan.fail(`payload data.${key} property is empty`)
+      }
+    }
+
+    cb(decoded.data[prop])
+  })
+}
+
+function findPayload(args) {
+  for (let i = 0; i < args.length; ++i) {
+    const arg = args[i]
+    if (typeof arg === 'string') {
+      return JSON.parse(arg)
+    }
+  }
+}
diff --git a/test/integration/agent/start-stop.tap.js b/test/integration/agent/start-stop.tap.js
deleted file mode 100644
index 3a2fe671e3..0000000000
--- a/test/integration/agent/start-stop.tap.js
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2020 New Relic Corporation. All rights reserved.
- * SPDX-License-Identifier: Apache-2.0
- */
-
-'use strict'
-
-const tap = require('tap')
-const configurator = require('../../../lib/config')
-const Agent = require('../../../lib/agent')
-
-tap.test('Agent should not connect to collector in serverless mode', (t) => {
-  const config = configurator.initialize({
-    app_name: 'node.js Tests',
-    serverless_mode: {
-      enabled: true
-    },
-    logging: {
-      level: 'trace'
-    }
-  })
-  const agent = new Agent(config)
-
-  // Immediately fail if connect is called
-  agent.collector.connect = () => t.fail('Agent should not attempt to connect')
-
-  agent.start((error, returned) => {
-    t.notOk(error, 'started without error')
-    t.ok(returned, 'got boot configuration')
-    t.notOk(returned.agent_run_id, 'should not have a run ID')
-    t.notOk(agent.config.run_id, 'should not have run ID set in configuration')
-
-    agent.stop((error) => {
-      t.notOk(error, 'should have shut down without issue')
-
-      t.end()
-    })
-  })
-})
diff --git a/test/integration/agent/start-stop.test.js b/test/integration/agent/start-stop.test.js
new file mode 100644
index 0000000000..552d8250f8
--- /dev/null
+++ b/test/integration/agent/start-stop.test.js
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2020 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+'use strict'
+
+const test = require('node:test')
+const assert = require('node:assert')
+
+const configurator = require('../../../lib/config')
+const Agent = require('../../../lib/agent')
+
+test('Agent should not connect to collector in serverless mode', (t, end) => {
+  const config = configurator.initialize({
+    app_name: 'node.js Tests',
+    serverless_mode: {
+      enabled: true
+    },
+    logging: {
+      level: 'trace'
+    }
+  })
+  const agent = new Agent(config)
+
+  // Immediately fail if connect is called
+  agent.collector.connect = () => assert.fail('Agent should not attempt to connect')
+
+  agent.start((error, returned) => {
+    assert.equal(error, undefined, 'started without error')
+    assert.ok(returned, 'got boot configuration')
+    assert.equal(returned.agent_run_id, undefined, 'should not have a run ID')
+    assert.equal(agent.config.run_id, undefined, 'should not have run ID set in configuration')
+
+    agent.stop((error) => {
+      assert.equal(error, undefined, 'should have shut down without issue')
+      end()
+    })
+  })
+})
diff --git a/test/integration/core/exceptions.tap.js b/test/integration/core/exceptions.tap.js
index d61a504168..d11c8c6e5c 100644
--- a/test/integration/core/exceptions.tap.js
+++ b/test/integration/core/exceptions.tap.js
@@ -96,7 +96,7 @@ tap.test('Triggers harvest while in serverless mode', (t) => {

     const parsed = JSON.parse(lambdaPayload)

-    helper.decodeServerlessPayload(t, parsed[2], function testDecoded(err, decoded) {
+    helper.decodeServerlessPayload(parsed[2], function testDecoded(err, decoded) {
       t.error(err, 'should not run into errors decoding serverless payload')
       t.ok(decoded.metadata, 'metadata should be present')
       t.ok(decoded.data, 'data should be present')
diff --git a/test/lib/agent_helper.js b/test/lib/agent_helper.js
index 3183b81118..a72ea1a049 100644
--- a/test/lib/agent_helper.js
+++ b/test/lib/agent_helper.js
@@ -523,15 +523,13 @@ helper.runOutOfContext = function runOutOfContext(fn) {
   tasks.push(fn)
 }

-helper.decodeServerlessPayload = (t, payload, cb) => {
+helper.decodeServerlessPayload = (payload, cb) => {
   if (!payload) {
-    t.comment('No payload to decode')
     return cb()
   }

   zlib.gunzip(Buffer.from(payload, 'base64'), (err, decompressed) => {
     if (err) {
-      t.comment('Error occurred when decompressing payload')
       return cb(err)
     }

diff --git a/test/lib/test-reporter.mjs b/test/lib/test-reporter.mjs
index 61fbd4de57..63b488a899 100644
--- a/test/lib/test-reporter.mjs
+++ b/test/lib/test-reporter.mjs
@@ -37,13 +37,11 @@ class Tracker extends Map {
   #passed = 0
   #failed = 0

-  isTracked(file) {
-    return this.has(file)
-  }
-
   enqueue(file, event) {
     if (this.has(file) === false) {
       this.set(file, {
+        started: process.hrtime.bigint(),
+        finished: -1,
         queued: new Set(),
         passed: 0,
         failed: 0,
@@ -57,7 +55,11 @@
   }

   dequeue(file, event) {
-    this.get(file).queued.delete(event.data.line)
+    const tracked = this.get(file)
+    tracked.queued.delete(event.data.line)
+    if (tracked.queued.size === 0) {
+      tracked.finished = process.hrtime.bigint()
+    }
   }

   fail(file) {
@@ -95,6 +97,15 @@
     }
   }

+function calcTime(start, end) {
+  const delta = Number(end - start) / 1_000_000
+  return delta.toLocaleString('en-US', {
+    style: 'unit',
+    unit: 'millisecond',
+    maximumSignificantDigits: 4
+  })
+}
+
 async function* reporter(source) {
   const tracker = new Tracker()

@@ -145,12 +156,13 @@
         // final passing test, but the suite overall has failed. So we need
         // report the failure here. At least until we get to Node.js 20 where
         // there is a finalized `test:complete` event.
+        const time = calcTime(tracked.started, tracked.finished)
         if (tracked.failed > 0) {
-          yield `${colorize('fail', 'failed')}: ${file}\n`
+          yield `${colorize('fail', 'failed')}: ${file} (${time})\n`
           break
         }

-        yield `${colorize('pass', 'passed')}: ${file}\n`
+        yield `${colorize('pass', 'passed')}: ${file} (${time})\n`
         break
       }

@@ -169,7 +181,8 @@

         tracked.reported = true

-        yield `${colorize('fail', 'failed')}: ${file}\n`
+        const time = calcTime(tracked.started, tracked.finished)
+        yield `${colorize('fail', 'failed')}: ${file} (${time})\n`
         break
       }

diff --git a/third_party_manifest.json b/third_party_manifest.json
index e2aca1cf88..bbe5646f50 100644
--- a/third_party_manifest.json
+++ b/third_party_manifest.json
@@ -1,5 +1,5 @@
 {
-  "lastUpdated": "Thu Oct 24 2024 13:42:10 GMT-0400 (Eastern Daylight Time)",
+  "lastUpdated": "Fri Nov 22 2024 09:19:29 GMT-0500 (Eastern Standard Time)",
   "projectName": "New Relic Node Agent",
   "projectUrl": "https://github.com/newrelic/node-newrelic",
   "includeOptDeps": true,
@@ -68,15 +68,15 @@
       "licenseTextSource": "file",
       "publisher": "Google Inc."
     },
-    "@newrelic/security-agent@2.0.0": {
+    "@newrelic/security-agent@2.1.1": {
       "name": "@newrelic/security-agent",
-      "version": "2.0.0",
+      "version": "2.1.1",
       "range": "^2.0.0",
       "licenses": "UNKNOWN",
       "repoUrl": "https://github.com/newrelic/csec-node-agent",
-      "versionedRepoUrl": "https://github.com/newrelic/csec-node-agent/tree/v2.0.0",
+      "versionedRepoUrl": "https://github.com/newrelic/csec-node-agent/tree/v2.1.1",
       "licenseFile": "node_modules/@newrelic/security-agent/LICENSE",
-      "licenseUrl": "https://github.com/newrelic/csec-node-agent/blob/v2.0.0/LICENSE",
+      "licenseUrl": "https://github.com/newrelic/csec-node-agent/blob/v2.1.1/LICENSE",
       "licenseTextSource": "file",
       "publisher": "newrelic"
     },
@@ -211,43 +211,43 @@
       "licenseTextSource": "file",
       "publisher": "GitHub Inc."
     },
-    "winston-transport@4.8.0": {
+    "winston-transport@4.9.0": {
       "name": "winston-transport",
-      "version": "4.8.0",
+      "version": "4.9.0",
       "range": "^4.5.0",
       "licenses": "MIT",
       "repoUrl": "https://github.com/winstonjs/winston-transport",
-      "versionedRepoUrl": "https://github.com/winstonjs/winston-transport/tree/v4.8.0",
+      "versionedRepoUrl": "https://github.com/winstonjs/winston-transport/tree/v4.9.0",
       "licenseFile": "node_modules/winston-transport/LICENSE",
-      "licenseUrl": "https://github.com/winstonjs/winston-transport/blob/v4.8.0/LICENSE",
+      "licenseUrl": "https://github.com/winstonjs/winston-transport/blob/v4.9.0/LICENSE",
       "licenseTextSource": "file",
       "publisher": "Charlie Robbins",
       "email": "charlie.robbins@gmail.com"
     }
   },
   "devDependencies": {
-    "@aws-sdk/client-s3@3.676.0": {
+    "@aws-sdk/client-s3@3.698.0": {
       "name": "@aws-sdk/client-s3",
-      "version": "3.676.0",
+      "version": "3.698.0",
       "range": "^3.556.0",
       "licenses": "Apache-2.0",
       "repoUrl": "https://github.com/aws/aws-sdk-js-v3",
-      "versionedRepoUrl": "https://github.com/aws/aws-sdk-js-v3/tree/v3.676.0",
+      "versionedRepoUrl": "https://github.com/aws/aws-sdk-js-v3/tree/v3.698.0",
      "licenseFile": "node_modules/@aws-sdk/client-s3/LICENSE",
-      "licenseUrl": "https://github.com/aws/aws-sdk-js-v3/blob/v3.676.0/LICENSE",
+      "licenseUrl": "https://github.com/aws/aws-sdk-js-v3/blob/v3.698.0/LICENSE",
       "licenseTextSource": "file",
       "publisher": "AWS SDK for JavaScript Team",
       "url": "https://aws.amazon.com/javascript/"
     },
-    "@aws-sdk/s3-request-presigner@3.676.0": {
+    "@aws-sdk/s3-request-presigner@3.698.0": {
       "name": "@aws-sdk/s3-request-presigner",
-      "version": "3.676.0",
+      "version": "3.698.0",
       "range": "^3.556.0",
       "licenses": "Apache-2.0",
       "repoUrl": "https://github.com/aws/aws-sdk-js-v3",
-      "versionedRepoUrl": "https://github.com/aws/aws-sdk-js-v3/tree/v3.676.0",
+      "versionedRepoUrl": "https://github.com/aws/aws-sdk-js-v3/tree/v3.698.0",
       "licenseFile": "node_modules/@aws-sdk/s3-request-presigner/LICENSE",
-      "licenseUrl": "https://github.com/aws/aws-sdk-js-v3/blob/v3.676.0/LICENSE",
+      "licenseUrl": "https://github.com/aws/aws-sdk-js-v3/blob/v3.698.0/LICENSE",
       "licenseTextSource": "file",
       "publisher": "AWS SDK for JavaScript Team",
       "url": "https://aws.amazon.com/javascript/"
@@ -389,28 +389,28 @@
       "licenseTextSource": "file",
       "publisher": "Caolan McMahon"
     },
-    "aws-sdk@2.1691.0": {
+    "aws-sdk@2.1692.0": {
       "name": "aws-sdk",
-      "version": "2.1691.0",
+      "version": "2.1692.0",
       "range": "^2.1604.0",
       "licenses": "Apache-2.0",
       "repoUrl": "https://github.com/aws/aws-sdk-js",
-      "versionedRepoUrl": "https://github.com/aws/aws-sdk-js/tree/v2.1691.0",
+      "versionedRepoUrl": "https://github.com/aws/aws-sdk-js/tree/v2.1692.0",
       "licenseFile": "node_modules/aws-sdk/LICENSE.txt",
-      "licenseUrl": "https://github.com/aws/aws-sdk-js/blob/v2.1691.0/LICENSE.txt",
+      "licenseUrl": "https://github.com/aws/aws-sdk-js/blob/v2.1692.0/LICENSE.txt",
       "licenseTextSource": "file",
       "publisher": "Amazon Web Services",
       "url": "https://aws.amazon.com/"
     },
-    "borp@0.18.0": {
+    "borp@0.19.0": {
       "name": "borp",
-      "version": "0.18.0",
-      "range": "^0.18.0",
+      "version": "0.19.0",
+      "range": "^0.19.0",
       "licenses": "MIT",
       "repoUrl": "https://github.com/mcollina/borp",
-      "versionedRepoUrl": "https://github.com/mcollina/borp/tree/v0.18.0",
+      "versionedRepoUrl": "https://github.com/mcollina/borp/tree/v0.19.0",
       "licenseFile": "node_modules/borp/LICENSE",
-      "licenseUrl": "https://github.com/mcollina/borp/blob/v0.18.0/LICENSE",
+      "licenseUrl": "https://github.com/mcollina/borp/blob/v0.19.0/LICENSE",
       "licenseTextSource": "file",
       "publisher": "Matteo Collina",
       "email": "hello@matteocollina.com"