diff --git a/test/parallel/test-fs-symlink-longpath.js b/test/parallel/test-fs-symlink-longpath.js
index ac15b841df9c3a..f3586317c27ede 100644
--- a/test/parallel/test-fs-symlink-longpath.js
+++ b/test/parallel/test-fs-symlink-longpath.js
@@ -12,10 +12,10 @@ const longPath = path.join(...[tmpDir].concat(Array(30).fill('1234567890')));
 fs.mkdirSync(longPath, { recursive: true });
 
 // Test if we can have symlinks to files and folders with long filenames
-const targetDirtectory = path.join(longPath, 'target-directory');
-fs.mkdirSync(targetDirtectory);
+const targetDirectory = path.join(longPath, 'target-directory');
+fs.mkdirSync(targetDirectory);
 const pathDirectory = path.join(tmpDir, 'new-directory');
-fs.symlink(targetDirtectory, pathDirectory, 'dir', common.mustSucceed(() => {
+fs.symlink(targetDirectory, pathDirectory, 'dir', common.mustSucceed(() => {
   assert(fs.existsSync(pathDirectory));
 }));
 
diff --git a/test/parallel/test-net-socket-timeout.js b/test/parallel/test-net-socket-timeout.js
index 209359fda60810..7ebef777aff734 100644
--- a/test/parallel/test-net-socket-timeout.js
+++ b/test/parallel/test-net-socket-timeout.js
@@ -54,9 +54,9 @@ for (let i = 0; i < validDelays.length; i++) {
 }
 
 for (let i = 0; i < invalidCallbacks.length; i++) {
-  [0, 1].forEach((mesc) =>
+  [0, 1].forEach((msec) =>
     assert.throws(
-      () => s.setTimeout(mesc, invalidCallbacks[i]),
+      () => s.setTimeout(msec, invalidCallbacks[i]),
       {
         code: 'ERR_INVALID_CALLBACK',
         name: 'TypeError',
diff --git a/test/parallel/test-queue-microtask-uncaught-asynchooks.js b/test/parallel/test-queue-microtask-uncaught-asynchooks.js
index ee64c6e68ab7ab..35b3d9fa309af9 100644
--- a/test/parallel/test-queue-microtask-uncaught-asynchooks.js
+++ b/test/parallel/test-queue-microtask-uncaught-asynchooks.js
@@ -11,7 +11,7 @@ let µtaskId;
 const events = [];
 
 async_hooks.createHook({
-  init(id, type, triggerId, resoure) {
+  init(id, type, triggerId, resource) {
     if (type === 'Microtask') {
       µtaskId = id;
       events.push('init');
diff --git a/test/parallel/test-stream-duplex-from.js b/test/parallel/test-stream-duplex-from.js
index 16fec1c165fab8..ce3806ce75fce8 100644
--- a/test/parallel/test-stream-duplex-from.js
+++ b/test/parallel/test-stream-duplex-from.js
@@ -156,10 +156,10 @@ const { Blob } = require('buffer');
 // Ensure that Duplex.from works for blobs
 {
   const blob = new Blob(['blob']);
-  const expecteByteLength = blob.size;
+  const expectedByteLength = blob.size;
   const duplex = Duplex.from(blob);
   duplex.on('data', common.mustCall((arrayBuffer) => {
-    assert.strictEqual(arrayBuffer.byteLength, expecteByteLength);
+    assert.strictEqual(arrayBuffer.byteLength, expectedByteLength);
   }));
 }
 
diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js
index 3fb9796251c16d..94a892567c1b21 100644
--- a/test/parallel/test-stream-writable-change-default-encoding.js
+++ b/test/parallel/test-stream-writable-change-default-encoding.js
@@ -68,7 +68,7 @@ assert.throws(() => {
   message: 'Unknown encoding: {}'
 });
 
-(function checkVairableCaseEncoding() {
+(function checkVariableCaseEncoding() {
   const m = new MyWritable(function(isBuffer, type, enc) {
     assert.strictEqual(enc, 'ascii');
   }, { decodeStrings: false });