
fs: improve fsPromises readFile performance
Improve the fsPromises readFile performance
by allocating only one buffer when the size is known,
increasing the size of the read buffer chunks,
and not reading more data once size bytes have been read.
Also move the constants to internal/fs/utils.

refs: nodejs#37583
(Old) Backport-PR-URL: nodejs#37703
PR-URL: nodejs#37608
MoritzLoewenstein committed Aug 21, 2021
1 parent f0b7b93 commit b4fffb0
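
The core idea of the known-size path can be sketched with the public fs/promises API: stat the file, allocate the destination buffer once, and issue chunked reads into it until size bytes have arrived. This is only an illustration of the approach — the patched readFileHandle calls the internal binding.read with kUsePromises rather than the FileHandle API, and the helper name readWholeFile is made up for this sketch.

```js
'use strict';
const { open } = require('fs/promises');

// Mirrors kReadFileBufferLength added to internal/fs/utils in this commit.
const kChunk = 512 * 1024;

async function readWholeFile(path) {
  const fh = await open(path, 'r');
  try {
    const { size } = await fh.stat();
    // Allocate the full destination buffer once instead of one Buffer per chunk.
    const buffer = Buffer.allocUnsafeSlow(size);
    let totalRead = 0;
    while (totalRead < size) {
      const length = Math.min(size - totalRead, kChunk);
      const { bytesRead } = await fh.read(buffer, totalRead, length, -1);
      if (bytesRead === 0) break; // file ended earlier than stat() reported
      totalRead += bytesRead;
    }
    return totalRead === size ? buffer : buffer.slice(0, totalRead);
  } finally {
    await fh.close();
  }
}

module.exports = readWholeFile;
```

For files whose stat size reports as 0 (sockets, FIFOs, many procfs entries), the patch instead keeps collecting 64 KiB chunks and concatenates them at the end.
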
Showing 2 changed files with 65 additions and 23 deletions.
64 changes: 41 additions & 23 deletions lib/internal/fs/promises.js
@@ -1,18 +1,7 @@
'use strict';

// Most platforms don't allow reads or writes >= 2 GB.
// See https://github.com/libuv/libuv/pull/1501.
const kIoMaxLength = 2 ** 31 - 1;

// Note: This is different from kReadFileBufferLength used for non-promisified
// fs.readFile.
const kReadFileMaxChunkSize = 2 ** 14;
const kWriteFileMaxChunkSize = 2 ** 14;

// 2 ** 32 - 1
const kMaxUserId = 4294967295;

const {
ArrayPrototypePush,
Error,
MathMax,
MathMin,
@@ -44,6 +33,13 @@ const {
const { isArrayBufferView } = require('internal/util/types');
const { rimrafPromises } = require('internal/fs/rimraf');
const {
constants: {
kIoMaxLength,
kMaxUserId,
kReadFileBufferLength,
kReadFileUnknownBufferLength,
kWriteFileMaxChunkSize,
},
copyObject,
getDirents,
getOptions,
@@ -296,24 +292,46 @@ async function readFileHandle(filehandle, options) {
if (size > kIoMaxLength)
throw new ERR_FS_FILE_TOO_LARGE(size);

const chunks = [];
const chunkSize = size === 0 ?
kReadFileMaxChunkSize :
MathMin(size, kReadFileMaxChunkSize);
let endOfFile = false;
let totalRead = 0;
const noSize = size === 0;
const buffers = [];
const fullBuffer = noSize ? undefined : Buffer.allocUnsafeSlow(size);
do {
if (signal && signal.aborted) {
throw lazyDOMException('The operation was aborted', 'AbortError');
}
const buf = Buffer.alloc(chunkSize);
const { bytesRead, buffer } =
await read(filehandle, buf, 0, chunkSize, -1);
endOfFile = bytesRead === 0;
if (bytesRead > 0)
chunks.push(buffer.slice(0, bytesRead));
let buffer;
let offset;
let length;
if (noSize) {
buffer = Buffer.allocUnsafeSlow(kReadFileUnknownBufferLength);
offset = 0;
length = kReadFileUnknownBufferLength;
} else {
buffer = fullBuffer;
offset = totalRead;
length = MathMin(size - totalRead, kReadFileBufferLength);
}

const bytesRead = (await binding.read(filehandle.fd, buffer, offset,
length, -1, kUsePromises)) || 0;
totalRead += bytesRead;
endOfFile = bytesRead === 0 || totalRead === size;
if (noSize && bytesRead > 0) {
const isBufferFull = bytesRead === kReadFileUnknownBufferLength;
const chunkBuffer = isBufferFull ? buffer : buffer.slice(0, bytesRead);
ArrayPrototypePush(buffers, chunkBuffer);
}
} while (!endOfFile);

const result = chunks.length === 1 ? chunks[0] : Buffer.concat(chunks);
let result;
if (size > 0) {
result = totalRead === size ? fullBuffer : fullBuffer.slice(0, totalRead);
} else {
result = buffers.length === 1 ? buffers[0] : Buffer.concat(buffers,
totalRead);
}

return options.encoding ? result.toString(options.encoding) : result;
}
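
The external contract of fsPromises.readFile is unchanged by this diff. A typical call site, including the AbortSignal check the loop above performs before each chunk read, still looks like the sketch below; the file path and the 5-second timeout are illustrative, and it assumes a Node.js version where AbortController is a global and readFile accepts a signal option.

```js
'use strict';
const { readFile } = require('fs/promises');

async function main() {
  const controller = new AbortController();
  // Cancel after 5 seconds; the read loop checks signal.aborted before
  // every chunk read and rejects with an AbortError.
  const timer = setTimeout(() => controller.abort(), 5000);
  try {
    const text = await readFile('/tmp/example.txt', {
      encoding: 'utf8',
      signal: controller.signal,
    });
    console.log(`read ${text.length} characters`);
  } catch (err) {
    if (err.name === 'AbortError') console.error('read was aborted');
    else throw err;
  } finally {
    clearTimeout(timer);
  }
}

main();
```
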
24 changes: 24 additions & 0 deletions lib/internal/fs/utils.js
@@ -113,6 +113,23 @@ const kMaximumCopyMode = COPYFILE_EXCL |
COPYFILE_FICLONE |
COPYFILE_FICLONE_FORCE;

// Most platforms don't allow reads or writes >= 2 GB.
// See https://github.com/libuv/libuv/pull/1501.
const kIoMaxLength = 2 ** 31 - 1;

// Use 64kb in case the file type is not a regular file and the actual file
// size is therefore unknown. Increasing the value further results in more
// frequent over-allocation for small files and consumes CPU time and memory
// that could be used elsewhere.
// Otherwise use up to 512kb per read to partition the reading of big files,
// so a single oversized read does not block other work when all available
// threads are in use.
const kReadFileUnknownBufferLength = 64 * 1024;
const kReadFileBufferLength = 512 * 1024;

const kWriteFileMaxChunkSize = 512 * 1024;

const kMaxUserId = 2 ** 32 - 1;

const isWindows = process.platform === 'win32';

let fs;
@@ -815,6 +832,13 @@ const validatePosition = hideStackFrames((position, name) => {
});

module.exports = {
constants: {
kIoMaxLength,
kMaxUserId,
kReadFileBufferLength,
kReadFileUnknownBufferLength,
kWriteFileMaxChunkSize,
},
assertEncoding,
BigIntStats, // for testing
copyObject,
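
As a rough illustration of the constants moved into internal/fs/utils: raising the known-size chunk from the previous 16 KiB (2 ** 14) to 512 KiB cuts the number of read calls for a regular file dramatically, while unknown-size inputs still grow in 64 KiB steps. The snippet below is only a back-of-the-envelope check with a hypothetical 10 MiB file, not part of the patch.

```js
'use strict';

const kReadFileUnknownBufferLength = 64 * 1024;  // unknown-size chunk
const kReadFileBufferLength = 512 * 1024;        // known-size chunk
const kOldChunkSize = 2 ** 14;                   // previous 16 KiB chunk

// Number of read calls needed for a regular file of `size` bytes.
const reads = (size, chunk) => Math.max(1, Math.ceil(size / chunk));

const size = 10 * 1024 * 1024; // hypothetical 10 MiB file
console.log(reads(size, kOldChunkSize));         // 640 reads before this commit
console.log(reads(size, kReadFileBufferLength)); // 20 reads after this commit
console.log(kReadFileUnknownBufferLength);       // 65536-byte steps when size is 0
```
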
