Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

stream: always defer 'readable' with nextTick #17979

Closed
wants to merge 3 commits
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 9 additions & 3 deletions lib/_stream_readable.js
Original file line number Diff line number Diff line change
Expand Up @@ -267,7 +267,6 @@ function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
function addChunk(stream, state, chunk, addToFront) {
if (state.flowing && state.length === 0 && !state.sync) {
stream.emit('data', chunk);
stream.read(0);
} else {
// update the buffer info.
state.length += state.objectMode ? 1 : chunk.length;
Expand Down Expand Up @@ -496,7 +495,11 @@ function onEofChunk(stream, state) {
state.ended = true;

// emit 'readable' now to make sure it gets picked up.
emitReadable(stream);
state.needReadable = false;
if (!state.emittedReadable) {
state.emittedReadable = true;
emitReadable_(stream);
}
}

// Don't emit readable right away in sync mode, because this can trigger
Expand Down Expand Up @@ -536,7 +539,7 @@ function maybeReadMore(stream, state) {

function maybeReadMore_(stream, state) {
var len = state.length;
while (!state.reading && !state.flowing && !state.ended &&
while (!state.reading && !state.ended &&
state.length < state.highWaterMark) {
debug('maybeReadMore read 0');
stream.read(0);
Expand Down Expand Up @@ -1087,13 +1090,16 @@ function copyFromBuffer(n, list) {
// Schedule the 'end' event for a stream that has been fully consumed.
// The actual emission is deferred to the next tick (endReadableNT) so
// 'end' never fires synchronously from inside a read()/push() call.
// Idempotent: once endEmitted is set, subsequent calls are no-ops.
function endReadable(stream) {
  const state = stream._readableState;

  debug('endReadable', state.endEmitted);
  if (state.endEmitted)
    return;

  state.ended = true;
  process.nextTick(endReadableNT, state, stream);
}

function endReadableNT(state, stream) {
debug('endReadableNT', state.endEmitted, state.length);

// Check that we didn't get one last unshift.
if (!state.endEmitted && state.length === 0) {
state.endEmitted = true;
Expand Down
193 changes: 160 additions & 33 deletions test/parallel/test-stream-readable-object-multi-push-async.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,50 +7,177 @@ const { Readable } = require('stream');
const MAX = 42;
const BATCH = 10;

const readable = new Readable({
objectMode: true,
read: common.mustCall(function() {
console.log('>> READ');
fetchData((err, data) => {
if (err) {
this.destroy(err);
return;
{
const readable = new Readable({
objectMode: true,
read: common.mustCall(function() {
console.log('>> READ');
fetchData((err, data) => {
if (err) {
this.destroy(err);
return;
}

if (data.length === 0) {
console.log('pushing null');
// Review comment (Member): would prefer to omit the console.log :-)

this.push(null);
return;
}

console.log('pushing');
data.forEach((d) => this.push(d));
});
}, Math.floor(MAX / BATCH) + 2)
});

let i = 0;
// Simulated async data source for the test: after a short delay, hands
// `cb` the next BATCH consecutive integers (advancing the shared counter
// `i`), or an empty array once `i` has passed MAX, signalling exhaustion.
function fetchData(cb) {
  if (i > MAX) {
    // Source exhausted — deliver an empty batch so the reader pushes null.
    setTimeout(cb, 10, null, []);
    return;
  }

  const batch = [];
  const upper = i + BATCH;
  while (i < upper) {
    batch.push(i);
    i++;
  }
  setTimeout(cb, 10, null, batch);
}

readable.on('readable', () => {
let data;
console.log('readable emitted');
while (data = readable.read()) {
console.log(data);
}
});

readable.on('end', common.mustCall(() => {
assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH);
// Review comment (Member): please add a comment that explains the calculation.

}));
}

{
const readable = new Readable({
objectMode: true,
read: common.mustCall(function() {
console.log('>> READ');
fetchData((err, data) => {
if (err) {
this.destroy(err);
return;
}

if (data.length === 0) {
console.log('pushing null');
this.push(null);
return;
}

console.log('pushing');
data.forEach((d) => this.push(d));
});
}, Math.floor(MAX / BATCH) + 2)
});

if (data.length === 0) {
console.log('pushing null');
this.push(null);
return;
let i = 0;
// Test helper modelling an asynchronous producer. Each call yields, after
// a 10ms delay, either the next BATCH integers (bumping the shared counter
// `i` as a side effect) or an empty array when `i` has exceeded MAX.
function fetchData(cb) {
  if (i > MAX) {
    setTimeout(cb, 10, null, []);
  } else {
    // Build the next batch while advancing the shared counter.
    const chunk = Array.from({ length: BATCH }, () => i++);
    setTimeout(cb, 10, null, chunk);
  }
}

readable.on('data', (data) => {
console.log('data emitted', data);
});

readable.on('end', common.mustCall(() => {
assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH);
}));
}

{
const readable = new Readable({
objectMode: true,
read: common.mustCall(function() {
console.log('>> READ');
fetchData((err, data) => {
if (err) {
this.destroy(err);
return;
}

console.log('pushing');
data.forEach((d) => this.push(d));

if (data[BATCH - 1] >= MAX) {
console.log('pushing null');
this.push(null);
}
});
}, Math.floor(MAX / BATCH) + 1)
});

console.log('pushing');
data.forEach((d) => this.push(d));
});
}, Math.floor(MAX / BATCH) + 2)
});

let i = 0;
function fetchData(cb) {
if (i > MAX) {
setTimeout(cb, 10, null, []);
} else {
let i = 0;
function fetchData(cb) {
const array = [];
const max = i + BATCH;
for (; i < max; i++) {
array.push(i);
}
setTimeout(cb, 10, null, array);
}

readable.on('data', (data) => {
console.log('data emitted', data);
});

readable.on('end', common.mustCall(() => {
assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH);
}));
}

readable.on('readable', () => {
let data;
console.log('readable emitted');
while (data = readable.read()) {
console.log(data);
}
});
{
const readable = new Readable({
objectMode: true,
read: common.mustNotCall()
});

readable.on('data', common.mustNotCall());

readable.on('end', () => {
assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH);
});
readable.push(null);

let nextTickPassed = false;
process.nextTick(() => {
nextTickPassed = true;
});

readable.on('end', common.mustCall(() => {
assert.strictEqual(nextTickPassed, false);
}));
}

{
const readable = new Readable({
objectMode: true,
read: common.mustCall()
});

readable.on('data', (data) => {
console.log('data emitted', data);
});

readable.on('end', common.mustCall());

setImmediate(() => {
readable.push('aaa');
readable.push(null);
});
}
9 changes: 3 additions & 6 deletions test/parallel/test-stream-readable-reading-readingMore.js
Original file line number Diff line number Diff line change
Expand Up @@ -29,13 +29,10 @@ function onStreamEnd() {
assert.strictEqual(state.reading, false);
}

const expected = [
true, // stream is not ended
false // stream is ended
];

readable.on('readable', common.mustCall(() => {
assert.strictEqual(state.readingMore, expected.shift());
// 'readable' always gets called before 'end'
// since 'end' hasn't been emitted, more data could be incoming
assert.strictEqual(state.readingMore, true);

// if the stream has ended, we shouldn't be reading
assert.strictEqual(state.ended, !state.reading);
Expand Down
4 changes: 2 additions & 2 deletions test/parallel/test-stream2-transform.js
Original file line number Diff line number Diff line change
Expand Up @@ -321,11 +321,11 @@ const Transform = require('_stream_transform');

pt.end();

assert.strictEqual(emits, 0);
assert.strictEqual(emits, 1);
assert.strictEqual(pt.read(5).toString(), 'l');
assert.strictEqual(pt.read(5), null);

assert.strictEqual(emits, 0);
assert.strictEqual(emits, 1);
}

{
Expand Down