Skip to content

Commit

Permalink
Merge branch 'master' into vkarpov15/gh-12595
Browse files Browse the repository at this point in the history
  • Loading branch information
vkarpov15 committed Nov 30, 2022
2 parents f8cede0 + 66474c9 commit 9c925e2
Show file tree
Hide file tree
Showing 16 changed files with 291 additions and 108 deletions.
4 changes: 3 additions & 1 deletion .npmignore
Original file line number Diff line number Diff line change
Expand Up @@ -40,4 +40,6 @@ renovate.json
webpack.config.js
webpack.base.config.js

.nyc-output
.nyc-output

*.tgz
20 changes: 20 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,23 @@
6.7.4 / 2022-11-28
==================
* fix: allow setting global strictQuery after Schema creation #12717 #12703 [lpizzinidev](https://github.com/lpizzinidev)
* fix(cursor): make eachAsync() avoid modifying batch when mixing parallel and batchSize #12716
* fix(types): infer virtuals in query results #12727 #12702 #12684
* fix(types): correctly infer ReadonlyArray types in schema definitions #12720
* fix(types): avoid typeof Query with generics for TypeScript 4.6 support #12712 #12688
* chore: avoid bundling .tgz files when publishing #12725 [hasezoey](https://github.com/hasezoey)

6.7.3 / 2022-11-22
==================
* fix(document): handle setting array to itself after saving and pushing a new value #12672 #12656
* fix(types): update replaceWith pipeline stage #12715 [coyotte508](https://github.com/coyotte508)
* fix(types): remove incorrect modelName type definition #12682 #12669 [lpizzinidev](https://github.com/lpizzinidev)
* fix(schema): fix setupTimestamps for browser.umd #12683 [raphael-papazikas](https://github.com/raphael-papazikas)
* docs: correct justOne description #12686 #12599 [tianguangcn](https://github.com/tianguangcn)
* docs: make links more consistent #12690 #12645 [hasezoey](https://github.com/hasezoey)
* docs(document): explain that $isNew is false in post('save') hooks #12685 #11990
* docs: fixed line causing a "used before defined" linting error #12707 [sgpinkus](https://github.com/sgpinkus)

6.7.2 / 2022-11-07
==================
* fix(discriminator): skip copying base schema plugins if `applyPlugins == false` #12613 #12604 [lpizzinidev](https://github.com/lpizzinidev)
Expand Down
16 changes: 14 additions & 2 deletions lib/cast.js
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,7 @@ module.exports = function cast(schema, obj, options, context) {
}

const strict = 'strict' in options ? options.strict : schema.options.strict;
const strictQuery = getStrictQuery(options, schema._userProvidedOptions, schema.options);
const strictQuery = getStrictQuery(options, schema._userProvidedOptions, schema.options, context);
if (options.upsert && strict) {
if (strict === 'throw') {
throw new StrictModeError(path);
Expand Down Expand Up @@ -374,7 +374,7 @@ function _cast(val, numbertype, context) {
}
}

function getStrictQuery(queryOptions, schemaUserProvidedOptions, schemaOptions) {
function getStrictQuery(queryOptions, schemaUserProvidedOptions, schemaOptions, context) {
if ('strictQuery' in queryOptions) {
return queryOptions.strictQuery;
}
Expand All @@ -387,5 +387,17 @@ function getStrictQuery(queryOptions, schemaUserProvidedOptions, schemaOptions)
if ('strict' in schemaUserProvidedOptions) {
return schemaUserProvidedOptions.strict;
}
const mongooseOptions = context.mongooseCollection &&
context.mongooseCollection.conn &&
context.mongooseCollection.conn.base &&
context.mongooseCollection.conn.base.options;
if (mongooseOptions) {
if ('strictQuery' in mongooseOptions) {
return mongooseOptions.strictQuery;
}
if ('strict' in mongooseOptions) {
return mongooseOptions.strict;
}
}
return schemaOptions.strictQuery;
}
38 changes: 29 additions & 9 deletions lib/helpers/common.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
const Binary = require('../driver').get().Binary;
const isBsonType = require('./isBsonType');
const isMongooseObject = require('./isMongooseObject');
const MongooseError = require('../error');
const util = require('util');

exports.flatten = flatten;
exports.modifiedPaths = modifiedPaths;
Expand Down Expand Up @@ -67,7 +69,25 @@ function flatten(update, path, options, schema) {
* ignore
*/

function modifiedPaths(update, path, result) {
function modifiedPaths(update, path, result, recursion = null) {
if (update == null || typeof update !== 'object') {
return;
}

if (recursion == null) {
recursion = {
raw: { update, path },
trace: new WeakSet()
};
}

if (recursion.trace.has(update)) {
throw new MongooseError(`a circular reference in the update value, updateValue:
${util.inspect(recursion.raw.update, { showHidden: false, depth: 1 })}
updatePath: '${recursion.raw.path}'`);
}
recursion.trace.add(update);

const keys = Object.keys(update || {});
const numKeys = keys.length;
result = result || {};
Expand All @@ -83,7 +103,7 @@ function modifiedPaths(update, path, result) {
val = val.toObject({ transform: false, virtuals: false });
}
if (shouldFlatten(val)) {
modifiedPaths(val, path + key, result);
modifiedPaths(val, path + key, result, recursion);
}
}

Expand All @@ -96,11 +116,11 @@ function modifiedPaths(update, path, result) {

/**
 * Returns `true` when `val` is a nested object that modifiedPaths() (and the
 * other helpers in this module) should recurse into, and `false` for leaf
 * values that must be kept as-is: Dates, ObjectIds, Decimal128s, Buffers,
 * Binary values, and empty arrays.
 *
 * Note: this span contained both the pre- and post-reindent copies of the
 * return expression (diff residue); only the single current version is kept.
 *
 * ignore
 */
function shouldFlatten(val) {
  return val &&
    typeof val === 'object' &&
    !(val instanceof Date) &&
    !isBsonType(val, 'ObjectID') &&
    (!Array.isArray(val) || val.length !== 0) &&
    !(val instanceof Buffer) &&
    !isBsonType(val, 'Decimal128') &&
    !(val instanceof Binary);
}
144 changes: 79 additions & 65 deletions lib/helpers/cursor/eachAsync.js
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ module.exports = function eachAsync(next, fn, options, callback) {
const aggregatedErrors = [];
const enqueue = asyncQueue();

let drained = false;
let aborted = false;

return promiseOrCallback(callback, cb => {
if (signal != null) {
Expand All @@ -42,7 +42,7 @@ module.exports = function eachAsync(next, fn, options, callback) {
}

signal.addEventListener('abort', () => {
drained = true;
aborted = true;
return cb(null);
}, { once: true });
}
Expand All @@ -63,90 +63,104 @@ module.exports = function eachAsync(next, fn, options, callback) {
function iterate(finalCallback) {
let handleResultsInProgress = 0;
let currentDocumentIndex = 0;
let documentsBatch = [];

let error = null;
for (let i = 0; i < parallel; ++i) {
enqueue(fetch);
enqueue(createFetch());
}

function fetch(done) {
if (drained || error) {
return done();
}
function createFetch() {
let documentsBatch = [];
let drained = false;

return fetch;

next(function(err, doc) {
if (drained || error != null) {
function fetch(done) {
if (drained || aborted) {
return done();
} else if (error) {
return done();
}
if (err != null) {
if (continueOnError) {
aggregatedErrors.push(err);
} else {
error = err;
finalCallback(err);

next(function(err, doc) {
if (error != null) {
return done();
}
}
if (doc == null) {
drained = true;
if (handleResultsInProgress <= 0) {
const finalErr = continueOnError ?
createEachAsyncMultiError(aggregatedErrors) :
error;

finalCallback(finalErr);
} else if (batchSize && documentsBatch.length) {
handleNextResult(documentsBatch, currentDocumentIndex++, handleNextResultCallBack);
if (err != null) {
if (err.name === 'MongoCursorExhaustedError') {
// We may end up calling `next()` multiple times on an exhausted
// cursor, which leads to an error. In case cursor is exhausted,
// just treat it as if the cursor returned no document, which is
// how a cursor indicates it is exhausted.
doc = null;
} else if (continueOnError) {
aggregatedErrors.push(err);
} else {
error = err;
finalCallback(err);
return done();
}
}
if (doc == null) {
drained = true;
if (handleResultsInProgress <= 0) {
const finalErr = continueOnError ?
createEachAsyncMultiError(aggregatedErrors) :
error;

finalCallback(finalErr);
} else if (batchSize && documentsBatch.length) {
handleNextResult(documentsBatch, currentDocumentIndex++, handleNextResultCallBack);
}
return done();
}
return done();
}

++handleResultsInProgress;
++handleResultsInProgress;

// Kick off the subsequent `next()` before handling the result, but
// make sure we know that we still have a result to handle re: #8422
immediate(() => done());
// Kick off the subsequent `next()` before handling the result, but
// make sure we know that we still have a result to handle re: #8422
immediate(() => done());

if (batchSize) {
documentsBatch.push(doc);
}
if (batchSize) {
documentsBatch.push(doc);
}

// If the current documents size is less than the provided patch size don't process the documents yet
if (batchSize && documentsBatch.length !== batchSize) {
immediate(() => enqueue(fetch));
return;
}
// If the current documents size is less than the provided batch size don't process the documents yet
if (batchSize && documentsBatch.length !== batchSize) {
immediate(() => enqueue(fetch));
return;
}

const docsToProcess = batchSize ? documentsBatch : doc;
const docsToProcess = batchSize ? documentsBatch : doc;

function handleNextResultCallBack(err) {
if (batchSize) {
handleResultsInProgress -= documentsBatch.length;
documentsBatch = [];
} else {
--handleResultsInProgress;
}
if (err != null) {
if (continueOnError) {
aggregatedErrors.push(err);
function handleNextResultCallBack(err) {
if (batchSize) {
handleResultsInProgress -= documentsBatch.length;
documentsBatch = [];
} else {
error = err;
return finalCallback(err);
--handleResultsInProgress;
}
if (err != null) {
if (continueOnError) {
aggregatedErrors.push(err);
} else {
error = err;
return finalCallback(err);
}
}
if ((drained || aborted) && handleResultsInProgress <= 0) {
const finalErr = continueOnError ?
createEachAsyncMultiError(aggregatedErrors) :
error;
return finalCallback(finalErr);
}
}
if (drained && handleResultsInProgress <= 0) {
const finalErr = continueOnError ?
createEachAsyncMultiError(aggregatedErrors) :
error;
return finalCallback(finalErr);
}

immediate(() => enqueue(fetch));
}
immediate(() => enqueue(fetch));
}

handleNextResult(docsToProcess, currentDocumentIndex++, handleNextResultCallBack);
});
handleNextResult(docsToProcess, currentDocumentIndex++, handleNextResultCallBack);
});
}
}
}

Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"name": "mongoose",
"description": "Mongoose MongoDB ODM",
"version": "6.7.2",
"version": "6.7.4",
"author": "Guillermo Rauch <guillermo@learnboost.com>",
"keywords": [
"mongodb",
Expand Down
2 changes: 2 additions & 0 deletions test/errors.validation.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -234,6 +234,8 @@ describe('ValidationError', function() {

describe('when user code defines a r/o Error#toJSON', function() {
it('should not fail', function(done) {
this.timeout(10000);

const err = [];
const child = require('child_process')
.fork('./test/isolated/project-has-error.toJSON.js', ['--no-warnings'], { silent: true });
Expand Down
17 changes: 17 additions & 0 deletions test/helpers/common.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
'use strict';

const assert = require('assert');
const modifiedPaths = require('../../lib/helpers/common').modifiedPaths;

// Regression tests for modifiedPaths() handling of bad update values:
// it must tolerate null/non-object input, and it must throw a descriptive
// error when the update value contains a circular reference instead of
// recursing without end.
describe('modifiedPaths, bad update value which has circular reference field', () => {
  it('update value can be null', function() {
    // Must be a no-op for null input rather than throwing.
    modifiedPaths(null, 'path', null);
  });

  it('values with obvious error on circular reference', function() {
    const objA = {};
    objA.a = objA; // self-referencing object: objA.a === objA

    // The error message is expected to mention the circular reference.
    assert.throws(() => modifiedPaths(objA, 'path', null), /circular reference/);
  });
});
24 changes: 24 additions & 0 deletions test/helpers/cursor.eachAsync.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,30 @@ describe('eachAsync()', function() {
assert.equal(numCalled, 1);
});

// Regression test for gh-12652: when `parallel` and `batchSize` are combined,
// a batch handed to the user callback must not be mutated while the callback
// is still awaiting — its length is checked before and after an async pause.
it('avoids mutating document batch with parallel (gh-12652)', async() => {
  const max = 100;
  let numCalled = 0;
  // Fake cursor: yields { num: 1..max } asynchronously, then signals
  // exhaustion by passing a null doc.
  function next(cb) {
    setImmediate(() => {
      if (++numCalled > max) {
        return cb(null, null);
      }
      cb(null, { num: numCalled });
    });
  }

  let numDocsProcessed = 0;
  // User callback: records the batch size, pauses so other parallel fetches
  // can run, then asserts the batch was not grown or emptied meanwhile.
  async function fn(batch) {
    numDocsProcessed += batch.length;
    const length = batch.length;
    await new Promise(resolve => setTimeout(resolve, 50));
    assert.equal(batch.length, length);
  }

  await eachAsync(next, fn, { parallel: 7, batchSize: 10 });
  // Every document must be processed exactly once across all batches.
  assert.equal(numDocsProcessed, max);
});

it('using AbortSignal (gh-12173)', async function() {
if (typeof AbortController === 'undefined') {
return this.skip();
Expand Down
Loading

0 comments on commit 9c925e2

Please sign in to comment.