diff --git a/spec/ParseQuery.Aggregate.spec.js b/spec/ParseQuery.Aggregate.spec.js
index 3668d58969..f325e03a43 100644
--- a/spec/ParseQuery.Aggregate.spec.js
+++ b/spec/ParseQuery.Aggregate.spec.js
@@ -164,6 +164,124 @@ describe('Parse.Query Aggregate testing', () => {
     });
   });
 
+  it('group by date object transform', (done) => {
+    const obj1 = new TestObject();
+    const obj2 = new TestObject();
+    const obj3 = new TestObject();
+    const pipeline = [{
+      group: {
+        objectId: { day: { $dayOfMonth: "$updatedAt" }, month: { $month: "$createdAt" }, year: { $year: "$createdAt" } },
+        count: { $sum: 1 }
+      }
+    }];
+    Parse.Object.saveAll([obj1, obj2, obj3]).then(() => {
+      const query = new Parse.Query(TestObject);
+      return query.aggregate(pipeline);
+    }).then((results) => {
+      const createdAt = new Date(obj1.createdAt);
+      expect(results[0].objectId.day).toEqual(createdAt.getUTCDate());
+      expect(results[0].objectId.month).toEqual(createdAt.getUTCMonth() + 1);
+      expect(results[0].objectId.year).toEqual(createdAt.getUTCFullYear());
+      done();
+    });
+  });
+
+  it_exclude_dbs(['postgres'])('group and multiply transform', (done) => {
+    const obj1 = new TestObject({ name: 'item a', quantity: 2, price: 10 });
+    const obj2 = new TestObject({ name: 'item b', quantity: 5, price: 5 });
+    const pipeline = [{
+      group: {
+        objectId: null,
+        total: { $sum: { $multiply: [ '$quantity', '$price' ] } }
+      }
+    }];
+    Parse.Object.saveAll([obj1, obj2]).then(() => {
+      const query = new Parse.Query(TestObject);
+      return query.aggregate(pipeline);
+    }).then((results) => {
+      expect(results.length).toEqual(1);
+      expect(results[0].total).toEqual(45);
+      done();
+    });
+  });
+
+  it_exclude_dbs(['postgres'])('project and multiply transform', (done) => {
+    const obj1 = new TestObject({ name: 'item a', quantity: 2, price: 10 });
+    const obj2 = new TestObject({ name: 'item b', quantity: 5, price: 5 });
+    const pipeline = [
+      {
+        match: { quantity: { $exists: true } }
+      },
+      {
+        project: {
+          name: 1,
+          total: { $multiply: [ '$quantity', '$price' ] }
+        }
+      }
+    ];
+    Parse.Object.saveAll([obj1, obj2]).then(() => {
+      const query = new Parse.Query(TestObject);
+      return query.aggregate(pipeline);
+    }).then((results) => {
+      expect(results.length).toEqual(2);
+      if (results[0].name === 'item a') {
+        expect(results[0].total).toEqual(20);
+        expect(results[1].total).toEqual(25);
+      }
+      else {
+        expect(results[0].total).toEqual(25);
+        expect(results[1].total).toEqual(20);
+      }
+      done();
+    });
+  });
+
+  it_exclude_dbs(['postgres'])('project without objectId transform', (done) => {
+    const obj1 = new TestObject({ name: 'item a', quantity: 2, price: 10 });
+    const obj2 = new TestObject({ name: 'item b', quantity: 5, price: 5 });
+    const pipeline = [
+      {
+        match: { quantity: { $exists: true } }
+      },
+      {
+        project: {
+          objectId: 0,
+          total: { $multiply: [ '$quantity', '$price' ] }
+        }
+      },
+      {
+        sort: { total: 1 }
+      }
+    ];
+    Parse.Object.saveAll([obj1, obj2]).then(() => {
+      const query = new Parse.Query(TestObject);
+      return query.aggregate(pipeline);
+    }).then((results) => {
+      expect(results.length).toEqual(2);
+      expect(results[0].total).toEqual(20);
+      expect(results[0].objectId).toEqual(undefined);
+      expect(results[1].total).toEqual(25);
+      expect(results[1].objectId).toEqual(undefined);
+      done();
+    });
+  });
+
+  it_exclude_dbs(['postgres'])('project updatedAt only transform', (done) => {
+    const pipeline = [{
+      project: { objectId: 0, updatedAt: 1 }
+    }];
+    const query = new Parse.Query(TestObject);
+    query.aggregate(pipeline).then((results) => {
+      expect(results.length).toEqual(4);
+      for (let i = 0; i < results.length; i++) {
+        const item = results[i];
+        expect(item.hasOwnProperty('updatedAt')).toEqual(true);
+        expect(item.hasOwnProperty('objectId')).toEqual(false);
+      }
+      done();
+    });
+  });
+
   it_exclude_dbs(['postgres'])('cannot group by date field (excluding createdAt and updatedAt)', (done) => {
     const obj1 = new TestObject({ dateField: new Date(1990, 11, 1) });
     const obj2 = new TestObject({ dateField: new Date(1990, 5, 1) });
@@ -339,6 +457,27 @@ describe('Parse.Query Aggregate testing', () => {
     }).catch(done.fail);
   });
 
+  it('match comparison date query', (done) => {
+    const today = new Date();
+    const yesterday = new Date();
+    const tomorrow = new Date();
+    yesterday.setDate(today.getDate() - 1);
+    tomorrow.setDate(today.getDate() + 1);
+    const obj1 = new TestObject({ dateField: yesterday });
+    const obj2 = new TestObject({ dateField: today });
+    const obj3 = new TestObject({ dateField: tomorrow });
+    const pipeline = [
+      { match: { dateField: { $lt: tomorrow } } }
+    ];
+    Parse.Object.saveAll([obj1, obj2, obj3]).then(() => {
+      const query = new Parse.Query(TestObject);
+      return query.aggregate(pipeline);
+    }).then((results) => {
+      expect(results.length).toBe(2);
+      done();
+    });
+  });
+
   it('match comparison query', (done) => {
     const options = Object.assign({}, masterKeyOptions, {
       body: {
@@ -474,6 +613,96 @@ describe('Parse.Query Aggregate testing', () => {
     });
   });
 
+  it_exclude_dbs(['postgres'])('match exists query', (done) => {
+    const pipeline = [
+      { match: { score: { $exists: true } } }
+    ];
+    const query = new Parse.Query(TestObject);
+    query.aggregate(pipeline).then((results) => {
+      expect(results.length).toEqual(4);
+      done();
+    });
+  });
+
+  it('match date query - createdAt', (done) => {
+    const obj1 = new TestObject();
+    const obj2 = new TestObject();
+
+    Parse.Object.saveAll([obj1, obj2]).then(() => {
+      const now = new Date();
+      const today = new Date(now.getFullYear(), now.getMonth(), now.getDate());
+      const pipeline = [
+        { match: { 'createdAt': { $gte: today } } }
+      ];
+      const query = new Parse.Query(TestObject);
+      return query.aggregate(pipeline);
+    }).then((results) => {
+      // Four objects were created initially, and we added two more.
+      expect(results.length).toEqual(6);
+      done();
+    });
+  });
+
+  it('match date query - updatedAt', (done) => {
+    const obj1 = new TestObject();
+    const obj2 = new TestObject();
+
+    Parse.Object.saveAll([obj1, obj2]).then(() => {
+      const now = new Date();
+      const today = new Date(now.getFullYear(), now.getMonth(), now.getDate());
+      const pipeline = [
+        { match: { 'updatedAt': { $gte: today } } }
+      ];
+      const query = new Parse.Query(TestObject);
+      return query.aggregate(pipeline);
+    }).then((results) => {
+      // Four objects were created initially, and we added two more.
+      expect(results.length).toEqual(6);
+      done();
+    });
+  });
+
+  it('match date query - empty', (done) => {
+    const obj1 = new TestObject();
+    const obj2 = new TestObject();
+
+    Parse.Object.saveAll([obj1, obj2]).then(() => {
+      const now = new Date();
+      const future = new Date(now.getFullYear(), now.getMonth() + 1, now.getDate());
+      const pipeline = [
+        { match: { 'createdAt': future } }
+      ];
+      const query = new Parse.Query(TestObject);
+      return query.aggregate(pipeline);
+    }).then((results) => {
+      expect(results.length).toEqual(0);
+      done();
+    });
+  });
+
+  it_exclude_dbs(['postgres'])('match pointer with operator query', (done) => {
+    const pointer = new PointerObject();
+
+    const obj1 = new TestObject({ pointer });
+    const obj2 = new TestObject({ pointer });
+    const obj3 = new TestObject();
+
+    Parse.Object.saveAll([pointer, obj1, obj2, obj3]).then(() => {
+      const pipeline = [
+        { match: { pointer: { $exists: true } } }
+      ];
+      const query = new Parse.Query(TestObject);
+      return query.aggregate(pipeline);
+    }).then((results) => {
+      expect(results.length).toEqual(2);
+      expect(results[0].pointer.objectId).toEqual(pointer.id);
+      expect(results[1].pointer.objectId).toEqual(pointer.id);
+      expect(results.some(result => result.objectId === obj1.id)).toEqual(true);
+      expect(results.some(result => result.objectId === obj2.id)).toEqual(true);
+      done();
+    });
+  });
+
   it('project query', (done) => {
     const options = Object.assign({}, masterKeyOptions, {
       body: {
@@ -512,6 +741,26 @@ describe('Parse.Query Aggregate testing', () => {
     }).catch(done.fail);
   });
 
+  it('project pointer query', (done) => {
+    const pointer = new PointerObject();
+    const obj = new TestObject({ pointer, name: 'hello' });
+
+    obj.save().then(() => {
+      const pipeline = [
+        { match: { objectId: obj.id } },
+        { project: { pointer: 1, name: 1, createdAt: 1 } }
+      ];
+      const query = new Parse.Query(TestObject);
+      return query.aggregate(pipeline);
+    }).then((results) => {
+      expect(results.length).toEqual(1);
+      expect(results[0].name).toEqual('hello');
+      expect(results[0].createdAt).not.toBe(undefined);
+      expect(results[0].pointer.objectId).toEqual(pointer.id);
+      done();
+    });
+  });
+
   it('project with group query', (done) => {
     const options = Object.assign({}, masterKeyOptions, {
       body: {
diff --git a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js
index f25fb74fdc..0ec3ae5a82 100644
--- a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js
+++ b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js
@@ -557,26 +557,17 @@ export class MongoStorageAdapter implements StorageAdapter {
   aggregate(className: string, schema: any, pipeline: any, readPreference: ?string) {
     let isPointerField = false;
     pipeline = pipeline.map((stage) => {
-      if (stage.$group && stage.$group._id && (typeof stage.$group._id === 'string')) {
-        const field = stage.$group._id.substring(1);
-        if (schema.fields[field] && schema.fields[field].type === 'Pointer') {
+      if (stage.$group) {
+        stage.$group = this._parseAggregateGroupArgs(schema, stage.$group);
+        if (stage.$group._id && (typeof stage.$group._id === 'string') && stage.$group._id.indexOf('$_p_') >= 0) {
           isPointerField = true;
-          stage.$group._id = `$_p_${field}`;
         }
       }
       if (stage.$match) {
-        for (const field in stage.$match) {
-          if (schema.fields[field] && schema.fields[field].type === 'Pointer') {
-            const transformMatch = { [`_p_${field}`] : `${schema.fields[field].targetClass}$${stage.$match[field]}` };
-            stage.$match = transformMatch;
-          }
-          if (field === 'objectId') {
-            const transformMatch = Object.assign({}, stage.$match);
-            transformMatch._id = stage.$match[field];
-            delete transformMatch.objectId;
-            stage.$match = transformMatch;
-          }
-        }
+        stage.$match = this._parseAggregateArgs(schema, stage.$match);
+      }
+      if (stage.$project) {
+        stage.$project = this._parseAggregateProjectArgs(schema, stage.$project);
       }
       return stage;
     });
@@ -608,6 +599,130 @@
       .catch(err => this.handleError(err));
   }
 
+  // This function will recursively traverse the pipeline and convert any Pointer or Date columns.
+  // If we detect a pointer column, we will rename the column being queried for to match the column
+  // in the database. We also modify the value to match what we expect the value to be in the
+  // database.
+  // For dates, the driver expects a Date object, but we have a string coming in. So we'll convert
+  // the string to a Date so the driver can perform the necessary comparison.
+  //
+  // The goal of this method is to look for the "leaves" of the pipeline and determine if they need
+  // to be converted. The pipeline can have a few different forms. For more details, see:
+  // https://docs.mongodb.com/manual/reference/operator/aggregation/
+  //
+  // If the pipeline is an array, it means we are probably parsing an '$and' or '$or' operator. In
+  // that case we need to loop through all of its children to find the columns being operated on.
+  // If the pipeline is an object, we'll loop through the keys, checking to see if the key name
+  // matches one of the schema columns. If it does match a column and the column is a Pointer or
+  // a Date, then we'll convert the value as described above.
+  //
+  // As much as I hate recursion...this seemed like a good fit for it. We're essentially traversing
+  // down a tree to find a "leaf node" and checking to see if it needs to be converted.
+  _parseAggregateArgs(schema: any, pipeline: any): any {
+    if (Array.isArray(pipeline)) {
+      return pipeline.map((value) => this._parseAggregateArgs(schema, value));
+    } else if (typeof pipeline === 'object') {
+      const returnValue = {};
+      for (const field in pipeline) {
+        if (schema.fields[field] && schema.fields[field].type === 'Pointer') {
+          if (typeof pipeline[field] === 'object') {
+            // Pass objects down to MongoDB...this is more than likely an $exists operator.
+            returnValue[`_p_${field}`] = pipeline[field];
+          } else {
+            returnValue[`_p_${field}`] = `${schema.fields[field].targetClass}$${pipeline[field]}`;
+          }
+        } else if (schema.fields[field] && schema.fields[field].type === 'Date') {
+          returnValue[field] = this._convertToDate(pipeline[field]);
+        } else {
+          returnValue[field] = this._parseAggregateArgs(schema, pipeline[field]);
+        }
+
+        if (field === 'objectId') {
+          returnValue['_id'] = returnValue[field];
+          delete returnValue[field];
+        } else if (field === 'createdAt') {
+          returnValue['_created_at'] = returnValue[field];
+          delete returnValue[field];
+        } else if (field === 'updatedAt') {
+          returnValue['_updated_at'] = returnValue[field];
+          delete returnValue[field];
+        }
+      }
+      return returnValue;
+    }
+    return pipeline;
+  }
+
+  // This function is slightly different from the one above. Rather than trying to combine these
+  // two functions and making the code even harder to understand, I decided to split it up. The
+  // difference is that this function only transforms the keys of the pipeline; it does not touch
+  // the values.
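+  // For example, assuming a hypothetical Pointer column `sender` whose targetClass is `Chat`,
+  // a $project stage of
+  //     { sender: 1, name: 1, createdAt: 1 }
+  // would be rewritten to
+  //     { _p_sender: 1, name: 1, _created_at: 1 }
+  // while the $match transform above would additionally rewrite a queried value such as
+  // 'abc123' to 'Chat$abc123'.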
+  _parseAggregateProjectArgs(schema: any, pipeline: any): any {
+    const returnValue = {};
+    for (const field in pipeline) {
+      if (schema.fields[field] && schema.fields[field].type === 'Pointer') {
+        returnValue[`_p_${field}`] = pipeline[field];
+      } else {
+        returnValue[field] = this._parseAggregateArgs(schema, pipeline[field]);
+      }
+
+      if (field === 'objectId') {
+        returnValue['_id'] = returnValue[field];
+        delete returnValue[field];
+      } else if (field === 'createdAt') {
+        returnValue['_created_at'] = returnValue[field];
+        delete returnValue[field];
+      } else if (field === 'updatedAt') {
+        returnValue['_updated_at'] = returnValue[field];
+        delete returnValue[field];
+      }
+    }
+    return returnValue;
+  }
+
+  // This function is slightly different from the two above. A MongoDB $group stage looks like:
+  //     { $group: { _id: <expression>, <field1>: { <accumulator1>: <expression1> }, ... } }
+  // The <expression> could be a column name, prefixed with the '$' character. We'll look for
+  // these <expression>s and check whether the column is a 'Pointer' or one of createdAt,
+  // updatedAt or objectId, and change it accordingly.
+  _parseAggregateGroupArgs(schema: any, pipeline: any): any {
+    if (Array.isArray(pipeline)) {
+      return pipeline.map((value) => this._parseAggregateGroupArgs(schema, value));
+    } else if (typeof pipeline === 'object') {
+      const returnValue = {};
+      for (const field in pipeline) {
+        returnValue[field] = this._parseAggregateGroupArgs(schema, pipeline[field]);
+      }
+      return returnValue;
+    } else if (typeof pipeline === 'string') {
+      const field = pipeline.substring(1);
+      if (schema.fields[field] && schema.fields[field].type === 'Pointer') {
+        return `$_p_${field}`;
+      } else if (field === 'createdAt') {
+        return '$_created_at';
+      } else if (field === 'updatedAt') {
+        return '$_updated_at';
+      }
+    }
+    return pipeline;
+  }
+
+  // This function will attempt to convert the provided value to a Date object. Since this is part
+  // of an aggregation pipeline, the value can either be a string or it can be another object with
+  // an operator in it (like $gt, $lt, etc). Because of this I felt it was easier to make this a
+  // recursive method to traverse down to the "leaf node", which is going to be the string.
+  _convertToDate(value: any): any {
+    if (typeof value === 'string') {
+      return new Date(value);
+    }
+
+    const returnValue = {};
+    for (const field in value) {
+      returnValue[field] = this._convertToDate(value[field]);
+    }
+    return returnValue;
+  }
+
   _parseReadPreference(readPreference: ?string): ?string {
     switch (readPreference) {
     case 'PRIMARY':