From 0d5a1e80859e5c37774a6d5eeaaf53616a02544e Mon Sep 17 00:00:00 2001
From: Drew Gross
Date: Tue, 5 Apr 2016 10:14:37 -0700
Subject: [PATCH] Progress towards moving mongo logic into its adapter

---
 package.json                               |   1 +
 spec/AdaptableController.spec.js           |  18 +-
 spec/MongoSchemaCollectionAdapter.spec.js  |  55 ++++
 spec/OAuth.spec.js                         |  21 +-
 spec/Schema.spec.js                        | 278 +++++++++++-------
 .../Storage/Mongo/MongoSchemaCollection.js |  88 +++++-
 src/Routers/SchemasRouter.js               |  15 +-
 src/Schema.js                              | 180 ++++--------
 src/middlewares.js                         |   2 +-
 9 files changed, 387 insertions(+), 271 deletions(-)
 create mode 100644 spec/MongoSchemaCollectionAdapter.spec.js

diff --git a/package.json b/package.json
index 5a4b691dc4..1f6d73b374 100644
--- a/package.json
+++ b/package.json
@@ -27,6 +27,7 @@
     "deepcopy": "^0.6.1",
     "express": "^4.13.4",
     "intersect": "^1.0.1",
+    "lodash": "^4.8.2",
     "lru-cache": "^4.0.0",
     "mailgun-js": "^0.7.7",
     "mime": "^1.3.4",
diff --git a/spec/AdaptableController.spec.js b/spec/AdaptableController.spec.js
index 3b275ec4cf..5c7747e512 100644
--- a/spec/AdaptableController.spec.js
+++ b/spec/AdaptableController.spec.js
@@ -10,7 +10,7 @@ MockController.prototype = Object.create(AdaptableController.prototype);
 MockController.prototype.constructor = AdaptableController;
 
 describe("AdaptableController", ()=>{
-  
+
   it("should use the provided adapter", (done) => {
     var adapter = new FilesAdapter();
     var controller = new FilesController(adapter);
@@ -22,7 +22,7 @@
     expect(controller.adapter).toBe(adapter);
     done();
   });
-  
+
   it("should throw when creating a new mock controller", (done) => {
     var adapter = new FilesAdapter();
     expect(() => {
@@ -30,7 +30,7 @@
     }).toThrow();
     done();
   });
-  
+
   it("should fail setting the wrong adapter to the controller", (done) => {
     function WrongAdapter() {};
     var adapter = new FilesAdapter();
@@ -41,7 +41,7 @@
     }).toThrow();
     done();
   });
-  
+
   it("should fail to instantiate a controller with wrong adapter", (done) => {
     function WrongAdapter() {};
     var adapter = new WrongAdapter();
@@ -50,14 +50,14 @@
     }).toThrow();
     done();
   });
-  
+
   it("should fail to instantiate a controller without an adapter", (done) => {
     expect(() => {
       new FilesController();
     }).toThrow();
     done();
   });
-  
+
   it("should accept an object adapter", (done) => {
     var adapter = {
       createFile: function(config, filename, data) { },
@@ -70,18 +70,18 @@
     }).not.toThrow();
     done();
   });
-  
+
   it("should accept an object adapter", (done) => {
     function AGoodAdapter() {};
     AGoodAdapter.prototype.createFile = function(config, filename, data) { };
     AGoodAdapter.prototype.deleteFile = function(config, filename) { };
     AGoodAdapter.prototype.getFileData = function(config, filename) { };
     AGoodAdapter.prototype.getFileLocation = function(config, filename) { };
-  
+
     var adapter = new AGoodAdapter();
     expect(() => {
       new FilesController(adapter);
     }).not.toThrow();
     done();
   });
-});
\ No newline at end of file
+});
diff --git a/spec/MongoSchemaCollectionAdapter.spec.js b/spec/MongoSchemaCollectionAdapter.spec.js
new file mode 100644
index 0000000000..00a5b1900c
--- /dev/null
+++ b/spec/MongoSchemaCollectionAdapter.spec.js
@@ -0,0 +1,55 @@
+'use strict';
+
+const MongoSchemaCollection = require('../src/Adapters/Storage/Mongo/MongoSchemaCollection').default;
+
+describe('MongoSchemaCollection', () => {
+  it('can transform legacy _client_permissions keys to parse format', done => {
+    expect(MongoSchemaCollection._TESTmongoSchemaToParseSchema({
+      "_id":"_Installation",
+      "_client_permissions":{
+        "get":true,
+        "find":true,
+        "update":true,
+        "create":true,
+        "delete":true,
+      },
+      "_metadata":{
+        "class_permissions":{
+          "get":{"*":true},
+          "find":{"*":true},
+          "update":{"*":true},
+          "create":{"*":true},
+          "delete":{"*":true},
+          "addField":{"*":true},
+        }
+      },
+      "installationId":"string",
+      "deviceToken":"string",
+      "deviceType":"string",
+      "channels":"array",
+      "user":"*_User",
+    })).toEqual({
+      className: '_Installation',
+      fields: {
+        installationId: { type: 'String' },
+        deviceToken: { type: 'String' },
+        deviceType: { type: 'String' },
+        channels: { type: 'Array' },
+        user: { type: 'Pointer', targetClass: '_User' },
+        ACL: { type: 'ACL' },
+        createdAt: { type: 'Date' },
+        updatedAt: { type: 'Date' },
+        objectId: { type: 'String' },
+      },
+      classLevelPermissions: {
+        find: { '*': true },
+        get: { '*': true },
+        create: { '*': true },
+        update: { '*': true },
+        delete: { '*': true },
+        addField: { '*': true },
+      }
+    });
+    done();
+  });
+});
diff --git a/spec/OAuth.spec.js b/spec/OAuth.spec.js
index d96a86e14a..0a50611c0f 100644
--- a/spec/OAuth.spec.js
+++ b/spec/OAuth.spec.js
@@ -242,22 +242,21 @@ describe('OAuth', function() {
   it("should only create a single user with REST API", (done) => {
     var objectId;
     createOAuthUser((error, response, body) => {
+      expect(error).toBe(null);
+      var b = JSON.parse(body);
+      expect(b.objectId).not.toBeNull();
+      expect(b.objectId).not.toBeUndefined();
+      objectId = b.objectId;
+
+      createOAuthUser((error, response, body) => {
        expect(error).toBe(null);
        var b = JSON.parse(body);
        expect(b.objectId).not.toBeNull();
        expect(b.objectId).not.toBeUndefined();
-       objectId = b.objectId;
-
-       createOAuthUser((error, response, body) => {
-         expect(error).toBe(null);
-         var b = JSON.parse(body);
-         expect(b.objectId).not.toBeNull();
-         expect(b.objectId).not.toBeUndefined();
-         expect(b.objectId).toBe(objectId);
-         done();
-       });
+       expect(b.objectId).toBe(objectId);
+       done();
       });
-      
+
     });
   });
   it("unlink and link with custom provider", (done) => {
diff --git a/spec/Schema.spec.js b/spec/Schema.spec.js
index 2912067ff3..c800543eaf 100644
--- a/spec/Schema.spec.js
+++ b/spec/Schema.spec.js
@@ -163,14 +163,26 @@ describe('Schema', () => {
       .then(schema => schema.addClassIfNotExists('NewClass', {
         foo: {type: 'String'}
       }))
-      .then(result => {
-        expect(result).toEqual({
-          _id: 'NewClass',
-          objectId: 'string',
-          updatedAt: 'string',
-          createdAt: 'string',
-          foo: 'string',
-        })
+      .then(actualSchema => {
+        const expectedSchema = {
+          className: 'NewClass',
+          fields: {
+            objectId: { type: 'String' },
+            updatedAt: { type: 'Date' },
+            createdAt: { type: 'Date' },
+            ACL: { type: 'ACL' },
+            foo: { type: 'String' },
+          },
+          classLevelPermissions: {
+            find: { '*': true },
+            get: { '*': true },
+            create: { '*': true },
+            update: { '*': true },
+            delete: { '*': true },
+            addField: { '*': true },
+          },
+        }
+        expect(dd(actualSchema, expectedSchema)).toEqual(undefined);
         done();
       })
       .catch(error => {
@@ -201,15 +213,27 @@ describe('Schema', () => {
       .then(schema => {
         var p1 = schema.addClassIfNotExists('NewClass', {foo: {type: 'String'}});
         var p2 = schema.addClassIfNotExists('NewClass', {foo: {type: 'String'}});
-        Promise.race([p1, p2]) //Use race because we expect the first completed promise to be the successful one
-        .then(response => {
-          expect(response).toEqual({
-            _id: 'NewClass',
-            objectId: 'string',
-            updatedAt: 'string',
-            createdAt: 'string',
-            foo: 'string',
-          });
+        Promise.race([p1, p2])
+        .then(actualSchema => {
+          const expectedSchema = {
+            className: 'NewClass',
+            fields: {
+              objectId: { type: 'String' },
+              updatedAt: { type: 'Date' },
+              createdAt: { type: 'Date' },
+              ACL: { type: 'ACL' },
+              foo: { type: 'String' },
+            },
+            classLevelPermissions: {
+              find: { '*': true },
+              get: { '*': true },
+              create: { '*': true },
+              update: { '*': true },
+              delete: { '*': true },
+              addField: { '*': true },
+            },
+          }
+          expect(dd(actualSchema, expectedSchema)).toEqual(undefined);
         });
       Promise.all([p1,p2])
       .catch(error => {
@@ -373,23 +397,36 @@ describe('Schema', () => {
       aPointer: {type: 'Pointer', targetClass: 'ThisClassDoesNotExistYet'},
       aRelation: {type: 'Relation', targetClass: 'NewClass'},
     }))
-    .then(mongoObj => {
-      expect(mongoObj).toEqual({
-        _id: 'NewClass',
-        objectId: 'string',
-        createdAt: 'string',
-        updatedAt: 'string',
-        aNumber: 'number',
-        aString: 'string',
-        aBool: 'boolean',
-        aDate: 'date',
-        aObject: 'object',
-        aArray: 'array',
-        aGeoPoint: 'geopoint',
-        aFile: 'file',
-        aPointer: '*ThisClassDoesNotExistYet',
-        aRelation: 'relation',
-      });
+    .then(actualSchema => {
+      const expectedSchema = {
+        className: 'NewClass',
+        fields: {
+          objectId: { type: 'String' },
+          updatedAt: { type: 'Date' },
+          createdAt: { type: 'Date' },
+          ACL: { type: 'ACL' },
+          aString: { type: 'String' },
+          aNumber: { type: 'Number' },
+          aBool: { type: 'Boolean' },
+          aDate: { type: 'Date' },
+          aObject: { type: 'Object' },
+          aArray: { type: 'Array' },
+          aGeoPoint: { type: 'GeoPoint' },
+          aFile: { type: 'File' },
+          aPointer: { type: 'Pointer', targetClass: 'ThisClassDoesNotExistYet' },
+          aRelation: { type: 'Relation', targetClass: 'NewClass' },
+        },
+        classLevelPermissions: {
+          find: { '*': true },
+          get: { '*': true },
+          create: { '*': true },
+          update: { '*': true },
+          delete: { '*': true },
+          addField: { '*': true },
+        },
+      }
+      expect(dd(actualSchema, expectedSchema)).toEqual(undefined);
       done();
     });
   });
@@ -399,23 +436,35 @@ describe('Schema', () => {
     .then(schema => schema.addClassIfNotExists('_Installation', {
       foo: {type: 'Number'},
     }))
-    .then(mongoObj => {
-      expect(mongoObj).toEqual({
-        _id: '_Installation',
-        createdAt: 'string',
-        updatedAt: 'string',
-        objectId: 'string',
-        foo: 'number',
-        installationId: 'string',
-        deviceToken: 'string',
-        channels: 'array',
-        deviceType: 'string',
-        pushType: 'string',
-        GCMSenderId: 'string',
-        timeZone: 'string',
-        localeIdentifier: 'string',
-        badge: 'number',
-      });
+    .then(actualSchema => {
+      const expectedSchema = {
+        className: '_Installation',
+        fields: {
+          objectId: { type: 'String' },
+          updatedAt: { type: 'Date' },
+          createdAt: { type: 'Date' },
+          ACL: { type: 'ACL' },
+          foo: { type: 'Number' },
+          installationId: { type: 'String' },
+          deviceToken: { type: 'String' },
+          channels: { type: 'Array' },
+          deviceType: { type: 'String' },
+          pushType: { type: 'String' },
+          GCMSenderId: { type: 'String' },
+          timeZone: { type: 'String' },
+          localeIdentifier: { type: 'String' },
+          badge: { type: 'Number' },
+        },
+        classLevelPermissions: {
+          find: { '*': true },
+          get: { '*': true },
+          create: { '*': true },
+          update: { '*': true },
+          delete: { '*': true },
+          addField: { '*': true },
+        },
+      }
+      expect(dd(actualSchema, expectedSchema)).toEqual(undefined);
      done();
    });
  });
@@ -423,16 +472,28 @@ describe('Schema', () => {
  it('creates non-custom classes which include relation field', done => {
    config.database.loadSchema()
    .then(schema => schema.addClassIfNotExists('_Role', {}))
-    .then(mongoObj => {
-      expect(mongoObj).toEqual({
-        _id: '_Role',
-        createdAt: 'string',
-        updatedAt: 'string',
-        objectId: 'string',
-        name: 'string',
-        users: 'relation<_User>',
-        roles: 'relation<_Role>',
-      });
+    .then(actualSchema => {
+      const expectedSchema = {
+        className: '_Role',
+        fields: {
+          objectId: { type: 'String' },
+          updatedAt: { type: 'Date' },
+          createdAt: { type: 'Date' },
+          ACL: { type: 'ACL' },
+          name: { type: 'String' },
+          users: { type: 'Relation', targetClass: '_User' },
+          roles: { type: 'Relation', targetClass: '_Role' },
+        },
+        classLevelPermissions: {
+          find: { '*': true },
+          get: { '*': true },
+          create: { '*': true },
+          update: { '*': true },
+          delete: { '*': true },
+          addField: { '*': true },
+        },
+      };
+      expect(dd(actualSchema, expectedSchema)).toEqual(undefined);
      done();
    });
  });
@@ -440,19 +501,31 @@ describe('Schema', () => {
  it('creates non-custom classes which include pointer field', done => {
    config.database.loadSchema()
    .then(schema => schema.addClassIfNotExists('_Session', {}))
-    .then(mongoObj => {
-      expect(mongoObj).toEqual({
-        _id: '_Session',
-        createdAt: 'string',
-        updatedAt: 'string',
-        objectId: 'string',
-        restricted: 'boolean',
-        user: '*_User',
-        installationId: 'string',
-        sessionToken: 'string',
-        expiresAt: 'date',
-        createdWith: 'object'
-      });
+    .then(actualSchema => {
+      const expectedSchema = {
+        className: '_Session',
+        fields: {
+          objectId: { type: 'String' },
+          updatedAt: { type: 'Date' },
+          createdAt: { type: 'Date' },
+          restricted: { type: 'Boolean' },
+          user: { type: 'Pointer', targetClass: '_User' },
+          installationId: { type: 'String' },
+          sessionToken: { type: 'String' },
+          expiresAt: { type: 'Date' },
+          createdWith: { type: 'Object' },
+          ACL: { type: 'ACL' },
+        },
+        classLevelPermissions: {
+          find: { '*': true },
+          get: { '*': true },
+          create: { '*': true },
+          update: { '*': true },
+          delete: { '*': true },
+          addField: { '*': true },
+        },
+      };
+      expect(dd(actualSchema, expectedSchema)).toEqual(undefined);
      done();
    });
  });
@@ -583,14 +656,26 @@ describe('Schema', () => {
    schema.addClassIfNotExists('NewClass', {
      relationField: {type: 'Relation', targetClass: '_User'}
    })
-    .then(mongoObj => {
-      expect(mongoObj).toEqual({
-        _id: 'NewClass',
-        objectId: 'string',
-        updatedAt: 'string',
-        createdAt: 'string',
-        relationField: 'relation<_User>',
-      });
+    .then(actualSchema => {
+      const expectedSchema = {
+        className: 'NewClass',
+        fields: {
+          objectId: { type: 'String' },
+          updatedAt: { type: 'Date' },
+          createdAt: { type: 'Date' },
+          ACL: { type: 'ACL' },
+          relationField: { type: 'Relation', targetClass: '_User' },
+        },
+        classLevelPermissions: {
+          find: { '*': true },
+          get: { '*': true },
+          create: { '*': true },
+          update: { '*': true },
+          delete: { '*': true },
+          addField: { '*': true },
+        },
+      };
+      expect(dd(actualSchema, expectedSchema)).toEqual(undefined);
    })
    .then(() => config.database.collectionExists('_Join:relationField:NewClass'))
    .then(exist => {
@@ -703,33 +788,4 @@ describe('Schema', () => {
    });
    done();
  });
-
-  it('handles legacy _client_permissions keys without crashing', done => {
-    Schema.mongoSchemaToSchemaAPIResponse({
-      "_id":"_Installation",
-      "_client_permissions":{
-        "get":true,
-        "find":true,
-        "update":true,
-        "create":true,
-        "delete":true,
-      },
-      "_metadata":{
-        "class_permissions":{
-          "get":{"*":true},
-          "find":{"*":true},
-          "update":{"*":true},
-          "create":{"*":true},
-          "delete":{"*":true},
-          "addField":{"*":true},
-        }
-      },
-      "installationId":"string",
-      "deviceToken":"string",
-      "deviceType":"string",
-      "channels":"array",
-      "user":"*_User",
-    });
-    done();
-  });
 });
diff --git a/src/Adapters/Storage/Mongo/MongoSchemaCollection.js b/src/Adapters/Storage/Mongo/MongoSchemaCollection.js
index 992068b5df..f64867a76d 100644
--- a/src/Adapters/Storage/Mongo/MongoSchemaCollection.js
+++ b/src/Adapters/Storage/Mongo/MongoSchemaCollection.js
@@ -1,6 +1,67 @@
 import MongoCollection from './MongoCollection';
 
+function mongoFieldToParseSchemaField(type) {
+  if (type[0] === '*') {
+    return {
+      type: 'Pointer',
+      targetClass: type.slice(1),
+    };
+  }
+  if (type.startsWith('relation<')) {
+    return {
+      type: 'Relation',
+      targetClass: type.slice('relation<'.length, type.length - 1),
+    };
+  }
+  switch (type) {
+    case 'number':   return {type: 'Number'};
+    case 'string':   return {type: 'String'};
+    case 'boolean':  return {type: 'Boolean'};
+    case 'date':     return {type: 'Date'};
+    case 'map':
+    case 'object':   return {type: 'Object'};
+    case 'array':    return {type: 'Array'};
+    case 'geopoint': return {type: 'GeoPoint'};
+    case 'file':     return {type: 'File'};
+  }
+}
+
+const nonFieldSchemaKeys = ['_id', '_metadata', '_client_permissions'];
+function mongoSchemaFieldsToParseSchemaFields(schema) {
+  var fieldNames = Object.keys(schema).filter(key => nonFieldSchemaKeys.indexOf(key) === -1);
+  var response = fieldNames.reduce((obj, fieldName) => {
+    obj[fieldName] = mongoFieldToParseSchemaField(schema[fieldName])
+    return obj;
+  }, {});
+  response.ACL = {type: 'ACL'};
+  response.createdAt = {type: 'Date'};
+  response.updatedAt = {type: 'Date'};
+  response.objectId = {type: 'String'};
+  return response;
+}
+
+const defaultCLPS = Object.freeze({
+  find: {'*': true},
+  get: {'*': true},
+  create: {'*': true},
+  update: {'*': true},
+  delete: {'*': true},
+  addField: {'*': true},
+});
+
+function mongoSchemaToParseSchema(mongoSchema) {
+  let clpsFromMongoObject = {};
+  if (mongoSchema._metadata && mongoSchema._metadata.class_permissions) {
+    clpsFromMongoObject = mongoSchema._metadata.class_permissions;
+  }
+  return {
+    className: mongoSchema._id,
+    fields: mongoSchemaFieldsToParseSchemaFields(mongoSchema),
+    classLevelPermissions: {...defaultCLPS, ...clpsFromMongoObject},
+  };
+}
+
 function _mongoSchemaQueryFromNameQuery(name: string, query) {
   return _mongoSchemaObjectFromNameFields(name, query);
 }
@@ -15,20 +76,31 @@ function _mongoSchemaObjectFromNameFields(name: string, fields) {
   return object;
 }
 
-export default class MongoSchemaCollection {
+class MongoSchemaCollection {
   _collection: MongoCollection;
 
   constructor(collection: MongoCollection) {
     this._collection = collection;
   }
 
+  // Return a promise for all schemas known to this adapter, in Parse format. In case the
+  // schemas cannot be retrieved, returns a promise that rejects. Requirements for the
+  // rejection reason are TBD.
   getAllSchemas() {
-    return this._collection._rawFind({});
+    return this._collection._rawFind({})
+      .then(schemas => schemas.map(mongoSchemaToParseSchema));
   }
 
+  // Return a promise for the schema with the given name, in Parse format. If
+  // this adapter doesn't know about the schema, return a promise that rejects with
+  // undefined as the reason.
   findSchema(name: string) {
     return this._collection._rawFind(_mongoSchemaQueryFromNameQuery(name), { limit: 1 }).then(results => {
-      return results[0];
+      if (results.length === 1) {
+        return mongoSchemaToParseSchema(results[0]);
+      } else {
+        return Promise.reject();
+      }
     });
   }
 
@@ -56,3 +128,13 @@ export default class MongoSchemaCollection {
     return this._collection.upsertOne(_mongoSchemaQueryFromNameQuery(name, query), update);
   }
 }
+
+// Exported for testing reasons and because we haven't moved all mongo schema format
+// related logic into the database adapter yet.
+MongoSchemaCollection._TESTmongoSchemaToParseSchema = mongoSchemaToParseSchema
+
+// Exported because we haven't moved all mongo schema format related logic
+// into the database adapter yet. We will remove this before too long.
+MongoSchemaCollection._DONOTUSEmongoFieldToParseSchemaField = mongoFieldToParseSchemaField
+
+export default MongoSchemaCollection
diff --git a/src/Routers/SchemasRouter.js b/src/Routers/SchemasRouter.js
index 49e4bbb29e..3babd66828 100644
--- a/src/Routers/SchemasRouter.js
+++ b/src/Routers/SchemasRouter.js
@@ -17,7 +17,6 @@ function classNameMismatchResponse(bodyClass, pathClass) {
 function getAllSchemas(req) {
   return req.config.database.schemaCollection()
     .then(collection => collection.getAllSchemas())
-    .then(schemas => schemas.map(Schema.mongoSchemaToSchemaAPIResponse))
     .then(schemas => ({ response: { results: schemas } }));
 }
 
@@ -25,11 +24,13 @@ function getOneSchema(req) {
   const className = req.params.className;
   return req.config.database.schemaCollection()
     .then(collection => collection.findSchema(className))
-    .then(mongoSchema => {
-      if (!mongoSchema) {
+    .then(schema => ({ response: schema }))
+    .catch(error => {
+      if (error === undefined) {
         throw new Parse.Error(Parse.Error.INVALID_CLASS_NAME, `Class ${className} does not exist.`);
+      } else {
+        throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'Database adapter error.');
       }
-      return { response: Schema.mongoSchemaToSchemaAPIResponse(mongoSchema) };
     });
 }
 
@@ -47,7 +48,7 @@ function createSchema(req) {
 
   return req.config.database.loadSchema()
     .then(schema => schema.addClassIfNotExists(className, req.body.fields, req.body.classLevelPermissions))
-    .then(result => ({ response: Schema.mongoSchemaToSchemaAPIResponse(result) }));
+    .then(schema => ({ response: schema }));
 }
 
 function modifySchema(req) {
@@ -55,8 +56,8 @@ function modifySchema(req) {
     return classNameMismatchResponse(req.body.className, req.params.className);
   }
 
-  var submittedFields = req.body.fields || {};
-  var className = req.params.className;
+  let submittedFields = req.body.fields || {};
+  let className = req.params.className;
 
   return req.config.database.loadSchema()
     .then(schema => {
diff --git a/src/Schema.js b/src/Schema.js
index 0e43fc73d3..d4bdfc2b81 100644
--- a/src/Schema.js
+++ b/src/Schema.js
@@ -16,6 +16,8 @@
 var Parse = require('parse/node').Parse;
 var transform = require('./transform');
+import MongoSchemaCollection from './Adapters/Storage/Mongo/MongoSchemaCollection';
+import _ from 'lodash';
 
 const defaultColumns = Object.freeze({
   // Contain the default columns for every parse object type (except _Join collection)
@@ -113,15 +115,6 @@ function verifyPermissionKey(key) {
 }
 
 const CLPValidKeys = Object.freeze(['find', 'get', 'create', 'update', 'delete', 'addField']);
-let DefaultClassLevelPermissions = () => {
-  return CLPValidKeys.reduce((perms, key) => {
-    perms[key] = {
-      '*': true
-    };
-    return perms;
-  }, {});
-}
-
 function validateCLP(perms) {
   if (!perms) {
     return;
   }
@@ -220,11 +213,8 @@ function schemaAPITypeToMongoFieldType(type) {
   }
 }
 
-// Create a schema from a Mongo collection and the exported schema format.
-// mongoSchema should be a list of objects, each with:
-// '_id' indicates the className
-// '_metadata' is ignored for now
-// Everything else is expected to be a userspace field.
+// Stores the entire schema of the app in a weird hybrid format somewhere between
+// the mongo format and the Parse format. Soon, this will all be Parse format.
 class Schema {
   _collection;
   data;
@@ -233,7 +223,8 @@ class Schema {
 
   constructor(collection) {
     this._collection = collection;
-    // this.data[className][fieldName] tells you the type of that field
+    // this.data[className][fieldName] tells you the type of that field, in mongo format
+    // TODO: use Parse format
     this.data = {};
     // this.perms[className][operation] tells you the acl-style permissions
     this.perms = {};
@@ -242,43 +233,24 @@ class Schema {
   reloadData() {
     this.data = {};
     this.perms = {};
-    return this._collection.getAllSchemas().then(results => {
-      for (let obj of results) {
-        let className = null;
-        let classData = {};
-        let permsData = null;
-        Object.keys(obj).forEach(key => {
-          let value = obj[key];
-          switch (key) {
-          case '_id':
-            className = value;
-            break;
-          case '_metadata':
-            if (value && value['class_permissions']) {
-              permsData = value['class_permissions'];
-            }
-            break;
-          default:
-            classData[key] = value;
-          }
-        });
-        if (className) {
-          // merge with the default schema
-          let defaultClassData = Object.assign({}, defaultColumns._Default, defaultColumns[className]);
-          defaultClassData = Object.keys(defaultClassData).reduce((memo, key) => {
-            let type = schemaAPITypeToMongoFieldType(defaultClassData[key]).result;
-            if (type) {
-              memo[key] = type;
-            }
-            return memo;
-          }, {});
-          classData = Object.assign({}, defaultClassData, classData);
-          this.data[className] = classData;
-          if (permsData) {
-            this.perms[className] = permsData;
-          }
+    return this._collection.getAllSchemas().then(allSchemas => {
+      allSchemas.forEach(schema => {
+        const parseFormatSchema = {
+          ...defaultColumns._Default,
+          ...(defaultColumns[schema.className] || {}),
+          ...schema.fields,
         }
+        // ACL doesn't show up in mongo, it's implicit
+        delete parseFormatSchema.ACL;
+        // createdAt and updatedAt are wacky and have legacy baggage
+        parseFormatSchema.createdAt = { type: 'String' };
+        parseFormatSchema.updatedAt = { type: 'String' };
+        this.data[schema.className] = _.mapValues(parseFormatSchema, parseField =>
+          schemaAPITypeToMongoFieldType(parseField).result
+        );
+
+        this.perms[schema.className] = schema.classLevelPermissions;
+      });
     });
   }
@@ -300,7 +272,8 @@ class Schema {
     }
 
     return this._collection.addSchema(className, mongoObject.result)
-      .then(result => result.ops[0])
+      //TODO: Move this logic into the database adapter
+      .then(result => MongoSchemaCollection._TESTmongoSchemaToParseSchema(result.ops[0]))
       .catch(error => {
         if (error.code === 11000) { //Mongo's duplicate key error
           throw new Parse.Error(Parse.Error.INVALID_CLASS_NAME, `Class ${className} already exists.`);
@@ -354,7 +327,8 @@ class Schema {
     .then(() => {
       return this.setPermissions(className, classLevelPermissions)
     })
-    .then(() => { return mongoSchemaToSchemaAPIResponse(mongoObject.result) });
+    //TODO: Move this logic into the database adapter
+    .then(() => MongoSchemaCollection._TESTmongoSchemaToParseSchema(mongoObject.result));
   }
 
 
@@ -384,7 +358,7 @@ class Schema {
         'schema is frozen, cannot add: ' + className);
     }
     // We don't have this class. Update the schema
-    return this._collection.addSchema(className).then(() => {
+    return this.addClassIfNotExists(className, []).then(() => {
       // The schema update succeeded. Reload the schema
       return this.reloadData();
     }, () => {
@@ -421,20 +395,20 @@ class Schema {
   }
 
   // Returns a promise that resolves successfully to the new schema
-  // object if the provided className-key-type tuple is valid.
+  // object if the provided className-fieldName-type tuple is valid.
   // The className must already be validated.
   // If 'freeze' is true, refuse to update the schema for this field.
-  validateField(className, key, type, freeze) {
-    // Just to check that the key is valid
-    transform.transformKey(this, className, key);
+  validateField(className, fieldName, type, freeze) {
+    // Just to check that the fieldName is valid
+    transform.transformKey(this, className, fieldName);
 
-    if( key.indexOf(".") > 0 ) {
+    if( fieldName.indexOf(".") > 0 ) {
       // subdocument key (x.y) => ok if x is of type 'object'
-      key = key.split(".")[ 0 ];
+      fieldName = fieldName.split(".")[ 0 ];
       type = 'object';
     }
 
-    var expected = this.data[className][key];
+    let expected = this.data[className][fieldName];
     if (expected) {
       expected = (expected === 'map' ? 'object' : expected);
       if (expected === type) {
@@ -442,14 +416,13 @@ class Schema {
       } else {
         throw new Parse.Error(
           Parse.Error.INCORRECT_TYPE,
-          'schema mismatch for ' + className + '.' + key +
-          '; expected ' + expected + ' but got ' + type);
+          `schema mismatch for ${className}.${fieldName}; expected ${expected} but got ${type}`
+        );
       }
     }
 
     if (freeze) {
-      throw new Parse.Error(Parse.Error.INVALID_JSON,
-        'schema is frozen, cannot add ' + key + ' field');
+      throw new Parse.Error(Parse.Error.INVALID_JSON, `schema is frozen, cannot add ${fieldName} field`);
     }
 
     // We don't have this field, but if the value is null or undefined,
@@ -473,9 +446,9 @@ class Schema {
     // Note that we use the $exists guard and $set to avoid race
     // conditions in the database. This is important!
     let query = {};
-    query[key] = { '$exists': false };
+    query[fieldName] = { '$exists': false };
     var update = {};
-    update[key] = type;
+    update[fieldName] = type;
     update = {'$set': update};
     return this._collection.upsertSchema(className, query, update).then(() => {
       // The update succeeded. Reload the schema
@@ -487,7 +460,7 @@ class Schema {
       return this.reloadData();
     }).then(() => {
       // Ensure that the schema now validates
-      return this.validateField(className, key, type, true);
+      return this.validateField(className, fieldName, type, true);
     }, (error) => {
       // The schema still doesn't validate. Give up
       throw new Parse.Error(Parse.Error.INVALID_JSON,
@@ -557,11 +530,11 @@ class Schema {
   validateObject(className, object, query) {
     var geocount = 0;
     var promise = this.validateClassName(className);
-    for (var key in object) {
-      if (object[key] === undefined) {
+    for (let fieldName in object) {
+      if (object[fieldName] === undefined) {
         continue;
       }
-      var expected = getType(object[key]);
+      var expected = getType(object[fieldName]);
       if (expected === 'geopoint') {
         geocount++;
       }
@@ -576,7 +549,12 @@ class Schema {
       if (!expected) {
         continue;
       }
-      promise = thenValidateField(promise, className, key, expected);
+      if (fieldName === 'ACL') {
+        // Every object has ACL implicitly.
+        continue;
+      }
+
+      promise = thenValidateField(promise, className, fieldName, expected);
     }
     promise = thenValidateRequiredColumns(promise, className, object, query);
     return promise;
   }
@@ -735,32 +713,6 @@ function mongoSchemaFromFieldsAndClassNameAndCLP(fields, className, classLevelPe
   return { result: mongoObject };
 }
 
-function mongoFieldTypeToSchemaAPIType(type) {
-  if (type[0] === '*') {
-    return {
-      type: 'Pointer',
-      targetClass: type.slice(1),
-    };
-  }
-  if (type.startsWith('relation<')) {
-    return {
-      type: 'Relation',
-      targetClass: type.slice('relation<'.length, type.length - 1),
-    };
-  }
-  switch (type) {
-    case 'number':   return {type: 'Number'};
-    case 'string':   return {type: 'String'};
-    case 'boolean':  return {type: 'Boolean'};
-    case 'date':     return {type: 'Date'};
-    case 'map':
-    case 'object':   return {type: 'Object'};
-    case 'array':    return {type: 'Array'};
-    case 'geopoint': return {type: 'GeoPoint'};
-    case 'file':     return {type: 'File'};
-  }
-}
-
 // Builds a new schema (in schema API response format) out of an
 // existing mongo schema + a schemas API put request. This response
 // does not include the default fields, as it is intended to be passed
@@ -776,7 +728,7 @@ function buildMergedSchemaObject(mongoObject, putRequest) {
     }
       var fieldIsDeleted = putRequest[oldField] && putRequest[oldField].__op === 'Delete'
       if (!fieldIsDeleted) {
-        newSchema[oldField] = mongoFieldTypeToSchemaAPIType(mongoObject[oldField]);
+        newSchema[oldField] = MongoSchemaCollection._DONOTUSEmongoFieldToParseSchemaField(mongoObject[oldField]);
       }
     }
   }
@@ -891,41 +843,11 @@ function getObjectType(obj) {
   return 'object';
 }
 
-const nonFieldSchemaKeys = ['_id', '_metadata', '_client_permissions'];
-function mongoSchemaAPIResponseFields(schema) {
-  var fieldNames = Object.keys(schema).filter(key => nonFieldSchemaKeys.indexOf(key) === -1);
-  var response = fieldNames.reduce((obj, fieldName) => {
-    obj[fieldName] = mongoFieldTypeToSchemaAPIType(schema[fieldName])
-    return obj;
-  }, {});
-  response.ACL = {type: 'ACL'};
-  response.createdAt = {type: 'Date'};
-  response.updatedAt = {type: 'Date'};
-  response.objectId = {type: 'String'};
-  return response;
-}
-
-function mongoSchemaToSchemaAPIResponse(schema) {
-  let result = {
-    className: schema._id,
-    fields: mongoSchemaAPIResponseFields(schema),
-  };
-
-  let classLevelPermissions = DefaultClassLevelPermissions();
-  if (schema._metadata && schema._metadata.class_permissions) {
-    classLevelPermissions = Object.assign({}, classLevelPermissions, schema._metadata.class_permissions);
-  }
-  result.classLevelPermissions = classLevelPermissions;
-  return result;
-}
-
 export {
   load,
   classNameIsValid,
   invalidClassNameMessage,
   schemaAPITypeToMongoFieldType,
   buildMergedSchemaObject,
-  mongoFieldTypeToSchemaAPIType,
-  mongoSchemaToSchemaAPIResponse,
   systemClasses,
 };
diff --git a/src/middlewares.js b/src/middlewares.js
index cb625f3f4c..d56840b4b0 100644
--- a/src/middlewares.js
+++ b/src/middlewares.js
@@ -1,5 +1,5 @@
 import cache from './cache';
-import log from './logger'; 
+import log from './logger';
 
 var Parse = require('parse/node').Parse;
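
Reviewer note (not part of the patch): a small, hypothetical sketch of the mongo-to-Parse schema conversion this change introduces, using the temporary _TESTmongoSchemaToParseSchema export. The 'Comment' schema document below is invented for illustration, and the snippet assumes it runs from the repository root through the project's Babel/ES6 build, since the module uses import syntax.

    // Hypothetical usage sketch; 'Comment', 'text', 'post' and 'likes' are made-up examples.
    const MongoSchemaCollection =
      require('./src/Adapters/Storage/Mongo/MongoSchemaCollection').default;

    // A legacy mongo _SCHEMA document: the class name lives in _id and field
    // types are strings such as 'string', '*Post' (pointer) or 'relation<_User>'.
    const mongoSchema = {
      _id: 'Comment',
      text: 'string',
      post: '*Post',
      likes: 'relation<_User>',
    };

    const parseSchema = MongoSchemaCollection._TESTmongoSchemaToParseSchema(mongoSchema);
    console.log(parseSchema.className);                  // 'Comment'
    console.log(parseSchema.fields.post);                // { type: 'Pointer', targetClass: 'Post' }
    console.log(parseSchema.fields.likes);               // { type: 'Relation', targetClass: '_User' }
    console.log(parseSchema.classLevelPermissions.find); // { '*': true } (defaultCLPS applied)

Because getAllSchemas and findSchema now resolve with this Parse format directly, callers such as SchemasRouter no longer need Schema.mongoSchemaToSchemaAPIResponse.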