From 46ee5dd91c61f49bad4da8286b2f97c737d96631 Mon Sep 17 00:00:00 2001 From: Ivan Artemiev <29709626+iartemiev@users.noreply.github.com> Date: Thu, 2 Sep 2021 15:22:46 -0400 Subject: [PATCH] feat(@aws-amplify/datastore): add SQLite storage adapter option for RN apps (#8809) --- .circleci/config.yml | 1 + package.json | 1 - packages/datastore-storage-adapter/.npmignore | 15 + .../__tests__/SQLiteUtils.test.ts | 658 +++++++++++++ .../__tests__/helpers.ts | 912 ++++++++++++++++++ packages/datastore-storage-adapter/build.js | 5 + packages/datastore-storage-adapter/index.js | 7 + .../datastore-storage-adapter/package.json | 93 ++ .../src/SQLiteAdapter/SQLiteAdapter.ts | 480 +++++++++ .../src/SQLiteAdapter/SQLiteDatabase.ts | 167 ++++ .../src/SQLiteAdapter/SQLiteUtils.ts | 427 ++++++++ .../src/SQLiteAdapter/types.ts | 31 + .../datastore-storage-adapter/src/index.ts | 2 + .../datastore-storage-adapter/tslint.json | 50 + .../webpack.config.dev.js | 6 + .../webpack.config.js | 44 + packages/datastore/package.json | 2 +- packages/datastore/src/datastore/datastore.ts | 70 +- packages/datastore/src/index.ts | 32 +- packages/datastore/src/storage/storage.ts | 2 +- packages/datastore/src/sync/index.ts | 6 + packages/datastore/src/sync/utils.ts | 12 +- packages/datastore/src/types.ts | 41 +- packages/datastore/src/util.ts | 13 + scripts/build.js | 4 +- 25 files changed, 3064 insertions(+), 17 deletions(-) create mode 100644 packages/datastore-storage-adapter/.npmignore create mode 100644 packages/datastore-storage-adapter/__tests__/SQLiteUtils.test.ts create mode 100644 packages/datastore-storage-adapter/__tests__/helpers.ts create mode 100644 packages/datastore-storage-adapter/build.js create mode 100644 packages/datastore-storage-adapter/index.js create mode 100644 packages/datastore-storage-adapter/package.json create mode 100644 packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteAdapter.ts create mode 100644 packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteDatabase.ts create mode 100644 packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteUtils.ts create mode 100644 packages/datastore-storage-adapter/src/SQLiteAdapter/types.ts create mode 100644 packages/datastore-storage-adapter/src/index.ts create mode 100644 packages/datastore-storage-adapter/tslint.json create mode 100644 packages/datastore-storage-adapter/webpack.config.dev.js create mode 100644 packages/datastore-storage-adapter/webpack.config.js diff --git a/.circleci/config.yml b/.circleci/config.yml index 8ff7b48e739..e65c59a6a93 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -344,6 +344,7 @@ jobs: command: | mv .yarnrc ._yarnrc yarn + - run: yarn bootstrap - run: yarn build # storing yarn.lock as an artifact, so that we can quickly get a dependency diff # with the last working build in the event that some upstream deps break the build in the future diff --git a/package.json b/package.json index c60ada906c0..329ed2547f1 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,6 @@ "publish:release": "lerna publish --conventional-commits --yes --message 'chore(release): Publish [ci skip]' --no-verify-access", "publish:1.0-stable": "lerna publish --conventional-commits --yes --dist-tag=stable-1.0 --message 'chore(release): Publish [ci skip]' --no-verify-access", "publish:ui-components/main": "lerna publish --canary --force-publish \"*\" --yes --dist-tag=ui-preview --preid=ui-preview --exact --no-verify-access", - "publish:native": "lerna publish --canary --force-publish \"*\" --yes --dist-tag=native 
--preid=native --exact --no-verify-access", "publish:verdaccio": "lerna publish --no-push --canary minor --dist-tag=unstable --preid=unstable --exact --force-publish --yes --no-verify-access" }, "husky": { diff --git a/packages/datastore-storage-adapter/.npmignore b/packages/datastore-storage-adapter/.npmignore new file mode 100644 index 00000000000..7843ccd1b80 --- /dev/null +++ b/packages/datastore-storage-adapter/.npmignore @@ -0,0 +1,15 @@ +__mocks__/** +__tests__/** +coverage/** +docs/** +node_modules/** +.vscode/** +.DS_Store +*.log +prepend-license.js +prettier.config.json +tsconfig.json +tsfmt.json +tslint.json +typeDoc.js +webpack.config.js \ No newline at end of file diff --git a/packages/datastore-storage-adapter/__tests__/SQLiteUtils.test.ts b/packages/datastore-storage-adapter/__tests__/SQLiteUtils.test.ts new file mode 100644 index 00000000000..09d5ce17b93 --- /dev/null +++ b/packages/datastore-storage-adapter/__tests__/SQLiteUtils.test.ts @@ -0,0 +1,658 @@ +import { + generateSchemaStatements, + queryByIdStatement, + queryAllStatement, + queryOneStatement, + modelInsertStatement, + modelUpdateStatement, + whereClauseFromPredicate, + limitClauseFromPagination, + orderByClauseFromSort, + deleteByIdStatement, + deleteByPredicateStatement, + modelCreateTableStatement, + implicitAuthFieldsForModel, +} from '../src/SQLiteAdapter/SQLiteUtils'; +import { + InternalSchema, + PersistentModelConstructor, + QueryOne, + SchemaModel, + initSchema as initSchemaType, +} from '@aws-amplify/datastore'; +import { Model, testSchema, internalTestSchema } from './helpers'; + +let initSchema: typeof initSchemaType; + +const INTERNAL_TEST_SCHEMA_STATEMENTS = [ + 'CREATE TABLE IF NOT EXISTS "Setting" ("id" PRIMARY KEY NOT NULL, "key" TEXT NOT NULL, "value" TEXT NOT NULL);', + 'CREATE TABLE IF NOT EXISTS "Model" ("id" PRIMARY KEY NOT NULL, "field1" TEXT NOT NULL, "optionalField1" TEXT, "dateCreated" TEXT NOT NULL, "emails" TEXT NOT NULL, "ips" TEXT, "metadata" TEXT, "_version" INTEGER, "_lastChangedAt" INTEGER, "_deleted" INTEGER);', + 'CREATE TABLE IF NOT EXISTS "LocalModel" ("id" PRIMARY KEY NOT NULL, "field1" TEXT NOT NULL, "_version" INTEGER, "_lastChangedAt" INTEGER, "_deleted" INTEGER);', + 'CREATE TABLE IF NOT EXISTS "MutationEvent" ("id" PRIMARY KEY NOT NULL, "model" TEXT NOT NULL, "data" TEXT NOT NULL, "modelId" TEXT NOT NULL, "operation" TEXT NOT NULL, "condition" TEXT NOT NULL);', + 'CREATE TABLE IF NOT EXISTS "ModelMetadata" ("id" PRIMARY KEY NOT NULL, "namespace" TEXT NOT NULL, "model" TEXT NOT NULL, "lastSync" INTEGER, "lastFullSync" INTEGER, "fullSyncInterval" INTEGER NOT NULL);', +]; + +const INTERNAL_TEST_SCHEMA_MANY_TO_MANY_STATEMENT = + 'CREATE TABLE IF NOT EXISTS "PostEditor" ("id" PRIMARY KEY NOT NULL, "post" TEXT, "postID" TEXT NOT NULL, "editor" TEXT, "editorID" TEXT NOT NULL, "createdAt" TEXT, "updatedAt" TEXT, "_version" INTEGER, "_lastChangedAt" INTEGER, "_deleted" INTEGER);'; + +describe('SQLiteUtils tests', () => { + let Model: PersistentModelConstructor; + + beforeAll(async () => { + ({ initSchema } = require('@aws-amplify/datastore')); + + const classes = initSchema(testSchema()); + + ({ Model } = classes as { + Model: PersistentModelConstructor; + }); + }); + + describe('createSchemaStatements', () => { + it('should generate valid CREATE TABLE statements from internal schema', () => { + const schema: InternalSchema = internalTestSchema(); + + expect(generateSchemaStatements(schema)).toEqual( + INTERNAL_TEST_SCHEMA_STATEMENTS + ); + }); + }); + + 
describe('modelCreateTableStatement', () => { + it('should generate valid CREATE TABLE statement from a M:N join table model with implicit FKs', () => { + expect(modelCreateTableStatement(postEditorImplicit, true)).toEqual( + INTERNAL_TEST_SCHEMA_MANY_TO_MANY_STATEMENT + ); + }); + + it('should generate valid CREATE TABLE statement from a M:N join table model with explicit FKs', () => { + expect(modelCreateTableStatement(postEditorExplicit, true)).toEqual( + INTERNAL_TEST_SCHEMA_MANY_TO_MANY_STATEMENT + ); + }); + }); + + describe('implicitAuthFieldsForModel', () => { + it('should extract implicitly defined owner field from model attributes', () => { + expect(implicitAuthFieldsForModel(ownerAuthImplicit)).toEqual(['owner']); + }); + + it('should skip explicitly defined owner field', () => { + expect(implicitAuthFieldsForModel(ownerAuthExplicit)).toEqual([]); + }); + + it('should extract implicitly defined groups field from model attributes', () => { + expect(implicitAuthFieldsForModel(groupsAuthImplicit)).toEqual([ + 'allowedGroups', + ]); + }); + + it('should skip explicitly defined groups field', () => { + expect(implicitAuthFieldsForModel(groupsAuthExplicit)).toEqual([]); + }); + }); + + describe('queryByIdStatement', () => { + it('should generate valid SELECT by id statement', () => { + const model = new Model({ + field1: 'test', + dateCreated: new Date().toISOString(), + }); + + const expected = [`SELECT * FROM "Model" WHERE "id" = ?`, [model.id]]; + + expect(queryByIdStatement(model.id, 'Model')).toEqual(expected); + }); + }); + + describe('queryAllStatement', () => { + it('should generate valid SELECT all statement', () => { + const expected = [`SELECT * FROM "Model" ORDER BY _rowid_ ASC`, []]; + + expect(queryAllStatement('Model')).toEqual(expected); + }); + + it('should generate valid SELECT all statement - with predicates, sort, & pagination', () => { + const tableName = 'Model'; + + const predicateGroup = { + type: 'and', + predicates: [ + { + field: 'firstName', + operator: 'eq', + operand: 'Bob', + }, + { + field: 'lastName', + operator: 'beginsWith', + operand: 'Sm', + }, + { + field: 'sortOrder', + operator: 'gt', + operand: 5, + }, + ], + }; + + const sortPredicateGroup = [ + { + field: 'sortOrder', + sortDirection: 'ASCENDING', + }, + { + field: 'lastName', + sortDirection: 'DESCENDING', + }, + ]; + + const limit = 10; + const page = 3; + + const expected = [ + `SELECT * FROM "Model" WHERE ("firstName" = ? AND "lastName" LIKE ? AND "sortOrder" > ?) ORDER BY "sortOrder" ASC, "lastName" DESC, _rowid_ ASC LIMIT ? 
OFFSET ?`, + ['Bob', 'Sm%', 5, 10, 30], + ]; + + expect( + queryAllStatement( + tableName, + predicateGroup as any, + sortPredicateGroup as any, + limit, + page + ) + ).toEqual(expected); + }); + }); + + describe('queryOneStatement', () => { + it('should generate valid SELECT statement for query first', () => { + const expected = [`SELECT * FROM Model ORDER BY _rowid_ LIMIT 1`, []]; + + expect(queryOneStatement(QueryOne.FIRST, 'Model')).toEqual(expected); + }); + + it('should generate valid SELECT statement for query last', () => { + const expected = [ + `SELECT * FROM Model ORDER BY _rowid_ DESC LIMIT 1`, + [], + ]; + + expect(queryOneStatement(QueryOne.LAST, 'Model')).toEqual(expected); + }); + }); + + describe('modelInsertStatement', () => { + it('should generate valid SELECT by id statement', () => { + const model = new Model({ + field1: 'test', + dateCreated: new Date().toISOString(), + }); + + const expected = [ + 'INSERT INTO "Model" ("field1", "dateCreated", "id", "_version", "_lastChangedAt", "_deleted") VALUES (?, ?, ?, ?, ?, ?)', + [ + model.field1, + model.dateCreated, + model.id, + undefined, + undefined, + undefined, + ], + ]; + + expect(modelInsertStatement(model, 'Model')).toEqual(expected); + }); + }); + + describe('modelUpdateStatement', () => { + it('should generate valid UPDATE by id statement', () => { + const model = new Model({ + field1: 'test', + dateCreated: new Date().toISOString(), + }); + + const expected = [ + `UPDATE "Model" SET "field1"=?, "dateCreated"=?, "_version"=?, "_lastChangedAt"=?, "_deleted"=? WHERE id=?`, + [ + model.field1, + model.dateCreated, + undefined, + undefined, + undefined, + model.id, + ], + ]; + + expect(modelUpdateStatement(model, 'Model')).toEqual(expected); + }); + }); + + describe('whereClauseFromPredicate', () => { + it('should generate valid WHERE clause from predicate', () => { + const predicateGroup = { + type: 'and', + predicates: [ + { + field: 'firstName', + operator: 'eq', + operand: 'Bob', + }, + { + field: 'lastName', + operator: 'beginsWith', + operand: 'Sm', + }, + { + field: 'sortOrder', + operator: 'gt', + operand: 5, + }, + ], + }; + + const expected = [ + `WHERE ("firstName" = ? AND "lastName" LIKE ? AND "sortOrder" > ?)`, + ['Bob', 'Sm%', 5], + ]; + + expect(whereClauseFromPredicate(predicateGroup as any)).toEqual(expected); + }); + }); + + describe('limitClauseFromPagination', () => { + it('should generate valid LIMIT clause from pagination limit', () => { + const limit = 10; + + const expected = ['LIMIT ?', [10]]; + + expect(limitClauseFromPagination(limit)).toEqual(expected); + }); + + it('should generate valid LIMIT clause from pagination limit and page', () => { + const limit = 10; + const page = 3; + + const expected = ['LIMIT ? 
OFFSET ?', [10, 30]]; + + expect(limitClauseFromPagination(limit, page)).toEqual(expected); + }); + }); + + describe('orderByClauseFromSort', () => { + it('should generate valid ORDER BY clause from pagination sort', () => { + const sortPredicateGroup = [ + { + field: 'sortOrder', + sortDirection: 'ASCENDING', + }, + ]; + + const expected = 'ORDER BY "sortOrder" ASC, _rowid_ ASC'; + + expect(orderByClauseFromSort(sortPredicateGroup as any)).toEqual( + expected + ); + }); + + it('should generate valid ORDER BY clause from pagination sort - multi field', () => { + const sortPredicateGroup = [ + { + field: 'sortOrder', + sortDirection: 'ASCENDING', + }, + { + field: 'lastName', + sortDirection: 'DESCENDING', + }, + ]; + + const expected = 'ORDER BY "sortOrder" ASC, "lastName" DESC, _rowid_ ASC'; + + expect(orderByClauseFromSort(sortPredicateGroup as any)).toEqual( + expected + ); + }); + }); + + describe('deleteByIdStatement', () => { + it('should generate valid DELETE statement', () => { + const model = new Model({ + field1: 'test', + dateCreated: new Date().toISOString(), + }); + + const expected = ['DELETE FROM "Model" WHERE "id"=?', [model.id]]; + + expect(deleteByIdStatement(model.id, 'Model')).toEqual(expected); + }); + }); + + describe('deleteByPredicateStatement', () => { + it('should generate valid DELETE statement', () => { + const model = new Model({ + field1: 'test', + dateCreated: new Date().toISOString(), + }); + + const predicateGroup = { + type: 'and', + predicates: [ + { + field: 'createdAt', + operator: 'gt', + operand: '2021-06-20', + }, + ], + }; + + const expected = [ + 'DELETE FROM "Model" WHERE ("createdAt" > ?)', + ['2021-06-20'], + ]; + + expect( + deleteByPredicateStatement('Model', predicateGroup as any) + ).toEqual(expected); + }); + }); +}); + +const postEditorImplicit: SchemaModel = { + name: 'PostEditor', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + post: { + name: 'post', + isArray: false, + type: { model: 'Post' }, + isRequired: true, + attributes: [], + association: { + connectionType: 'BELONGS_TO', + targetName: 'postID', + }, + }, + editor: { + name: 'editor', + isArray: false, + type: { model: 'User' }, + isRequired: true, + attributes: [], + association: { + connectionType: 'BELONGS_TO', + targetName: 'editorID', + }, + }, + createdAt: { + name: 'createdAt', + isArray: false, + type: 'AWSDateTime', + isRequired: false, + attributes: [], + }, + updatedAt: { + name: 'updatedAt', + isArray: false, + type: 'AWSDateTime', + isRequired: false, + attributes: [], + }, + }, + syncable: true, + pluralName: 'PostEditors', + attributes: [ + { type: 'model', properties: { queries: null } }, + { + type: 'key', + properties: { + name: 'byPost', + fields: ['postID', 'editorID'], + }, + }, + { + type: 'key', + properties: { + name: 'byEditor', + fields: ['editorID', 'postID'], + }, + }, + ], +}; + +const postEditorExplicit: SchemaModel = { + name: 'PostEditor', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + post: { + name: 'post', + isArray: false, + type: { model: 'Post' }, + isRequired: true, + attributes: [], + association: { + connectionType: 'BELONGS_TO', + targetName: 'postID', + }, + }, + postID: { + name: 'postID', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + editor: { + name: 'editor', + isArray: false, + type: { model: 'User' }, + isRequired: true, + attributes: [], + association: { + connectionType: 
'BELONGS_TO', + targetName: 'editorID', + }, + }, + editorID: { + name: 'editorID', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + createdAt: { + name: 'createdAt', + isArray: false, + type: 'AWSDateTime', + isRequired: false, + attributes: [], + }, + updatedAt: { + name: 'updatedAt', + isArray: false, + type: 'AWSDateTime', + isRequired: false, + attributes: [], + }, + }, + syncable: true, + pluralName: 'PostEditors', + attributes: [ + { type: 'model', properties: { queries: null } }, + { + type: 'key', + properties: { name: 'byPost', fields: ['postID', 'editorID'] }, + }, + { + type: 'key', + properties: { + name: 'byEditor', + fields: ['editorID', 'postID'], + }, + }, + ], +}; + +const ownerAuthImplicit: SchemaModel = { + name: 'OwnerAuthImplicit', + pluralName: 'OwnerAuthImplicit', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + }, + attributes: [ + { + type: 'auth', + properties: { + rules: [ + { + provider: 'userPools', + ownerField: 'owner', + allow: 'owner', + identityClaim: 'cognito:username', + operations: ['create', 'update', 'delete', 'read'], + }, + { + allow: 'public', + operations: ['read'], + }, + ], + }, + }, + ], +}; + +const ownerAuthExplicit: SchemaModel = { + name: 'OwnerAuthImplicit', + pluralName: 'OwnerAuthImplicit', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + owner: { + name: 'owner', + isArray: false, + type: 'String', + isRequired: false, + attributes: [], + }, + }, + attributes: [ + { + type: 'auth', + properties: { + rules: [ + { + provider: 'userPools', + ownerField: 'owner', + allow: 'owner', + identityClaim: 'cognito:username', + operations: ['create', 'update', 'delete', 'read'], + }, + { + allow: 'public', + operations: ['read'], + }, + ], + }, + }, + ], +}; + +const groupsAuthImplicit: SchemaModel = { + name: 'OwnerAuthImplicit', + pluralName: 'OwnerAuthImplicit', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + }, + attributes: [ + { + type: 'auth', + properties: { + rules: [ + { + groupClaim: 'cognito:groups', + provider: 'userPools', + allow: 'groups', + groupsField: 'allowedGroups', + operations: ['create', 'update', 'delete', 'read'], + }, + { + allow: 'public', + operations: ['read'], + }, + ], + }, + }, + ], +}; + +const groupsAuthExplicit: SchemaModel = { + name: 'OwnerAuthImplicit', + pluralName: 'OwnerAuthImplicit', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + allowedGroups: { + name: 'allowedGroups', + isArray: false, + type: 'String', + isRequired: false, + attributes: [], + }, + }, + attributes: [ + { + type: 'auth', + properties: { + rules: [ + { + groupClaim: 'cognito:groups', + provider: 'userPools', + allow: 'groups', + groupsField: 'allowedGroups', + operations: ['create', 'update', 'delete', 'read'], + }, + { + allow: 'public', + operations: ['read'], + }, + ], + }, + }, + ], +}; diff --git a/packages/datastore-storage-adapter/__tests__/helpers.ts b/packages/datastore-storage-adapter/__tests__/helpers.ts new file mode 100644 index 00000000000..8e2b00a1feb --- /dev/null +++ b/packages/datastore-storage-adapter/__tests__/helpers.ts @@ -0,0 +1,912 @@ +import { + ModelInit, + MutableModel, + Schema, + InternalSchema, + SchemaModel, +} from '@aws-amplify/datastore'; + +export declare class Model { + public readonly id: string; + public readonly field1: 
string; + public readonly optionalField1?: string; + public readonly dateCreated: string; + public readonly emails?: string[]; + public readonly ips?: (string | null)[]; + public readonly metadata?: Metadata; + public readonly createdAt?: string; + public readonly updatedAt?: string; + + constructor(init: ModelInit); + + static copyOf( + src: Model, + mutator: (draft: MutableModel) => void | Model + ): Model; +} +export declare class Metadata { + readonly author: string; + readonly tags?: string[]; + readonly rewards: string[]; + readonly penNames: string[]; + readonly nominations?: string[]; + readonly misc?: (string | null)[]; + constructor(init: Metadata); +} + +export declare class Post { + public readonly id: string; + public readonly title: string; +} + +export declare class Comment { + public readonly id: string; + public readonly content: string; + public readonly post: Post; +} + +export declare class User { + public readonly id: string; + public readonly name: string; + public readonly profileID: string; +} +export declare class Profile { + public readonly id: string; + public readonly firstName: string; + public readonly lastName: string; +} + +export declare class PostComposite { + public readonly id: string; + public readonly title: string; + public readonly description: string; + public readonly created: string; + public readonly sort: number; +} + +export declare class PostCustomPK { + public readonly id: string; + public readonly postId: number; + public readonly title: string; + public readonly description?: string; +} + +export declare class PostCustomPKSort { + public readonly id: string; + public readonly postId: number; + public readonly title: string; + public readonly description?: string; +} +export declare class PostCustomPKComposite { + public readonly id: string; + public readonly postId: number; + public readonly title: string; + public readonly description?: string; + public readonly sort: number; +} + +export function testSchema(): Schema { + return { + enums: {}, + models: { + Model: { + name: 'Model', + pluralName: 'Models', + syncable: true, + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + }, + field1: { + name: 'field1', + isArray: false, + type: 'String', + isRequired: true, + }, + optionalField1: { + name: 'optionalField1', + isArray: false, + type: 'String', + isRequired: false, + }, + dateCreated: { + name: 'dateCreated', + isArray: false, + type: 'AWSDateTime', + isRequired: true, + attributes: [], + }, + emails: { + name: 'emails', + isArray: true, + type: 'AWSEmail', + isRequired: true, + attributes: [], + isArrayNullable: true, + }, + ips: { + name: 'ips', + isArray: true, + type: 'AWSIPAddress', + isRequired: false, + attributes: [], + isArrayNullable: true, + }, + metadata: { + name: 'metadata', + isArray: false, + type: { + nonModel: 'Metadata', + }, + isRequired: false, + attributes: [], + }, + createdAt: { + name: 'createdAt', + isArray: false, + type: 'AWSDateTime', + isRequired: false, + attributes: [], + isReadOnly: true, + }, + updatedAt: { + name: 'updatedAt', + isArray: false, + type: 'AWSDateTime', + isRequired: false, + attributes: [], + isReadOnly: true, + }, + }, + }, + Post: { + name: 'Post', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + title: { + name: 'title', + isArray: false, + type: 'String', + isRequired: true, + attributes: [], + }, + comments: { + name: 'comments', + isArray: true, + type: { + model: 'Comment', + }, + 
isRequired: true, + attributes: [], + isArrayNullable: true, + association: { + connectionType: 'HAS_MANY', + associatedWith: 'postId', + }, + }, + }, + syncable: true, + pluralName: 'Posts', + attributes: [ + { + type: 'model', + properties: {}, + }, + ], + }, + Comment: { + name: 'Comment', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + content: { + name: 'content', + isArray: false, + type: 'String', + isRequired: true, + attributes: [], + }, + post: { + name: 'post', + isArray: false, + type: { + model: 'Post', + }, + isRequired: false, + attributes: [], + association: { + connectionType: 'BELONGS_TO', + targetName: 'postId', + }, + }, + }, + syncable: true, + pluralName: 'Comments', + attributes: [ + { + type: 'model', + properties: {}, + }, + { + type: 'key', + properties: { + name: 'byPost', + fields: ['postId'], + }, + }, + ], + }, + LocalModel: { + name: 'LocalModel', + pluralName: 'LocalModels', + syncable: false, + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + }, + field1: { + name: 'field1', + isArray: false, + type: 'String', + isRequired: true, + }, + }, + }, + User: { + name: 'User', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + name: { + name: 'name', + isArray: false, + type: 'String', + isRequired: false, + attributes: [], + }, + profileID: { + name: 'profileID', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + profile: { + name: 'profile', + isArray: false, + type: { + model: 'Profile', + }, + isRequired: false, + attributes: [], + association: { + connectionType: 'HAS_ONE', + associatedWith: 'id', + targetName: 'profileID', + }, + }, + }, + syncable: true, + pluralName: 'Users', + attributes: [ + { + type: 'model', + properties: {}, + }, + ], + }, + Profile: { + name: 'Profile', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + firstName: { + name: 'firstName', + isArray: false, + type: 'String', + isRequired: true, + attributes: [], + }, + lastName: { + name: 'lastName', + isArray: false, + type: 'String', + isRequired: true, + attributes: [], + }, + }, + syncable: true, + pluralName: 'Profiles', + attributes: [ + { + type: 'model', + properties: {}, + }, + ], + }, + PostComposite: { + name: 'PostComposite', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + title: { + name: 'title', + isArray: false, + type: 'String', + isRequired: true, + attributes: [], + }, + description: { + name: 'description', + isArray: false, + type: 'String', + isRequired: false, + attributes: [], + }, + created: { + name: 'created', + isArray: false, + type: 'String', + isRequired: false, + attributes: [], + }, + sort: { + name: 'sort', + isArray: false, + type: 'Int', + isRequired: false, + attributes: [], + }, + }, + syncable: true, + pluralName: 'PostComposites', + attributes: [ + { + type: 'model', + properties: {}, + }, + { + type: 'key', + properties: { + name: 'titleCreatedSort', + fields: ['title', 'created', 'sort'], + }, + }, + ], + }, + PostCustomPK: { + name: 'PostCustomPK', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + postId: { + name: 'postId', + isArray: false, + type: 'Int', + isRequired: true, + attributes: [], + }, + title: { + name: 'title', + isArray: false, + type: 'String', + isRequired: true, 
+ attributes: [], + }, + description: { + name: 'description', + isArray: false, + type: 'String', + isRequired: false, + attributes: [], + }, + }, + syncable: true, + pluralName: 'PostCustomPKS', + attributes: [ + { + type: 'model', + properties: {}, + }, + { + type: 'key', + properties: { + fields: ['postId'], + }, + }, + ], + }, + PostCustomPKSort: { + name: 'PostCustomPKSort', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + postId: { + name: 'postId', + isArray: false, + type: 'Int', + isRequired: true, + attributes: [], + }, + title: { + name: 'title', + isArray: false, + type: 'String', + isRequired: true, + attributes: [], + }, + description: { + name: 'description', + isArray: false, + type: 'String', + isRequired: false, + attributes: [], + }, + }, + syncable: true, + pluralName: 'PostCustomPKSorts', + attributes: [ + { + type: 'model', + properties: {}, + }, + { + type: 'key', + properties: { + fields: ['id', 'postId'], + }, + }, + ], + }, + PostCustomPKComposite: { + name: 'PostCustomPKComposite', + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + attributes: [], + }, + postId: { + name: 'postId', + isArray: false, + type: 'Int', + isRequired: true, + attributes: [], + }, + title: { + name: 'title', + isArray: false, + type: 'String', + isRequired: true, + attributes: [], + }, + description: { + name: 'description', + isArray: false, + type: 'String', + isRequired: false, + attributes: [], + }, + sort: { + name: 'sort', + isArray: false, + type: 'Int', + isRequired: true, + attributes: [], + }, + }, + syncable: true, + pluralName: 'PostCustomPKComposites', + attributes: [ + { + type: 'model', + properties: {}, + }, + { + type: 'key', + properties: { + fields: ['id', 'postId', 'sort'], + }, + }, + ], + }, + }, + nonModels: { + Metadata: { + name: 'Metadata', + fields: { + author: { + name: 'author', + isArray: false, + type: 'String', + isRequired: true, + attributes: [], + }, + tags: { + name: 'tags', + isArray: true, + type: 'String', + isRequired: false, + isArrayNullable: true, + attributes: [], + }, + rewards: { + name: 'rewards', + isArray: true, + type: 'String', + isRequired: true, + attributes: [], + }, + penNames: { + name: 'penNames', + isArray: true, + type: 'String', + isRequired: true, + isArrayNullable: true, + attributes: [], + }, + nominations: { + name: 'nominations', + isArray: true, + type: 'String', + isRequired: false, + attributes: [], + }, + misc: { + name: 'misc', + isArray: true, + type: 'String', + isRequired: false, + isArrayNullable: true, + attributes: [], + }, + }, + }, + }, + version: '1', + }; +} + +export function internalTestSchema(): InternalSchema { + return { + namespaces: { + datastore: { + name: 'datastore', + relationships: { + Setting: { + indexes: [], + relationTypes: [], + }, + }, + enums: {}, + nonModels: {}, + models: { + Setting: { + name: 'Setting', + pluralName: 'Settings', + syncable: false, + fields: { + id: { + name: 'id', + type: 'ID', + isRequired: true, + isArray: false, + }, + key: { + name: 'key', + type: 'String', + isRequired: true, + isArray: false, + }, + value: { + name: 'value', + type: 'String', + isRequired: true, + isArray: false, + }, + }, + }, + }, + }, + user: { + name: 'user', + enums: {}, + models: { + Model: { + name: 'Model', + pluralName: 'Models', + syncable: true, + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + }, + field1: { + name: 'field1', + isArray: false, + 
type: 'String', + isRequired: true, + }, + optionalField1: { + name: 'optionalField1', + isArray: false, + type: 'String', + isRequired: false, + }, + dateCreated: { + name: 'dateCreated', + isArray: false, + type: 'AWSDateTime', + isRequired: true, + attributes: [], + }, + emails: { + name: 'emails', + isArray: true, + type: 'AWSEmail', + isRequired: true, + attributes: [], + isArrayNullable: true, + }, + ips: { + name: 'ips', + isArray: true, + type: 'AWSIPAddress', + isRequired: false, + attributes: [], + isArrayNullable: true, + }, + metadata: { + name: 'metadata', + isArray: false, + type: { + nonModel: 'Metadata', + }, + isRequired: false, + attributes: [], + }, + }, + }, + LocalModel: { + name: 'LocalModel', + pluralName: 'LocalModels', + syncable: false, + fields: { + id: { + name: 'id', + isArray: false, + type: 'ID', + isRequired: true, + }, + field1: { + name: 'field1', + isArray: false, + type: 'String', + isRequired: true, + }, + }, + }, + }, + nonModels: { + Metadata: { + name: 'Metadata', + fields: { + author: { + name: 'author', + isArray: false, + type: 'String', + isRequired: true, + attributes: [], + }, + tags: { + name: 'tags', + isArray: true, + type: 'String', + isRequired: false, + isArrayNullable: true, + attributes: [], + }, + rewards: { + name: 'rewards', + isArray: true, + type: 'String', + isRequired: true, + attributes: [], + }, + penNames: { + name: 'penNames', + isArray: true, + type: 'String', + isRequired: true, + isArrayNullable: true, + attributes: [], + }, + nominations: { + name: 'nominations', + isArray: true, + type: 'String', + isRequired: false, + attributes: [], + }, + misc: { + name: 'misc', + isArray: true, + type: 'String', + isRequired: false, + isArrayNullable: true, + attributes: [], + }, + }, + }, + }, + relationships: { + Model: { + indexes: [], + relationTypes: [], + }, + LocalModel: { + indexes: [], + relationTypes: [], + }, + }, + }, + sync: { + name: 'sync', + relationships: { + MutationEvent: { + indexes: [], + relationTypes: [], + }, + ModelMetadata: { + indexes: [], + relationTypes: [], + }, + }, + enums: { + OperationType: { + name: 'OperationType', + values: ['CREATE', 'UPDATE', 'DELETE'], + }, + }, + nonModels: {}, + models: { + MutationEvent: { + name: 'MutationEvent', + pluralName: 'MutationEvents', + syncable: false, + fields: { + id: { + name: 'id', + type: 'ID', + isRequired: true, + isArray: false, + }, + model: { + name: 'model', + type: 'String', + isRequired: true, + isArray: false, + }, + data: { + name: 'data', + type: 'String', + isRequired: true, + isArray: false, + }, + modelId: { + name: 'modelId', + type: 'String', + isRequired: true, + isArray: false, + }, + operation: { + name: 'operation', + type: { + enum: 'Operationtype', + }, + isArray: false, + isRequired: true, + }, + condition: { + name: 'condition', + type: 'String', + isArray: false, + isRequired: true, + }, + }, + }, + ModelMetadata: { + name: 'ModelMetadata', + pluralName: 'ModelsMetadata', + syncable: false, + fields: { + id: { + name: 'id', + type: 'ID', + isRequired: true, + isArray: false, + }, + namespace: { + name: 'namespace', + type: 'String', + isRequired: true, + isArray: false, + }, + model: { + name: 'model', + type: 'String', + isRequired: true, + isArray: false, + }, + lastSync: { + name: 'lastSync', + type: 'Int', + isRequired: false, + isArray: false, + }, + lastFullSync: { + name: 'lastFullSync', + type: 'Int', + isRequired: false, + isArray: false, + }, + fullSyncInterval: { + name: 'fullSyncInterval', + type: 'Int', + isRequired: 
true, + isArray: false, + }, + }, + }, + }, + }, + }, + version: '1', + }; +} diff --git a/packages/datastore-storage-adapter/build.js b/packages/datastore-storage-adapter/build.js new file mode 100644 index 00000000000..35e84b281a1 --- /dev/null +++ b/packages/datastore-storage-adapter/build.js @@ -0,0 +1,5 @@ +'use strict'; + +const build = require('../../scripts/build'); + +build(process.argv[2], process.argv[3]); diff --git a/packages/datastore-storage-adapter/index.js b/packages/datastore-storage-adapter/index.js new file mode 100644 index 00000000000..b47ac4febbd --- /dev/null +++ b/packages/datastore-storage-adapter/index.js @@ -0,0 +1,7 @@ +'use strict'; + +if (process.env.NODE_ENV === 'production') { + module.exports = require('./dist/aws-amplify-datastore-storage-adapter.min.js'); +} else { + module.exports = require('./dist/aws-amplify-datastore-storage-adapter.js'); +} diff --git a/packages/datastore-storage-adapter/package.json b/packages/datastore-storage-adapter/package.json new file mode 100644 index 00000000000..8c7a8634d58 --- /dev/null +++ b/packages/datastore-storage-adapter/package.json @@ -0,0 +1,93 @@ +{ + "name": "@aws-amplify/datastore-storage-adapter", + "version": "1.0.0", + "description": "SQLite storage adapter for Amplify DataStore ", + "main": "./lib/index.js", + "module": "./lib-esm/index.js", + "typings": "./lib-esm/index.d.ts", + "react-native": { + "./lib/index": "./lib-esm/index.js" + }, + "publishConfig": { + "access": "public" + }, + "scripts": { + "test": "npm run lint && jest -w 1 --coverage", + "build-with-test": "npm test && npm run build", + "build:cjs": "node ./build es5 && webpack && webpack --config ./webpack.config.dev.js", + "build:esm": "node ./build es6", + "build:cjs:watch": "node ./build es5 --watch", + "build:esm:watch": "rimraf lib-esm && node ./build es6 --watch", + "build": "yarn clean && yarn build:esm && npm run build:cjs", + "clean": "rimraf lib-esm lib dist", + "format": "echo \"Not implemented\"", + "lint": "tslint '{__tests__,src}/**/*.ts'" + }, + "repository": { + "type": "git", + "url": "https://github.com/aws-amplify/amplify-js.git" + }, + "author": "Amazon Web Services", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/aws/aws-amplify/issues" + }, + "homepage": "https://aws-amplify.github.io/", + "dependencies": { + "@aws-amplify/core": "4.2.5", + "@aws-amplify/datastore": "3.3.3" + }, + "devDependencies": { + "react-native-sqlite-storage": "5.0.0" + }, + "jest": { + "globals": { + "ts-jest": { + "diagnostics": true, + "tsConfig": { + "lib": [ + "es5", + "es2015", + "esnext.asynciterable", + "es2019" + ], + "allowJs": true, + "esModuleInterop": true, + "downlevelIteration": true + } + } + }, + "transform": { + "^.+\\.(js|jsx|ts|tsx)$": "ts-jest" + }, + "testRegex": "(/__tests__/.*|\\.(test|spec))\\.(tsx?|jsx?)$", + "testPathIgnorePatterns": [ + "__tests__/model.ts", + "__tests__/schema.ts", + "__tests__/helpers.ts" + ], + "moduleFileExtensions": [ + "ts", + "tsx", + "js", + "json", + "jsx" + ], + "testEnvironment": "jsdom", + "testURL": "http://localhost/", + "coverageThreshold": { + "global": { + "branches": 0, + "functions": 0, + "lines": 0, + "statements": 0 + } + }, + "coveragePathIgnorePatterns": [ + "/node_modules/", + "dist", + "lib", + "lib-esm" + ] + } +} diff --git a/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteAdapter.ts b/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteAdapter.ts new file mode 100644 index 00000000000..1ec312e4c10 --- /dev/null +++ 
b/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteAdapter.ts @@ -0,0 +1,480 @@ +import { ConsoleLogger as Logger } from '@aws-amplify/core'; +import SQLiteDatabase from './SQLiteDatabase'; +import { + generateSchemaStatements, + queryByIdStatement, + modelUpdateStatement, + modelInsertStatement, + queryAllStatement, + queryOneStatement, + deleteByIdStatement, + deleteByPredicateStatement, + ParameterizedStatement, +} from './SQLiteUtils'; + +import { + StorageAdapter, + ModelInstanceCreator, + ModelPredicateCreator, + ModelSortPredicateCreator, + InternalSchema, + isPredicateObj, + ModelInstanceMetadata, + ModelPredicate, + NamespaceResolver, + OpType, + PaginationInput, + PersistentModel, + PersistentModelConstructor, + PredicateObject, + PredicatesGroup, + QueryOne, + utils, +} from '@aws-amplify/datastore'; + +const { traverseModel, validatePredicate, isModelConstructor } = utils; + +const logger = new Logger('DataStore'); +export class SQLiteAdapter implements StorageAdapter { + private schema: InternalSchema; + private namespaceResolver: NamespaceResolver; + private modelInstanceCreator: ModelInstanceCreator; + private getModelConstructorByModelName: ( + namsespaceName: string, + modelName: string + ) => PersistentModelConstructor; + private db: SQLiteDatabase; + private initPromise: Promise; + private resolve: (value?: any) => void; + private reject: (value?: any) => void; + + public async setUp( + theSchema: InternalSchema, + namespaceResolver: NamespaceResolver, + modelInstanceCreator: ModelInstanceCreator, + getModelConstructorByModelName: ( + namsespaceName: string, + modelName: string + ) => PersistentModelConstructor + ) { + if (!this.initPromise) { + this.initPromise = new Promise((res, rej) => { + this.resolve = res; + this.reject = rej; + }); + } else { + await this.initPromise; + return; + } + this.schema = theSchema; + this.namespaceResolver = namespaceResolver; + this.modelInstanceCreator = modelInstanceCreator; + this.getModelConstructorByModelName = getModelConstructorByModelName; + + try { + if (!this.db) { + this.db = new SQLiteDatabase(); + await this.db.init(); + + const statements = generateSchemaStatements(this.schema); + await this.db.createSchema(statements); + this.resolve(); + } + } catch (error) { + this.reject(error); + } + } + + async clear(): Promise { + await this.db.clear(); + + this.db = undefined; + this.initPromise = undefined; + } + + async save( + model: T, + condition?: ModelPredicate + ): Promise<[T, OpType.INSERT | OpType.UPDATE][]> { + const modelConstructor = Object.getPrototypeOf(model) + .constructor as PersistentModelConstructor; + const { name: tableName } = modelConstructor; + const connectedModels = traverseModel( + modelConstructor.name, + model, + this.schema.namespaces[this.namespaceResolver(modelConstructor)], + this.modelInstanceCreator, + this.getModelConstructorByModelName + ); + const connectionStoreNames = Object.values(connectedModels).map( + ({ modelName, item, instance }) => { + return { modelName, item, instance }; + } + ); + + const [queryStatement, params] = queryByIdStatement(model.id, tableName); + + const fromDB = await this.db.get(queryStatement, params); + + if (condition && fromDB) { + const predicates = ModelPredicateCreator.getPredicates(condition); + const { predicates: predicateObjs, type } = predicates; + + const isValid = validatePredicate(fromDB, type, predicateObjs); + + if (!isValid) { + const msg = 'Conditional update failed'; + logger.error(msg, { model: fromDB, condition: predicateObjs }); + + 
throw new Error(msg); + } + } + + const result: [T, OpType.INSERT | OpType.UPDATE][] = []; + const saveStatements = new Set(); + + for await (const resItem of connectionStoreNames) { + const { modelName, item, instance } = resItem; + const { id } = item; + + const [queryStatement, params] = queryByIdStatement(id, modelName); + const fromDB = await this.db.get(queryStatement, params); + + const opType: OpType = + fromDB === undefined ? OpType.INSERT : OpType.UPDATE; + + const saveStatement = fromDB + ? modelUpdateStatement(instance, modelName) + : modelInsertStatement(instance, modelName); + + saveStatements.add(saveStatement); + + result.push([instance, opType]); + } + + await this.db.batchSave(saveStatements); + + return result; + } + + private async load( + namespaceName: string, + srcModelName: string, + records: T[] + ): Promise { + const namespace = this.schema.namespaces[namespaceName]; + const relations = namespace.relationships[srcModelName].relationTypes; + const connectionTableNames = relations.map(({ modelName }) => modelName); + + const modelConstructor = this.getModelConstructorByModelName( + namespaceName, + srcModelName + ); + + if (connectionTableNames.length === 0) { + return records.map(record => + this.modelInstanceCreator(modelConstructor, record) + ); + } + + for await (const relation of relations) { + const { + fieldName, + modelName: tableName, + targetName, + relationType, + } = relation; + + const modelConstructor = this.getModelConstructorByModelName( + namespaceName, + tableName + ); + + // TODO: use SQL JOIN instead + switch (relationType) { + case 'HAS_ONE': + for await (const recordItem of records) { + if (recordItem[fieldName]) { + const [queryStatement, params] = queryByIdStatement( + recordItem[fieldName], + tableName + ); + + const connectionRecord = await this.db.get( + queryStatement, + params + ); + + recordItem[fieldName] = + connectionRecord && + this.modelInstanceCreator(modelConstructor, connectionRecord); + } + } + + break; + case 'BELONGS_TO': + for await (const recordItem of records) { + if (recordItem[targetName]) { + const [queryStatement, params] = queryByIdStatement( + recordItem[targetName], + tableName + ); + const connectionRecord = await this.db.get( + queryStatement, + params + ); + + recordItem[fieldName] = + connectionRecord && + this.modelInstanceCreator(modelConstructor, connectionRecord); + delete recordItem[targetName]; + } + } + + break; + case 'HAS_MANY': + // TODO: Lazy loading + break; + default: + const _: never = relationType; + throw new Error(`invalid relation type ${relationType}`); + break; + } + } + + return records.map(record => + this.modelInstanceCreator(modelConstructor, record) + ); + } + + async query( + modelConstructor: PersistentModelConstructor, + predicate?: ModelPredicate, + pagination?: PaginationInput + ): Promise { + const { name: tableName } = modelConstructor; + const namespaceName = this.namespaceResolver(modelConstructor); + + const predicates = + predicate && ModelPredicateCreator.getPredicates(predicate); + const sortPredicates = + pagination && + pagination.sort && + ModelSortPredicateCreator.getPredicates(pagination.sort); + const limit = pagination && pagination.limit; + const page = limit && pagination.page; + + const queryById = predicates && this.idFromPredicate(predicates); + + const records: T[] = await (async () => { + if (queryById) { + const record = await this.getById(tableName, queryById); + return record ? 
[record] : []; + } + + const [queryStatement, params] = queryAllStatement( + tableName, + predicates, + sortPredicates, + limit, + page + ); + + return await this.db.getAll(queryStatement, params); + })(); + + return await this.load(namespaceName, modelConstructor.name, records); + } + + private async getById( + tableName: string, + id: string + ): Promise { + const [queryStatement, params] = queryByIdStatement(id, tableName); + const record = await this.db.get(queryStatement, params); + return record; + } + + private idFromPredicate( + predicates: PredicatesGroup + ) { + const { predicates: predicateObjs } = predicates; + const idPredicate = + predicateObjs.length === 1 && + (predicateObjs.find( + p => isPredicateObj(p) && p.field === 'id' && p.operator === 'eq' + ) as PredicateObject); + + return idPredicate && idPredicate.operand; + } + + async queryOne( + modelConstructor: PersistentModelConstructor, + firstOrLast: QueryOne = QueryOne.FIRST + ): Promise { + const { name: tableName } = modelConstructor; + const [queryStatement, params] = queryOneStatement(firstOrLast, tableName); + + const result = await this.db.get(queryStatement, params); + + const modelInstance = + result && this.modelInstanceCreator(modelConstructor, result); + + return modelInstance; + } + + // Currently does not cascade + // TODO: use FKs in relations and have `ON DELETE CASCADE` set + // For Has Many and Has One relations to have SQL handle cascades automatically + async delete( + modelOrModelConstructor: T | PersistentModelConstructor, + condition?: ModelPredicate + ): Promise<[T[], T[]]> { + if (isModelConstructor(modelOrModelConstructor)) { + const modelConstructor = modelOrModelConstructor; + const namespaceName = this.namespaceResolver(modelConstructor); + const { name: tableName } = modelConstructor; + + const predicates = + condition && ModelPredicateCreator.getPredicates(condition); + + const queryStatement = queryAllStatement(tableName, predicates); + const deleteStatement = deleteByPredicateStatement(tableName, predicates); + + const models = await this.db.selectAndDelete( + queryStatement, + deleteStatement + ); + + const modelInstances = await this.load( + namespaceName, + modelConstructor.name, + models + ); + + return [modelInstances, modelInstances]; + } else { + const model = modelOrModelConstructor as T; + const modelConstructor = Object.getPrototypeOf(model) + .constructor as PersistentModelConstructor; + const { name: tableName } = modelConstructor; + + if (condition) { + const [queryStatement, params] = queryByIdStatement( + model.id, + tableName + ); + + const fromDB = await this.db.get(queryStatement, params); + + if (fromDB === undefined) { + const msg = 'Model instance not found in storage'; + logger.warn(msg, { model }); + + return [[model], []]; + } + + const predicates = ModelPredicateCreator.getPredicates(condition); + const { predicates: predicateObjs, type } = predicates; + + const isValid = validatePredicate(fromDB, type, predicateObjs); + + if (!isValid) { + const msg = 'Conditional update failed'; + logger.error(msg, { model: fromDB, condition: predicateObjs }); + + throw new Error(msg); + } + + const [deleteStatement, deleteParams] = deleteByIdStatement( + model.id, + tableName + ); + await this.db.save(deleteStatement, deleteParams); + return [[model], [model]]; + } else { + const [deleteStatement, params] = deleteByIdStatement( + model.id, + tableName + ); + await this.db.save(deleteStatement, params); + return [[model], [model]]; + } + } + } + + async batchSave( + 
modelConstructor: PersistentModelConstructor, + items: ModelInstanceMetadata[] + ): Promise<[T, OpType][]> { + const { name: tableName } = modelConstructor; + + const result: [T, OpType][] = []; + + const itemsToSave: T[] = []; + // To determine whether an item should result in an insert or update operation + // We first need to query the local DB on the item id + const queryStatements = new Set(); + // Deletes don't need to be queried first, because if the item doesn't exist, + // the delete operation will be a no-op + const deleteStatements = new Set(); + const saveStatements = new Set(); + + for (const item of items) { + const connectedModels = traverseModel( + modelConstructor.name, + this.modelInstanceCreator(modelConstructor, item), + this.schema.namespaces[this.namespaceResolver(modelConstructor)], + this.modelInstanceCreator, + this.getModelConstructorByModelName + ); + + const { id, _deleted } = item; + + const { instance } = connectedModels.find( + ({ instance }) => instance.id === id + ); + + if (_deleted) { + // create the delete statements right away + const deleteStatement = deleteByIdStatement(instance.id, tableName); + deleteStatements.add(deleteStatement); + result.push([(item), OpType.DELETE]); + } else { + // query statements for the saves at first + const queryStatement = queryByIdStatement(id, tableName); + queryStatements.add(queryStatement); + // combination of insert and update items + itemsToSave.push(instance); + } + } + + // returns the query results for each of the save items + const queryResponses = await this.db.batchQuery(queryStatements); + + queryResponses.forEach((response, idx) => { + if (response === undefined) { + const insertStatement = modelInsertStatement( + itemsToSave[idx], + tableName + ); + saveStatements.add(insertStatement); + result.push([(itemsToSave[idx]), OpType.INSERT]); + } else { + const updateStatement = modelUpdateStatement( + itemsToSave[idx], + tableName + ); + saveStatements.add(updateStatement); + result.push([(itemsToSave[idx]), OpType.UPDATE]); + } + }); + + // perform all of the insert/update/delete operations in a single transaction + await this.db.batchSave(saveStatements, deleteStatements); + + return result; + } +} + +export default new SQLiteAdapter(); diff --git a/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteDatabase.ts b/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteDatabase.ts new file mode 100644 index 00000000000..782bab10745 --- /dev/null +++ b/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteDatabase.ts @@ -0,0 +1,167 @@ +import SQLite from 'react-native-sqlite-storage'; +import { ConsoleLogger as Logger } from '@aws-amplify/core'; +import { PersistentModel } from '@aws-amplify/datastore'; +import { ParameterizedStatement } from './SQLiteUtils'; + +const logger = new Logger('SQLiteDatabase'); + +SQLite.enablePromise(true); + +if (Logger.LOG_LEVEL === 'DEBUG') { + SQLite.DEBUG(true); +} + +const DB_NAME = 'AmplifyDatastore'; +const DB_DISPLAYNAME = 'AWS Amplify DataStore SQLite Database'; + +// TODO: make these configurable +const DB_SIZE = 200000; +const DB_VERSION = '1.0'; + +/* + +Note: +I purposely avoided using arrow functions () => {} in this class, +Because I ran into issues with them in some of the SQLite method callbacks + +Also, even though the SQLite library is promisified, certain operations +only work correctly with callbacks. 
Specifically, any time you need to +get the result of an `executeSql` command inside of a transaction +(see the batchQuery method below) + +*/ + +class SQLiteDatabase { + private db: SQLite.SQLiteDatabase; + + public async init(): Promise { + this.db = await SQLite.openDatabase( + DB_NAME, + DB_VERSION, + DB_DISPLAYNAME, + DB_SIZE + ); + } + + public async createSchema(statements: string[]) { + return await this.executeStatements(statements); + } + + public async clear() { + await this.closeDB(); + logger.debug('Deleting database'); + await SQLite.deleteDatabase(DB_NAME); + logger.debug('Database deleted'); + } + + public async get( + statement: string, + params: any[] + ): Promise { + const [resultSet] = await this.db.executeSql(statement, params); + const result = + resultSet && + resultSet.rows && + resultSet.rows.length && + resultSet.rows.raw && + resultSet.rows.raw(); + + return result[0] || undefined; + } + + public async getAll( + statement: string, + params: any[] + ): Promise { + const [resultSet] = await this.db.executeSql(statement, params); + const result = + resultSet && + resultSet.rows && + resultSet.rows.length && + resultSet.rows.raw && + resultSet.rows.raw(); + + return result || []; + } + + public async save(statement: string, params: any[]): Promise { + await this.db.executeSql(statement, params); + } + + public async batchQuery(queryStatements: Set) { + const results = []; + + await this.db.readTransaction(function(tx) { + for (const [statement, params] of queryStatements) { + tx.executeSql( + statement, + params, + function(_tx, res) { + results.push(res.rows.raw()[0]); + }, + logger.warn + ); + } + }); + + return results; + } + + public async batchSave( + saveStatements: Set, + deleteStatements?: Set + ) { + await this.db.transaction(function(tx) { + for (const [statement, params] of saveStatements) { + tx.executeSql(statement, params); + } + if (deleteStatements) { + for (const [statement, params] of deleteStatements) { + tx.executeSql(statement, params); + } + } + }); + } + + public async selectAndDelete( + query: ParameterizedStatement, + _delete: ParameterizedStatement + ) { + let results = []; + + const [queryStatement, queryParams] = query; + const [deleteStatement, deleteParams] = _delete; + + await this.db.transaction(function(tx) { + tx.executeSql( + queryStatement, + queryParams, + function(_tx, res) { + results = res.rows.raw(); + }, + logger.warn + ); + tx.executeSql(deleteStatement, deleteParams, () => {}, logger.warn); + }); + + return results; + } + + private async executeStatements(statements: string[]): Promise { + return await this.db.transaction(function(tx) { + for (const statement of statements) { + tx.executeSql(statement); + } + }); + } + + private async closeDB() { + if (this.db) { + logger.debug('Closing Database'); + await this.db.close(); + logger.debug('Database closed'); + } + } +} + +export default SQLiteDatabase; diff --git a/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteUtils.ts b/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteUtils.ts new file mode 100644 index 00000000000..8f33cbf5696 --- /dev/null +++ b/packages/datastore-storage-adapter/src/SQLiteAdapter/SQLiteUtils.ts @@ -0,0 +1,427 @@ +import { + InternalSchema, + SchemaModel, + ModelField, + PersistentModel, + isGraphQLScalarType, + QueryOne, + PredicatesGroup, + isPredicateObj, + SortPredicatesGroup, + PredicateObject, + isPredicateGroup, + isModelFieldType, + isTargetNameAssociation, + isModelAttributeAuth, + ModelAttributeAuth, + ModelAuthRule, + 
utils, +} from '@aws-amplify/datastore'; + +import { getSQLiteType } from './types'; + +const { USER, isNonModelConstructor, isModelConstructor } = utils; + +export type ParameterizedStatement = [string, any[]]; + +const keysFromModel = model => + Object.keys(model) + .map(k => `"${k}"`) + .join(', '); + +const valuesFromModel = (model): [string, any[]] => { + const values = Object.values(model).map(prepareValueForDML); + const paramaterized = values.map(() => '?').join(', '); + + return [paramaterized, values]; +}; + +const updateSet: (model: any) => [any, any] = model => { + const values = []; + const paramaterized = Object.entries(model) + .filter(([k]) => k !== 'id') + .map(([k, v]) => { + values.push(prepareValueForDML(v)); + return `"${k}"=?`; + }) + .join(', '); + + return [paramaterized, values]; +}; + +function prepareValueForDML(value: unknown): any { + const scalarTypes = ['string', 'number', 'boolean']; + + const isScalarType = + value === null || value === undefined || scalarTypes.includes(typeof value); + + if (isScalarType) { + return value; + } + + const isObjectType = + typeof value === 'object' && + (Object.getPrototypeOf(value).constructor === Object || + isNonModelConstructor(Object.getPrototypeOf(value).constructor) || + isModelConstructor(Object.getPrototypeOf(value).constructor)); + + if (Array.isArray(value) || isObjectType) { + return JSON.stringify(value); + } + + return `${value}`; +} + +export function generateSchemaStatements(schema: InternalSchema): string[] { + return Object.keys(schema.namespaces).flatMap(namespaceName => { + const namespace = schema.namespaces[namespaceName]; + const isUserModel = namespaceName === USER; + + return Object.values(namespace.models).map(model => + modelCreateTableStatement(model, isUserModel) + ); + }); +} + +export const implicitAuthFieldsForModel: (model: SchemaModel) => string[] = ( + model: SchemaModel +) => { + if (!model.attributes || !model.attributes.length) { + return []; + } + + const authRules: ModelAttributeAuth = model.attributes.find( + isModelAttributeAuth + ); + + if (!authRules) { + return []; + } + + const authFieldsForModel = authRules.properties.rules + .filter((rule: ModelAuthRule) => rule.ownerField || rule.groupsField) + .map((rule: ModelAuthRule) => rule.ownerField || rule.groupsField); + + return authFieldsForModel.filter((authField: string) => { + const authFieldExplicitlyDefined = Object.values(model.fields).find( + (f: ModelField) => f.name === authField + ); + return !authFieldExplicitlyDefined; + }); +}; + +export function modelCreateTableStatement( + model: SchemaModel, + userModel: boolean = false +): string { + // implicitly defined auth fields, e.g., `owner`, `groupsField`, etc. + const implicitAuthFields = implicitAuthFieldsForModel(model); + + let fields = Object.values(model.fields).reduce((acc, field: ModelField) => { + if (isGraphQLScalarType(field.type)) { + if (field.name === 'id') { + return acc + '"id" PRIMARY KEY NOT NULL'; + } + + let columnParam = `"${field.name}" ${getSQLiteType(field.type)}`; + + if (field.isRequired) { + columnParam += ' NOT NULL'; + } + + return acc + `, ${columnParam}`; + } + + if (isModelFieldType(field.type)) { + // add targetName as well as field name for BELONGS_TO relations + if (isTargetNameAssociation(field.association)) { + const required = field.isRequired ? 
' NOT NULL' : ''; + + let columnParam = `"${field.name}" TEXT`; + // check if this field has been explicitly defined in the model + const fkDefinedInModel = Object.values(model.fields).find( + (f: ModelField) => f.name === field.association.targetName + ); + + // only add auto-generate it if not + if (!fkDefinedInModel) { + columnParam += `, "${field.association.targetName}" TEXT${required}`; + } + + return acc + `, ${columnParam}`; + } + } + + // default to TEXT + let columnParam = `"${field.name}" TEXT`; + + if (field.isRequired) { + columnParam += ' NOT NULL'; + } + + return acc + `, ${columnParam}`; + }, ''); + + implicitAuthFields.forEach((authField: string) => { + fields += `, ${authField} TEXT`; + }); + + if (userModel) { + fields += + ', "_version" INTEGER, "_lastChangedAt" INTEGER, "_deleted" INTEGER'; + } + + const createTableStatement = `CREATE TABLE IF NOT EXISTS "${model.name}" (${fields});`; + return createTableStatement; +} + +export function modelInsertStatement( + model: PersistentModel, + tableName: string +): ParameterizedStatement { + const keys = keysFromModel(model); + const [paramaterized, values] = valuesFromModel(model); + + const insertStatement = `INSERT INTO "${tableName}" (${keys}) VALUES (${paramaterized})`; + + return [insertStatement, values]; +} + +export function modelUpdateStatement( + model: PersistentModel, + tableName: string +): ParameterizedStatement { + const [paramaterized, values] = updateSet(model); + + const updateStatement = `UPDATE "${tableName}" SET ${paramaterized} WHERE id=?`; + + return [updateStatement, [...values, model.id]]; +} + +export function queryByIdStatement( + id: string, + tableName: string +): ParameterizedStatement { + return [`SELECT * FROM "${tableName}" WHERE "id" = ?`, [id]]; +} + +/* + Predicates supported by DataStore: + + Strings: eq | ne | le | lt | ge | gt | contains | notContains | beginsWith | between + Numbers: eq | ne | le | lt | ge | gt | between + Lists: contains | notContains +*/ + +const comparisonOperatorMap = { + eq: '=', + ne: '!=', + le: '<=', + lt: '<', + ge: '>=', + gt: '>', +}; + +const logicalOperatorMap = { + beginsWith: 'LIKE', + contains: 'LIKE', + notContains: 'NOT LIKE', + between: 'BETWEEN', +}; + +const whereConditionFromPredicateObject = ({ + field, + operator, + operand, +}: { + field: string; + operator: + | keyof typeof logicalOperatorMap + | keyof typeof comparisonOperatorMap; + operand: any; +}): ParameterizedStatement => { + const comparisonOperator = comparisonOperatorMap[operator]; + + if (comparisonOperator) { + return [`"${field}" ${comparisonOperator} ?`, [operand]]; + } + + const logicalOperatorKey = operator; + const logicalOperator = logicalOperatorMap[logicalOperatorKey]; + + if (logicalOperator) { + let rightExp = []; + switch (logicalOperatorKey) { + case 'between': + rightExp = operand; // operand is a 2-tuple + break; + case 'beginsWith': + rightExp = [`${operand}%`]; + break; + case 'contains': + case 'notContains': + rightExp = [`%${operand}%`]; + break; + default: + const _: never = logicalOperatorKey; + // Incorrect WHERE clause can result in data loss + throw new Error('Cannot map predicate to a valid WHERE clause'); + } + return [ + `"${field}" ${logicalOperator} ${rightExp.map(_ => '?').join(' AND ')}`, + rightExp, + ]; + } +}; + +export function whereClauseFromPredicate( + predicate: PredicatesGroup +): ParameterizedStatement { + const result = []; + const params = []; + + recurse(predicate, result, params); + const whereClause = `WHERE ${result.join(' ')}`; + + 
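+	// Illustrative note (not in the original source): for a predicate group such as
+	// { type: 'and', predicates: [{ field: 'rating', operator: 'gt', operand: 4 }] },
+	// `result` ends up as ['("rating" > ?)'] and `params` as [4], so the function
+	// returns ['WHERE ("rating" > ?)', [4]].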
return [whereClause, params]; + + function recurse( + predicate: PredicatesGroup | PredicateObject, + result = [], + params = [] + ): void { + if (isPredicateGroup(predicate)) { + const { type: groupType, predicates: groupPredicates } = predicate; + let filterType: string = ''; + let isNegation = false; + switch (groupType) { + case 'not': + isNegation = true; + break; + case 'and': + filterType = 'AND'; + break; + case 'or': + filterType = 'OR'; + break; + default: + const _: never = groupType; + throw new Error(`Invalid ${groupType}`); + } + + const groupResult = []; + for (const p of groupPredicates) { + recurse(p, groupResult, params); + } + result.push( + `${isNegation ? 'NOT' : ''}(${groupResult.join(` ${filterType} `)})` + ); + } else if (isPredicateObj(predicate)) { + const [condition, conditionParams] = whereConditionFromPredicateObject( + predicate + ); + + result.push(condition); + params.push(...conditionParams); + } + } +} + +const sortDirectionMap = { + ASCENDING: 'ASC', + DESCENDING: 'DESC', +}; + +export function orderByClauseFromSort( + sortPredicate: SortPredicatesGroup = [] +): string { + const orderByParts = sortPredicate.map( + ({ field, sortDirection }) => + `"${field}" ${sortDirectionMap[sortDirection]}` + ); + + // We always sort by _rowid_ last + orderByParts.push(`_rowid_ ${sortDirectionMap.ASCENDING}`); + + return `ORDER BY ${orderByParts.join(', ')}`; +} + +export function limitClauseFromPagination( + limit: number, + page: number = 0 +): ParameterizedStatement { + const params = [limit]; + let clause = 'LIMIT ?'; + if (page) { + const offset = limit * page; + params.push(offset); + clause += ' OFFSET ?'; + } + + return [clause, params]; +} + +export function queryAllStatement( + tableName: string, + predicate?: PredicatesGroup, + sort?: SortPredicatesGroup, + limit?: number, + page?: number +): ParameterizedStatement { + let statement = `SELECT * FROM "${tableName}"`; + const params = []; + + if (predicate && predicate.predicates.length) { + const [whereClause, whereParams] = whereClauseFromPredicate(predicate); + statement += ` ${whereClause}`; + params.push(...whereParams); + } + + const orderByClause = orderByClauseFromSort(sort); + statement += ` ${orderByClause}`; + + if (limit) { + const [limitClause, limitParams] = limitClauseFromPagination(limit, page); + statement += ` ${limitClause}`; + params.push(...limitParams); + } + + return [statement, params]; +} + +export function queryOneStatement( + firstOrLast, + tableName: string +): ParameterizedStatement { + if (firstOrLast === QueryOne.FIRST) { + // ORDER BY rowid will no longer work as expected if a customer has + // a field by that name in their schema. 
We may want to enforce it + // as a reserved keyword in Codegen + return [`SELECT * FROM ${tableName} ORDER BY _rowid_ LIMIT 1`, []]; + } else { + return [`SELECT * FROM ${tableName} ORDER BY _rowid_ DESC LIMIT 1`, []]; + } +} + +export function deleteByIdStatement( + id: string, + tableName: string +): ParameterizedStatement { + const deleteStatement = `DELETE FROM "${tableName}" WHERE "id"=?`; + return [deleteStatement, [id]]; +} + +export function deleteByPredicateStatement( + tableName: string, + predicate?: PredicatesGroup +): ParameterizedStatement { + let statement = `DELETE FROM "${tableName}"`; + const params = []; + + if (predicate && predicate.predicates.length) { + const [whereClause, whereParams] = whereClauseFromPredicate(predicate); + statement += ` ${whereClause}`; + params.push(...whereParams); + } + return [statement, params]; +} diff --git a/packages/datastore-storage-adapter/src/SQLiteAdapter/types.ts b/packages/datastore-storage-adapter/src/SQLiteAdapter/types.ts new file mode 100644 index 00000000000..dd781657c57 --- /dev/null +++ b/packages/datastore-storage-adapter/src/SQLiteAdapter/types.ts @@ -0,0 +1,31 @@ +import { GraphQLScalarType } from '@aws-amplify/datastore'; + +export function getSQLiteType( + scalar: keyof Omit< + typeof GraphQLScalarType, + 'getJSType' | 'getValidationFunction' | 'getSQLiteType' + > +): 'TEXT' | 'INTEGER' | 'REAL' | 'BLOB' { + switch (scalar) { + case 'Boolean': + case 'Int': + case 'AWSTimestamp': + return 'INTEGER'; + case 'ID': + case 'String': + case 'AWSDate': + case 'AWSTime': + case 'AWSDateTime': + case 'AWSEmail': + case 'AWSJSON': + case 'AWSURL': + case 'AWSPhone': + case 'AWSIPAddress': + return 'TEXT'; + case 'Float': + return 'REAL'; + default: + const _: never = scalar; + throw new Error(`unknown type ${scalar}`); + } +} diff --git a/packages/datastore-storage-adapter/src/index.ts b/packages/datastore-storage-adapter/src/index.ts new file mode 100644 index 00000000000..b771fd5286d --- /dev/null +++ b/packages/datastore-storage-adapter/src/index.ts @@ -0,0 +1,2 @@ +import SQLiteAdapter from './SQLiteAdapter/SQLiteAdapter'; +export { SQLiteAdapter }; diff --git a/packages/datastore-storage-adapter/tslint.json b/packages/datastore-storage-adapter/tslint.json new file mode 100644 index 00000000000..8eafab1d2b4 --- /dev/null +++ b/packages/datastore-storage-adapter/tslint.json @@ -0,0 +1,50 @@ +{ + "defaultSeverity": "error", + "plugins": ["prettier"], + "extends": [], + "jsRules": {}, + "rules": { + "prefer-const": true, + "max-line-length": [true, 120], + "no-empty-interface": true, + "no-var-keyword": true, + "object-literal-shorthand": true, + "no-eval": true, + "space-before-function-paren": [ + true, + { + "anonymous": "never", + "named": "never" + } + ], + "no-parameter-reassignment": true, + "align": [true, "parameters"], + "no-duplicate-imports": true, + "one-variable-per-declaration": [false, "ignore-for-loop"], + "triple-equals": [true, "allow-null-check"], + "comment-format": [true, "check-space"], + "indent": [false], + "whitespace": [ + false, + "check-branch", + "check-decl", + "check-operator", + "check-preblock" + ], + "eofline": true, + "variable-name": [ + true, + "check-format", + "allow-pascal-case", + "allow-snake-case", + "allow-leading-underscore" + ], + "semicolon": [ + true, + "always", + "ignore-interfaces", + "ignore-bound-class-methods" + ] + }, + "rulesDirectory": [] +} diff --git a/packages/datastore-storage-adapter/webpack.config.dev.js b/packages/datastore-storage-adapter/webpack.config.dev.js 
new file mode 100644 index 00000000000..31512a1245b --- /dev/null +++ b/packages/datastore-storage-adapter/webpack.config.dev.js @@ -0,0 +1,6 @@ +var config = require('./webpack.config.js'); + +var entry = { + 'aws-amplify-datastore-storage-adapter': './lib-esm/index.js', +}; +module.exports = Object.assign(config, { entry, mode: 'development' }); diff --git a/packages/datastore-storage-adapter/webpack.config.js b/packages/datastore-storage-adapter/webpack.config.js new file mode 100644 index 00000000000..ecc1d2fcb18 --- /dev/null +++ b/packages/datastore-storage-adapter/webpack.config.js @@ -0,0 +1,44 @@ +module.exports = { + entry: { + 'aws-amplify-datastore-storage-adapter.min': './lib-esm/index.js', + }, + externals: [ + '@aws-amplify/datastore', + '@aws-amplify/core', + 'react-native-sqlite-storage', + ], + output: { + filename: '[name].js', + path: __dirname + '/dist', + library: 'aws-amplify-datastore-storage-adapter', + libraryTarget: 'umd', + umdNamedDefine: true, + devtoolModuleFilenameTemplate: require('../aws-amplify/webpack-utils') + .devtoolModuleFilenameTemplate, + }, + // Enable sourcemaps for debugging webpack's output. + devtool: 'source-map', + resolve: { + extensions: ['.mjs', '.js', '.json'], + }, + mode: 'production', + module: { + rules: [ + // All output '.js' files will have any sourcemaps re-processed by 'source-map-loader'. + //{ enforce: 'pre', test: /\.js$/, loader: 'source-map-loader' }, + { + test: /\.js?$/, + exclude: /node_modules/, + use: [ + 'babel-loader', + { + loader: 'babel-loader', + options: { + presets: ['@babel/preset-env'], + }, + }, + ], + }, + ], + }, +}; diff --git a/packages/datastore/package.json b/packages/datastore/package.json index abbd3b2a781..91c5ff1df68 100644 --- a/packages/datastore/package.json +++ b/packages/datastore/package.json @@ -70,7 +70,7 @@ "es2015", "dom", "esnext.asynciterable", - "es2019.object" + "es2019" ], "allowJs": true, "esModuleInterop": true, diff --git a/packages/datastore/src/datastore/datastore.ts b/packages/datastore/src/datastore/datastore.ts index 989433c332f..cf61efaaebd 100644 --- a/packages/datastore/src/datastore/datastore.ts +++ b/packages/datastore/src/datastore/datastore.ts @@ -16,6 +16,7 @@ import { ModelSortPredicateCreator, PredicateAll, } from '../predicates'; +import { Adapter } from '../storage/adapter'; import { ExclusiveStorage as Storage } from '../storage/storage'; import { ControlMessage, SyncEngine } from '../sync'; import { @@ -49,6 +50,8 @@ import { ErrorHandler, SyncExpression, AuthModeStrategyType, + isNonModelFieldType, + isModelFieldType, } from '../types'; import { DATASTORE, @@ -61,6 +64,7 @@ import { SYNC, USER, isNullOrUndefined, + registerNonModelClass, } from '../util'; setAutoFreeze(true); @@ -276,6 +280,20 @@ const validateModelFields = (modelDefinition: SchemaModel | SchemaNonModel) => ( const jsType = GraphQLScalarType.getJSType(type); const validateScalar = GraphQLScalarType.getValidationFunction(type); + if (type === 'AWSJSON') { + if (typeof v === jsType) { + return; + } + if (typeof v === 'string') { + try { + JSON.parse(v); + return; + } catch (error) { + throw new Error(`Field ${name} is an invalid JSON object. 
${v}`); + } + } + } + if (isArray) { let errorTypeText: string = jsType; if (!isRequired) { @@ -339,6 +357,35 @@ const validateModelFields = (modelDefinition: SchemaModel | SchemaNonModel) => ( } }; +const castInstanceType = ( + modelDefinition: SchemaModel | SchemaNonModel, + k: string, + v: any +) => { + const { isArray, type } = modelDefinition.fields[k] || {}; + // attempt to parse stringified JSON + if ( + typeof v === 'string' && + (isArray || + type === 'AWSJSON' || + isNonModelFieldType(type) || + isModelFieldType(type)) + ) { + try { + return JSON.parse(v); + } catch { + // if JSON is invalid, don't throw and let modelValidator handle it + } + } + + // cast from numeric representation of boolean to JS boolean + if (typeof v === 'number' && type === 'Boolean') { + return Boolean(v); + } + + return v; +}; + const initializeInstance = ( init: ModelInit, modelDefinition: SchemaModel | SchemaNonModel, @@ -346,8 +393,10 @@ const initializeInstance = ( ) => { const modelValidator = validateModelFields(modelDefinition); Object.entries(init).forEach(([k, v]) => { - modelValidator(k, v); - (draft)[k] = v; + const parsedValue = castInstanceType(modelDefinition, k, v); + + modelValidator(k, parsedValue); + (draft)[k] = parsedValue; }); }; @@ -415,7 +464,9 @@ const createModelClass = ( draft.id = source.id; const modelValidator = validateModelFields(modelDefinition); Object.entries(draft).forEach(([k, v]) => { - modelValidator(k, v); + const parsedValue = castInstanceType(modelDefinition, k, v); + + modelValidator(k, parsedValue); }); }, p => (patches = p) @@ -502,6 +553,8 @@ const createNonModelClass = (typeDefinition: SchemaNonModel) => { Object.defineProperty(clazz, 'name', { value: typeDefinition.name }); + registerNonModelClass(clazz); + return clazz; }; @@ -652,7 +705,7 @@ class DataStore { ModelPredicate > = new WeakMap>(); private sessionId: string; - private getAuthToken: Promise; + private storageAdapter: Adapter; getModuleName() { return 'DataStore'; @@ -676,7 +729,7 @@ class DataStore { namespaceResolver, getModelConstructorByModelName, modelInstanceCreator, - undefined, + this.storageAdapter, this.sessionId ); @@ -1104,6 +1157,7 @@ class DataStore { fullSyncInterval: configFullSyncInterval, syncExpressions: configSyncExpressions, authProviders: configAuthProviders, + storageAdapter: configStorageAdapter, ...configFromAmplify } = config; @@ -1154,6 +1208,12 @@ class DataStore { configFullSyncInterval || 24 * 60; // 1 day + this.storageAdapter = + (configDataStore && configDataStore.storageAdapter) || + this.storageAdapter || + configStorageAdapter || + undefined; + this.sessionId = this.retrieveSessionId(); }; diff --git a/packages/datastore/src/index.ts b/packages/datastore/src/index.ts index 7689c7c7a5c..cd86f64ba03 100644 --- a/packages/datastore/src/index.ts +++ b/packages/datastore/src/index.ts @@ -1,3 +1,31 @@ -export { DataStore, DataStoreClass, initSchema } from './datastore/datastore'; -export { Predicates } from './predicates'; +export { + DataStore, + DataStoreClass, + initSchema, + ModelInstanceCreator, +} from './datastore/datastore'; + +export { + Predicates, + ModelPredicateCreator, + ModelSortPredicateCreator, +} from './predicates'; +export { Adapter as StorageAdapter } from './storage/adapter'; + +import { + traverseModel, + validatePredicate, + USER, + isNonModelConstructor, + isModelConstructor, +} from './util'; + +export const utils = { + USER, + traverseModel, + validatePredicate, + isNonModelConstructor, + isModelConstructor, +}; + export * from './types'; 
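
For orientation (an illustration, not part of the patch itself): the DataStore changes above accept a pluggable storage adapter through the new `storageAdapter` configuration key, which the SQLite adapter package plugs into. A minimal sketch, assuming the new package is published as `@aws-amplify/datastore-storage-adapter` and that `SQLiteAdapter` is the adapter it exports:

	import { DataStore } from '@aws-amplify/datastore';
	import { SQLiteAdapter } from '@aws-amplify/datastore-storage-adapter';

	// Register the React Native SQLite adapter; DataStore passes it through
	// to storage instead of falling back to getDefaultAdapter().
	DataStore.configure({
		storageAdapter: SQLiteAdapter,
	});
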
diff --git a/packages/datastore/src/storage/storage.ts b/packages/datastore/src/storage/storage.ts index c6f9a76228b..7dbb767fe39 100644 --- a/packages/datastore/src/storage/storage.ts +++ b/packages/datastore/src/storage/storage.ts @@ -57,7 +57,7 @@ class StorageClass implements StorageFacade { private readonly adapter?: Adapter, private readonly sessionId?: string ) { - this.adapter = getDefaultAdapter(); + this.adapter = this.adapter || getDefaultAdapter(); this.pushStream = new PushStream(); } diff --git a/packages/datastore/src/sync/index.ts b/packages/datastore/src/sync/index.ts index 29086bbe6c4..a0e7d31a91a 100644 --- a/packages/datastore/src/sync/index.ts +++ b/packages/datastore/src/sync/index.ts @@ -908,6 +908,12 @@ export class SyncEngine { isRequired: true, isArray: false, }, + lastSyncPredicate: { + name: 'lastSyncPredicate', + type: 'String', + isRequired: false, + isArray: false, + }, }, }, }, diff --git a/packages/datastore/src/sync/utils.ts b/packages/datastore/src/sync/utils.ts index 36c45722905..0d7e3d5086c 100644 --- a/packages/datastore/src/sync/utils.ts +++ b/packages/datastore/src/sync/utils.ts @@ -395,9 +395,19 @@ export function createMutationInstanceFromModelOperation< exhaustiveCheck(opType); } + // stringify nested objects + // this allows us to return parsed JSON to users (see `castInstanceType()` in datastore.ts), + // but still send the object correctly over-the-wire + const replacer = (k, v) => { + if (k && v !== null && typeof v === 'object' && !Array.isArray(v)) { + return JSON.stringify(v); + } + return v; + }; + const mutationEvent = modelInstanceCreator(MutationEventConstructor, { ...(id ? { id } : {}), - data: JSON.stringify(element), + data: JSON.stringify(element, replacer), modelId: element.id, model: model.name, operation, diff --git a/packages/datastore/src/types.ts b/packages/datastore/src/types.ts index 27abe95fcb9..21ec62e2656 100644 --- a/packages/datastore/src/types.ts +++ b/packages/datastore/src/types.ts @@ -13,6 +13,7 @@ import { } from './util'; import { PredicateAll } from './predicates'; import { GRAPHQL_AUTH_MODE } from '@aws-amplify/api-graphql'; +import { Adapter } from './storage/adapter'; //#region Schema types export type Schema = UserSchema & { @@ -79,6 +80,35 @@ export function isTargetNameAssociation( export type ModelAttributes = ModelAttribute[]; type ModelAttribute = { type: string; properties?: Record }; +export type ModelAuthRule = { + allow: string; + provider?: string; + operations?: string[]; + ownerField?: string; + identityClaim?: string; + groups?: string[]; + groupClaim?: string; + groupsField?: string; +}; + +export type ModelAttributeAuth = { + type: 'auth'; + properties: { + rules: ModelAuthRule[]; + }; +}; + +export function isModelAttributeAuth( + attr: ModelAttribute +): attr is ModelAttributeAuth { + return ( + attr.type === 'auth' && + attr.properties && + attr.properties.rules && + attr.properties.rules.length > 0 + ); +} + type ModelAttributeKey = { type: 'key'; properties: { @@ -179,7 +209,7 @@ export namespace GraphQLScalarType { typeof GraphQLScalarType, 'getJSType' | 'getValidationFunction' > - ): 'string' | 'number' | 'boolean' { + ): 'string' | 'number' | 'boolean' | 'object' { switch (scalar) { case 'Boolean': return 'boolean'; @@ -189,7 +219,6 @@ export namespace GraphQLScalarType { case 'AWSTime': case 'AWSDateTime': case 'AWSEmail': - case 'AWSJSON': case 'AWSURL': case 'AWSPhone': case 'AWSIPAddress': @@ -198,8 +227,10 @@ export namespace GraphQLScalarType { case 'Float': case 'AWSTimestamp': 
return 'number'; + case 'AWSJSON': + return 'object'; default: - exhaustiveCheck(scalar); + exhaustiveCheck(scalar as never); } } @@ -277,7 +308,7 @@ export function isEnumFieldType(obj: any): obj is EnumFieldType { return false; } -type ModelField = { +export type ModelField = { name: string; type: | keyof Omit< @@ -619,6 +650,7 @@ export type DataStoreConfig = { fullSyncInterval?: number; syncExpressions?: SyncExpression[]; authProviders?: AuthProviders; + storageAdapter?: Adapter; }; authModeStrategyType?: AuthModeStrategyType; conflictHandler?: ConflictHandler; // default : retry until client wins up to x times @@ -628,6 +660,7 @@ export type DataStoreConfig = { fullSyncInterval?: number; syncExpressions?: SyncExpression[]; authProviders?: AuthProviders; + storageAdapter?: Adapter; }; export type AuthProviders = { diff --git a/packages/datastore/src/util.ts b/packages/datastore/src/util.ts index 161faceb70a..7bf3b9e2c80 100644 --- a/packages/datastore/src/util.ts +++ b/packages/datastore/src/util.ts @@ -22,6 +22,7 @@ import { isModelAttributeKey, isModelAttributePrimaryKey, isModelAttributeCompositeKey, + NonModelTypeConstructor, } from './types'; import { WordArray } from 'amazon-cognito-identity-js'; @@ -131,6 +132,18 @@ export const isModelConstructor = ( ); }; +const nonModelClasses = new WeakSet>(); + +export function registerNonModelClass(clazz: NonModelTypeConstructor) { + nonModelClasses.add(clazz); +} + +export const isNonModelConstructor = ( + obj: any +): obj is NonModelTypeConstructor => { + return nonModelClasses.has(obj); +}; + /* When we have GSI(s) with composite sort keys defined on a model There are some very particular rules regarding which fields must be included in the update mutation input diff --git a/scripts/build.js b/scripts/build.js index 04913358d80..511c49e1d20 100644 --- a/scripts/build.js +++ b/scripts/build.js @@ -162,7 +162,7 @@ async function buildES5(typeScriptCompiler, watchMode) { 'es2017', 'esnext.asynciterable', 'es2018.asyncgenerator', - 'es2019.object', + 'es2019', ], downlevelIteration: true, jsx: jsx, @@ -219,7 +219,7 @@ function buildES6(typeScriptCompiler, watchMode) { 'es2017', 'esnext.asynciterable', 'es2018.asyncgenerator', - 'es2019.object', + 'es2019', ], downlevelIteration: true, jsx: jsx,
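
Closing illustration (not part of the change set): the SQLiteUtils statement builders compose into parameterized SQL that SQLiteDatabase can execute. A sketch, assuming a user model named "Post" with a numeric "rating" field and a loosely-typed predicate object:

	const predicate: any = {
		type: 'and',
		predicates: [{ field: 'rating', operator: 'gt', operand: 4 }],
	};

	// limit 10 with page index 2 => LIMIT 10 OFFSET 20
	const [sql, params] = queryAllStatement('Post', predicate, undefined, 10, 2);
	// sql:    SELECT * FROM "Post" WHERE ("rating" > ?) ORDER BY _rowid_ ASC LIMIT ? OFFSET ?
	// params: [4, 10, 20]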