diff --git a/lib/Onyx.d.ts b/lib/Onyx.d.ts index 0c7fdd31..c5be53c6 100644 --- a/lib/Onyx.d.ts +++ b/lib/Onyx.d.ts @@ -1,7 +1,6 @@ import {Component} from 'react'; -import {PartialDeep} from 'type-fest'; import * as Logger from './Logger'; -import {CollectionKey, CollectionKeyBase, DeepRecord, KeyValueMapping, OnyxCollection, OnyxEntry, OnyxKey, NullableProperties} from './types'; +import {CollectionKey, CollectionKeyBase, DeepRecord, KeyValueMapping, NullishDeep, OnyxCollection, OnyxEntry, OnyxKey} from './types'; /** * Represents a mapping object where each `OnyxKey` maps to either a value of its corresponding type in `KeyValueMapping` or `null`. @@ -79,14 +78,14 @@ type OnyxUpdate = | { onyxMethod: typeof METHOD.MERGE; key: TKey; - value: PartialDeep; + value: NullishDeep; }; }[OnyxKey] | { [TKey in CollectionKeyBase]: { onyxMethod: typeof METHOD.MERGE_COLLECTION; key: TKey; - value: Record<`${TKey}${string}`, PartialDeep>; + value: Record<`${TKey}${string}`, NullishDeep>; }; }[CollectionKeyBase]; @@ -202,7 +201,7 @@ declare function multiSet(data: Partial): Promise * @param key ONYXKEYS key * @param value Object or Array value to merge */ -declare function merge(key: TKey, value: NullableProperties>): Promise; +declare function merge(key: TKey, value: NullishDeep): Promise; /** * Clear out all the data in the store @@ -244,10 +243,7 @@ declare function clear(keysToPreserve?: OnyxKey[]): Promise; * @param collectionKey e.g. 
`ONYXKEYS.COLLECTION.REPORT` * @param collection Object collection keyed by individual collection member keys and values */ -declare function mergeCollection( - collectionKey: TKey, - collection: Collection>, -): Promise; +declare function mergeCollection(collectionKey: TKey, collection: Collection>): Promise; /** * Insert API responses and lifecycle data into Onyx diff --git a/lib/Onyx.js b/lib/Onyx.js index 5957c8ce..a685fb32 100644 --- a/lib/Onyx.js +++ b/lib/Onyx.js @@ -1041,7 +1041,13 @@ function set(key, value) { Logger.logAlert(`Onyx.set() called after Onyx.merge() for key: ${key}. It is recommended to use set() or merge() not both.`); } - const valueWithNullRemoved = utils.removeNullObjectValues(value); + // We can remove all null values in an object by merging it with itself + // utils.fastMerge recursively goes through the object and removes all null values + // Passing two identical objects as source and target to fastMerge will not change it, but only remove the null values + let valueWithNullRemoved = value; + if (typeof value === 'object' && !_.isArray(value)) { + valueWithNullRemoved = utils.fastMerge(value, value); + } const hasChanged = cache.hasValueChanged(key, valueWithNullRemoved); @@ -1098,9 +1104,10 @@ function multiSet(data) { * @private * @param {*} existingValue * @param {Array<*>} changes Array of changes that should be applied to the existing value + * @param {Boolean} shouldRemoveNullObjectValues * @returns {*} */ -function applyMerge(existingValue, changes) { +function applyMerge(existingValue, changes, shouldRemoveNullObjectValues) { const lastChange = _.last(changes); if (_.isArray(lastChange)) { @@ -1109,7 +1116,7 @@ function applyMerge(existingValue, changes) { if (_.some(changes, _.isObject)) { // Object values are then merged one after the other - return _.reduce(changes, (modifiedData, change) => utils.fastMerge(modifiedData, change), + return _.reduce(changes, (modifiedData, change) => utils.fastMerge(modifiedData, change, 
shouldRemoveNullObjectValues), existingValue || {}); } @@ -1157,7 +1164,8 @@ function merge(key, changes) { .then((existingValue) => { try { // We first only merge the changes, so we can provide these to the native implementation (SQLite uses only delta changes in "JSON_PATCH" to merge) - let batchedChanges = applyMerge(undefined, mergeQueue[key]); + // We don't want to remove null values from the "batchedChanges", because SQLite uses them to remove keys from storage natively. + let batchedChanges = applyMerge(undefined, mergeQueue[key], false); if (_.isNull(batchedChanges)) { return remove(key); @@ -1172,15 +1180,16 @@ function merge(key, changes) { delete mergeQueuePromise[key]; // After that we merge the batched changes with the existing value - const updatedValue = shouldOverwriteExistingValue ? batchedChanges : applyMerge(existingValue, [batchedChanges]); - const modifiedData = utils.removeNullObjectValues(updatedValue); + // We can remove null values from the "modifiedData", because "null" implies that the user wants to remove a value from storage. + // The "modifiedData" will be directly "set" in storage instead of being merged + const modifiedData = shouldOverwriteExistingValue ? batchedChanges : applyMerge(existingValue, [batchedChanges], true); // On native platforms we use SQLite which utilises JSON_PATCH to merge changes. // JSON_PATCH generally removes top-level nullish values from the stored object. - // When there is no existing value though, SQLite will just insert the changes as a new value and thus the top-level nullish values won't be removed. - // Therefore we need to remove nullish values from the `batchedChanges` which are sent to the SQLite, if no existing value is present. + // When there is no existing value though, SQLite will just insert the changes as a new value and thus the null values won't be removed. 
+ // Therefore we need to remove null values from the `batchedChanges` which are sent to the SQLite, if no existing value is present. if (!existingValue) { - batchedChanges = utils.removeNullObjectValues(batchedChanges); + batchedChanges = applyMerge(undefined, mergeQueue[key], true); } const hasChanged = cache.hasValueChanged(key, modifiedData); diff --git a/lib/storage/providers/IDBKeyVal.js b/lib/storage/providers/IDBKeyVal.js index 75ec074e..09357d9f 100644 --- a/lib/storage/providers/IDBKeyVal.js +++ b/lib/storage/providers/IDBKeyVal.js @@ -56,7 +56,7 @@ const provider = { const upsertMany = _.map(pairs, ([key, value], index) => { const prev = values[index]; const newValue = _.isObject(prev) ? utils.fastMerge(prev, value) : value; - return promisifyRequest(store.put(utils.removeNullObjectValues(newValue), key)); + return promisifyRequest(store.put(newValue, key)); }); return Promise.all(upsertMany); }); diff --git a/lib/types.d.ts b/lib/types.d.ts index c43d0640..80b586cc 100644 --- a/lib/types.d.ts +++ b/lib/types.d.ts @@ -1,10 +1,13 @@ import {Merge} from 'type-fest'; +import {BuiltIns} from 'type-fest/source/internal'; /** * Represents a deeply nested record. It maps keys to values, * and those values can either be of type `TValue` or further nested `DeepRecord` instances. */ -type DeepRecord = {[key: string]: TValue | DeepRecord}; +type DeepRecord = { + [key: string]: TValue | DeepRecord; +}; /** * Represents type options to configure all Onyx methods. @@ -180,26 +183,42 @@ type OnyxEntry = TOnyxValue | null; */ type OnyxCollection = OnyxEntry>; +type NonTransformableTypes = + | BuiltIns + | ((...args: any[]) => unknown) + | Map + | Set + | ReadonlyMap + | ReadonlySet + | unknown[] + | readonly unknown[]; + /** - * The `NullableProperties` sets the values of all properties in `T` to be nullable (i.e., `| null`). - * It doesn't recurse into nested property values, this means it applies the nullability only to the top-level properties. 
+ * Create a type from another type with all keys and nested keys set to optional or null. + * + * @example + * const settings: Settings = { + * textEditor: { + * fontSize: 14; + * fontColor: '#000000'; + * fontWeight: 400; + * } + * autosave: true; + * }; * - * @template T The type of the properties to convert to nullable properties. + * const applySavedSettings = (savedSettings: NullishDeep) => { + * return {...settings, ...savedSettings}; + * } + * + * settings = applySavedSettings({textEditor: {fontWeight: 500, fontColor: null}}); */ -type NullableProperties = { - [P in keyof T]: T[P] | null; -}; +type NullishDeep = T extends NonTransformableTypes ? T : T extends object ? NullishObjectDeep : unknown; -export { - CollectionKey, - CollectionKeyBase, - CustomTypeOptions, - DeepRecord, - Key, - KeyValueMapping, - OnyxCollection, - OnyxEntry, - OnyxKey, - Selector, - NullableProperties, +/** +Same as `NullishDeep`, but accepts only `object`s as inputs. Internal helper for `NullishDeep`. +*/ +type NullishObjectDeep = { + [KeyType in keyof ObjectType]?: NullishDeep | null; }; + +export {CollectionKey, CollectionKeyBase, CustomTypeOptions, DeepRecord, Key, KeyValueMapping, OnyxCollection, OnyxEntry, OnyxKey, Selector, NullishDeep}; diff --git a/lib/utils.js b/lib/utils.js index bdc2378a..858cb271 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -1,4 +1,4 @@ -import * as _ from 'underscore'; +import _ from 'underscore'; function areObjectsEmpty(a, b) { return ( @@ -25,9 +25,12 @@ function isMergeableObject(val) { /** * @param {Object} target * @param {Object} source +* @param {Boolean} shouldRemoveNullObjectValues * @returns {Object} */ -function mergeObject(target, source) { +function mergeObject(target, source, shouldRemoveNullObjectValues = true) { + const targetAndSourceIdentical = target === source; + const destination = {}; if (isMergeableObject(target)) { // lodash adds a small overhead so we don't use it here @@ -35,6 +38,13 @@ function mergeObject(target, 
source) { const targetKeys = Object.keys(target); for (let i = 0; i < targetKeys.length; ++i) { const key = targetKeys[i]; + + // If shouldRemoveNullObjectValues is true, we want to remove null values from the merged object + if (shouldRemoveNullObjectValues && (target[key] === null || source[key] === null)) { + // eslint-disable-next-line no-continue + continue; + } + destination[key] = target[key]; } } @@ -44,15 +54,22 @@ function mergeObject(target, source) { const sourceKeys = Object.keys(source); for (let i = 0; i < sourceKeys.length; ++i) { const key = sourceKeys[i]; - if (source[key] === undefined) { + + // If shouldRemoveNullObjectValues is true, we want to remove null values from the merged object + if (shouldRemoveNullObjectValues && source[key] === null) { // eslint-disable-next-line no-continue continue; } + if (!isMergeableObject(source[key]) || !target[key]) { + if (targetAndSourceIdentical) { + // eslint-disable-next-line no-continue + continue; + } destination[key] = source[key]; } else { // eslint-disable-next-line no-use-before-define - destination[key] = fastMerge(target[key], source[key]); + destination[key] = fastMerge(target[key], source[key], shouldRemoveNullObjectValues); } } @@ -60,39 +77,26 @@ function mergeObject(target, source) { } /** + * Merges two objects and removes null values if "shouldRemoveNullObjectValues" is set to true + * + * We generally want to remove null values from objects written to disk and cache, because it decreases the amount of data stored in memory and on disk. + * On native, when merging an existing value with new changes, SQLite will use JSON_PATCH, which removes top-level nullish values. + * To be consistent with the behaviour for merge, we'll also want to remove null values for "set" operations. 
+ * * @param {Object|Array} target * @param {Object|Array} source +* @param {Boolean} shouldRemoveNullObjectValues * @returns {Object|Array} */ -function fastMerge(target, source) { +function fastMerge(target, source, shouldRemoveNullObjectValues = true) { // We have to ignore arrays and nullish values here, // otherwise "mergeObject" will throw an error, // because it expects an object as "source" - if (_.isArray(source) || _.isNull(source) || _.isUndefined(source)) { + if (_.isArray(source) || source === null || source === undefined) { return source; } - return mergeObject(target, source); -} - -/** - * We generally want to remove top-level nullish values from objects written to disk and cache, because it decreases the amount of data stored in memory and on disk. - * On native, when merging an existing value with new changes, SQLite will use JSON_PATCH, which removes top-level nullish values. - * To be consistent with the behaviour for merge, we'll also want to remove nullish values for "set" operations. - * On web, IndexedDB will keep the top-level keys along with a null value and this uses up storage and memory. - * This method will ensure that keys for null values are removed before an object is written to disk and cache so that all platforms are storing the data in the same efficient way. 
- * @private - * @param {*} value - * @returns {*} - */ -function removeNullObjectValues(value) { - if (_.isArray(value) || !_.isObject(value)) { - return value; - } - - const objectWithoutNullObjectValues = _.omit(value, objectValue => _.isNull(objectValue)); - - return objectWithoutNullObjectValues; + return mergeObject(target, source, shouldRemoveNullObjectValues); } -export default {removeNullObjectValues, areObjectsEmpty, fastMerge}; +export default {areObjectsEmpty, fastMerge}; diff --git a/tests/unit/onyxCacheTest.js b/tests/unit/onyxCacheTest.js index d48c4c26..cc6ac818 100644 --- a/tests/unit/onyxCacheTest.js +++ b/tests/unit/onyxCacheTest.js @@ -309,7 +309,7 @@ describe('Onyx', () => { expect(cache.getValue('mockKey')).toEqual({value: 'myMockObject'}); }); - it('Should do nothing to a key which value is `undefined`', () => { + it('Should merge a key with `undefined`', () => { // Given cache with existing data cache.set('mockKey', {ID: 5}); @@ -317,8 +317,7 @@ describe('Onyx', () => { cache.merge({mockKey: undefined}); // Then the key should still be in cache and the value unchanged - expect(cache.hasCacheForKey('mockKey')).toBe(true); - expect(cache.getValue('mockKey')).toEqual({ID: 5}); + expect(cache.getValue('mockKey')).toEqual(undefined); }); it('Should update storageKeys when new keys are created', () => { diff --git a/tests/unit/onyxTest.js b/tests/unit/onyxTest.js index ef2b49f9..76afb1bb 100644 --- a/tests/unit/onyxTest.js +++ b/tests/unit/onyxTest.js @@ -223,7 +223,7 @@ describe('Onyx', () => { }); }); - it('should remove top-level keys that are set to null/undefined when merging', () => { + it('should remove keys that are set to null when merging', () => { let testKeyValue; connectionID = Onyx.connect({ @@ -234,13 +234,70 @@ describe('Onyx', () => { }, }); - return Onyx.set(ONYX_KEYS.TEST_KEY, {test1: 'test1', test2: 'test2'}) + return Onyx.set(ONYX_KEYS.TEST_KEY, { + test1: { + test2: 'test2', + test3: 'test3', + }, + }) .then(() => { - 
expect(testKeyValue).toEqual({test1: 'test1', test2: 'test2'}); + expect(testKeyValue).toEqual({ + test1: { + test2: 'test2', + test3: 'test3', + }, + }); + return Onyx.merge(ONYX_KEYS.TEST_KEY, { + test1: { + test2: null, + }, + }); + }) + .then(() => { + expect(testKeyValue).toEqual({test1: {test3: 'test3'}}); return Onyx.merge(ONYX_KEYS.TEST_KEY, {test1: null}); }) .then(() => { - expect(testKeyValue).toEqual({test2: 'test2'}); + expect(testKeyValue).toEqual({}); + }); + }); + + it('should not remove keys that are set to undefined when merging', () => { + let testKeyValue; + + connectionID = Onyx.connect({ + key: ONYX_KEYS.TEST_KEY, + initWithStoredValues: false, + callback: (value) => { + testKeyValue = value; + }, + }); + + return Onyx.set(ONYX_KEYS.TEST_KEY, { + test1: { + test2: 'test2', + test3: 'test3', + }, + }) + .then(() => { + expect(testKeyValue).toEqual({ + test1: { + test2: 'test2', + test3: 'test3', + }, + }); + return Onyx.merge(ONYX_KEYS.TEST_KEY, { + test1: { + test2: undefined, + }, + }); + }) + .then(() => { + expect(testKeyValue).toEqual({test1: {test2: undefined, test3: 'test3'}}); + return Onyx.merge(ONYX_KEYS.TEST_KEY, {test1: undefined}); + }) + .then(() => { + expect(testKeyValue).toEqual({test1: undefined}); }); });