
Commit

Merge remote-tracking branch 'origin/main' into fix/39852
dominictb committed Apr 25, 2024
2 parents affab42 + ab8222d commit 1de05cf
Showing 14 changed files with 26,947 additions and 55 deletions.
9 changes: 9 additions & 0 deletions .github/actions/validateReassureOutput/action.yaml
@@ -0,0 +1,9 @@
name: 'Validate Regression Test Output'
description: 'Validates the output of regression tests and determines if a test action should fail.'
inputs:
  DURATION_DEVIATION_PERCENTAGE:
    description: Allowable percentage deviation for the mean duration in regression test results.
    required: true
runs:
  using: 'node20'
  main: './index.js'
26,708 changes: 26,708 additions & 0 deletions .github/actions/validateReassureOutput/index.js

Large diffs are not rendered by default.

47 changes: 47 additions & 0 deletions .github/actions/validateReassureOutput/validateReassureOutput.ts
@@ -0,0 +1,47 @@
/*
 * NOTE: After changes to the file it needs to be compiled using [`ncc`](https://github.com/vercel/ncc)
* Example: ncc build -t validateReassureOutput.ts -o index.js
*/

import * as core from '@actions/core';
import type {CompareResult, PerformanceEntry} from '@callstack/reassure-compare/src/types';
import fs from 'fs';

async function run() {
    try {
        const regressionOutput: CompareResult = JSON.parse(fs.readFileSync('.reassure/output.json', 'utf8'));
        const durationDeviation = Number(core.getInput('DURATION_DEVIATION_PERCENTAGE', {required: true}));

        if (regressionOutput.significant === undefined || regressionOutput.significant.length === 0) {
            console.log('No significant data available. Exiting...');
            return true;
        }

        console.log(`Processing ${regressionOutput.significant.length} measurements...`);

        for (let i = 0; i < regressionOutput.significant.length; i++) {
            const measurement = regressionOutput.significant[i];
            const baseline: PerformanceEntry = measurement.baseline;
            const current: PerformanceEntry = measurement.current;

            console.log(`Processing measurement ${i + 1}: ${measurement.name}`);

            const increasePercentage = ((current.meanDuration - baseline.meanDuration) / baseline.meanDuration) * 100;
            if (increasePercentage > durationDeviation) {
                core.setFailed(`Duration increase percentage exceeded the allowed deviation of ${durationDeviation}%. Current percentage: ${increasePercentage}%`);
                break;
            } else {
                console.log(`Duration increase percentage ${increasePercentage}% is within the allowed deviation range of ${durationDeviation}%.`);
            }
        }

        return true;
    } catch (error) {
        console.log('error: ', error);
        core.setFailed(error.message);
    }
}

run();

export default run;
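For reference, a minimal sketch of the `.reassure/output.json` shape this script consumes, limited to the fields it actually reads (`significant`, `name`, `baseline.meanDuration`, `current.meanDuration`); the measurement name and numbers here are hypothetical, and the real CompareResult from @callstack/reassure-compare carries additional fields.

// Hypothetical, trimmed-down output.json; real Reassure output contains more fields.
const exampleRegressionOutput = {
    significant: [
        {
            name: 'Onyx.mergeCollection with 1k keys', // hypothetical test name
            baseline: {meanDuration: 118.4}, // mean duration on the baseline branch, in ms
            current: {meanDuration: 131.9}, // mean duration on the PR branch, in ms
        },
    ],
};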
45 changes: 45 additions & 0 deletions .github/workflows/reassurePerfTests.yml
@@ -0,0 +1,45 @@
name: Reassure Performance Tests

on:
  pull_request:
    types: [opened, synchronize]
    branches-ignore: [staging, production]
    paths-ignore: [tests/**, '**.md', '**.sh']
jobs:
  perf-tests:
    if: ${{ github.actor != 'OSBotify' }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'

      - name: Set dummy git credentials
        run: |
          git config --global user.email "test@test.com"
          git config --global user.name "Test"
      - name: Run performance testing script
        shell: bash
        run: |
          set -e
          BASELINE_BRANCH=${BASELINE_BRANCH:="main"}
          git fetch origin "$BASELINE_BRANCH" --no-tags --depth=1
          git switch "$BASELINE_BRANCH"
          npm install --force
          npm install reassure
          npx reassure --baseline
          git switch --force --detach -
          npm install --force
          npm install reassure
          npx reassure --branch
      - name: Validate output.json
        id: validateReassureOutput
        uses: ./.github/actions/validateReassureOutput
        with:
          DURATION_DEVIATION_PERCENTAGE: 20
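As a worked example of the threshold above (numbers are illustrative only): with DURATION_DEVIATION_PERCENTAGE set to 20, a test whose baseline meanDuration is 100 ms may regress to at most 120 ms before the validation step fails the workflow.

// Illustrative check mirroring validateReassureOutput.ts with hypothetical numbers.
const durationDeviation = 20; // % threshold, from the workflow input above
const baselineMeanDuration = 100; // ms, hypothetical baseline measurement
const currentMeanDuration = 125; // ms, hypothetical PR measurement
const increasePercentage = ((currentMeanDuration - baselineMeanDuration) / baselineMeanDuration) * 100; // 25
console.log(increasePercentage > durationDeviation); // true -> core.setFailed would be called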
6 changes: 5 additions & 1 deletion .gitignore
@@ -18,5 +18,9 @@ dist/
# Published package
*.tgz

# Yalc
.yalc
yalc.lock

# Perf tests
.reassure
.reassure
4 changes: 2 additions & 2 deletions lib/DevTools.ts
@@ -39,9 +39,9 @@ class DevTools {
try {
// We don't want to augment the window type in a library code, so we use type assertion instead
// eslint-disable-next-line no-underscore-dangle, @typescript-eslint/no-explicit-any
const reduxDevtools: ReduxDevtools = (window as any).__REDUX_DEVTOOLS_EXTENSION__;
const reduxDevtools: ReduxDevtools = typeof window === 'undefined' ? undefined : (window as any).__REDUX_DEVTOOLS_EXTENSION__;

if ((options && options.remote) || typeof window === 'undefined' || !reduxDevtools) {
if (options?.remote || !reduxDevtools) {
return;
}

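The change above matters because the old line dereferenced `window` before the `typeof window === 'undefined'` check, which would throw a ReferenceError in non-browser environments (caught by the surrounding try block) instead of reaching the early return; the rewritten line performs the check first. A minimal sketch of the same guard pattern in isolation (hypothetical helper, not part of the Onyx API):

// Hypothetical helper showing the guard order: check typeof window before any property access.
function getDevtoolsExtension(): unknown {
    // `typeof window` is safe to evaluate even when no `window` global exists,
    // so the property access is only reached in browser-like environments.
    // eslint-disable-next-line no-underscore-dangle, @typescript-eslint/no-explicit-any
    return typeof window === 'undefined' ? undefined : (window as any).__REDUX_DEVTOOLS_EXTENSION__;
}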
14 changes: 10 additions & 4 deletions lib/Onyx.ts
@@ -254,7 +254,7 @@ function set<TKey extends OnyxKey>(key: TKey, value: OnyxEntry<KeyValueMapping[T
* @param data object keyed by ONYXKEYS and the values to set
*/
function multiSet(data: Partial<NullableKeyValueMapping>): Promise<void> {
const keyValuePairs = OnyxUtils.prepareKeyValuePairsForStorage(data);
const keyValuePairs = OnyxUtils.prepareKeyValuePairsForStorage(data, true);

const updatePromises = keyValuePairs.map(([key, value]) => {
const prevValue = cache.getValue(key, false);
@@ -294,7 +294,7 @@ function merge<TKey extends OnyxKey>(key: TKey, changes: OnyxEntry<NullishDeep<K
const mergeQueuePromise = OnyxUtils.getMergeQueuePromise();

// Top-level undefined values are ignored
// Therefore we need to prevent adding them to the merge queue
// Therefore, we need to prevent adding them to the merge queue
if (changes === undefined) {
return mergeQueue[key] ? mergeQueuePromise[key] : Promise.resolve();
}
@@ -448,8 +448,14 @@ function mergeCollection<TKey extends CollectionKeyBase, TMap>(collectionKey: TK
return obj;
}, {});

const keyValuePairsForExistingCollection = OnyxUtils.prepareKeyValuePairsForStorage(existingKeyCollection);
const keyValuePairsForNewCollection = OnyxUtils.prepareKeyValuePairsForStorage(newCollection);
// When (multi-)merging the values with the existing values in storage,
// we don't want to remove nested null values from the data that we pass to the storage layer,
// because the storage layer uses them to remove nested keys from storage natively.
const keyValuePairsForExistingCollection = OnyxUtils.prepareKeyValuePairsForStorage(existingKeyCollection, false);

// We can safely remove nested null values when using (multi-)set,
// because we will simply overwrite the existing values in storage.
const keyValuePairsForNewCollection = OnyxUtils.prepareKeyValuePairsForStorage(newCollection, true);

const promises = [];

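A minimal sketch (plain objects, not the Onyx API) of why the two prepareKeyValuePairsForStorage calls above pass different flags: in a merge, a nested null is an instruction for the storage layer to delete that nested key, so it must survive; in a set, the value replaces whatever is stored, so nested nulls carry no information and can be stripped up front.

// Hypothetical report objects used only to illustrate the two code paths.
const mergeChange = {report_1: {title: 'New title', draft: null}};
// Passed to storage with the nested null intact (shouldRemoveNestedNulls = false),
// so the native layer can remove the existing `draft` field during the merge.

const setValue = {report_2: {title: 'Fresh report', draft: null}};
// Safe to strip before writing (shouldRemoveNestedNulls = true): the whole key is
// overwritten, so storing {report_2: {title: 'Fresh report'}} loses nothing.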
50 changes: 26 additions & 24 deletions lib/OnyxUtils.ts
@@ -453,6 +453,7 @@ function keysChanged<TKey extends CollectionKeyBase>(
// We prepare the "cached collection" which is the entire collection + the new partial data that
// was merged in via mergeCollection().
const cachedCollection = getCachedCollection(collectionKey);
const cachedCollectionWithoutNestedNulls = utils.removeNestedNullValues(cachedCollection) as Record<string, unknown>;

// Regular Onyx.connect() subscriber found.
if (typeof subscriber.callback === 'function') {
@@ -464,7 +465,7 @@
// send the whole cached collection.
if (isSubscribedToCollectionKey) {
if (subscriber.waitForCollectionCallback) {
subscriber.callback(cachedCollection);
subscriber.callback(cachedCollectionWithoutNestedNulls);
continue;
}

@@ -473,7 +474,7 @@
const dataKeys = Object.keys(partialCollection ?? {});
for (let j = 0; j < dataKeys.length; j++) {
const dataKey = dataKeys[j];
subscriber.callback(cachedCollection[dataKey], dataKey);
subscriber.callback(cachedCollectionWithoutNestedNulls[dataKey], dataKey);
}
continue;
}
@@ -482,7 +483,7 @@
// notify them with the cached data for that key only.
if (isSubscribedToCollectionMemberKey) {
const subscriberCallback = subscriber.callback as DefaultConnectCallback<TKey>;
subscriberCallback(cachedCollection[subscriber.key], subscriber.key as TKey);
subscriberCallback(cachedCollectionWithoutNestedNulls[subscriber.key], subscriber.key as TKey);
continue;
}

@@ -621,13 +622,16 @@ function keyChanged<TKey extends OnyxKey>(
}
if (isCollectionKey(subscriber.key) && subscriber.waitForCollectionCallback) {
const cachedCollection = getCachedCollection(subscriber.key);
cachedCollection[key] = data;
subscriber.callback(cachedCollection);
const cachedCollectionWithoutNestedNulls = utils.removeNestedNullValues(cachedCollection) as Record<string, unknown>;

cachedCollectionWithoutNestedNulls[key] = data;
subscriber.callback(cachedCollectionWithoutNestedNulls);
continue;
}

const dataWithoutNestedNulls = utils.removeNestedNullValues(data);
const subscriberCallback = subscriber.callback as DefaultConnectCallback<TKey>;
subscriberCallback(data, key);
subscriberCallback(dataWithoutNestedNulls, key);
continue;
}

@@ -752,7 +756,8 @@ function sendDataToConnection<TKey extends OnyxKey>(mapping: Mapping<TKey>, val:
return;
}

(mapping as DefaultConnectOptions<TKey>).callback?.(val, matchedKey as TKey);
const valuesWithoutNestedNulls = utils.removeNestedNullValues(val);
(mapping as DefaultConnectOptions<TKey>).callback?.(valuesWithoutNestedNulls, matchedKey as TKey);
}

/**
@@ -963,11 +968,12 @@ type RemoveNullValuesOutput = {

/**
* Removes a key from storage if the value is null.
* Otherwise removes all nested null values in objects and returns the object
* Otherwise removes all nested null values in objects,
* if shouldRemoveNestedNulls is true and returns the object.
*
* @returns The value without null values and a boolean "wasRemoved", which indicates if the key got removed completely
*/
function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>): RemoveNullValuesOutput {
function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>, shouldRemoveNestedNulls = true): RemoveNullValuesOutput {
if (value === null) {
remove(key);
return {value, wasRemoved: true};
@@ -976,7 +982,7 @@ function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>): RemoveNullVa
// We can remove all null values in an object by merging it with itself
// utils.fastMerge recursively goes through the object and removes all null values
// Passing two identical objects as source and target to fastMerge will not change it, but only remove the null values
return {value: utils.removeNestedNullValues(value as Record<string, unknown>), wasRemoved: false};
return {value: shouldRemoveNestedNulls ? utils.removeNestedNullValues(value as Record<string, unknown>) : (value as Record<string, unknown>), wasRemoved: false};
}

/**
@@ -986,38 +992,34 @@ function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>): RemoveNullVa
* @return an array of key - value pairs <[key, value]>
*/
function prepareKeyValuePairsForStorage(data: Record<OnyxKey, OnyxValue<OnyxKey>>): Array<[OnyxKey, OnyxValue<OnyxKey>]> {
const keyValuePairs: Array<[OnyxKey, OnyxValue<OnyxKey>]> = [];

Object.entries(data).forEach(([key, value]) => {
const {value: valueAfterRemoving, wasRemoved} = removeNullValues(key, value);
function prepareKeyValuePairsForStorage(data: Record<OnyxKey, OnyxValue<OnyxKey>>, shouldRemoveNestedNulls: boolean): Array<[OnyxKey, OnyxValue<OnyxKey>]> {
return Object.entries(data).reduce<Array<[OnyxKey, OnyxValue<OnyxKey>]>>((pairs, [key, value]) => {
const {value: valueAfterRemoving, wasRemoved} = removeNullValues(key, value, shouldRemoveNestedNulls);

if (wasRemoved) {
return;
if (!wasRemoved) {
pairs.push([key, valueAfterRemoving]);
}

keyValuePairs.push([key, valueAfterRemoving]);
});

return keyValuePairs;
return pairs;
}, []);
}

/**
* Merges an array of changes with an existing value
*
* @param changes Array of changes that should be applied to the existing value
*/
function applyMerge(existingValue: OnyxValue<OnyxKey>, changes: Array<OnyxValue<OnyxKey>>, shouldRemoveNullObjectValues: boolean): OnyxValue<OnyxKey> {
function applyMerge(existingValue: OnyxValue<OnyxKey>, changes: Array<OnyxValue<OnyxKey>>, shouldRemoveNestedNulls: boolean): OnyxValue<OnyxKey> {
const lastChange = changes?.at(-1);

if (Array.isArray(lastChange)) {
return lastChange;
}

if (changes.some((change) => typeof change === 'object')) {
if (changes.some((change) => change && typeof change === 'object')) {
// Object values are then merged one after the other
return changes.reduce(
(modifiedData, change) => utils.fastMerge(modifiedData as Record<OnyxKey, unknown>, change as Record<OnyxKey, unknown>, shouldRemoveNullObjectValues),
(modifiedData, change) => utils.fastMerge(modifiedData as Record<OnyxKey, unknown>, change as Record<OnyxKey, unknown>, shouldRemoveNestedNulls),
existingValue || {},
);
}
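The common thread in the subscriber-facing changes above is that utils.removeNestedNullValues now runs before any callback is invoked, so connected consumers never observe nested nulls even though the cache and storage may still hold them as deletion markers. A hedged sketch of the before/after values a callback might receive (hypothetical data, not the actual utils implementation):

// Hypothetical cached value that still contains a nested null deletion marker.
const cachedValue = {login: 'user@test.com', pendingFields: {avatar: null}};
// Roughly what a default Onyx.connect callback would now be handed after
// utils.removeNestedNullValues strips the nested null key:
const callbackValue = {login: 'user@test.com', pendingFields: {}};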
6 changes: 1 addition & 5 deletions lib/storage/providers/MemoryOnlyProvider.ts
@@ -68,11 +68,7 @@ const provider: StorageProvider = {
multiSet(pairs) {
const setPromises = _.map(pairs, ([key, value]) => this.setItem(key, value));

return new Promise((resolve) => {
Promise.all(setPromises).then(() => {
resolve(undefined);
});
});
return Promise.all(setPromises).then(() => undefined);
},

/**
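The multiSet change above is a pure simplification: wrapping Promise.all in an extra Promise only to resolve with undefined is equivalent to mapping the fulfilled array away with .then. A small sketch of the equivalence (hypothetical stand-in promises):

const setPromises: Array<Promise<void>> = [Promise.resolve(), Promise.resolve()]; // stand-ins for setItem calls
// Previous shape: an extra wrapper Promise resolving with undefined.
const viaWrapper = new Promise<void>((resolve) => {
    Promise.all(setPromises).then(() => {
        resolve(undefined);
    });
});
// New shape: same Promise<void> contract, one less level of nesting.
const viaThen: Promise<void> = Promise.all(setPromises).then(() => undefined);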