Skip to content

Commit

Permalink
Merge branch 'main' into feature/useOnyx-type-improvements
Browse files Browse the repository at this point in the history
  • Loading branch information
fabioh8010 committed Apr 26, 2024
2 parents 96f8483 + b386eff commit 894ba69
Show file tree
Hide file tree
Showing 18 changed files with 27,814 additions and 77 deletions.
9 changes: 9 additions & 0 deletions .github/actions/validateReassureOutput/action.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Composite-action metadata for the Reassure output validator.
# The compiled entry point (index.js) is produced from validateReassureOutput.ts via ncc.
name: 'Validate Regression Test Output'
description: 'Validates the output of regression tests and determines if a test action should fail.'
inputs:
  # Maximum allowed increase (in percent) of the mean duration before the step fails.
  DURATION_DEVIATION_PERCENTAGE:
    description: Allowable percentage deviation for the mean duration in regression test results.
    required: true
runs:
  using: 'node20'
  main: './index.js'
26,708 changes: 26,708 additions & 0 deletions .github/actions/validateReassureOutput/index.js

Large diffs are not rendered by default.

47 changes: 47 additions & 0 deletions .github/actions/validateReassureOutput/validateReassureOutput.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
/*
* NOTE: After changes to the file it needs to be compiled using [`ncc`](https://github.com/vercel/ncc)
* Example: ncc build -t validateReassureOutput.ts -o index.js
*/

import * as core from '@actions/core';
import type {CompareResult, PerformanceEntry} from '@callstack/reassure-compare/src/types';
import fs from 'fs';

/**
 * Reads the Reassure comparison results from `.reassure/output.json` and fails
 * the action (via `core.setFailed`) when any significant measurement's mean
 * duration regressed by more than the allowed percentage, taken from the
 * `DURATION_DEVIATION_PERCENTAGE` action input.
 *
 * @returns `true` when validation completed (even if it flagged a regression),
 *          `undefined` when an unexpected error occurred while validating.
 */
async function run(): Promise<boolean | undefined> {
    try {
        const regressionOutput: CompareResult = JSON.parse(fs.readFileSync('.reassure/output.json', 'utf8'));
        const durationDeviation = Number(core.getInput('DURATION_DEVIATION_PERCENTAGE', {required: true}));

        // Guard against a malformed input — comparing against NaN below would
        // silently make every measurement pass.
        if (Number.isNaN(durationDeviation)) {
            core.setFailed('DURATION_DEVIATION_PERCENTAGE input is not a valid number.');
            return undefined;
        }

        if (regressionOutput.significant === undefined || regressionOutput.significant.length === 0) {
            console.log('No significant data available. Exiting...');
            return true;
        }

        console.log(`Processing ${regressionOutput.significant.length} measurements...`);

        for (const [index, measurement] of regressionOutput.significant.entries()) {
            const baseline: PerformanceEntry = measurement.baseline;
            const current: PerformanceEntry = measurement.current;

            console.log(`Processing measurement ${index + 1}: ${measurement.name}`);

            const increasePercentage = ((current.meanDuration - baseline.meanDuration) / baseline.meanDuration) * 100;
            if (increasePercentage > durationDeviation) {
                // Fail fast on the first regression that exceeds the allowed deviation.
                core.setFailed(`Duration increase percentage exceeded the allowed deviation of ${durationDeviation}%. Current percentage: ${increasePercentage}%`);
                break;
            } else {
                console.log(`Duration increase percentage ${increasePercentage}% is within the allowed deviation range of ${durationDeviation}%.`);
            }
        }

        return true;
    } catch (error) {
        // Under strict TS the catch variable is `unknown`; narrow it before
        // reading `.message` (the original `error.message` does not compile).
        console.log('error: ', error);
        core.setFailed(error instanceof Error ? error.message : String(error));
        return undefined;
    }
}

run();

export default run;
45 changes: 45 additions & 0 deletions .github/workflows/reassurePerfTests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
# Runs Reassure performance regression tests on pull requests and fails the
# build when mean durations regress beyond the allowed deviation percentage.
name: Reassure Performance Tests

on:
  pull_request:
    types: [opened, synchronize]
    branches-ignore: [staging, production]
    paths-ignore: [tests/**, '**.md', '**.sh']
jobs:
  perf-tests:
    # Skip runs triggered by the OSBotify automation account.
    if: ${{ github.actor != 'OSBotify' }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          # Full history is needed so the script below can switch between the
          # baseline branch and the PR branch.
          fetch-depth: 0

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'

      - name: Set dummy git credentials
        run: |
          git config --global user.email "[email protected]"
          git config --global user.name "Test"
      - name: Run performance testing script
        shell: bash
        # Measure the baseline branch first, then re-measure on the PR branch
        # so Reassure can compare the two runs.
        run: |
          set -e
          BASELINE_BRANCH=${BASELINE_BRANCH:="main"}
          git fetch origin "$BASELINE_BRANCH" --no-tags --depth=1
          git switch "$BASELINE_BRANCH"
          npm install --force
          npm install reassure
          npx reassure --baseline
          git switch --force --detach -
          npm install --force
          npm install reassure
          npx reassure --branch
      - name: Validate output.json
        id: validateReassureOutput
        uses: ./.github/actions/validateReassureOutput
        with:
          # Fail the job if any mean duration regressed by more than 20%.
          DURATION_DEVIATION_PERCENTAGE: 20
9 changes: 8 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,11 @@ dist/
.github/OSBotify-private-key.asc

# Published package
*.tgz
*.tgz

# Yalc
.yalc
yalc.lock

# Perf tests
.reassure
4 changes: 2 additions & 2 deletions lib/DevTools.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,9 +39,9 @@ class DevTools {
try {
// We don't want to augment the window type in a library code, so we use type assertion instead
// eslint-disable-next-line no-underscore-dangle, @typescript-eslint/no-explicit-any
const reduxDevtools: ReduxDevtools = (window as any).__REDUX_DEVTOOLS_EXTENSION__;
const reduxDevtools: ReduxDevtools = typeof window === 'undefined' ? undefined : (window as any).__REDUX_DEVTOOLS_EXTENSION__;

if ((options && options.remote) || typeof window === 'undefined' || !reduxDevtools) {
if (options?.remote || !reduxDevtools) {
return;
}

Expand Down
52 changes: 45 additions & 7 deletions lib/Onyx.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ import type {
OnyxValue,
} from './types';
import OnyxUtils from './OnyxUtils';
import logMessages from './logMessages';

// Keeps track of the last connectionID that was used so we can keep incrementing it
let lastConnectionID = 0;
Expand Down Expand Up @@ -209,6 +210,14 @@ function disconnect(connectionID: number, keyToRemoveFromEvictionBlocklist?: Ony
* @param value value to store
*/
function set<TKey extends OnyxKey>(key: TKey, value: NonUndefined<OnyxEntry<KeyValueMapping[TKey]>>): Promise<void> {
// check if the value is compatible with the existing value in the storage
const existingValue = cache.getValue(key, false);
const {isCompatible, existingValueType, newValueType} = utils.checkCompatibilityWithExistingValue(value, existingValue);
if (!isCompatible) {
Logger.logAlert(logMessages.incompatibleUpdateAlert(key, 'set', existingValueType, newValueType));
return Promise.resolve();
}

// If the value is null, we remove the key from storage
const {value: valueAfterRemoving, wasRemoved} = OnyxUtils.removeNullValues(key, value);
const valueWithoutNullValues = valueAfterRemoving as OnyxValue<TKey>;
Expand Down Expand Up @@ -246,7 +255,7 @@ function set<TKey extends OnyxKey>(key: TKey, value: NonUndefined<OnyxEntry<KeyV
* @param data object keyed by ONYXKEYS and the values to set
*/
function multiSet(data: Partial<NullableKeyValueMapping>): Promise<void> {
const keyValuePairs = OnyxUtils.prepareKeyValuePairsForStorage(data);
const keyValuePairs = OnyxUtils.prepareKeyValuePairsForStorage(data, true);

const updatePromises = keyValuePairs.map(([key, value]) => {
const prevValue = cache.getValue(key, false);
Expand Down Expand Up @@ -286,7 +295,7 @@ function merge<TKey extends OnyxKey>(key: TKey, changes: NonUndefined<OnyxEntry<
const mergeQueuePromise = OnyxUtils.getMergeQueuePromise();

// Top-level undefined values are ignored
// Therefore we need to prevent adding them to the merge queue
// Therefore, we need to prevent adding them to the merge queue
if (changes === undefined) {
return mergeQueue[key] ? mergeQueuePromise[key] : Promise.resolve();
}
Expand All @@ -308,7 +317,18 @@ function merge<TKey extends OnyxKey>(key: TKey, changes: NonUndefined<OnyxEntry<
try {
// We first only merge the changes, so we can provide these to the native implementation (SQLite uses only delta changes in "JSON_PATCH" to merge)
// We don't want to remove null values from the "batchedDeltaChanges", because SQLite uses them to remove keys from storage natively.
const batchedDeltaChanges = OnyxUtils.applyMerge(undefined, mergeQueue[key], false);
const validChanges = mergeQueue[key].filter((change) => {
const {isCompatible, existingValueType, newValueType} = utils.checkCompatibilityWithExistingValue(change, existingValue);
if (!isCompatible) {
Logger.logAlert(logMessages.incompatibleUpdateAlert(key, 'merge', existingValueType, newValueType));
}
return isCompatible;
});

if (!validChanges.length) {
return undefined;
}
const batchedDeltaChanges = OnyxUtils.applyMerge(undefined, validChanges, false);

// Case (1): When there is no existing value in storage, we want to set the value instead of merge it.
// Case (2): The presence of a top-level `null` in the merge queue instructs us to drop the whole existing value.
Expand Down Expand Up @@ -407,9 +427,17 @@ function mergeCollection<TKey extends CollectionKeyBase, TMap>(collectionKey: TK
});

const existingKeys = keys.filter((key) => persistedKeys.has(key));

const cachedCollectionForExistingKeys = OnyxUtils.getCachedCollection(collectionKey, existingKeys);

const newKeys = keys.filter((key) => !persistedKeys.has(key));

const existingKeyCollection = existingKeys.reduce((obj: NullableKeyValueMapping, key) => {
const {isCompatible, existingValueType, newValueType} = utils.checkCompatibilityWithExistingValue(mergedCollection[key], cachedCollectionForExistingKeys[key]);
if (!isCompatible) {
Logger.logAlert(logMessages.incompatibleUpdateAlert(key, 'mergeCollection', existingValueType, newValueType));
return obj;
}
// eslint-disable-next-line no-param-reassign
obj[key] = mergedCollection[key];
return obj;
Expand All @@ -420,8 +448,15 @@ function mergeCollection<TKey extends CollectionKeyBase, TMap>(collectionKey: TK
obj[key] = mergedCollection[key];
return obj;
}, {});
const keyValuePairsForExistingCollection = OnyxUtils.prepareKeyValuePairsForStorage(existingKeyCollection);
const keyValuePairsForNewCollection = OnyxUtils.prepareKeyValuePairsForStorage(newCollection);

// When (multi-)merging the values with the existing values in storage,
// we don't want to remove nested null values from the data that we pass to the storage layer,
// because the storage layer uses them to remove nested keys from storage natively.
const keyValuePairsForExistingCollection = OnyxUtils.prepareKeyValuePairsForStorage(existingKeyCollection, false);

// We can safely remove nested null values when using (multi-)set,
// because we will simply overwrite the existing values in storage.
const keyValuePairsForNewCollection = OnyxUtils.prepareKeyValuePairsForStorage(newCollection, true);

const promises = [];

Expand All @@ -435,11 +470,14 @@ function mergeCollection<TKey extends CollectionKeyBase, TMap>(collectionKey: TK
promises.push(Storage.multiSet(keyValuePairsForNewCollection));
}

// finalMergedCollection contains all the keys that were merged, without the keys of incompatible updates
const finalMergedCollection = {...existingKeyCollection, ...newCollection};

// Prefill cache if necessary by calling get() on any existing keys and then merge original data to cache
// and update all subscribers
const promiseUpdate = Promise.all(existingKeys.map(OnyxUtils.get)).then(() => {
cache.merge(mergedCollection);
return OnyxUtils.scheduleNotifyCollectionSubscribers(collectionKey, mergedCollection);
cache.merge(finalMergedCollection);
return OnyxUtils.scheduleNotifyCollectionSubscribers(collectionKey, finalMergedCollection);
});

return Promise.all(promises)
Expand Down
56 changes: 29 additions & 27 deletions lib/OnyxUtils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -401,10 +401,10 @@ function addAllSafeEvictionKeysToRecentlyAccessedList(): Promise<void> {
});
}

function getCachedCollection<TKey extends CollectionKeyBase>(collectionKey: TKey): NonNullable<OnyxCollection<KeyValueMapping[TKey]>> {
const collectionMemberKeys = Array.from(cache.getAllKeys()).filter((storedKey) => isCollectionMemberKey(collectionKey, storedKey));
function getCachedCollection<TKey extends CollectionKeyBase>(collectionKey: TKey, collectionMemberKeys?: string[]): NonNullable<OnyxCollection<KeyValueMapping[TKey]>> {
const resolvedCollectionMemberKeys = collectionMemberKeys || Array.from(cache.getAllKeys()).filter((storedKey) => isCollectionMemberKey(collectionKey, storedKey));

return collectionMemberKeys.reduce((prev: NonNullable<OnyxCollection<KeyValueMapping[TKey]>>, key) => {
return resolvedCollectionMemberKeys.reduce((prev: NonNullable<OnyxCollection<KeyValueMapping[TKey]>>, key) => {
const cachedValue = cache.getValue(key);
if (!cachedValue) {
return prev;
Expand Down Expand Up @@ -453,6 +453,7 @@ function keysChanged<TKey extends CollectionKeyBase>(
// We prepare the "cached collection" which is the entire collection + the new partial data that
// was merged in via mergeCollection().
const cachedCollection = getCachedCollection(collectionKey);
const cachedCollectionWithoutNestedNulls = utils.removeNestedNullValues(cachedCollection) as Record<string, unknown>;

// Regular Onyx.connect() subscriber found.
if (typeof subscriber.callback === 'function') {
Expand All @@ -464,7 +465,7 @@ function keysChanged<TKey extends CollectionKeyBase>(
// send the whole cached collection.
if (isSubscribedToCollectionKey) {
if (subscriber.waitForCollectionCallback) {
subscriber.callback(cachedCollection);
subscriber.callback(cachedCollectionWithoutNestedNulls);
continue;
}

Expand All @@ -473,7 +474,7 @@ function keysChanged<TKey extends CollectionKeyBase>(
const dataKeys = Object.keys(partialCollection ?? {});
for (let j = 0; j < dataKeys.length; j++) {
const dataKey = dataKeys[j];
subscriber.callback(cachedCollection[dataKey], dataKey);
subscriber.callback(cachedCollectionWithoutNestedNulls[dataKey], dataKey);
}
continue;
}
Expand All @@ -482,7 +483,7 @@ function keysChanged<TKey extends CollectionKeyBase>(
// notify them with the cached data for that key only.
if (isSubscribedToCollectionMemberKey) {
const subscriberCallback = subscriber.callback as DefaultConnectCallback<TKey>;
subscriberCallback(cachedCollection[subscriber.key], subscriber.key as TKey);
subscriberCallback(cachedCollectionWithoutNestedNulls[subscriber.key], subscriber.key as TKey);
continue;
}

Expand Down Expand Up @@ -621,13 +622,16 @@ function keyChanged<TKey extends OnyxKey>(
}
if (isCollectionKey(subscriber.key) && subscriber.waitForCollectionCallback) {
const cachedCollection = getCachedCollection(subscriber.key);
cachedCollection[key] = data;
subscriber.callback(cachedCollection);
const cachedCollectionWithoutNestedNulls = utils.removeNestedNullValues(cachedCollection) as Record<string, unknown>;

cachedCollectionWithoutNestedNulls[key] = data;
subscriber.callback(cachedCollectionWithoutNestedNulls);
continue;
}

const dataWithoutNestedNulls = utils.removeNestedNullValues(data);
const subscriberCallback = subscriber.callback as DefaultConnectCallback<TKey>;
subscriberCallback(data, key);
subscriberCallback(dataWithoutNestedNulls, key);
continue;
}

Expand Down Expand Up @@ -752,7 +756,8 @@ function sendDataToConnection<TKey extends OnyxKey>(mapping: Mapping<TKey>, val:
return;
}

(mapping as DefaultConnectOptions<TKey>).callback?.(val, matchedKey as TKey);
const valuesWithoutNestedNulls = utils.removeNestedNullValues(val);
(mapping as DefaultConnectOptions<TKey>).callback?.(valuesWithoutNestedNulls, matchedKey as TKey);
}

/**
Expand Down Expand Up @@ -963,11 +968,12 @@ type RemoveNullValuesOutput = {

/**
* Removes a key from storage if the value is null.
* Otherwise removes all nested null values in objects and returns the object
* Otherwise removes all nested null values in objects,
* if shouldRemoveNestedNulls is true and returns the object.
*
* @returns The value without null values and a boolean "wasRemoved", which indicates if the key got removed completely
*/
function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>): RemoveNullValuesOutput {
function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>, shouldRemoveNestedNulls = true): RemoveNullValuesOutput {
if (value === null) {
remove(key);
return {value, wasRemoved: true};
Expand All @@ -976,7 +982,7 @@ function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>): RemoveNullVa
// We can remove all null values in an object by merging it with itself
// utils.fastMerge recursively goes through the object and removes all null values
// Passing two identical objects as source and target to fastMerge will not change it, but only remove the null values
return {value: utils.removeNestedNullValues(value as Record<string, unknown>), wasRemoved: false};
return {value: shouldRemoveNestedNulls ? utils.removeNestedNullValues(value as Record<string, unknown>) : (value as Record<string, unknown>), wasRemoved: false};
}

/**
Expand All @@ -986,38 +992,34 @@ function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>): RemoveNullVa
* @return an array of key - value pairs <[key, value]>
*/
function prepareKeyValuePairsForStorage(data: Record<OnyxKey, OnyxValue<OnyxKey>>): Array<[OnyxKey, OnyxValue<OnyxKey>]> {
const keyValuePairs: Array<[OnyxKey, OnyxValue<OnyxKey>]> = [];

Object.entries(data).forEach(([key, value]) => {
const {value: valueAfterRemoving, wasRemoved} = removeNullValues(key, value);
function prepareKeyValuePairsForStorage(data: Record<OnyxKey, OnyxValue<OnyxKey>>, shouldRemoveNestedNulls: boolean): Array<[OnyxKey, OnyxValue<OnyxKey>]> {
return Object.entries(data).reduce<Array<[OnyxKey, OnyxValue<OnyxKey>]>>((pairs, [key, value]) => {
const {value: valueAfterRemoving, wasRemoved} = removeNullValues(key, value, shouldRemoveNestedNulls);

if (wasRemoved) {
return;
if (!wasRemoved) {
pairs.push([key, valueAfterRemoving]);
}

keyValuePairs.push([key, valueAfterRemoving]);
});

return keyValuePairs;
return pairs;
}, []);
}

/**
* Merges an array of changes with an existing value
*
* @param changes Array of changes that should be applied to the existing value
*/
function applyMerge(existingValue: OnyxValue<OnyxKey>, changes: Array<OnyxValue<OnyxKey>>, shouldRemoveNullObjectValues: boolean): OnyxValue<OnyxKey> {
function applyMerge(existingValue: OnyxValue<OnyxKey>, changes: Array<OnyxValue<OnyxKey>>, shouldRemoveNestedNulls: boolean): OnyxValue<OnyxKey> {
const lastChange = changes?.at(-1);

if (Array.isArray(lastChange)) {
return lastChange;
}

if (changes.some((change) => typeof change === 'object')) {
if (changes.some((change) => change && typeof change === 'object')) {
// Object values are then merged one after the other
return changes.reduce(
(modifiedData, change) => utils.fastMerge(modifiedData as Record<OnyxKey, unknown>, change as Record<OnyxKey, unknown>, shouldRemoveNullObjectValues),
(modifiedData, change) => utils.fastMerge(modifiedData as Record<OnyxKey, unknown>, change as Record<OnyxKey, unknown>, shouldRemoveNestedNulls),
existingValue || {},
);
}
Expand Down
Loading

0 comments on commit 894ba69

Please sign in to comment.