Commit 894ba69

Merge branch 'main' into feature/useOnyx-type-improvements
2 parents: 96f8483 + b386eff

18 files changed: 27,814 additions and 77 deletions
Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+name: 'Validate Regression Test Output'
+description: 'Validates the output of regression tests and determines if a test action should fail.'
+inputs:
+  DURATION_DEVIATION_PERCENTAGE:
+    description: Allowable percentage deviation for the mean duration in regression test results.
+    required: true
+runs:
+  using: 'node20'
+  main: './index.js'

.github/actions/validateReassureOutput/index.js

Lines changed: 26708 additions & 0 deletions
Large diffs are not rendered by default.
Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
+/*
+ * NOTE: After changes to the file it needs to be compiled using [`ncc`](https://github.com/vercel/ncc)
+ * Example: ncc build -t validateReassureOutput.ts -o index.js
+ */
+
+import * as core from '@actions/core';
+import type {CompareResult, PerformanceEntry} from '@callstack/reassure-compare/src/types';
+import fs from 'fs';
+
+async function run() {
+    try {
+        const regressionOutput: CompareResult = JSON.parse(fs.readFileSync('.reassure/output.json', 'utf8'));
+        const durationDeviation = Number(core.getInput('DURATION_DEVIATION_PERCENTAGE', {required: true}));
+
+        if (regressionOutput.significant === undefined || regressionOutput.significant.length === 0) {
+            console.log('No significant data available. Exiting...');
+            return true;
+        }
+
+        console.log(`Processing ${regressionOutput.significant.length} measurements...`);
+
+        for (let i = 0; i < regressionOutput.significant.length; i++) {
+            const measurement = regressionOutput.significant[i];
+            const baseline: PerformanceEntry = measurement.baseline;
+            const current: PerformanceEntry = measurement.current;
+
+            console.log(`Processing measurement ${i + 1}: ${measurement.name}`);
+
+            const increasePercentage = ((current.meanDuration - baseline.meanDuration) / baseline.meanDuration) * 100;
+            if (increasePercentage > durationDeviation) {
+                core.setFailed(`Duration increase percentage exceeded the allowed deviation of ${durationDeviation}%. Current percentage: ${increasePercentage}%`);
+                break;
+            } else {
+                console.log(`Duration increase percentage ${increasePercentage}% is within the allowed deviation range of ${durationDeviation}%.`);
+            }
+        }
+
+        return true;
+    } catch (error) {
+        console.log('error: ', error);
+        core.setFailed(error.message);
+    }
+}
+
+run();
+
+export default run;
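
For reference, the validator above reads only a handful of fields from `.reassure/output.json`: each significant entry's name and the meanDuration of its baseline and current results. A minimal hand-written fixture that would exercise the failure path could look like the sketch below (the measurement name and numbers are invented, and the real Reassure CompareResult likely carries more fields than shown):

// Illustrative fixture only -- shaped after the fields the validator reads.
const exampleOutput = {
    significant: [
        {
            name: 'renders a report screen with 1k comments', // invented test name
            baseline: {meanDuration: 100}, // ms on the baseline branch
            current: {meanDuration: 130}, // ms on the PR branch: +30% exceeds a 20% allowance, so the action calls core.setFailed
        },
    ],
};

// Writing this object to .reassure/output.json would allow a local dry run of the action above.
console.log(JSON.stringify(exampleOutput, null, 4));
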
Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
+name: Reassure Performance Tests
+
+on:
+  pull_request:
+    types: [opened, synchronize]
+    branches-ignore: [staging, production]
+    paths-ignore: [tests/**, '**.md', '**.sh']
+jobs:
+  perf-tests:
+    if: ${{ github.actor != 'OSBotify' }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Setup Node
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: '.nvmrc'
+
+      - name: Set dummy git credentials
+        run: |
+          git config --global user.email "[email protected]"
+          git config --global user.name "Test"
+      - name: Run performance testing script
+        shell: bash
+        run: |
+          set -e
+          BASELINE_BRANCH=${BASELINE_BRANCH:="main"}
+          git fetch origin "$BASELINE_BRANCH" --no-tags --depth=1
+          git switch "$BASELINE_BRANCH"
+          npm install --force
+          npm install reassure
+          npx reassure --baseline
+          git switch --force --detach -
+          npm install --force
+          npm install reassure
+          npx reassure --branch
+      - name: Validate output.json
+        id: validateReassureOutput
+        uses: ./.github/actions/validateReassureOutput
+        with:
+          DURATION_DEVIATION_PERCENTAGE: 20

.gitignore

Lines changed: 8 additions & 1 deletion
@@ -16,4 +16,11 @@ dist/
 .github/OSBotify-private-key.asc

 # Published package
-*.tgz
+*.tgz
+
+# Yalc
+.yalc
+yalc.lock
+
+# Perf tests
+.reassure

lib/DevTools.ts

Lines changed: 2 additions & 2 deletions
@@ -39,9 +39,9 @@ class DevTools {
         try {
             // We don't want to augment the window type in a library code, so we use type assertion instead
             // eslint-disable-next-line no-underscore-dangle, @typescript-eslint/no-explicit-any
-            const reduxDevtools: ReduxDevtools = (window as any).__REDUX_DEVTOOLS_EXTENSION__;
+            const reduxDevtools: ReduxDevtools = typeof window === 'undefined' ? undefined : (window as any).__REDUX_DEVTOOLS_EXTENSION__;

-            if ((options && options.remote) || typeof window === 'undefined' || !reduxDevtools) {
+            if (options?.remote || !reduxDevtools) {
                 return;
             }

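Why the reordering above matters (my reading of the change, not stated in the commit): the old code dereferenced window before the typeof window === 'undefined' check ran, so in environments without a window global it only worked by throwing into the surrounding try/catch; the new ternary performs the typeof check first and lets the early return handle the rest. A minimal sketch of the pattern, with an illustrative function name:

// Sketch only -- not the library's API. `typeof` on an undeclared identifier evaluates to
// 'undefined' instead of throwing, so the check must run before any property access.
function readDevtoolsExtensionSafely(): unknown {
    if (typeof window === 'undefined') {
        return undefined; // Node, SSR, workers: no extension available
    }
    // eslint-disable-next-line no-underscore-dangle, @typescript-eslint/no-explicit-any
    return (window as any).__REDUX_DEVTOOLS_EXTENSION__;
}
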
lib/Onyx.ts

Lines changed: 45 additions & 7 deletions
@@ -24,6 +24,7 @@ import type {
     OnyxValue,
 } from './types';
 import OnyxUtils from './OnyxUtils';
+import logMessages from './logMessages';

 // Keeps track of the last connectionID that was used so we can keep incrementing it
 let lastConnectionID = 0;
@@ -209,6 +210,14 @@ function disconnect(connectionID: number, keyToRemoveFromEvictionBlocklist?: Ony
  * @param value value to store
  */
 function set<TKey extends OnyxKey>(key: TKey, value: NonUndefined<OnyxEntry<KeyValueMapping[TKey]>>): Promise<void> {
+    // check if the value is compatible with the existing value in the storage
+    const existingValue = cache.getValue(key, false);
+    const {isCompatible, existingValueType, newValueType} = utils.checkCompatibilityWithExistingValue(value, existingValue);
+    if (!isCompatible) {
+        Logger.logAlert(logMessages.incompatibleUpdateAlert(key, 'set', existingValueType, newValueType));
+        return Promise.resolve();
+    }
+
     // If the value is null, we remove the key from storage
     const {value: valueAfterRemoving, wasRemoved} = OnyxUtils.removeNullValues(key, value);
     const valueWithoutNullValues = valueAfterRemoving as OnyxValue<TKey>;
@@ -246,7 +255,7 @@ function set<TKey extends OnyxKey>(key: TKey, value: NonUndefined<OnyxEntry<KeyV
  * @param data object keyed by ONYXKEYS and the values to set
  */
 function multiSet(data: Partial<NullableKeyValueMapping>): Promise<void> {
-    const keyValuePairs = OnyxUtils.prepareKeyValuePairsForStorage(data);
+    const keyValuePairs = OnyxUtils.prepareKeyValuePairsForStorage(data, true);

     const updatePromises = keyValuePairs.map(([key, value]) => {
         const prevValue = cache.getValue(key, false);
@@ -286,7 +295,7 @@ function merge<TKey extends OnyxKey>(key: TKey, changes: NonUndefined<OnyxEntry<
     const mergeQueuePromise = OnyxUtils.getMergeQueuePromise();

     // Top-level undefined values are ignored
-    // Therefore we need to prevent adding them to the merge queue
+    // Therefore, we need to prevent adding them to the merge queue
     if (changes === undefined) {
         return mergeQueue[key] ? mergeQueuePromise[key] : Promise.resolve();
     }
@@ -308,7 +317,18 @@ function merge<TKey extends OnyxKey>(key: TKey, changes: NonUndefined<OnyxEntry<
         try {
             // We first only merge the changes, so we can provide these to the native implementation (SQLite uses only delta changes in "JSON_PATCH" to merge)
             // We don't want to remove null values from the "batchedDeltaChanges", because SQLite uses them to remove keys from storage natively.
-            const batchedDeltaChanges = OnyxUtils.applyMerge(undefined, mergeQueue[key], false);
+            const validChanges = mergeQueue[key].filter((change) => {
+                const {isCompatible, existingValueType, newValueType} = utils.checkCompatibilityWithExistingValue(change, existingValue);
+                if (!isCompatible) {
+                    Logger.logAlert(logMessages.incompatibleUpdateAlert(key, 'merge', existingValueType, newValueType));
+                }
+                return isCompatible;
+            });
+
+            if (!validChanges.length) {
+                return undefined;
+            }
+            const batchedDeltaChanges = OnyxUtils.applyMerge(undefined, validChanges, false);

             // Case (1): When there is no existing value in storage, we want to set the value instead of merge it.
             // Case (2): The presence of a top-level `null` in the merge queue instructs us to drop the whole existing value.
@@ -407,9 +427,17 @@ function mergeCollection<TKey extends CollectionKeyBase, TMap>(collectionKey: TK
     });

     const existingKeys = keys.filter((key) => persistedKeys.has(key));
+
+    const cachedCollectionForExistingKeys = OnyxUtils.getCachedCollection(collectionKey, existingKeys);
+
     const newKeys = keys.filter((key) => !persistedKeys.has(key));

     const existingKeyCollection = existingKeys.reduce((obj: NullableKeyValueMapping, key) => {
+        const {isCompatible, existingValueType, newValueType} = utils.checkCompatibilityWithExistingValue(mergedCollection[key], cachedCollectionForExistingKeys[key]);
+        if (!isCompatible) {
+            Logger.logAlert(logMessages.incompatibleUpdateAlert(key, 'mergeCollection', existingValueType, newValueType));
+            return obj;
+        }
         // eslint-disable-next-line no-param-reassign
         obj[key] = mergedCollection[key];
         return obj;
@@ -420,8 +448,15 @@ function mergeCollection<TKey extends CollectionKeyBase, TMap>(collectionKey: TK
         obj[key] = mergedCollection[key];
         return obj;
     }, {});
-    const keyValuePairsForExistingCollection = OnyxUtils.prepareKeyValuePairsForStorage(existingKeyCollection);
-    const keyValuePairsForNewCollection = OnyxUtils.prepareKeyValuePairsForStorage(newCollection);
+
+    // When (multi-)merging the values with the existing values in storage,
+    // we don't want to remove nested null values from the data that we pass to the storage layer,
+    // because the storage layer uses them to remove nested keys from storage natively.
+    const keyValuePairsForExistingCollection = OnyxUtils.prepareKeyValuePairsForStorage(existingKeyCollection, false);
+
+    // We can safely remove nested null values when using (multi-)set,
+    // because we will simply overwrite the existing values in storage.
+    const keyValuePairsForNewCollection = OnyxUtils.prepareKeyValuePairsForStorage(newCollection, true);

     const promises = [];

@@ -435,11 +470,14 @@ function mergeCollection<TKey extends CollectionKeyBase, TMap>(collectionKey: TK
         promises.push(Storage.multiSet(keyValuePairsForNewCollection));
     }

+    // finalMergedCollection contains all the keys that were merged, without the keys of incompatible updates
+    const finalMergedCollection = {...existingKeyCollection, ...newCollection};
+
     // Prefill cache if necessary by calling get() on any existing keys and then merge original data to cache
     // and update all subscribers
     const promiseUpdate = Promise.all(existingKeys.map(OnyxUtils.get)).then(() => {
-        cache.merge(mergedCollection);
-        return OnyxUtils.scheduleNotifyCollectionSubscribers(collectionKey, mergedCollection);
+        cache.merge(finalMergedCollection);
+        return OnyxUtils.scheduleNotifyCollectionSubscribers(collectionKey, finalMergedCollection);
     });

     return Promise.all(promises)
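
Taken together, the set, merge, and mergeCollection changes above drop any update that utils.checkCompatibilityWithExistingValue reports as incompatible with the value already stored under the key, logging an alert instead of corrupting the data. The helper itself is not part of this diff; the following is a hedged sketch of the kind of mismatch presumably being guarded against (the key and values are invented, and the array-versus-object example is an assumption about what the helper flags):

import Onyx from 'react-native-onyx';

// The existing value under the key is an object.
Onyx.set('report_1', {reportID: '1', lastMessageText: 'hi'});

// A later update with a different top-level shape (here, an array) is treated as incompatible:
// with this commit it is filtered out and an alert built by
// logMessages.incompatibleUpdateAlert(key, 'merge', existingValueType, newValueType) is logged,
// rather than the array replacing or being merged into the stored object.
Onyx.merge('report_1', ['hi', 'hello']);
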

lib/OnyxUtils.ts

Lines changed: 29 additions & 27 deletions
@@ -401,10 +401,10 @@ function addAllSafeEvictionKeysToRecentlyAccessedList(): Promise<void> {
     });
 }

-function getCachedCollection<TKey extends CollectionKeyBase>(collectionKey: TKey): NonNullable<OnyxCollection<KeyValueMapping[TKey]>> {
-    const collectionMemberKeys = Array.from(cache.getAllKeys()).filter((storedKey) => isCollectionMemberKey(collectionKey, storedKey));
+function getCachedCollection<TKey extends CollectionKeyBase>(collectionKey: TKey, collectionMemberKeys?: string[]): NonNullable<OnyxCollection<KeyValueMapping[TKey]>> {
+    const resolvedCollectionMemberKeys = collectionMemberKeys || Array.from(cache.getAllKeys()).filter((storedKey) => isCollectionMemberKey(collectionKey, storedKey));

-    return collectionMemberKeys.reduce((prev: NonNullable<OnyxCollection<KeyValueMapping[TKey]>>, key) => {
+    return resolvedCollectionMemberKeys.reduce((prev: NonNullable<OnyxCollection<KeyValueMapping[TKey]>>, key) => {
         const cachedValue = cache.getValue(key);
         if (!cachedValue) {
             return prev;
@@ -453,6 +453,7 @@ function keysChanged<TKey extends CollectionKeyBase>(
         // We prepare the "cached collection" which is the entire collection + the new partial data that
         // was merged in via mergeCollection().
         const cachedCollection = getCachedCollection(collectionKey);
+        const cachedCollectionWithoutNestedNulls = utils.removeNestedNullValues(cachedCollection) as Record<string, unknown>;

         // Regular Onyx.connect() subscriber found.
         if (typeof subscriber.callback === 'function') {
@@ -464,7 +465,7 @@ function keysChanged<TKey extends CollectionKeyBase>(
             // send the whole cached collection.
             if (isSubscribedToCollectionKey) {
                 if (subscriber.waitForCollectionCallback) {
-                    subscriber.callback(cachedCollection);
+                    subscriber.callback(cachedCollectionWithoutNestedNulls);
                     continue;
                 }

@@ -473,7 +474,7 @@ function keysChanged<TKey extends CollectionKeyBase>(
                 const dataKeys = Object.keys(partialCollection ?? {});
                 for (let j = 0; j < dataKeys.length; j++) {
                     const dataKey = dataKeys[j];
-                    subscriber.callback(cachedCollection[dataKey], dataKey);
+                    subscriber.callback(cachedCollectionWithoutNestedNulls[dataKey], dataKey);
                 }
                 continue;
             }
@@ -482,7 +483,7 @@ function keysChanged<TKey extends CollectionKeyBase>(
             // notify them with the cached data for that key only.
             if (isSubscribedToCollectionMemberKey) {
                 const subscriberCallback = subscriber.callback as DefaultConnectCallback<TKey>;
-                subscriberCallback(cachedCollection[subscriber.key], subscriber.key as TKey);
+                subscriberCallback(cachedCollectionWithoutNestedNulls[subscriber.key], subscriber.key as TKey);
                 continue;
             }

@@ -621,13 +622,16 @@ function keyChanged<TKey extends OnyxKey>(
             }
             if (isCollectionKey(subscriber.key) && subscriber.waitForCollectionCallback) {
                 const cachedCollection = getCachedCollection(subscriber.key);
-                cachedCollection[key] = data;
-                subscriber.callback(cachedCollection);
+                const cachedCollectionWithoutNestedNulls = utils.removeNestedNullValues(cachedCollection) as Record<string, unknown>;
+
+                cachedCollectionWithoutNestedNulls[key] = data;
+                subscriber.callback(cachedCollectionWithoutNestedNulls);
                 continue;
             }

+            const dataWithoutNestedNulls = utils.removeNestedNullValues(data);
             const subscriberCallback = subscriber.callback as DefaultConnectCallback<TKey>;
-            subscriberCallback(data, key);
+            subscriberCallback(dataWithoutNestedNulls, key);
             continue;
         }

@@ -752,7 +756,8 @@ function sendDataToConnection<TKey extends OnyxKey>(mapping: Mapping<TKey>, val:
         return;
     }

-    (mapping as DefaultConnectOptions<TKey>).callback?.(val, matchedKey as TKey);
+    const valuesWithoutNestedNulls = utils.removeNestedNullValues(val);
+    (mapping as DefaultConnectOptions<TKey>).callback?.(valuesWithoutNestedNulls, matchedKey as TKey);
 }

 /**
@@ -963,11 +968,12 @@ type RemoveNullValuesOutput = {

 /**
  * Removes a key from storage if the value is null.
- * Otherwise removes all nested null values in objects and returns the object
+ * Otherwise removes all nested null values in objects,
+ * if shouldRemoveNestedNulls is true and returns the object.
  *
  * @returns The value without null values and a boolean "wasRemoved", which indicates if the key got removed completely
  */
-function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>): RemoveNullValuesOutput {
+function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>, shouldRemoveNestedNulls = true): RemoveNullValuesOutput {
     if (value === null) {
         remove(key);
         return {value, wasRemoved: true};
@@ -976,7 +982,7 @@ function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>): RemoveNullVa
     // We can remove all null values in an object by merging it with itself
     // utils.fastMerge recursively goes through the object and removes all null values
     // Passing two identical objects as source and target to fastMerge will not change it, but only remove the null values
-    return {value: utils.removeNestedNullValues(value as Record<string, unknown>), wasRemoved: false};
+    return {value: shouldRemoveNestedNulls ? utils.removeNestedNullValues(value as Record<string, unknown>) : (value as Record<string, unknown>), wasRemoved: false};
 }

 /**
@@ -986,38 +992,34 @@ function removeNullValues(key: OnyxKey, value: OnyxValue<OnyxKey>): RemoveNullVa
  *
  * @return an array of key - value pairs <[key, value]>
  */
-function prepareKeyValuePairsForStorage(data: Record<OnyxKey, OnyxValue<OnyxKey>>): Array<[OnyxKey, OnyxValue<OnyxKey>]> {
-    const keyValuePairs: Array<[OnyxKey, OnyxValue<OnyxKey>]> = [];
-
-    Object.entries(data).forEach(([key, value]) => {
-        const {value: valueAfterRemoving, wasRemoved} = removeNullValues(key, value);
+function prepareKeyValuePairsForStorage(data: Record<OnyxKey, OnyxValue<OnyxKey>>, shouldRemoveNestedNulls: boolean): Array<[OnyxKey, OnyxValue<OnyxKey>]> {
+    return Object.entries(data).reduce<Array<[OnyxKey, OnyxValue<OnyxKey>]>>((pairs, [key, value]) => {
+        const {value: valueAfterRemoving, wasRemoved} = removeNullValues(key, value, shouldRemoveNestedNulls);

-        if (wasRemoved) {
-            return;
+        if (!wasRemoved) {
+            pairs.push([key, valueAfterRemoving]);
         }

-        keyValuePairs.push([key, valueAfterRemoving]);
-    });
-
-    return keyValuePairs;
+        return pairs;
+    }, []);
 }

 /**
  * Merges an array of changes with an existing value
  *
  * @param changes Array of changes that should be applied to the existing value
  */
-function applyMerge(existingValue: OnyxValue<OnyxKey>, changes: Array<OnyxValue<OnyxKey>>, shouldRemoveNullObjectValues: boolean): OnyxValue<OnyxKey> {
+function applyMerge(existingValue: OnyxValue<OnyxKey>, changes: Array<OnyxValue<OnyxKey>>, shouldRemoveNestedNulls: boolean): OnyxValue<OnyxKey> {
     const lastChange = changes?.at(-1);

     if (Array.isArray(lastChange)) {
         return lastChange;
     }

-    if (changes.some((change) => typeof change === 'object')) {
+    if (changes.some((change) => change && typeof change === 'object')) {
         // Object values are then merged one after the other
         return changes.reduce(
-            (modifiedData, change) => utils.fastMerge(modifiedData as Record<OnyxKey, unknown>, change as Record<OnyxKey, unknown>, shouldRemoveNullObjectValues),
+            (modifiedData, change) => utils.fastMerge(modifiedData as Record<OnyxKey, unknown>, change as Record<OnyxKey, unknown>, shouldRemoveNestedNulls),
             existingValue || {},
         );
     }
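
The thread running through the OnyxUtils.ts changes is who gets to see nested nulls: data handed to the storage layer for a merge keeps them (SQLite uses them as deletion markers, hence the new shouldRemoveNestedNulls arguments), while subscriber callbacks now always receive values passed through utils.removeNestedNullValues. A minimal sketch of that distinction, using a simplified stand-in for the helper (the real one is not part of this diff and, per the comments above, is built on utils.fastMerge):

// Simplified stand-in for utils.removeNestedNullValues, for illustration only.
function stripNestedNulls(value: Record<string, unknown>): Record<string, unknown> {
    return Object.fromEntries(
        Object.entries(value)
            .filter(([, v]) => v !== null)
            .map(([k, v]) => [k, typeof v === 'object' && v !== null && !Array.isArray(v) ? stripNestedNulls(v as Record<string, unknown>) : v]),
    );
}

// Invented record: `pendingAction: null` marks a sub-key that should be deleted.
const pendingWrite = {lastMessageText: 'hi', pendingAction: null};

// What a subscriber callback now receives after this commit:
console.log(stripNestedNulls(pendingWrite)); // -> { lastMessageText: 'hi' }

// What the storage layer is still handed when merging (prepareKeyValuePairsForStorage(..., false)),
// because the native layer uses the null to remove the nested key itself:
console.log(pendingWrite); // -> { lastMessageText: 'hi', pendingAction: null }
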

0 commit comments
