From 230ef572840abc15bbb8ba25da96ae5057c1fa33 Mon Sep 17 00:00:00 2001 From: Pavlo Tsimura Date: Wed, 19 Jun 2024 17:38:02 +0200 Subject: [PATCH] TEMP: add dist --- dist/DevTools.d.ts | 42 + dist/DevTools.js | 69 ++ dist/Logger.d.ts | 22 + dist/Logger.js | 33 + dist/Onyx.d.ts | 137 +++ dist/Onyx.js | 697 +++++++++++++ dist/OnyxCache.d.ts | 94 ++ dist/OnyxCache.js | 180 ++++ dist/OnyxUtils.d.ts | 251 +++++ dist/OnyxUtils.js | 973 ++++++++++++++++++ dist/PerformanceUtils.d.ts | 8 + dist/PerformanceUtils.js | 53 + dist/Str.d.ts | 17 + dist/Str.js | 18 + dist/batch.d.ts | 2 + dist/batch.js | 4 + dist/batch.native.d.ts | 2 + dist/batch.native.js | 4 + dist/createDeferredTask.d.ts | 11 + dist/createDeferredTask.js | 15 + dist/index.d.ts | 10 + dist/index.js | 12 + dist/logMessages.d.ts | 4 + dist/logMessages.js | 8 + dist/storage/InstanceSync/index.d.ts | 16 + dist/storage/InstanceSync/index.js | 22 + dist/storage/InstanceSync/index.web.d.ts | 29 + dist/storage/InstanceSync/index.web.js | 61 ++ dist/storage/__mocks__/index.d.ts | 25 + dist/storage/__mocks__/index.js | 47 + dist/storage/index.d.ts | 6 + dist/storage/index.js | 187 ++++ dist/storage/platforms/index.d.ts | 2 + dist/storage/platforms/index.js | 7 + dist/storage/platforms/index.native.d.ts | 2 + dist/storage/platforms/index.native.js | 7 + dist/storage/providers/IDBKeyValProvider.d.ts | 3 + dist/storage/providers/IDBKeyValProvider.js | 91 ++ .../storage/providers/MemoryOnlyProvider.d.ts | 9 + dist/storage/providers/MemoryOnlyProvider.js | 120 +++ dist/storage/providers/NoopProvider.d.ts | 3 + dist/storage/providers/NoopProvider.js | 83 ++ dist/storage/providers/SQLiteProvider.d.ts | 3 + dist/storage/providers/SQLiteProvider.js | 101 ++ dist/storage/providers/types.d.ts | 72 ++ dist/storage/providers/types.js | 2 + dist/types.d.ts | 375 +++++++ dist/types.js | 2 + dist/useLiveRef.d.ts | 7 + dist/useLiveRef.js | 13 + dist/useOnyx.d.ts | 41 + dist/useOnyx.js | 126 +++ dist/usePrevious.d.ts | 5 + dist/usePrevious.js | 14 + dist/utils.d.ts | 49 + dist/utils.js | 175 ++++ dist/withOnyx.d.ts | 141 +++ dist/withOnyx.js | 325 ++++++ dist/withOnyx/index.d.ts | 15 + dist/withOnyx/index.js | 322 ++++++ dist/withOnyx/types.d.ts | 141 +++ dist/withOnyx/types.js | 2 + lib/Onyx.ts | 20 +- 63 files changed, 5327 insertions(+), 10 deletions(-) create mode 100644 dist/DevTools.d.ts create mode 100644 dist/DevTools.js create mode 100644 dist/Logger.d.ts create mode 100644 dist/Logger.js create mode 100644 dist/Onyx.d.ts create mode 100644 dist/Onyx.js create mode 100644 dist/OnyxCache.d.ts create mode 100644 dist/OnyxCache.js create mode 100644 dist/OnyxUtils.d.ts create mode 100644 dist/OnyxUtils.js create mode 100644 dist/PerformanceUtils.d.ts create mode 100644 dist/PerformanceUtils.js create mode 100644 dist/Str.d.ts create mode 100644 dist/Str.js create mode 100644 dist/batch.d.ts create mode 100644 dist/batch.js create mode 100644 dist/batch.native.d.ts create mode 100644 dist/batch.native.js create mode 100644 dist/createDeferredTask.d.ts create mode 100644 dist/createDeferredTask.js create mode 100644 dist/index.d.ts create mode 100644 dist/index.js create mode 100644 dist/logMessages.d.ts create mode 100644 dist/logMessages.js create mode 100644 dist/storage/InstanceSync/index.d.ts create mode 100644 dist/storage/InstanceSync/index.js create mode 100644 dist/storage/InstanceSync/index.web.d.ts create mode 100644 dist/storage/InstanceSync/index.web.js create mode 100644 dist/storage/__mocks__/index.d.ts create mode 100644 
dist/storage/__mocks__/index.js create mode 100644 dist/storage/index.d.ts create mode 100644 dist/storage/index.js create mode 100644 dist/storage/platforms/index.d.ts create mode 100644 dist/storage/platforms/index.js create mode 100644 dist/storage/platforms/index.native.d.ts create mode 100644 dist/storage/platforms/index.native.js create mode 100644 dist/storage/providers/IDBKeyValProvider.d.ts create mode 100644 dist/storage/providers/IDBKeyValProvider.js create mode 100644 dist/storage/providers/MemoryOnlyProvider.d.ts create mode 100644 dist/storage/providers/MemoryOnlyProvider.js create mode 100644 dist/storage/providers/NoopProvider.d.ts create mode 100644 dist/storage/providers/NoopProvider.js create mode 100644 dist/storage/providers/SQLiteProvider.d.ts create mode 100644 dist/storage/providers/SQLiteProvider.js create mode 100644 dist/storage/providers/types.d.ts create mode 100644 dist/storage/providers/types.js create mode 100644 dist/types.d.ts create mode 100644 dist/types.js create mode 100644 dist/useLiveRef.d.ts create mode 100644 dist/useLiveRef.js create mode 100644 dist/useOnyx.d.ts create mode 100644 dist/useOnyx.js create mode 100644 dist/usePrevious.d.ts create mode 100644 dist/usePrevious.js create mode 100644 dist/utils.d.ts create mode 100644 dist/utils.js create mode 100644 dist/withOnyx.d.ts create mode 100644 dist/withOnyx.js create mode 100644 dist/withOnyx/index.d.ts create mode 100644 dist/withOnyx/index.js create mode 100644 dist/withOnyx/types.d.ts create mode 100644 dist/withOnyx/types.js diff --git a/dist/DevTools.d.ts b/dist/DevTools.d.ts new file mode 100644 index 00000000..b54d9083 --- /dev/null +++ b/dist/DevTools.d.ts @@ -0,0 +1,42 @@ +type DevtoolsOptions = { + maxAge?: number; + name?: string; + postTimelineUpdate?: () => void; + preAction?: () => void; + logTrace?: boolean; + remote?: boolean; +}; +type DevtoolsSubscriber = (message: { + type: string; + payload: unknown; + state: string; +}) => void; +type DevtoolsConnection = { + send(data: Record, state: Record): void; + init(state: Record): void; + unsubscribe(): void; + subscribe(cb: DevtoolsSubscriber): () => void; +}; +declare class DevTools { + private remoteDev?; + private state; + private defaultState; + constructor(); + connectViaExtension(options?: DevtoolsOptions): DevtoolsConnection | undefined; + /** + * Registers an action that updated the current state of the storage + * + * @param type - name of the action + * @param payload - data written to the storage + * @param stateChanges - partial state that got updated after the changes + */ + registerAction(type: string, payload: unknown, stateChanges?: Record | null): void; + initState(initialState?: Record): void; + /** + * This clears the internal state of the DevTools, preserving the keys included in `keysToPreserve` + */ + clearState(keysToPreserve?: string[]): void; +} +declare const _default: DevTools; +export default _default; +export type { DevtoolsConnection }; diff --git a/dist/DevTools.js b/dist/DevTools.js new file mode 100644 index 00000000..0e5193aa --- /dev/null +++ b/dist/DevTools.js @@ -0,0 +1,69 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const ERROR_LABEL = 'Onyx DevTools - Error: '; +class DevTools { + constructor() { + this.remoteDev = this.connectViaExtension(); + this.state = {}; + this.defaultState = {}; + } + connectViaExtension(options) { + try { + // We don't want to augment the window type in a library code, so we use type assertion instead + // 
eslint-disable-next-line no-underscore-dangle, @typescript-eslint/no-explicit-any + const reduxDevtools = typeof window === 'undefined' ? undefined : window.__REDUX_DEVTOOLS_EXTENSION__; + if ((options === null || options === void 0 ? void 0 : options.remote) || !reduxDevtools) { + return; + } + return reduxDevtools.connect(options); + } + catch (e) { + console.error(ERROR_LABEL, e); + } + } + /** + * Registers an action that updated the current state of the storage + * + * @param type - name of the action + * @param payload - data written to the storage + * @param stateChanges - partial state that got updated after the changes + */ + registerAction(type, payload, stateChanges = {}) { + try { + if (!this.remoteDev) { + return; + } + const newState = Object.assign(Object.assign({}, this.state), stateChanges); + this.remoteDev.send({ type, payload }, newState); + this.state = newState; + } + catch (e) { + console.error(ERROR_LABEL, e); + } + } + initState(initialState = {}) { + try { + if (!this.remoteDev) { + return; + } + this.remoteDev.init(initialState); + this.state = initialState; + this.defaultState = initialState; + } + catch (e) { + console.error(ERROR_LABEL, e); + } + } + /** + * This clears the internal state of the DevTools, preserving the keys included in `keysToPreserve` + */ + clearState(keysToPreserve = []) { + const newState = Object.entries(this.state).reduce((obj, [key, value]) => { + // eslint-disable-next-line no-param-reassign + obj[key] = keysToPreserve.includes(key) ? value : this.defaultState[key]; + return obj; + }, {}); + this.registerAction('CLEAR', undefined, newState); + } +} +exports.default = new DevTools(); diff --git a/dist/Logger.d.ts b/dist/Logger.d.ts new file mode 100644 index 00000000..744868ba --- /dev/null +++ b/dist/Logger.d.ts @@ -0,0 +1,22 @@ +type LogData = { + message: string; + level: 'alert' | 'info' | 'hmmm'; +}; +type LoggerCallback = (data: LogData) => void; +/** + * Register the logging callback + */ +declare function registerLogger(callback: LoggerCallback): void; +/** + * Send an alert message to the logger + */ +declare function logAlert(message: string): void; +/** + * Send an info message to the logger + */ +declare function logInfo(message: string): void; +/** + * Send an hmmm message to the logger + */ +declare function logHmmm(message: string): void; +export { registerLogger, logInfo, logAlert, logHmmm }; diff --git a/dist/Logger.js b/dist/Logger.js new file mode 100644 index 00000000..509a1b1c --- /dev/null +++ b/dist/Logger.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.logHmmm = exports.logAlert = exports.logInfo = exports.registerLogger = void 0; +// eslint-disable-next-line @typescript-eslint/no-empty-function +let logger = () => { }; +/** + * Register the logging callback + */ +function registerLogger(callback) { + logger = callback; +} +exports.registerLogger = registerLogger; +/** + * Send an alert message to the logger + */ +function logAlert(message) { + logger({ message: `[Onyx] ${message}`, level: 'alert' }); +} +exports.logAlert = logAlert; +/** + * Send an info message to the logger + */ +function logInfo(message) { + logger({ message: `[Onyx] ${message}`, level: 'info' }); +} +exports.logInfo = logInfo; +/** + * Send an hmmm message to the logger + */ +function logHmmm(message) { + logger({ message: `[Onyx] ${message}`, level: 'hmmm' }); +} +exports.logHmmm = logHmmm; diff --git a/dist/Onyx.d.ts b/dist/Onyx.d.ts new file mode 100644 index 00000000..c44c6b7a 
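// Illustrative sketch, not part of this patch: wiring up the logging callback
// declared in dist/Logger.d.ts above. registerLogger is also re-exported on the
// Onyx object (see dist/Onyx.d.ts below); the package name used in the import is
// an assumption.
import Onyx from 'react-native-onyx';

Onyx.registerLogger(({level, message}) => {
    // level is one of 'alert' | 'info' | 'hmmm' per the LogData type above.
    if (level === 'alert') {
        console.error(message);
        return;
    }
    console.log(`[${level}] ${message}`);
});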
--- /dev/null +++ b/dist/Onyx.d.ts @@ -0,0 +1,137 @@ +import * as Logger from './Logger'; +import type { CollectionKeyBase, ConnectOptions, InitOptions, Mapping, OnyxKey, OnyxMergeCollectionInput, OnyxMergeInput, OnyxMultiSetInput, OnyxSetInput, OnyxUpdate } from './types'; +/** Initialize the store with actions and listening for storage events */ +declare function init({ keys, initialKeyStates, safeEvictionKeys, maxCachedKeysCount, shouldSyncMultipleInstances, debugSetState, }: InitOptions): void; +/** + * Subscribes a react component's state directly to a store key + * + * @example + * const connectionID = Onyx.connect({ + * key: ONYXKEYS.SESSION, + * callback: onSessionChange, + * }); + * + * @param mapping the mapping information to connect Onyx to the components state + * @param mapping.key ONYXKEY to subscribe to + * @param [mapping.statePropertyName] the name of the property in the state to connect the data to + * @param [mapping.withOnyxInstance] whose setState() method will be called with any changed data + * This is used by React components to connect to Onyx + * @param [mapping.callback] a method that will be called with changed data + * This is used by any non-React code to connect to Onyx + * @param [mapping.initWithStoredValues] If set to false, then no data will be prefilled into the + * component + * @param [mapping.waitForCollectionCallback] If set to true, it will return the entire collection to the callback as a single object + * @param [mapping.selector] THIS PARAM IS ONLY USED WITH withOnyx(). If included, this will be used to subscribe to a subset of an Onyx key's data. + * The sourceData and withOnyx state are passed to the selector and should return the simplified data. Using this setting on `withOnyx` can have very positive + * performance benefits because the component will only re-render when the subset of data changes. Otherwise, any change of data on any property would normally + * cause the component to re-render (and that can be expensive from a performance standpoint). + * @param [mapping.initialValue] THIS PARAM IS ONLY USED WITH withOnyx(). + * If included, this will be passed to the component so that something can be rendered while data is being fetched from the DB. + * Note that it will not cause the component to have the loading prop set to true. + * @returns an ID to use when calling disconnect + */ +declare function connect(connectOptions: ConnectOptions): number; +/** + * Remove the listener for a react component + * @example + * Onyx.disconnect(connectionID); + * + * @param connectionID unique id returned by call to Onyx.connect() + */ +declare function disconnect(connectionID: number, keyToRemoveFromEvictionBlocklist?: OnyxKey): void; +/** + * Write a value to our store with the given key + * + * @param key ONYXKEY to set + * @param value value to store + */ +declare function set(key: TKey, value: OnyxSetInput): Promise; +/** + * Sets multiple keys and values + * + * @example Onyx.multiSet({'key1': 'a', 'key2': 'b'}); + * + * @param data object keyed by ONYXKEYS and the values to set + */ +declare function multiSet(data: OnyxMultiSetInput): Promise; +/** + * Merge a new value into an existing value at a key. + * + * The types of values that can be merged are `Object` and `Array`. To set another type of value use `Onyx.set()`. + * Values of type `Object` get merged with the old value, whilst for `Array`'s we simply replace the current value with the new one. 
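// Illustrative sketch, not part of this patch: the connect()/disconnect() pair
// declared above, following the JSDoc example. ONYXKEYS is an application-defined
// key map and only an assumption here.
import Onyx from 'react-native-onyx';

const ONYXKEYS = {SESSION: 'session'} as const;

const connectionID = Onyx.connect({
    key: ONYXKEYS.SESSION,
    callback: (session: unknown) => {
        // Called with the stored value when the connection is made and on every later change.
        console.log('session changed', session);
    },
});

// Later, when the subscriber no longer needs updates:
Onyx.disconnect(connectionID);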
+ * + * Calls to `Onyx.merge()` are batched so that any calls performed in a single tick will stack in a queue and get + * applied in the order they were called. Note: `Onyx.set()` calls do not work this way so use caution when mixing + * `Onyx.merge()` and `Onyx.set()`. + * + * @example + * Onyx.merge(ONYXKEYS.EMPLOYEE_LIST, ['Joe']); // -> ['Joe'] + * Onyx.merge(ONYXKEYS.EMPLOYEE_LIST, ['Jack']); // -> ['Joe', 'Jack'] + * Onyx.merge(ONYXKEYS.POLICY, {id: 1}); // -> {id: 1} + * Onyx.merge(ONYXKEYS.POLICY, {name: 'My Workspace'}); // -> {id: 1, name: 'My Workspace'} + */ +declare function merge(key: TKey, changes: OnyxMergeInput): Promise; +/** + * Merges a collection based on their keys + * + * @example + * + * Onyx.mergeCollection(ONYXKEYS.COLLECTION.REPORT, { + * [`${ONYXKEYS.COLLECTION.REPORT}1`]: report1, + * [`${ONYXKEYS.COLLECTION.REPORT}2`]: report2, + * }); + * + * @param collectionKey e.g. `ONYXKEYS.COLLECTION.REPORT` + * @param collection Object collection keyed by individual collection member keys and values + */ +declare function mergeCollection(collectionKey: TKey, collection: OnyxMergeCollectionInput): Promise; +/** + * Clear out all the data in the store + * + * Note that calling Onyx.clear() and then Onyx.set() on a key with a default + * key state may store an unexpected value in Storage. + * + * E.g. + * Onyx.clear(); + * Onyx.set(ONYXKEYS.DEFAULT_KEY, 'default'); + * Storage.getItem(ONYXKEYS.DEFAULT_KEY) + * .then((storedValue) => console.log(storedValue)); + * null is logged instead of the expected 'default' + * + * Onyx.set() might call Storage.setItem() before Onyx.clear() calls + * Storage.setItem(). Use Onyx.merge() instead if possible. Onyx.merge() calls + * Onyx.get(key) before calling Storage.setItem() via Onyx.set(). + * Storage.setItem() from Onyx.clear() will have already finished and the merged + * value will be saved to storage after the default value. + * + * @param keysToPreserve is a list of ONYXKEYS that should not be cleared with the rest of the data + */ +declare function clear(keysToPreserve?: OnyxKey[]): Promise; +/** + * Insert API responses and lifecycle data into Onyx + * + * @param data An array of objects with update expressions + * @returns resolves when all operations are complete + */ +declare function update(data: OnyxUpdate[]): Promise; +declare const Onyx: { + readonly METHOD: { + readonly SET: "set"; + readonly MERGE: "merge"; + readonly MERGE_COLLECTION: "mergecollection"; + readonly MULTI_SET: "multiset"; + readonly CLEAR: "clear"; + }; + readonly connect: typeof connect; + readonly disconnect: typeof disconnect; + readonly set: typeof set; + readonly multiSet: typeof multiSet; + readonly merge: typeof merge; + readonly mergeCollection: typeof mergeCollection; + readonly update: typeof update; + readonly clear: typeof clear; + readonly init: typeof init; + readonly registerLogger: typeof Logger.registerLogger; +}; +export default Onyx; +export type { OnyxUpdate, Mapping, ConnectOptions }; diff --git a/dist/Onyx.js b/dist/Onyx.js new file mode 100644 index 00000000..bce9c27e --- /dev/null +++ b/dist/Onyx.js @@ -0,0 +1,697 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/* eslint-disable no-continue */ +const underscore_1 = __importDefault(require("underscore")); +const pick_1 = __importDefault(require("lodash/pick")); +const Logger = __importStar(require("./Logger")); +const OnyxCache_1 = __importDefault(require("./OnyxCache")); +const createDeferredTask_1 = __importDefault(require("./createDeferredTask")); +const PerformanceUtils = __importStar(require("./PerformanceUtils")); +const storage_1 = __importDefault(require("./storage")); +const utils_1 = __importDefault(require("./utils")); +const DevTools_1 = __importDefault(require("./DevTools")); +const OnyxUtils_1 = __importDefault(require("./OnyxUtils")); +const logMessages_1 = __importDefault(require("./logMessages")); +// Keeps track of the last connectionID that was used so we can keep incrementing it +let lastConnectionID = 0; +// Connections can be made before `Onyx.init`. They would wait for this task before resolving +const deferredInitTask = (0, createDeferredTask_1.default)(); +/** Initialize the store with actions and listening for storage events */ +function init({ keys = {}, initialKeyStates = {}, safeEvictionKeys = [], maxCachedKeysCount = 1000, shouldSyncMultipleInstances = Boolean(global.localStorage), debugSetState = false, }) { + var _a; + storage_1.default.init(); + if (shouldSyncMultipleInstances) { + (_a = storage_1.default.keepInstancesSync) === null || _a === void 0 ? 
void 0 : _a.call(storage_1.default, (key, value) => { + const prevValue = OnyxCache_1.default.get(key, false); + OnyxCache_1.default.set(key, value); + OnyxUtils_1.default.keyChanged(key, value, prevValue); + }); + } + if (debugSetState) { + PerformanceUtils.setShouldDebugSetState(true); + } + if (maxCachedKeysCount > 0) { + OnyxCache_1.default.setRecentKeysLimit(maxCachedKeysCount); + } + OnyxUtils_1.default.initStoreValues(keys, initialKeyStates, safeEvictionKeys); + // Initialize all of our keys with data provided then give green light to any pending connections + Promise.all([OnyxUtils_1.default.addAllSafeEvictionKeysToRecentlyAccessedList(), OnyxUtils_1.default.initializeWithDefaultKeyStates()]).then(deferredInitTask.resolve); +} +/** + * Subscribes a react component's state directly to a store key + * + * @example + * const connectionID = Onyx.connect({ + * key: ONYXKEYS.SESSION, + * callback: onSessionChange, + * }); + * + * @param mapping the mapping information to connect Onyx to the components state + * @param mapping.key ONYXKEY to subscribe to + * @param [mapping.statePropertyName] the name of the property in the state to connect the data to + * @param [mapping.withOnyxInstance] whose setState() method will be called with any changed data + * This is used by React components to connect to Onyx + * @param [mapping.callback] a method that will be called with changed data + * This is used by any non-React code to connect to Onyx + * @param [mapping.initWithStoredValues] If set to false, then no data will be prefilled into the + * component + * @param [mapping.waitForCollectionCallback] If set to true, it will return the entire collection to the callback as a single object + * @param [mapping.selector] THIS PARAM IS ONLY USED WITH withOnyx(). If included, this will be used to subscribe to a subset of an Onyx key's data. + * The sourceData and withOnyx state are passed to the selector and should return the simplified data. Using this setting on `withOnyx` can have very positive + * performance benefits because the component will only re-render when the subset of data changes. Otherwise, any change of data on any property would normally + * cause the component to re-render (and that can be expensive from a performance standpoint). + * @param [mapping.initialValue] THIS PARAM IS ONLY USED WITH withOnyx(). + * If included, this will be passed to the component so that something can be rendered while data is being fetched from the DB. + * Note that it will not cause the component to have the loading prop set to true. 
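// Illustrative sketch, not part of this patch: an init() call exercising the
// options destructured in init() above. The key names and values are assumptions;
// the option names and their effects come from the code above.
import Onyx from 'react-native-onyx';

const ONYXKEYS = {
    SESSION: 'session',
    PREFERRED_LOCALE: 'preferredLocale',
    COLLECTION: {REPORT: 'report_'},
} as const;

Onyx.init({
    keys: ONYXKEYS,
    initialKeyStates: {[ONYXKEYS.PREFERRED_LOCALE]: 'en'}, // defaults restored by Onyx.clear()
    safeEvictionKeys: [ONYXKEYS.COLLECTION.REPORT], // keys flagged as safe to evict under storage pressure
    maxCachedKeysCount: 1000, // forwarded to OnyxCache.setRecentKeysLimit()
    shouldSyncMultipleInstances: true, // keep multiple tabs in sync via Storage.keepInstancesSync
    debugSetState: false,
});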
+ * @returns an ID to use when calling disconnect + */ +function connect(connectOptions) { + const mapping = connectOptions; + const connectionID = lastConnectionID++; + const callbackToStateMapping = OnyxUtils_1.default.getCallbackToStateMapping(); + callbackToStateMapping[connectionID] = mapping; + callbackToStateMapping[connectionID].connectionID = connectionID; + if (mapping.initWithStoredValues === false) { + return connectionID; + } + // Commit connection only after init passes + deferredInitTask.promise + .then(() => OnyxUtils_1.default.addKeyToRecentlyAccessedIfNeeded(mapping)) + .then(() => { + // Performance improvement + // If the mapping is connected to an onyx key that is not a collection + // we can skip the call to getAllKeys() and return an array with a single item + if (Boolean(mapping.key) && typeof mapping.key === 'string' && !mapping.key.endsWith('_') && OnyxCache_1.default.getAllKeys().has(mapping.key)) { + return new Set([mapping.key]); + } + return OnyxUtils_1.default.getAllKeys(); + }) + .then((keys) => { + // We search all the keys in storage to see if any are a "match" for the subscriber we are connecting so that we + // can send data back to the subscriber. Note that multiple keys can match as a subscriber could either be + // subscribed to a "collection key" or a single key. + const matchingKeys = Array.from(keys).filter((key) => OnyxUtils_1.default.isKeyMatch(mapping.key, key)); + // If the key being connected to does not exist we initialize the value with null. For subscribers that connected + // directly via connect() they will simply get a null value sent to them without any information about which key matched + // since there are none matched. In withOnyx() we wait for all connected keys to return a value before rendering the child + // component. This null value will be filtered out so that the connected component can utilize defaultProps. + if (matchingKeys.length === 0) { + if (mapping.key && !OnyxUtils_1.default.isCollectionKey(mapping.key)) { + OnyxCache_1.default.addNullishStorageKey(mapping.key); + } + // Here we cannot use batching because the nullish value is expected to be set immediately for default props + // or they will be undefined. + OnyxUtils_1.default.sendDataToConnection(mapping, null, undefined, false); + return; + } + // When using a callback subscriber we will either trigger the provided callback for each key we find or combine all values + // into an object and just make a single call. The latter behavior is enabled by providing a waitForCollectionCallback key + // combined with a subscription to a collection key. + if (typeof mapping.callback === 'function') { + if (OnyxUtils_1.default.isCollectionKey(mapping.key)) { + if (mapping.waitForCollectionCallback) { + OnyxUtils_1.default.getCollectionDataAndSendAsObject(matchingKeys, mapping); + return; + } + // We did not opt into using waitForCollectionCallback mode so the callback is called for every matching key. + // eslint-disable-next-line @typescript-eslint/prefer-for-of + for (let i = 0; i < matchingKeys.length; i++) { + OnyxUtils_1.default.get(matchingKeys[i]).then((val) => OnyxUtils_1.default.sendDataToConnection(mapping, val, matchingKeys[i], true)); + } + return; + } + // If we are not subscribed to a collection key then there's only a single key to send an update for. 
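// Illustrative sketch, not part of this patch: subscribing to a collection key.
// Without waitForCollectionCallback the callback fires once per matching member
// key; with it, the whole collection arrives as a single keyed object, as the
// branch above shows. ONYXKEYS and the report shape are assumptions.
import Onyx from 'react-native-onyx';

const ONYXKEYS = {COLLECTION: {REPORT: 'report_'}} as const;

Onyx.connect({
    key: ONYXKEYS.COLLECTION.REPORT,
    waitForCollectionCallback: true,
    callback: (allReports: Record<string, unknown> | undefined) => {
        // allReports is keyed by member key, e.g. {report_1: {...}, report_2: {...}}
        console.log(Object.keys(allReports ?? {}).length, 'reports received');
    },
});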
+ OnyxUtils_1.default.get(mapping.key).then((val) => OnyxUtils_1.default.sendDataToConnection(mapping, val, mapping.key, true)); + return; + } + // If we have a withOnyxInstance that means a React component has subscribed via the withOnyx() HOC and we need to + // group collection key member data into an object. + if ('withOnyxInstance' in mapping && mapping.withOnyxInstance) { + if (OnyxUtils_1.default.isCollectionKey(mapping.key)) { + OnyxUtils_1.default.getCollectionDataAndSendAsObject(matchingKeys, mapping); + return; + } + // If the subscriber is not using a collection key then we just send a single value back to the subscriber + OnyxUtils_1.default.get(mapping.key).then((val) => OnyxUtils_1.default.sendDataToConnection(mapping, val, mapping.key, true)); + return; + } + console.error('Warning: Onyx.connect() was found without a callback or withOnyxInstance'); + }); + // The connectionID is returned back to the caller so that it can be used to clean up the connection when it's no longer needed + // by calling Onyx.disconnect(connectionID). + return connectionID; +} +/** + * Remove the listener for a react component + * @example + * Onyx.disconnect(connectionID); + * + * @param connectionID unique id returned by call to Onyx.connect() + */ +function disconnect(connectionID, keyToRemoveFromEvictionBlocklist) { + const callbackToStateMapping = OnyxUtils_1.default.getCallbackToStateMapping(); + if (!callbackToStateMapping[connectionID]) { + return; + } + // Remove this key from the eviction block list as we are no longer + // subscribing to it and it should be safe to delete again + if (keyToRemoveFromEvictionBlocklist) { + OnyxUtils_1.default.removeFromEvictionBlockList(keyToRemoveFromEvictionBlocklist, connectionID); + } + delete callbackToStateMapping[connectionID]; +} +/** + * Write a value to our store with the given key + * + * @param key ONYXKEY to set + * @param value value to store + */ +function set(key, value) { + // When we use Onyx.set to set a key we want to clear the current delta changes from Onyx.merge that were queued + // before the value was set. If Onyx.merge is currently reading the old value from storage, it will then not apply the changes. + if (OnyxUtils_1.default.hasPendingMergeForKey(key)) { + delete OnyxUtils_1.default.getMergeQueue()[key]; + } + // Onyx.set will ignore `undefined` values as inputs, therefore we can return early. + if (value === undefined) { + return Promise.resolve(); + } + const existingValue = OnyxCache_1.default.get(key, false); + // If the existing value as well as the new value are null, we can return early. + if (existingValue === undefined && value === null) { + return Promise.resolve(); + } + // Check if the value is compatible with the existing value in the storage + const { isCompatible, existingValueType, newValueType } = utils_1.default.checkCompatibilityWithExistingValue(value, existingValue); + if (!isCompatible) { + Logger.logAlert(logMessages_1.default.incompatibleUpdateAlert(key, 'set', existingValueType, newValueType)); + return Promise.resolve(); + } + // If the value is null, we remove the key from storage + const { value: valueAfterRemoving, wasRemoved } = OnyxUtils_1.default.removeNullValues(key, value); + const logSetCall = (hasChanged = true) => { + // Logging properties only since values could be sensitive things we don't want to log + Logger.logInfo(`set called for key: ${key}${underscore_1.default.isObject(value) ? 
` properties: ${underscore_1.default.keys(value).join(',')}` : ''} hasChanged: ${hasChanged}`); + }; + // Calling "OnyxUtils.removeNullValues" removes the key from storage and cache and updates the subscriber. + // Therefore, we don't need to further broadcast and update the value so we can return early. + if (wasRemoved) { + logSetCall(); + return Promise.resolve(); + } + const valueWithoutNullValues = valueAfterRemoving; + const hasChanged = OnyxCache_1.default.hasValueChanged(key, valueWithoutNullValues); + logSetCall(hasChanged); + // This approach prioritizes fast UI changes without waiting for data to be stored in device storage. + const updatePromise = OnyxUtils_1.default.broadcastUpdate(key, valueWithoutNullValues, hasChanged); + // If the value has not changed or the key got removed, calling Storage.setItem() would be redundant and a waste of performance, so return early instead. + if (!hasChanged) { + return updatePromise; + } + return storage_1.default.setItem(key, valueWithoutNullValues) + .catch((error) => OnyxUtils_1.default.evictStorageAndRetry(error, set, key, valueWithoutNullValues)) + .then(() => { + OnyxUtils_1.default.sendActionToDevTools(OnyxUtils_1.default.METHOD.SET, key, valueWithoutNullValues); + return updatePromise; + }); +} +/** + * Sets multiple keys and values + * + * @example Onyx.multiSet({'key1': 'a', 'key2': 'b'}); + * + * @param data object keyed by ONYXKEYS and the values to set + */ +function multiSet(data) { + const keyValuePairsToSet = OnyxUtils_1.default.prepareKeyValuePairsForStorage(data, true); + const updatePromises = keyValuePairsToSet.map(([key, value]) => { + const prevValue = OnyxCache_1.default.get(key, false); + // Update cache and optimistically inform subscribers on the next tick + OnyxCache_1.default.set(key, value); + return OnyxUtils_1.default.scheduleSubscriberUpdate(key, value, prevValue); + }); + return storage_1.default.multiSet(keyValuePairsToSet) + .catch((error) => OnyxUtils_1.default.evictStorageAndRetry(error, multiSet, data)) + .then(() => { + OnyxUtils_1.default.sendActionToDevTools(OnyxUtils_1.default.METHOD.MULTI_SET, undefined, data); + return Promise.all(updatePromises); + }) + .then(() => undefined); +} +/** + * Merge a new value into an existing value at a key. + * + * The types of values that can be merged are `Object` and `Array`. To set another type of value use `Onyx.set()`. + * Values of type `Object` get merged with the old value, whilst for `Array`'s we simply replace the current value with the new one. + * + * Calls to `Onyx.merge()` are batched so that any calls performed in a single tick will stack in a queue and get + * applied in the order they were called. Note: `Onyx.set()` calls do not work this way so use caution when mixing + * `Onyx.merge()` and `Onyx.set()`. + * + * @example + * Onyx.merge(ONYXKEYS.EMPLOYEE_LIST, ['Joe']); // -> ['Joe'] + * Onyx.merge(ONYXKEYS.EMPLOYEE_LIST, ['Jack']); // -> ['Joe', 'Jack'] + * Onyx.merge(ONYXKEYS.POLICY, {id: 1}); // -> {id: 1} + * Onyx.merge(ONYXKEYS.POLICY, {name: 'My Workspace'}); // -> {id: 1, name: 'My Workspace'} + */ +function merge(key, changes) { + const mergeQueue = OnyxUtils_1.default.getMergeQueue(); + const mergeQueuePromise = OnyxUtils_1.default.getMergeQueuePromise(); + // Top-level undefined values are ignored + // Therefore, we need to prevent adding them to the merge queue + if (changes === undefined) { + return mergeQueue[key] ? mergeQueuePromise[key] : Promise.resolve(); + } + // Merge attempts are batched together. 
The delta should be applied after a single call to get() to prevent a race condition. + // Using the initial value from storage in subsequent merge attempts will lead to an incorrect final merged value. + if (mergeQueue[key]) { + mergeQueue[key].push(changes); + return mergeQueuePromise[key]; + } + mergeQueue[key] = [changes]; + mergeQueuePromise[key] = OnyxUtils_1.default.get(key).then((existingValue) => { + // Calls to Onyx.set after a merge will terminate the current merge process and clear the merge queue + if (mergeQueue[key] == null) { + return Promise.resolve(); + } + try { + // We first only merge the changes, so we can provide these to the native implementation (SQLite uses only delta changes in "JSON_PATCH" to merge) + // We don't want to remove null values from the "batchedDeltaChanges", because SQLite uses them to remove keys from storage natively. + const validChanges = mergeQueue[key].filter((change) => { + const { isCompatible, existingValueType, newValueType } = utils_1.default.checkCompatibilityWithExistingValue(change, existingValue); + if (!isCompatible) { + Logger.logAlert(logMessages_1.default.incompatibleUpdateAlert(key, 'merge', existingValueType, newValueType)); + } + return isCompatible; + }); + if (!validChanges.length) { + return Promise.resolve(); + } + const batchedDeltaChanges = OnyxUtils_1.default.applyMerge(undefined, validChanges, false); + // Case (1): When there is no existing value in storage, we want to set the value instead of merge it. + // Case (2): The presence of a top-level `null` in the merge queue instructs us to drop the whole existing value. + // In this case, we can't simply merge the batched changes with the existing value, because then the null in the merge queue would have no effect + const shouldSetValue = !existingValue || mergeQueue[key].includes(null); + // Clean up the write queue, so we don't apply these changes again + delete mergeQueue[key]; + delete mergeQueuePromise[key]; + const logMergeCall = (hasChanged = true) => { + // Logging properties only since values could be sensitive things we don't want to log + Logger.logInfo(`merge called for key: ${key}${underscore_1.default.isObject(batchedDeltaChanges) ? ` properties: ${underscore_1.default.keys(batchedDeltaChanges).join(',')}` : ''} hasChanged: ${hasChanged}`); + }; + // If the batched changes equal null, we want to remove the key from storage, to reduce storage size + const { wasRemoved } = OnyxUtils_1.default.removeNullValues(key, batchedDeltaChanges); + // Calling "OnyxUtils.removeNullValues" removes the key from storage and cache and updates the subscriber. + // Therefore, we don't need to further broadcast and update the value so we can return early. + if (wasRemoved) { + logMergeCall(); + return Promise.resolve(); + } + // For providers that can't handle delta changes, we need to merge the batched changes with the existing value beforehand. + // The "preMergedValue" will be directly "set" in storage instead of being merged + // Therefore we merge the batched changes with the existing value to get the final merged value that will be stored. + // We can remove null values from the "preMergedValue", because "null" implicates that the user wants to remove a value from storage. + const preMergedValue = OnyxUtils_1.default.applyMerge(shouldSetValue ? undefined : existingValue, [batchedDeltaChanges], true); + // In cache, we don't want to remove the key if it's null to improve performance and speed up the next merge. 
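// Illustrative sketch, not part of this patch: the single-tick batching described
// above. Both merges land in the same merge queue entry and are applied in call
// order after one read of the existing value. ONYXKEYS is an assumption.
import Onyx from 'react-native-onyx';

const ONYXKEYS = {POLICY: 'policy'} as const;

Onyx.merge(ONYXKEYS.POLICY, {id: 1});
Onyx.merge(ONYXKEYS.POLICY, {name: 'My Workspace'}).then(() => {
    // Storage now holds {id: 1, name: 'My Workspace'}. An Onyx.set() on the same key
    // while merges were still queued would instead have cleared the queue, as noted above.
});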
+ const hasChanged = OnyxCache_1.default.hasValueChanged(key, preMergedValue); + logMergeCall(hasChanged); + // This approach prioritizes fast UI changes without waiting for data to be stored in device storage. + const updatePromise = OnyxUtils_1.default.broadcastUpdate(key, preMergedValue, hasChanged); + // If the value has not changed, calling Storage.setItem() would be redundant and a waste of performance, so return early instead. + if (!hasChanged) { + return updatePromise; + } + return storage_1.default.mergeItem(key, batchedDeltaChanges, preMergedValue, shouldSetValue).then(() => { + OnyxUtils_1.default.sendActionToDevTools(OnyxUtils_1.default.METHOD.MERGE, key, changes, preMergedValue); + return updatePromise; + }); + } + catch (error) { + Logger.logAlert(`An error occurred while applying merge for key: ${key}, Error: ${error}`); + return Promise.resolve(); + } + }); + return mergeQueuePromise[key]; +} +/** + * Merges a collection based on their keys + * + * @example + * + * Onyx.mergeCollection(ONYXKEYS.COLLECTION.REPORT, { + * [`${ONYXKEYS.COLLECTION.REPORT}1`]: report1, + * [`${ONYXKEYS.COLLECTION.REPORT}2`]: report2, + * }); + * + * @param collectionKey e.g. `ONYXKEYS.COLLECTION.REPORT` + * @param collection Object collection keyed by individual collection member keys and values + */ +function mergeCollection(collectionKey, collection) { + // Gracefully handle bad mergeCollection updates, so it doesn't block the merge queue + if (!OnyxUtils_1.default.isValidMergeCollection(collectionKey, collection)) { + return Promise.resolve(); + } + const mergedCollection = collection; + return OnyxUtils_1.default.getAllKeys() + .then((persistedKeys) => { + // Split to keys that exist in storage and keys that don't + const keys = Object.keys(mergedCollection).filter((key) => { + if (mergedCollection[key] === null) { + OnyxUtils_1.default.remove(key); + return false; + } + return true; + }); + const existingKeys = keys.filter((key) => persistedKeys.has(key)); + const cachedCollectionForExistingKeys = OnyxUtils_1.default.getCachedCollection(collectionKey, existingKeys); + const newKeys = keys.filter((key) => !persistedKeys.has(key)); + const existingKeyCollection = existingKeys.reduce((obj, key) => { + const { isCompatible, existingValueType, newValueType } = utils_1.default.checkCompatibilityWithExistingValue(mergedCollection[key], cachedCollectionForExistingKeys[key]); + if (!isCompatible) { + Logger.logAlert(logMessages_1.default.incompatibleUpdateAlert(key, 'mergeCollection', existingValueType, newValueType)); + return obj; + } + // eslint-disable-next-line no-param-reassign + obj[key] = mergedCollection[key]; + return obj; + }, {}); + const newCollection = newKeys.reduce((obj, key) => { + // eslint-disable-next-line no-param-reassign + obj[key] = mergedCollection[key]; + return obj; + }, {}); + // When (multi-)merging the values with the existing values in storage, + // we don't want to remove nested null values from the data that we pass to the storage layer, + // because the storage layer uses them to remove nested keys from storage natively. + const keyValuePairsForExistingCollection = OnyxUtils_1.default.prepareKeyValuePairsForStorage(existingKeyCollection, false); + // We can safely remove nested null values when using (multi-)set, + // because we will simply overwrite the existing values in storage. 
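// Illustrative sketch, not part of this patch: a mergeCollection() call matching
// the behaviour above — members that already exist in storage go through
// multiMerge, brand-new members go through multiSet, and a member whose value is
// null is removed from storage. ONYXKEYS and the payloads are assumptions.
import Onyx from 'react-native-onyx';

const ONYXKEYS = {COLLECTION: {REPORT: 'report_'}} as const;

Onyx.mergeCollection(ONYXKEYS.COLLECTION.REPORT, {
    [`${ONYXKEYS.COLLECTION.REPORT}1`]: {lastMessage: 'Hello'}, // merged if report_1 exists, set otherwise
    [`${ONYXKEYS.COLLECTION.REPORT}2`]: {lastMessage: 'World'},
});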
+ const keyValuePairsForNewCollection = OnyxUtils_1.default.prepareKeyValuePairsForStorage(newCollection, true); + const promises = []; + // We need to get the previously existing values so we can compare the new ones + // against them, to avoid unnecessary subscriber updates. + const previousCollectionPromise = Promise.all(existingKeys.map((key) => OnyxUtils_1.default.get(key).then((value) => [key, value]))).then(Object.fromEntries); + // New keys will be added via multiSet while existing keys will be updated using multiMerge + // This is because setting a key that doesn't exist yet with multiMerge will throw errors + if (keyValuePairsForExistingCollection.length > 0) { + promises.push(storage_1.default.multiMerge(keyValuePairsForExistingCollection)); + } + if (keyValuePairsForNewCollection.length > 0) { + promises.push(storage_1.default.multiSet(keyValuePairsForNewCollection)); + } + // finalMergedCollection contains all the keys that were merged, without the keys of incompatible updates + const finalMergedCollection = Object.assign(Object.assign({}, existingKeyCollection), newCollection); + // Prefill cache if necessary by calling get() on any existing keys and then merge original data to cache + // and update all subscribers + const promiseUpdate = previousCollectionPromise.then((previousCollection) => { + OnyxCache_1.default.merge(finalMergedCollection); + return OnyxUtils_1.default.scheduleNotifyCollectionSubscribers(collectionKey, finalMergedCollection, previousCollection); + }); + return Promise.all(promises) + .catch((error) => OnyxUtils_1.default.evictStorageAndRetry(error, mergeCollection, collectionKey, mergedCollection)) + .then(() => { + OnyxUtils_1.default.sendActionToDevTools(OnyxUtils_1.default.METHOD.MERGE_COLLECTION, undefined, mergedCollection); + return promiseUpdate; + }); + }) + .then(() => undefined); +} +/** + * Clear out all the data in the store + * + * Note that calling Onyx.clear() and then Onyx.set() on a key with a default + * key state may store an unexpected value in Storage. + * + * E.g. + * Onyx.clear(); + * Onyx.set(ONYXKEYS.DEFAULT_KEY, 'default'); + * Storage.getItem(ONYXKEYS.DEFAULT_KEY) + * .then((storedValue) => console.log(storedValue)); + * null is logged instead of the expected 'default' + * + * Onyx.set() might call Storage.setItem() before Onyx.clear() calls + * Storage.setItem(). Use Onyx.merge() instead if possible. Onyx.merge() calls + * Onyx.get(key) before calling Storage.setItem() via Onyx.set(). + * Storage.setItem() from Onyx.clear() will have already finished and the merged + * value will be saved to storage after the default value. + * + * @param keysToPreserve is a list of ONYXKEYS that should not be cleared with the rest of the data + */ +function clear(keysToPreserve = []) { + return OnyxUtils_1.default.getAllKeys() + .then((keys) => { + OnyxCache_1.default.clearNullishStorageKeys(); + const keysToBeClearedFromStorage = []; + const keyValuesToResetAsCollection = {}; + const keyValuesToResetIndividually = {}; + // The only keys that should not be cleared are: + // 1. Anything specifically passed in keysToPreserve (because some keys like language preferences, offline + // status, or activeClients need to remain in Onyx even when signed out) + // 2. 
Any keys with a default state (because they need to remain in Onyx as their default, and setting them + // to null would cause unknown behavior) + keys.forEach((key) => { + var _a; + const isKeyToPreserve = keysToPreserve.includes(key); + const defaultKeyStates = OnyxUtils_1.default.getDefaultKeyStates(); + const isDefaultKey = key in defaultKeyStates; + // If the key is being removed or reset to default: + // 1. Update it in the cache + // 2. Figure out whether it is a collection key or not, + // since collection key subscribers need to be updated differently + if (!isKeyToPreserve) { + const oldValue = OnyxCache_1.default.get(key); + const newValue = (_a = defaultKeyStates[key]) !== null && _a !== void 0 ? _a : null; + if (newValue !== oldValue) { + OnyxCache_1.default.set(key, newValue); + const collectionKey = key.substring(0, key.indexOf('_') + 1); + if (collectionKey) { + if (!keyValuesToResetAsCollection[collectionKey]) { + keyValuesToResetAsCollection[collectionKey] = {}; + } + keyValuesToResetAsCollection[collectionKey][key] = newValue !== null && newValue !== void 0 ? newValue : undefined; + } + else { + keyValuesToResetIndividually[key] = newValue !== null && newValue !== void 0 ? newValue : undefined; + } + } + } + if (isKeyToPreserve || isDefaultKey) { + return; + } + // If it isn't preserved and doesn't have a default, we'll remove it + keysToBeClearedFromStorage.push(key); + }); + const updatePromises = []; + // Notify the subscribers for each key/value group so they can receive the new values + Object.entries(keyValuesToResetIndividually).forEach(([key, value]) => { + updatePromises.push(OnyxUtils_1.default.scheduleSubscriberUpdate(key, value, OnyxCache_1.default.get(key, false))); + }); + Object.entries(keyValuesToResetAsCollection).forEach(([key, value]) => { + updatePromises.push(OnyxUtils_1.default.scheduleNotifyCollectionSubscribers(key, value)); + }); + const defaultKeyStates = OnyxUtils_1.default.getDefaultKeyStates(); + const defaultKeyValuePairs = Object.entries(Object.keys(defaultKeyStates) + .filter((key) => !keysToPreserve.includes(key)) + .reduce((obj, key) => { + // eslint-disable-next-line no-param-reassign + obj[key] = defaultKeyStates[key]; + return obj; + }, {})); + // Remove only the items that we want cleared from storage, and reset others to default + keysToBeClearedFromStorage.forEach((key) => OnyxCache_1.default.drop(key)); + return storage_1.default.removeItems(keysToBeClearedFromStorage) + .then(() => storage_1.default.multiSet(defaultKeyValuePairs)) + .then(() => { + DevTools_1.default.clearState(keysToPreserve); + return Promise.all(updatePromises); + }); + }) + .then(() => undefined); +} +function updateSnapshots(data) { + const snapshotCollectionKey = OnyxUtils_1.default.getSnapshotKey(); + if (!snapshotCollectionKey) + return; + const promises = []; + const snapshotCollection = OnyxUtils_1.default.getCachedCollection(snapshotCollectionKey); + Object.entries(snapshotCollection).forEach(([snapshotKey, snapshotValue]) => { + // Snapshots may not be present in cache. We don't know how to update them so we skip. 
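// Illustrative sketch, not part of this patch: a clear() call following the rules
// listed above — preserved keys are left alone, keys with a default from
// initialKeyStates are reset to that default, and everything else is removed.
// The key names are assumptions.
import Onyx from 'react-native-onyx';

const ONYXKEYS = {SESSION: 'session', PREFERRED_LOCALE: 'preferredLocale'} as const;

// Keep the user's locale across sign-out; SESSION and other keys are removed
// (or reset to their defaults if they have one).
Onyx.clear([ONYXKEYS.PREFERRED_LOCALE]);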
+ if (!snapshotValue) { + return; + } + let updatedData = {}; + data.forEach(({ key, value }) => { + // snapshots are normal keys so we want to skip update if they are written to Onyx + if (OnyxUtils_1.default.isCollectionMemberKey(snapshotCollectionKey, key)) { + return; + } + if (typeof snapshotValue !== 'object' || !('data' in snapshotValue)) { + return; + } + const snapshotData = snapshotValue.data; + if (!snapshotData || !snapshotData[key]) { + return; + } + updatedData = Object.assign(Object.assign({}, updatedData), { [key]: (0, pick_1.default)(value, Object.keys(snapshotData[key])) }); + }); + promises.push(() => merge(snapshotKey, { data: updatedData })); + }); + return Promise.all(promises.map((p) => p())); +} +/** + * Insert API responses and lifecycle data into Onyx + * + * @param data An array of objects with update expressions + * @returns resolves when all operations are complete + */ +function update(data) { + // First, validate the Onyx object is in the format we expect + data.forEach(({ onyxMethod, key, value }) => { + if (![OnyxUtils_1.default.METHOD.CLEAR, OnyxUtils_1.default.METHOD.SET, OnyxUtils_1.default.METHOD.MERGE, OnyxUtils_1.default.METHOD.MERGE_COLLECTION, OnyxUtils_1.default.METHOD.MULTI_SET].includes(onyxMethod)) { + throw new Error(`Invalid onyxMethod ${onyxMethod} in Onyx update.`); + } + if (onyxMethod === OnyxUtils_1.default.METHOD.MULTI_SET) { + // For multiset, we just expect the value to be an object + if (typeof value !== 'object' || Array.isArray(value) || typeof value === 'function') { + throw new Error('Invalid value provided in Onyx multiSet. Onyx multiSet value must be of type object.'); + } + } + else if (onyxMethod !== OnyxUtils_1.default.METHOD.CLEAR && typeof key !== 'string') { + throw new Error(`Invalid ${typeof key} key provided in Onyx update. Onyx key must be of type string.`); + } + }); + // The queue of operations within a single `update` call in the format of . + // This allows us to batch the operations per item and merge them into one operation in the order they were requested. + const updateQueue = {}; + const enqueueSetOperation = (key, value) => { + // If a `set` operation is enqueued, we should clear the whole queue. + // Since the `set` operation replaces the value entirely, there's no need to perform any previous operations. + // To do this, we first put `null` in the queue, which removes the existing value, and then merge the new value. + updateQueue[key] = [null, value]; + }; + const enqueueMergeOperation = (key, value) => { + if (value === null) { + // If we merge `null`, the value is removed and all the previous operations are discarded. 
+ updateQueue[key] = [null]; + } + else if (!updateQueue[key]) { + updateQueue[key] = [value]; + } + else { + updateQueue[key].push(value); + } + }; + const promises = []; + let clearPromise = Promise.resolve(); + data.forEach(({ onyxMethod, key, value }) => { + switch (onyxMethod) { + case OnyxUtils_1.default.METHOD.SET: + enqueueSetOperation(key, value); + break; + case OnyxUtils_1.default.METHOD.MERGE: + enqueueMergeOperation(key, value); + break; + case OnyxUtils_1.default.METHOD.MERGE_COLLECTION: + if (OnyxUtils_1.default.isValidMergeCollection(key, value)) { + Object.entries(value).forEach(([entryKey, entryValue]) => enqueueMergeOperation(entryKey, entryValue)); + } + break; + case OnyxUtils_1.default.METHOD.MULTI_SET: + Object.entries(value).forEach(([entryKey, entryValue]) => enqueueSetOperation(entryKey, entryValue)); + break; + case OnyxUtils_1.default.METHOD.CLEAR: + clearPromise = clear(); + break; + default: + break; + } + }); + // Group all the collection-related keys and update each collection in a single `mergeCollection` call. + // This is needed to prevent multiple `mergeCollection` calls for the same collection and `merge` calls for the individual items of the said collection. + // This way, we ensure there is no race condition in the queued updates of the same key. + OnyxUtils_1.default.getCollectionKeys().forEach((collectionKey) => { + const collectionItemKeys = Object.keys(updateQueue).filter((key) => OnyxUtils_1.default.isKeyMatch(collectionKey, key)); + if (collectionItemKeys.length <= 1) { + // If there are no items of this collection in the updateQueue, we should skip it. + // If there is only one item, we should update it individually, therefore retain it in the updateQueue. + return; + } + const batchedCollectionUpdates = collectionItemKeys.reduce((queue, key) => { + const operations = updateQueue[key]; + // Remove the collection-related key from the updateQueue so that it won't be processed individually. 
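// Illustrative sketch, not part of this patch: an update() payload of the shape
// validated above. Operations on the same key are batched in order, and member
// keys of the same collection are folded into a single mergeCollection/multiSet
// call as described above. ONYXKEYS and the values are assumptions.
import Onyx from 'react-native-onyx';

const ONYXKEYS = {SESSION: 'session', COLLECTION: {REPORT: 'report_'}} as const;

Onyx.update([
    {onyxMethod: Onyx.METHOD.SET, key: ONYXKEYS.SESSION, value: {loading: false}},
    {onyxMethod: Onyx.METHOD.MERGE, key: `${ONYXKEYS.COLLECTION.REPORT}1`, value: {isPinned: true}},
    {onyxMethod: Onyx.METHOD.MERGE, key: `${ONYXKEYS.COLLECTION.REPORT}2`, value: {isPinned: false}},
]);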
+ delete updateQueue[key]; + const updatedValue = OnyxUtils_1.default.applyMerge(undefined, operations, false); + if (operations[0] === null) { + // eslint-disable-next-line no-param-reassign + queue.set[key] = updatedValue; + } + else { + // eslint-disable-next-line no-param-reassign + queue.merge[key] = updatedValue; + } + return queue; + }, { + merge: {}, + set: {}, + }); + if (!utils_1.default.isEmptyObject(batchedCollectionUpdates.merge)) { + promises.push(() => mergeCollection(collectionKey, batchedCollectionUpdates.merge)); + } + if (!utils_1.default.isEmptyObject(batchedCollectionUpdates.set)) { + promises.push(() => multiSet(batchedCollectionUpdates.set)); + } + }); + Object.entries(updateQueue).forEach(([key, operations]) => { + const batchedChanges = OnyxUtils_1.default.applyMerge(undefined, operations, false); + if (operations[0] === null) { + promises.push(() => set(key, batchedChanges)); + } + else { + promises.push(() => merge(key, batchedChanges)); + } + }); + return clearPromise + .then(() => Promise.all(promises.map((p) => p()))) + .then(() => updateSnapshots(data)) + .then(() => undefined); +} +const Onyx = { + METHOD: OnyxUtils_1.default.METHOD, + connect, + disconnect, + set, + multiSet, + merge, + mergeCollection, + update, + clear, + init, + registerLogger: Logger.registerLogger, +}; +exports.default = Onyx; diff --git a/dist/OnyxCache.d.ts b/dist/OnyxCache.d.ts new file mode 100644 index 00000000..f6d39cf3 --- /dev/null +++ b/dist/OnyxCache.d.ts @@ -0,0 +1,94 @@ +import type { OnyxKey, OnyxValue } from './types'; +/** + * In memory cache providing data by reference + * Encapsulates Onyx cache related functionality + */ +declare class OnyxCache { + /** Cache of all the storage keys available in persistent storage */ + private storageKeys; + /** A list of keys where a nullish value has been fetched from storage before, but the key still exists in cache */ + private nullishStorageKeys; + /** Unique list of keys maintained in access order (most recent at the end) */ + private recentKeys; + /** A map of cached values */ + private storageMap; + /** + * Captured pending tasks for already running storage methods + * Using a map yields better performance on operations such a delete + */ + private pendingPromises; + /** Maximum size of the keys store din cache */ + private maxRecentKeysSize; + constructor(); + /** Get all the storage keys */ + getAllKeys(): Set; + /** + * Allows to set all the keys at once. + * This is useful when we are getting + * all the keys from the storage provider + * and we want to keep the cache in sync. + * + * Previously, we had to call `addKey` in a loop + * to achieve the same result. + * + * @param keys - an array of keys + */ + setAllKeys(keys: OnyxKey[]): void; + /** Saves a key in the storage keys list + * Serves to keep the result of `getAllKeys` up to date + */ + addKey(key: OnyxKey): void; + /** Used to set keys that are null/undefined in storage without adding null to the storage map */ + addNullishStorageKey(key: OnyxKey): void; + /** Used to set keys that are null/undefined in storage without adding null to the storage map */ + hasNullishStorageKey(key: OnyxKey): boolean; + /** Used to clear keys that are null/undefined in cache */ + clearNullishStorageKeys(): void; + /** Check whether cache has data for the given key */ + hasCacheForKey(key: OnyxKey): boolean; + /** + * Get a cached value from storage + * @param [shouldReindexCache] – This is an LRU cache, and by default accessing a value will make it become last in line to be evicted. 
This flag can be used to skip that and just access the value directly without side-effects. + */ + get(key: OnyxKey, shouldReindexCache?: boolean): OnyxValue; + /** + * Set's a key value in cache + * Adds the key to the storage keys list as well + */ + set(key: OnyxKey, value: OnyxValue): OnyxValue; + /** Forget the cached value for the given key */ + drop(key: OnyxKey): void; + /** + * Deep merge data to cache, any non existing keys will be created + * @param data - a map of (cache) key - values + */ + merge(data: Record>): void; + /** + * Check whether the given task is already running + * @param taskName - unique name given for the task + */ + hasPendingTask(taskName: string): boolean; + /** + * Use this method to prevent concurrent calls for the same thing + * Instead of calling the same task again use the existing promise + * provided from this function + * @param taskName - unique name given for the task + */ + getTaskPromise(taskName: string): Promise | OnyxKey[]> | undefined; + /** + * Capture a promise for a given task so other caller can + * hook up to the promise if it's still pending + * @param taskName - unique name for the task + */ + captureTask(taskName: string, promise: Promise>): Promise>; + /** Adds a key to the top of the recently accessed keys */ + addToAccessedKeys(key: OnyxKey): void; + /** Remove keys that don't fall into the range of recently used keys */ + removeLeastRecentlyUsedKeys(): void; + /** Set the recent keys list size */ + setRecentKeysLimit(limit: number): void; + /** Check if the value has changed */ + hasValueChanged(key: OnyxKey, value: OnyxValue): boolean; +} +declare const instance: OnyxCache; +export default instance; diff --git a/dist/OnyxCache.js b/dist/OnyxCache.js new file mode 100644 index 00000000..7a6ddc0f --- /dev/null +++ b/dist/OnyxCache.js @@ -0,0 +1,180 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const fast_equals_1 = require("fast-equals"); +const bindAll_1 = __importDefault(require("lodash/bindAll")); +const utils_1 = __importDefault(require("./utils")); +/** + * In memory cache providing data by reference + * Encapsulates Onyx cache related functionality + */ +class OnyxCache { + constructor() { + /** Maximum size of the keys store din cache */ + this.maxRecentKeysSize = 0; + this.storageKeys = new Set(); + this.nullishStorageKeys = new Set(); + this.recentKeys = new Set(); + this.storageMap = {}; + this.pendingPromises = new Map(); + // bind all public methods to prevent problems with `this` + (0, bindAll_1.default)(this, 'getAllKeys', 'get', 'hasCacheForKey', 'addKey', 'addNullishStorageKey', 'hasNullishStorageKey', 'clearNullishStorageKeys', 'set', 'drop', 'merge', 'hasPendingTask', 'getTaskPromise', 'captureTask', 'removeLeastRecentlyUsedKeys', 'setRecentKeysLimit', 'setAllKeys'); + } + /** Get all the storage keys */ + getAllKeys() { + return this.storageKeys; + } + /** + * Allows to set all the keys at once. + * This is useful when we are getting + * all the keys from the storage provider + * and we want to keep the cache in sync. + * + * Previously, we had to call `addKey` in a loop + * to achieve the same result. 
+ * + * @param keys - an array of keys + */ + setAllKeys(keys) { + this.storageKeys = new Set(keys); + } + /** Saves a key in the storage keys list + * Serves to keep the result of `getAllKeys` up to date + */ + addKey(key) { + this.storageKeys.add(key); + } + /** Used to set keys that are null/undefined in storage without adding null to the storage map */ + addNullishStorageKey(key) { + this.nullishStorageKeys.add(key); + } + /** Used to set keys that are null/undefined in storage without adding null to the storage map */ + hasNullishStorageKey(key) { + return this.nullishStorageKeys.has(key); + } + /** Used to clear keys that are null/undefined in cache */ + clearNullishStorageKeys() { + this.nullishStorageKeys = new Set(); + } + /** Check whether cache has data for the given key */ + hasCacheForKey(key) { + return this.storageMap[key] !== undefined || this.hasNullishStorageKey(key); + } + /** + * Get a cached value from storage + * @param [shouldReindexCache] – This is an LRU cache, and by default accessing a value will make it become last in line to be evicted. This flag can be used to skip that and just access the value directly without side-effects. + */ + get(key, shouldReindexCache = true) { + if (shouldReindexCache) { + this.addToAccessedKeys(key); + } + return this.storageMap[key]; + } + /** + * Set's a key value in cache + * Adds the key to the storage keys list as well + */ + set(key, value) { + this.addKey(key); + this.addToAccessedKeys(key); + // When a key is explicitly set in cache, we can remove it from the list of nullish keys, + // since it will either be set to a non nullish value or removed from the cache completely. + this.nullishStorageKeys.delete(key); + if (value === null || value === undefined) { + delete this.storageMap[key]; + return undefined; + } + this.storageMap[key] = value; + return value; + } + /** Forget the cached value for the given key */ + drop(key) { + delete this.storageMap[key]; + this.storageKeys.delete(key); + this.recentKeys.delete(key); + } + /** + * Deep merge data to cache, any non existing keys will be created + * @param data - a map of (cache) key - values + */ + merge(data) { + if (typeof data !== 'object' || Array.isArray(data)) { + throw new Error('data passed to cache.merge() must be an Object of onyx key/value pairs'); + } + this.storageMap = Object.assign({}, utils_1.default.fastMerge(this.storageMap, data)); + Object.entries(data).forEach(([key, value]) => { + this.addKey(key); + this.addToAccessedKeys(key); + if (value === null || value === undefined) { + this.addNullishStorageKey(key); + } + else { + this.nullishStorageKeys.delete(key); + } + }); + } + /** + * Check whether the given task is already running + * @param taskName - unique name given for the task + */ + hasPendingTask(taskName) { + return this.pendingPromises.get(taskName) !== undefined; + } + /** + * Use this method to prevent concurrent calls for the same thing + * Instead of calling the same task again use the existing promise + * provided from this function + * @param taskName - unique name given for the task + */ + getTaskPromise(taskName) { + return this.pendingPromises.get(taskName); + } + /** + * Capture a promise for a given task so other caller can + * hook up to the promise if it's still pending + * @param taskName - unique name for the task + */ + captureTask(taskName, promise) { + const returnPromise = promise.finally(() => { + this.pendingPromises.delete(taskName); + }); + this.pendingPromises.set(taskName, returnPromise); + return returnPromise; + } + 
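+    /*
+     * Illustrative sketch (not part of the compiled file): how the pending-task helpers above are
+     * meant to be combined so concurrent reads of the same key share a single storage round trip.
+     * `cache` stands for this module's exported instance and `readFromStorage` is a hypothetical provider call.
+     *
+     *     function getOnce(key) {
+     *         const taskName = `get:${key}`;
+     *         if (cache.hasPendingTask(taskName)) {
+     *             // A read for this key is already in flight - reuse its promise.
+     *             return cache.getTaskPromise(taskName);
+     *         }
+     *         // Otherwise start the read; captureTask() removes the pending entry once the promise settles.
+     *         return cache.captureTask(taskName, readFromStorage(key).then((value) => cache.set(key, value)));
+     *     }
+     */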
/** Adds a key to the top of the recently accessed keys */ + addToAccessedKeys(key) { + this.recentKeys.delete(key); + this.recentKeys.add(key); + } + /** Remove keys that don't fall into the range of recently used keys */ + removeLeastRecentlyUsedKeys() { + let numKeysToRemove = this.recentKeys.size - this.maxRecentKeysSize; + if (numKeysToRemove <= 0) { + return; + } + const iterator = this.recentKeys.values(); + const temp = []; + while (numKeysToRemove > 0) { + const value = iterator.next().value; + temp.push(value); + numKeysToRemove--; + } + // eslint-disable-next-line @typescript-eslint/prefer-for-of + for (let i = 0; i < temp.length; ++i) { + delete this.storageMap[temp[i]]; + this.recentKeys.delete(temp[i]); + } + } + /** Set the recent keys list size */ + setRecentKeysLimit(limit) { + this.maxRecentKeysSize = limit; + } + /** Check if the value has changed */ + hasValueChanged(key, value) { + return !(0, fast_equals_1.deepEqual)(this.storageMap[key], value); + } +} +const instance = new OnyxCache(); +exports.default = instance; diff --git a/dist/OnyxUtils.d.ts b/dist/OnyxUtils.d.ts new file mode 100644 index 00000000..c43f9aed --- /dev/null +++ b/dist/OnyxUtils.d.ts @@ -0,0 +1,251 @@ +import type { ValueOf } from 'type-fest'; +import type Onyx from './Onyx'; +import type { CollectionKey, CollectionKeyBase, DeepRecord, KeyValueMapping, Mapping, OnyxCollection, OnyxEntry, OnyxInput, OnyxKey, OnyxMergeCollectionInput, OnyxValue, WithOnyxConnectOptions } from './types'; +declare const METHOD: { + readonly SET: "set"; + readonly MERGE: "merge"; + readonly MERGE_COLLECTION: "mergecollection"; + readonly MULTI_SET: "multiset"; + readonly CLEAR: "clear"; +}; +type OnyxMethod = ValueOf; +declare function getSnapshotKey(): OnyxKey | null; +/** + * Getter - returns the merge queue. + */ +declare function getMergeQueue(): Record>>; +/** + * Getter - returns the merge queue promise. + */ +declare function getMergeQueuePromise(): Record>; +/** + * Getter - returns the callback to state mapping. + */ +declare function getCallbackToStateMapping(): Record>; +/** + * Getter - returns the default key states. + */ +declare function getDefaultKeyStates(): Record>; +/** + * Sets the initial values for the Onyx store + * + * @param keys - `ONYXKEYS` constants object from Onyx.init() + * @param initialKeyStates - initial data to set when `init()` and `clear()` are called + * @param safeEvictionKeys - This is an array of keys (individual or collection patterns) that when provided to Onyx are flagged as "safe" for removal. + */ +declare function initStoreValues(keys: DeepRecord, initialKeyStates: Partial, safeEvictionKeys: OnyxKey[]): void; +/** + * Sends an action to DevTools extension + * + * @param method - Onyx method from METHOD + * @param key - Onyx key that was changed + * @param value - contains the change that was made by the method + * @param mergedValue - (optional) value that was written in the storage after a merge method was executed. + */ +declare function sendActionToDevTools(method: typeof METHOD.MERGE_COLLECTION | typeof METHOD.MULTI_SET, key: undefined, value: OnyxCollection, mergedValue?: undefined): void; +declare function sendActionToDevTools(method: Exclude, key: OnyxKey, value: OnyxEntry, mergedValue?: OnyxEntry): void; +/** + * We are batching together onyx updates. This helps with use cases where we schedule onyx updates after each other. + * This happens for example in the Onyx.update function, where we process API responses that might contain a lot of + * update operations. 
Instead of calling the subscribers for each update operation, we batch them together which will + * cause react to schedule the updates at once instead of after each other. This is mainly a performance optimization. + */ +declare function maybeFlushBatchUpdates(): Promise; +declare function batchUpdates(updates: () => void): Promise; +/** Get some data from the store */ +declare function get>(key: TKey): Promise; +/** Returns current key names stored in persisted storage */ +declare function getAllKeys(): Promise>; +/** + * Returns set of all registered collection keys + */ +declare function getCollectionKeys(): Set; +/** + * Checks to see if the subscriber's supplied key + * is associated with a collection of keys. + */ +declare function isCollectionKey(key: OnyxKey): key is CollectionKeyBase; +declare function isCollectionMemberKey(collectionKey: TCollectionKey, key: string): key is `${TCollectionKey}${string}`; +/** + * Splits a collection member key into the collection key part and the ID part. + * @param key - The collection member key to split. + * @returns A tuple where the first element is the collection part and the second element is the ID part. + */ +declare function splitCollectionMemberKey(key: TKey): [TKey extends `${infer Prefix}_${string}` ? `${Prefix}_` : never, string]; +/** + * Checks to see if a provided key is the exact configured key of our connected subscriber + * or if the provided key is a collection member key (in case our configured key is a "collection key") + */ +declare function isKeyMatch(configKey: OnyxKey, key: OnyxKey): boolean; +/** Checks to see if this key has been flagged as safe for removal. */ +declare function isSafeEvictionKey(testKey: OnyxKey): boolean; +/** + * Tries to get a value from the cache. If the value is not present in cache it will return the default value or undefined. + * If the requested key is a collection, it will return an object with all the collection members. + */ +declare function tryGetCachedValue(key: TKey, mapping?: Partial>): OnyxValue; +/** + * Remove a key from the recently accessed key list. + */ +declare function removeLastAccessedKey(key: OnyxKey): void; +/** + * Add a key to the list of recently accessed keys. The least + * recently accessed key should be at the head and the most + * recently accessed key at the tail. + */ +declare function addLastAccessedKey(key: OnyxKey): void; +/** + * Removes a key previously added to this list + * which will enable it to be deleted again. + */ +declare function removeFromEvictionBlockList(key: OnyxKey, connectionID: number): void; +/** Keys added to this list can never be deleted. */ +declare function addToEvictionBlockList(key: OnyxKey, connectionID: number): void; +/** + * Take all the keys that are safe to evict and add them to + * the recently accessed list when initializing the app. This + * enables keys that have not recently been accessed to be + * removed. 
+ */ +declare function addAllSafeEvictionKeysToRecentlyAccessedList(): Promise; +declare function getCachedCollection(collectionKey: TKey, collectionMemberKeys?: string[]): NonNullable>; +/** + * When a collection of keys change, search for any callbacks matching the collection key and trigger those callbacks + */ +declare function keysChanged(collectionKey: TKey, partialCollection: OnyxCollection, partialPreviousCollection: OnyxCollection | undefined, notifyRegularSubscibers?: boolean, notifyWithOnyxSubscibers?: boolean): void; +/** + * When a key change happens, search for any callbacks matching the key or collection key and trigger those callbacks + * + * @example + * keyChanged(key, value, subscriber => subscriber.initWithStoredValues === false) + */ +declare function keyChanged(key: TKey, value: OnyxValue, previousValue: OnyxValue, canUpdateSubscriber?: (subscriber?: Mapping) => boolean, notifyRegularSubscibers?: boolean, notifyWithOnyxSubscibers?: boolean): void; +/** + * Sends the data obtained from the keys to the connection. It either: + * - sets state on the withOnyxInstances + * - triggers the callback function + */ +declare function sendDataToConnection(mapping: Mapping, value: OnyxValue | null, matchedKey: TKey | undefined, isBatched: boolean): void; +/** + * We check to see if this key is flagged as safe for eviction and add it to the recentlyAccessedKeys list so that when we + * run out of storage the least recently accessed key can be removed. + */ +declare function addKeyToRecentlyAccessedIfNeeded(mapping: Mapping): void; +/** + * Gets the data for a given an array of matching keys, combines them into an object, and sends the result back to the subscriber. + */ +declare function getCollectionDataAndSendAsObject(matchingKeys: CollectionKeyBase[], mapping: Mapping): void; +/** + * Schedules an update that will be appended to the macro task queue (so it doesn't update the subscribers immediately). + * + * @example + * scheduleSubscriberUpdate(key, value, subscriber => subscriber.initWithStoredValues === false) + */ +declare function scheduleSubscriberUpdate(key: TKey, value: OnyxValue, previousValue: OnyxValue, canUpdateSubscriber?: (subscriber?: Mapping) => boolean): Promise; +/** + * This method is similar to notifySubscribersOnNextTick but it is built for working specifically with collections + * so that keysChanged() is triggered for the collection and not keyChanged(). If this was not done, then the + * subscriber callbacks receive the data in a different format than they normally expect and it breaks code. + */ +declare function scheduleNotifyCollectionSubscribers(key: TKey, value: OnyxCollection, previousValue?: OnyxCollection): Promise; +/** + * Remove a key from Onyx and update the subscribers + */ +declare function remove(key: TKey): Promise; +declare function reportStorageQuota(): Promise; +/** + * If we fail to set or merge we must handle this by + * evicting some data from Onyx and then retrying to do + * whatever it is we attempted to do. + */ +declare function evictStorageAndRetry(error: Error, onyxMethod: TMethod, ...args: Parameters): Promise; +/** + * Notifies subscribers and writes current value to cache + */ +declare function broadcastUpdate(key: TKey, value: OnyxValue, hasChanged?: boolean): Promise; +declare function hasPendingMergeForKey(key: OnyxKey): boolean; +type RemoveNullValuesOutput | undefined> = { + value: Value; + wasRemoved: boolean; +}; +/** + * Removes a key from storage if the value is null. 
+ * Otherwise removes all nested null values in objects, + * if shouldRemoveNestedNulls is true and returns the object. + * + * @returns The value without null values and a boolean "wasRemoved", which indicates if the key got removed completely + */ +declare function removeNullValues | undefined>(key: OnyxKey, value: Value, shouldRemoveNestedNulls?: boolean): RemoveNullValuesOutput; +/** + * Storage expects array like: [["@MyApp_user", value_1], ["@MyApp_key", value_2]] + * This method transforms an object like {'@MyApp_user': myUserValue, '@MyApp_key': myKeyValue} + * to an array of key-value pairs in the above format and removes key-value pairs that are being set to null + +* @return an array of key - value pairs <[key, value]> + */ +declare function prepareKeyValuePairsForStorage(data: Record>, shouldRemoveNestedNulls: boolean): Array<[OnyxKey, OnyxInput]>; +/** + * Merges an array of changes with an existing value + * + * @param changes Array of changes that should be applied to the existing value + */ +declare function applyMerge | undefined, TChange extends OnyxInput | undefined>(existingValue: TValue, changes: TChange[], shouldRemoveNestedNulls: boolean): TChange; +/** + * Merge user provided default key value pairs. + */ +declare function initializeWithDefaultKeyStates(): Promise; +/** + * Verify if the collection is valid for merging into the collection key using mergeCollection() + */ +declare function isValidMergeCollection(collectionKey: TKey, collection: OnyxMergeCollectionInput): boolean; +declare const OnyxUtils: { + METHOD: { + readonly SET: "set"; + readonly MERGE: "merge"; + readonly MERGE_COLLECTION: "mergecollection"; + readonly MULTI_SET: "multiset"; + readonly CLEAR: "clear"; + }; + getMergeQueue: typeof getMergeQueue; + getMergeQueuePromise: typeof getMergeQueuePromise; + getCallbackToStateMapping: typeof getCallbackToStateMapping; + getDefaultKeyStates: typeof getDefaultKeyStates; + initStoreValues: typeof initStoreValues; + sendActionToDevTools: typeof sendActionToDevTools; + maybeFlushBatchUpdates: typeof maybeFlushBatchUpdates; + batchUpdates: typeof batchUpdates; + get: typeof get; + getAllKeys: typeof getAllKeys; + getCollectionKeys: typeof getCollectionKeys; + isCollectionKey: typeof isCollectionKey; + isCollectionMemberKey: typeof isCollectionMemberKey; + splitCollectionMemberKey: typeof splitCollectionMemberKey; + isKeyMatch: typeof isKeyMatch; + isSafeEvictionKey: typeof isSafeEvictionKey; + tryGetCachedValue: typeof tryGetCachedValue; + removeLastAccessedKey: typeof removeLastAccessedKey; + addLastAccessedKey: typeof addLastAccessedKey; + removeFromEvictionBlockList: typeof removeFromEvictionBlockList; + addToEvictionBlockList: typeof addToEvictionBlockList; + addAllSafeEvictionKeysToRecentlyAccessedList: typeof addAllSafeEvictionKeysToRecentlyAccessedList; + getCachedCollection: typeof getCachedCollection; + keysChanged: typeof keysChanged; + keyChanged: typeof keyChanged; + sendDataToConnection: typeof sendDataToConnection; + addKeyToRecentlyAccessedIfNeeded: typeof addKeyToRecentlyAccessedIfNeeded; + getCollectionDataAndSendAsObject: typeof getCollectionDataAndSendAsObject; + scheduleSubscriberUpdate: typeof scheduleSubscriberUpdate; + scheduleNotifyCollectionSubscribers: typeof scheduleNotifyCollectionSubscribers; + remove: typeof remove; + reportStorageQuota: typeof reportStorageQuota; + evictStorageAndRetry: typeof evictStorageAndRetry; + broadcastUpdate: typeof broadcastUpdate; + hasPendingMergeForKey: typeof hasPendingMergeForKey; + 
removeNullValues: typeof removeNullValues; + prepareKeyValuePairsForStorage: typeof prepareKeyValuePairsForStorage; + applyMerge: typeof applyMerge; + initializeWithDefaultKeyStates: typeof initializeWithDefaultKeyStates; + getSnapshotKey: typeof getSnapshotKey; + isValidMergeCollection: typeof isValidMergeCollection; +}; +export default OnyxUtils; diff --git a/dist/OnyxUtils.js b/dist/OnyxUtils.js new file mode 100644 index 00000000..5b34ef4e --- /dev/null +++ b/dist/OnyxUtils.js @@ -0,0 +1,973 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/* eslint-disable @typescript-eslint/prefer-for-of */ +/* eslint-disable no-continue */ +const fast_equals_1 = require("fast-equals"); +const clone_1 = __importDefault(require("lodash/clone")); +const DevTools_1 = __importDefault(require("./DevTools")); +const Logger = __importStar(require("./Logger")); +const OnyxCache_1 = __importDefault(require("./OnyxCache")); +const PerformanceUtils = __importStar(require("./PerformanceUtils")); +const Str = __importStar(require("./Str")); +const batch_1 = __importDefault(require("./batch")); +const storage_1 = __importDefault(require("./storage")); +const utils_1 = __importDefault(require("./utils")); +// Method constants +const METHOD = { + SET: 'set', + MERGE: 'merge', + MERGE_COLLECTION: 'mergecollection', + MULTI_SET: 'multiset', + CLEAR: 'clear', +}; +// Key/value store of Onyx key and arrays of values to merge +const mergeQueue = {}; +const mergeQueuePromise = {}; +// Holds a mapping of all the React components that want their state subscribed to a store key +const callbackToStateMapping = {}; +// Keeps a copy of the values of the onyx collection keys as a map for faster lookups +let onyxCollectionKeySet = new Set(); +// Holds a list of keys that have been directly subscribed to or recently modified from least to most recent +let recentlyAccessedKeys = []; +// Holds a list of keys that are safe to remove when we reach max storage. If a key does not match with +// whatever appears in this list it will NEVER be a candidate for eviction. +let evictionAllowList = []; +// Holds a map of keys and connectionID arrays whose keys will never be automatically evicted as +// long as we have at least one subscriber that returns false for the canEvict property. 
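+// Entries are shaped like {report_123: [17, 42]} (illustrative key and IDs): an array of connectionIDs that
+// currently block eviction of that key; the key becomes evictable again once the array is emptied.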
+const evictionBlocklist = {}; +// Optional user-provided key value states set when Onyx initializes or clears +let defaultKeyStates = {}; +let batchUpdatesPromise = null; +let batchUpdatesQueue = []; +let snapshotKey = null; +function getSnapshotKey() { + return snapshotKey; +} +/** + * Getter - returns the merge queue. + */ +function getMergeQueue() { + return mergeQueue; +} +/** + * Getter - returns the merge queue promise. + */ +function getMergeQueuePromise() { + return mergeQueuePromise; +} +/** + * Getter - returns the callback to state mapping. + */ +function getCallbackToStateMapping() { + return callbackToStateMapping; +} +/** + * Getter - returns the default key states. + */ +function getDefaultKeyStates() { + return defaultKeyStates; +} +/** + * Sets the initial values for the Onyx store + * + * @param keys - `ONYXKEYS` constants object from Onyx.init() + * @param initialKeyStates - initial data to set when `init()` and `clear()` are called + * @param safeEvictionKeys - This is an array of keys (individual or collection patterns) that when provided to Onyx are flagged as "safe" for removal. + */ +function initStoreValues(keys, initialKeyStates, safeEvictionKeys) { + var _a; + // We need the value of the collection keys later for checking if a + // key is a collection. We store it in a map for faster lookup. + const collectionValues = Object.values((_a = keys.COLLECTION) !== null && _a !== void 0 ? _a : {}); + onyxCollectionKeySet = collectionValues.reduce((acc, val) => { + acc.add(val); + return acc; + }, new Set()); + // Set our default key states to use when initializing and clearing Onyx data + defaultKeyStates = initialKeyStates; + DevTools_1.default.initState(initialKeyStates); + // Let Onyx know about which keys are safe to evict + evictionAllowList = safeEvictionKeys; + if (typeof keys.COLLECTION === 'object' && typeof keys.COLLECTION.SNAPSHOT === 'string') { + snapshotKey = keys.COLLECTION.SNAPSHOT; + } +} +function sendActionToDevTools(method, key, value, mergedValue = undefined) { + DevTools_1.default.registerAction(utils_1.default.formatActionName(method, key), value, key ? { [key]: mergedValue || value } : value); +} +/** + * We are batching together onyx updates. This helps with use cases where we schedule onyx updates after each other. + * This happens for example in the Onyx.update function, where we process API responses that might contain a lot of + * update operations. Instead of calling the subscribers for each update operation, we batch them together which will + * cause react to schedule the updates at once instead of after each other. This is mainly a performance optimization. + */ +function maybeFlushBatchUpdates() { + if (batchUpdatesPromise) { + return batchUpdatesPromise; + } + batchUpdatesPromise = new Promise((resolve) => { + /* We use (setTimeout, 0) here which should be called once native module calls are flushed (usually at the end of the frame) + * We may investigate if (setTimeout, 1) (which in React Native is equal to requestAnimationFrame) works even better + * then the batch will be flushed on next frame. + */ + setTimeout(() => { + const updatesCopy = batchUpdatesQueue; + batchUpdatesQueue = []; + batchUpdatesPromise = null; + (0, batch_1.default)(() => { + updatesCopy.forEach((applyUpdates) => { + applyUpdates(); + }); + }); + resolve(); + }, 0); + }); + return batchUpdatesPromise; +} +function batchUpdates(updates) { + batchUpdatesQueue.push(updates); + return maybeFlushBatchUpdates(); +} +/** + * Takes a collection of items (eg. 
{testKey_1:{a:'a'}, testKey_2:{b:'b'}}) + * and runs it through a reducer function to return a subset of the data according to a selector. + * The resulting collection will only contain items that are returned by the selector. + */ +function reduceCollectionWithSelector(collection, selector, withOnyxInstanceState) { + return Object.entries(collection !== null && collection !== void 0 ? collection : {}).reduce((finalCollection, [key, item]) => { + // eslint-disable-next-line no-param-reassign + finalCollection[key] = selector(item, withOnyxInstanceState); + return finalCollection; + }, {}); +} +/** Get some data from the store */ +function get(key) { + // When we already have the value in cache - resolve right away + if (OnyxCache_1.default.hasCacheForKey(key)) { + return Promise.resolve(OnyxCache_1.default.get(key)); + } + const taskName = `get:${key}`; + // When a value retrieving task for this key is still running hook to it + if (OnyxCache_1.default.hasPendingTask(taskName)) { + return OnyxCache_1.default.getTaskPromise(taskName); + } + // Otherwise retrieve the value from storage and capture a promise to aid concurrent usages + const promise = storage_1.default.getItem(key) + .then((val) => { + if (val === undefined) { + OnyxCache_1.default.addNullishStorageKey(key); + return undefined; + } + OnyxCache_1.default.set(key, val); + return val; + }) + .catch((err) => Logger.logInfo(`Unable to get item from persistent storage. Key: ${key} Error: ${err}`)); + return OnyxCache_1.default.captureTask(taskName, promise); +} +/** Returns current key names stored in persisted storage */ +function getAllKeys() { + // When we've already read stored keys, resolve right away + const cachedKeys = OnyxCache_1.default.getAllKeys(); + if (cachedKeys.size > 0) { + return Promise.resolve(cachedKeys); + } + const taskName = 'getAllKeys'; + // When a value retrieving task for all keys is still running hook to it + if (OnyxCache_1.default.hasPendingTask(taskName)) { + return OnyxCache_1.default.getTaskPromise(taskName); + } + // Otherwise retrieve the keys from storage and capture a promise to aid concurrent usages + const promise = storage_1.default.getAllKeys().then((keys) => { + OnyxCache_1.default.setAllKeys(keys); + // return the updated set of keys + return OnyxCache_1.default.getAllKeys(); + }); + return OnyxCache_1.default.captureTask(taskName, promise); +} +/** + * Returns set of all registered collection keys + */ +function getCollectionKeys() { + return onyxCollectionKeySet; +} +/** + * Checks to see if the subscriber's supplied key + * is associated with a collection of keys. + */ +function isCollectionKey(key) { + return onyxCollectionKeySet.has(key); +} +function isCollectionMemberKey(collectionKey, key) { + return Str.startsWith(key, collectionKey) && key.length > collectionKey.length; +} +/** + * Splits a collection member key into the collection key part and the ID part. + * @param key - The collection member key to split. + * @returns A tuple where the first element is the collection part and the second element is the ID part. 
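+ * @example
+ * splitCollectionMemberKey('report_123') // -> ['report_', '123'] (illustrative; the key is split at the first underscore)
+ * splitCollectionMemberKey('report') // throws, since a key without an underscore is not a collection member key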
+ */ +function splitCollectionMemberKey(key) { + const underscoreIndex = key.indexOf('_'); + if (underscoreIndex === -1) { + throw new Error(`Invalid ${key} key provided, only collection keys are allowed.`); + } + return [key.substring(0, underscoreIndex + 1), key.substring(underscoreIndex + 1)]; +} +/** + * Checks to see if a provided key is the exact configured key of our connected subscriber + * or if the provided key is a collection member key (in case our configured key is a "collection key") + */ +function isKeyMatch(configKey, key) { + return isCollectionKey(configKey) ? Str.startsWith(key, configKey) : configKey === key; +} +/** Checks to see if this key has been flagged as safe for removal. */ +function isSafeEvictionKey(testKey) { + return evictionAllowList.some((key) => isKeyMatch(key, testKey)); +} +/** + * Tries to get a value from the cache. If the value is not present in cache it will return the default value or undefined. + * If the requested key is a collection, it will return an object with all the collection members. + */ +function tryGetCachedValue(key, mapping) { + let val = OnyxCache_1.default.get(key); + if (isCollectionKey(key)) { + const allCacheKeys = OnyxCache_1.default.getAllKeys(); + // It is possible we haven't loaded all keys yet so we do not know if the + // collection actually exists. + if (allCacheKeys.size === 0) { + return; + } + const matchingKeys = Array.from(allCacheKeys).filter((k) => k.startsWith(key)); + const values = matchingKeys.reduce((finalObject, matchedKey) => { + const cachedValue = OnyxCache_1.default.get(matchedKey); + if (cachedValue) { + // This is permissible because we're in the process of constructing the final object in a reduce function. + // eslint-disable-next-line no-param-reassign + finalObject[matchedKey] = cachedValue; + } + return finalObject; + }, {}); + val = values; + } + if (mapping === null || mapping === void 0 ? void 0 : mapping.selector) { + const state = mapping.withOnyxInstance ? mapping.withOnyxInstance.state : undefined; + if (isCollectionKey(key)) { + return reduceCollectionWithSelector(val, mapping.selector, state); + } + return mapping.selector(val, state); + } + return val; +} +/** + * Remove a key from the recently accessed key list. + */ +function removeLastAccessedKey(key) { + recentlyAccessedKeys = recentlyAccessedKeys.filter((recentlyAccessedKey) => recentlyAccessedKey !== key); +} +/** + * Add a key to the list of recently accessed keys. The least + * recently accessed key should be at the head and the most + * recently accessed key at the tail. + */ +function addLastAccessedKey(key) { + // Only specific keys belong in this list since we cannot remove an entire collection. + if (isCollectionKey(key) || !isSafeEvictionKey(key)) { + return; + } + removeLastAccessedKey(key); + recentlyAccessedKeys.push(key); +} +/** + * Removes a key previously added to this list + * which will enable it to be deleted again. + */ +function removeFromEvictionBlockList(key, connectionID) { + var _a, _b, _c; + evictionBlocklist[key] = (_b = (_a = evictionBlocklist[key]) === null || _a === void 0 ? void 0 : _a.filter((evictionKey) => evictionKey !== connectionID)) !== null && _b !== void 0 ? _b : []; + // Remove the key if there are no more subscribers + if (((_c = evictionBlocklist[key]) === null || _c === void 0 ? void 0 : _c.length) === 0) { + delete evictionBlocklist[key]; + } +} +/** Keys added to this list can never be deleted. 
*/ +function addToEvictionBlockList(key, connectionID) { + removeFromEvictionBlockList(key, connectionID); + if (!evictionBlocklist[key]) { + evictionBlocklist[key] = []; + } + evictionBlocklist[key].push(connectionID); +} +/** + * Take all the keys that are safe to evict and add them to + * the recently accessed list when initializing the app. This + * enables keys that have not recently been accessed to be + * removed. + */ +function addAllSafeEvictionKeysToRecentlyAccessedList() { + return getAllKeys().then((keys) => { + evictionAllowList.forEach((safeEvictionKey) => { + keys.forEach((key) => { + if (!isKeyMatch(safeEvictionKey, key)) { + return; + } + addLastAccessedKey(key); + }); + }); + }); +} +function getCachedCollection(collectionKey, collectionMemberKeys) { + const allKeys = collectionMemberKeys || OnyxCache_1.default.getAllKeys(); + const collection = {}; + // forEach exists on both Set and Array + allKeys.forEach((key) => { + // If we don't have collectionMemberKeys array then we have to check whether a key is a collection member key. + // Because in that case the keys will be coming from `cache.getAllKeys()` and we need to filter out the keys that + // are not part of the collection. + if (!collectionMemberKeys && !isCollectionMemberKey(collectionKey, key)) { + return; + } + const cachedValue = OnyxCache_1.default.get(key); + if (cachedValue === undefined && !OnyxCache_1.default.hasNullishStorageKey(key)) { + return; + } + collection[key] = OnyxCache_1.default.get(key); + }); + return collection; +} +/** + * When a collection of keys change, search for any callbacks matching the collection key and trigger those callbacks + */ +function keysChanged(collectionKey, partialCollection, partialPreviousCollection, notifyRegularSubscibers = true, notifyWithOnyxSubscibers = true) { + // We prepare the "cached collection" which is the entire collection + the new partial data that + // was merged in via mergeCollection(). + const cachedCollection = getCachedCollection(collectionKey); + const previousCollection = partialPreviousCollection !== null && partialPreviousCollection !== void 0 ? partialPreviousCollection : {}; + // We are iterating over all subscribers similar to keyChanged(). However, we are looking for subscribers who are subscribing to either a collection key or + // individual collection key member for the collection that is being updated. It is important to note that the collection parameter cane be a PARTIAL collection + // and does not represent all of the combined keys and values for a collection key. It is just the "new" data that was merged in via mergeCollection(). + const stateMappingKeys = Object.keys(callbackToStateMapping); + for (let i = 0; i < stateMappingKeys.length; i++) { + const subscriber = callbackToStateMapping[stateMappingKeys[i]]; + if (!subscriber) { + continue; + } + // Skip iteration if we do not have a collection key or a collection member key on this subscriber + if (!Str.startsWith(subscriber.key, collectionKey)) { + continue; + } + /** + * e.g. Onyx.connect({key: ONYXKEYS.COLLECTION.REPORT, callback: ...}); + */ + const isSubscribedToCollectionKey = subscriber.key === collectionKey; + /** + * e.g. Onyx.connect({key: `${ONYXKEYS.COLLECTION.REPORT}{reportID}`, callback: ...}); + */ + const isSubscribedToCollectionMemberKey = isCollectionMemberKey(collectionKey, subscriber.key); + // Regular Onyx.connect() subscriber found. 
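+            /*
+             * Sketch of the two callback shapes this branch serves (using ONYXKEYS.COLLECTION.REPORT as in the examples above):
+             *
+             *     // waitForCollectionCallback: the callback receives the whole cached collection in one call.
+             *     Onyx.connect({key: ONYXKEYS.COLLECTION.REPORT, waitForCollectionCallback: true, callback: (reports) => {}});
+             *
+             *     // Without it, the callback is invoked once per changed member with (value, key).
+             *     Onyx.connect({key: ONYXKEYS.COLLECTION.REPORT, callback: (report, reportKey) => {}});
+             */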
+ if (typeof subscriber.callback === 'function') { + if (!notifyRegularSubscibers) { + continue; + } + // If they are subscribed to the collection key and using waitForCollectionCallback then we'll + // send the whole cached collection. + if (isSubscribedToCollectionKey) { + if (subscriber.waitForCollectionCallback) { + subscriber.callback(cachedCollection); + continue; + } + // If they are not using waitForCollectionCallback then we notify the subscriber with + // the new merged data but only for any keys in the partial collection. + const dataKeys = Object.keys(partialCollection !== null && partialCollection !== void 0 ? partialCollection : {}); + for (let j = 0; j < dataKeys.length; j++) { + const dataKey = dataKeys[j]; + if ((0, fast_equals_1.deepEqual)(cachedCollection[dataKey], previousCollection[dataKey])) { + continue; + } + subscriber.callback(cachedCollection[dataKey], dataKey); + } + continue; + } + // And if the subscriber is specifically only tracking a particular collection member key then we will + // notify them with the cached data for that key only. + if (isSubscribedToCollectionMemberKey) { + if ((0, fast_equals_1.deepEqual)(cachedCollection[subscriber.key], previousCollection[subscriber.key])) { + continue; + } + const subscriberCallback = subscriber.callback; + subscriberCallback(cachedCollection[subscriber.key], subscriber.key); + continue; + } + continue; + } + // React component subscriber found. + if ('withOnyxInstance' in subscriber && subscriber.withOnyxInstance) { + if (!notifyWithOnyxSubscibers) { + continue; + } + // We are subscribed to a collection key so we must update the data in state with the new + // collection member key values from the partial update. + if (isSubscribedToCollectionKey) { + // If the subscriber has a selector, then the component's state must only be updated with the data + // returned by the selector. + const collectionSelector = subscriber.selector; + if (collectionSelector) { + subscriber.withOnyxInstance.setStateProxy((prevState) => { + const previousData = prevState[subscriber.statePropertyName]; + const newData = reduceCollectionWithSelector(cachedCollection, collectionSelector, subscriber.withOnyxInstance.state); + if ((0, fast_equals_1.deepEqual)(previousData, newData)) { + return null; + } + return { + [subscriber.statePropertyName]: newData, + }; + }); + continue; + } + subscriber.withOnyxInstance.setStateProxy((prevState) => { + var _a; + const prevCollection = (_a = prevState === null || prevState === void 0 ? void 0 : prevState[subscriber.statePropertyName]) !== null && _a !== void 0 ? _a : {}; + const finalCollection = (0, clone_1.default)(prevCollection); + const dataKeys = Object.keys(partialCollection !== null && partialCollection !== void 0 ? partialCollection : {}); + for (let j = 0; j < dataKeys.length; j++) { + const dataKey = dataKeys[j]; + finalCollection[dataKey] = cachedCollection[dataKey]; + } + if ((0, fast_equals_1.deepEqual)(prevCollection, finalCollection)) { + return null; + } + PerformanceUtils.logSetStateCall(subscriber, prevState === null || prevState === void 0 ? void 0 : prevState[subscriber.statePropertyName], finalCollection, 'keysChanged', collectionKey); + return { + [subscriber.statePropertyName]: finalCollection, + }; + }); + continue; + } + // If a React component is only interested in a single key then we can set the cached value directly to the state name. 
+ if (isSubscribedToCollectionMemberKey) { + if ((0, fast_equals_1.deepEqual)(cachedCollection[subscriber.key], previousCollection[subscriber.key])) { + continue; + } + // However, we only want to update this subscriber if the partial data contains a change. + // Otherwise, we would update them with a value they already have and trigger an unnecessary re-render. + const dataFromCollection = partialCollection === null || partialCollection === void 0 ? void 0 : partialCollection[subscriber.key]; + if (dataFromCollection === undefined) { + continue; + } + // If the subscriber has a selector, then the component's state must only be updated with the data + // returned by the selector and the state should only change when the subset of data changes from what + // it was previously. + const selector = subscriber.selector; + if (selector) { + subscriber.withOnyxInstance.setStateProxy((prevState) => { + const prevData = prevState[subscriber.statePropertyName]; + const newData = selector(cachedCollection[subscriber.key], subscriber.withOnyxInstance.state); + if ((0, fast_equals_1.deepEqual)(prevData, newData)) { + return null; + } + PerformanceUtils.logSetStateCall(subscriber, prevData, newData, 'keysChanged', collectionKey); + return { + [subscriber.statePropertyName]: newData, + }; + }); + continue; + } + subscriber.withOnyxInstance.setStateProxy((prevState) => { + const prevData = prevState[subscriber.statePropertyName]; + const newData = cachedCollection[subscriber.key]; + // Avoids triggering unnecessary re-renders when feeding empty objects + if (utils_1.default.isEmptyObject(newData) && utils_1.default.isEmptyObject(prevData)) { + return null; + } + if ((0, fast_equals_1.deepEqual)(prevData, newData)) { + return null; + } + PerformanceUtils.logSetStateCall(subscriber, prevData, newData, 'keysChanged', collectionKey); + return { + [subscriber.statePropertyName]: newData, + }; + }); + } + } + } +} +/** + * When a key change happens, search for any callbacks matching the key or collection key and trigger those callbacks + * + * @example + * keyChanged(key, value, subscriber => subscriber.initWithStoredValues === false) + */ +function keyChanged(key, value, previousValue, canUpdateSubscriber = () => true, notifyRegularSubscibers = true, notifyWithOnyxSubscibers = true) { + // Add or remove this key from the recentlyAccessedKeys lists + if (value !== null) { + addLastAccessedKey(key); + } + else { + removeLastAccessedKey(key); + } + // We are iterating over all subscribers to see if they are interested in the key that has just changed. If the subscriber's key is a collection key then we will + // notify them if the key that changed is a collection member. Or if it is a regular key notify them when there is an exact match. Depending on whether the subscriber + // was connected via withOnyx we will call setState() directly on the withOnyx instance. If it is a regular connection we will pass the data to the provided callback. 
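+    /*
+     * For illustration, the two subscription styles served here (a sketch; ONYXKEYS.SESSION and MyComponent are hypothetical):
+     *
+     *     // Regular subscriber: the callback receives (value, key) directly.
+     *     Onyx.connect({key: ONYXKEYS.SESSION, callback: (session) => {}});
+     *
+     *     // withOnyx subscriber: setState() is called on the wrapped component so `props.session` tracks the key.
+     *     withOnyx({session: {key: ONYXKEYS.SESSION}})(MyComponent);
+     */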
+ const stateMappingKeys = Object.keys(callbackToStateMapping); + for (let i = 0; i < stateMappingKeys.length; i++) { + const subscriber = callbackToStateMapping[stateMappingKeys[i]]; + if (!subscriber || !isKeyMatch(subscriber.key, key) || !canUpdateSubscriber(subscriber)) { + continue; + } + // Subscriber is a regular call to connect() and provided a callback + if (typeof subscriber.callback === 'function') { + if (!notifyRegularSubscibers) { + continue; + } + if (isCollectionKey(subscriber.key) && subscriber.waitForCollectionCallback) { + const cachedCollection = getCachedCollection(subscriber.key); + cachedCollection[key] = value; + subscriber.callback(cachedCollection); + continue; + } + const subscriberCallback = subscriber.callback; + subscriberCallback(value, key); + continue; + } + // Subscriber connected via withOnyx() HOC + if ('withOnyxInstance' in subscriber && subscriber.withOnyxInstance) { + if (!notifyWithOnyxSubscibers) { + continue; + } + const selector = subscriber.selector; + // Check if we are subscribing to a collection key and overwrite the collection member key value in state + if (isCollectionKey(subscriber.key)) { + // If the subscriber has a selector, then the consumer of this data must only be given the data + // returned by the selector and only when the selected data has changed. + if (selector) { + subscriber.withOnyxInstance.setStateProxy((prevState) => { + const prevWithOnyxData = prevState[subscriber.statePropertyName]; + const newWithOnyxData = { + [key]: selector(value, subscriber.withOnyxInstance.state), + }; + const prevDataWithNewData = Object.assign(Object.assign({}, prevWithOnyxData), newWithOnyxData); + if ((0, fast_equals_1.deepEqual)(prevWithOnyxData, prevDataWithNewData)) { + return null; + } + PerformanceUtils.logSetStateCall(subscriber, prevWithOnyxData, newWithOnyxData, 'keyChanged', key); + return { + [subscriber.statePropertyName]: prevDataWithNewData, + }; + }); + continue; + } + subscriber.withOnyxInstance.setStateProxy((prevState) => { + const prevCollection = prevState[subscriber.statePropertyName] || {}; + const newCollection = Object.assign(Object.assign({}, prevCollection), { [key]: value }); + if ((0, fast_equals_1.deepEqual)(prevCollection, newCollection)) { + return null; + } + PerformanceUtils.logSetStateCall(subscriber, prevCollection, newCollection, 'keyChanged', key); + return { + [subscriber.statePropertyName]: newCollection, + }; + }); + continue; + } + // If the subscriber has a selector, then the component's state must only be updated with the data + // returned by the selector and only if the selected data has changed. 
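+            /*
+             * Sketch of such a selector-based mapping (reportID, reportName and MyComponent are hypothetical):
+             *
+             *     withOnyx({
+             *         reportName: {
+             *             key: `${ONYXKEYS.COLLECTION.REPORT}${reportID}`,
+             *             // The component only re-renders when the selected subset changes, not on every change to the report.
+             *             selector: (report) => report?.reportName,
+             *         },
+             *     })(MyComponent);
+             */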
+ if (selector) { + subscriber.withOnyxInstance.setStateProxy(() => { + const prevValue = selector(previousValue, subscriber.withOnyxInstance.state); + const newValue = selector(value, subscriber.withOnyxInstance.state); + if ((0, fast_equals_1.deepEqual)(prevValue, newValue)) { + return null; + } + return { + [subscriber.statePropertyName]: newValue, + }; + }); + continue; + } + // If we did not match on a collection key then we just set the new data to the state property + subscriber.withOnyxInstance.setStateProxy((prevState) => { + const prevWithOnyxValue = prevState[subscriber.statePropertyName]; + // Avoids triggering unnecessary re-renders when feeding empty objects + if (utils_1.default.isEmptyObject(value) && utils_1.default.isEmptyObject(prevWithOnyxValue)) { + return null; + } + if (prevWithOnyxValue === value) { + return null; + } + PerformanceUtils.logSetStateCall(subscriber, previousValue, value, 'keyChanged', key); + return { + [subscriber.statePropertyName]: value, + }; + }); + continue; + } + console.error('Warning: Found a matching subscriber to a key that changed, but no callback or withOnyxInstance could be found.'); + } +} +/** + * Sends the data obtained from the keys to the connection. It either: + * - sets state on the withOnyxInstances + * - triggers the callback function + */ +function sendDataToConnection(mapping, value, matchedKey, isBatched) { + var _a, _b; + // If the mapping no longer exists then we should not send any data. + // This means our subscriber disconnected or withOnyx wrapped component unmounted. + if (!callbackToStateMapping[mapping.connectionID]) { + return; + } + if ('withOnyxInstance' in mapping && mapping.withOnyxInstance) { + let newData = value; + // If the mapping has a selector, then the component's state must only be updated with the data + // returned by the selector. + if (mapping.selector) { + if (isCollectionKey(mapping.key)) { + newData = reduceCollectionWithSelector(value, mapping.selector, mapping.withOnyxInstance.state); + } + else { + newData = mapping.selector(value, mapping.withOnyxInstance.state); + } + } + PerformanceUtils.logSetStateCall(mapping, null, newData, 'sendDataToConnection'); + if (isBatched) { + batchUpdates(() => mapping.withOnyxInstance.setWithOnyxState(mapping.statePropertyName, newData)); + } + else { + mapping.withOnyxInstance.setWithOnyxState(mapping.statePropertyName, newData); + } + return; + } + // When there are no matching keys in "Onyx.connect", we pass null to "sendDataToConnection" explicitly, + // to allow the withOnyx instance to set the value in the state initially and therefore stop the loading state once all + // required keys have been set. + // If we would pass undefined to setWithOnyxInstance instead, withOnyx would not set the value in the state. + // withOnyx will internally replace null values with undefined and never pass null values to wrapped components. + // For regular callbacks, we never want to pass null values, but always just undefined if a value is not set in cache or storage. + (_b = (_a = mapping).callback) === null || _b === void 0 ? void 0 : _b.call(_a, value === null ? undefined : value, matchedKey); +} +/** + * We check to see if this key is flagged as safe for eviction and add it to the recentlyAccessedKeys list so that when we + * run out of storage the least recently accessed key can be removed. 
+ */ +function addKeyToRecentlyAccessedIfNeeded(mapping) { + if (!isSafeEvictionKey(mapping.key)) { + return; + } + // Try to free some cache whenever we connect to a safe eviction key + OnyxCache_1.default.removeLeastRecentlyUsedKeys(); + if ('withOnyxInstance' in mapping && mapping.withOnyxInstance && !isCollectionKey(mapping.key)) { + // All React components subscribing to a key flagged as a safe eviction key must implement the canEvict property. + if (mapping.canEvict === undefined) { + throw new Error(`Cannot subscribe to safe eviction key '${mapping.key}' without providing a canEvict value.`); + } + addLastAccessedKey(mapping.key); + } +} +/** + * Gets the data for a given an array of matching keys, combines them into an object, and sends the result back to the subscriber. + */ +function getCollectionDataAndSendAsObject(matchingKeys, mapping) { + // Keys that are not in the cache + const missingKeys = []; + // Tasks that are pending + const pendingTasks = []; + // Keys for the tasks that are pending + const pendingKeys = []; + // We are going to combine all the data from the matching keys into a single object + const data = {}; + /** + * We are going to iterate over all the matching keys and check if we have the data in the cache. + * If we do then we add it to the data object. If we do not then we check if there is a pending task + * for the key. If there is then we add the promise to the pendingTasks array and the key to the pendingKeys + * array. If there is no pending task then we add the key to the missingKeys array. + * + * These missingKeys will be later to use to multiGet the data from the storage. + */ + matchingKeys.forEach((key) => { + const cacheValue = OnyxCache_1.default.get(key); + if (cacheValue) { + data[key] = cacheValue; + return; + } + const pendingKey = `get:${key}`; + if (OnyxCache_1.default.hasPendingTask(pendingKey)) { + pendingTasks.push(OnyxCache_1.default.getTaskPromise(pendingKey)); + pendingKeys.push(key); + } + else { + missingKeys.push(key); + } + }); + Promise.all(pendingTasks) + // We are going to wait for all the pending tasks to resolve and then add the data to the data object. + .then((values) => { + values.forEach((value, index) => { + data[pendingKeys[index]] = value; + }); + return Promise.resolve(); + }) + // We are going to get the missing keys using multiGet from the storage. + .then(() => { + if (missingKeys.length === 0) { + return Promise.resolve(undefined); + } + return storage_1.default.multiGet(missingKeys); + }) + // We are going to add the data from the missing keys to the data object and also merge it to the cache. + .then((values) => { + if (!values || values.length === 0) { + return Promise.resolve(); + } + // temp object is used to merge the missing data into the cache + const temp = {}; + values.forEach(([key, value]) => { + data[key] = value; + temp[key] = value; + }); + OnyxCache_1.default.merge(temp); + return Promise.resolve(); + }) + // We are going to send the data to the subscriber. + .finally(() => { + sendDataToConnection(mapping, data, undefined, true); + }); +} +/** + * Schedules an update that will be appended to the macro task queue (so it doesn't update the subscribers immediately). 
+ * + * @example + * scheduleSubscriberUpdate(key, value, subscriber => subscriber.initWithStoredValues === false) + */ +function scheduleSubscriberUpdate(key, value, previousValue, canUpdateSubscriber = () => true) { + const promise = Promise.resolve().then(() => keyChanged(key, value, previousValue, canUpdateSubscriber, true, false)); + batchUpdates(() => keyChanged(key, value, previousValue, canUpdateSubscriber, false, true)); + return Promise.all([maybeFlushBatchUpdates(), promise]).then(() => undefined); +} +/** + * This method is similar to notifySubscribersOnNextTick but it is built for working specifically with collections + * so that keysChanged() is triggered for the collection and not keyChanged(). If this was not done, then the + * subscriber callbacks receive the data in a different format than they normally expect and it breaks code. + */ +function scheduleNotifyCollectionSubscribers(key, value, previousValue) { + const promise = Promise.resolve().then(() => keysChanged(key, value, previousValue, true, false)); + batchUpdates(() => keysChanged(key, value, previousValue, false, true)); + return Promise.all([maybeFlushBatchUpdates(), promise]).then(() => undefined); +} +/** + * Remove a key from Onyx and update the subscribers + */ +function remove(key) { + const prevValue = OnyxCache_1.default.get(key, false); + OnyxCache_1.default.drop(key); + scheduleSubscriberUpdate(key, undefined, prevValue); + return storage_1.default.removeItem(key).then(() => undefined); +} +function reportStorageQuota() { + return storage_1.default.getDatabaseSize() + .then(({ bytesUsed, bytesRemaining }) => { + Logger.logInfo(`Storage Quota Check -- bytesUsed: ${bytesUsed} bytesRemaining: ${bytesRemaining}`); + }) + .catch((dbSizeError) => { + Logger.logAlert(`Unable to get database size. Error: ${dbSizeError}`); + }); +} +/** + * If we fail to set or merge we must handle this by + * evicting some data from Onyx and then retrying to do + * whatever it is we attempted to do. + */ +function evictStorageAndRetry(error, onyxMethod, ...args) { + Logger.logInfo(`Failed to save to storage. Error: ${error}. onyxMethod: ${onyxMethod.name}`); + if (error && Str.startsWith(error.message, "Failed to execute 'put' on 'IDBObjectStore'")) { + Logger.logAlert('Attempted to set invalid data set in Onyx. Please ensure all data is serializable.'); + throw error; + } + // Find the first key that we can remove that has no subscribers in our blocklist + const keyForRemoval = recentlyAccessedKeys.find((key) => !evictionBlocklist[key]); + if (!keyForRemoval) { + // If we have no acceptable keys to remove then we are possibly trying to save mission critical data. If this is the case, + // then we should stop retrying as there is not much the user can do to fix this. Instead of getting them stuck in an infinite loop we + // will allow this write to be skipped. + Logger.logAlert('Out of storage. But found no acceptable keys to remove.'); + return reportStorageQuota(); + } + // Remove the least recently viewed key that is not currently being accessed and retry. + Logger.logInfo(`Out of storage. Evicting least recently accessed key (${keyForRemoval}) and retrying.`); + reportStorageQuota(); + // @ts-expect-error No overload matches this call. 
+ return remove(keyForRemoval).then(() => onyxMethod(...args)); +} +/** + * Notifies subscribers and writes current value to cache + */ +function broadcastUpdate(key, value, hasChanged) { + const prevValue = OnyxCache_1.default.get(key, false); + // Update subscribers if the cached value has changed, or when the subscriber specifically requires + // all updates regardless of value changes (indicated by initWithStoredValues set to false). + if (hasChanged) { + OnyxCache_1.default.set(key, value); + } + else { + OnyxCache_1.default.addToAccessedKeys(key); + } + return scheduleSubscriberUpdate(key, value, prevValue, (subscriber) => hasChanged || (subscriber === null || subscriber === void 0 ? void 0 : subscriber.initWithStoredValues) === false).then(() => undefined); +} +function hasPendingMergeForKey(key) { + return !!mergeQueue[key]; +} +/** + * Removes a key from storage if the value is null. + * Otherwise removes all nested null values in objects, + * if shouldRemoveNestedNulls is true and returns the object. + * + * @returns The value without null values and a boolean "wasRemoved", which indicates if the key got removed completely + */ +function removeNullValues(key, value, shouldRemoveNestedNulls = true) { + if (value === null) { + remove(key); + return { value, wasRemoved: true }; + } + if (value === undefined) { + return { value, wasRemoved: false }; + } + // We can remove all null values in an object by merging it with itself + // utils.fastMerge recursively goes through the object and removes all null values + // Passing two identical objects as source and target to fastMerge will not change it, but only remove the null values + return { value: shouldRemoveNestedNulls ? utils_1.default.removeNestedNullValues(value) : value, wasRemoved: false }; +} +/** + * Storage expects array like: [["@MyApp_user", value_1], ["@MyApp_key", value_2]] + * This method transforms an object like {'@MyApp_user': myUserValue, '@MyApp_key': myKeyValue} + * to an array of key-value pairs in the above format and removes key-value pairs that are being set to null + +* @return an array of key - value pairs <[key, value]> + */ +function prepareKeyValuePairsForStorage(data, shouldRemoveNestedNulls) { + return Object.entries(data).reduce((pairs, [key, value]) => { + const { value: valueAfterRemoving, wasRemoved } = removeNullValues(key, value, shouldRemoveNestedNulls); + if (!wasRemoved && valueAfterRemoving !== undefined) { + pairs.push([key, valueAfterRemoving]); + } + return pairs; + }, []); +} +/** + * Merges an array of changes with an existing value + * + * @param changes Array of changes that should be applied to the existing value + */ +function applyMerge(existingValue, changes, shouldRemoveNestedNulls) { + const lastChange = changes === null || changes === void 0 ? void 0 : changes.at(-1); + if (Array.isArray(lastChange)) { + return lastChange; + } + if (changes.some((change) => change && typeof change === 'object')) { + // Object values are then merged one after the other + return changes.reduce((modifiedData, change) => utils_1.default.fastMerge(modifiedData, change, shouldRemoveNestedNulls), (existingValue || {})); + } + // If we have anything else we can't merge it so we'll + // simply return the last value that was queued + return lastChange; +} +/** + * Merge user provided default key value pairs. 
+ */ +function initializeWithDefaultKeyStates() { + return storage_1.default.multiGet(Object.keys(defaultKeyStates)).then((pairs) => { + const existingDataAsObject = Object.fromEntries(pairs); + const merged = utils_1.default.fastMerge(existingDataAsObject, defaultKeyStates); + OnyxCache_1.default.merge(merged !== null && merged !== void 0 ? merged : {}); + Object.entries(merged !== null && merged !== void 0 ? merged : {}).forEach(([key, value]) => keyChanged(key, value, existingDataAsObject)); + }); +} +/** + * Verify if the collection is valid for merging into the collection key using mergeCollection() + */ +function isValidMergeCollection(collectionKey, collection) { + if (typeof collection !== 'object' || Array.isArray(collection) || utils_1.default.isEmptyObject(collection)) { + Logger.logInfo('mergeCollection() called with invalid or empty value. Skipping this update.'); + return false; + } + // Confirm all the collection keys belong to the same parent + let hasCollectionKeyCheckFailed = false; + Object.keys(collection).forEach((dataKey) => { + if (OnyxUtils.isKeyMatch(collectionKey, dataKey)) { + return; + } + if (process.env.NODE_ENV === 'development') { + throw new Error(`Provided collection doesn't have all its data belonging to the same parent. CollectionKey: ${collectionKey}, DataKey: ${dataKey}`); + } + hasCollectionKeyCheckFailed = true; + Logger.logAlert(`Provided collection doesn't have all its data belonging to the same parent. CollectionKey: ${collectionKey}, DataKey: ${dataKey}`); + }); + return !hasCollectionKeyCheckFailed; +} +const OnyxUtils = { + METHOD, + getMergeQueue, + getMergeQueuePromise, + getCallbackToStateMapping, + getDefaultKeyStates, + initStoreValues, + sendActionToDevTools, + maybeFlushBatchUpdates, + batchUpdates, + get, + getAllKeys, + getCollectionKeys, + isCollectionKey, + isCollectionMemberKey, + splitCollectionMemberKey, + isKeyMatch, + isSafeEvictionKey, + tryGetCachedValue, + removeLastAccessedKey, + addLastAccessedKey, + removeFromEvictionBlockList, + addToEvictionBlockList, + addAllSafeEvictionKeysToRecentlyAccessedList, + getCachedCollection, + keysChanged, + keyChanged, + sendDataToConnection, + addKeyToRecentlyAccessedIfNeeded, + getCollectionDataAndSendAsObject, + scheduleSubscriberUpdate, + scheduleNotifyCollectionSubscribers, + remove, + reportStorageQuota, + evictStorageAndRetry, + broadcastUpdate, + hasPendingMergeForKey, + removeNullValues, + prepareKeyValuePairsForStorage, + applyMerge, + initializeWithDefaultKeyStates, + getSnapshotKey, + isValidMergeCollection, +}; +exports.default = OnyxUtils; diff --git a/dist/PerformanceUtils.d.ts b/dist/PerformanceUtils.d.ts new file mode 100644 index 00000000..f02e7fd9 --- /dev/null +++ b/dist/PerformanceUtils.d.ts @@ -0,0 +1,8 @@ +import type { OnyxKey } from './types'; +import type { Mapping } from './Onyx'; +declare function setShouldDebugSetState(debug: boolean): void; +/** + * Provide insights into why a setState() call occurred by diffing the before and after values. + */ +declare function logSetStateCall(mapping: Mapping, previousValue: unknown, newValue: unknown, caller: string, keyThatChanged?: string): void; +export { logSetStateCall, setShouldDebugSetState }; diff --git a/dist/PerformanceUtils.js b/dist/PerformanceUtils.js new file mode 100644 index 00000000..fe7a3ddb --- /dev/null +++ b/dist/PerformanceUtils.js @@ -0,0 +1,53 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.setShouldDebugSetState = exports.logSetStateCall = void 0; +const transform_1 = __importDefault(require("lodash/transform")); +const fast_equals_1 = require("fast-equals"); +let debugSetState = false; +function setShouldDebugSetState(debug) { + debugSetState = debug; +} +exports.setShouldDebugSetState = setShouldDebugSetState; +/** + * Deep diff between two objects. Useful for figuring out what changed about an object from one render to the next so + * that state and props updates can be optimized. + */ +function diffObject(object, base) { + return (0, transform_1.default)(object, (result, value, key) => { + if ((0, fast_equals_1.deepEqual)(value, base[key])) { + return; + } + if (typeof value === 'object' && typeof base[key] === 'object') { + // eslint-disable-next-line no-param-reassign + result[key] = diffObject(value, base[key]); + } + else { + // eslint-disable-next-line no-param-reassign + result[key] = value; + } + }); +} +/** + * Provide insights into why a setState() call occurred by diffing the before and after values. + */ +function logSetStateCall(mapping, previousValue, newValue, caller, keyThatChanged) { + if (!debugSetState) { + return; + } + const logParams = {}; + if (keyThatChanged) { + logParams.keyThatChanged = keyThatChanged; + } + if (newValue && previousValue && typeof newValue === 'object' && typeof previousValue === 'object') { + logParams.difference = diffObject(previousValue, newValue); + } + else { + logParams.previousValue = previousValue; + logParams.newValue = newValue; + } + console.debug(`[Onyx-Debug] ${'displayName' in mapping && mapping.displayName} setState() called. Subscribed to key '${mapping.key}' (${caller})`, logParams); +} +exports.logSetStateCall = logSetStateCall; diff --git a/dist/Str.d.ts b/dist/Str.d.ts new file mode 100644 index 00000000..360373d9 --- /dev/null +++ b/dist/Str.d.ts @@ -0,0 +1,17 @@ +/** + * Returns true if the haystack begins with the needle + * + * @param haystack The full string to be searched + * @param needle The case-sensitive string to search for + * @return Returns true if the haystack starts with the needle. + */ +declare function startsWith(haystack: string, needle: string): boolean; +/** + * Checks if parameter is a string or function. + * If it is a string, then we will just return it. + * If it is a function, then we will call it with + * any additional arguments and return the result. + */ +declare function result(parameter: string): string; +declare function result unknown, TArgs extends unknown[]>(parameter: TFunction, ...args: TArgs): ReturnType; +export { startsWith, result }; diff --git a/dist/Str.js b/dist/Str.js new file mode 100644 index 00000000..7d528661 --- /dev/null +++ b/dist/Str.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.result = exports.startsWith = void 0; +/** + * Returns true if the haystack begins with the needle + * + * @param haystack The full string to be searched + * @param needle The case-sensitive string to search for + * @return Returns true if the haystack starts with the needle. + */ +function startsWith(haystack, needle) { + return typeof haystack === 'string' && typeof needle === 'string' && haystack.startsWith(needle); +} +exports.startsWith = startsWith; +function result(parameter, ...args) { + return typeof parameter === 'function' ? 
parameter(...args) : parameter;
+}
+exports.result = result;
diff --git a/dist/batch.d.ts b/dist/batch.d.ts
new file mode 100644
index 00000000..e62ad1ee
--- /dev/null
+++ b/dist/batch.d.ts
@@ -0,0 +1,2 @@
+import { unstable_batchedUpdates } from 'react-dom';
+export default unstable_batchedUpdates;
diff --git a/dist/batch.js b/dist/batch.js
new file mode 100644
index 00000000..eebf4df8
--- /dev/null
+++ b/dist/batch.js
@@ -0,0 +1,4 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const react_dom_1 = require("react-dom");
+exports.default = react_dom_1.unstable_batchedUpdates;
diff --git a/dist/batch.native.d.ts b/dist/batch.native.d.ts
new file mode 100644
index 00000000..cb6d4057
--- /dev/null
+++ b/dist/batch.native.d.ts
@@ -0,0 +1,2 @@
+import { unstable_batchedUpdates } from 'react-native';
+export default unstable_batchedUpdates;
diff --git a/dist/batch.native.js b/dist/batch.native.js
new file mode 100644
index 00000000..000abdbb
--- /dev/null
+++ b/dist/batch.native.js
@@ -0,0 +1,4 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const react_native_1 = require("react-native");
+exports.default = react_native_1.unstable_batchedUpdates;
diff --git a/dist/createDeferredTask.d.ts b/dist/createDeferredTask.d.ts
new file mode 100644
index 00000000..31608efb
--- /dev/null
+++ b/dist/createDeferredTask.d.ts
@@ -0,0 +1,11 @@
+type DeferredTask = {
+ promise: Promise<void>;
+ resolve?: () => void;
+};
+/**
+ * Create a deferred task that can be resolved when we call `resolve()`
+ * The returned promise will complete when we call `resolve`
+ * Useful when we want to wait for a task that is resolved from an external action
+ */
+export default function createDeferredTask(): DeferredTask;
+export {};
diff --git a/dist/createDeferredTask.js b/dist/createDeferredTask.js
new file mode 100644
index 00000000..d1d8dd21
--- /dev/null
+++ b/dist/createDeferredTask.js
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+/**
+ * Create a deferred task that can be resolved when we call `resolve()`
+ * The returned promise will complete when we call `resolve`
+ * Useful when we want to wait for a task that is resolved from an external action
+ */
+function createDeferredTask() {
+ const deferred = {};
+ deferred.promise = new Promise((res) => {
+ deferred.resolve = res;
+ });
+ return deferred;
+}
+exports.default = createDeferredTask;
diff --git a/dist/index.d.ts b/dist/index.d.ts
new file mode 100644
index 00000000..87d6c42b
--- /dev/null
+++ b/dist/index.d.ts
@@ -0,0 +1,10 @@
+import type { ConnectOptions, OnyxUpdate } from './Onyx';
+import Onyx from './Onyx';
+import type { CustomTypeOptions, KeyValueMapping, NullishDeep, OnyxCollection, OnyxEntry, OnyxKey, OnyxValue, Selector, OnyxInputValue, OnyxCollectionInputValue, OnyxInput, OnyxSetInput, OnyxMultiSetInput, OnyxMergeInput, OnyxMergeCollectionInput } from './types';
+import type { FetchStatus, ResultMetadata, UseOnyxResult } from './useOnyx';
+import useOnyx from './useOnyx';
+import withOnyx from './withOnyx';
+import type { WithOnyxState } from './withOnyx/types';
+export default Onyx;
+export { useOnyx, withOnyx };
+export type { ConnectOptions, CustomTypeOptions, FetchStatus, KeyValueMapping, NullishDeep, OnyxCollection, OnyxEntry, OnyxKey, OnyxInputValue, OnyxCollectionInputValue, OnyxInput, OnyxSetInput, OnyxMultiSetInput, OnyxMergeInput, OnyxMergeCollectionInput, OnyxUpdate, OnyxValue, ResultMetadata, Selector, UseOnyxResult,
WithOnyxState, };
diff --git a/dist/index.js b/dist/index.js
new file mode 100644
index 00000000..c1e2ba3a
--- /dev/null
+++ b/dist/index.js
@@ -0,0 +1,12 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.withOnyx = exports.useOnyx = void 0;
+const Onyx_1 = __importDefault(require("./Onyx"));
+const useOnyx_1 = __importDefault(require("./useOnyx"));
+exports.useOnyx = useOnyx_1.default;
+const withOnyx_1 = __importDefault(require("./withOnyx"));
+exports.withOnyx = withOnyx_1.default;
+exports.default = Onyx_1.default;
diff --git a/dist/logMessages.d.ts b/dist/logMessages.d.ts
new file mode 100644
index 00000000..93ebeb77
--- /dev/null
+++ b/dist/logMessages.d.ts
@@ -0,0 +1,4 @@
+declare const logMessages: {
+ incompatibleUpdateAlert: (key: string, operation: string, existingValueType?: string, newValueType?: string) => string;
+};
+export default logMessages;
diff --git a/dist/logMessages.js b/dist/logMessages.js
new file mode 100644
index 00000000..76f48136
--- /dev/null
+++ b/dist/logMessages.js
@@ -0,0 +1,8 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const logMessages = {
+ incompatibleUpdateAlert: (key, operation, existingValueType, newValueType) => {
+ return `Warning: Trying to apply "${operation}" with ${newValueType !== null && newValueType !== void 0 ? newValueType : 'unknown'} type to ${existingValueType !== null && existingValueType !== void 0 ? existingValueType : 'unknown'} type in the key "${key}"`;
+ },
+};
+exports.default = logMessages;
diff --git a/dist/storage/InstanceSync/index.d.ts b/dist/storage/InstanceSync/index.d.ts
new file mode 100644
index 00000000..9f9718dc
--- /dev/null
+++ b/dist/storage/InstanceSync/index.d.ts
@@ -0,0 +1,16 @@
+/**
+ * This is used to keep multiple browser tabs in sync and is therefore only needed on web.
+ * On native platforms, we omit this syncing logic by setting this to a mock implementation.
+ */
+declare const InstanceSync: {
+ shouldBeUsed: boolean;
+ init: (...args: any[]) => void;
+ setItem: (...args: any[]) => void;
+ removeItem: (...args: any[]) => void;
+ removeItems: (...args: any[]) => void;
+ multiMerge: (...args: any[]) => void;
+ multiSet: (...args: any[]) => void;
+ mergeItem: (...args: any[]) => void;
+ clear: <T extends () => void>(callback: T) => Promise<void>;
+};
+export default InstanceSync;
diff --git a/dist/storage/InstanceSync/index.js b/dist/storage/InstanceSync/index.js
new file mode 100644
index 00000000..1a3a6cbf
--- /dev/null
+++ b/dist/storage/InstanceSync/index.js
@@ -0,0 +1,22 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const noop_1 = __importDefault(require("lodash/noop"));
+/**
+ * This is used to keep multiple browser tabs in sync and is therefore only needed on web.
+ * On native platforms, we omit this syncing logic by setting this to a mock implementation.
+ */ +const InstanceSync = { + shouldBeUsed: false, + init: noop_1.default, + setItem: noop_1.default, + removeItem: noop_1.default, + removeItems: noop_1.default, + multiMerge: noop_1.default, + multiSet: noop_1.default, + mergeItem: noop_1.default, + clear: (callback) => Promise.resolve(callback()), +}; +exports.default = InstanceSync; diff --git a/dist/storage/InstanceSync/index.web.d.ts b/dist/storage/InstanceSync/index.web.d.ts new file mode 100644 index 00000000..579ddee8 --- /dev/null +++ b/dist/storage/InstanceSync/index.web.d.ts @@ -0,0 +1,29 @@ +/** + * The InstancesSync object provides data-changed events like the ones that exist + * when using LocalStorage APIs in the browser. These events are great because multiple tabs can listen for when + * data changes and then stay up-to-date with everything happening in Onyx. + */ +import type { OnyxKey } from '../../types'; +import type { KeyList, OnStorageKeyChanged } from '../providers/types'; +import type StorageProvider from '../providers/types'; +/** + * Raise an event through `localStorage` to let other tabs know a value changed + * @param {String} onyxKey + */ +declare function raiseStorageSyncEvent(onyxKey: OnyxKey): void; +declare function raiseStorageSyncManyKeysEvent(onyxKeys: KeyList): void; +declare const InstanceSync: { + shouldBeUsed: boolean; + /** + * @param {Function} onStorageKeyChanged Storage synchronization mechanism keeping all opened tabs in sync + */ + init: (onStorageKeyChanged: OnStorageKeyChanged, store: StorageProvider) => void; + setItem: typeof raiseStorageSyncEvent; + removeItem: typeof raiseStorageSyncEvent; + removeItems: typeof raiseStorageSyncManyKeysEvent; + multiMerge: typeof raiseStorageSyncManyKeysEvent; + multiSet: typeof raiseStorageSyncManyKeysEvent; + mergeItem: typeof raiseStorageSyncEvent; + clear: (clearImplementation: () => void) => Promise; +}; +export default InstanceSync; diff --git a/dist/storage/InstanceSync/index.web.js b/dist/storage/InstanceSync/index.web.js new file mode 100644 index 00000000..b45a9e67 --- /dev/null +++ b/dist/storage/InstanceSync/index.web.js @@ -0,0 +1,61 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const NoopProvider_1 = __importDefault(require("../providers/NoopProvider")); +const SYNC_ONYX = 'SYNC_ONYX'; +/** + * Raise an event through `localStorage` to let other tabs know a value changed + * @param {String} onyxKey + */ +function raiseStorageSyncEvent(onyxKey) { + global.localStorage.setItem(SYNC_ONYX, onyxKey); + global.localStorage.removeItem(SYNC_ONYX); +} +function raiseStorageSyncManyKeysEvent(onyxKeys) { + onyxKeys.forEach((onyxKey) => { + raiseStorageSyncEvent(onyxKey); + }); +} +let storage = NoopProvider_1.default; +const InstanceSync = { + shouldBeUsed: true, + /** + * @param {Function} onStorageKeyChanged Storage synchronization mechanism keeping all opened tabs in sync + */ + init: (onStorageKeyChanged, store) => { + storage = store; + // This listener will only be triggered by events coming from other tabs + global.addEventListener('storage', (event) => { + // Ignore events that don't originate from the SYNC_ONYX logic + if (event.key !== SYNC_ONYX || !event.newValue) { + return; + } + const onyxKey = event.newValue; + storage.getItem(onyxKey).then((value) => onStorageKeyChanged(onyxKey, value)); + }); + }, + setItem: raiseStorageSyncEvent, + removeItem: raiseStorageSyncEvent, + removeItems: raiseStorageSyncManyKeysEvent, + multiMerge: raiseStorageSyncManyKeysEvent, + multiSet: raiseStorageSyncManyKeysEvent, + mergeItem: raiseStorageSyncEvent, + clear: (clearImplementation) => { + let allKeys; + // The keys must be retrieved before storage is cleared or else the list of keys would be empty + return storage + .getAllKeys() + .then((keys) => { + allKeys = keys; + }) + .then(() => clearImplementation()) + .then(() => { + // Now that storage is cleared, the storage sync event can happen which is a more atomic action + // for other browser tabs + raiseStorageSyncManyKeysEvent(allKeys); + }); + }, +}; +exports.default = InstanceSync; diff --git a/dist/storage/__mocks__/index.d.ts b/dist/storage/__mocks__/index.d.ts new file mode 100644 index 00000000..4342dad2 --- /dev/null +++ b/dist/storage/__mocks__/index.d.ts @@ -0,0 +1,25 @@ +/// +declare const StorageMock: { + init: jest.Mock; + getItem: jest.Mock, [key: any]>; + multiGet: jest.Mock, [keys: import("../providers/types").KeyList]>; + setItem: jest.Mock, [key: any, value: unknown]>; + multiSet: jest.Mock, [pairs: import("../providers/types").KeyValuePairList]>; + mergeItem: jest.Mock, [key: any, deltaChanges: unknown, preMergedValue: unknown, shouldSetValue?: boolean | undefined]>; + multiMerge: jest.Mock, [pairs: import("../providers/types").KeyValuePairList]>; + removeItem: jest.Mock, [key: string]>; + removeItems: jest.Mock, [keys: import("../providers/types").KeyList]>; + clear: jest.Mock, []>; + getAllKeys: jest.Mock, []>; + getDatabaseSize: jest.Mock, []>; + keepInstancesSync: jest.Mock; + mockSet: (key: string, value: unknown) => Promise; + getMockStore: jest.Mock<{ + [x: string]: unknown; + }, []>; + setMockStore: jest.Mock; +}; +export default StorageMock; diff --git a/dist/storage/__mocks__/index.js b/dist/storage/__mocks__/index.js new file mode 100644 index 00000000..be154d02 --- /dev/null +++ b/dist/storage/__mocks__/index.js @@ -0,0 +1,47 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const MemoryOnlyProvider_1 = __importStar(require("../providers/MemoryOnlyProvider")); +const init = jest.fn(MemoryOnlyProvider_1.default.init); +init(); +const StorageMock = { + init, + getItem: jest.fn(MemoryOnlyProvider_1.default.getItem), + multiGet: jest.fn(MemoryOnlyProvider_1.default.multiGet), + setItem: jest.fn(MemoryOnlyProvider_1.default.setItem), + multiSet: jest.fn(MemoryOnlyProvider_1.default.multiSet), + mergeItem: jest.fn(MemoryOnlyProvider_1.default.mergeItem), + multiMerge: jest.fn(MemoryOnlyProvider_1.default.multiMerge), + removeItem: jest.fn(MemoryOnlyProvider_1.default.removeItem), + removeItems: jest.fn(MemoryOnlyProvider_1.default.removeItems), + clear: jest.fn(MemoryOnlyProvider_1.default.clear), + getAllKeys: jest.fn(MemoryOnlyProvider_1.default.getAllKeys), + getDatabaseSize: jest.fn(MemoryOnlyProvider_1.default.getDatabaseSize), + keepInstancesSync: jest.fn(), + mockSet: MemoryOnlyProvider_1.mockSet, + getMockStore: jest.fn(() => MemoryOnlyProvider_1.mockStore), + setMockStore: jest.fn((data) => (0, MemoryOnlyProvider_1.setMockStore)(data)), +}; +exports.default = StorageMock; diff --git a/dist/storage/index.d.ts b/dist/storage/index.d.ts new file mode 100644 index 00000000..a01da701 --- /dev/null +++ b/dist/storage/index.d.ts @@ -0,0 +1,6 @@ +import type StorageProvider from './providers/types'; +type Storage = { + getStorageProvider: () => StorageProvider; +} & Omit; +declare const Storage: Storage; +export default Storage; diff --git a/dist/storage/index.js b/dist/storage/index.js new file mode 100644 index 00000000..53b5f348 --- /dev/null +++ b/dist/storage/index.js @@ -0,0 +1,187 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const Logger = __importStar(require("../Logger")); +const platforms_1 = __importDefault(require("./platforms")); +const InstanceSync_1 = __importDefault(require("./InstanceSync")); +const MemoryOnlyProvider_1 = __importDefault(require("./providers/MemoryOnlyProvider")); +let provider = platforms_1.default; +let shouldKeepInstancesSync = false; +let finishInitalization; +const initPromise = new Promise((resolve) => { + finishInitalization = resolve; +}); +/** + * Degrade performance by removing the storage provider and only using cache + */ +function degradePerformance(error) { + Logger.logHmmm(`Error while using ${provider.name}. Falling back to only using cache and dropping storage.\n Error: ${error.message}\n Stack: ${error.stack}\n Cause: ${error.cause}`); + console.error(error); + provider = MemoryOnlyProvider_1.default; +} +/** + * Runs a piece of code and degrades performance if certain errors are thrown + */ +function tryOrDegradePerformance(fn, waitForInitialization = true) { + return new Promise((resolve, reject) => { + const promise = waitForInitialization ? initPromise : Promise.resolve(); + promise.then(() => { + try { + resolve(fn()); + } + catch (error) { + // Test for known critical errors that the storage provider throws, e.g. when storage is full + if (error instanceof Error) { + // IndexedDB error when storage is full (https://github.com/Expensify/App/issues/29403) + if (error.message.includes('Internal error opening backing store for indexedDB.open')) { + degradePerformance(error); + } + // catch the error if DB connection can not be established/DB can not be created + if (error.message.includes('IDBKeyVal store could not be created')) { + degradePerformance(error); + } + } + reject(error); + } + }); + }); +} +const Storage = { + /** + * Returns the storage provider currently in use + */ + getStorageProvider() { + return provider; + }, + /** + * Initializes all providers in the list of storage providers + * and enables fallback providers if necessary + */ + init() { + tryOrDegradePerformance(provider.init, false).finally(() => { + finishInitalization(); + }); + }, + /** + * Get the value of a given key or return `null` if it's not available + */ + getItem: (key) => tryOrDegradePerformance(() => provider.getItem(key)), + /** + * Get multiple key-value pairs for the give array of keys in a batch + */ + multiGet: (keys) => tryOrDegradePerformance(() => provider.multiGet(keys)), + /** + * Sets the value for a given key. 
The only requirement is that the value should be serializable to JSON string + */ + setItem: (key, value) => tryOrDegradePerformance(() => { + const promise = provider.setItem(key, value); + if (shouldKeepInstancesSync) { + return promise.then(() => InstanceSync_1.default.setItem(key)); + } + return promise; + }), + /** + * Stores multiple key-value pairs in a batch + */ + multiSet: (pairs) => tryOrDegradePerformance(() => { + const promise = provider.multiSet(pairs); + if (shouldKeepInstancesSync) { + return promise.then(() => InstanceSync_1.default.multiSet(pairs.map((pair) => pair[0]))); + } + return promise; + }), + /** + * Merging an existing value with a new one + */ + mergeItem: (key, deltaChanges, preMergedValue, shouldSetValue = false) => tryOrDegradePerformance(() => { + const promise = provider.mergeItem(key, deltaChanges, preMergedValue, shouldSetValue); + if (shouldKeepInstancesSync) { + return promise.then(() => InstanceSync_1.default.mergeItem(key)); + } + return promise; + }), + /** + * Multiple merging of existing and new values in a batch + * This function also removes all nested null values from an object. + */ + multiMerge: (pairs) => tryOrDegradePerformance(() => { + const promise = provider.multiMerge(pairs); + if (shouldKeepInstancesSync) { + return promise.then(() => InstanceSync_1.default.multiMerge(pairs.map((pair) => pair[0]))); + } + return promise; + }), + /** + * Removes given key and its value + */ + removeItem: (key) => tryOrDegradePerformance(() => { + const promise = provider.removeItem(key); + if (shouldKeepInstancesSync) { + return promise.then(() => InstanceSync_1.default.removeItem(key)); + } + return promise; + }), + /** + * Remove given keys and their values + */ + removeItems: (keys) => tryOrDegradePerformance(() => { + const promise = provider.removeItems(keys); + if (shouldKeepInstancesSync) { + return promise.then(() => InstanceSync_1.default.removeItems(keys)); + } + return promise; + }), + /** + * Clears everything + */ + clear: () => tryOrDegradePerformance(() => { + if (shouldKeepInstancesSync) { + return InstanceSync_1.default.clear(() => provider.clear()); + } + return provider.clear(); + }), + /** + * Returns all available keys + */ + getAllKeys: () => tryOrDegradePerformance(() => provider.getAllKeys()), + /** + * Gets the total bytes of the store + */ + getDatabaseSize: () => tryOrDegradePerformance(() => provider.getDatabaseSize()), + /** + * @param onStorageKeyChanged - Storage synchronization mechanism keeping all opened tabs in sync (web only) + */ + keepInstancesSync(onStorageKeyChanged) { + // If InstanceSync shouldn't be used, it means we're on a native platform and we don't need to keep instances in sync + if (!InstanceSync_1.default.shouldBeUsed) + return; + shouldKeepInstancesSync = true; + InstanceSync_1.default.init(onStorageKeyChanged, this); + }, +}; +exports.default = Storage; diff --git a/dist/storage/platforms/index.d.ts b/dist/storage/platforms/index.d.ts new file mode 100644 index 00000000..2a1ba4c8 --- /dev/null +++ b/dist/storage/platforms/index.d.ts @@ -0,0 +1,2 @@ +import WebStorage from '../providers/IDBKeyValProvider'; +export default WebStorage; diff --git a/dist/storage/platforms/index.js b/dist/storage/platforms/index.js new file mode 100644 index 00000000..5f096f04 --- /dev/null +++ b/dist/storage/platforms/index.js @@ -0,0 +1,7 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const IDBKeyValProvider_1 = __importDefault(require("../providers/IDBKeyValProvider")); +exports.default = IDBKeyValProvider_1.default; diff --git a/dist/storage/platforms/index.native.d.ts b/dist/storage/platforms/index.native.d.ts new file mode 100644 index 00000000..e07606ec --- /dev/null +++ b/dist/storage/platforms/index.native.d.ts @@ -0,0 +1,2 @@ +import NativeStorage from '../providers/SQLiteProvider'; +export default NativeStorage; diff --git a/dist/storage/platforms/index.native.js b/dist/storage/platforms/index.native.js new file mode 100644 index 00000000..197174ae --- /dev/null +++ b/dist/storage/platforms/index.native.js @@ -0,0 +1,7 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const SQLiteProvider_1 = __importDefault(require("../providers/SQLiteProvider")); +exports.default = SQLiteProvider_1.default; diff --git a/dist/storage/providers/IDBKeyValProvider.d.ts b/dist/storage/providers/IDBKeyValProvider.d.ts new file mode 100644 index 00000000..e1e83444 --- /dev/null +++ b/dist/storage/providers/IDBKeyValProvider.d.ts @@ -0,0 +1,3 @@ +import type StorageProvider from './types'; +declare const provider: StorageProvider; +export default provider; diff --git a/dist/storage/providers/IDBKeyValProvider.js b/dist/storage/providers/IDBKeyValProvider.js new file mode 100644 index 00000000..4811463e --- /dev/null +++ b/dist/storage/providers/IDBKeyValProvider.js @@ -0,0 +1,91 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const idb_keyval_1 = require("idb-keyval"); +const utils_1 = __importDefault(require("../../utils")); +// We don't want to initialize the store while the JS bundle loads as idb-keyval will try to use global.indexedDB +// which might not be available in certain environments that load the bundle (e.g. electron main process). +let idbKeyValStore; +const provider = { + /** + * The name of the provider that can be printed to the logs + */ + name: 'IDBKeyValProvider', + /** + * Initializes the storage provider + */ + init() { + const newIdbKeyValStore = (0, idb_keyval_1.createStore)('OnyxDB', 'keyvaluepairs'); + if (newIdbKeyValStore == null) + throw Error('IDBKeyVal store could not be created'); + idbKeyValStore = newIdbKeyValStore; + }, + setItem: (key, value) => { + if (value === null) { + provider.removeItem(key); + } + return (0, idb_keyval_1.set)(key, value, idbKeyValStore); + }, + multiGet: (keysParam) => (0, idb_keyval_1.getMany)(keysParam, idbKeyValStore).then((values) => values.map((value, index) => [keysParam[index], value])), + multiMerge: (pairs) => idbKeyValStore('readwrite', (store) => { + // Note: we are using the manual store transaction here, to fit the read and update + // of the items in one transaction to achieve best performance. 
+ const getValues = Promise.all(pairs.map(([key]) => (0, idb_keyval_1.promisifyRequest)(store.get(key)))); + return getValues.then((values) => { + const pairsWithoutNull = pairs.filter(([key, value]) => { + if (value === null) { + provider.removeItem(key); + return false; + } + return true; + }); + const upsertMany = pairsWithoutNull.map(([key, value], index) => { + const prev = values[index]; + const newValue = utils_1.default.fastMerge(prev, value); + return (0, idb_keyval_1.promisifyRequest)(store.put(newValue, key)); + }); + return Promise.all(upsertMany); + }); + }), + mergeItem(key, _deltaChanges, preMergedValue) { + // Since Onyx also merged the existing value with the changes, we can just set the value directly + return provider.setItem(key, preMergedValue); + }, + multiSet: (pairs) => { + const pairsWithoutNull = pairs.filter(([key, value]) => { + if (value === null) { + provider.removeItem(key); + return false; + } + return true; + }); + return (0, idb_keyval_1.setMany)(pairsWithoutNull, idbKeyValStore); + }, + clear: () => (0, idb_keyval_1.clear)(idbKeyValStore), + getAllKeys: () => (0, idb_keyval_1.keys)(idbKeyValStore), + getItem: (key) => (0, idb_keyval_1.get)(key, idbKeyValStore) + // idb-keyval returns undefined for missing items, but this needs to return null so that idb-keyval does the same thing as SQLiteStorage. + .then((val) => (val === undefined ? null : val)), + removeItem: (key) => (0, idb_keyval_1.del)(key, idbKeyValStore), + removeItems: (keysParam) => (0, idb_keyval_1.delMany)(keysParam, idbKeyValStore), + getDatabaseSize() { + if (!window.navigator || !window.navigator.storage) { + throw new Error('StorageManager browser API unavailable'); + } + return window.navigator.storage + .estimate() + .then((value) => { + var _a, _b, _c; + return ({ + bytesUsed: (_a = value.usage) !== null && _a !== void 0 ? _a : 0, + bytesRemaining: ((_b = value.quota) !== null && _b !== void 0 ? _b : 0) - ((_c = value.usage) !== null && _c !== void 0 ? _c : 0), + }); + }) + .catch((error) => { + throw new Error(`Unable to estimate web storage quota. Original error: ${error}`); + }); + }, +}; +exports.default = provider; diff --git a/dist/storage/providers/MemoryOnlyProvider.d.ts b/dist/storage/providers/MemoryOnlyProvider.d.ts new file mode 100644 index 00000000..cd52951d --- /dev/null +++ b/dist/storage/providers/MemoryOnlyProvider.d.ts @@ -0,0 +1,9 @@ +import type StorageProvider from './types'; +import type { OnyxKey, OnyxValue } from '../../types'; +type Store = Record>; +declare let store: Store; +declare const set: (key: OnyxKey, value: OnyxValue) => Promise; +declare const provider: StorageProvider; +declare const setMockStore: (data: Store) => void; +export default provider; +export { store as mockStore, set as mockSet, setMockStore }; diff --git a/dist/storage/providers/MemoryOnlyProvider.js b/dist/storage/providers/MemoryOnlyProvider.js new file mode 100644 index 00000000..a2a55b90 --- /dev/null +++ b/dist/storage/providers/MemoryOnlyProvider.js @@ -0,0 +1,120 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.setMockStore = exports.mockSet = exports.mockStore = void 0; +const underscore_1 = __importDefault(require("underscore")); +const utils_1 = __importDefault(require("../../utils")); +// eslint-disable-next-line import/no-mutable-exports +let store = {}; +exports.mockStore = store; +const setInternal = (key, value) => { + store[key] = value; + return Promise.resolve(value); +}; +const isJestRunning = typeof jest !== 'undefined'; +const set = isJestRunning ? jest.fn(setInternal) : setInternal; +exports.mockSet = set; +const provider = { + /** + * The name of the provider that can be printed to the logs + */ + name: 'MemoryOnlyProvider', + /** + * Initializes the storage provider + */ + init() { + // do nothing + }, + /** + * Get the value of a given key or return `null` if it's not available in memory + */ + getItem(key) { + const value = store[key]; + return Promise.resolve(value === undefined ? null : value); + }, + /** + * Get multiple key-value pairs for the give array of keys in a batch. + */ + multiGet(keys) { + const getPromises = underscore_1.default.map(keys, (key) => new Promise((resolve) => { + this.getItem(key).then((value) => resolve([key, value])); + })); + return Promise.all(getPromises); + }, + /** + * Sets the value for a given key. The only requirement is that the value should be serializable to JSON string + */ + setItem(key, value) { + set(key, value); + return Promise.resolve(); + }, + /** + * Stores multiple key-value pairs in a batch + */ + multiSet(pairs) { + const setPromises = underscore_1.default.map(pairs, ([key, value]) => this.setItem(key, value)); + return Promise.all(setPromises).then(() => undefined); + }, + /** + * Merging an existing value with a new one + */ + mergeItem(key, _deltaChanges, preMergedValue) { + // Since Onyx already merged the existing value with the changes, we can just set the value directly + return this.setItem(key, preMergedValue); + }, + /** + * Multiple merging of existing and new values in a batch + * This function also removes all nested null values from an object. + */ + multiMerge(pairs) { + underscore_1.default.forEach(pairs, ([key, value]) => { + const existingValue = store[key]; + const newValue = utils_1.default.fastMerge(existingValue, value); + set(key, newValue); + }); + return Promise.resolve([]); + }, + /** + * Remove given key and it's value from memory + */ + removeItem(key) { + delete store[key]; + return Promise.resolve(); + }, + /** + * Remove given keys and their values from memory + */ + removeItems(keys) { + underscore_1.default.each(keys, (key) => { + delete store[key]; + }); + return Promise.resolve(); + }, + /** + * Clear everything from memory + */ + clear() { + exports.mockStore = store = {}; + return Promise.resolve(); + }, + /** + * Returns all keys available in memory + */ + getAllKeys() { + return Promise.resolve(underscore_1.default.keys(store)); + }, + /** + * Gets the total bytes of the store. + * `bytesRemaining` will always be `Number.POSITIVE_INFINITY` since we don't have a hard limit on memory. 
+ */ + getDatabaseSize() { + return Promise.resolve({ bytesRemaining: Number.POSITIVE_INFINITY, bytesUsed: 0 }); + }, +}; +const setMockStore = (data) => { + exports.mockStore = store = data; +}; +exports.setMockStore = setMockStore; +exports.default = provider; diff --git a/dist/storage/providers/NoopProvider.d.ts b/dist/storage/providers/NoopProvider.d.ts new file mode 100644 index 00000000..e1e83444 --- /dev/null +++ b/dist/storage/providers/NoopProvider.d.ts @@ -0,0 +1,3 @@ +import type StorageProvider from './types'; +declare const provider: StorageProvider; +export default provider; diff --git a/dist/storage/providers/NoopProvider.js b/dist/storage/providers/NoopProvider.js new file mode 100644 index 00000000..1fff8d0e --- /dev/null +++ b/dist/storage/providers/NoopProvider.js @@ -0,0 +1,83 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const provider = { + /** + * The name of the provider that can be printed to the logs + */ + name: 'NoopProvider', + /** + * Initializes the storage provider + */ + init() { + // do nothing + }, + /** + * Get the value of a given key or return `null` if it's not available in memory + */ + getItem(key) { + return Promise.resolve(null); + }, + /** + * Get multiple key-value pairs for the give array of keys in a batch. + */ + multiGet() { + return Promise.resolve([]); + }, + /** + * Sets the value for a given key. The only requirement is that the value should be serializable to JSON string + */ + setItem() { + return Promise.resolve(); + }, + /** + * Stores multiple key-value pairs in a batch + */ + multiSet() { + return Promise.resolve(); + }, + /** + * Merging an existing value with a new one + */ + mergeItem() { + return Promise.resolve(); + }, + /** + * Multiple merging of existing and new values in a batch + * This function also removes all nested null values from an object. + */ + multiMerge() { + return Promise.resolve([]); + }, + /** + * Remove given key and it's value from memory + */ + removeItem() { + return Promise.resolve(); + }, + /** + * Remove given keys and their values from memory + */ + removeItems() { + return Promise.resolve(); + }, + /** + * Clear everything from memory + */ + clear() { + return Promise.resolve(); + }, + /** + * Returns all keys available in memory + */ + getAllKeys() { + return Promise.resolve([]); + }, + /** + * Gets the total bytes of the store. + * `bytesRemaining` will always be `Number.POSITIVE_INFINITY` since we don't have a hard limit on memory. + */ + getDatabaseSize() { + return Promise.resolve({ bytesRemaining: Number.POSITIVE_INFINITY, bytesUsed: 0 }); + }, +}; +exports.default = provider; diff --git a/dist/storage/providers/SQLiteProvider.d.ts b/dist/storage/providers/SQLiteProvider.d.ts new file mode 100644 index 00000000..e1e83444 --- /dev/null +++ b/dist/storage/providers/SQLiteProvider.d.ts @@ -0,0 +1,3 @@ +import type StorageProvider from './types'; +declare const provider: StorageProvider; +export default provider; diff --git a/dist/storage/providers/SQLiteProvider.js b/dist/storage/providers/SQLiteProvider.js new file mode 100644 index 00000000..de9abc67 --- /dev/null +++ b/dist/storage/providers/SQLiteProvider.js @@ -0,0 +1,101 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const react_native_quick_sqlite_1 = require("react-native-quick-sqlite"); +const react_native_device_info_1 = require("react-native-device-info"); +const utils_1 = __importDefault(require("../../utils")); +const DB_NAME = 'OnyxDB'; +let db; +const provider = { + /** + * The name of the provider that can be printed to the logs + */ + name: 'SQLiteProvider', + /** + * Initializes the storage provider + */ + init() { + db = (0, react_native_quick_sqlite_1.open)({ name: DB_NAME }); + db.execute('CREATE TABLE IF NOT EXISTS keyvaluepairs (record_key TEXT NOT NULL PRIMARY KEY , valueJSON JSON NOT NULL) WITHOUT ROWID;'); + // All of the 3 pragmas below were suggested by SQLite team. + // You can find more info about them here: https://www.sqlite.org/pragma.html + db.execute('PRAGMA CACHE_SIZE=-20000;'); + db.execute('PRAGMA synchronous=NORMAL;'); + db.execute('PRAGMA journal_mode=WAL;'); + }, + getItem(key) { + return db.executeAsync('SELECT record_key, valueJSON FROM keyvaluepairs WHERE record_key = ?;', [key]).then(({ rows }) => { + if (!rows || (rows === null || rows === void 0 ? void 0 : rows.length) === 0) { + return null; + } + const result = rows === null || rows === void 0 ? void 0 : rows.item(0); + return JSON.parse(result.valueJSON); + }); + }, + multiGet(keys) { + const placeholders = keys.map(() => '?').join(','); + const command = `SELECT record_key, valueJSON FROM keyvaluepairs WHERE record_key IN (${placeholders});`; + return db.executeAsync(command, keys).then(({ rows }) => { + // eslint-disable-next-line no-underscore-dangle + const result = rows === null || rows === void 0 ? void 0 : rows._array.map((row) => [row.record_key, JSON.parse(row.valueJSON)]); + return (result !== null && result !== void 0 ? result : []); + }); + }, + setItem(key, value) { + return db.executeAsync('REPLACE INTO keyvaluepairs (record_key, valueJSON) VALUES (?, ?);', [key, JSON.stringify(value)]); + }, + multiSet(pairs) { + const stringifiedPairs = pairs.map((pair) => [pair[0], JSON.stringify(pair[1] === undefined ? null : pair[1])]); + if (utils_1.default.isEmptyObject(stringifiedPairs)) { + return Promise.resolve(); + } + return db.executeBatchAsync([['REPLACE INTO keyvaluepairs (record_key, valueJSON) VALUES (?, json(?));', stringifiedPairs]]); + }, + multiMerge(pairs) { + // Note: We use `ON CONFLICT DO UPDATE` here instead of `INSERT OR REPLACE INTO` + // so the new JSON value is merged into the old one if there's an existing value + const query = `INSERT INTO keyvaluepairs (record_key, valueJSON) + VALUES (:key, JSON(:value)) + ON CONFLICT DO UPDATE + SET valueJSON = JSON_PATCH(valueJSON, JSON(:value)); + `; + const nonNullishPairs = pairs.filter((pair) => pair[1] !== undefined); + const queryArguments = nonNullishPairs.map((pair) => { + const value = JSON.stringify(pair[1]); + return [pair[0], value]; + }); + return db.executeBatchAsync([[query, queryArguments]]); + }, + mergeItem(key, deltaChanges, preMergedValue, shouldSetValue) { + if (shouldSetValue) { + return this.setItem(key, preMergedValue); + } + return this.multiMerge([[key, deltaChanges]]); + }, + getAllKeys: () => db.executeAsync('SELECT record_key FROM keyvaluepairs;').then(({ rows }) => { + // eslint-disable-next-line no-underscore-dangle + const result = rows === null || rows === void 0 ? void 0 : rows._array.map((row) => row.record_key); + return (result !== null && result !== void 0 ? 
result : []); + }), + removeItem: (key) => db.executeAsync('DELETE FROM keyvaluepairs WHERE record_key = ?;', [key]), + removeItems: (keys) => { + const placeholders = keys.map(() => '?').join(','); + const query = `DELETE FROM keyvaluepairs WHERE record_key IN (${placeholders});`; + return db.executeAsync(query, keys); + }, + clear: () => db.executeAsync('DELETE FROM keyvaluepairs;', []), + getDatabaseSize() { + return Promise.all([db.executeAsync('PRAGMA page_size;'), db.executeAsync('PRAGMA page_count;'), (0, react_native_device_info_1.getFreeDiskStorage)()]).then(([pageSizeResult, pageCountResult, bytesRemaining]) => { + var _a, _b; + const pageSize = (_a = pageSizeResult.rows) === null || _a === void 0 ? void 0 : _a.item(0).page_size; + const pageCount = (_b = pageCountResult.rows) === null || _b === void 0 ? void 0 : _b.item(0).page_count; + return { + bytesUsed: pageSize * pageCount, + bytesRemaining, + }; + }); + }, +}; +exports.default = provider; diff --git a/dist/storage/providers/types.d.ts b/dist/storage/providers/types.d.ts new file mode 100644 index 00000000..9d6adab6 --- /dev/null +++ b/dist/storage/providers/types.d.ts @@ -0,0 +1,72 @@ +import type { BatchQueryResult, QueryResult } from 'react-native-quick-sqlite'; +import type { OnyxKey, OnyxValue } from '../../types'; +type KeyValuePair = [OnyxKey, OnyxValue]; +type KeyList = OnyxKey[]; +type KeyValuePairList = KeyValuePair[]; +type OnStorageKeyChanged = (key: TKey, value: OnyxValue) => void; +type StorageProvider = { + /** + * The name of the provider that can be printed to the logs + */ + name: string; + /** + * Initializes the storage provider + */ + init: () => void; + /** + * Gets the value of a given key or return `null` if it's not available in storage + */ + getItem: (key: TKey) => Promise>; + /** + * Get multiple key-value pairs for the given array of keys in a batch + */ + multiGet: (keys: KeyList) => Promise; + /** + * Sets the value for a given key. 
The only requirement is that the value should be serializable to JSON string + */ + setItem: (key: TKey, value: OnyxValue) => Promise; + /** + * Stores multiple key-value pairs in a batch + */ + multiSet: (pairs: KeyValuePairList) => Promise; + /** + * Multiple merging of existing and new values in a batch + */ + multiMerge: (pairs: KeyValuePairList) => Promise; + /** + * Merges an existing value with a new one by leveraging JSON_PATCH + * @param deltaChanges - the delta for a specific key + * @param preMergedValue - the pre-merged data from `Onyx.applyMerge` + * @param shouldSetValue - whether the data should be set instead of merged + */ + mergeItem: (key: TKey, deltaChanges: OnyxValue, preMergedValue: OnyxValue, shouldSetValue?: boolean) => Promise; + /** + * Returns all keys available in storage + */ + getAllKeys: () => Promise; + /** + * Removes given key and its value from storage + */ + removeItem: (key: OnyxKey) => Promise; + /** + * Removes given keys and their values from storage + */ + removeItems: (keys: KeyList) => Promise; + /** + * Clears absolutely everything from storage + */ + clear: () => Promise; + /** + * Gets the total bytes of the database file + */ + getDatabaseSize: () => Promise<{ + bytesUsed: number; + bytesRemaining: number; + }>; + /** + * @param onStorageKeyChanged Storage synchronization mechanism keeping all opened tabs in sync + */ + keepInstancesSync?: (onStorageKeyChanged: OnStorageKeyChanged) => void; +}; +export default StorageProvider; +export type { KeyList, KeyValuePair, KeyValuePairList, OnStorageKeyChanged }; diff --git a/dist/storage/providers/types.js b/dist/storage/providers/types.js new file mode 100644 index 00000000..c8ad2e54 --- /dev/null +++ b/dist/storage/providers/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/dist/types.d.ts b/dist/types.d.ts new file mode 100644 index 00000000..627cadb9 --- /dev/null +++ b/dist/types.d.ts @@ -0,0 +1,375 @@ +import type { Merge } from 'type-fest'; +import type { BuiltIns } from 'type-fest/source/internal'; +import type OnyxUtils from './OnyxUtils'; +import type { WithOnyxInstance, WithOnyxState } from './withOnyx/types'; +/** + * Utility type that excludes `null` from the type `TValue`. + */ +type NonNull = TValue extends null ? never : TValue; +/** + * Utility type that excludes `undefined` from the type `TValue`. + */ +type NonUndefined = TValue extends undefined ? never : TValue; +/** + * Represents a deeply nested record. It maps keys to values, + * and those values can either be of type `TValue` or further nested `DeepRecord` instances. + */ +type DeepRecord = { + [key: string]: TValue | DeepRecord; +}; +/** + * Represents type options to configure all Onyx methods. + * It's a combination of predefined options with user-provided options (CustomTypeOptions). + * + * The options are: + * - `keys`: Represents a string union of all Onyx normal keys. + * - `collectionKeys`: Represents a string union of all Onyx collection keys. + * - `values`: Represents a Record where each key is an Onyx key and each value is its corresponding Onyx value type. + * + * The user-defined options (CustomTypeOptions) are merged into these predefined options. + * In case of conflicting properties, the ones from CustomTypeOptions are prioritized. + */ +type TypeOptions = Merge<{ + keys: string; + collectionKeys: string; + values: Record; +}, CustomTypeOptions>; +/** + * Represents the user-defined options to configure all Onyx methods. 
+ * + * The developer can configure Onyx methods by augmenting this library and overriding CustomTypeOptions. + * + * @example + * ```ts + * // ONYXKEYS.ts + * import {ValueOf} from 'type-fest'; + * import { Account, Report } from './types'; + * + * const ONYXKEYS = { + * ACCOUNT: 'account', + * IS_SIDEBAR_LOADED: 'isSidebarLoaded', + * + * // Collection Keys + * COLLECTION: { + * REPORT: 'report_', + * }, + * } as const; + * + * type OnyxKeysMap = typeof ONYXKEYS; + * type OnyxCollectionKey = ValueOf; + * type OnyxKey = DeepValueOf>; + * + * type OnyxValues = { + * [ONYXKEYS.ACCOUNT]: Account; + * [ONYXKEYS.IS_SIDEBAR_LOADED]: boolean; + * [ONYXKEYS.COLLECTION.REPORT]: Report; + * }; + * + * export default ONYXKEYS; + * export type {OnyxKey, OnyxCollectionKey, OnyxValues}; + * + * // global.d.ts + * import {OnyxKey, OnyxCollectionKey, OnyxValues} from './ONYXKEYS'; + * + * declare module 'react-native-onyx' { + * interface CustomTypeOptions { + * keys: OnyxKey; + * collectionKeys: OnyxCollectionKey; + * values: OnyxValues; + * } + * } + * ``` + */ +interface CustomTypeOptions { +} +/** + * Represents a string union of all Onyx normal keys. + */ +type Key = TypeOptions['keys']; +/** + * Represents a string union of all Onyx collection keys. + */ +type CollectionKeyBase = TypeOptions['collectionKeys']; +/** + * Represents a literal string union of all Onyx collection keys. + * It allows appending a string after each collection key e.g. `report_some-id`. + */ +type CollectionKey = `${CollectionKeyBase}${string}`; +/** + * Represents a string union of all Onyx normal and collection keys. + */ +type OnyxKey = Key | CollectionKey; +/** + * Represents a selector function type which operates based on the provided `TKey` and `ReturnType`. + * + * A `Selector` is a function that accepts a value, the withOnyx's internal state and returns a processed value. + * This type accepts two type parameters: `TKey` and `TReturnType`. + * + * The type `TKey` extends `OnyxKey` and it is the key used to access a value in `KeyValueMapping`. + * `TReturnType` is the type of the returned value from the selector function. + */ +type Selector = (value: OnyxEntry, state?: WithOnyxState) => TReturnType; +/** + * Represents a single Onyx entry, that can be either `TOnyxValue` or `undefined` if it doesn't exist. + * + * It can be used to specify data retrieved from Onyx e.g. `withOnyx` HOC mappings. + * + * @example + * ```ts + * import Onyx, {OnyxEntry, withOnyx} from 'react-native-onyx'; + * + * type OnyxProps = { + * userAccount: OnyxEntry; + * }; + * + * type Props = OnyxProps & { + * prop1: string; + * }; + * + * function Component({prop1, userAccount}: Props) { + * // ... + * } + * + * export default withOnyx({ + * userAccount: { + * key: ONYXKEYS.ACCOUNT, + * }, + * })(Component); + * ``` + */ +type OnyxEntry = TOnyxValue | undefined; +/** + * Represents an Onyx collection of entries, that can be either a record of `TOnyxValue`s or `undefined` if it is empty or doesn't exist. + * + * It can be used to specify collection data retrieved from Onyx e.g. `withOnyx` HOC mappings. + * + * @example + * ```ts + * import Onyx, {OnyxCollection, withOnyx} from 'react-native-onyx'; + * + * type OnyxProps = { + * reports: OnyxCollection; + * }; + * + * type Props = OnyxProps & { + * prop1: string; + * }; + * + * function Component({prop1, reports}: Props) { + * // ... 
+ * } + * + * export default withOnyx({ + * reports: { + * key: ONYXKEYS.COLLECTION.REPORT, + * }, + * })(Component); + * ``` + */ +type OnyxCollection = OnyxEntry>; +/** + * Represents a mapping of Onyx keys to values, where keys are either normal or collection Onyx keys + * and values are the corresponding values in Onyx's state. + * + * For collection keys, `KeyValueMapping` allows any string to be appended + * to the key (e.g., 'report_some-id', 'download_some-id'). + * + * The mapping is derived from the `values` property of the `TypeOptions` type. + */ +type KeyValueMapping = { + [TKey in keyof TypeOptions['values'] as TKey extends CollectionKeyBase ? `${TKey}${string}` : TKey]: TypeOptions['values'][TKey]; +}; +/** + * Represents a Onyx value that can be either a single entry or a collection of entries, depending on the `TKey` provided. + */ +type OnyxValue = string extends TKey ? unknown : TKey extends CollectionKeyBase ? OnyxCollection : OnyxEntry; +/** Utility type to extract `TOnyxValue` from `OnyxCollection` */ +type ExtractOnyxCollectionValue = TOnyxCollection extends NonNullable> ? U : never; +type NonTransformableTypes = BuiltIns | ((...args: any[]) => unknown) | Map | Set | ReadonlyMap | ReadonlySet | unknown[] | readonly unknown[]; +/** + * Create a type from another type with all keys and nested keys set to optional or null. + * + * @example + * const settings: Settings = { + * textEditor: { + * fontSize: 14; + * fontColor: '#000000'; + * fontWeight: 400; + * } + * autosave: true; + * }; + * + * const applySavedSettings = (savedSettings: NullishDeep) => { + * return {...settings, ...savedSettings}; + * } + * + * settings = applySavedSettings({textEditor: {fontWeight: 500, fontColor: null}}); + */ +type NullishDeep = T extends NonTransformableTypes ? T : T extends object ? NullishObjectDeep : unknown; +/** + * Same as `NullishDeep`, but accepts only `object`s as inputs. Internal helper for `NullishDeep`. + */ +type NullishObjectDeep = { + [KeyType in keyof ObjectType]?: NullishDeep | null; +}; +/** + * Represents a mapping between Onyx collection keys and their respective values. + * + * It helps to enforce that a Onyx collection key should not be without suffix (e.g. should always be of the form `${TKey}${string}`), + * and to map each Onyx collection key with suffix to a value of type `TValue`. + * + * Also, the `TMap` type is inferred automatically in `mergeCollection()` method and represents + * the object of collection keys/values specified in the second parameter of the method. + */ +type Collection = { + [MapK in keyof TMap]: MapK extends `${TKey}${string}` ? MapK extends `${TKey}` ? never : TValue : never; +}; +/** Represents the base options used in `Onyx.connect()` method. */ +type BaseConnectOptions = { + initWithStoredValues?: boolean; +}; +/** Represents additional options used inside withOnyx HOC */ +type WithOnyxConnectOptions = { + withOnyxInstance: WithOnyxInstance; + statePropertyName: string; + displayName: string; + initWithStoredValues?: boolean; + selector?: Selector; + canEvict?: boolean; +}; +type DefaultConnectCallback = (value: OnyxEntry, key: TKey) => void; +type CollectionConnectCallback = (value: NonUndefined>) => void; +/** Represents the callback function used in `Onyx.connect()` method with a regular key. */ +type DefaultConnectOptions = { + key: TKey; + callback?: DefaultConnectCallback; + waitForCollectionCallback?: false; +}; +/** Represents the callback function used in `Onyx.connect()` method with a collection key. 
*/ +type CollectionConnectOptions = { + key: TKey extends CollectionKeyBase ? TKey : never; + callback?: CollectionConnectCallback; + waitForCollectionCallback: true; +}; +/** + * Represents the options used in `Onyx.connect()` method. + * The type is built from `DefaultConnectOptions`/`CollectionConnectOptions` depending on the `waitForCollectionCallback` property. + * It includes two different forms, depending on whether we are waiting for a collection callback or not. + * + * If `waitForCollectionCallback` is `true`, it expects `key` to be a Onyx collection key and `callback` will be triggered with the whole collection + * and will pass `value` as an `OnyxCollection`. + * + * If `waitForCollectionCallback` is `false` or not specified, the `key` can be any Onyx key and `callback` will be triggered with updates of each collection item + * and will pass `value` as an `OnyxEntry`. + * + * The type is also extended with `BaseConnectOptions` and `WithOnyxConnectOptions` to include additional options, depending on the context where it's used. + */ +type ConnectOptions = (CollectionConnectOptions | DefaultConnectOptions) & (BaseConnectOptions | WithOnyxConnectOptions); +type Mapping = ConnectOptions & { + connectionID: number; +}; +/** + * Represents a single Onyx input value, that can be either `TOnyxValue` or `null` if the key should be deleted. + * This type is used for data passed to Onyx e.g. in `Onyx.merge` and `Onyx.set`. + */ +type OnyxInputValue = TOnyxValue | null; +/** + * Represents an Onyx collection input, that can be either a record of `TOnyxValue`s or `null` if the key should be deleted. + */ +type OnyxCollectionInputValue = OnyxInputValue>; +/** + * Represents an input value that can be passed to Onyx methods, that can be either `TOnyxValue` or `null`. + * Setting a key to `null` will remove the key from the store. + * `undefined` is not allowed for setting values, because it will have no effect on the data. + */ +type OnyxInput = OnyxInputValue>; +/** + * Represents a mapping object where each `OnyxKey` maps to either a value of its corresponding type in `KeyValueMapping` or `null`. + * + * It's very similar to `KeyValueMapping` but this type is used for inputs to Onyx + * (set, merge, mergeCollection) and therefore accepts using `null` to remove a key from Onyx. + */ +type OnyxInputKeyValueMapping = { + [TKey in OnyxKey]: OnyxInput; +}; +/** + * This represents the value that can be passed to `Onyx.set` and to `Onyx.update` with the method "SET" + */ +type OnyxSetInput = OnyxInput; +/** + * This represents the value that can be passed to `Onyx.multiSet` and to `Onyx.update` with the method "MULTI_SET" + */ +type OnyxMultiSetInput = Partial; +/** + * This represents the value that can be passed to `Onyx.merge` and to `Onyx.update` with the method "MERGE" + */ +type OnyxMergeInput = OnyxInput; +/** + * This represents the value that can be passed to `Onyx.merge` and to `Onyx.update` with the method "MERGE" + */ +type OnyxMergeCollectionInput = Collection>, TMap>; +/** + * Represents different kinds of updates that can be passed to `Onyx.update()` method. It is a discriminated union of + * different update methods (`SET`, `MERGE`, `MERGE_COLLECTION`), each with their own key and value structure. 
+ */ +type OnyxUpdate = { + [TKey in OnyxKey]: { + onyxMethod: typeof OnyxUtils.METHOD.SET; + key: TKey; + value: OnyxSetInput; + } | { + onyxMethod: typeof OnyxUtils.METHOD.MULTI_SET; + key: TKey; + value: OnyxMultiSetInput; + } | { + onyxMethod: typeof OnyxUtils.METHOD.MERGE; + key: TKey; + value: OnyxMergeInput; + } | { + onyxMethod: typeof OnyxUtils.METHOD.CLEAR; + key: TKey; + value?: undefined; + }; +}[OnyxKey] | { + [TKey in CollectionKeyBase]: { + onyxMethod: typeof OnyxUtils.METHOD.MERGE_COLLECTION; + key: TKey; + value: OnyxMergeCollectionInput; + }; +}[CollectionKeyBase]; +/** + * Represents the options used in `Onyx.init()` method. + */ +type InitOptions = { + /** `ONYXKEYS` constants object */ + keys?: DeepRecord; + /** initial data to set when `init()` and `clear()` is called */ + initialKeyStates?: Partial; + /** + * This is an array of keys (individual or collection patterns) that when provided to Onyx are flagged + * as "safe" for removal. Any components subscribing to these keys must also implement a canEvict option. See the README for more info. + */ + safeEvictionKeys?: OnyxKey[]; + /** + * Sets how many recent keys should we try to keep in cache + * Setting this to 0 would practically mean no cache + * We try to free cache when we connect to a safe eviction key + */ + maxCachedKeysCount?: number; + /** + * Auto synchronize storage events between multiple instances + * of Onyx running in different tabs/windows. Defaults to true for platforms that support local storage (web/desktop) + */ + shouldSyncMultipleInstances?: boolean; + /** Enables debugging setState() calls to connected components */ + debugSetState?: boolean; +}; +type GenericFunction = (...args: any[]) => any; +/** + * Represents a combination of Merge and Set operations that should be executed in Onyx + */ +type MixedOperationsQueue = { + merge: OnyxInputKeyValueMapping; + set: OnyxInputKeyValueMapping; +}; +export type { BaseConnectOptions, Collection, CollectionConnectCallback, CollectionConnectOptions, CollectionKey, CollectionKeyBase, ConnectOptions, CustomTypeOptions, DeepRecord, DefaultConnectCallback, DefaultConnectOptions, ExtractOnyxCollectionValue, GenericFunction, InitOptions, Key, KeyValueMapping, Mapping, NonNull, NonUndefined, OnyxInputKeyValueMapping, NullishDeep, OnyxCollection, OnyxEntry, OnyxKey, OnyxInputValue, OnyxCollectionInputValue, OnyxInput, OnyxSetInput, OnyxMultiSetInput, OnyxMergeInput, OnyxMergeCollectionInput, OnyxUpdate, OnyxValue, Selector, WithOnyxConnectOptions, MixedOperationsQueue, }; diff --git a/dist/types.js b/dist/types.js new file mode 100644 index 00000000..c8ad2e54 --- /dev/null +++ b/dist/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/dist/useLiveRef.d.ts b/dist/useLiveRef.d.ts new file mode 100644 index 00000000..28f2f1b3 --- /dev/null +++ b/dist/useLiveRef.d.ts @@ -0,0 +1,7 @@ +/// +/** + * Creates a mutable reference to a value, useful when you need to + * maintain a reference to a value that may change over time without triggering re-renders. 
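+ *
+ * Usage sketch (the `onChange` prop is illustrative):
+ * ```ts
+ * const onChangeRef = useLiveRef(onChange);
+ * // Inside a memoized callback, read the latest value without adding it to dependency arrays:
+ * const handlePress = useCallback(() => onChangeRef.current?.(), []);
+ * ```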
+ */ +declare function useLiveRef(value: T): import("react").MutableRefObject; +export default useLiveRef; diff --git a/dist/useLiveRef.js b/dist/useLiveRef.js new file mode 100644 index 00000000..19edc54c --- /dev/null +++ b/dist/useLiveRef.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const react_1 = require("react"); +/** + * Creates a mutable reference to a value, useful when you need to + * maintain a reference to a value that may change over time without triggering re-renders. + */ +function useLiveRef(value) { + const ref = (0, react_1.useRef)(value); + ref.current = value; + return ref; +} +exports.default = useLiveRef; diff --git a/dist/useOnyx.d.ts b/dist/useOnyx.d.ts new file mode 100644 index 00000000..0b98c902 --- /dev/null +++ b/dist/useOnyx.d.ts @@ -0,0 +1,41 @@ +import type { IsEqual } from 'type-fest'; +import type { CollectionKeyBase, OnyxCollection, OnyxKey, OnyxValue, Selector } from './types'; +type BaseUseOnyxOptions = { + /** + * Determines if this key in this subscription is safe to be evicted. + */ + canEvict?: boolean; + /** + * If set to false, then no data will be prefilled into the component. + */ + initWithStoredValues?: boolean; + /** + * If set to true, data will be retrieved from cache during the first render even if there is a pending merge for the key. + */ + allowStaleData?: boolean; +}; +type UseOnyxInitialValueOption = { + /** + * This value will be returned by the hook on the first render while the data is being read from Onyx. + */ + initialValue?: TInitialValue; +}; +type UseOnyxSelectorOption = { + /** + * This will be used to subscribe to a subset of an Onyx key's data. + * Using this setting on `useOnyx` can have very positive performance benefits because the component will only re-render + * when the subset of data changes. Otherwise, any change of data on any property would normally + * cause the component to re-render (and that can be expensive from a performance standpoint). + */ + selector?: Selector; +}; +type FetchStatus = 'loading' | 'loaded'; +type CachedValue = IsEqual> extends true ? TValue : TKey extends CollectionKeyBase ? NonNullable> : TValue; +type ResultMetadata = { + status: FetchStatus; +}; +type UseOnyxResult = [CachedValue, ResultMetadata]; +declare function useOnyx>(key: TKey, options?: BaseUseOnyxOptions & UseOnyxInitialValueOption & Required>): UseOnyxResult; +declare function useOnyx>(key: TKey, options?: BaseUseOnyxOptions & UseOnyxInitialValueOption>): UseOnyxResult; +export default useOnyx; +export type { UseOnyxResult, ResultMetadata, FetchStatus }; diff --git a/dist/useOnyx.js b/dist/useOnyx.js new file mode 100644 index 00000000..0aeab9b1 --- /dev/null +++ b/dist/useOnyx.js @@ -0,0 +1,126 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const fast_equals_1 = require("fast-equals"); +const react_1 = require("react"); +const OnyxUtils_1 = __importDefault(require("./OnyxUtils")); +const useLiveRef_1 = __importDefault(require("./useLiveRef")); +const usePrevious_1 = __importDefault(require("./usePrevious")); +const Onyx_1 = __importDefault(require("./Onyx")); +const OnyxCache_1 = __importDefault(require("./OnyxCache")); +function getCachedValue(key, selector) { + var _a; + return ((_a = OnyxUtils_1.default.tryGetCachedValue(key, { selector })) !== null && _a !== void 0 ? 
_a : undefined); +} +function useOnyx(key, options) { + const connectionIDRef = (0, react_1.useRef)(null); + const previousKey = (0, usePrevious_1.default)(key); + // Used to stabilize the selector reference and avoid unnecessary calls to `getSnapshot()`. + const selectorRef = (0, useLiveRef_1.default)(options === null || options === void 0 ? void 0 : options.selector); + // Stores the previous cached value as it's necessary to compare with the new value in `getSnapshot()`. + // We initialize it to `null` to simulate that we don't have any value from cache yet. + const cachedValueRef = (0, react_1.useRef)(null); + // Stores the previously result returned by the hook, containing the data from cache and the fetch status. + // We initialize it to `undefined` and `loading` fetch status to simulate the initial result when the hook is loading from the cache. + // However, if `initWithStoredValues` is `true` we set the fetch status to `loaded` since we want to signal that data is ready. + const resultRef = (0, react_1.useRef)([ + undefined, + { + status: (options === null || options === void 0 ? void 0 : options.initWithStoredValues) === false ? 'loaded' : 'loading', + }, + ]); + // Indicates if it's the first Onyx connection of this hook or not, as we don't want certain use cases + // in `getSnapshot()` to be satisfied several times. + const isFirstConnectionRef = (0, react_1.useRef)(true); + (0, react_1.useEffect)(() => { + // These conditions will ensure we can only handle dynamic collection member keys from the same collection. + if (previousKey === key) { + return; + } + try { + const previousCollectionKey = OnyxUtils_1.default.splitCollectionMemberKey(previousKey)[0]; + const collectionKey = OnyxUtils_1.default.splitCollectionMemberKey(key)[0]; + if (OnyxUtils_1.default.isCollectionMemberKey(previousCollectionKey, previousKey) && OnyxUtils_1.default.isCollectionMemberKey(collectionKey, key) && previousCollectionKey === collectionKey) { + return; + } + } + catch (e) { + throw new Error(`'${previousKey}' key can't be changed to '${key}'. useOnyx() only supports dynamic keys if they are both collection member keys from the same collection e.g. from 'collection_id1' to 'collection_id2'.`); + } + throw new Error(`'${previousKey}' key can't be changed to '${key}'. useOnyx() only supports dynamic keys if they are both collection member keys from the same collection e.g. from 'collection_id1' to 'collection_id2'.`); + }, [previousKey, key]); + const getSnapshot = (0, react_1.useCallback)(() => { + var _a, _b; + // We get the value from the cache, supplying a selector too in case it's defined. + // If `newValue` is `undefined` it means that the cache doesn't have a value for that key yet. + // If `newValue` is `null` or any other value it means that the cache does have a value for that key. + // This difference between `undefined` and other values is crucial and it's used to address the following + // conditions and use cases. + let newValue = getCachedValue(key, selectorRef.current); + const hasCacheForKey = OnyxCache_1.default.hasCacheForKey(key); + // Since the fetch status can be different given the use cases below, we define the variable right away. + let newFetchStatus; + // If we have pending merge operations for the key during the first connection, we set the new value to `undefined` + // and fetch status to `loading` to simulate that it is still being loaded until we have the most updated data. 
+ // If `allowStaleData` is `true` this logic will be ignored and cached value will be used, even if it's stale data. + if (isFirstConnectionRef.current && OnyxUtils_1.default.hasPendingMergeForKey(key) && !(options === null || options === void 0 ? void 0 : options.allowStaleData)) { + newValue = undefined; + newFetchStatus = 'loading'; + } + // If data is not present in cache and `initialValue` is set during the first connection, + // we set the new value to `initialValue` and fetch status to `loaded` since we already have some data to return to the consumer. + if (isFirstConnectionRef.current && !hasCacheForKey && (options === null || options === void 0 ? void 0 : options.initialValue) !== undefined) { + newValue = ((_a = options === null || options === void 0 ? void 0 : options.initialValue) !== null && _a !== void 0 ? _a : undefined); + newFetchStatus = 'loaded'; + } + // If the previously cached value is different from the new value, we update both cached value + // and the result to be returned by the hook. + // If the cache was set for the first time, we also update the cached value and the result. + const isCacheSetFirstTime = cachedValueRef.current === null && hasCacheForKey; + if (isCacheSetFirstTime || !(0, fast_equals_1.deepEqual)((_b = cachedValueRef.current) !== null && _b !== void 0 ? _b : undefined, newValue)) { + cachedValueRef.current = newValue; + resultRef.current = [cachedValueRef.current, { status: newFetchStatus !== null && newFetchStatus !== void 0 ? newFetchStatus : 'loaded' }]; + } + return resultRef.current; + }, [key, selectorRef, options === null || options === void 0 ? void 0 : options.allowStaleData, options === null || options === void 0 ? void 0 : options.initialValue]); + const subscribe = (0, react_1.useCallback)((onStoreChange) => { + connectionIDRef.current = Onyx_1.default.connect({ + key, + callback: () => { + // We don't need to update the Onyx cache again here, when `callback` is called the cache is already + // expected to be updated, so we just signal that the store changed and `getSnapshot()` can be called again. + isFirstConnectionRef.current = false; + onStoreChange(); + }, + initWithStoredValues: options === null || options === void 0 ? void 0 : options.initWithStoredValues, + waitForCollectionCallback: OnyxUtils_1.default.isCollectionKey(key), + }); + return () => { + if (!connectionIDRef.current) { + return; + } + Onyx_1.default.disconnect(connectionIDRef.current); + isFirstConnectionRef.current = false; + }; + }, [key, options === null || options === void 0 ? void 0 : options.initWithStoredValues]); + // Mimics withOnyx's checkEvictableKeys() behavior. + (0, react_1.useEffect)(() => { + if ((options === null || options === void 0 ? void 0 : options.canEvict) === undefined || !connectionIDRef.current) { + return; + } + if (!OnyxUtils_1.default.isSafeEvictionKey(key)) { + throw new Error(`canEvict can't be used on key '${key}'. This key must explicitly be flagged as safe for removal by adding it to Onyx.init({safeEvictionKeys: []}).`); + } + if (options.canEvict) { + OnyxUtils_1.default.removeFromEvictionBlockList(key, connectionIDRef.current); + } + else { + OnyxUtils_1.default.addToEvictionBlockList(key, connectionIDRef.current); + } + }, [key, options === null || options === void 0 ? 
void 0 : options.canEvict]); + const result = (0, react_1.useSyncExternalStore)(subscribe, getSnapshot); + return result; +} +exports.default = useOnyx; diff --git a/dist/usePrevious.d.ts b/dist/usePrevious.d.ts new file mode 100644 index 00000000..22865bea --- /dev/null +++ b/dist/usePrevious.d.ts @@ -0,0 +1,5 @@ +/** + * Returns the previous value of the provided value. + */ +declare function usePrevious(value: T): T; +export default usePrevious; diff --git a/dist/usePrevious.js b/dist/usePrevious.js new file mode 100644 index 00000000..e2c9266e --- /dev/null +++ b/dist/usePrevious.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const react_1 = require("react"); +/** + * Returns the previous value of the provided value. + */ +function usePrevious(value) { + const ref = (0, react_1.useRef)(value); + (0, react_1.useEffect)(() => { + ref.current = value; + }, [value]); + return ref.current; +} +exports.default = usePrevious; diff --git a/dist/utils.d.ts b/dist/utils.d.ts new file mode 100644 index 00000000..302053b4 --- /dev/null +++ b/dist/utils.d.ts @@ -0,0 +1,49 @@ +import type { OnyxInput, OnyxKey } from './types'; +type EmptyObject = Record; +type EmptyValue = EmptyObject | null | undefined; +/** Checks whether the given object is an object and not null/undefined. */ +declare function isEmptyObject(obj: T | EmptyValue): obj is EmptyValue; +/** + * Merges two objects and removes null values if "shouldRemoveNestedNulls" is set to true + * + * We generally want to remove null values from objects written to disk and cache, because it decreases the amount of data stored in memory and on disk. + * On native, when merging an existing value with new changes, SQLite will use JSON_PATCH, which removes top-level nullish values. + * To be consistent with the behaviour for merge, we'll also want to remove null values for "set" operations. + */ +declare function fastMerge(target: TValue, source: TValue, shouldRemoveNestedNulls?: boolean): TValue; +/** Deep removes the nested null values from the given value. */ +declare function removeNestedNullValues | null>(value: TValue): TValue; +/** Formats the action name by uppercasing and adding the key if provided. */ +declare function formatActionName(method: string, key?: OnyxKey): string; +/** validate that the update and the existing value are compatible */ +declare function checkCompatibilityWithExistingValue(value: unknown, existingValue: unknown): { + isCompatible: boolean; + existingValueType?: string; + newValueType?: string; +}; +/** + * Picks entries from an object based on a condition. + * + * @param obj - The object to pick entries from. + * @param condition - The condition to determine which entries to pick. + * @returns The object containing only the picked entries. + */ +declare function pick(obj: Record, condition: string | string[] | ((entry: [string, TValue]) => boolean)): Record; +/** + * Omits entries from an object based on a condition. + * + * @param obj - The object to omit entries from. + * @param condition - The condition to determine which entries to omit. + * @returns The object containing only the remaining entries after omission. 
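+ *
+ * @example
+ * ```ts
+ * // Results follow the filterObject helper in dist/utils.js: the condition can be a single key,
+ * // an array of keys, or a predicate over [key, value] entries.
+ * pick({a: 1, b: 2, c: 3}, ['a', 'c']);                // {a: 1, c: 3}
+ * omit({a: 1, b: 2, c: 3}, ([, value]) => value > 1);  // {a: 1}
+ * ```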
+ */ +declare function omit(obj: Record, condition: string | string[] | ((entry: [string, TValue]) => boolean)): Record; +declare const _default: { + isEmptyObject: typeof isEmptyObject; + fastMerge: typeof fastMerge; + formatActionName: typeof formatActionName; + removeNestedNullValues: typeof removeNestedNullValues; + checkCompatibilityWithExistingValue: typeof checkCompatibilityWithExistingValue; + pick: typeof pick; + omit: typeof omit; +}; +export default _default; diff --git a/dist/utils.js b/dist/utils.js new file mode 100644 index 00000000..d8c7a1be --- /dev/null +++ b/dist/utils.js @@ -0,0 +1,175 @@ +"use strict"; +/* eslint-disable @typescript-eslint/prefer-for-of */ +Object.defineProperty(exports, "__esModule", { value: true }); +/** Checks whether the given object is an object and not null/undefined. */ +function isEmptyObject(obj) { + return typeof obj === 'object' && Object.keys(obj || {}).length === 0; +} +// Mostly copied from https://medium.com/@lubaka.a/how-to-remove-lodash-performance-improvement-b306669ad0e1 +/** + * Checks whether the given value can be merged. It has to be an object, but not an array, RegExp or Date. + */ +function isMergeableObject(value) { + const isNonNullObject = value != null ? typeof value === 'object' : false; + return isNonNullObject && Object.prototype.toString.call(value) !== '[object RegExp]' && Object.prototype.toString.call(value) !== '[object Date]' && !Array.isArray(value); +} +/** + * Merges the source object into the target object. + * @param target - The target object. + * @param source - The source object. + * @param shouldRemoveNestedNulls - If true, null object values will be removed. + * @returns - The merged object. + */ +function mergeObject(target, source, shouldRemoveNestedNulls = true) { + const destination = {}; + const targetObject = isMergeableObject(target) ? target : undefined; + // First we want to copy over all keys from the target into the destination object, + // in case "target" is a mergable object. + // If "shouldRemoveNestedNulls" is true, we want to remove null values from the merged object + // and therefore we need to omit keys where either the source or target value is null. + if (targetObject) { + const targetKeys = Object.keys(targetObject); + for (let i = 0; i < targetKeys.length; ++i) { + const key = targetKeys[i]; + const sourceValue = source === null || source === void 0 ? void 0 : source[key]; + const targetValue = targetObject === null || targetObject === void 0 ? void 0 : targetObject[key]; + // If "shouldRemoveNestedNulls" is true, we want to remove null values from the merged object. + // Therefore, if either target or source value is null, we want to prevent the key from being set. + // targetValue should techincally never be "undefined", because it will always be a value from cache or storage + // and we never set "undefined" there. Still, if there targetValue is undefined we don't want to set + // the key explicitly to prevent loose undefined values in objects in cache and storage. + const isSourceOrTargetNull = targetValue === undefined || targetValue === null || sourceValue === null; + const shouldOmitTargetKey = shouldRemoveNestedNulls && isSourceOrTargetNull; + if (!shouldOmitTargetKey) { + destination[key] = targetValue; + } + } + } + // After copying over all keys from the target object, we want to merge the source object into the destination object. 
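+    // Illustrative behaviour (example values are not part of this patch): with shouldRemoveNestedNulls = true,
+    // fastMerge({a: 1, b: {c: 2}}, {b: {c: null, d: 3}}) yields {a: 1, b: {d: 3}} - the null leaf is dropped
+    // instead of being written, keeping explicit nulls out of cache and storage.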
+ const sourceKeys = Object.keys(source); + for (let i = 0; i < sourceKeys.length; ++i) { + const key = sourceKeys[i]; + const sourceValue = source === null || source === void 0 ? void 0 : source[key]; + const targetValue = targetObject === null || targetObject === void 0 ? void 0 : targetObject[key]; + // If undefined is passed as the source value for a key, we want to generally ignore it. + // If "shouldRemoveNestedNulls" is set to true and the source value is null, + // we don't want to set/merge the source value into the merged object. + const shouldIgnoreNullSourceValue = shouldRemoveNestedNulls && sourceValue === null; + const shouldOmitSourceKey = sourceValue === undefined || shouldIgnoreNullSourceValue; + if (!shouldOmitSourceKey) { + // If the source value is a mergable object, we want to merge it into the target value. + // If "shouldRemoveNestedNulls" is true, "fastMerge" will recursively + // remove nested null values from the merged object. + // If source value is any other value we need to set the source value it directly. + if (isMergeableObject(sourceValue)) { + // If the target value is null or undefined, we need to fallback to an empty object, + // so that we can still use "fastMerge" to merge the source value, + // to ensure that nested null values are removed from the merged object. + const targetValueWithFallback = (targetValue !== null && targetValue !== void 0 ? targetValue : {}); + destination[key] = fastMerge(targetValueWithFallback, sourceValue, shouldRemoveNestedNulls); + } + else { + destination[key] = sourceValue; + } + } + } + return destination; +} +/** + * Merges two objects and removes null values if "shouldRemoveNestedNulls" is set to true + * + * We generally want to remove null values from objects written to disk and cache, because it decreases the amount of data stored in memory and on disk. + * On native, when merging an existing value with new changes, SQLite will use JSON_PATCH, which removes top-level nullish values. + * To be consistent with the behaviour for merge, we'll also want to remove null values for "set" operations. + */ +function fastMerge(target, source, shouldRemoveNestedNulls = true) { + // We have to ignore arrays and nullish values here, + // otherwise "mergeObject" will throw an error, + // because it expects an object as "source" + if (Array.isArray(source) || source === null || source === undefined) { + return source; + } + return mergeObject(target, source, shouldRemoveNestedNulls); +} +/** Deep removes the nested null values from the given value. */ +function removeNestedNullValues(value) { + if (typeof value === 'object' && !Array.isArray(value)) { + const objectValue = value; + return fastMerge(objectValue, objectValue); + } + return value; +} +/** Formats the action name by uppercasing and adding the key if provided. */ +function formatActionName(method, key) { + return key ? `${method.toUpperCase()}/${key}` : method.toUpperCase(); +} +/** validate that the update and the existing value are compatible */ +function checkCompatibilityWithExistingValue(value, existingValue) { + if (!existingValue || !value) { + return { + isCompatible: true, + }; + } + const existingValueType = Array.isArray(existingValue) ? 'array' : 'non-array'; + const newValueType = Array.isArray(value) ? 
'array' : 'non-array'; + if (existingValueType !== newValueType) { + return { + isCompatible: false, + existingValueType, + newValueType, + }; + } + return { + isCompatible: true, + }; +} +/** + * Filters an object based on a condition and an inclusion flag. + * + * @param obj - The object to filter. + * @param condition - The condition to apply. + * @param include - If true, include entries that match the condition; otherwise, exclude them. + * @returns The filtered object. + */ +function filterObject(obj, condition, include) { + const result = {}; + const entries = Object.entries(obj); + for (let i = 0; i < entries.length; i++) { + const [key, value] = entries[i]; + let shouldInclude; + if (Array.isArray(condition)) { + shouldInclude = condition.includes(key); + } + else if (typeof condition === 'string') { + shouldInclude = key === condition; + } + else { + shouldInclude = condition(entries[i]); + } + if (include ? shouldInclude : !shouldInclude) { + result[key] = value; + } + } + return result; +} +/** + * Picks entries from an object based on a condition. + * + * @param obj - The object to pick entries from. + * @param condition - The condition to determine which entries to pick. + * @returns The object containing only the picked entries. + */ +function pick(obj, condition) { + return filterObject(obj, condition, true); +} +/** + * Omits entries from an object based on a condition. + * + * @param obj - The object to omit entries from. + * @param condition - The condition to determine which entries to omit. + * @returns The object containing only the remaining entries after omission. + */ +function omit(obj, condition) { + return filterObject(obj, condition, false); +} +exports.default = { isEmptyObject, fastMerge, formatActionName, removeNestedNullValues, checkCompatibilityWithExistingValue, pick, omit }; diff --git a/dist/withOnyx.d.ts b/dist/withOnyx.d.ts new file mode 100644 index 00000000..f665cea9 --- /dev/null +++ b/dist/withOnyx.d.ts @@ -0,0 +1,141 @@ +import {IsEqual} from 'type-fest'; +import {CollectionKeyBase, ExtractOnyxCollectionValue, KeyValueMapping, OnyxCollection, OnyxEntry, OnyxKey, Selector} from './types'; + +/** + * Represents the base mapping options between an Onyx key and the component's prop. + */ +type BaseMapping = { + canEvict?: boolean | ((props: Omit) => boolean); + initWithStoredValues?: boolean; + allowStaleData?: boolean; +}; + +type CollectionBaseMapping = { + initialValue?: OnyxCollection; +}; + +type EntryBaseMapping = { + initialValue?: OnyxEntry; +}; + +/** + * Represents the string / function `key` mapping option between an Onyx key and the component's prop. + * + * If `key` is `string`, the type of the Onyx value that is associated with `key` must match with the type of the component's prop, + * otherwise an error will be thrown. + * + * If `key` is `function`, the return type of `key` function must be a valid Onyx key and the type of the Onyx value associated + * with `key` must match with the type of the component's prop, otherwise an error will be thrown. + * + * @example + * ```ts + * // Onyx prop with `string` key + * onyxProp: { + * key: ONYXKEYS.ACCOUNT, + * }, + * + * // Onyx prop with `function` key + * onyxProp: { + * key: ({reportId}) => ONYXKEYS.ACCOUNT, + * }, + * ``` + */ +type BaseMappingKey = IsEqual extends true + ? { + key: TOnyxKey | ((props: Omit & Partial) => TOnyxKey); + } + : never; + +/** + * Represents the string `key` and `selector` mapping options between an Onyx key and the component's prop. 
+ * + * The function signature and return type of `selector` must match with the type of the component's prop, + * otherwise an error will be thrown. + * + * @example + * ```ts + * // Onyx prop with `string` key and selector + * onyxProp: { + * key: ONYXKEYS.ACCOUNT, + * selector: (value: Account | null): string => value?.id ?? '', + * }, + * ``` + */ +type BaseMappingStringKeyAndSelector = { + key: TOnyxKey; + selector: Selector; +}; + +/** + * Represents the function `key` and `selector` mapping options between an Onyx key and the component's prop. + * + * The function signature and return type of `selector` must match with the type of the component's prop, + * otherwise an error will be thrown. + * + * @example + * ```ts + * // Onyx prop with `function` key and selector + * onyxProp: { + * key: ({reportId}) => ONYXKEYS.ACCOUNT, + * selector: (value: Account | null) => value?.id ?? '', + * }, + * ``` + */ +type BaseMappingFunctionKeyAndSelector = { + key: (props: Omit & Partial) => TOnyxKey; + selector: Selector; +}; + +/** + * Represents the mapping options between an Onyx key and the component's prop with all its possibilities. + */ +type Mapping = BaseMapping & + EntryBaseMapping & + ( + | BaseMappingKey> + | BaseMappingStringKeyAndSelector + | BaseMappingFunctionKeyAndSelector + ); + +/** + * Represents the mapping options between an Onyx collection key without suffix and the component's prop with all its possibilities. + */ +type CollectionMapping = BaseMapping & + CollectionBaseMapping & + ( + | BaseMappingKey> + | BaseMappingStringKeyAndSelector, TOnyxKey> + | BaseMappingFunctionKeyAndSelector, TOnyxKey> + ); + +/** + * Represents an union type of all the possible Onyx key mappings. + * Each `OnyxPropMapping` will be associated with its respective Onyx key, ensuring different type-safety for each object. + */ +type OnyxPropMapping = { + [TOnyxKey in OnyxKey]: Mapping; +}[OnyxKey]; + +/** + * Represents an union type of all the possible Onyx collection keys without suffix mappings. + * Each `OnyxPropCollectionMapping` will be associated with its respective Onyx key, ensuring different type-safety for each object. + */ +type OnyxPropCollectionMapping = { + [TOnyxKey in CollectionKeyBase]: CollectionMapping; +}[CollectionKeyBase]; + +/** + * @deprecated Use `useOnyx` instead of `withOnyx` whenever possible. + * + * This is a higher order component that provides the ability to map a state property directly to + * something in Onyx (a key/value store). That way, as soon as data in Onyx changes, the state will be set and the view + * will automatically change to reflect the new data. + */ +declare function withOnyx( + mapping: { + [TOnyxProp in keyof TOnyxProps]: OnyxPropMapping | OnyxPropCollectionMapping; + }, + shouldDelayUpdates?: boolean, +): (component: React.ComponentType) => React.ComponentType>; + +export default withOnyx; diff --git a/dist/withOnyx.js b/dist/withOnyx.js new file mode 100644 index 00000000..dfe93537 --- /dev/null +++ b/dist/withOnyx.js @@ -0,0 +1,325 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/** + * This is a higher order component that provides the ability to map a state property directly to + * something in Onyx (a key/value store). That way, as soon as data in Onyx changes, the state will be set and the view + * will automatically change to reflect the new data. + */ +const prop_types_1 = __importDefault(require("prop-types")); +const react_1 = __importDefault(require("react")); +const underscore_1 = __importDefault(require("underscore")); +const Onyx_1 = __importDefault(require("./Onyx")); +const Str = __importStar(require("./Str")); +const utils_1 = __importDefault(require("./utils")); +const OnyxUtils_1 = __importDefault(require("./OnyxUtils")); +// This is a list of keys that can exist on a `mapping`, but are not directly related to loading data from Onyx. When the keys of a mapping are looped over to check +// if a key has changed, it's a good idea to skip looking at these properties since they would have unexpected results. +const mappingPropertiesToIgnoreChangesTo = ['initialValue', 'allowStaleData']; +/** + * Returns the display name of a component + * + * @param {object} component + * @returns {string} + */ +function getDisplayName(component) { + return component.displayName || component.name || 'Component'; +} +/** + * Removes all the keys from state that are unrelated to the onyx data being mapped to the component. + * + * @param {Object} state of the component + * @param {Object} onyxToStateMapping the object holding all of the mapping configuration for the component + * @returns {Object} + */ +const getOnyxDataFromState = (state, onyxToStateMapping) => underscore_1.default.pick(state, underscore_1.default.keys(onyxToStateMapping)); +function default_1(mapOnyxToState, shouldDelayUpdates = false) { + // A list of keys that must be present in tempState before we can render the WrappedComponent + const requiredKeysForInit = underscore_1.default.chain(mapOnyxToState) + .omit((config) => config.initWithStoredValues === false) + .keys() + .value(); + return (WrappedComponent) => { + const displayName = getDisplayName(WrappedComponent); + class withOnyx extends react_1.default.Component { + constructor(props) { + super(props); + this.pendingSetStates = []; + this.shouldDelayUpdates = shouldDelayUpdates; + this.setWithOnyxState = this.setWithOnyxState.bind(this); + this.flushPendingSetStates = this.flushPendingSetStates.bind(this); + // This stores all the Onyx connection IDs to be used when the component unmounts so everything can be + // disconnected. 
It is a key value store with the format {[mapping.key]: connectionID}. + this.activeConnectionIDs = {}; + const cachedState = underscore_1.default.reduce(mapOnyxToState, (resultObj, mapping, propertyName) => { + const key = Str.result(mapping.key, props); + let value = OnyxUtils_1.default.tryGetCachedValue(key, mapping); + if (!value && mapping.initialValue) { + value = mapping.initialValue; + } + /** + * If we have a pending merge for a key it could mean that data is being set via Onyx.merge() and someone expects a component to have this data immediately. + * + * @example + * + * Onyx.merge('report_123', value); + * Navigation.navigate(route); // Where "route" expects the "value" to be available immediately once rendered. + * + * In reality, Onyx.merge() will only update the subscriber after all merges have been batched and the previous value is retrieved via a get() (returns a promise). + * So, we won't use the cache optimization here as it will lead us to arbitrarily defer various actions in the application code. + */ + if (mapping.initWithStoredValues !== false && ((value !== undefined && !OnyxUtils_1.default.hasPendingMergeForKey(key)) || mapping.allowStaleData)) { + // eslint-disable-next-line no-param-reassign + resultObj[propertyName] = value; + } + return resultObj; + }, {}); + // If we have all the data we need, then we can render the component immediately + cachedState.loading = underscore_1.default.size(cachedState) < requiredKeysForInit.length; + // Object holding the temporary initial state for the component while we load the various Onyx keys + this.tempState = cachedState; + this.state = cachedState; + } + componentDidMount() { + const onyxDataFromState = getOnyxDataFromState(this.state, mapOnyxToState); + // Subscribe each of the state properties to the proper Onyx key + underscore_1.default.each(mapOnyxToState, (mapping, propertyName) => { + if (underscore_1.default.includes(mappingPropertiesToIgnoreChangesTo, propertyName)) { + return; + } + const key = Str.result(mapping.key, Object.assign(Object.assign({}, this.props), onyxDataFromState)); + this.connectMappingToOnyx(mapping, propertyName, key); + }); + this.checkEvictableKeys(); + } + componentDidUpdate(prevProps, prevState) { + // The whole purpose of this method is to check to see if a key that is subscribed to Onyx has changed, and then Onyx needs to be disconnected from the old + // key and connected to the new key. + // For example, a key could change if KeyB depends on data loading from Onyx for KeyA. + const isFirstTimeUpdatingAfterLoading = prevState.loading && !this.state.loading; + const onyxDataFromState = getOnyxDataFromState(this.state, mapOnyxToState); + const prevOnyxDataFromState = getOnyxDataFromState(prevState, mapOnyxToState); + underscore_1.default.each(mapOnyxToState, (mapping, propName) => { + // Some properties can be ignored because they aren't related to onyx keys and they will never change + if (underscore_1.default.includes(mappingPropertiesToIgnoreChangesTo, propName)) { + return; + } + // The previous key comes from either: + // 1) The initial key that was connected to (ie. set from `componentDidMount()`) + // 2) The updated props which caused `componentDidUpdate()` to run + // The first case cannot be used all the time because of race conditions where `componentDidUpdate()` can be triggered before connectingMappingToOnyx() is done + // (eg. if a user switches chats really quickly). 
In this case, it's much more stable to always look at the changes to prevProp and prevState to derive the key. + // The second case cannot be used all the time because the onyx data doesn't change the first time that `componentDidUpdate()` runs after loading. In this case, + // the `mapping.previousKey` must be used for the comparison or else this logic never detects that onyx data could have changed during the loading process. + const previousKey = isFirstTimeUpdatingAfterLoading ? mapping.previousKey : Str.result(mapping.key, Object.assign(Object.assign({}, prevProps), prevOnyxDataFromState)); + const newKey = Str.result(mapping.key, Object.assign(Object.assign({}, this.props), onyxDataFromState)); + if (previousKey !== newKey) { + Onyx_1.default.disconnect(this.activeConnectionIDs[previousKey], previousKey); + delete this.activeConnectionIDs[previousKey]; + this.connectMappingToOnyx(mapping, propName, newKey); + } + }); + this.checkEvictableKeys(); + } + componentWillUnmount() { + // Disconnect everything from Onyx + underscore_1.default.each(mapOnyxToState, (mapping) => { + const key = Str.result(mapping.key, Object.assign(Object.assign({}, this.props), getOnyxDataFromState(this.state, mapOnyxToState))); + Onyx_1.default.disconnect(this.activeConnectionIDs[key], key); + }); + } + setStateProxy(modifier) { + if (this.shouldDelayUpdates) { + this.pendingSetStates.push(modifier); + } + else { + this.setState(modifier); + } + } + /** + * This method is used by the internal raw Onyx `sendDataToConnection`, it is designed to prevent unnecessary renders while a component + * still in a "loading" (read "mounting") state. The temporary initial state is saved to the HOC instance and setState() + * only called once all the necessary data has been collected. + * + * There is however the possibility the component could have been updated by a call to setState() + * before the data was "initially" collected. A race condition. + * For example some update happened on some key, while onyx was still gathering the initial hydration data. + * This update is disptached directly to setStateProxy and therefore the component has the most up-to-date data + * + * This is a design flaw in Onyx itself as dispatching updates before initial hydration is not a correct event flow. + * We however need to workaround this issue in the HOC. The addition of initialValue makes things even more complex, + * since you cannot be really sure if the component has been updated before or after the initial hydration. Therefore if + * initialValue is there, we just check if the update is different than that and then try to handle it as best as we can. + * + * @param {String} statePropertyName + * @param {*} val + */ + setWithOnyxState(statePropertyName, val) { + const prevValue = this.state[statePropertyName]; + // If the component is not loading (read "mounting"), then we can just update the state + // There is a small race condition. + // When calling setWithOnyxState we delete the tempState object that is used to hold temporary state updates while the HOC is gathering data. + // However the loading flag is only set on the setState callback down below. setState however is an async operation that is also batched, + // therefore there is a small window of time where the loading flag is not false but the tempState is already gone + // (while the update is queued and waiting to be applied). + // This simply bypasses the loading check if the tempState is gone and the update can be safely queued with a normal setStateProxy. 
+ if (!this.state.loading || !this.tempState) { + // Performance optimization, do not trigger update with same values + if (prevValue === val || (utils_1.default.isEmptyObject(prevValue) && utils_1.default.isEmptyObject(val))) { + return; + } + this.setStateProxy({ [statePropertyName]: val }); + return; + } + this.tempState[statePropertyName] = val; + // If some key does not have a value yet, do not update the state yet + const tempStateIsMissingKey = underscore_1.default.some(requiredKeysForInit, (key) => underscore_1.default.isUndefined(this.tempState[key])); + if (tempStateIsMissingKey) { + return; + } + const stateUpdate = Object.assign({}, this.tempState); + delete this.tempState; + // Full of hacky workarounds to prevent the race condition described above. + this.setState((prevState) => { + const finalState = underscore_1.default.reduce(stateUpdate, (result, value, key) => { + if (key === 'loading') { + return result; + } + const initialValue = mapOnyxToState[key].initialValue; + // If initialValue is there and the state contains something different it means + // an update has already been received and we can discard the value we are trying to hydrate + if (!underscore_1.default.isUndefined(initialValue) && !underscore_1.default.isUndefined(prevState[key]) && prevState[key] !== initialValue) { + // eslint-disable-next-line no-param-reassign + result[key] = prevState[key]; + // if value is already there (without initial value) then we can discard the value we are trying to hydrate + } + else if (!underscore_1.default.isUndefined(prevState[key])) { + // eslint-disable-next-line no-param-reassign + result[key] = prevState[key]; + } + else { + // eslint-disable-next-line no-param-reassign + result[key] = value; + } + return result; + }, {}); + finalState.loading = false; + return finalState; + }); + } + /** + * Makes sure each Onyx key we requested has been set to state with a value of some kind. + * We are doing this so that the wrapped component will only render when all the data + * it needs is available to it. + */ + checkEvictableKeys() { + // We will add this key to our list of recently accessed keys + // if the canEvict function returns true. This is necessary criteria + // we MUST use to specify if a key can be removed or not. + underscore_1.default.each(mapOnyxToState, (mapping) => { + if (underscore_1.default.isUndefined(mapping.canEvict)) { + return; + } + const canEvict = Str.result(mapping.canEvict, this.props); + const key = Str.result(mapping.key, this.props); + if (!OnyxUtils_1.default.isSafeEvictionKey(key)) { + throw new Error(`canEvict can't be used on key '${key}'. This key must explicitly be flagged as safe for removal by adding it to Onyx.init({safeEvictionKeys: []}).`); + } + if (canEvict) { + OnyxUtils_1.default.removeFromEvictionBlockList(key, mapping.connectionID); + } + else { + OnyxUtils_1.default.addToEvictionBlockList(key, mapping.connectionID); + } + }); + } + /** + * Takes a single mapping and binds the state of the component to the store + * + * @param {object} mapping + * @param {string|function} mapping.key key to connect to. 
can be a string or a + * function that takes this.props as an argument and returns a string + * @param {string} statePropertyName the name of the state property that Onyx will add the data to + * @param {boolean} [mapping.initWithStoredValues] If set to false, then no data will be prefilled into the + * component + * @param {string} key to connect to Onyx with + */ + connectMappingToOnyx(mapping, statePropertyName, key) { + // Remember what the previous key was so that key changes can be detected when data is being loaded from Onyx. This will allow + // dependent keys to finish loading their data. + // eslint-disable-next-line no-param-reassign + mapOnyxToState[statePropertyName].previousKey = key; + // eslint-disable-next-line rulesdir/prefer-onyx-connect-in-libs + this.activeConnectionIDs[key] = Onyx_1.default.connect(Object.assign(Object.assign({}, mapping), { key, + statePropertyName, withOnyxInstance: this, displayName })); + } + flushPendingSetStates() { + if (!this.shouldDelayUpdates) { + return; + } + this.shouldDelayUpdates = false; + this.pendingSetStates.forEach((modifier) => { + this.setState(modifier); + }); + this.pendingSetStates = []; + } + render() { + // Remove any null values so that React replaces them with default props + const propsToPass = underscore_1.default.omit(this.props, underscore_1.default.isNull); + if (this.state.loading) { + return null; + } + // Remove any internal state properties used by withOnyx + // that should not be passed to a wrapped component + let stateToPass = underscore_1.default.omit(this.state, 'loading'); + stateToPass = underscore_1.default.omit(stateToPass, underscore_1.default.isNull); + const stateToPassWithoutNestedNulls = utils_1.default.removeNestedNullValues(stateToPass); + // Spreading props and state is necessary in an HOC where the data cannot be predicted + return (react_1.default.createElement(WrappedComponent, Object.assign({ markReadyForHydration: this.flushPendingSetStates }, propsToPass, stateToPassWithoutNestedNulls, { ref: this.props.forwardedRef }))); + } + } + withOnyx.propTypes = { + forwardedRef: prop_types_1.default.oneOfType([ + prop_types_1.default.func, + // eslint-disable-next-line react/forbid-prop-types + prop_types_1.default.shape({ current: prop_types_1.default.object }), + ]), + }; + withOnyx.defaultProps = { + forwardedRef: undefined, + }; + withOnyx.displayName = `withOnyx(${displayName})`; + return react_1.default.forwardRef((props, ref) => { + const Component = withOnyx; + return (react_1.default.createElement(Component + // eslint-disable-next-line react/jsx-props-no-spreading + , Object.assign({}, props, { forwardedRef: ref }))); + }); + }; +} +exports.default = default_1; diff --git a/dist/withOnyx/index.d.ts b/dist/withOnyx/index.d.ts new file mode 100644 index 00000000..988a5e5b --- /dev/null +++ b/dist/withOnyx/index.d.ts @@ -0,0 +1,15 @@ +/** + * This is a higher order component that provides the ability to map a state property directly to + * something in Onyx (a key/value store). That way, as soon as data in Onyx changes, the state will be set and the view + * will automatically change to reflect the new data. + */ +import React from 'react'; +import type { MapOnyxToState } from './types'; +/** + * @deprecated Use `useOnyx` instead of `withOnyx` whenever possible. + * + * This is a higher order component that provides the ability to map a state property directly to + * something in Onyx (a key/value store). 
That way, as soon as data in Onyx changes, the state will be set and the view + * will automatically change to reflect the new data. + */ +export default function (mapOnyxToState: MapOnyxToState, shouldDelayUpdates?: boolean): (component: React.ComponentType) => React.ComponentType>; diff --git a/dist/withOnyx/index.js b/dist/withOnyx/index.js new file mode 100644 index 00000000..35321ab3 --- /dev/null +++ b/dist/withOnyx/index.js @@ -0,0 +1,322 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/** + * This is a higher order component that provides the ability to map a state property directly to + * something in Onyx (a key/value store). That way, as soon as data in Onyx changes, the state will be set and the view + * will automatically change to reflect the new data. + */ +const react_1 = __importDefault(require("react")); +const Onyx_1 = __importDefault(require("../Onyx")); +const OnyxUtils_1 = __importDefault(require("../OnyxUtils")); +const Str = __importStar(require("../Str")); +const utils_1 = __importDefault(require("../utils")); +const OnyxCache_1 = __importDefault(require("../OnyxCache")); +// This is a list of keys that can exist on a `mapping`, but are not directly related to loading data from Onyx. When the keys of a mapping are looped over to check +// if a key has changed, it's a good idea to skip looking at these properties since they would have unexpected results. +const mappingPropertiesToIgnoreChangesTo = ['initialValue', 'allowStaleData']; +/** + * Returns the display name of a component + */ +function getDisplayName(component) { + return component.displayName || component.name || 'Component'; +} +/** + * Removes all the keys from state that are unrelated to the onyx data being mapped to the component. + * + * @param state of the component + * @param onyxToStateMapping the object holding all of the mapping configuration for the component + */ +function getOnyxDataFromState(state, onyxToStateMapping) { + return utils_1.default.pick(state, Object.keys(onyxToStateMapping)); +} +/** + * Utility function to return the properly typed entries of the `withOnyx` mapping object. + */ +function mapOnyxToStateEntries(mapOnyxToState) { + return Object.entries(mapOnyxToState); +} +/** + * @deprecated Use `useOnyx` instead of `withOnyx` whenever possible. 
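+ *
+ * Where the hook API fits, a `useOnyx` call is the usual replacement (a sketch only; the key name is illustrative):
+ * ```ts
+ * const [session, {status}] = useOnyx(ONYXKEYS.SESSION);
+ * ```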
+ * + * This is a higher order component that provides the ability to map a state property directly to + * something in Onyx (a key/value store). That way, as soon as data in Onyx changes, the state will be set and the view + * will automatically change to reflect the new data. + */ +function default_1(mapOnyxToState, shouldDelayUpdates = false) { + // A list of keys that must be present in tempState before we can render the WrappedComponent + const requiredKeysForInit = Object.keys(utils_1.default.omit(mapOnyxToState, ([, options]) => options.initWithStoredValues === false)); + return (WrappedComponent) => { + const displayName = getDisplayName(WrappedComponent); + class withOnyx extends react_1.default.Component { + constructor(props) { + super(props); + this.pendingSetStates = []; + this.shouldDelayUpdates = shouldDelayUpdates; + this.setWithOnyxState = this.setWithOnyxState.bind(this); + this.flushPendingSetStates = this.flushPendingSetStates.bind(this); + // This stores all the Onyx connection IDs to be used when the component unmounts so everything can be + // disconnected. It is a key value store with the format {[mapping.key]: connectionID}. + this.activeConnectionIDs = {}; + const cachedState = mapOnyxToStateEntries(mapOnyxToState).reduce((resultObj, [propName, mapping]) => { + const key = Str.result(mapping.key, props); + let value = OnyxUtils_1.default.tryGetCachedValue(key, mapping); + const hasCacheForKey = OnyxCache_1.default.hasCacheForKey(key); + if (!hasCacheForKey && !value && mapping.initialValue) { + value = mapping.initialValue; + } + /** + * If we have a pending merge for a key it could mean that data is being set via Onyx.merge() and someone expects a component to have this data immediately. + * + * @example + * + * Onyx.merge('report_123', value); + * Navigation.navigate(route); // Where "route" expects the "value" to be available immediately once rendered. + * + * In reality, Onyx.merge() will only update the subscriber after all merges have been batched and the previous value is retrieved via a get() (returns a promise). + * So, we won't use the cache optimization here as it will lead us to arbitrarily defer various actions in the application code. 
+ */ + const hasPendingMergeForKey = OnyxUtils_1.default.hasPendingMergeForKey(key); + const hasValueInCache = hasCacheForKey || value !== undefined; + const shouldSetState = mapping.initWithStoredValues !== false && ((hasValueInCache && !hasPendingMergeForKey) || !!mapping.allowStaleData); + if (shouldSetState) { + // eslint-disable-next-line no-param-reassign + resultObj[propName] = value; + } + return resultObj; + }, {}); + // If we have all the data we need, then we can render the component immediately + cachedState.loading = Object.keys(cachedState).length < requiredKeysForInit.length; + // Object holding the temporary initial state for the component while we load the various Onyx keys + this.tempState = cachedState; + this.state = cachedState; + } + componentDidMount() { + const onyxDataFromState = getOnyxDataFromState(this.state, mapOnyxToState); + // Subscribe each of the state properties to the proper Onyx key + mapOnyxToStateEntries(mapOnyxToState).forEach(([propName, mapping]) => { + if (mappingPropertiesToIgnoreChangesTo.includes(propName)) { + return; + } + const key = Str.result(mapping.key, Object.assign(Object.assign({}, this.props), onyxDataFromState)); + this.connectMappingToOnyx(mapping, propName, key); + }); + this.checkEvictableKeys(); + } + componentDidUpdate(prevProps, prevState) { + // The whole purpose of this method is to check to see if a key that is subscribed to Onyx has changed, and then Onyx needs to be disconnected from the old + // key and connected to the new key. + // For example, a key could change if KeyB depends on data loading from Onyx for KeyA. + const isFirstTimeUpdatingAfterLoading = prevState.loading && !this.state.loading; + const onyxDataFromState = getOnyxDataFromState(this.state, mapOnyxToState); + const prevOnyxDataFromState = getOnyxDataFromState(prevState, mapOnyxToState); + mapOnyxToStateEntries(mapOnyxToState).forEach(([propName, mapping]) => { + // Some properties can be ignored because they aren't related to onyx keys and they will never change + if (mappingPropertiesToIgnoreChangesTo.includes(propName)) { + return; + } + // The previous key comes from either: + // 1) The initial key that was connected to (ie. set from `componentDidMount()`) + // 2) The updated props which caused `componentDidUpdate()` to run + // The first case cannot be used all the time because of race conditions where `componentDidUpdate()` can be triggered before connectingMappingToOnyx() is done + // (eg. if a user switches chats really quickly). In this case, it's much more stable to always look at the changes to prevProp and prevState to derive the key. + // The second case cannot be used all the time because the onyx data doesn't change the first time that `componentDidUpdate()` runs after loading. In this case, + // the `mapping.previousKey` must be used for the comparison or else this logic never detects that onyx data could have changed during the loading process. + const previousKey = isFirstTimeUpdatingAfterLoading ? 
mapping.previousKey : Str.result(mapping.key, Object.assign(Object.assign({}, prevProps), prevOnyxDataFromState)); + const newKey = Str.result(mapping.key, Object.assign(Object.assign({}, this.props), onyxDataFromState)); + if (previousKey !== newKey) { + Onyx_1.default.disconnect(this.activeConnectionIDs[previousKey], previousKey); + delete this.activeConnectionIDs[previousKey]; + this.connectMappingToOnyx(mapping, propName, newKey); + } + }); + this.checkEvictableKeys(); + } + componentWillUnmount() { + // Disconnect everything from Onyx + mapOnyxToStateEntries(mapOnyxToState).forEach(([, mapping]) => { + const key = Str.result(mapping.key, Object.assign(Object.assign({}, this.props), getOnyxDataFromState(this.state, mapOnyxToState))); + Onyx_1.default.disconnect(this.activeConnectionIDs[key], key); + }); + } + setStateProxy(modifier) { + if (this.shouldDelayUpdates) { + this.pendingSetStates.push(modifier); + } + else { + this.setState(modifier); + } + } + /** + * This method is used by the internal raw Onyx `sendDataToConnection`, it is designed to prevent unnecessary renders while a component + * still in a "loading" (read "mounting") state. The temporary initial state is saved to the HOC instance and setState() + * only called once all the necessary data has been collected. + * + * There is however the possibility the component could have been updated by a call to setState() + * before the data was "initially" collected. A race condition. + * For example some update happened on some key, while onyx was still gathering the initial hydration data. + * This update is disptached directly to setStateProxy and therefore the component has the most up-to-date data + * + * This is a design flaw in Onyx itself as dispatching updates before initial hydration is not a correct event flow. + * We however need to workaround this issue in the HOC. The addition of initialValue makes things even more complex, + * since you cannot be really sure if the component has been updated before or after the initial hydration. Therefore if + * initialValue is there, we just check if the update is different than that and then try to handle it as best as we can. + */ + setWithOnyxState(statePropertyName, val) { + const prevVal = this.state[statePropertyName]; + // If the component is not loading (read "mounting"), then we can just update the state + // There is a small race condition. + // When calling setWithOnyxState we delete the tempState object that is used to hold temporary state updates while the HOC is gathering data. + // However the loading flag is only set on the setState callback down below. setState however is an async operation that is also batched, + // therefore there is a small window of time where the loading flag is not false but the tempState is already gone + // (while the update is queued and waiting to be applied). + // This simply bypasses the loading check if the tempState is gone and the update can be safely queued with a normal setStateProxy. + if (!this.state.loading || !this.tempState) { + // Performance optimization, do not trigger update with same values + if (prevVal === val || (utils_1.default.isEmptyObject(prevVal) && utils_1.default.isEmptyObject(val))) { + return; + } + const valueWithoutNull = val === null ? 
undefined : val; + this.setStateProxy({ [statePropertyName]: valueWithoutNull }); + return; + } + this.tempState[statePropertyName] = val; + // If some key does not have a value yet, do not update the state yet + const tempStateIsMissingKey = requiredKeysForInit.some((key) => { var _a; return !(key in ((_a = this.tempState) !== null && _a !== void 0 ? _a : {})); }); + if (tempStateIsMissingKey) { + return; + } + const stateUpdate = Object.assign({}, this.tempState); + delete this.tempState; + // Full of hacky workarounds to prevent the race condition described above. + this.setState((prevState) => { + const finalState = Object.keys(stateUpdate).reduce((result, _key) => { + const key = _key; + if (key === 'loading') { + return result; + } + const initialValue = mapOnyxToState[key].initialValue; + // If initialValue is there and the state contains something different, it means + // an update has already been received and we can discard the value we are trying to hydrate + if (initialValue !== undefined && prevState[key] !== undefined && prevState[key] !== initialValue && prevState[key] !== null) { + // eslint-disable-next-line no-param-reassign + result[key] = prevState[key]; + } + else if (prevState[key] !== undefined && prevState[key] !== null) { + // If the value is already there (without an initial value), then we can discard the value we are trying to hydrate + // eslint-disable-next-line no-param-reassign + result[key] = prevState[key]; + } + else if (stateUpdate[key] !== null) { + // eslint-disable-next-line no-param-reassign + result[key] = stateUpdate[key]; + } + return result; + }, {}); + finalState.loading = false; + return finalState; + }); + } + /** + * Checks the `canEvict` setting of each mapping and updates Onyx's eviction block list accordingly, + * so that keys a mounted component still needs are never evicted from storage. + */ + checkEvictableKeys() { + // If the canEvict function returns true, the key is removed from the eviction block list so Onyx may evict it; + // otherwise the key is added to the block list. This is how we decide whether a key can be removed or not. + mapOnyxToStateEntries(mapOnyxToState).forEach(([, mapping]) => { + if (mapping.canEvict === undefined) { + return; + } + const canEvict = !!Str.result(mapping.canEvict, this.props); + const key = Str.result(mapping.key, this.props); + if (!OnyxUtils_1.default.isSafeEvictionKey(key)) { + throw new Error(`canEvict can't be used on key '${key}'. This key must explicitly be flagged as safe for removal by adding it to Onyx.init({safeEvictionKeys: []}).`); + } + if (canEvict) { + OnyxUtils_1.default.removeFromEvictionBlockList(key, mapping.connectionID); + } + else { + OnyxUtils_1.default.addToEvictionBlockList(key, mapping.connectionID); + } + }); + } + /** + * Takes a single mapping and binds the state of the component to the store + * + * @param mapping.key key to connect to. Can be a string or a + * function that takes this.props as an argument and returns a string + * @param statePropertyName the name of the state property that Onyx will add the data to + * @param [mapping.initWithStoredValues] If set to false, then no data will be prefilled into the + * component + * @param key the Onyx key to connect to + */ + connectMappingToOnyx(mapping, statePropertyName, key) { + const onyxMapping = mapOnyxToState[statePropertyName]; + // Remember what the previous key was so that key changes can be detected when data is being loaded from Onyx. This will allow + // dependent keys to finish loading their data.
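+ // Illustration only (hypothetical keys, not from this library's code): a dependent mapping such as
+ // withOnyx({report: {key: ({reportID}) => `report_${reportID}`}, parentReport: {key: ({report}) => `report_${report?.parentReportID}`}})
+ // can only resolve `parentReport`'s key once `report` has loaded, so previousKey is what lets
+ // componentDidUpdate() notice the recomputed key afterwards and reconnect to it.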
+ // eslint-disable-next-line no-param-reassign + onyxMapping.previousKey = key; + // eslint-disable-next-line rulesdir/prefer-onyx-connect-in-libs + this.activeConnectionIDs[key] = Onyx_1.default.connect(Object.assign(Object.assign({}, mapping), { key, statePropertyName: statePropertyName, withOnyxInstance: this, displayName })); + } + flushPendingSetStates() { + if (!this.shouldDelayUpdates) { + return; + } + this.shouldDelayUpdates = false; + this.pendingSetStates.forEach((modifier) => { + this.setState(modifier); + }); + this.pendingSetStates = []; + } + render() { + // Remove any null values so that React replaces them with default props + const propsToPass = utils_1.default.omit(this.props, ([, propValue]) => propValue === null); + if (this.state.loading) { + return null; + } + // Remove any internal state properties used by withOnyx + // that should not be passed to a wrapped component + const stateToPass = utils_1.default.omit(this.state, ([stateKey, stateValue]) => stateKey === 'loading' || stateValue === null); + // Spreading props and state is necessary in an HOC where the data cannot be predicted + return (react_1.default.createElement(WrappedComponent, Object.assign({ markReadyForHydration: this.flushPendingSetStates }, propsToPass, stateToPass, { ref: this.props.forwardedRef }))); + } + } + withOnyx.displayName = `withOnyx(${displayName})`; + return react_1.default.forwardRef((props, ref) => { + const Component = withOnyx; + return (react_1.default.createElement(Component + // eslint-disable-next-line react/jsx-props-no-spreading + , Object.assign({}, props, { forwardedRef: ref }))); + }); + }; +} +exports.default = default_1; diff --git a/dist/withOnyx/types.d.ts b/dist/withOnyx/types.d.ts new file mode 100644 index 00000000..98533633 --- /dev/null +++ b/dist/withOnyx/types.d.ts @@ -0,0 +1,141 @@ +import type { ForwardedRef } from 'react'; +import type { IsEqual } from 'type-fest'; +import type { CollectionKeyBase, ExtractOnyxCollectionValue, KeyValueMapping, OnyxCollection, OnyxEntry, OnyxKey, OnyxValue, Selector } from '../types'; +/** + * Represents the base mapping options between an Onyx key and the component's prop. + */ +type BaseMapping = { + canEvict?: boolean | ((props: Omit) => boolean); + initWithStoredValues?: boolean; + allowStaleData?: boolean; +}; +/** + * Represents the base mapping options when an Onyx collection key is supplied. + */ +type CollectionBaseMapping = { + initialValue?: OnyxCollection; +}; +/** + * Represents the base mapping options when an Onyx non-collection key is supplied. + */ +type EntryBaseMapping = { + initialValue?: OnyxEntry; +}; +/** + * Represents the string / function `key` mapping option between an Onyx key and the component's prop. + * + * If `key` is `string`, the type of the Onyx value that is associated with `key` must match with the type of the component's prop, + * otherwise an error will be thrown. + * + * If `key` is `function`, the return type of `key` function must be a valid Onyx key and the type of the Onyx value associated + * with `key` must match with the type of the component's prop, otherwise an error will be thrown. + * + * @example + * ```ts + * // Onyx prop with `string` key + * onyxProp: { + * key: ONYXKEYS.ACCOUNT, + * }, + * + * // Onyx prop with `function` key + * onyxProp: { + * key: ({reportId}) => ONYXKEYS.ACCOUNT, + * }, + * ``` + */ +type BaseMappingKey = IsEqual extends true ? 
{ + key: TOnyxKey | ((props: Omit & Partial) => TOnyxKey); +} : never; +/** + * Represents the string `key` and `selector` mapping options between an Onyx key and the component's prop. + * + * The function signature and return type of `selector` must match with the type of the component's prop, + * otherwise an error will be thrown. + * + * @example + * ```ts + * // Onyx prop with `string` key and selector + * onyxProp: { + * key: ONYXKEYS.ACCOUNT, + * selector: (value: Account | null): string => value?.id ?? '', + * }, + * ``` + */ +type BaseMappingStringKeyAndSelector = { + key: TOnyxKey; + selector: Selector; +}; +/** + * Represents the function `key` and `selector` mapping options between an Onyx key and the component's prop. + * + * The function signature and return type of `selector` must match with the type of the component's prop, + * otherwise an error will be thrown. + * + * @example + * ```ts + * // Onyx prop with `function` key and selector + * onyxProp: { + * key: ({reportId}) => ONYXKEYS.ACCOUNT, + * selector: (value: Account | null) => value?.id ?? '', + * }, + * ``` + */ +type BaseMappingFunctionKeyAndSelector = { + key: (props: Omit & Partial) => TOnyxKey; + selector: Selector; +}; +/** + * Represents the mapping options between an Onyx key and the component's prop with all its possibilities. + */ +type Mapping = BaseMapping & EntryBaseMapping & (BaseMappingKey> | BaseMappingStringKeyAndSelector | BaseMappingFunctionKeyAndSelector); +/** + * Represents a superset of `Mapping` type with internal properties included. + */ +type WithOnyxMapping = Mapping & { + connectionID: number; + previousKey?: OnyxKey; +}; +/** + * Represents the mapping options between an Onyx collection key without suffix and the component's prop with all its possibilities. + */ +type CollectionMapping = BaseMapping & CollectionBaseMapping & (BaseMappingKey> | BaseMappingStringKeyAndSelector, TOnyxKey> | BaseMappingFunctionKeyAndSelector, TOnyxKey>); +/** + * Represents a union type of all the possible Onyx key mappings. + * Each `OnyxPropMapping` will be associated with its respective Onyx key, ensuring different type-safety for each object. + */ +type OnyxPropMapping = { + [TOnyxKey in OnyxKey]: Mapping; +}[OnyxKey]; +/** + * Represents a union type of all the possible Onyx collection keys without suffix mappings. + * Each `OnyxPropCollectionMapping` will be associated with its respective Onyx key, ensuring different type-safety for each object. + */ +type OnyxPropCollectionMapping = { + [TOnyxKey in CollectionKeyBase]: CollectionMapping; +}[CollectionKeyBase]; +/** + * Represents an Onyx mapping object that connects Onyx keys to component's props. + */ +type MapOnyxToState = { + [TOnyxProp in keyof TOnyxProps]: OnyxPropMapping | OnyxPropCollectionMapping; +}; +/** + * Represents the `withOnyx` internal component props. + */ +type WithOnyxProps = Omit & { + forwardedRef?: ForwardedRef; +}; +/** + * Represents the `withOnyx` internal component state. + */ +type WithOnyxState = TOnyxProps & { + loading: boolean; +}; +/** + * Represents the `withOnyx` internal component instance.
+ */ +type WithOnyxInstance = React.Component> & { + setStateProxy: (modifier: Record> | ((state: Record>) => OnyxValue)) => void; + setWithOnyxState: (statePropertyName: OnyxKey, value: OnyxValue) => void; +}; +export type { WithOnyxMapping, MapOnyxToState, WithOnyxProps, WithOnyxInstance, WithOnyxState }; diff --git a/dist/withOnyx/types.js b/dist/withOnyx/types.js new file mode 100644 index 00000000..c8ad2e54 --- /dev/null +++ b/dist/withOnyx/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/lib/Onyx.ts b/lib/Onyx.ts index 9a03ac0f..65bfcdce 100644 --- a/lib/Onyx.ts +++ b/lib/Onyx.ts @@ -79,21 +79,21 @@ function init({ * callback: onSessionChange, * }); * - * @param connectOptions the mapping information to connect Onyx to the components state - * @param connectOptions.key ONYXKEY to subscribe to - * @param [connectOptions.statePropertyName] the name of the property in the state to connect the data to - * @param [connectOptions.withOnyxInstance] whose setState() method will be called with any changed data + * @param mapping the mapping information to connect Onyx to the components state + * @param mapping.key ONYXKEY to subscribe to + * @param [mapping.statePropertyName] the name of the property in the state to connect the data to + * @param [mapping.withOnyxInstance] whose setState() method will be called with any changed data * This is used by React components to connect to Onyx - * @param [connectOptions.callback] a method that will be called with changed data + * @param [mapping.callback] a method that will be called with changed data * This is used by any non-React code to connect to Onyx - * @param [connectOptions.initWithStoredValues] If set to false, then no data will be prefilled into the + * @param [mapping.initWithStoredValues] If set to false, then no data will be prefilled into the * component - * @param [connectOptions.waitForCollectionCallback] If set to true, it will return the entire collection to the callback as a single object - * @param [connectOptions.selector] THIS PARAM IS ONLY USED WITH withOnyx(). If included, this will be used to subscribe to a subset of an Onyx key's data. + * @param [mapping.waitForCollectionCallback] If set to true, it will return the entire collection to the callback as a single object + * @param [mapping.selector] THIS PARAM IS ONLY USED WITH withOnyx(). If included, this will be used to subscribe to a subset of an Onyx key's data. * The sourceData and withOnyx state are passed to the selector and should return the simplified data. Using this setting on `withOnyx` can have very positive * performance benefits because the component will only re-render when the subset of data changes. Otherwise, any change of data on any property would normally * cause the component to re-render (and that can be expensive from a performance standpoint). - * @param [connectOptions.initialValue] THIS PARAM IS ONLY USED WITH withOnyx(). + * @param [mapping.initialValue] THIS PARAM IS ONLY USED WITH withOnyx(). * If included, this will be passed to the component so that something can be rendered while data is being fetched from the DB. * Note that it will not cause the component to have the loading prop set to true. * @returns an ID to use when calling disconnect @@ -187,7 +187,7 @@ function connect(connectOptions: ConnectOptions): nu } /** - * Remove the listener for a React component + * Remove the listener for a react component * @example * Onyx.disconnect(connectionID); *