From 7d77faccd80dc99f00d7757578a7e9be9cb757c4 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 9 Oct 2024 12:17:12 -0300 Subject: [PATCH 01/20] Draft implementation of loadData and getSnapshot methods --- CHANGES.txt | 2 + src/sdkFactory/index.ts | 2 +- src/storages/__tests__/dataLoader.spec.ts | 31 +++++++ src/storages/dataLoader.ts | 94 ++++++++++++++-------- src/storages/inMemory/InMemoryStorageCS.ts | 23 +++++- src/storages/types.ts | 2 - src/types.ts | 17 ++-- 7 files changed, 124 insertions(+), 47 deletions(-) create mode 100644 src/storages/__tests__/dataLoader.spec.ts diff --git a/CHANGES.txt b/CHANGES.txt index 4c333159..0ab19c90 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,6 +1,8 @@ 2.0.0 (October XX, 2024) - Added support for targeting rules based on large segments. - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory. + - Added `factory.getState()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. + - Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow. - Updated the handling of timers and async operations by moving them into an `init` factory method to enable lazy initialization of the SDK. This update is intended for the React SDK. - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready. diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 41706cc6..c09b6fd6 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -56,7 +56,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); } }); - // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` + const clients: Record = {}; const telemetryTracker = telemetryTrackerFactory(storage.telemetry, platform.now); const integrationsManager = integrationsManagerFactory && integrationsManagerFactory({ settings, storage, telemetryTracker }); diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts new file mode 100644 index 00000000..c9f77849 --- /dev/null +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -0,0 +1,31 @@ +import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage'; +import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; +import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; + +import * as dataLoader from '../dataLoader'; + +test('loadData & getSnapshot', () => { + jest.spyOn(dataLoader, 'loadData'); + const onReadyFromCacheCb = jest.fn(); + // @ts-expect-error + const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); + serverStorage.splits.setChangeNumber(123); // @ts-expect-error + serverStorage.splits.addSplits([['split1', { name: 'split1' }]]); + serverStorage.segments.addToSegment('segment1', [fullSettings.core.key as string]); + + const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]); + + // @ts-expect-error + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb }); + + // Assert + expect(dataLoader.loadData).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + expect(dataLoader.getSnapshot(clientStorage, [fullSettings.core.key as string])).toEqual(preloadedData); + expect(preloadedData).toEqual({ + since: 123, + splitsData: [{ name: 'split1' }], + mySegmentsData: { [fullSettings.core.key as string]: ['segment1'] }, + segmentsData: undefined + }); +}); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 24898d68..7b44df91 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,55 +1,85 @@ import { SplitIO } from '../types'; -import { DEFAULT_CACHE_EXPIRATION_IN_MILLIS } from '../utils/constants/browser'; -import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types'; +import { ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; +import { setToArray, ISet } from '../utils/lang/sets'; +import { getMatching } from '../utils/key'; /** - * Factory of client-side storage loader + * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function + * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) * - * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader - * and extended with a `mySegmentsData` property. - * @returns function to preload the storage + * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader and extended with a `mySegmentsData` property. + * @param storage object containing `splits` and `segments` cache (client-side variant) + * @param userKey user key (matching key) of the provided MySegmentsCache + * + * @TODO extend to load largeSegments + * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. 
+ * @TODO add logs, and input validation in this module, in favor of size reduction. + * @TODO unit tests */ -export function dataLoaderFactory(preloadedData: SplitIO.PreloadedData): DataLoader { - - /** - * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function - * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) - * - * @param storage object containing `splits` and `segments` cache (client-side variant) - * @param userId user key string of the provided MySegmentsCache - * - * @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic. - * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. - */ - return function loadData(storage: { splits: ISplitsCacheSync, segments: ISegmentsCacheSync }, userId: string) { - // Do not load data if current preloadedData is empty - if (Object.keys(preloadedData).length === 0) return; - - const { lastUpdated = -1, segmentsData = {}, since = -1, splitsData = {} } = preloadedData; +export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { + // Do not load data if current preloadedData is empty + if (Object.keys(preloadedData).length === 0) return; + + const { segmentsData = {}, since = -1, splitsData = [] } = preloadedData; + if (storage.splits) { const storedSince = storage.splits.getChangeNumber(); - const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS; - // Do not load data if current localStorage data is more recent, - // or if its `lastUpdated` timestamp is older than the given `expirationTimestamp`, - if (storedSince > since || lastUpdated < expirationTimestamp) return; + // Do not load data if current data is more recent + if (storedSince > since) return; // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data storage.splits.clear(); storage.splits.setChangeNumber(since); // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.splits.addSplits(Object.keys(splitsData).map(splitName => JSON.parse(splitsData[splitName]))); + storage.splits.addSplits(splitsData.map(split => ([split.name, split]))); + } - // add mySegments data - let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[userId]; + if (matchingKey) { // add mySegments data (client-side) + let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[matchingKey]; if (!mySegmentsData) { // segmentsData in an object where the property is the segment name and the pertaining value is a stringified object that contains the `added` array of userIds mySegmentsData = Object.keys(segmentsData).filter(segmentName => { - const userIds = JSON.parse(segmentsData[segmentName]).added; - return Array.isArray(userIds) && userIds.indexOf(userId) > -1; + const matchingKeys = segmentsData[segmentName]; + return matchingKeys.indexOf(matchingKey) > -1; }); } storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); + } else { // add segments data (server-side) + Object.keys(segmentsData).filter(segmentName => { + const matchingKeys = segmentsData[segmentName]; + storage.segments.addToSegment(segmentName, matchingKeys); + }); + } +} + 
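For reference, a minimal sketch of the preloaded data object that this `loadData` signature consumes at this stage, following the `PreloadedData` type updated later in this patch. The concrete values, the empty flag list, and the `clientStorage` variable are illustrative only; imports are as in the surrounding module.

    // Flags are parsed ISplit objects; segments are plain lists of keys.
    const preloadedData: SplitIO.PreloadedData = {
      since: 1696867200000,                                      // change number of the snapshot
      splitsData: [],                                            // full ISplit definitions, e.g. produced by getSnapshot() below
      mySegmentsData: { 'user-123': ['beta_testers'] },          // per-key segment names (client-side)
      segmentsData: { 'beta_testers': ['user-123', 'user-456'] } // segment name -> keys, used as fallback when mySegmentsData is absent
    };

    // Hydrate a client-side storage (e.g. an InMemoryStorageCSFactory result, as in the spec file above) for one matching key:
    loadData(preloadedData, clientStorage, 'user-123');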
+export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { + return { + // lastUpdated: Date.now(), + since: storage.splits.getChangeNumber(), + splitsData: storage.splits.getAll(), + segmentsData: userKeys ? + undefined : // @ts-ignore accessing private prop + Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop + prev[cur] = setToArray(storage.segments.segmentCache[cur] as ISet); + return prev; + }, {}), + mySegmentsData: userKeys ? + userKeys.reduce>((prev, userKey) => { + prev[getMatching(userKey)] = storage.shared ? + // Client-side segments + // @ts-ignore accessing private prop + Object.keys(storage.shared(userKey).segments.segmentCache) : + // Server-side segments + // @ts-ignore accessing private prop + Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop + return storage.segments.segmentCache[segmentName].has(userKey) ? + prev.concat(segmentName) : + prev; + }, []); + return prev; + }, {}) : + undefined }; } diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index 30667369..670b91f1 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -7,6 +7,8 @@ import { ImpressionCountsCacheInMemory } from './ImpressionCountsCacheInMemory'; import { DEBUG, LOCALHOST_MODE, NONE, STORAGE_MEMORY } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; +import { getMatching } from '../../utils/key'; +import { loadData } from '../dataLoader'; /** * InMemory storage factory for standalone client-side SplitFactory @@ -14,7 +16,7 @@ import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; * @param params parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params; + const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params; const splits = new SplitsCacheInMemory(__splitFiltersValidation); const segments = new MySegmentsCacheInMemory(); @@ -42,11 +44,18 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag }, // When using shared instanciation with MEMORY we reuse everything but segments (they are unique per key) - shared() { + shared(matchingKey: string) { + const segments = new MySegmentsCacheInMemory(); + const largeSegments = new MySegmentsCacheInMemory(); + + if (preloadedData) { + loadData(preloadedData, { segments, largeSegments }, matchingKey); + } + return { splits: this.splits, - segments: new MySegmentsCacheInMemory(), - largeSegments: new MySegmentsCacheInMemory(), + segments, + largeSegments, impressions: this.impressions, impressionCounts: this.impressionCounts, events: this.events, @@ -72,6 +81,12 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag if (storage.uniqueKeys) storage.uniqueKeys.track = noopTrack; } + + if (preloadedData) { + loadData(preloadedData, storage, getMatching(params.settings.core.key)); + if (splits.getChangeNumber() > -1) onReadyFromCacheCb(); + } + return storage; } 
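To make the `onReadyFromCacheCb` wiring above concrete: when a factory is created with `preloadedData`, the storage is hydrated synchronously and SDK_READY_FROM_CACHE is emitted right away. A minimal consumer-side sketch, assuming the browser SDK entry point and a `snapshotFromServer` variable; the flag name and user key are placeholders, and only the `preloadedData` option and the event itself come from this patch.

    import { SplitFactory } from '@splitsoftware/splitio-browserjs'; // assumed entry point

    const factory = SplitFactory({
      core: { authorizationKey: '<client-side-sdk-key>', key: 'user-123' },
      preloadedData: snapshotFromServer // snapshot produced by a server-side SDK, e.g. via getSnapshot()
    });

    const client = factory.client();
    client.on(client.Event.SDK_READY_FROM_CACHE, () => {
      // Evaluations are already backed by the preloaded rollout plan at this point.
      console.log(client.getTreatment('onboarding_flow'));
    });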
diff --git a/src/storages/types.ts b/src/storages/types.ts index 61ab10f2..21945587 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -492,8 +492,6 @@ export interface IStorageAsync extends IStorageBase< /** StorageFactory */ -export type DataLoader = (storage: IStorageSync, matchingKey: string) => void - export interface IStorageFactoryParams { settings: ISettings, /** diff --git a/src/types.ts b/src/types.ts index 2a65b297..777b3258 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,4 +1,4 @@ -import { ISplitFiltersValidation } from './dtos/types'; +import { ISplit, ISplitFiltersValidation } from './dtos/types'; import { IIntegration, IIntegrationFactoryParams } from './integrations/types'; import { ILogger } from './logger/types'; import { ISdkFactoryContext } from './sdkFactory/types'; @@ -98,6 +98,7 @@ export interface ISettings { eventsFirstPushWindow: number }, readonly storage: IStorageSyncFactory | IStorageAsyncFactory, + readonly preloadedData?: SplitIO.PreloadedData, readonly integrations: Array<{ readonly type: string, (params: IIntegrationFactoryParams): IIntegration | void @@ -771,21 +772,20 @@ export namespace SplitIO { * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. * @TODO configurable expiration time policy? */ - lastUpdated: number, + // lastUpdated: number, /** * Change number of the preloaded data. * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. */ since: number, /** - * Map of feature flags to their stringified definitions. + * List of feature flag definitions. + * @TODO rename to flags */ - splitsData: { - [splitName: string]: string - }, + splitsData: ISplit[], /** * Optional map of user keys to their list of segments. - * @TODO remove when releasing first version + * @TODO rename to memberships */ mySegmentsData?: { [key: string]: string[] @@ -793,9 +793,10 @@ export namespace SplitIO { /** * Optional map of segments to their stringified definitions. * This property is ignored if `mySegmentsData` was provided. 
+ * @TODO rename to segments */ segmentsData?: { - [segmentName: string]: string + [segmentName: string]: string[] }, } /** From b8b12cddf0351cff789a069391ba1ef42887f19f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 18:16:35 -0300 Subject: [PATCH 02/20] Update data loader to support memberships --- src/sdkFactory/index.ts | 7 ++- src/storages/__tests__/dataLoader.spec.ts | 4 +- src/storages/dataLoader.ts | 77 ++++++++++++++++------- src/storages/types.ts | 4 ++ src/types.ts | 12 ++-- 5 files changed, 70 insertions(+), 34 deletions(-) diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 5ab47ddf..0e9feda4 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -7,7 +7,7 @@ import { IBasicClient, SplitIO } from '../types'; import { validateAndTrackApiKey } from '../utils/inputValidation/apiKey'; import { createLoggerAPI } from '../logger/sdkLogger'; import { NEW_FACTORY, RETRIEVE_MANAGER } from '../logger/constants'; -import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; +import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_SPLITS_CACHE_LOADED } from '../readiness/constants'; import { objectAssign } from '../utils/lang/objectAssign'; import { strategyDebugFactory } from '../trackers/strategy/strategyDebug'; import { strategyOptimizedFactory } from '../trackers/strategy/strategyOptimized'; @@ -43,7 +43,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. const storage = storageFactory({ settings, - onReadyCb: (error) => { + onReadyCb(error) { if (error) { // If storage fails to connect, SDK_READY_TIMED_OUT event is emitted immediately. Review when timeout and non-recoverable errors are reworked readiness.timeout(); @@ -52,6 +52,9 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO. 
readiness.splits.emit(SDK_SPLITS_ARRIVED); readiness.segments.emit(SDK_SEGMENTS_ARRIVED); }, + onReadyFromCacheCb() { + readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); + } }); const clients: Record = {}; diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index c9f77849..522feb99 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -11,7 +11,7 @@ test('loadData & getSnapshot', () => { const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); serverStorage.splits.setChangeNumber(123); // @ts-expect-error serverStorage.splits.addSplits([['split1', { name: 'split1' }]]); - serverStorage.segments.addToSegment('segment1', [fullSettings.core.key as string]); + serverStorage.segments.update('segment1', [fullSettings.core.key as string], [], 123); const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]); @@ -25,7 +25,7 @@ test('loadData & getSnapshot', () => { expect(preloadedData).toEqual({ since: 123, splitsData: [{ name: 'split1' }], - mySegmentsData: { [fullSettings.core.key as string]: ['segment1'] }, + membershipsData: { [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } }, segmentsData: undefined }); }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 7b44df91..4efabcc9 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,7 +1,8 @@ import { SplitIO } from '../types'; import { ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; -import { setToArray, ISet } from '../utils/lang/sets'; +import { setToArray } from '../utils/lang/sets'; import { getMatching } from '../utils/key'; +import { IMembershipsResponse, IMySegmentsResponse } from '../dtos/types'; /** * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function @@ -37,19 +38,26 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits } if (matchingKey) { // add mySegments data (client-side) - let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[matchingKey]; - if (!mySegmentsData) { - // segmentsData in an object where the property is the segment name and the pertaining value is a stringified object that contains the `added` array of userIds - mySegmentsData = Object.keys(segmentsData).filter(segmentName => { - const matchingKeys = segmentsData[segmentName]; - return matchingKeys.indexOf(matchingKey) > -1; - }); + let membershipsData = preloadedData.membershipsData && preloadedData.membershipsData[matchingKey]; + if (!membershipsData && segmentsData) { + membershipsData = { + ms: { + k: Object.keys(segmentsData).filter(segmentName => { + const segmentKeys = segmentsData[segmentName]; + return segmentKeys.indexOf(matchingKey) > -1; + }).map(segmentName => ({ n: segmentName })) + } + }; } - storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); + if (membershipsData) { + if (membershipsData.ms) storage.segments.resetSegments(membershipsData.ms); + if (membershipsData.ls && storage.largeSegments) storage.largeSegments.resetSegments(membershipsData.ls); + } + } else { // add segments data (server-side) - Object.keys(segmentsData).filter(segmentName => { - const matchingKeys = segmentsData[segmentName]; - storage.segments.addToSegment(segmentName, matchingKeys); + Object.keys(segmentsData).forEach(segmentName => { + const segmentKeys = segmentsData[segmentName]; + storage.segments.update(segmentName, 
segmentKeys, [], -1); }); } } @@ -62,22 +70,43 @@ export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[] segmentsData: userKeys ? undefined : // @ts-ignore accessing private prop Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop - prev[cur] = setToArray(storage.segments.segmentCache[cur] as ISet); + prev[cur] = setToArray(storage.segments.segmentCache[cur] as Set); return prev; }, {}), - mySegmentsData: userKeys ? - userKeys.reduce>((prev, userKey) => { - prev[getMatching(userKey)] = storage.shared ? + membershipsData: userKeys ? + userKeys.reduce>((prev, userKey) => { + if (storage.shared) { // Client-side segments // @ts-ignore accessing private prop - Object.keys(storage.shared(userKey).segments.segmentCache) : - // Server-side segments - // @ts-ignore accessing private prop - Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop - return storage.segments.segmentCache[segmentName].has(userKey) ? - prev.concat(segmentName) : - prev; - }, []); + const sharedStorage = storage.shared(userKey); + prev[getMatching(userKey)] = { + ms: { + // @ts-ignore accessing private prop + k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), + // cn: sharedStorage.segments.getChangeNumber() + }, + ls: sharedStorage.largeSegments ? { + // @ts-ignore accessing private prop + k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), + // cn: sharedStorage.largeSegments.getChangeNumber() + } : undefined + }; + } else { + prev[getMatching(userKey)] = { + ms: { + // Server-side segments + // @ts-ignore accessing private prop + k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop + return storage.segments.segmentCache[segmentName].has(userKey) ? + prev!.concat({ n: segmentName }) : + prev; + }, []) + }, + ls: { + k: [] + } + }; + } return prev; }, {}) : undefined diff --git a/src/storages/types.ts b/src/storages/types.ts index 08a9d387..8be2c731 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -495,6 +495,10 @@ export interface IStorageFactoryParams { * It is meant for emitting SDK_READY event in consumer mode, and waiting before using the storage in the synchronizer. */ onReadyCb: (error?: any) => void, + /** + * It is meant for emitting SDK_READY_FROM_CACHE event in standalone mode with preloaded data + */ + onReadyFromCacheCb: () => void, } export type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS' | 'PLUGGABLE'; diff --git a/src/types.ts b/src/types.ts index 92a44112..9b77ced5 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,4 +1,4 @@ -import { ISplit, ISplitFiltersValidation } from './dtos/types'; +import { IMembershipsResponse, ISplit, ISplitFiltersValidation } from './dtos/types'; import { IIntegration, IIntegrationFactoryParams } from './integrations/types'; import { ILogger } from './logger/types'; import { ISdkFactoryContext } from './sdkFactory/types'; @@ -783,15 +783,15 @@ export namespace SplitIO { */ splitsData: ISplit[], /** - * Optional map of user keys to their list of segments. + * Optional map of user keys to their memberships. * @TODO rename to memberships */ - mySegmentsData?: { - [key: string]: string[] + membershipsData?: { + [key: string]: IMembershipsResponse }, /** - * Optional map of segments to their stringified definitions. - * This property is ignored if `mySegmentsData` was provided. 
+ * Optional map of segments to their list of keys. + * This property is ignored if `membershipsData` was provided. * @TODO rename to segments */ segmentsData?: { From 325ecdaf586d6d2d0a9e667890694b42bad2ae25 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 18 Oct 2024 18:25:47 -0300 Subject: [PATCH 03/20] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 8bc19544..6cf6d6f5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.1", + "version": "2.0.0-rc.2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.1", + "version": "2.0.0-rc.2", "license": "Apache-2.0", "dependencies": { "tslib": "^2.3.1" diff --git a/package.json b/package.json index 870e561c..4ddeb4c8 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.0.0-rc.1", + "version": "2.0.0-rc.2", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From 60ccbf8e77994b55a3835558af1d3c2d4593bc0f Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 22 Aug 2025 19:08:32 -0300 Subject: [PATCH 04/20] Add RBSegments --- src/storages/__tests__/dataLoader.spec.ts | 5 +++- src/storages/dataLoader.ts | 25 +++++++++++++++---- .../inLocalStorage/RBSegmentsCacheInLocal.ts | 4 +++ .../inMemory/RBSegmentsCacheInMemory.ts | 4 +++ src/storages/types.ts | 1 + types/splitio.d.ts | 7 ++++-- 6 files changed, 38 insertions(+), 8 deletions(-) diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index f7ba9e97..d1ab77c8 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -9,7 +9,8 @@ test('loadData & getSnapshot', () => { const onReadyFromCacheCb = jest.fn(); // @ts-expect-error const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); // @ts-expect-error - serverStorage.splits.update([{ name: 'split1' }], [], 123); + serverStorage.splits.update([{ name: 'split1' }], [], 123); // @ts-expect-error + serverStorage.rbSegments.update([{ name: 'rbs1' }], [], 321); serverStorage.segments.update('segment1', [fullSettings.core.key as string], [], 123); const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]); @@ -24,6 +25,8 @@ test('loadData & getSnapshot', () => { expect(preloadedData).toEqual({ since: 123, flags: [{ name: 'split1' }], + rbSince: 321, + rbSegments: [{ name: 'rbs1' }], memberships: { [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } }, segments: undefined }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 7550de05..c51986b9 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -1,8 +1,8 @@ import SplitIO from '../../types/splitio'; -import { ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; +import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; import { setToArray } from '../utils/lang/sets'; import { getMatching } from '../utils/key'; -import { IMembershipsResponse, IMySegmentsResponse, ISplit } from '../dtos/types'; +import { IMembershipsResponse, IMySegmentsResponse, IRBSegment, ISplit } from '../dtos/types'; /** * @@ -10,16 +10,17 @@ import { 
IMembershipsResponse, IMySegmentsResponse, ISplit } from '../dtos/types * @param storage - object containing `splits` and `segments` cache (client-side variant) * @param userKey - user key (matching key) of the provided MySegmentsCache * + * @TODO load data even if current data is more recent? * @TODO extend to load largeSegments * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. * @TODO add logs, and input validation in this module, in favor of size reduction. * @TODO unit tests */ -export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { +export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { // Do not load data if current preloadedData is empty if (Object.keys(preloadedData).length === 0) return; - const { segments = {}, since = -1, flags = [] } = preloadedData; + const { segments = {}, since = -1, flags = [], rbSince = -1, rbSegments = [] } = preloadedData; if (storage.splits) { const storedSince = storage.splits.getChangeNumber(); @@ -34,6 +35,19 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits storage.splits.update(flags as ISplit[], [], since); } + if (storage.rbSegments) { + const storedSince = storage.rbSegments.getChangeNumber(); + + // Do not load data if current data is more recent + if (storedSince > rbSince) return; + + // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data + storage.rbSegments.clear(); + + // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data + storage.rbSegments.update(rbSegments as IRBSegment[], [], rbSince); + } + if (matchingKey) { // add memberships data (client-side) let memberships = preloadedData.memberships && preloadedData.memberships[matchingKey]; if (!memberships && segments) { @@ -61,9 +75,10 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { return { - // lastUpdated: Date.now(), since: storage.splits.getChangeNumber(), flags: storage.splits.getAll(), + rbSince: storage.rbSegments.getChangeNumber(), + rbSegments: storage.rbSegments.getAll(), segments: userKeys ? 
undefined : // @ts-ignore accessing private prop Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop diff --git a/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts b/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts index 37f6ad8e..cfc68cf5 100644 --- a/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts +++ b/src/storages/inLocalStorage/RBSegmentsCacheInLocal.ts @@ -105,6 +105,10 @@ export class RBSegmentsCacheInLocal implements IRBSegmentsCacheSync { return item && JSON.parse(item); } + getAll(): IRBSegment[] { + return this.getNames().map(key => this.get(key)!); + } + contains(names: Set): boolean { const namesArray = setToArray(names); const namesInStorage = this.getNames(); diff --git a/src/storages/inMemory/RBSegmentsCacheInMemory.ts b/src/storages/inMemory/RBSegmentsCacheInMemory.ts index 568b0deb..2b876202 100644 --- a/src/storages/inMemory/RBSegmentsCacheInMemory.ts +++ b/src/storages/inMemory/RBSegmentsCacheInMemory.ts @@ -51,6 +51,10 @@ export class RBSegmentsCacheInMemory implements IRBSegmentsCacheSync { return this.cache[name] || null; } + getAll(): IRBSegment[] { + return this.getNames().map(key => this.get(key)!); + } + contains(names: Set): boolean { const namesArray = setToArray(names); const namesInStorage = this.getNames(); diff --git a/src/storages/types.ts b/src/storages/types.ts index 1721360a..553722c7 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -235,6 +235,7 @@ export interface IRBSegmentsCacheSync extends IRBSegmentsCacheBase { update(toAdd: IRBSegment[], toRemove: IRBSegment[], changeNumber: number): boolean, get(name: string): IRBSegment | null, getChangeNumber(): number, + getAll(): IRBSegment[], clear(): void, contains(names: Set): boolean, // Used only for smart pausing in client-side standalone. Returns true if the storage contains a RBSegment using segments or large segments matchers diff --git a/types/splitio.d.ts b/types/splitio.d.ts index aa4bda79..a094f67e 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1031,14 +1031,17 @@ declare namespace SplitIO { */ type PreloadedData = { /** - * Change number of the preloaded data. - * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. + * Change number of feature flags. */ since: number; /** * List of feature flags. */ flags: Object[], + /** + * Change number of rule-based segments. + */ + rbSince?: number, /** * List of rule-based segments. */ From e618b7fb807949f28b31b2bfa48071b22c514000 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 26 Aug 2025 09:33:15 -0300 Subject: [PATCH 05/20] Polishing --- CHANGES.txt | 2 +- src/storages/__tests__/dataLoader.spec.ts | 96 ++++++++++++++++------ src/storages/dataLoader.ts | 89 +++++++++----------- src/storages/inMemory/InMemoryStorageCS.ts | 48 ++++++----- src/storages/types.ts | 2 +- types/splitio.d.ts | 7 ++ 6 files changed, 147 insertions(+), 97 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 336e80c9..66d95bb4 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,5 +1,5 @@ 2.5.0 (August XX, 2025) - - Added `factory.getState()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. + - Added `factory.getCache()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. 
- Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow. diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index d1ab77c8..2ef06f96 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -1,33 +1,81 @@ import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage'; import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; +import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; +import { IRBSegment, ISplit } from '../../dtos/types'; import * as dataLoader from '../dataLoader'; -test('loadData & getSnapshot', () => { - jest.spyOn(dataLoader, 'loadData'); +describe('setCache & getCache', () => { + jest.spyOn(dataLoader, 'setCache'); const onReadyFromCacheCb = jest.fn(); - // @ts-expect-error - const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); // @ts-expect-error - serverStorage.splits.update([{ name: 'split1' }], [], 123); // @ts-expect-error - serverStorage.rbSegments.update([{ name: 'rbs1' }], [], 321); - serverStorage.segments.update('segment1', [fullSettings.core.key as string], [], 123); - - const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]); - - // @ts-expect-error - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb }); - - // Assert - expect(dataLoader.loadData).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); - expect(dataLoader.getSnapshot(clientStorage, [fullSettings.core.key as string])).toEqual(preloadedData); - expect(preloadedData).toEqual({ - since: 123, - flags: [{ name: 'split1' }], - rbSince: 321, - rbSegments: [{ name: 'rbs1' }], - memberships: { [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } }, - segments: undefined + const onReadyCb = jest.fn(); + + const otherKey = 'otherKey'; + + // @ts-expect-error Load server-side storage + const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); + serverStorage.splits.update([{ name: 'split1' } as ISplit], [], 123); + serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); + serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); + + afterEach(() => { + jest.clearAllMocks(); + }); + + test('using preloaded data with memberships', () => { + const preloadedData = dataLoader.getCache(loggerMock, serverStorage, [fullSettings.core.key as string, otherKey]); + + // Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setCache).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + expect(dataLoader.setCache).toBeCalledTimes(2); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + + // Get preloaded data from client-side storage + expect(dataLoader.getCache(loggerMock, 
clientStorage, [fullSettings.core.key as string, otherKey])).toEqual(preloadedData); + expect(preloadedData).toEqual({ + since: 123, + flags: [{ name: 'split1' }], + rbSince: 321, + rbSegments: [{ name: 'rbs1' }], + memberships: { + [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, + [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } + }, + segments: undefined + }); + }); + + test('using preloaded data with segments', () => { + const preloadedData = dataLoader.getCache(loggerMock, serverStorage); + + // Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setCache).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + expect(dataLoader.setCache).toBeCalledTimes(2); + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + + expect(preloadedData).toEqual({ + since: 123, + flags: [{ name: 'split1' }], + rbSince: 321, + rbSegments: [{ name: 'rbs1' }], + memberships: undefined, + segments: { + segment1: [fullSettings.core.key as string, otherKey] + } + }); }); }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index c51986b9..e5ec31c4 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -3,58 +3,39 @@ import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync, IStorageSyn import { setToArray } from '../utils/lang/sets'; import { getMatching } from '../utils/key'; import { IMembershipsResponse, IMySegmentsResponse, IRBSegment, ISplit } from '../dtos/types'; +import { ILogger } from '../logger/types'; /** - * - * @param preloadedData - validated data - * @param storage - object containing `splits` and `segments` cache (client-side variant) - * @param userKey - user key (matching key) of the provided MySegmentsCache - * - * @TODO load data even if current data is more recent? - * @TODO extend to load largeSegments - * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. - * @TODO add logs, and input validation in this module, in favor of size reduction. - * @TODO unit tests + * Sets the given synchronous storage with the provided preloaded data snapshot. + * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). + * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). 
*/ -export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { +export function setCache(log: ILogger, preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { // Do not load data if current preloadedData is empty if (Object.keys(preloadedData).length === 0) return; - const { segments = {}, since = -1, flags = [], rbSince = -1, rbSegments = [] } = preloadedData; + const { splits, rbSegments, segments, largeSegments } = storage; - if (storage.splits) { - const storedSince = storage.splits.getChangeNumber(); + log.debug(`set cache${matchingKey ? ` for key ${matchingKey}` : ''}`); - // Do not load data if current data is more recent - if (storedSince > since) return; - - // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data - storage.splits.clear(); - - // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.splits.update(flags as ISplit[], [], since); + if (splits) { + splits.clear(); + splits.update(preloadedData.flags as ISplit[] || [], [], preloadedData.since || -1); } - if (storage.rbSegments) { - const storedSince = storage.rbSegments.getChangeNumber(); - - // Do not load data if current data is more recent - if (storedSince > rbSince) return; - - // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data - storage.rbSegments.clear(); - - // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.rbSegments.update(rbSegments as IRBSegment[], [], rbSince); + if (rbSegments) { + rbSegments.clear(); + rbSegments.update(preloadedData.rbSegments as IRBSegment[] || [], [], preloadedData.rbSince || -1); } + const segmentsData = preloadedData.segments || {}; if (matchingKey) { // add memberships data (client-side) let memberships = preloadedData.memberships && preloadedData.memberships[matchingKey]; - if (!memberships && segments) { + if (!memberships && segmentsData) { memberships = { ms: { - k: Object.keys(segments).filter(segmentName => { - const segmentKeys = segments[segmentName]; + k: Object.keys(segmentsData).filter(segmentName => { + const segmentKeys = segmentsData[segmentName]; return segmentKeys.indexOf(matchingKey) > -1; }).map(segmentName => ({ n: segmentName })) } @@ -62,54 +43,60 @@ export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits } if (memberships) { - if ((memberships as IMembershipsResponse).ms) storage.segments.resetSegments((memberships as IMembershipsResponse).ms!); - if ((memberships as IMembershipsResponse).ls && storage.largeSegments) storage.largeSegments.resetSegments((memberships as IMembershipsResponse).ls!); + if ((memberships as IMembershipsResponse).ms) segments.resetSegments((memberships as IMembershipsResponse).ms!); + if ((memberships as IMembershipsResponse).ls && largeSegments) largeSegments.resetSegments((memberships as IMembershipsResponse).ls!); } } else { // add segments data (server-side) - Object.keys(segments).forEach(segmentName => { - const segmentKeys = segments[segmentName]; - storage.segments.update(segmentName, segmentKeys, [], -1); + 
Object.keys(segmentsData).forEach(segmentName => { + const segmentKeys = segmentsData[segmentName]; + segments.update(segmentName, segmentKeys, [], -1); }); } } -export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { +/** + * Gets the preloaded data snapshot from the given synchronous storage. + * If `keys` are provided, the memberships for those keys is returned, to protect segments data. + * Otherwise, the segments data is returned. + */ +export function getCache(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { + + log.debug(`get cache${keys ? ` for keys ${keys}` : ''}`); + return { since: storage.splits.getChangeNumber(), flags: storage.splits.getAll(), rbSince: storage.rbSegments.getChangeNumber(), rbSegments: storage.rbSegments.getAll(), - segments: userKeys ? + segments: keys ? undefined : // @ts-ignore accessing private prop Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop prev[cur] = setToArray(storage.segments.segmentCache[cur] as Set); return prev; }, {}), - memberships: userKeys ? - userKeys.reduce>((prev, userKey) => { + memberships: keys ? + keys.reduce>((prev, key) => { if (storage.shared) { // Client-side segments // @ts-ignore accessing private prop - const sharedStorage = storage.shared(userKey); - prev[getMatching(userKey)] = { + const sharedStorage = storage.shared(key); + prev[getMatching(key)] = { ms: { // @ts-ignore accessing private prop k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), - // cn: sharedStorage.segments.getChangeNumber() }, ls: sharedStorage.largeSegments ? { // @ts-ignore accessing private prop k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), - // cn: sharedStorage.largeSegments.getChangeNumber() } : undefined }; } else { - prev[getMatching(userKey)] = { + prev[getMatching(key)] = { ms: { // Server-side segments // @ts-ignore accessing private prop k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop - return storage.segments.segmentCache[segmentName].has(userKey) ? + return storage.segments.segmentCache[segmentName].has(key) ? 
prev!.concat({ n: segmentName }) : prev; }, []) diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index dacfab31..c8c47501 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -8,7 +8,7 @@ import { LOCALHOST_MODE, STORAGE_MEMORY } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; import { getMatching } from '../../utils/key'; -import { loadData } from '../dataLoader'; +import { setCache } from '../dataLoader'; import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; /** @@ -17,7 +17,9 @@ import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; * @param params - parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params; + const { settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params; + + const storages: Record = {}; const splits = new SplitsCacheInMemory(__splitFiltersValidation); const rbSegments = new RBSegmentsCacheInMemory(); @@ -39,26 +41,30 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag // When using shared instantiation with MEMORY we reuse everything but segments (they are unique per key) shared(matchingKey: string) { - const segments = new MySegmentsCacheInMemory(); - const largeSegments = new MySegmentsCacheInMemory(); + if (!storages[matchingKey]) { + const segments = new MySegmentsCacheInMemory(); + const largeSegments = new MySegmentsCacheInMemory(); - if (preloadedData) { - loadData(preloadedData, { segments, largeSegments }, matchingKey); - } + if (preloadedData) { + setCache(log, preloadedData, { segments, largeSegments }, matchingKey); + } - return { - splits: this.splits, - rbSegments: this.rbSegments, - segments, - largeSegments, - impressions: this.impressions, - impressionCounts: this.impressionCounts, - events: this.events, - telemetry: this.telemetry, - uniqueKeys: this.uniqueKeys, + storages[matchingKey] = { + splits: this.splits, + rbSegments: this.rbSegments, + segments, + largeSegments, + impressions: this.impressions, + impressionCounts: this.impressionCounts, + events: this.events, + telemetry: this.telemetry, + uniqueKeys: this.uniqueKeys, + + destroy() { } + }; + } - destroy() { } - }; + return storages[matchingKey]; }, }; @@ -72,9 +78,11 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag storage.uniqueKeys.track = noopTrack; } + const matchingKey = getMatching(params.settings.core.key); + storages[matchingKey] = storage; if (preloadedData) { - loadData(preloadedData, storage, getMatching(params.settings.core.key)); + setCache(log, preloadedData, storage, matchingKey); if (splits.getChangeNumber() > -1) onReadyFromCacheCb(); } diff --git a/src/storages/types.ts b/src/storages/types.ts index 553722c7..0b3680f5 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -466,7 +466,7 @@ export interface IStorageBase< telemetry?: TTelemetryCache, uniqueKeys: TUniqueKeysCache, destroy(): void | Promise, - shared?: (matchingKey: string, onReadyCb: (error?: any) => void) => this + shared?: 
(matchingKey: string, onReadyCb?: (error?: any) => void) => this } export interface IStorageSync extends IStorageBase< diff --git a/types/splitio.d.ts b/types/splitio.d.ts index a094f67e..5bd3a5cc 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1603,6 +1603,13 @@ declare namespace SplitIO { * @returns The manager instance. */ manager(): IManager; + /** + * Returns the current snapshot of the SDK rollout plan in cache. + * + * @param keys - Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys, rather than the complete segments data. + * @returns The current snapshot of the SDK rollout plan. + */ + getCache(keys?: SplitKey[]): PreloadedData, } /** * This represents the interface for the SDK instance for server-side with asynchronous storage. From ed482aef449f946c1cf956f98edc7bdf6782ad97 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 26 Aug 2025 09:39:26 -0300 Subject: [PATCH 06/20] rc --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index aa7cf6d8..f9c7ba15 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.4.1", + "version": "2.5.0-rc.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.4.1", + "version": "2.5.0-rc.0", "license": "Apache-2.0", "dependencies": { "@types/ioredis": "^4.28.0", diff --git a/package.json b/package.json index 27b15da2..47c53107 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.4.1", + "version": "2.5.0-rc.0", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", From b70d121f94dbb6ab6c3266d484f156a01f858f6d Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 26 Aug 2025 09:47:47 -0300 Subject: [PATCH 07/20] Polishing --- CHANGES.txt | 1 - src/storages/types.ts | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 66d95bb4..02a5be3e 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,7 +1,6 @@ 2.5.0 (August XX, 2025) - Added `factory.getCache()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. - Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. - - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow. 2.4.1 (June 3, 2025) - Bugfix - Improved the Proxy fallback to flag spec version 1.2 to handle cases where the Proxy does not return an end-of-stream marker in 400 status code responses. 
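The optional `keys` parameter of `getCache()` documented above determines how much segment data is exported: with no keys the full segment-to-keys map is included, while passing keys replaces it with per-key memberships. A short sketch against a standalone server-side factory (variable names are illustrative):

    // Full snapshot: includes the complete `segments` map (segment name -> keys).
    const fullSnapshot = serverFactory.getCache();

    // Scoped snapshot: `segments` is omitted and only the memberships of the given
    // keys are serialized, so other users' segment data never reaches the client.
    const scopedSnapshot = serverFactory.getCache(['user-123', 'user-456']);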
diff --git a/src/storages/types.ts b/src/storages/types.ts index 0b3680f5..53b049ed 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -505,7 +505,7 @@ export interface IStorageFactoryParams { */ onReadyCb: (error?: any) => void, /** - * It is meant for emitting SDK_READY_FROM_CACHE event in standalone mode with preloaded data + * For emitting SDK_READY_FROM_CACHE event in consumer mode with Redis and standalone mode with preloaded data */ onReadyFromCacheCb: () => void, } From f7dd0a1c0979fa5ab9baff9b2a5c5d152937aec5 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 26 Aug 2025 12:20:32 -0300 Subject: [PATCH 08/20] Rename new methods --- CHANGES.txt | 4 +-- src/storages/__tests__/dataLoader.spec.ts | 26 +++++++++---------- src/storages/dataLoader.ts | 24 ++++++++--------- src/storages/inLocalStorage/index.ts | 6 ++--- src/storages/inMemory/InMemoryStorageCS.ts | 12 ++++----- types/splitio.d.ts | 30 +++++++++++----------- 6 files changed, 51 insertions(+), 51 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 02a5be3e..50aef19e 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,6 +1,6 @@ 2.5.0 (August XX, 2025) - - Added `factory.getCache()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. - - Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. + - Added `factory.getRolloutPlan()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. + - Added `initialRolloutPlan` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. 2.4.1 (June 3, 2025) - Bugfix - Improved the Proxy fallback to flag spec version 1.2 to handle cases where the Proxy does not return an end-of-stream marker in 400 status code responses. 
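With the names introduced in this commit, the intended end-to-end flow is: a standalone server-side SDK exports the rollout plan and a client-side SDK bootstraps from it. A sketch for a typical server-rendered page; the serialization step and variable names are assumptions, while `getRolloutPlan()` and `initialRolloutPlan` come from the diff.

    // Server (Node SDK, standalone mode): export a plan scoped to the visiting user.
    const rolloutPlan = serverFactory.getRolloutPlan([userKey]);
    // ...embed JSON.stringify(rolloutPlan) in the rendered page...

    // Browser: preload the in-memory storage so the client is ready from cache at startup.
    const browserFactory = SplitFactory({
      core: { authorizationKey: '<client-side-sdk-key>', key: userKey },
      initialRolloutPlan: rolloutPlan
    });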
diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 2ef06f96..02f556d1 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -6,8 +6,8 @@ import { IRBSegment, ISplit } from '../../dtos/types'; import * as dataLoader from '../dataLoader'; -describe('setCache & getCache', () => { - jest.spyOn(dataLoader, 'setCache'); +describe('setRolloutPlan & getRolloutPlan', () => { + jest.spyOn(dataLoader, 'setRolloutPlan'); const onReadyFromCacheCb = jest.fn(); const onReadyCb = jest.fn(); @@ -24,23 +24,23 @@ describe('setCache & getCache', () => { }); test('using preloaded data with memberships', () => { - const preloadedData = dataLoader.getCache(loggerMock, serverStorage, [fullSettings.core.key as string, otherKey]); + const rolloutPlanData = dataLoader.getRolloutPlan(loggerMock, serverStorage, [fullSettings.core.key as string, otherKey]); // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setCache).toBeCalledTimes(1); + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlanData }, onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); expect(onReadyFromCacheCb).toBeCalledTimes(1); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setCache).toBeCalledTimes(2); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // Get preloaded data from client-side storage - expect(dataLoader.getCache(loggerMock, clientStorage, [fullSettings.core.key as string, otherKey])).toEqual(preloadedData); - expect(preloadedData).toEqual({ + expect(dataLoader.getRolloutPlan(loggerMock, clientStorage, [fullSettings.core.key as string, otherKey])).toEqual(rolloutPlanData); + expect(rolloutPlanData).toEqual({ since: 123, flags: [{ name: 'split1' }], rbSince: 321, @@ -54,20 +54,20 @@ describe('setCache & getCache', () => { }); test('using preloaded data with segments', () => { - const preloadedData = dataLoader.getCache(loggerMock, serverStorage); + const rolloutPlanData = dataLoader.getRolloutPlan(loggerMock, serverStorage); // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setCache).toBeCalledTimes(1); + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlanData }, onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); expect(onReadyFromCacheCb).toBeCalledTimes(1); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setCache).toBeCalledTimes(2); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); - expect(preloadedData).toEqual({ + expect(rolloutPlanData).toEqual({ since: 123, flags: [{ name: 'split1' }], rbSince: 321, diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index e5ec31c4..51340a0c 100644 --- 
a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -6,31 +6,31 @@ import { IMembershipsResponse, IMySegmentsResponse, IRBSegment, ISplit } from '. import { ILogger } from '../logger/types'; /** - * Sets the given synchronous storage with the provided preloaded data snapshot. + * Sets the given synchronous storage with the provided rollout plan snapshot. * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). */ -export function setCache(log: ILogger, preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { - // Do not load data if current preloadedData is empty - if (Object.keys(preloadedData).length === 0) return; +export function setRolloutPlan(log: ILogger, rolloutPlan: SplitIO.RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { + // Do not load data if current rollout plan is empty + if (Object.keys(rolloutPlan).length === 0) return; const { splits, rbSegments, segments, largeSegments } = storage; - log.debug(`set cache${matchingKey ? ` for key ${matchingKey}` : ''}`); + log.debug(`storage: set feature flags and segments${matchingKey ? ` for key ${matchingKey}` : ''}`); if (splits) { splits.clear(); - splits.update(preloadedData.flags as ISplit[] || [], [], preloadedData.since || -1); + splits.update(rolloutPlan.flags as ISplit[] || [], [], rolloutPlan.since || -1); } if (rbSegments) { rbSegments.clear(); - rbSegments.update(preloadedData.rbSegments as IRBSegment[] || [], [], preloadedData.rbSince || -1); + rbSegments.update(rolloutPlan.rbSegments as IRBSegment[] || [], [], rolloutPlan.rbSince || -1); } - const segmentsData = preloadedData.segments || {}; + const segmentsData = rolloutPlan.segments || {}; if (matchingKey) { // add memberships data (client-side) - let memberships = preloadedData.memberships && preloadedData.memberships[matchingKey]; + let memberships = rolloutPlan.memberships && rolloutPlan.memberships[matchingKey]; if (!memberships && segmentsData) { memberships = { ms: { @@ -55,13 +55,13 @@ export function setCache(log: ILogger, preloadedData: SplitIO.PreloadedData, sto } /** - * Gets the preloaded data snapshot from the given synchronous storage. + * Gets the rollout plan snapshot from the given synchronous storage. * If `keys` are provided, the memberships for those keys is returned, to protect segments data. * Otherwise, the segments data is returned. */ -export function getCache(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): SplitIO.PreloadedData { +export function getRolloutPlan(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): SplitIO.RolloutPlan { - log.debug(`get cache${keys ? ` for keys ${keys}` : ''}`); + log.debug(`storage: get feature flags and segments${keys ? 
` for keys ${keys}` : ''}`); return { since: storage.splits.getChangeNumber(), diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index 8924b84d..03d5bfc1 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -26,9 +26,9 @@ export function InLocalStorage(options: SplitIO.InLocalStorageOptions = {}): ISt function InLocalStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - // Fallback to InMemoryStorage if LocalStorage API is not available - if (!isLocalStorageAvailable()) { - params.settings.log.warn(LOG_PREFIX + 'LocalStorage API is unavailable. Falling back to default MEMORY storage'); + // Fallback to InMemoryStorage if LocalStorage API is not available or preloaded data is provided + if (!isLocalStorageAvailable() || params.settings.initialRolloutPlan) { + params.settings.log.warn(LOG_PREFIX + 'LocalStorage API is unavailable or `initialRolloutPlan` is provided. Falling back to default MEMORY storage'); return InMemoryStorageCSFactory(params); } diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index c8c47501..e6b5becc 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -8,7 +8,7 @@ import { LOCALHOST_MODE, STORAGE_MEMORY } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; import { getMatching } from '../../utils/key'; -import { setCache } from '../dataLoader'; +import { setRolloutPlan } from '../dataLoader'; import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; /** @@ -17,7 +17,7 @@ import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; * @param params - parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - const { settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params; + const { settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation }, initialRolloutPlan }, onReadyFromCacheCb } = params; const storages: Record = {}; @@ -45,8 +45,8 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag const segments = new MySegmentsCacheInMemory(); const largeSegments = new MySegmentsCacheInMemory(); - if (preloadedData) { - setCache(log, preloadedData, { segments, largeSegments }, matchingKey); + if (initialRolloutPlan) { + setRolloutPlan(log, initialRolloutPlan, { segments, largeSegments }, matchingKey); } storages[matchingKey] = { @@ -81,8 +81,8 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag const matchingKey = getMatching(params.settings.core.key); storages[matchingKey] = storage; - if (preloadedData) { - setCache(log, preloadedData, storage, matchingKey); + if (initialRolloutPlan) { + setRolloutPlan(log, initialRolloutPlan, storage, matchingKey); if (splits.getChangeNumber() > -1) onReadyFromCacheCb(); } diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 5bd3a5cc..0edf8500 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -353,7 +353,7 @@ interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISync /** * Data to initialize the SDK storage with. 
If provided and valid, the SDK will be ready from cache immediately. */ - preloadedData?: SplitIO.PreloadedData; + initialRolloutPlan?: SplitIO.RolloutPlan; /** * SDK Startup settings. */ @@ -559,7 +559,7 @@ declare namespace SplitIO { eventsFirstPushWindow: number; }; readonly storage: StorageSyncFactory | StorageAsyncFactory | StorageOptions; - readonly preloadedData?: SplitIO.PreloadedData; + readonly initialRolloutPlan?: SplitIO.RolloutPlan; readonly urls: { events: string; sdk: string; @@ -1025,11 +1025,11 @@ declare namespace SplitIO { type: NodeSyncStorage | NodeAsyncStorage | BrowserStorage; prefix?: string; options?: Object; - } + }; /** - * Defines the format of rollout plan data to preload the factory storage (cache). + * Defines the format of rollout plan data to preload the SDK cache. */ - type PreloadedData = { + type RolloutPlan = { /** * Change number of feature flags. */ @@ -1037,29 +1037,29 @@ declare namespace SplitIO { /** * List of feature flags. */ - flags: Object[], + flags: Object[]; /** * Change number of rule-based segments. */ - rbSince?: number, + rbSince?: number; /** * List of rule-based segments. */ - rbSegments?: Object[], + rbSegments?: Object[]; /** * Optional map of user keys to their memberships. */ memberships?: { - [key: string]: Object - }, + [key: string]: Object; + }; /** * Optional map of segments to their list of keys. * This property is ignored if `memberships` is provided. */ segments?: { - [segmentName: string]: string[] - }, - } + [segmentName: string]: string[]; + }; + }; /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. @@ -1082,7 +1082,7 @@ declare namespace SplitIO { type IntegrationFactory = { readonly type: string; (params: any): (Integration | void); - } + }; /** * A pair of user key and it's trafficType, required for tracking valid Split events. */ @@ -1609,7 +1609,7 @@ declare namespace SplitIO { * @param keys - Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys, rather than the complete segments data. * @returns The current snapshot of the SDK rollout plan. */ - getCache(keys?: SplitKey[]): PreloadedData, + getRolloutPlan(keys?: SplitKey[]): RolloutPlan; } /** * This represents the interface for the SDK instance for server-side with asynchronous storage. From b937db52ba01dfd67e5214ffba1e9fc54bdd74c6 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 27 Aug 2025 11:12:22 -0300 Subject: [PATCH 09/20] Remove outdated validation utils --- .../__tests__/preloadedData.spec.ts | 157 ------------------ src/utils/inputValidation/index.ts | 1 - src/utils/inputValidation/preloadedData.ts | 57 ------- 3 files changed, 215 deletions(-) delete mode 100644 src/utils/inputValidation/__tests__/preloadedData.spec.ts delete mode 100644 src/utils/inputValidation/preloadedData.ts diff --git a/src/utils/inputValidation/__tests__/preloadedData.spec.ts b/src/utils/inputValidation/__tests__/preloadedData.spec.ts deleted file mode 100644 index 79f1d1a4..00000000 --- a/src/utils/inputValidation/__tests__/preloadedData.spec.ts +++ /dev/null @@ -1,157 +0,0 @@ -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; - -// Import the module mocking the logger. 
-import { validatePreloadedData } from '../preloadedData'; - -const method = 'some_method'; -const testCases = [ - // valid inputs - { - input: { lastUpdated: 10, since: 10, splitsData: {} }, - output: true, - warn: `${method}: preloadedData.splitsData doesn't contain feature flag definitions.` - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { 'some_key': [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { 'some_key': [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: {} }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: ['some_segment'] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, segmentsData: {} }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, segmentsData: { some_segment: 'SEGMENT DEFINITION' } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: ['some_segment'], some_other_key: ['some_segment'] }, segmentsData: { some_segment: 'SEGMENT DEFINITION', some_other_segment: 'SEGMENT DEFINITION' } }, - output: true - }, - { - // should be true, even using objects for strings and numbers or having extra properties - input: { ignoredProperty: 'IGNORED', lastUpdated: new Number(10), since: new Number(10), splitsData: { 'some_split': new String('SPLIT DEFINITION') }, mySegmentsData: { some_key: [new String('some_segment')] }, segmentsData: { some_segment: new String('SEGMENT DEFINITION') } }, - output: true - }, - - // invalid inputs - { - // should be false if preloadedData is not an object - input: undefined, - output: false, - error: `${method}: preloadedData must be an object.` - }, - { - // should be false if preloadedData is not an object - input: [], - output: false, - error: `${method}: preloadedData must be an object.` - }, - { - // should be false if lastUpdated property is invalid - input: { lastUpdated: undefined, since: 10, splitsData: {} }, - output: false, - error: `${method}: preloadedData.lastUpdated must be a positive number.` - }, - { - // should be false if lastUpdated property is invalid - input: { lastUpdated: -1, since: 10, splitsData: {} }, - output: false, - error: `${method}: preloadedData.lastUpdated must be a positive number.` - }, - { - // should be false if since property is invalid - input: { lastUpdated: 10, since: undefined, splitsData: {} }, - output: false, - error: `${method}: preloadedData.since must be a positive number.` - }, - { - // should be false if since property is invalid - input: { lastUpdated: 10, since: -1, splitsData: {} }, - output: false, - error: `${method}: preloadedData.since must be a positive number.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: undefined }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to 
their stringified definitions.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: undefined } }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if mySegmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, mySegmentsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.` - }, - { - // should be false if mySegmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, mySegmentsData: { some_key: undefined } }, - output: false, - error: `${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.` - }, - { - // should be false if segmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, segmentsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.` - }, - { - // should be false if segmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, segmentsData: { some_segment: undefined } }, - output: false, - error: `${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.` - } -]; - -test('INPUT VALIDATION for preloadedData', () => { - - for (let i = 0; i < testCases.length; i++) { - const testCase = testCases[i]; - expect(validatePreloadedData(loggerMock, testCase.input, method)).toBe(testCase.output); - - if (testCase.error) { - expect(loggerMock.error.mock.calls[0]).toEqual([testCase.error]); // Should log the error for the invalid preloadedData. - loggerMock.error.mockClear(); - } else { - expect(loggerMock.error).not.toBeCalled(); // Should not log any error. - } - - if (testCase.warn) { - expect(loggerMock.warn.mock.calls[0]).toEqual([testCase.warn]); // Should log the warning for the given preloadedData. - loggerMock.warn.mockClear(); - } else { - expect(loggerMock.warn).not.toBeCalled(); // Should not log any warning. 
- } - } -}); diff --git a/src/utils/inputValidation/index.ts b/src/utils/inputValidation/index.ts index 96cf4be6..eac9777d 100644 --- a/src/utils/inputValidation/index.ts +++ b/src/utils/inputValidation/index.ts @@ -10,5 +10,4 @@ export { validateTrafficType } from './trafficType'; export { validateIfNotDestroyed, validateIfOperational } from './isOperational'; export { validateSplitExistence } from './splitExistence'; export { validateTrafficTypeExistence } from './trafficTypeExistence'; -export { validatePreloadedData } from './preloadedData'; export { validateEvaluationOptions } from './eventProperties'; diff --git a/src/utils/inputValidation/preloadedData.ts b/src/utils/inputValidation/preloadedData.ts deleted file mode 100644 index f07ee432..00000000 --- a/src/utils/inputValidation/preloadedData.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { isObject, isString, isFiniteNumber } from '../lang'; -import { validateSplit } from './split'; -import { ILogger } from '../../logger/types'; - -function validateTimestampData(log: ILogger, maybeTimestamp: any, method: string, item: string) { - if (isFiniteNumber(maybeTimestamp) && maybeTimestamp > -1) return true; - log.error(`${method}: preloadedData.${item} must be a positive number.`); - return false; -} - -function validateSplitsData(log: ILogger, maybeSplitsData: any, method: string) { - if (isObject(maybeSplitsData)) { - const splitNames = Object.keys(maybeSplitsData); - if (splitNames.length === 0) log.warn(`${method}: preloadedData.splitsData doesn't contain feature flag definitions.`); - // @TODO in the future, consider handling the possibility of having parsed definitions of splits - if (splitNames.every(splitName => validateSplit(log, splitName, method) && isString(maybeSplitsData[splitName]))) return true; - } - log.error(`${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.`); - return false; -} - -function validateMySegmentsData(log: ILogger, maybeMySegmentsData: any, method: string) { - if (isObject(maybeMySegmentsData)) { - const userKeys = Object.keys(maybeMySegmentsData); - if (userKeys.every(userKey => { - const segmentNames = maybeMySegmentsData[userKey]; - // an empty list is valid - return Array.isArray(segmentNames) && segmentNames.every(segmentName => isString(segmentName)); - })) return true; - } - log.error(`${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.`); - return false; -} - -function validateSegmentsData(log: ILogger, maybeSegmentsData: any, method: string) { - if (isObject(maybeSegmentsData)) { - const segmentNames = Object.keys(maybeSegmentsData); - if (segmentNames.every(segmentName => isString(maybeSegmentsData[segmentName]))) return true; - } - log.error(`${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.`); - return false; -} - -export function validatePreloadedData(log: ILogger, maybePreloadedData: any, method: string) { - if (!isObject(maybePreloadedData)) { - log.error(`${method}: preloadedData must be an object.`); - } else if ( - validateTimestampData(log, maybePreloadedData.lastUpdated, method, 'lastUpdated') && - validateTimestampData(log, maybePreloadedData.since, method, 'since') && - validateSplitsData(log, maybePreloadedData.splitsData, method) && - (!maybePreloadedData.mySegmentsData || validateMySegmentsData(log, maybePreloadedData.mySegmentsData, method)) && - (!maybePreloadedData.segmentsData || validateSegmentsData(log, 
maybePreloadedData.segmentsData, method)) - ) { - return true; - } - return false; -} From a95edb9c2ea8d1eef488fc2b6485db5e104a665d Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 27 Aug 2025 15:00:31 -0300 Subject: [PATCH 10/20] refactor type definitions --- src/storages/dataLoader.ts | 44 ++++++++++++++++++++++++++++++++------ src/types.ts | 2 ++ types/splitio.d.ts | 37 ++++---------------------------- 3 files changed, 44 insertions(+), 39 deletions(-) diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 51340a0c..860bde59 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -5,12 +5,44 @@ import { getMatching } from '../utils/key'; import { IMembershipsResponse, IMySegmentsResponse, IRBSegment, ISplit } from '../dtos/types'; import { ILogger } from '../logger/types'; +export type RolloutPlan = { + /** + * Change number of feature flags. + */ + since: number; + /** + * List of feature flags. + */ + flags: ISplit[]; + /** + * Change number of rule-based segments. + */ + rbSince?: number; + /** + * List of rule-based segments. + */ + rbSegments?: IRBSegment[]; + /** + * Optional map of user keys to their memberships. + */ + memberships?: { + [key: string]: IMembershipsResponse; + }; + /** + * Optional map of standard segments to their list of keys. + * This property is ignored if `memberships` is provided. + */ + segments?: { + [segmentName: string]: string[]; + }; +}; + /** * Sets the given synchronous storage with the provided rollout plan snapshot. * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). 
*/ -export function setRolloutPlan(log: ILogger, rolloutPlan: SplitIO.RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { +export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { // Do not load data if current rollout plan is empty if (Object.keys(rolloutPlan).length === 0) return; @@ -20,12 +52,12 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: SplitIO.RolloutPlan, s if (splits) { splits.clear(); - splits.update(rolloutPlan.flags as ISplit[] || [], [], rolloutPlan.since || -1); + splits.update(rolloutPlan.flags || [], [], rolloutPlan.since || -1); } if (rbSegments) { rbSegments.clear(); - rbSegments.update(rolloutPlan.rbSegments as IRBSegment[] || [], [], rolloutPlan.rbSince || -1); + rbSegments.update(rolloutPlan.rbSegments || [], [], rolloutPlan.rbSince || -1); } const segmentsData = rolloutPlan.segments || {}; @@ -43,8 +75,8 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: SplitIO.RolloutPlan, s } if (memberships) { - if ((memberships as IMembershipsResponse).ms) segments.resetSegments((memberships as IMembershipsResponse).ms!); - if ((memberships as IMembershipsResponse).ls && largeSegments) largeSegments.resetSegments((memberships as IMembershipsResponse).ls!); + if (memberships.ms) segments.resetSegments(memberships.ms!); + if (memberships.ls && largeSegments) largeSegments.resetSegments(memberships.ls!); } } else { // add segments data (server-side) Object.keys(segmentsData).forEach(segmentName => { @@ -59,7 +91,7 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: SplitIO.RolloutPlan, s * If `keys` are provided, the memberships for those keys is returned, to protect segments data. * Otherwise, the segments data is returned. */ -export function getRolloutPlan(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): SplitIO.RolloutPlan { +export function getRolloutPlan(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): RolloutPlan { log.debug(`storage: get feature flags and segments${keys ? ` for keys ${keys}` : ''}`); diff --git a/src/types.ts b/src/types.ts index ab3e74bb..be4132a1 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,6 +1,7 @@ import SplitIO from '../types/splitio'; import { ISplitFiltersValidation } from './dtos/types'; import { ILogger } from './logger/types'; +import { RolloutPlan } from './storages/dataLoader'; /** * SplitIO.ISettings interface extended with private properties for internal use @@ -10,6 +11,7 @@ export interface ISettings extends SplitIO.ISettings { __splitFiltersValidation: ISplitFiltersValidation; }; readonly log: ILogger; + readonly initialRolloutPlan?: RolloutPlan; } /** diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 0edf8500..3ffc3db6 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -351,7 +351,8 @@ interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISync */ features?: SplitIO.MockedFeaturesMap; /** - * Data to initialize the SDK storage with. If provided and valid, the SDK will be ready from cache immediately. + * Rollout plan object (i.e., feature flags and segment definitions) to initialize the SDK storage with. If provided and valid, the SDK will be ready from cache immediately. 
+ * This object is derived from calling the Node.js SDK’s `getRolloutPlan` method. */ initialRolloutPlan?: SplitIO.RolloutPlan; /** @@ -1027,39 +1028,9 @@ declare namespace SplitIO { options?: Object; }; /** - * Defines the format of rollout plan data to preload the SDK cache. + * A JSON-serializable plain object that defines the format of rollout plan data to preload the SDK cache with feature flags and segments. */ - type RolloutPlan = { - /** - * Change number of feature flags. - */ - since: number; - /** - * List of feature flags. - */ - flags: Object[]; - /** - * Change number of rule-based segments. - */ - rbSince?: number; - /** - * List of rule-based segments. - */ - rbSegments?: Object[]; - /** - * Optional map of user keys to their memberships. - */ - memberships?: { - [key: string]: Object; - }; - /** - * Optional map of segments to their list of keys. - * This property is ignored if `memberships` is provided. - */ - segments?: { - [segmentName: string]: string[]; - }; - }; + type RolloutPlan = Object; /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. From 5b84df372909f44e19ac9db1ba1e2f0c890bf58e Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Tue, 2 Sep 2025 17:03:29 -0300 Subject: [PATCH 11/20] refactor: restructure rollout plan data format and improve data loading --- src/dtos/types.ts | 2 +- src/storages/__tests__/dataLoader.spec.ts | 91 ++++++++++++------ src/storages/dataLoader.ts | 107 ++++++++++++---------- types/splitio.d.ts | 19 +++- 4 files changed, 142 insertions(+), 77 deletions(-) diff --git a/src/dtos/types.ts b/src/dtos/types.ts index 78d62de4..a72b751b 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -259,7 +259,7 @@ export interface ISegmentChangesResponse { name: string, added: string[], removed: string[], - since: number, + since?: number, till: number } diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 02f556d1..41dbde30 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -6,7 +6,7 @@ import { IRBSegment, ISplit } from '../../dtos/types'; import * as dataLoader from '../dataLoader'; -describe('setRolloutPlan & getRolloutPlan', () => { +describe('getRolloutPlan & setRolloutPlan (client-side)', () => { jest.spyOn(dataLoader, 'setRolloutPlan'); const onReadyFromCacheCb = jest.fn(); const onReadyCb = jest.fn(); @@ -19,15 +19,52 @@ describe('setRolloutPlan & getRolloutPlan', () => { serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); + const expectedRolloutPlan = { + splitChanges: { + ff: { d: [{ name: 'split1' }], t: 123 }, + rbs: { d: [{ name: 'rbs1' }], t: 321 } + }, + memberships: { + [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, + [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } + }, + segmentChanges: [{ + name: 'segment1', + added: [fullSettings.core.key as string, otherKey], + removed: [], + till: 123 + }] + }; + afterEach(() => { jest.clearAllMocks(); }); + test('using preloaded data (no memberships, no segments)', () => { + const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage); + + // Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, 
onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual([]); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual([]); + + // Get preloaded data from client-side storage + expect(dataLoader.getRolloutPlan(loggerMock, clientStorage)).toEqual(rolloutPlan); + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: undefined, segmentChanges: undefined }); + }); + test('using preloaded data with memberships', () => { - const rolloutPlanData = dataLoader.getRolloutPlan(loggerMock, serverStorage, [fullSettings.core.key as string, otherKey]); + const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string, otherKey] }); // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlanData }, onReadyFromCacheCb, onReadyCb }); + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); expect(onReadyFromCacheCb).toBeCalledTimes(1); @@ -39,43 +76,43 @@ describe('setRolloutPlan & getRolloutPlan', () => { expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // Get preloaded data from client-side storage - expect(dataLoader.getRolloutPlan(loggerMock, clientStorage, [fullSettings.core.key as string, otherKey])).toEqual(rolloutPlanData); - expect(rolloutPlanData).toEqual({ - since: 123, - flags: [{ name: 'split1' }], - rbSince: 321, - rbSegments: [{ name: 'rbs1' }], - memberships: { - [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, - [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } - }, - segments: undefined - }); + expect(dataLoader.getRolloutPlan(loggerMock, clientStorage, { keys: [fullSettings.core.key as string, otherKey] })).toEqual(rolloutPlan); + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, segmentChanges: undefined }); }); test('using preloaded data with segments', () => { - const rolloutPlanData = dataLoader.getRolloutPlan(loggerMock, serverStorage); + const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { exposeSegments: true }); // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlanData }, onReadyFromCacheCb, onReadyCb }); + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); expect(onReadyFromCacheCb).toBeCalledTimes(1); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); - expect(rolloutPlanData).toEqual({ - since: 123, - flags: [{ name: 'split1' }], - rbSince: 321, - rbSegments: [{ name: 'rbs1' }], - memberships: undefined, - segments: { - segment1: [fullSettings.core.key as string, 
otherKey] - } - }); + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: undefined }); + }); + + test('using preloaded data with memberships and segments', () => { + const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string], exposeSegments: true }); + + // Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); + expect(onReadyFromCacheCb).toBeCalledTimes(1); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // main client membership is set via the rollout plan `memberships` field + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // shared client membership is set via the rollout plan `segmentChanges` field + + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: { [fullSettings.core.key as string]: expectedRolloutPlan.memberships![fullSettings.core.key as string] } }); }); }); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index 860bde59..f8741af3 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -2,41 +2,37 @@ import SplitIO from '../../types/splitio'; import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; import { setToArray } from '../utils/lang/sets'; import { getMatching } from '../utils/key'; -import { IMembershipsResponse, IMySegmentsResponse, IRBSegment, ISplit } from '../dtos/types'; +import { IMembershipsResponse, IMySegmentsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; import { ILogger } from '../logger/types'; +import { isObject } from '../utils/lang'; export type RolloutPlan = { /** - * Change number of feature flags. + * Feature flags and rule-based segments. */ - since: number; + splitChanges: ISplitChangesResponse; /** - * List of feature flags. - */ - flags: ISplit[]; - /** - * Change number of rule-based segments. - */ - rbSince?: number; - /** - * List of rule-based segments. - */ - rbSegments?: IRBSegment[]; - /** - * Optional map of user keys to their memberships. + * Optional map of matching keys to their memberships. */ memberships?: { - [key: string]: IMembershipsResponse; + [matchingKey: string]: IMembershipsResponse; }; /** - * Optional map of standard segments to their list of keys. + * Optional list of standard segments. * This property is ignored if `memberships` is provided. */ - segments?: { - [segmentName: string]: string[]; - }; + segmentChanges?: ISegmentChangesResponse[]; }; +/** + * Validates if the given rollout plan is valid. + */ +function validateRolloutPlan(rolloutPlan: unknown): rolloutPlan is RolloutPlan { + if (isObject(rolloutPlan) && isObject((rolloutPlan as any).splitChanges)) return true; + + return false; +} + /** * Sets the given synchronous storage with the provided rollout plan snapshot. * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). 
@@ -44,32 +40,35 @@ export type RolloutPlan = { */ export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { // Do not load data if current rollout plan is empty - if (Object.keys(rolloutPlan).length === 0) return; + if (!validateRolloutPlan(rolloutPlan)) { + log.error('storage: invalid rollout plan provided'); + return; + } const { splits, rbSegments, segments, largeSegments } = storage; + const { splitChanges: { ff, rbs } } = rolloutPlan; log.debug(`storage: set feature flags and segments${matchingKey ? ` for key ${matchingKey}` : ''}`); - if (splits) { + if (splits && ff) { splits.clear(); - splits.update(rolloutPlan.flags || [], [], rolloutPlan.since || -1); + splits.update(ff.d, [], ff.t); } - if (rbSegments) { + if (rbSegments && rbs) { rbSegments.clear(); - rbSegments.update(rolloutPlan.rbSegments || [], [], rolloutPlan.rbSince || -1); + rbSegments.update(rbs.d, [], rbs.t); } - const segmentsData = rolloutPlan.segments || {}; + const segmentChanges = rolloutPlan.segmentChanges; if (matchingKey) { // add memberships data (client-side) let memberships = rolloutPlan.memberships && rolloutPlan.memberships[matchingKey]; - if (!memberships && segmentsData) { + if (!memberships && segmentChanges) { memberships = { ms: { - k: Object.keys(segmentsData).filter(segmentName => { - const segmentKeys = segmentsData[segmentName]; - return segmentKeys.indexOf(matchingKey) > -1; - }).map(segmentName => ({ n: segmentName })) + k: segmentChanges.filter(segment => { + return segment.added.indexOf(matchingKey) > -1; + }).map(segment => ({ n: segment.name })) } }; } @@ -79,10 +78,11 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: if (memberships.ls && largeSegments) largeSegments.resetSegments(memberships.ls!); } } else { // add segments data (server-side) - Object.keys(segmentsData).forEach(segmentName => { - const segmentKeys = segmentsData[segmentName]; - segments.update(segmentName, segmentKeys, [], -1); - }); + if (segmentChanges) { + segmentChanges.forEach(segment => { + segments.update(segment.name, segment.added, segment.removed, segment.till); + }); + } } } @@ -91,21 +91,32 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: * If `keys` are provided, the memberships for those keys is returned, to protect segments data. * Otherwise, the segments data is returned. */ -export function getRolloutPlan(log: ILogger, storage: IStorageSync, keys?: SplitIO.SplitKey[]): RolloutPlan { +export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { - log.debug(`storage: get feature flags and segments${keys ? ` for keys ${keys}` : ''}`); + const { keys, exposeSegments } = options; + const { splits, segments, rbSegments } = storage; + + log.debug(`storage: get feature flags${keys ? `, and memberships for keys ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); return { - since: storage.splits.getChangeNumber(), - flags: storage.splits.getAll(), - rbSince: storage.rbSegments.getChangeNumber(), - rbSegments: storage.rbSegments.getAll(), - segments: keys ? 
- undefined : // @ts-ignore accessing private prop - Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop - prev[cur] = setToArray(storage.segments.segmentCache[cur] as Set); - return prev; - }, {}), + splitChanges: { + ff: { + t: splits.getChangeNumber(), + d: splits.getAll(), + }, + rbs: { + t: rbSegments.getChangeNumber(), + d: rbSegments.getAll(), + } + }, + segmentChanges: exposeSegments ? // @ts-ignore accessing private prop + Object.keys(segments.segmentCache).map(segmentName => ({ + name: segmentName, // @ts-ignore + added: setToArray(segments.segmentCache[segmentName] as Set), + removed: [], + till: segments.getChangeNumber(segmentName)! + })) : + undefined, memberships: keys ? keys.reduce>((prev, key) => { if (storage.shared) { diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 3ffc3db6..3a9fe72d 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1031,6 +1031,23 @@ declare namespace SplitIO { * A JSON-serializable plain object that defines the format of rollout plan data to preload the SDK cache with feature flags and segments. */ type RolloutPlan = Object; + /** + * Options for the `factory.getRolloutPlan` method. + */ + type RolloutPlanOptions = { + /** + * Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys. + * + * @defaultValue `undefined` + */ + keys?: SplitKey[]; + /** + * Optional flag to expose segments data in the rollout plan snapshot. + * + * @defaultValue `false` + */ + exposeSegments?: boolean; + }; /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. @@ -1580,7 +1597,7 @@ declare namespace SplitIO { * @param keys - Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys, rather than the complete segments data. * @returns The current snapshot of the SDK rollout plan. */ - getRolloutPlan(keys?: SplitKey[]): RolloutPlan; + getRolloutPlan(options?: RolloutPlanOptions): RolloutPlan; } /** * This represents the interface for the SDK instance for server-side with asynchronous storage. 
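A short sketch of how the new RolloutPlanOptions shape the snapshot returned by getRolloutPlan, mirroring the expectations in dataLoader.spec.ts above; `factory` is assumed to be a standalone server-side SDK factory, and the user key and segment name are placeholders:

// No options: only feature flag and rule-based segment changes are included.
factory.getRolloutPlan();
// -> { splitChanges: { ff: { d: [...], t: 123 }, rbs: { d: [...], t: 321 } } }

// `keys`: memberships are precomputed per matching key, protecting the full segment data.
factory.getRolloutPlan({ keys: ['user-123'] });
// -> { splitChanges, memberships: { 'user-123': { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } } }

// `exposeSegments`: standard segment changes are included as-is (name, added, removed, till).
factory.getRolloutPlan({ exposeSegments: true });
// -> { splitChanges, segmentChanges: [{ name: 'segment1', added: [...], removed: [], till: 123 }] }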
From c20e74f76d0b336ea2bb756a1f222135b6cf1397 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 4 Sep 2025 12:03:19 -0300 Subject: [PATCH 12/20] refactor: do not mutate FF definitions when parsing matchers --- src/evaluator/convertions/index.ts | 10 ++++++++ src/evaluator/matchersTransform/index.ts | 7 +++--- src/storages/__tests__/dataLoader.spec.ts | 4 ++-- src/storages/dataLoader.ts | 29 ++++++++++------------- 4 files changed, 28 insertions(+), 22 deletions(-) diff --git a/src/evaluator/convertions/index.ts b/src/evaluator/convertions/index.ts index 7d7384d7..acad8017 100644 --- a/src/evaluator/convertions/index.ts +++ b/src/evaluator/convertions/index.ts @@ -1,3 +1,5 @@ +import { IBetweenMatcherData } from '../../dtos/types'; + export function zeroSinceHH(millisSinceEpoch: number): number { return new Date(millisSinceEpoch).setUTCHours(0, 0, 0, 0); } @@ -5,3 +7,11 @@ export function zeroSinceHH(millisSinceEpoch: number): number { export function zeroSinceSS(millisSinceEpoch: number): number { return new Date(millisSinceEpoch).setUTCSeconds(0, 0); } + +export function betweenDateTimeTransform(betweenMatcherData: IBetweenMatcherData): IBetweenMatcherData { + return { + dataType: betweenMatcherData.dataType, + start: zeroSinceSS(betweenMatcherData.start), + end: zeroSinceSS(betweenMatcherData.end) + }; +} diff --git a/src/evaluator/matchersTransform/index.ts b/src/evaluator/matchersTransform/index.ts index 6219c4dc..075ea9f0 100644 --- a/src/evaluator/matchersTransform/index.ts +++ b/src/evaluator/matchersTransform/index.ts @@ -3,7 +3,7 @@ import { matcherTypes, matcherTypesMapper, matcherDataTypes } from '../matchers/ import { segmentTransform } from './segment'; import { whitelistTransform } from './whitelist'; import { numericTransform } from './unaryNumeric'; -import { zeroSinceHH, zeroSinceSS } from '../convertions'; +import { zeroSinceHH, zeroSinceSS, betweenDateTimeTransform } from '../convertions'; import { IBetweenMatcherData, IInLargeSegmentMatcherData, IInSegmentMatcherData, ISplitMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; import { IMatcherDto } from '../types'; @@ -32,7 +32,7 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { let type = matcherTypesMapper(matcherType); // As default input data type we use string (even for ALL_KEYS) let dataType = matcherDataTypes.STRING; - let value = undefined; + let value; if (type === matcherTypes.IN_SEGMENT) { value = segmentTransform(userDefinedSegmentMatcherData as IInSegmentMatcherData); @@ -60,8 +60,7 @@ export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { dataType = matcherDataTypes.NUMBER; if (value.dataType === 'DATETIME') { - value.start = zeroSinceSS(value.start); - value.end = zeroSinceSS(value.end); + value = betweenDateTimeTransform(value); dataType = matcherDataTypes.DATETIME; } } else if (type === matcherTypes.BETWEEN_SEMVER) { diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 41dbde30..0225d3be 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -21,8 +21,8 @@ describe('getRolloutPlan & setRolloutPlan (client-side)', () => { const expectedRolloutPlan = { splitChanges: { - ff: { d: [{ name: 'split1' }], t: 123 }, - rbs: { d: [{ name: 'rbs1' }], t: 321 } + ff: { d: [{ name: 'split1' }], t: 123, s: -1 }, + rbs: { d: [{ name: 'rbs1' }], t: 321, s: -1 } }, memberships: { [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, 
ls: { k: [] } }, diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts index f8741af3..62f74d5e 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -102,10 +102,12 @@ export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: Spl splitChanges: { ff: { t: splits.getChangeNumber(), + s: -1, d: splits.getAll(), }, rbs: { t: rbSegments.getChangeNumber(), + s: -1, d: rbSegments.getAll(), } }, @@ -119,27 +121,22 @@ export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: Spl undefined, memberships: keys ? keys.reduce>((prev, key) => { - if (storage.shared) { - // Client-side segments - // @ts-ignore accessing private prop - const sharedStorage = storage.shared(key); - prev[getMatching(key)] = { - ms: { - // @ts-ignore accessing private prop + const matchingKey = getMatching(key); + if (storage.shared) { // Client-side segments + const sharedStorage = storage.shared(matchingKey); + prev[matchingKey] = { + ms: { // @ts-ignore k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), }, - ls: sharedStorage.largeSegments ? { - // @ts-ignore accessing private prop + ls: sharedStorage.largeSegments ? { // @ts-ignore k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), } : undefined }; - } else { - prev[getMatching(key)] = { - ms: { - // Server-side segments - // @ts-ignore accessing private prop - k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop - return storage.segments.segmentCache[segmentName].has(key) ? + } else { // Server-side segments + prev[matchingKey] = { + ms: { // @ts-ignore + k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore + return storage.segments.segmentCache[segmentName].has(matchingKey) ? prev!.concat({ n: segmentName }) : prev; }, []) From c65b3d026f3edd4a4fc618771243e99fec5f4ca6 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Thu, 4 Sep 2025 14:26:51 -0300 Subject: [PATCH 13/20] refactor: call setRolloutPlan outside storage, to generalize to any storage --- src/sdkClient/sdkClientMethodCS.ts | 9 +++- src/sdkFactory/index.ts | 12 ++++- src/storages/__tests__/dataLoader.spec.ts | 59 ++++++++++------------ src/storages/inLocalStorage/index.ts | 6 +-- src/storages/inMemory/InMemoryStorageCS.ts | 51 +++++-------------- 5 files changed, 61 insertions(+), 76 deletions(-) diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index ebc755a1..280e9509 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -9,13 +9,16 @@ import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, L import { SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; import { buildInstanceId } from './identity'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; +import { setRolloutPlan } from '../storages/dataLoader'; +import { ISegmentsCacheSync } from '../storages/types'; /** * Factory of client method for the client-side API variant where TT is ignored. * Therefore, clients don't have a bound TT for the track method. 
*/ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.IBrowserClient { - const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log } } = params; + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log, initialRolloutPlan, mode } } = params; const mainClientInstance = clientCSDecorator( log, @@ -56,6 +59,10 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl sharedSdkReadiness.readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); }); + if (sharedStorage && initialRolloutPlan && !isConsumerMode(mode)) { + setRolloutPlan(log, initialRolloutPlan, { segments: sharedStorage.segments as ISegmentsCacheSync, largeSegments: sharedStorage.largeSegments as ISegmentsCacheSync }, matchingKey); + } + // 3 possibilities: // - Standalone mode: both syncManager and sharedSyncManager are defined // - Consumer mode: both syncManager and sharedSyncManager are undefined diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 47cf69c3..decbfa9a 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -14,6 +14,10 @@ import { strategyOptimizedFactory } from '../trackers/strategy/strategyOptimized import { strategyNoneFactory } from '../trackers/strategy/strategyNone'; import { uniqueKeysTrackerFactory } from '../trackers/uniqueKeysTracker'; import { DEBUG, OPTIMIZED } from '../utils/constants'; +import { setRolloutPlan } from '../storages/dataLoader'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; +import { IStorageSync } from '../storages/types'; +import { getMatching } from '../utils/key'; /** * Modular SDK factory @@ -24,7 +28,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA syncManagerFactory, SignalListener, impressionsObserverFactory, integrationsManagerFactory, sdkManagerFactory, sdkClientMethodFactory, filterAdapterFactory, lazyInit } = params; - const { log, sync: { impressionsMode } } = settings; + const { log, sync: { impressionsMode }, initialRolloutPlan, mode, core: { key } } = settings; // @TODO handle non-recoverable errors, such as, global `fetch` not available, invalid SDK Key, etc. // On non-recoverable errors, we should mark the SDK as destroyed and not start synchronization. @@ -57,7 +61,11 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA } }); - // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);` + if (initialRolloutPlan && !isConsumerMode(mode)) { + setRolloutPlan(log, initialRolloutPlan, storage as IStorageSync, key && getMatching(key)); + if ((storage as IStorageSync).splits.getChangeNumber() > -1) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); + } + const clients: Record = {}; const telemetryTracker = telemetryTrackerFactory(storage.telemetry, platform.now); const integrationsManager = integrationsManagerFactory && integrationsManagerFactory({ settings, storage, telemetryTracker }); diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 0225d3be..9dca7faf 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -4,13 +4,9 @@ import { fullSettings } from '../../utils/settingsValidation/__tests__/settings. 
import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import { IRBSegment, ISplit } from '../../dtos/types'; -import * as dataLoader from '../dataLoader'; +import { setRolloutPlan, getRolloutPlan } from '../dataLoader'; describe('getRolloutPlan & setRolloutPlan (client-side)', () => { - jest.spyOn(dataLoader, 'setRolloutPlan'); - const onReadyFromCacheCb = jest.fn(); - const onReadyCb = jest.fn(); - const otherKey = 'otherKey'; // @ts-expect-error Load server-side storage @@ -41,56 +37,54 @@ describe('getRolloutPlan & setRolloutPlan (client-side)', () => { }); test('using preloaded data (no memberships, no segments)', () => { - const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage); + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage); - // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); expect(clientStorage.segments.getRegisteredSegments()).toEqual([]); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual([]); // Get preloaded data from client-side storage - expect(dataLoader.getRolloutPlan(loggerMock, clientStorage)).toEqual(rolloutPlan); + expect(getRolloutPlan(loggerMock, clientStorage)).toEqual(rolloutPlan); expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: undefined, segmentChanges: undefined }); }); test('using preloaded data with memberships', () => { - const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string, otherKey] }); + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string, otherKey] }); - // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); - // Get preloaded data from client-side storage - expect(dataLoader.getRolloutPlan(loggerMock, clientStorage, { keys: [fullSettings.core.key as string, otherKey] 
})).toEqual(rolloutPlan); - expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, segmentChanges: undefined }); + // @TODO requires internal storage cache for `shared` storages + // // Get preloaded data from client-side storage + // expect(getRolloutPlan(loggerMock, clientStorage, { keys: [fullSettings.core.key as string, otherKey] })).toEqual(rolloutPlan); + // expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, segmentChanges: undefined }); }); test('using preloaded data with segments', () => { - const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { exposeSegments: true }); + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { exposeSegments: true }); - // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); @@ -99,16 +93,15 @@ describe('getRolloutPlan & setRolloutPlan (client-side)', () => { }); test('using preloaded data with memberships and segments', () => { - const rolloutPlan = dataLoader.getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string], exposeSegments: true }); + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string], exposeSegments: true }); - // Load client-side storage with preloaded data - const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, initialRolloutPlan: rolloutPlan }, onReadyFromCacheCb, onReadyCb }); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(1); - expect(onReadyFromCacheCb).toBeCalledTimes(1); + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); // Shared client storage const sharedClientStorage = clientStorage.shared!(otherKey); - expect(dataLoader.setRolloutPlan).toBeCalledTimes(2); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // main client membership is set via the rollout plan `memberships` field expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // shared client membership is set via the rollout plan `segmentChanges` field diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index 03d5bfc1..8924b84d 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -26,9 +26,9 @@ export function InLocalStorage(options: 
SplitIO.InLocalStorageOptions = {}): ISt function InLocalStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - // Fallback to InMemoryStorage if LocalStorage API is not available or preloaded data is provided - if (!isLocalStorageAvailable() || params.settings.initialRolloutPlan) { - params.settings.log.warn(LOG_PREFIX + 'LocalStorage API is unavailable or `initialRolloutPlan` is provided. Falling back to default MEMORY storage'); + // Fallback to InMemoryStorage if LocalStorage API is not available + if (!isLocalStorageAvailable()) { + params.settings.log.warn(LOG_PREFIX + 'LocalStorage API is unavailable. Falling back to default MEMORY storage'); return InMemoryStorageCSFactory(params); } diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts index e6b5becc..5ae8351c 100644 --- a/src/storages/inMemory/InMemoryStorageCS.ts +++ b/src/storages/inMemory/InMemoryStorageCS.ts @@ -7,8 +7,6 @@ import { ImpressionCountsCacheInMemory } from './ImpressionCountsCacheInMemory'; import { LOCALHOST_MODE, STORAGE_MEMORY } from '../../utils/constants'; import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory'; import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS'; -import { getMatching } from '../../utils/key'; -import { setRolloutPlan } from '../dataLoader'; import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; /** @@ -17,9 +15,7 @@ import { RBSegmentsCacheInMemory } from './RBSegmentsCacheInMemory'; * @param params - parameters required by EventsCacheSync */ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync { - const { settings: { log, scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation }, initialRolloutPlan }, onReadyFromCacheCb } = params; - - const storages: Record = {}; + const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize }, sync: { __splitFiltersValidation } } } = params; const splits = new SplitsCacheInMemory(__splitFiltersValidation); const rbSegments = new RBSegmentsCacheInMemory(); @@ -40,31 +36,20 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag destroy() { }, // When using shared instantiation with MEMORY we reuse everything but segments (they are unique per key) - shared(matchingKey: string) { - if (!storages[matchingKey]) { - const segments = new MySegmentsCacheInMemory(); - const largeSegments = new MySegmentsCacheInMemory(); - - if (initialRolloutPlan) { - setRolloutPlan(log, initialRolloutPlan, { segments, largeSegments }, matchingKey); - } - - storages[matchingKey] = { - splits: this.splits, - rbSegments: this.rbSegments, - segments, - largeSegments, - impressions: this.impressions, - impressionCounts: this.impressionCounts, - events: this.events, - telemetry: this.telemetry, - uniqueKeys: this.uniqueKeys, - - destroy() { } - }; - } + shared() { + return { + splits: this.splits, + rbSegments: this.rbSegments, + segments: new MySegmentsCacheInMemory(), + largeSegments: new MySegmentsCacheInMemory(), + impressions: this.impressions, + impressionCounts: this.impressionCounts, + events: this.events, + telemetry: this.telemetry, + uniqueKeys: this.uniqueKeys, - return storages[matchingKey]; + destroy() { } + }; }, }; @@ -78,14 +63,6 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag storage.uniqueKeys.track = noopTrack; } - const matchingKey = getMatching(params.settings.core.key); - storages[matchingKey] = 
storage; - - if (initialRolloutPlan) { - setRolloutPlan(log, initialRolloutPlan, storage, matchingKey); - if (splits.getChangeNumber() > -1) onReadyFromCacheCb(); - } - return storage; } From 9ddadd63fb02eb7a611ed6e992335f75aa572566 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 14:39:10 -0300 Subject: [PATCH 14/20] refactor: move rollout plan validation --- src/sdkClient/sdkClientMethodCS.ts | 5 +- src/sdkFactory/index.ts | 5 +- src/storages/__tests__/dataLoader.spec.ts | 60 +++++++++++++------- src/storages/dataLoader.ts | 22 ++++--- src/storages/inLocalStorage/validateCache.ts | 4 +- src/utils/settingsValidation/index.ts | 4 ++ 6 files changed, 63 insertions(+), 37 deletions(-) diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index 280e9509..c1f16676 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -9,7 +9,6 @@ import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, L import { SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; import { buildInstanceId } from './identity'; -import { isConsumerMode } from '../utils/settingsValidation/mode'; import { setRolloutPlan } from '../storages/dataLoader'; import { ISegmentsCacheSync } from '../storages/types'; /** @@ -18,7 +17,7 @@ import { ISegmentsCacheSync } from '../storages/types'; * Therefore, clients don't have a bound TT for the track method. */ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: SplitIO.SplitKey) => SplitIO.IBrowserClient { - const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log, initialRolloutPlan, mode } } = params; + const { clients, storage, syncManager, sdkReadinessManager, settings: { core: { key }, log, initialRolloutPlan } } = params; const mainClientInstance = clientCSDecorator( log, @@ -59,7 +58,7 @@ export function sdkClientMethodCSFactory(params: ISdkFactoryContext): (key?: Spl sharedSdkReadiness.readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); }); - if (sharedStorage && initialRolloutPlan && !isConsumerMode(mode)) { + if (sharedStorage && initialRolloutPlan) { setRolloutPlan(log, initialRolloutPlan, { segments: sharedStorage.segments as ISegmentsCacheSync, largeSegments: sharedStorage.largeSegments as ISegmentsCacheSync }, matchingKey); } diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index decbfa9a..994a529d 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -15,7 +15,6 @@ import { strategyNoneFactory } from '../trackers/strategy/strategyNone'; import { uniqueKeysTrackerFactory } from '../trackers/uniqueKeysTracker'; import { DEBUG, OPTIMIZED } from '../utils/constants'; import { setRolloutPlan } from '../storages/dataLoader'; -import { isConsumerMode } from '../utils/settingsValidation/mode'; import { IStorageSync } from '../storages/types'; import { getMatching } from '../utils/key'; @@ -28,7 +27,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA syncManagerFactory, SignalListener, impressionsObserverFactory, integrationsManagerFactory, sdkManagerFactory, sdkClientMethodFactory, filterAdapterFactory, lazyInit } = params; - const { log, sync: { impressionsMode }, initialRolloutPlan, mode, core: { key } } = settings; + const { log, sync: { impressionsMode }, initialRolloutPlan, core: { key } } = settings; // @TODO handle non-recoverable errors, such as, global `fetch` not available, invalid SDK
Key, etc. // On non-recoverable errors, we should mark the SDK as destroyed and not start synchronization. @@ -61,7 +60,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA } }); - if (initialRolloutPlan && !isConsumerMode(mode)) { + if (initialRolloutPlan) { setRolloutPlan(log, initialRolloutPlan, storage as IStorageSync, key && getMatching(key)); if ((storage as IStorageSync).splits.getChangeNumber() > -1) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); } diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 9dca7faf..c8589353 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -4,34 +4,54 @@ import { fullSettings } from '../../utils/settingsValidation/__tests__/settings. import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import { IRBSegment, ISplit } from '../../dtos/types'; -import { setRolloutPlan, getRolloutPlan } from '../dataLoader'; +import { validateRolloutPlan, setRolloutPlan, getRolloutPlan } from '../dataLoader'; + +const otherKey = 'otherKey'; +const expectedRolloutPlan = { + splitChanges: { + ff: { d: [{ name: 'split1' }], t: 123, s: -1 }, + rbs: { d: [{ name: 'rbs1' }], t: 321, s: -1 } + }, + memberships: { + [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, + [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } + }, + segmentChanges: [{ + name: 'segment1', + added: [fullSettings.core.key as string, otherKey], + removed: [], + till: 123 + }] +}; + +describe('validateRolloutPlan', () => { + afterEach(() => { + loggerMock.mockClear(); + }); -describe('getRolloutPlan & setRolloutPlan (client-side)', () => { - const otherKey = 'otherKey'; + test('valid rollout plan and mode', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'standalone', initialRolloutPlan: expectedRolloutPlan } as any)).toEqual(expectedRolloutPlan); + expect(loggerMock.error).not.toHaveBeenCalled(); + }); + + test('invalid rollout plan', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'standalone', initialRolloutPlan: {} } as any)).toBeUndefined(); + expect(loggerMock.error).toHaveBeenCalledWith('storage: invalid rollout plan provided'); + }); + test('invalid mode', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'consumer', initialRolloutPlan: expectedRolloutPlan } as any)).toBeUndefined(); + expect(loggerMock.warn).toHaveBeenCalledWith('storage: initial rollout plan is ignored in consumer mode'); + }); +}); + +describe('getRolloutPlan & setRolloutPlan (client-side)', () => { // @ts-expect-error Load server-side storage const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); serverStorage.splits.update([{ name: 'split1' } as ISplit], [], 123); serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); - const expectedRolloutPlan = { - splitChanges: { - ff: { d: [{ name: 'split1' }], t: 123, s: -1 }, - rbs: { d: [{ name: 'rbs1' }], t: 321, s: -1 } - }, - memberships: { - [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, - [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } - }, - segmentChanges: [{ - name: 'segment1', - added: [fullSettings.core.key as string, otherKey], - removed: [], - till: 123 - }] - }; - afterEach(() => { jest.clearAllMocks(); }); diff --git a/src/storages/dataLoader.ts 
b/src/storages/dataLoader.ts index 62f74d5e..f61be538 100644 --- a/src/storages/dataLoader.ts +++ b/src/storages/dataLoader.ts @@ -5,6 +5,7 @@ import { getMatching } from '../utils/key'; import { IMembershipsResponse, IMySegmentsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; import { ILogger } from '../logger/types'; import { isObject } from '../utils/lang'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; export type RolloutPlan = { /** @@ -27,10 +28,18 @@ export type RolloutPlan = { /** * Validates if the given rollout plan is valid. */ -function validateRolloutPlan(rolloutPlan: unknown): rolloutPlan is RolloutPlan { - if (isObject(rolloutPlan) && isObject((rolloutPlan as any).splitChanges)) return true; +export function validateRolloutPlan(log: ILogger, settings: SplitIO.ISettings): RolloutPlan | undefined { + const { mode, initialRolloutPlan } = settings; - return false; + if (isConsumerMode(mode)) { + log.warn('storage: initial rollout plan is ignored in consumer mode'); + return; + } + + if (isObject(initialRolloutPlan) && isObject((initialRolloutPlan as any).splitChanges)) return initialRolloutPlan as RolloutPlan; + + log.error('storage: invalid rollout plan provided'); + return; } /** @@ -39,12 +48,6 @@ function validateRolloutPlan(rolloutPlan: unknown): rolloutPlan is RolloutPlan { * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). */ export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { - // Do not load data if current rollout plan is empty - if (!validateRolloutPlan(rolloutPlan)) { - log.error('storage: invalid rollout plan provided'); - return; - } - const { splits, rbSegments, segments, largeSegments } = storage; const { splitChanges: { ff, rbs } } = rolloutPlan; @@ -79,6 +82,7 @@ export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: } } else { // add segments data (server-side) if (segmentChanges) { + segments.clear(); segmentChanges.forEach(segment => { segments.update(segment.name, segment.added, segment.removed, segment.till); }); diff --git a/src/storages/inLocalStorage/validateCache.ts b/src/storages/inLocalStorage/validateCache.ts index 93d3144c..3fa54ec6 100644 --- a/src/storages/inLocalStorage/validateCache.ts +++ b/src/storages/inLocalStorage/validateCache.ts @@ -17,7 +17,7 @@ const MILLIS_IN_A_DAY = 86400000; * @returns `true` if cache should be cleared, `false` otherwise */ function validateExpiration(options: SplitIO.InLocalStorageOptions, settings: ISettings, keys: KeyBuilderCS, currentTimestamp: number, isThereCache: boolean) { - const { log } = settings; + const { log, initialRolloutPlan } = settings; // Check expiration const lastUpdatedTimestamp = parseInt(localStorage.getItem(keys.buildLastUpdatedKey()) as string, 10); @@ -41,7 +41,7 @@ function validateExpiration(options: SplitIO.InLocalStorageOptions, settings: IS } catch (e) { log.error(LOG_PREFIX + e); } - if (isThereCache) { + if (isThereCache && !initialRolloutPlan) { log.info(LOG_PREFIX + 'SDK key, flags filter criteria, or flags spec version has changed. 
Cleaning up cache'); return true; } diff --git a/src/utils/settingsValidation/index.ts b/src/utils/settingsValidation/index.ts index 3c7ecfe7..1c300ed6 100644 --- a/src/utils/settingsValidation/index.ts +++ b/src/utils/settingsValidation/index.ts @@ -7,6 +7,7 @@ import { ISettingsValidationParams } from './types'; import { ISettings } from '../../types'; import { validateKey } from '../inputValidation/key'; import { ERROR_MIN_CONFIG_PARAM, LOG_PREFIX_CLIENT_INSTANTIATION } from '../../logger/constants'; +import { validateRolloutPlan } from '../../storages/dataLoader'; // Exported for telemetry export const base = { @@ -152,6 +153,9 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV // @ts-ignore, modify readonly prop if (storage) withDefaults.storage = storage(withDefaults); + // @ts-ignore, modify readonly prop + if (withDefaults.initialRolloutPlan) withDefaults.initialRolloutPlan = validateRolloutPlan(log, withDefaults); + // Validate key and TT (for client-side) const maybeKey = withDefaults.core.key; if (validationParams.acceptKey) { From 590daa2ad21ba9cf9c1fe32afcd145c3f53d1a2c Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 14:44:57 -0300 Subject: [PATCH 15/20] Separate getRolloutPlan and setRolloutPlan for bundle size reduction --- src/sdkClient/sdkClientMethodCS.ts | 2 +- src/sdkFactory/index.ts | 2 +- src/storages/__tests__/dataLoader.spec.ts | 3 +- src/storages/dataLoader.ts | 157 ---------------------- src/storages/getRolloutPlan.ts | 73 ++++++++++ src/storages/setRolloutPlan.ts | 71 ++++++++++ src/storages/types.ts | 20 ++- src/types.ts | 2 +- src/utils/settingsValidation/index.ts | 2 +- 9 files changed, 169 insertions(+), 163 deletions(-) delete mode 100644 src/storages/dataLoader.ts create mode 100644 src/storages/getRolloutPlan.ts create mode 100644 src/storages/setRolloutPlan.ts diff --git a/src/sdkClient/sdkClientMethodCS.ts b/src/sdkClient/sdkClientMethodCS.ts index c1f16676..b68481a9 100644 --- a/src/sdkClient/sdkClientMethodCS.ts +++ b/src/sdkClient/sdkClientMethodCS.ts @@ -9,7 +9,7 @@ import { RETRIEVE_CLIENT_DEFAULT, NEW_SHARED_CLIENT, RETRIEVE_CLIENT_EXISTING, L import { SDK_SEGMENTS_ARRIVED } from '../readiness/constants'; import { ISdkFactoryContext } from '../sdkFactory/types'; import { buildInstanceId } from './identity'; -import { setRolloutPlan } from '../storages/dataLoader'; +import { setRolloutPlan } from '../storages/setRolloutPlan'; import { ISegmentsCacheSync } from '../storages/types'; /** diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 994a529d..d1dcac43 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -14,7 +14,7 @@ import { strategyOptimizedFactory } from '../trackers/strategy/strategyOptimized import { strategyNoneFactory } from '../trackers/strategy/strategyNone'; import { uniqueKeysTrackerFactory } from '../trackers/uniqueKeysTracker'; import { DEBUG, OPTIMIZED } from '../utils/constants'; -import { setRolloutPlan } from '../storages/dataLoader'; +import { setRolloutPlan } from '../storages/setRolloutPlan'; import { IStorageSync } from '../storages/types'; import { getMatching } from '../utils/key'; diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index c8589353..f6afd300 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -4,7 +4,8 @@ import { fullSettings } from '../../utils/settingsValidation/__tests__/settings. 
import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import { IRBSegment, ISplit } from '../../dtos/types'; -import { validateRolloutPlan, setRolloutPlan, getRolloutPlan } from '../dataLoader'; +import { validateRolloutPlan, setRolloutPlan } from '../setRolloutPlan'; +import { getRolloutPlan } from '../getRolloutPlan'; const otherKey = 'otherKey'; const expectedRolloutPlan = { diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts deleted file mode 100644 index f61be538..00000000 --- a/src/storages/dataLoader.ts +++ /dev/null @@ -1,157 +0,0 @@ -import SplitIO from '../../types/splitio'; -import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types'; -import { setToArray } from '../utils/lang/sets'; -import { getMatching } from '../utils/key'; -import { IMembershipsResponse, IMySegmentsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; -import { ILogger } from '../logger/types'; -import { isObject } from '../utils/lang'; -import { isConsumerMode } from '../utils/settingsValidation/mode'; - -export type RolloutPlan = { - /** - * Feature flags and rule-based segments. - */ - splitChanges: ISplitChangesResponse; - /** - * Optional map of matching keys to their memberships. - */ - memberships?: { - [matchingKey: string]: IMembershipsResponse; - }; - /** - * Optional list of standard segments. - * This property is ignored if `memberships` is provided. - */ - segmentChanges?: ISegmentChangesResponse[]; -}; - -/** - * Validates if the given rollout plan is valid. - */ -export function validateRolloutPlan(log: ILogger, settings: SplitIO.ISettings): RolloutPlan | undefined { - const { mode, initialRolloutPlan } = settings; - - if (isConsumerMode(mode)) { - log.warn('storage: initial rollout plan is ignored in consumer mode'); - return; - } - - if (isObject(initialRolloutPlan) && isObject((initialRolloutPlan as any).splitChanges)) return initialRolloutPlan as RolloutPlan; - - log.error('storage: invalid rollout plan provided'); - return; -} - -/** - * Sets the given synchronous storage with the provided rollout plan snapshot. - * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). - * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). - */ -export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { - const { splits, rbSegments, segments, largeSegments } = storage; - const { splitChanges: { ff, rbs } } = rolloutPlan; - - log.debug(`storage: set feature flags and segments${matchingKey ? 
` for key ${matchingKey}` : ''}`); - - if (splits && ff) { - splits.clear(); - splits.update(ff.d, [], ff.t); - } - - if (rbSegments && rbs) { - rbSegments.clear(); - rbSegments.update(rbs.d, [], rbs.t); - } - - const segmentChanges = rolloutPlan.segmentChanges; - if (matchingKey) { // add memberships data (client-side) - let memberships = rolloutPlan.memberships && rolloutPlan.memberships[matchingKey]; - if (!memberships && segmentChanges) { - memberships = { - ms: { - k: segmentChanges.filter(segment => { - return segment.added.indexOf(matchingKey) > -1; - }).map(segment => ({ n: segment.name })) - } - }; - } - - if (memberships) { - if (memberships.ms) segments.resetSegments(memberships.ms!); - if (memberships.ls && largeSegments) largeSegments.resetSegments(memberships.ls!); - } - } else { // add segments data (server-side) - if (segmentChanges) { - segments.clear(); - segmentChanges.forEach(segment => { - segments.update(segment.name, segment.added, segment.removed, segment.till); - }); - } - } -} - -/** - * Gets the rollout plan snapshot from the given synchronous storage. - * If `keys` are provided, the memberships for those keys is returned, to protect segments data. - * Otherwise, the segments data is returned. - */ -export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { - - const { keys, exposeSegments } = options; - const { splits, segments, rbSegments } = storage; - - log.debug(`storage: get feature flags${keys ? `, and memberships for keys ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); - - return { - splitChanges: { - ff: { - t: splits.getChangeNumber(), - s: -1, - d: splits.getAll(), - }, - rbs: { - t: rbSegments.getChangeNumber(), - s: -1, - d: rbSegments.getAll(), - } - }, - segmentChanges: exposeSegments ? // @ts-ignore accessing private prop - Object.keys(segments.segmentCache).map(segmentName => ({ - name: segmentName, // @ts-ignore - added: setToArray(segments.segmentCache[segmentName] as Set), - removed: [], - till: segments.getChangeNumber(segmentName)! - })) : - undefined, - memberships: keys ? - keys.reduce>((prev, key) => { - const matchingKey = getMatching(key); - if (storage.shared) { // Client-side segments - const sharedStorage = storage.shared(matchingKey); - prev[matchingKey] = { - ms: { // @ts-ignore - k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), - }, - ls: sharedStorage.largeSegments ? { // @ts-ignore - k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), - } : undefined - }; - } else { // Server-side segments - prev[matchingKey] = { - ms: { // @ts-ignore - k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore - return storage.segments.segmentCache[segmentName].has(matchingKey) ? 
- prev!.concat({ n: segmentName }) : - prev; - }, []) - }, - ls: { - k: [] - } - }; - } - return prev; - }, {}) : - undefined - }; -} diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts new file mode 100644 index 00000000..db6333aa --- /dev/null +++ b/src/storages/getRolloutPlan.ts @@ -0,0 +1,73 @@ +import SplitIO from '../../types/splitio'; +import { IStorageSync } from './types'; +import { setToArray } from '../utils/lang/sets'; +import { getMatching } from '../utils/key'; +import { ILogger } from '../logger/types'; +import { RolloutPlan } from './types'; +import { IMembershipsResponse, IMySegmentsResponse } from '../dtos/types'; + +/** + * Gets the rollout plan snapshot from the given synchronous storage. + * If `keys` are provided, the memberships for those keys is returned, to protect segments data. + * Otherwise, the segments data is returned. + */ +export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { + + const { keys, exposeSegments } = options; + const { splits, segments, rbSegments } = storage; + + log.debug(`storage: get feature flags${keys ? `, and memberships for keys ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); + + return { + splitChanges: { + ff: { + t: splits.getChangeNumber(), + s: -1, + d: splits.getAll(), + }, + rbs: { + t: rbSegments.getChangeNumber(), + s: -1, + d: rbSegments.getAll(), + } + }, + segmentChanges: exposeSegments ? // @ts-ignore accessing private prop + Object.keys(segments.segmentCache).map(segmentName => ({ + name: segmentName, // @ts-ignore + added: setToArray(segments.segmentCache[segmentName] as Set), + removed: [], + till: segments.getChangeNumber(segmentName)! + })) : + undefined, + memberships: keys ? + keys.reduce>((prev, key) => { + const matchingKey = getMatching(key); + if (storage.shared) { // Client-side segments + const sharedStorage = storage.shared(matchingKey); + prev[matchingKey] = { + ms: { // @ts-ignore + k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), + }, + ls: sharedStorage.largeSegments ? { // @ts-ignore + k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), + } : undefined + }; + } else { // Server-side segments + prev[matchingKey] = { + ms: { // @ts-ignore + k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore + return storage.segments.segmentCache[segmentName].has(matchingKey) ? + prev!.concat({ n: segmentName }) : + prev; + }, []) + }, + ls: { + k: [] + } + }; + } + return prev; + }, {}) : + undefined + }; +} diff --git a/src/storages/setRolloutPlan.ts b/src/storages/setRolloutPlan.ts new file mode 100644 index 00000000..a8529231 --- /dev/null +++ b/src/storages/setRolloutPlan.ts @@ -0,0 +1,71 @@ +import SplitIO from '../../types/splitio'; +import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync } from './types'; +import { ILogger } from '../logger/types'; +import { isObject } from '../utils/lang'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; +import { RolloutPlan } from './types'; + +/** + * Validates if the given rollout plan is valid. 
+ */ +export function validateRolloutPlan(log: ILogger, settings: SplitIO.ISettings): RolloutPlan | undefined { + const { mode, initialRolloutPlan } = settings; + + if (isConsumerMode(mode)) { + log.warn('storage: initial rollout plan is ignored in consumer mode'); + return; + } + + if (isObject(initialRolloutPlan) && isObject((initialRolloutPlan as any).splitChanges)) return initialRolloutPlan as RolloutPlan; + + log.error('storage: invalid rollout plan provided'); + return; +} + +/** + * Sets the given synchronous storage with the provided rollout plan snapshot. + * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). + * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). + */ +export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { + const { splits, rbSegments, segments, largeSegments } = storage; + const { splitChanges: { ff, rbs } } = rolloutPlan; + + log.debug(`storage: set feature flags and segments${matchingKey ? ` for key ${matchingKey}` : ''}`); + + if (splits && ff) { + splits.clear(); + splits.update(ff.d, [], ff.t); + } + + if (rbSegments && rbs) { + rbSegments.clear(); + rbSegments.update(rbs.d, [], rbs.t); + } + + const segmentChanges = rolloutPlan.segmentChanges; + if (matchingKey) { // add memberships data (client-side) + let memberships = rolloutPlan.memberships && rolloutPlan.memberships[matchingKey]; + if (!memberships && segmentChanges) { + memberships = { + ms: { + k: segmentChanges.filter(segment => { + return segment.added.indexOf(matchingKey) > -1; + }).map(segment => ({ n: segment.name })) + } + }; + } + + if (memberships) { + if (memberships.ms) segments.resetSegments(memberships.ms!); + if (memberships.ls && largeSegments) largeSegments.resetSegments(memberships.ls!); + } + } else { // add segments data (server-side) + if (segmentChanges) { + segments.clear(); + segmentChanges.forEach(segment => { + segments.update(segment.name, segment.added, segment.removed, segment.till); + }); + } + } +} diff --git a/src/storages/types.ts b/src/storages/types.ts index 53b049ed..219e9a55 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -1,5 +1,5 @@ import SplitIO from '../../types/splitio'; -import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse } from '../dtos/types'; +import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse, IMembershipsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { ISettings } from '../types'; @@ -520,3 +520,21 @@ export type IStorageAsyncFactory = SplitIO.StorageAsyncFactory & { readonly type: SplitIO.StorageType, (params: IStorageFactoryParams): IStorageAsync } + +export type RolloutPlan = { + /** + * Feature flags and rule-based segments. 
+ */ + splitChanges: ISplitChangesResponse; + /** + * Optional map of matching keys to their memberships. + */ + memberships?: { + [matchingKey: string]: IMembershipsResponse; + }; + /** + * Optional list of standard segments. + * This property is ignored if `memberships` is provided. + */ + segmentChanges?: ISegmentChangesResponse[]; +}; diff --git a/src/types.ts b/src/types.ts index be4132a1..ad3fa04c 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,7 +1,7 @@ import SplitIO from '../types/splitio'; import { ISplitFiltersValidation } from './dtos/types'; import { ILogger } from './logger/types'; -import { RolloutPlan } from './storages/dataLoader'; +import { RolloutPlan } from './storages/types'; /** * SplitIO.ISettings interface extended with private properties for internal use diff --git a/src/utils/settingsValidation/index.ts b/src/utils/settingsValidation/index.ts index 1c300ed6..2dc63018 100644 --- a/src/utils/settingsValidation/index.ts +++ b/src/utils/settingsValidation/index.ts @@ -7,7 +7,7 @@ import { ISettingsValidationParams } from './types'; import { ISettings } from '../../types'; import { validateKey } from '../inputValidation/key'; import { ERROR_MIN_CONFIG_PARAM, LOG_PREFIX_CLIENT_INSTANTIATION } from '../../logger/constants'; -import { validateRolloutPlan } from '../../storages/dataLoader'; +import { validateRolloutPlan } from '../../storages/setRolloutPlan'; // Exported for telemetry export const base = { From 4ee373f48a6a2c0ab84a71c53ebe73ad0e706b66 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 14:49:14 -0300 Subject: [PATCH 16/20] Polishing --- src/dtos/types.ts | 2 +- src/storages/__tests__/dataLoader.spec.ts | 1 + src/storages/getRolloutPlan.ts | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/dtos/types.ts b/src/dtos/types.ts index a72b751b..78d62de4 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -259,7 +259,7 @@ export interface ISegmentChangesResponse { name: string, added: string[], removed: string[], - since?: number, + since: number, till: number } diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index f6afd300..3f1de562 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -21,6 +21,7 @@ const expectedRolloutPlan = { name: 'segment1', added: [fullSettings.core.key as string, otherKey], removed: [], + since: -1, till: 123 }] }; diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts index db6333aa..d4ac25d8 100644 --- a/src/storages/getRolloutPlan.ts +++ b/src/storages/getRolloutPlan.ts @@ -36,6 +36,7 @@ export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: Spl name: segmentName, // @ts-ignore added: setToArray(segments.segmentCache[segmentName] as Set), removed: [], + since: -1, till: segments.getChangeNumber(segmentName)! 
})) : undefined, From 9ddac790e62d660be07c8939d5f1b1ba76b40c42 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 15:21:12 -0300 Subject: [PATCH 17/20] Add data loader utils: getRolloutPlan, setRolloutPlan, validateRolloutPlan --- src/storages/__tests__/dataLoader.spec.ts | 133 +++++++++++++++ src/storages/dataLoader.ts | 55 ------ src/storages/getRolloutPlan.ts | 74 +++++++++ src/storages/setRolloutPlan.ts | 71 ++++++++ src/storages/types.ts | 20 ++- src/types.ts | 37 +---- .../__tests__/preloadedData.spec.ts | 157 ------------------ src/utils/inputValidation/index.ts | 1 - src/utils/inputValidation/preloadedData.ts | 57 ------- types/splitio.d.ts | 38 ++++- 10 files changed, 335 insertions(+), 308 deletions(-) create mode 100644 src/storages/__tests__/dataLoader.spec.ts delete mode 100644 src/storages/dataLoader.ts create mode 100644 src/storages/getRolloutPlan.ts create mode 100644 src/storages/setRolloutPlan.ts delete mode 100644 src/utils/inputValidation/__tests__/preloadedData.spec.ts delete mode 100644 src/utils/inputValidation/preloadedData.ts diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts new file mode 100644 index 00000000..3f1de562 --- /dev/null +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -0,0 +1,133 @@ +import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage'; +import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; +import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; +import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; +import { IRBSegment, ISplit } from '../../dtos/types'; + +import { validateRolloutPlan, setRolloutPlan } from '../setRolloutPlan'; +import { getRolloutPlan } from '../getRolloutPlan'; + +const otherKey = 'otherKey'; +const expectedRolloutPlan = { + splitChanges: { + ff: { d: [{ name: 'split1' }], t: 123, s: -1 }, + rbs: { d: [{ name: 'rbs1' }], t: 321, s: -1 } + }, + memberships: { + [fullSettings.core.key as string]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } }, + [otherKey]: { ms: { k: [{ n: 'segment1' }] }, ls: { k: [] } } + }, + segmentChanges: [{ + name: 'segment1', + added: [fullSettings.core.key as string, otherKey], + removed: [], + since: -1, + till: 123 + }] +}; + +describe('validateRolloutPlan', () => { + afterEach(() => { + loggerMock.mockClear(); + }); + + test('valid rollout plan and mode', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'standalone', initialRolloutPlan: expectedRolloutPlan } as any)).toEqual(expectedRolloutPlan); + expect(loggerMock.error).not.toHaveBeenCalled(); + }); + + test('invalid rollout plan', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'standalone', initialRolloutPlan: {} } as any)).toBeUndefined(); + expect(loggerMock.error).toHaveBeenCalledWith('storage: invalid rollout plan provided'); + }); + + test('invalid mode', () => { + expect(validateRolloutPlan(loggerMock, { mode: 'consumer', initialRolloutPlan: expectedRolloutPlan } as any)).toBeUndefined(); + expect(loggerMock.warn).toHaveBeenCalledWith('storage: initial rollout plan is ignored in consumer mode'); + }); +}); + +describe('getRolloutPlan & setRolloutPlan (client-side)', () => { + // @ts-expect-error Load server-side storage + const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); + serverStorage.splits.update([{ name: 'split1' } as ISplit], [], 123); + serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); + 
serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); + + afterEach(() => { + jest.clearAllMocks(); + }); + + test('using preloaded data (no memberships, no segments)', () => { + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage); + + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual([]); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual([]); + + // Get preloaded data from client-side storage + expect(getRolloutPlan(loggerMock, clientStorage)).toEqual(rolloutPlan); + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: undefined, segmentChanges: undefined }); + }); + + test('using preloaded data with memberships', () => { + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string, otherKey] }); + + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + + // @TODO requires internal storage cache for `shared` storages + // // Get preloaded data from client-side storage + // expect(getRolloutPlan(loggerMock, clientStorage, { keys: [fullSettings.core.key as string, otherKey] })).toEqual(rolloutPlan); + // expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, segmentChanges: undefined }); + }); + + test('using preloaded data with segments', () => { + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { exposeSegments: true }); + + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); + + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: undefined }); + }); + + test('using preloaded data with memberships and segments', () => { + const rolloutPlan = getRolloutPlan(loggerMock, serverStorage, { keys: [fullSettings.core.key as string], exposeSegments: true }); + + // @ts-expect-error Load client-side storage with preloaded data + const clientStorage = InMemoryStorageCSFactory({ settings: fullSettings }); + 
setRolloutPlan(loggerMock, rolloutPlan, clientStorage, fullSettings.core.key as string); + + // Shared client storage + const sharedClientStorage = clientStorage.shared!(otherKey); + setRolloutPlan(loggerMock, rolloutPlan, { segments: sharedClientStorage.segments, largeSegments: sharedClientStorage.largeSegments }, otherKey); + + expect(clientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // main client membership is set via the rollout plan `memberships` field + expect(sharedClientStorage.segments.getRegisteredSegments()).toEqual(['segment1']); // shared client membership is set via the rollout plan `segmentChanges` field + + expect(rolloutPlan).toEqual({ ...expectedRolloutPlan, memberships: { [fullSettings.core.key as string]: expectedRolloutPlan.memberships![fullSettings.core.key as string] } }); + }); +}); diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts deleted file mode 100644 index 49522bce..00000000 --- a/src/storages/dataLoader.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { PreloadedData } from '../types'; -import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types'; - -// This value might be eventually set via a config parameter -const DEFAULT_CACHE_EXPIRATION_IN_MILLIS = 864000000; // 10 days - -/** - * Factory of client-side storage loader - * - * @param preloadedData - validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader - * and extended with a `mySegmentsData` property. - * @returns function to preload the storage - */ -export function dataLoaderFactory(preloadedData: PreloadedData): DataLoader { - - /** - * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function - * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js) - * - * @param storage - object containing `splits` and `segments` cache (client-side variant) - * @param userId - user key string of the provided MySegmentsCache - */ - // @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic. - // @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag. 
- return function loadData(storage: { splits: ISplitsCacheSync, segments: ISegmentsCacheSync }, userId: string) { - // Do not load data if current preloadedData is empty - if (Object.keys(preloadedData).length === 0) return; - - const { lastUpdated = -1, segmentsData = {}, since = -1, splitsData = {} } = preloadedData; - - const storedSince = storage.splits.getChangeNumber(); - const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS; - - // Do not load data if current localStorage data is more recent, - // or if its `lastUpdated` timestamp is older than the given `expirationTimestamp`, - if (storedSince > since || lastUpdated < expirationTimestamp) return; - - // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data - storage.splits.clear(); - - // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data - storage.splits.update(Object.keys(splitsData).map(splitName => JSON.parse(splitsData[splitName])), [], since); - - // add mySegments data - let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[userId]; - if (!mySegmentsData) { - // segmentsData in an object where the property is the segment name and the pertaining value is a stringified object that contains the `added` array of userIds - mySegmentsData = Object.keys(segmentsData).filter(segmentName => { - const userIds = JSON.parse(segmentsData[segmentName]).added; - return Array.isArray(userIds) && userIds.indexOf(userId) > -1; - }); - } - storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) }); - }; -} diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts new file mode 100644 index 00000000..d4ac25d8 --- /dev/null +++ b/src/storages/getRolloutPlan.ts @@ -0,0 +1,74 @@ +import SplitIO from '../../types/splitio'; +import { IStorageSync } from './types'; +import { setToArray } from '../utils/lang/sets'; +import { getMatching } from '../utils/key'; +import { ILogger } from '../logger/types'; +import { RolloutPlan } from './types'; +import { IMembershipsResponse, IMySegmentsResponse } from '../dtos/types'; + +/** + * Gets the rollout plan snapshot from the given synchronous storage. + * If `keys` are provided, the memberships for those keys is returned, to protect segments data. + * Otherwise, the segments data is returned. + */ +export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { + + const { keys, exposeSegments } = options; + const { splits, segments, rbSegments } = storage; + + log.debug(`storage: get feature flags${keys ? `, and memberships for keys ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); + + return { + splitChanges: { + ff: { + t: splits.getChangeNumber(), + s: -1, + d: splits.getAll(), + }, + rbs: { + t: rbSegments.getChangeNumber(), + s: -1, + d: rbSegments.getAll(), + } + }, + segmentChanges: exposeSegments ? // @ts-ignore accessing private prop + Object.keys(segments.segmentCache).map(segmentName => ({ + name: segmentName, // @ts-ignore + added: setToArray(segments.segmentCache[segmentName] as Set), + removed: [], + since: -1, + till: segments.getChangeNumber(segmentName)! + })) : + undefined, + memberships: keys ? 
+ keys.reduce>((prev, key) => { + const matchingKey = getMatching(key); + if (storage.shared) { // Client-side segments + const sharedStorage = storage.shared(matchingKey); + prev[matchingKey] = { + ms: { // @ts-ignore + k: Object.keys(sharedStorage.segments.segmentCache).map(segmentName => ({ n: segmentName })), + }, + ls: sharedStorage.largeSegments ? { // @ts-ignore + k: Object.keys(sharedStorage.largeSegments.segmentCache).map(segmentName => ({ n: segmentName })), + } : undefined + }; + } else { // Server-side segments + prev[matchingKey] = { + ms: { // @ts-ignore + k: Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore + return storage.segments.segmentCache[segmentName].has(matchingKey) ? + prev!.concat({ n: segmentName }) : + prev; + }, []) + }, + ls: { + k: [] + } + }; + } + return prev; + }, {}) : + undefined + }; +} diff --git a/src/storages/setRolloutPlan.ts b/src/storages/setRolloutPlan.ts new file mode 100644 index 00000000..a8529231 --- /dev/null +++ b/src/storages/setRolloutPlan.ts @@ -0,0 +1,71 @@ +import SplitIO from '../../types/splitio'; +import { IRBSegmentsCacheSync, ISegmentsCacheSync, ISplitsCacheSync } from './types'; +import { ILogger } from '../logger/types'; +import { isObject } from '../utils/lang'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; +import { RolloutPlan } from './types'; + +/** + * Validates if the given rollout plan is valid. + */ +export function validateRolloutPlan(log: ILogger, settings: SplitIO.ISettings): RolloutPlan | undefined { + const { mode, initialRolloutPlan } = settings; + + if (isConsumerMode(mode)) { + log.warn('storage: initial rollout plan is ignored in consumer mode'); + return; + } + + if (isObject(initialRolloutPlan) && isObject((initialRolloutPlan as any).splitChanges)) return initialRolloutPlan as RolloutPlan; + + log.error('storage: invalid rollout plan provided'); + return; +} + +/** + * Sets the given synchronous storage with the provided rollout plan snapshot. + * If `matchingKey` is provided, the storage is handled as a client-side storage (segments and largeSegments are instances of MySegmentsCache). + * Otherwise, the storage is handled as a server-side storage (segments is an instance of SegmentsCache). + */ +export function setRolloutPlan(log: ILogger, rolloutPlan: RolloutPlan, storage: { splits?: ISplitsCacheSync, rbSegments?: IRBSegmentsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) { + const { splits, rbSegments, segments, largeSegments } = storage; + const { splitChanges: { ff, rbs } } = rolloutPlan; + + log.debug(`storage: set feature flags and segments${matchingKey ? 
` for key ${matchingKey}` : ''}`); + + if (splits && ff) { + splits.clear(); + splits.update(ff.d, [], ff.t); + } + + if (rbSegments && rbs) { + rbSegments.clear(); + rbSegments.update(rbs.d, [], rbs.t); + } + + const segmentChanges = rolloutPlan.segmentChanges; + if (matchingKey) { // add memberships data (client-side) + let memberships = rolloutPlan.memberships && rolloutPlan.memberships[matchingKey]; + if (!memberships && segmentChanges) { + memberships = { + ms: { + k: segmentChanges.filter(segment => { + return segment.added.indexOf(matchingKey) > -1; + }).map(segment => ({ n: segment.name })) + } + }; + } + + if (memberships) { + if (memberships.ms) segments.resetSegments(memberships.ms!); + if (memberships.ls && largeSegments) largeSegments.resetSegments(memberships.ls!); + } + } else { // add segments data (server-side) + if (segmentChanges) { + segments.clear(); + segmentChanges.forEach(segment => { + segments.update(segment.name, segment.added, segment.removed, segment.till); + }); + } + } +} diff --git a/src/storages/types.ts b/src/storages/types.ts index 97664de5..b1fa8081 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -1,5 +1,5 @@ import SplitIO from '../../types/splitio'; -import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse } from '../dtos/types'; +import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse, IMembershipsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { ISettings } from '../types'; @@ -522,3 +522,21 @@ export type IStorageAsyncFactory = SplitIO.StorageAsyncFactory & { readonly type: SplitIO.StorageType, (params: IStorageFactoryParams): IStorageAsync } + +export type RolloutPlan = { + /** + * Feature flags and rule-based segments. + */ + splitChanges: ISplitChangesResponse; + /** + * Optional map of matching keys to their memberships. + */ + memberships?: { + [matchingKey: string]: IMembershipsResponse; + }; + /** + * Optional list of standard segments. + * This property is ignored if `memberships` is provided. + */ + segmentChanges?: ISegmentChangesResponse[]; +}; diff --git a/src/types.ts b/src/types.ts index bdb0933c..ad3fa04c 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,6 +1,7 @@ import SplitIO from '../types/splitio'; import { ISplitFiltersValidation } from './dtos/types'; import { ILogger } from './logger/types'; +import { RolloutPlan } from './storages/types'; /** * SplitIO.ISettings interface extended with private properties for internal use @@ -10,6 +11,7 @@ export interface ISettings extends SplitIO.ISettings { __splitFiltersValidation: ISplitFiltersValidation; }; readonly log: ILogger; + readonly initialRolloutPlan?: RolloutPlan; } /** @@ -42,38 +44,3 @@ export interface IBasicClient extends SplitIO.IBasicClient { isClientSide?: boolean; key?: SplitIO.SplitKey; } -/** - * Defines the format of rollout plan data to preload the factory storage (cache). - */ -export interface PreloadedData { - /** - * Timestamp of the last moment the data was synchronized with Split servers. 
- * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content. - */ - // @TODO configurable expiration time policy? - lastUpdated: number; - /** - * Change number of the preloaded data. - * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content. - */ - since: number; - /** - * Map of feature flags to their stringified definitions. - */ - splitsData: { - [splitName: string]: string; - }; - /** - * Optional map of user keys to their list of segments. - */ - mySegmentsData?: { - [key: string]: string[]; - }; - /** - * Optional map of segments to their stringified definitions. - * This property is ignored if `mySegmentsData` was provided. - */ - segmentsData?: { - [segmentName: string]: string; - }; -} diff --git a/src/utils/inputValidation/__tests__/preloadedData.spec.ts b/src/utils/inputValidation/__tests__/preloadedData.spec.ts deleted file mode 100644 index 79f1d1a4..00000000 --- a/src/utils/inputValidation/__tests__/preloadedData.spec.ts +++ /dev/null @@ -1,157 +0,0 @@ -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; - -// Import the module mocking the logger. -import { validatePreloadedData } from '../preloadedData'; - -const method = 'some_method'; -const testCases = [ - // valid inputs - { - input: { lastUpdated: 10, since: 10, splitsData: {} }, - output: true, - warn: `${method}: preloadedData.splitsData doesn't contain feature flag definitions.` - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { 'some_key': [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { 'some_key': [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: {} }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: [] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: ['some_segment'] } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, segmentsData: {} }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, segmentsData: { some_segment: 'SEGMENT DEFINITION' } }, - output: true - }, - { - input: { lastUpdated: 10, since: 10, splitsData: { 'some_split': 'SPLIT DEFINITION' }, mySegmentsData: { some_key: ['some_segment'], some_other_key: ['some_segment'] }, segmentsData: { some_segment: 'SEGMENT DEFINITION', some_other_segment: 'SEGMENT DEFINITION' } }, - output: true - }, - { - // should be true, even using objects for strings and numbers or having extra properties - input: { ignoredProperty: 'IGNORED', lastUpdated: new Number(10), since: new Number(10), splitsData: { 'some_split': new String('SPLIT DEFINITION') }, mySegmentsData: { some_key: [new String('some_segment')] }, segmentsData: { some_segment: new String('SEGMENT DEFINITION') } }, - output: true - }, - - // invalid inputs - { - // should be false if preloadedData is not an object - input: undefined, - output: false, - error: `${method}: preloadedData must be an object.` 
- }, - { - // should be false if preloadedData is not an object - input: [], - output: false, - error: `${method}: preloadedData must be an object.` - }, - { - // should be false if lastUpdated property is invalid - input: { lastUpdated: undefined, since: 10, splitsData: {} }, - output: false, - error: `${method}: preloadedData.lastUpdated must be a positive number.` - }, - { - // should be false if lastUpdated property is invalid - input: { lastUpdated: -1, since: 10, splitsData: {} }, - output: false, - error: `${method}: preloadedData.lastUpdated must be a positive number.` - }, - { - // should be false if since property is invalid - input: { lastUpdated: 10, since: undefined, splitsData: {} }, - output: false, - error: `${method}: preloadedData.since must be a positive number.` - }, - { - // should be false if since property is invalid - input: { lastUpdated: 10, since: -1, splitsData: {} }, - output: false, - error: `${method}: preloadedData.since must be a positive number.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: undefined }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if splitsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: undefined } }, - output: false, - error: `${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.` - }, - { - // should be false if mySegmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, mySegmentsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.` - }, - { - // should be false if mySegmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, mySegmentsData: { some_key: undefined } }, - output: false, - error: `${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.` - }, - { - // should be false if segmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, segmentsData: ['DEFINITION'] }, - output: false, - error: `${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.` - }, - { - // should be false if segmentsData property is invalid - input: { lastUpdated: 10, since: 10, splitsData: { some_split: 'DEFINITION' }, segmentsData: { some_segment: undefined } }, - output: false, - error: `${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.` - } -]; - -test('INPUT VALIDATION for preloadedData', () => { - - for (let i = 0; i < testCases.length; i++) { - const testCase = testCases[i]; - expect(validatePreloadedData(loggerMock, testCase.input, method)).toBe(testCase.output); - - if (testCase.error) { - expect(loggerMock.error.mock.calls[0]).toEqual([testCase.error]); // Should log the error for the invalid preloadedData. 
- loggerMock.error.mockClear(); - } else { - expect(loggerMock.error).not.toBeCalled(); // Should not log any error. - } - - if (testCase.warn) { - expect(loggerMock.warn.mock.calls[0]).toEqual([testCase.warn]); // Should log the warning for the given preloadedData. - loggerMock.warn.mockClear(); - } else { - expect(loggerMock.warn).not.toBeCalled(); // Should not log any warning. - } - } -}); diff --git a/src/utils/inputValidation/index.ts b/src/utils/inputValidation/index.ts index 96cf4be6..eac9777d 100644 --- a/src/utils/inputValidation/index.ts +++ b/src/utils/inputValidation/index.ts @@ -10,5 +10,4 @@ export { validateTrafficType } from './trafficType'; export { validateIfNotDestroyed, validateIfOperational } from './isOperational'; export { validateSplitExistence } from './splitExistence'; export { validateTrafficTypeExistence } from './trafficTypeExistence'; -export { validatePreloadedData } from './preloadedData'; export { validateEvaluationOptions } from './eventProperties'; diff --git a/src/utils/inputValidation/preloadedData.ts b/src/utils/inputValidation/preloadedData.ts deleted file mode 100644 index f07ee432..00000000 --- a/src/utils/inputValidation/preloadedData.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { isObject, isString, isFiniteNumber } from '../lang'; -import { validateSplit } from './split'; -import { ILogger } from '../../logger/types'; - -function validateTimestampData(log: ILogger, maybeTimestamp: any, method: string, item: string) { - if (isFiniteNumber(maybeTimestamp) && maybeTimestamp > -1) return true; - log.error(`${method}: preloadedData.${item} must be a positive number.`); - return false; -} - -function validateSplitsData(log: ILogger, maybeSplitsData: any, method: string) { - if (isObject(maybeSplitsData)) { - const splitNames = Object.keys(maybeSplitsData); - if (splitNames.length === 0) log.warn(`${method}: preloadedData.splitsData doesn't contain feature flag definitions.`); - // @TODO in the future, consider handling the possibility of having parsed definitions of splits - if (splitNames.every(splitName => validateSplit(log, splitName, method) && isString(maybeSplitsData[splitName]))) return true; - } - log.error(`${method}: preloadedData.splitsData must be a map of feature flag names to their stringified definitions.`); - return false; -} - -function validateMySegmentsData(log: ILogger, maybeMySegmentsData: any, method: string) { - if (isObject(maybeMySegmentsData)) { - const userKeys = Object.keys(maybeMySegmentsData); - if (userKeys.every(userKey => { - const segmentNames = maybeMySegmentsData[userKey]; - // an empty list is valid - return Array.isArray(segmentNames) && segmentNames.every(segmentName => isString(segmentName)); - })) return true; - } - log.error(`${method}: preloadedData.mySegmentsData must be a map of user keys to their list of segment names.`); - return false; -} - -function validateSegmentsData(log: ILogger, maybeSegmentsData: any, method: string) { - if (isObject(maybeSegmentsData)) { - const segmentNames = Object.keys(maybeSegmentsData); - if (segmentNames.every(segmentName => isString(maybeSegmentsData[segmentName]))) return true; - } - log.error(`${method}: preloadedData.segmentsData must be a map of segment names to their stringified definitions.`); - return false; -} - -export function validatePreloadedData(log: ILogger, maybePreloadedData: any, method: string) { - if (!isObject(maybePreloadedData)) { - log.error(`${method}: preloadedData must be an object.`); - } else if ( - validateTimestampData(log, 
maybePreloadedData.lastUpdated, method, 'lastUpdated') && - validateTimestampData(log, maybePreloadedData.since, method, 'since') && - validateSplitsData(log, maybePreloadedData.splitsData, method) && - (!maybePreloadedData.mySegmentsData || validateMySegmentsData(log, maybePreloadedData.mySegmentsData, method)) && - (!maybePreloadedData.segmentsData || validateSegmentsData(log, maybePreloadedData.segmentsData, method)) - ) { - return true; - } - return false; -} diff --git a/types/splitio.d.ts b/types/splitio.d.ts index e85ab01b..3a9fe72d 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -350,6 +350,11 @@ interface IClientSideSyncSharedSettings extends IClientSideSharedSettings, ISync * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} */ features?: SplitIO.MockedFeaturesMap; + /** + * Rollout plan object (i.e., feature flags and segment definitions) to initialize the SDK storage with. If provided and valid, the SDK will be ready from cache immediately. + * This object is derived from calling the Node.js SDK’s `getRolloutPlan` method. + */ + initialRolloutPlan?: SplitIO.RolloutPlan; /** * SDK Startup settings. */ @@ -555,6 +560,7 @@ declare namespace SplitIO { eventsFirstPushWindow: number; }; readonly storage: StorageSyncFactory | StorageAsyncFactory | StorageOptions; + readonly initialRolloutPlan?: SplitIO.RolloutPlan; readonly urls: { events: string; sdk: string; @@ -1020,7 +1026,28 @@ declare namespace SplitIO { type: NodeSyncStorage | NodeAsyncStorage | BrowserStorage; prefix?: string; options?: Object; - } + }; + /** + * A JSON-serializable plain object that defines the format of rollout plan data to preload the SDK cache with feature flags and segments. + */ + type RolloutPlan = Object; + /** + * Options for the `factory.getRolloutPlan` method. + */ + type RolloutPlanOptions = { + /** + * Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys. + * + * @defaultValue `undefined` + */ + keys?: SplitKey[]; + /** + * Optional flag to expose segments data in the rollout plan snapshot. + * + * @defaultValue `false` + */ + exposeSegments?: boolean; + }; /** * Impression listener interface. This is the interface that needs to be implemented * by the element you provide to the SDK as impression listener. @@ -1043,7 +1070,7 @@ declare namespace SplitIO { type IntegrationFactory = { readonly type: string; (params: any): (Integration | void); - } + }; /** * A pair of user key and it's trafficType, required for tracking valid Split events. */ @@ -1564,6 +1591,13 @@ declare namespace SplitIO { * @returns The manager instance. */ manager(): IManager; + /** + * Returns the current snapshot of the SDK rollout plan in cache. + * + * @param keys - Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys, rather than the complete segments data. + * @returns The current snapshot of the SDK rollout plan. + */ + getRolloutPlan(options?: RolloutPlanOptions): RolloutPlan; } /** * This represents the interface for the SDK instance for server-side with asynchronous storage. 
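Taken together, the `types/splitio.d.ts` additions above describe a producer/consumer pair: a standalone server-side factory exposes `getRolloutPlan(options?)`, and a standalone client-side factory accepts the resulting `RolloutPlan` through the `initialRolloutPlan` setting so it starts ready from cache. The sketch below illustrates that handoff under a few assumptions: it uses the `SplitFactory` entry points of the Node.js and Browser SDKs that consume this commons package, and the SDK keys and user key are placeholders — only `getRolloutPlan`, `RolloutPlanOptions`, and `initialRolloutPlan` come from this patch series.

```js
// Server side (standalone server-side SDK): snapshot the rollout plan for a given user key.
const { SplitFactory } = require('@splitsoftware/splitio'); // assumed Node.js SDK entry point

const serverFactory = SplitFactory({ core: { authorizationKey: 'YOUR_SERVER_SIDE_SDK_KEY' } });

async function buildPlanFor(userKey) {
  await serverFactory.client().ready(); // snapshot only once the storage is populated
  // `keys` scopes the snapshot to the memberships of the given keys (no full segments data).
  return serverFactory.getRolloutPlan({ keys: [userKey] });
}

// Client side (standalone client-side SDK): preload the storage with the received plan.
// RolloutPlan is a JSON-serializable plain object, so it can travel in an HTML payload,
// an API response, etc.
const { SplitFactory: BrowserFactory } = require('@splitsoftware/splitio-browserjs'); // assumed Browser SDK entry point

function createPreloadedBrowserFactory(userKey, rolloutPlan) {
  return BrowserFactory({
    core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: userKey },
    initialRolloutPlan: rolloutPlan // plan produced by buildPlanFor(userKey) on the server
  });
}
```

If the provided plan is valid, the client-side factory is ready from cache immediately (per the `initialRolloutPlan` JSDoc above), so first evaluations do not need to wait for the initial fetch.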
From 705057d0a6f83d78c2288ea3f0020e43d4266309 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Fri, 5 Sep 2025 16:17:51 -0300 Subject: [PATCH 18/20] rc --- CHANGES.txt | 2 +- package-lock.json | 4 ++-- package.json | 2 +- src/storages/getRolloutPlan.ts | 2 -- 4 files changed, 4 insertions(+), 6 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 50aef19e..4952c979 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,4 +1,4 @@ -2.5.0 (August XX, 2025) +2.5.0 (September 9, 2025) - Added `factory.getRolloutPlan()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. - Added `initialRolloutPlan` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. diff --git a/package-lock.json b/package-lock.json index f9c7ba15..67e40526 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.0", + "version": "2.5.0-rc.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.0", + "version": "2.5.0-rc.1", "license": "Apache-2.0", "dependencies": { "@types/ioredis": "^4.28.0", diff --git a/package.json b/package.json index 47c53107..ce1cc17d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.0", + "version": "2.5.0-rc.1", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts index d4ac25d8..80061426 100644 --- a/src/storages/getRolloutPlan.ts +++ b/src/storages/getRolloutPlan.ts @@ -8,8 +8,6 @@ import { IMembershipsResponse, IMySegmentsResponse } from '../dtos/types'; /** * Gets the rollout plan snapshot from the given synchronous storage. - * If `keys` are provided, the memberships for those keys is returned, to protect segments data. - * Otherwise, the segments data is returned. */ export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: SplitIO.RolloutPlanOptions = {}): RolloutPlan { From 09263b2854480dba3394a64023288ab3d3b881f4 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 10 Sep 2025 12:08:05 -0300 Subject: [PATCH 19/20] Stable version --- CHANGES.txt | 2 +- package-lock.json | 4 ++-- package.json | 2 +- src/storages/getRolloutPlan.ts | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGES.txt b/CHANGES.txt index 4952c979..4a80a5e8 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,4 +1,4 @@ -2.5.0 (September 9, 2025) +2.5.0 (September 10, 2025) - Added `factory.getRolloutPlan()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage. - Added `initialRolloutPlan` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan. 
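The comment dropped from `src/storages/getRolloutPlan.ts` in patch 18/20 summarized the intent of the two `RolloutPlanOptions` fields: `keys` returns memberships for the given keys so that full segments data stays out of the payload, while `exposeSegments` opts into including the segments data. A minimal sketch of the three call shapes, assuming an already-ready standalone server-side factory named `factory`:

```js
// Memberships-only snapshot for specific keys: includes which segments each key belongs to,
// but not the segment definitions themselves.
const planForKeys = factory.getRolloutPlan({ keys: ['user-123', 'user-456'] });

// Snapshot including segments data (exposeSegments defaults to false), e.g. to seed another
// server-side storage rather than a browser client.
const planWithSegments = factory.getRolloutPlan({ exposeSegments: true });

// No options: per the debug log in getRolloutPlan.ts, the snapshot carries the feature flag
// definitions only, with neither memberships nor segments data.
const planFlagsOnly = factory.getRolloutPlan();
```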
diff --git a/package-lock.json b/package-lock.json index 67e40526..14125ac1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.1", + "version": "2.5.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.1", + "version": "2.5.0", "license": "Apache-2.0", "dependencies": { "@types/ioredis": "^4.28.0", diff --git a/package.json b/package.json index ce1cc17d..155f650a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-commons", - "version": "2.5.0-rc.1", + "version": "2.5.0", "description": "Split JavaScript SDK common components", "main": "cjs/index.js", "module": "esm/index.js", diff --git a/src/storages/getRolloutPlan.ts b/src/storages/getRolloutPlan.ts index 80061426..40e6ea84 100644 --- a/src/storages/getRolloutPlan.ts +++ b/src/storages/getRolloutPlan.ts @@ -14,7 +14,7 @@ export function getRolloutPlan(log: ILogger, storage: IStorageSync, options: Spl const { keys, exposeSegments } = options; const { splits, segments, rbSegments } = storage; - log.debug(`storage: get feature flags${keys ? `, and memberships for keys ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); + log.debug(`storage: get feature flags${keys ? `, and memberships for keys: ${keys}` : ''}${exposeSegments ? ', and segments' : ''}`); return { splitChanges: { From cf45f70792dbe1b15e1f6633e176936ed8887df7 Mon Sep 17 00:00:00 2001 From: Emiliano Sanchez Date: Wed, 10 Sep 2025 16:13:05 -0300 Subject: [PATCH 20/20] Fix type definition comment --- types/splitio.d.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 3a9fe72d..2680f8ef 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1594,7 +1594,14 @@ declare namespace SplitIO { /** * Returns the current snapshot of the SDK rollout plan in cache. * - * @param keys - Optional list of keys to generate the rollout plan snapshot with the memberships of the given keys, rather than the complete segments data. + * Wait for the SDK client to be ready before calling this method. + * + * ```js + * await factory.client().ready(); + * const rolloutPlan = factory.getRolloutPlan(); + * ``` + * + * @param options - An object of type RolloutPlanOptions for advanced options. * @returns The current snapshot of the SDK rollout plan. */ getRolloutPlan(options?: RolloutPlanOptions): RolloutPlan;
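Patch 20/20 adds the guidance to wait for SDK readiness before calling `getRolloutPlan`. A natural place to combine that with the `keys` option is a per-request handler that snapshots the plan for the requesting user and ships it to the browser. The sketch below is illustrative only: it assumes an already-created standalone server-side `factory`, and the handler shape, the `userKeyFromRequest` helper, and the inline-script transport are assumptions, not part of the SDK.

```js
// Hypothetical request handler on the server: waits for readiness (as the JSDoc above
// recommends), snapshots a user-scoped plan, and inlines it for the browser SDK to pick
// up via `initialRolloutPlan`.
async function renderAppShell(req, res) {
  await factory.client().ready();

  const userKey = userKeyFromRequest(req); // assumed helper resolving the end-user key
  const rolloutPlan = factory.getRolloutPlan({ keys: [userKey] });

  // RolloutPlan is documented as a JSON-serializable plain object; a real app should also
  // escape the serialized payload appropriately before inlining it in HTML.
  res.send(
    `<script>window.__SPLIT_ROLLOUT_PLAN__ = ${JSON.stringify(rolloutPlan)};</script>` +
    '<script src="/app.js"></script>'
  );
}
```

On the browser side, the value of `window.__SPLIT_ROLLOUT_PLAN__` would then be passed as the `initialRolloutPlan` setting shown in the earlier sketch.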