diff --git a/src/dtos/types.ts b/src/dtos/types.ts index f03d142d..b2fc6068 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -2,8 +2,6 @@ import SplitIO from '../../types/splitio'; export type MaybeThenable = T | Promise -/** Split Matchers */ - export type IMatcherDataType = null | 'DATETIME' | 'NUMBER' export interface IUnaryNumericMatcherData { @@ -39,7 +37,7 @@ export interface IDependencyMatcherData { treatments: string[] } -interface ISplitMatcherBase { +interface IDefinitionMatcherBase { matcherType: string negate?: boolean keySelector?: null | { @@ -57,144 +55,143 @@ interface ISplitMatcherBase { betweenStringMatcherData?: null | IBetweenStringMatcherData } -interface IAllKeysMatcher extends ISplitMatcherBase { +interface IAllKeysMatcher extends IDefinitionMatcherBase { matcherType: 'ALL_KEYS' } -interface IInSegmentMatcher extends ISplitMatcherBase { +interface IInSegmentMatcher extends IDefinitionMatcherBase { matcherType: 'IN_SEGMENT', userDefinedSegmentMatcherData: IInSegmentMatcherData } -interface IInRBSegmentMatcher extends ISplitMatcherBase { +interface IInRBSegmentMatcher extends IDefinitionMatcherBase { matcherType: 'IN_RULE_BASED_SEGMENT', userDefinedSegmentMatcherData: IInSegmentMatcherData } -interface IInLargeSegmentMatcher extends ISplitMatcherBase { +interface IInLargeSegmentMatcher extends IDefinitionMatcherBase { matcherType: 'IN_LARGE_SEGMENT', userDefinedLargeSegmentMatcherData: IInLargeSegmentMatcherData } -interface IWhitelistMatcher extends ISplitMatcherBase { +interface IWhitelistMatcher extends IDefinitionMatcherBase { matcherType: 'WHITELIST', whitelistMatcherData: IWhitelistMatcherData } -interface IEqualToMatcher extends ISplitMatcherBase { +interface IEqualToMatcher extends IDefinitionMatcherBase { matcherType: 'EQUAL_TO', unaryNumericMatcherData: IUnaryNumericMatcherData } -interface IGreaterThanOrEqualToMatcher extends ISplitMatcherBase { +interface IGreaterThanOrEqualToMatcher extends IDefinitionMatcherBase { 
matcherType: 'GREATER_THAN_OR_EQUAL_TO', unaryNumericMatcherData: IUnaryNumericMatcherData } -interface ILessThanOrEqualToMatcher extends ISplitMatcherBase { +interface ILessThanOrEqualToMatcher extends IDefinitionMatcherBase { matcherType: 'LESS_THAN_OR_EQUAL_TO', unaryNumericMatcherData: IUnaryNumericMatcherData } -interface IBetweenMatcher extends ISplitMatcherBase { +interface IBetweenMatcher extends IDefinitionMatcherBase { matcherType: 'BETWEEN' betweenMatcherData: IBetweenMatcherData } -interface IEqualToSetMatcher extends ISplitMatcherBase { +interface IEqualToSetMatcher extends IDefinitionMatcherBase { matcherType: 'EQUAL_TO_SET', whitelistMatcherData: IWhitelistMatcherData } -interface IContainsAnyOfSetMatcher extends ISplitMatcherBase { +interface IContainsAnyOfSetMatcher extends IDefinitionMatcherBase { matcherType: 'CONTAINS_ANY_OF_SET', whitelistMatcherData: IWhitelistMatcherData } -interface IContainsAllOfSetMatcher extends ISplitMatcherBase { +interface IContainsAllOfSetMatcher extends IDefinitionMatcherBase { matcherType: 'CONTAINS_ALL_OF_SET', whitelistMatcherData: IWhitelistMatcherData } -interface IPartOfSetMatcher extends ISplitMatcherBase { +interface IPartOfSetMatcher extends IDefinitionMatcherBase { matcherType: 'PART_OF_SET', whitelistMatcherData: IWhitelistMatcherData } -interface IStartsWithMatcher extends ISplitMatcherBase { +interface IStartsWithMatcher extends IDefinitionMatcherBase { matcherType: 'STARTS_WITH', whitelistMatcherData: IWhitelistMatcherData } -interface IEndsWithMatcher extends ISplitMatcherBase { +interface IEndsWithMatcher extends IDefinitionMatcherBase { matcherType: 'ENDS_WITH', whitelistMatcherData: IWhitelistMatcherData } -interface IContainsStringMatcher extends ISplitMatcherBase { +interface IContainsStringMatcher extends IDefinitionMatcherBase { matcherType: 'CONTAINS_STRING', whitelistMatcherData: IWhitelistMatcherData } -interface IInSplitTreatmentMatcher extends ISplitMatcherBase { +interface 
IInSplitTreatmentMatcher extends IDefinitionMatcherBase { matcherType: 'IN_SPLIT_TREATMENT', dependencyMatcherData: IDependencyMatcherData, } -interface IEqualToBooleanMatcher extends ISplitMatcherBase { +interface IEqualToBooleanMatcher extends IDefinitionMatcherBase { matcherType: 'EQUAL_TO_BOOLEAN', booleanMatcherData: boolean } -interface IMatchesStringMatcher extends ISplitMatcherBase { +interface IMatchesStringMatcher extends IDefinitionMatcherBase { matcherType: 'MATCHES_STRING', stringMatcherData: string } -interface IEqualToSemverMatcher extends ISplitMatcherBase { +interface IEqualToSemverMatcher extends IDefinitionMatcherBase { matcherType: 'EQUAL_TO_SEMVER', stringMatcherData: string } -interface IGreaterThanOrEqualToSemverMatcher extends ISplitMatcherBase { +interface IGreaterThanOrEqualToSemverMatcher extends IDefinitionMatcherBase { matcherType: 'GREATER_THAN_OR_EQUAL_TO_SEMVER', stringMatcherData: string } -interface ILessThanOrEqualToSemverMatcher extends ISplitMatcherBase { +interface ILessThanOrEqualToSemverMatcher extends IDefinitionMatcherBase { matcherType: 'LESS_THAN_OR_EQUAL_TO_SEMVER', stringMatcherData: string } -interface IBetweenSemverMatcher extends ISplitMatcherBase { +interface IBetweenSemverMatcher extends IDefinitionMatcherBase { matcherType: 'BETWEEN_SEMVER' betweenStringMatcherData: IBetweenStringMatcherData } -interface IInListSemverMatcher extends ISplitMatcherBase { +interface IInListSemverMatcher extends IDefinitionMatcherBase { matcherType: 'IN_LIST_SEMVER', whitelistMatcherData: IWhitelistMatcherData } -export type ISplitMatcher = IAllKeysMatcher | IInSegmentMatcher | IWhitelistMatcher | IEqualToMatcher | IGreaterThanOrEqualToMatcher | +export type IDefinitionMatcher = IAllKeysMatcher | IInSegmentMatcher | IWhitelistMatcher | IEqualToMatcher | IGreaterThanOrEqualToMatcher | ILessThanOrEqualToMatcher | IBetweenMatcher | IEqualToSetMatcher | IContainsAnyOfSetMatcher | IContainsAllOfSetMatcher | IPartOfSetMatcher | 
IStartsWithMatcher | IEndsWithMatcher | IContainsStringMatcher | IInSplitTreatmentMatcher | IEqualToBooleanMatcher | IMatchesStringMatcher | IEqualToSemverMatcher | IGreaterThanOrEqualToSemverMatcher | ILessThanOrEqualToSemverMatcher | IBetweenSemverMatcher | IInListSemverMatcher | IInLargeSegmentMatcher | IInRBSegmentMatcher -/** Split object */ -export interface ISplitPartition { +export interface IDefinitionPartition { treatment: string size: number } -export interface ISplitCondition { +export interface IDefinitionCondition { matcherGroup: { combiner: 'AND', - matchers: ISplitMatcher[] + matchers: IDefinitionMatcher[] } - partitions?: ISplitPartition[] + partitions?: IDefinitionPartition[] label?: string conditionType?: 'ROLLOUT' | 'WHITELIST' } @@ -204,49 +201,44 @@ export interface IExcludedSegment { name: string, } -export interface IRBSegment { - name: string, - changeNumber: number, - status?: 'ACTIVE' | 'ARCHIVED', - conditions?: ISplitCondition[] | null, +export interface TargetingEntity { + name: string; + changeNumber: number; + status: 'ACTIVE' | 'ARCHIVED'; + conditions: IDefinitionCondition[]; +} + +export interface IRBSegment extends TargetingEntity { excluded?: { keys?: string[] | null, segments?: IExcludedSegment[] | null } | null } -// @TODO: rename to IDefinition (Configs and Feature Flags are definitions) -export interface ISplit { - name: string, - changeNumber: number, - status?: 'ACTIVE' | 'ARCHIVED', - conditions: ISplitCondition[], +export interface IDefinition extends TargetingEntity { + trafficTypeName: string; + sets?: string[]; + impressionsDisabled?: boolean; prerequisites?: null | { n: string, ts: string[] - }[] - killed: boolean, - defaultTreatment: string, - trafficTypeName: string, - seed: number, - trafficAllocation?: number, - trafficAllocationSeed?: number + }[]; + killed: boolean; + defaultTreatment: string; + seed: number; + trafficAllocation?: number; + trafficAllocationSeed?: number; configurations?: { [treatmentName: 
string]: string | SplitIO.JsonObject - }, - sets?: string[], - impressionsDisabled?: boolean + }; } -// Split definition used in offline mode -export type ISplitPartial = Pick - /** Interface of the parsed JSON response of `/splitChanges` */ -export interface ISplitChangesResponse { +export interface IDefinitionChangesResponse { ff?: { t: number, s?: number, - d: ISplit[] + d: IDefinition[] }, rbs?: { t: number, diff --git a/src/evaluator/Engine.ts b/src/evaluator/Engine.ts index 4228316f..3ef683e5 100644 --- a/src/evaluator/Engine.ts +++ b/src/evaluator/Engine.ts @@ -4,10 +4,10 @@ import { keyParser } from '../utils/key'; import { thenable } from '../utils/promise/thenable'; import { NO_CONDITION_MATCH, SPLIT_ARCHIVED, SPLIT_KILLED, PREREQUISITES_NOT_MET } from '../utils/labels'; import { CONTROL } from '../utils/constants'; -import { ISplit, MaybeThenable } from '../dtos/types'; +import { IDefinition, MaybeThenable } from '../dtos/types'; import SplitIO from '../../types/splitio'; import { IStorageAsync, IStorageSync } from '../storages/types'; -import { IEvaluation, IEvaluationResult, ISplitEvaluator } from './types'; +import { IEvaluation, IEvaluationResult, IDefinitionEvaluator } from './types'; import { ILogger } from '../logger/types'; import { ENGINE_DEFAULT } from '../logger/constants'; import { prerequisitesMatcherContext } from './matchers/prerequisites'; @@ -19,7 +19,7 @@ function evaluationResult(result: IEvaluation | undefined, defaultTreatment: str }; } -export function engineParser(log: ILogger, split: ISplit, storage: IStorageSync | IStorageAsync) { +export function engineParser(log: ILogger, split: IDefinition, storage: IStorageSync | IStorageAsync) { const { killed, seed, trafficAllocation, trafficAllocationSeed, status, conditions, prerequisites } = split; const defaultTreatment = isString(split.defaultTreatment) ? 
split.defaultTreatment : CONTROL; @@ -29,7 +29,7 @@ export function engineParser(log: ILogger, split: ISplit, storage: IStorageSync return { - getTreatment(key: SplitIO.SplitKey, attributes: SplitIO.Attributes | undefined, splitEvaluator: ISplitEvaluator): MaybeThenable { + getTreatment(key: SplitIO.SplitKey, attributes: SplitIO.Attributes | undefined, splitEvaluator: IDefinitionEvaluator): MaybeThenable { const parsedKey = keyParser(key); diff --git a/src/evaluator/__tests__/evaluate-feature.spec.ts b/src/evaluator/__tests__/evaluate-feature.spec.ts index 85db31e7..500bf5ca 100644 --- a/src/evaluator/__tests__/evaluate-feature.spec.ts +++ b/src/evaluator/__tests__/evaluate-feature.spec.ts @@ -1,10 +1,10 @@ import { evaluateFeature } from '../index'; -import { EXCEPTION, NOT_IN_SPLIT, SPLIT_ARCHIVED, SPLIT_KILLED, SPLIT_NOT_FOUND } from '../../utils/labels'; +import { EXCEPTION, NOT_IN_SPLIT, SPLIT_ARCHIVED, SPLIT_KILLED, DEFINITION_NOT_FOUND } from '../../utils/labels'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; -import { ISplit } from '../../dtos/types'; +import { IDefinition } from '../../dtos/types'; import { IStorageSync } from '../../storages/types'; -const splitsMock: Record = { +const splitsMock: Record = { regular: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on', 'seed': 1684183541, 'configurations': {}, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 
100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, config: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on', 'seed': 1684183541, 'configurations': { 'on': "{color:'black'}" }, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, killed: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on2', 'seed': 1684183541, 'configurations': {}, 'status': 'ACTIVE', 'killed': true, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, @@ -53,7 +53,7 @@ test('EVALUATOR / should return right label, treatment and config if storage ret config: '{color:\'black\'}', changeNumber: 1487277320548 }; const expectedOutputControl = { - treatment: 'control', label: SPLIT_NOT_FOUND, config: null + treatment: 'control', 
label: DEFINITION_NOT_FOUND, config: null }; const evaluationWithConfig = evaluateFeature( diff --git a/src/evaluator/__tests__/evaluate-features.spec.ts b/src/evaluator/__tests__/evaluate-features.spec.ts index 45832bd0..d4225cc2 100644 --- a/src/evaluator/__tests__/evaluate-features.spec.ts +++ b/src/evaluator/__tests__/evaluate-features.spec.ts @@ -1,11 +1,11 @@ import { evaluateFeatures, evaluateFeaturesByFlagSets } from '../index'; -import { EXCEPTION, NOT_IN_SPLIT, SPLIT_ARCHIVED, SPLIT_KILLED, SPLIT_NOT_FOUND } from '../../utils/labels'; +import { EXCEPTION, NOT_IN_SPLIT, SPLIT_ARCHIVED, SPLIT_KILLED, DEFINITION_NOT_FOUND } from '../../utils/labels'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import { WARN_FLAGSET_WITHOUT_FLAGS } from '../../logger/constants'; -import { ISplit } from '../../dtos/types'; +import { IDefinition } from '../../dtos/types'; import { IStorageSync } from '../../storages/types'; -const splitsMock: Record = { +const splitsMock: Record = { regular: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on', 'seed': 1684183541, 'configurations': {}, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, config: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on', 'seed': 1684183541, 
'configurations': { 'on': "{color:'black'}" }, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, killed: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on2', 'seed': 1684183541, 'configurations': {}, 'status': 'ACTIVE', 'killed': true, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, @@ -33,7 +33,7 @@ const mockStorage = { return names.reduce((acc, name) => { acc[name] = this.getSplit(name); return acc; - }, {} as Record); + }, {} as Record); }, getNamesByFlagSets(flagSets: string[]) { return flagSets.map(flagset => flagSetsMock[flagset] || new Set()); @@ -71,7 +71,7 @@ test('EVALUATOR - Multiple evaluations at once / should return right labels, tre config: '{color:\'black\'}', changeNumber: 1487277320548 }, not_existent_split: { - treatment: 'control', label: SPLIT_NOT_FOUND, config: null + 
treatment: 'control', label: DEFINITION_NOT_FOUND, config: null }, }; @@ -122,7 +122,7 @@ describe('EVALUATOR - Multiple evaluations at once by flag sets', () => { config: '{color:\'black\'}', changeNumber: 1487277320548 }, not_existent_split: { - treatment: 'control', label: SPLIT_NOT_FOUND, config: null + treatment: 'control', label: DEFINITION_NOT_FOUND, config: null }, }; diff --git a/src/evaluator/combiners/and.ts b/src/evaluator/combiners/and.ts index fd239753..a2547c25 100644 --- a/src/evaluator/combiners/and.ts +++ b/src/evaluator/combiners/and.ts @@ -2,11 +2,11 @@ import { findIndex } from '../../utils/lang'; import { ILogger } from '../../logger/types'; import { thenable } from '../../utils/promise/thenable'; import { MaybeThenable } from '../../dtos/types'; -import { ISplitEvaluator } from '../types'; +import { IDefinitionEvaluator } from '../types'; import { ENGINE_COMBINER_AND } from '../../logger/constants'; import SplitIO from '../../../types/splitio'; -export function andCombinerContext(log: ILogger, matchers: Array<(key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) => MaybeThenable>) { +export function andCombinerContext(log: ILogger, matchers: Array<(key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) => MaybeThenable>) { function andResults(results: boolean[]): boolean { // Array.prototype.every is supported by target environments @@ -16,7 +16,7 @@ export function andCombinerContext(log: ILogger, matchers: Array<(key: SplitIO.S return hasMatchedAll; } - return function andCombiner(key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator): MaybeThenable { + return function andCombiner(key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator): MaybeThenable { const matcherResults = matchers.map(matcher => matcher(key, attributes, splitEvaluator)); // If any matching result is a thenable we should 
use Promise.all diff --git a/src/evaluator/combiners/ifelseif.ts b/src/evaluator/combiners/ifelseif.ts index aaba4b27..a28013c6 100644 --- a/src/evaluator/combiners/ifelseif.ts +++ b/src/evaluator/combiners/ifelseif.ts @@ -4,7 +4,7 @@ import { thenable } from '../../utils/promise/thenable'; import { UNSUPPORTED_MATCHER_TYPE } from '../../utils/labels'; import { CONTROL } from '../../utils/constants'; import SplitIO from '../../../types/splitio'; -import { IEvaluation, IEvaluator, ISplitEvaluator } from '../types'; +import { IEvaluation, IEvaluator, IDefinitionEvaluator } from '../types'; import { ENGINE_COMBINER_IFELSEIF, ENGINE_COMBINER_IFELSEIF_NO_TREATMENT, ERROR_ENGINE_COMBINER_IFELSEIF } from '../../logger/constants'; export function ifElseIfCombinerContext(log: ILogger, predicates: IEvaluator[]): IEvaluator { @@ -33,7 +33,7 @@ export function ifElseIfCombinerContext(log: ILogger, predicates: IEvaluator[]): return undefined; } - function ifElseIfCombiner(key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) { + function ifElseIfCombiner(key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) { // In Async environments we are going to have async predicates. There is none way to know // before hand so we need to evaluate all the predicates, verify for thenables, and finally, // define how to return the treatment (wrap result into a Promise or not). 
diff --git a/src/evaluator/condition/index.ts b/src/evaluator/condition/index.ts index 5facaa5c..996a38f1 100644 --- a/src/evaluator/condition/index.ts +++ b/src/evaluator/condition/index.ts @@ -2,7 +2,7 @@ import { getTreatment, shouldApplyRollout } from './engineUtils'; import { thenable } from '../../utils/promise/thenable'; import { NOT_IN_SPLIT } from '../../utils/labels'; import { MaybeThenable } from '../../dtos/types'; -import { IEvaluation, IEvaluator, ISplitEvaluator } from '../types'; +import { IEvaluation, IEvaluator, IDefinitionEvaluator } from '../types'; import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; @@ -22,9 +22,9 @@ function match(log: ILogger, matchingResult: boolean, bucketingKey: string | und } // Condition factory -export function conditionContext(log: ILogger, matcherEvaluator: (key: SplitIO.SplitKeyObject, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) => MaybeThenable, treatments?: { getTreatmentFor: (x: number) => string }, label?: string, conditionType?: 'ROLLOUT' | 'WHITELIST'): IEvaluator { +export function conditionContext(log: ILogger, matcherEvaluator: (key: SplitIO.SplitKeyObject, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) => MaybeThenable, treatments?: { getTreatmentFor: (x: number) => string }, label?: string, conditionType?: 'ROLLOUT' | 'WHITELIST'): IEvaluator { - return function conditionEvaluator(key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) { + return function conditionEvaluator(key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) { // Whitelisting has more priority than traffic allocation, so we don't apply this filtering to those conditions. 
if (conditionType === 'ROLLOUT' && !shouldApplyRollout(trafficAllocation!, key.bucketingKey, trafficAllocationSeed!)) { diff --git a/src/evaluator/index.ts b/src/evaluator/index.ts index 4dbdaa58..18f69b4c 100644 --- a/src/evaluator/index.ts +++ b/src/evaluator/index.ts @@ -1,8 +1,8 @@ import { engineParser } from './Engine'; import { thenable } from '../utils/promise/thenable'; -import { EXCEPTION, NO_CONDITION_MATCH, SPLIT_NOT_FOUND } from '../utils/labels'; +import { EXCEPTION, NO_CONDITION_MATCH, DEFINITION_NOT_FOUND } from '../utils/labels'; import { CONTROL } from '../utils/constants'; -import { ISplit, MaybeThenable } from '../dtos/types'; +import { IDefinition, MaybeThenable } from '../dtos/types'; import { IStorageAsync, IStorageSync } from '../storages/types'; import { IEvaluationResult } from './types'; import SplitIO from '../../types/splitio'; @@ -18,14 +18,14 @@ const EVALUATION_EXCEPTION = { let EVALUATION_NOT_FOUND = { treatment: CONTROL, - label: SPLIT_NOT_FOUND, + label: DEFINITION_NOT_FOUND, config: null }; -function treatmentsException(splitNames: string[]) { +function treatmentsException(definitionNames: string[]) { const evaluations: Record = {}; - splitNames.forEach(splitName => { - evaluations[splitName] = EVALUATION_EXCEPTION; + definitionNames.forEach(definitionName => { + evaluations[definitionName] = EVALUATION_EXCEPTION; }); return evaluations; } @@ -33,25 +33,25 @@ function treatmentsException(splitNames: string[]) { export function evaluateFeature( log: ILogger, key: SplitIO.SplitKey, - splitName: string, + definitionName: string, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync, options?: SplitIO.EvaluationOptions ): MaybeThenable { - let parsedSplit; + let definition; try { - parsedSplit = storage.splits.getSplit(splitName); + definition = storage.splits.getSplit(definitionName); } catch (e) { // Exception on sync `getSplit` storage. Not possible ATM with InMemory and InLocal storages. 
return EVALUATION_EXCEPTION; } - if (thenable(parsedSplit)) { - return parsedSplit.then((split) => getEvaluation( + if (thenable(definition)) { + return definition.then((definition) => getEvaluation( log, key, - split, + definition, attributes, storage, options, @@ -65,7 +65,7 @@ export function evaluateFeature( return getEvaluation( log, key, - parsedSplit, + definition, attributes, storage, options, @@ -75,28 +75,28 @@ export function evaluateFeature( export function evaluateFeatures( log: ILogger, key: SplitIO.SplitKey, - splitNames: string[], + definitionNames: string[], attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync, options?: SplitIO.EvaluationOptions, ): MaybeThenable> { - let parsedSplits; + let definitions; try { - parsedSplits = storage.splits.getSplits(splitNames); + definitions = storage.splits.getSplits(definitionNames); } catch (e) { // Exception on sync `getSplits` storage. Not possible ATM with InMemory and InLocal storages. - return treatmentsException(splitNames); + return treatmentsException(definitionNames); } - return thenable(parsedSplits) ? - parsedSplits.then(splits => getEvaluations(log, key, splitNames, splits, attributes, storage, options)) + return thenable(definitions) ? + definitions.then(definitions => getEvaluations(log, key, definitionNames, definitions, attributes, storage, options)) .catch(() => { // Exception on async `getSplits` storage. 
For example, when the storage is redis or // pluggable and there is a connection issue and we can't retrieve the split to be evaluated - return treatmentsException(splitNames); + return treatmentsException(definitionNames); }) : - getEvaluations(log, key, splitNames, parsedSplits, attributes, storage, options); + getEvaluations(log, key, definitionNames, definitions, attributes, storage, options); } export function evaluateFeaturesByFlagSets( @@ -146,32 +146,31 @@ export function evaluateFeaturesByFlagSets( function getEvaluation( log: ILogger, key: SplitIO.SplitKey, - splitJSON: ISplit | null, + definition: IDefinition | null, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync, options?: SplitIO.EvaluationOptions, ): MaybeThenable { - - if (splitJSON) { - const split = engineParser(log, splitJSON, storage); + if (definition) { + const split = engineParser(log, definition, storage); const evaluation = split.getTreatment(key, attributes, evaluateFeature); // If the storage is async and the evaluated flag uses segments or dependencies, evaluation is thenable if (thenable(evaluation)) { return evaluation.then(result => { - result.changeNumber = splitJSON.changeNumber; - result.config = splitJSON.configurations && splitJSON.configurations[result.treatment] || null; + result.changeNumber = definition.changeNumber; + result.config = definition.configurations && definition.configurations[result.treatment] || null; // @ts-expect-error impressionsDisabled is not exposed in the public typings yet. 
- result.impressionsDisabled = options?.impressionsDisabled || splitJSON.impressionsDisabled; + result.impressionsDisabled = options?.impressionsDisabled || definition.impressionsDisabled; return result; }); } else { - evaluation.changeNumber = splitJSON.changeNumber; - evaluation.config = splitJSON.configurations && splitJSON.configurations[evaluation.treatment] || null; + evaluation.changeNumber = definition.changeNumber; + evaluation.config = definition.configurations && definition.configurations[evaluation.treatment] || null; // @ts-expect-error impressionsDisabled is not exposed in the public typings yet. - evaluation.impressionsDisabled = options?.impressionsDisabled || splitJSON.impressionsDisabled; + evaluation.impressionsDisabled = options?.impressionsDisabled || definition.impressionsDisabled; } return evaluation; @@ -183,29 +182,29 @@ function getEvaluation( function getEvaluations( log: ILogger, key: SplitIO.SplitKey, - splitNames: string[], - splits: Record, + definitionNames: string[], + splits: Record, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync, options?: SplitIO.EvaluationOptions, ): MaybeThenable> { const result: Record = {}; const thenables: Promise[] = []; - splitNames.forEach(splitName => { + definitionNames.forEach(definitionName => { const evaluation = getEvaluation( log, key, - splits[splitName], + splits[definitionName], attributes, storage, options ); if (thenable(evaluation)) { thenables.push(evaluation.then(res => { - result[splitName] = res; + result[definitionName] = res; })); } else { - result[splitName] = evaluation; + result[definitionName] = evaluation; } }); @@ -213,31 +212,31 @@ function getEvaluations( } export function evaluateDefaultTreatment( - splitName: string, + definitionName: string, storage: IStorageSync | IStorageAsync, ): MaybeThenable { - let parsedSplit; + let definition; try { - parsedSplit = storage.splits.getSplit(splitName); + definition = 
storage.splits.getSplit(definitionName); } catch (e) { return EVALUATION_EXCEPTION; } - return thenable(parsedSplit) ? - parsedSplit.then(getDefaultTreatment).catch(() => EVALUATION_EXCEPTION) : - getDefaultTreatment(parsedSplit); + return thenable(definition) ? + definition.then(getDefaultTreatment).catch(() => EVALUATION_EXCEPTION) : + getDefaultTreatment(definition); } function getDefaultTreatment( - splitJSON: ISplit | null, + definition: IDefinition | null, ): MaybeThenable { - if (splitJSON) { + if (definition) { return { - treatment: splitJSON.defaultTreatment, + treatment: definition.defaultTreatment, label: NO_CONDITION_MATCH, // "default rule" - config: splitJSON.configurations && splitJSON.configurations[splitJSON.defaultTreatment] || null, - changeNumber: splitJSON.changeNumber + config: definition.configurations && definition.configurations[definition.defaultTreatment] || null, + changeNumber: definition.changeNumber }; } diff --git a/src/evaluator/matchers/__tests__/dependency.spec.ts b/src/evaluator/matchers/__tests__/dependency.spec.ts index 7cb184d6..c9463e18 100644 --- a/src/evaluator/matchers/__tests__/dependency.spec.ts +++ b/src/evaluator/matchers/__tests__/dependency.spec.ts @@ -4,10 +4,10 @@ import { evaluateFeature } from '../../index'; import { IMatcher, IMatcherDto } from '../../types'; import { IStorageSync } from '../../../storages/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { ALWAYS_ON_SPLIT, ALWAYS_OFF_SPLIT } from '../../../storages/__tests__/testUtils'; -const STORED_SPLITS: Record = { +const STORED_SPLITS: Record = { 'always-on': ALWAYS_ON_SPLIT, 'always-off': ALWAYS_OFF_SPLIT }; diff --git a/src/evaluator/matchers/__tests__/prerequisites.spec.ts b/src/evaluator/matchers/__tests__/prerequisites.spec.ts index 2d67eefa..7b1a55cd 100644 --- a/src/evaluator/matchers/__tests__/prerequisites.spec.ts 
+++ b/src/evaluator/matchers/__tests__/prerequisites.spec.ts @@ -1,11 +1,11 @@ import { evaluateFeature } from '../../index'; import { IStorageSync } from '../../../storages/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { ALWAYS_ON_SPLIT, ALWAYS_OFF_SPLIT } from '../../../storages/__tests__/testUtils'; import { prerequisitesMatcherContext } from '../prerequisites'; -const STORED_SPLITS: Record = { +const STORED_SPLITS: Record = { 'always-on': ALWAYS_ON_SPLIT, 'always-off': ALWAYS_OFF_SPLIT }; diff --git a/src/evaluator/matchers/__tests__/rbsegment.spec.ts b/src/evaluator/matchers/__tests__/rbsegment.spec.ts index db597738..f1048d17 100644 --- a/src/evaluator/matchers/__tests__/rbsegment.spec.ts +++ b/src/evaluator/matchers/__tests__/rbsegment.spec.ts @@ -3,12 +3,12 @@ import { matcherFactory } from '..'; import { evaluateFeature } from '../../index'; import { IMatcherDto } from '../../types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { IRBSegment, ISplit } from '../../../dtos/types'; +import { IRBSegment, IDefinition } from '../../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../../storages/types'; import { thenable } from '../../../utils/promise/thenable'; import { ALWAYS_ON_SPLIT } from '../../../storages/__tests__/testUtils'; -const STORED_SPLITS: Record = { +const STORED_SPLITS: Record = { 'always-on': ALWAYS_ON_SPLIT }; diff --git a/src/evaluator/matchers/dependency.ts b/src/evaluator/matchers/dependency.ts index 68448a8c..2b1128f8 100644 --- a/src/evaluator/matchers/dependency.ts +++ b/src/evaluator/matchers/dependency.ts @@ -2,7 +2,7 @@ import { IDependencyMatcherData, MaybeThenable } from '../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../storages/types'; import { ILogger } from '../../logger/types'; import { thenable } from 
'../../utils/promise/thenable'; -import { IDependencyMatcherValue, IEvaluation, ISplitEvaluator } from '../types'; +import { IDependencyMatcherValue, IEvaluation, IDefinitionEvaluator } from '../types'; import { ENGINE_MATCHER_DEPENDENCY, ENGINE_MATCHER_DEPENDENCY_PRE } from '../../logger/constants'; export function dependencyMatcherContext({ split, treatments }: IDependencyMatcherData, storage: IStorageSync | IStorageAsync, log: ILogger) { @@ -19,7 +19,7 @@ export function dependencyMatcherContext({ split, treatments }: IDependencyMatch return matches; } - return function dependencyMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: ISplitEvaluator): MaybeThenable { + return function dependencyMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: IDefinitionEvaluator): MaybeThenable { log.debug(ENGINE_MATCHER_DEPENDENCY_PRE, [split, JSON.stringify(key), attributes ? '\n attributes: ' + JSON.stringify(attributes) : '']); const evaluation = splitEvaluator(log, key, split, attributes, storage); diff --git a/src/evaluator/matchers/prerequisites.ts b/src/evaluator/matchers/prerequisites.ts index 247e9948..2c4868f3 100644 --- a/src/evaluator/matchers/prerequisites.ts +++ b/src/evaluator/matchers/prerequisites.ts @@ -1,12 +1,12 @@ -import { ISplit, MaybeThenable } from '../../dtos/types'; +import { IDefinition, MaybeThenable } from '../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../storages/types'; import { ILogger } from '../../logger/types'; import { thenable } from '../../utils/promise/thenable'; -import { IDependencyMatcherValue, ISplitEvaluator } from '../types'; +import { IDependencyMatcherValue, IDefinitionEvaluator } from '../types'; -export function prerequisitesMatcherContext(prerequisites: ISplit['prerequisites'], storage: IStorageSync | IStorageAsync, log: ILogger) { +export function prerequisitesMatcherContext(prerequisites: IDefinition['prerequisites'], storage: IStorageSync | IStorageAsync, log: 
ILogger) { - return function prerequisitesMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: ISplitEvaluator): MaybeThenable { + return function prerequisitesMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: IDefinitionEvaluator): MaybeThenable { prerequisites = prerequisites == null ? [] : prerequisites; diff --git a/src/evaluator/matchers/rbsegment.ts b/src/evaluator/matchers/rbsegment.ts index f9cc12e4..01cfbed2 100644 --- a/src/evaluator/matchers/rbsegment.ts +++ b/src/evaluator/matchers/rbsegment.ts @@ -1,7 +1,7 @@ import { IExcludedSegment, IRBSegment, MaybeThenable } from '../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../storages/types'; import { ILogger } from '../../logger/types'; -import { IDependencyMatcherValue, ISplitEvaluator } from '../types'; +import { IDependencyMatcherValue, IDefinitionEvaluator } from '../types'; import { thenable } from '../../utils/promise/thenable'; import { getMatching, keyParser } from '../../utils/key'; import { parser } from '../parser'; @@ -10,7 +10,7 @@ import { STANDARD_SEGMENT, RULE_BASED_SEGMENT, LARGE_SEGMENT } from '../../utils export function ruleBasedSegmentMatcherContext(segmentName: string, storage: IStorageSync | IStorageAsync, log: ILogger) { - return function ruleBasedSegmentMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: ISplitEvaluator): MaybeThenable { + return function ruleBasedSegmentMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: IDefinitionEvaluator): MaybeThenable { const matchingKey = getMatching(key); function matchConditions(rbsegment: IRBSegment) { diff --git a/src/evaluator/matchersTransform/index.ts b/src/evaluator/matchersTransform/index.ts index 075ea9f0..c842790f 100644 --- a/src/evaluator/matchersTransform/index.ts +++ b/src/evaluator/matchersTransform/index.ts @@ -4,13 +4,13 @@ import { segmentTransform } from './segment'; import { whitelistTransform } from './whitelist'; import { 
numericTransform } from './unaryNumeric'; import { zeroSinceHH, zeroSinceSS, betweenDateTimeTransform } from '../convertions'; -import { IBetweenMatcherData, IInLargeSegmentMatcherData, IInSegmentMatcherData, ISplitMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; +import { IBetweenMatcherData, IInLargeSegmentMatcherData, IInSegmentMatcherData, IDefinitionMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; import { IMatcherDto } from '../types'; /** * Flat the complex matcherGroup structure into something handy. */ -export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { +export function matchersTransform(matchers: IDefinitionMatcher[]): IMatcherDto[] { let parsedMatchers = matchers.map(matcher => { let { diff --git a/src/evaluator/matchersTransform/whitelist.ts b/src/evaluator/matchersTransform/whitelist.ts index eab5966f..6eb64592 100644 --- a/src/evaluator/matchersTransform/whitelist.ts +++ b/src/evaluator/matchersTransform/whitelist.ts @@ -1,8 +1,8 @@ -import { ISplitMatcher } from '../../dtos/types'; +import { IDefinitionMatcher } from '../../dtos/types'; /** * Extract whitelist array. 
*/ -export function whitelistTransform(whitelistObject: ISplitMatcher['whitelistMatcherData']) { +export function whitelistTransform(whitelistObject: IDefinitionMatcher['whitelistMatcherData']) { return whitelistObject && whitelistObject.whitelist; } diff --git a/src/evaluator/parser/__tests__/boolean.spec.ts b/src/evaluator/parser/__tests__/boolean.spec.ts index 255a5cf6..06f397b2 100644 --- a/src/evaluator/parser/__tests__/boolean.spec.ts +++ b/src/evaluator/parser/__tests__/boolean.spec.ts @@ -1,6 +1,6 @@ import { parser } from '..'; import { keyParser } from '../../../utils/key'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { IEvaluation } from '../../types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; @@ -24,7 +24,7 @@ test('PARSER / if user.boolean is true then split 100%:on', async () => { treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('testing'), 31, 100, 31, { bool: false diff --git a/src/evaluator/parser/__tests__/index.spec.ts b/src/evaluator/parser/__tests__/index.spec.ts index c3829a3d..ba00f2df 100644 --- a/src/evaluator/parser/__tests__/index.spec.ts +++ b/src/evaluator/parser/__tests__/index.spec.ts @@ -1,7 +1,7 @@ // @ts-nocheck import { parser } from '..'; import { keyParser } from '../../../utils/key'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; test('PARSER / if user is in segment all 100%:on', async () => { @@ -21,7 +21,7 @@ test('PARSER / if user is in segment all 100%:on', async () => { size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31); @@ -50,7 +50,7 @@ test('PARSER / if user is in segment all 
100%:off', async () => { size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31); @@ -75,7 +75,7 @@ test('PARSER / NEGATED if user is in segment all 100%:on, then no match', async size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31); @@ -107,7 +107,7 @@ test('PARSER / if user is in segment ["u1", "u2", "u3", "u4"] then split 100%:on size: 100 }], label: 'whitelisted' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation === undefined).toBe(true); // evaluation should throw undefined @@ -146,7 +146,7 @@ test('PARSER / NEGATED if user is in segment ["u1", "u2", "u3", "u4"] then split size: 100 }], label: 'whitelisted' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation.treatment).toBe('on'); // on @@ -190,7 +190,7 @@ test('PARSER / if user.account is in list ["v1", "v2", "v3"] then split 100:on', size: 100 }], label: 'whitelisted' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { account: 'v1' @@ -236,7 +236,7 @@ test('PARSER / NEGATED if user.account is in list ["v1", "v2", "v3"] then split size: 100 }], label: 'whitelisted' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { account: 'v1' @@ -273,7 +273,7 @@ test('PARSER / if user.account is in segment all then split 100:on', async () => size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31); expect(evaluation.treatment === 
'on').toBe(true); // ALL_KEYS always matches @@ -305,7 +305,7 @@ test('PARSER / if user.attr is between 10 and 20 then split 100:on', async () => treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: 10 @@ -346,7 +346,7 @@ test('PARSER / NEGATED if user.attr is between 10 and 20 then split 100:on, nega treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: 10 @@ -387,7 +387,7 @@ test('PARSER / if user.attr <= datetime 1458240947021 then split 100:on', async treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: new Date('2016-03-17T18:55:47.021Z').getTime() @@ -432,7 +432,7 @@ test('PARSER / NEGATED if user.attr <= datetime 1458240947021 then split 100:on, treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: new Date('2016-03-17T18:55:47.021Z').getTime() @@ -478,7 +478,7 @@ test('PARSER / if user.attr >= datetime 1458240947021 then split 100:on', async treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: new Date('2016-03-17T18:55:47.021Z').getTime() @@ -523,7 +523,7 @@ test('PARSER / NEGATED if user.attr >= datetime 1458240947021 then split 100:on, treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: new Date('2016-03-17T18:55:47.021Z').getTime() @@ -569,7 +569,7 @@ test('PARSER / if user.attr = datetime 1458240947021 then split 100:on', async ( treatment: 'on', 
size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: 1458240947021 @@ -614,7 +614,7 @@ test('PARSER / NEGATED if user.attr = datetime 1458240947021 then split 100:on, treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: 1458240947021 @@ -656,7 +656,7 @@ test('PARSER / if user is in segment all then split 20%:A,20%:B,60%:A', async () treatment: 'A', size: 60 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('aa'), 31, 100, 31); expect(evaluation.treatment).toBe('A'); // 20%:A // bucket 6 with murmur3 diff --git a/src/evaluator/parser/__tests__/invalidMatcher.spec.ts b/src/evaluator/parser/__tests__/invalidMatcher.spec.ts index 87cfc422..9eabbbe8 100644 --- a/src/evaluator/parser/__tests__/invalidMatcher.spec.ts +++ b/src/evaluator/parser/__tests__/invalidMatcher.spec.ts @@ -1,6 +1,6 @@ // @ts-nocheck import { parser } from '..'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; test('PARSER / handle invalid matcher as control', async () => { @@ -24,7 +24,7 @@ test('PARSER / handle invalid matcher as control', async () => { treatment: 'A', size: 60 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator('aaaaa', 31); diff --git a/src/evaluator/parser/__tests__/regex.spec.ts b/src/evaluator/parser/__tests__/regex.spec.ts index 736e93d3..1775301b 100644 --- a/src/evaluator/parser/__tests__/regex.spec.ts +++ b/src/evaluator/parser/__tests__/regex.spec.ts @@ -1,6 +1,6 @@ import { parser } from '..'; import { keyParser } from '../../../utils/key'; -import { ISplitCondition } from '../../../dtos/types'; +import { 
IDefinitionCondition } from '../../../dtos/types'; import { IEvaluation } from '../../types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; @@ -23,7 +23,7 @@ test('PARSER / if user.string is true then split 100%:on', async () => { treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('testing'), 31, 100, 31, { string: 'ehllo dude' diff --git a/src/evaluator/parser/__tests__/set.spec.ts b/src/evaluator/parser/__tests__/set.spec.ts index 6a6d8c35..5e217237 100644 --- a/src/evaluator/parser/__tests__/set.spec.ts +++ b/src/evaluator/parser/__tests__/set.spec.ts @@ -1,7 +1,7 @@ // @ts-nocheck import { parser } from '..'; import { keyParser } from '../../../utils/key'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; // @@ -31,7 +31,7 @@ test('PARSER / if user.permissions ["read", "write"] equal to set ["read", "writ size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'write'] @@ -66,7 +66,7 @@ test('PARSER / if user.permissions ["write", "read"] equal to set ["read", "writ size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['write', 'read'] @@ -101,7 +101,7 @@ test('PARSER / if user.permissions ["1", 2] equal to set ["1", "2"] then split 1 size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['1', 2] @@ -136,7 +136,7 @@ test('PARSER / if user.permissions ["read", "write", "delete"] equal to set ["re size: 100 }], label: label - }] as ISplitCondition[]); + }] as 
IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'write', 'delete'] @@ -170,7 +170,7 @@ test('PARSER / if user.permissions ["read"] equal to set ["read", "write"] then size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read'] @@ -204,7 +204,7 @@ test('PARSER / if user.permissions ["read", "delete"] equal to set ["read", "wri size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'delete'] @@ -238,7 +238,7 @@ test('PARSER / if user.countries ["argentina", "usa"] equal to set ["usa","argen size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { countries: ['argentina', 'usa'] @@ -272,7 +272,7 @@ test('PARSER / if attribute is not an array we should not match equal to set', a size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation).toBe(undefined); // evaluator should not match @@ -307,7 +307,7 @@ test('PARSER / if attribute is an EMPTY array we should not match equal to set', size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { countries: [] @@ -340,7 +340,7 @@ test('PARSER / NEGATED if user.permissions ["read", "write"] equal to set ["read size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'write'] @@ -373,7 +373,7 @@ test('PARSER / NEGATED if user.permissions ["read"] equal to set ["read", "write size: 100 }], 
label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read'] @@ -407,7 +407,7 @@ test('PARSER / NEGATED if attribute is not an array we should not match equal to size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation.treatment).toBe('on'); // on @@ -444,7 +444,7 @@ test('PARSER / NEGATED if attribute is an EMPTY array we should not match equal size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { countries: [] @@ -481,7 +481,7 @@ test('PARSER / if user.permissions ["read", "edit", "delete"] contains all of se size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'edit', 'delete'] @@ -516,7 +516,7 @@ test('PARSER / if user.permissions ["edit", "read", "delete"] contains all of se size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['edit', 'read', 'delete'] @@ -551,7 +551,7 @@ test('PARSER / if user.permissions [1, "edit", "delete"] contains all of set ["1 size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [1, 'edit', 'delete'] @@ -586,7 +586,7 @@ test('PARSER / if user.permissions ["read"] contains all of set ["read", "edit"] size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read'] @@ -620,7 +620,7 @@ test('PARSER / if user.permissions ["read", "delete", "manage"] 
contains all of size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'delete', 'manage'] @@ -654,7 +654,7 @@ test('PARSER / if attribute is not an array we should not match contains all', a size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation).toBe(undefined); // evaluator should not match @@ -689,7 +689,7 @@ test('PARSER / if attribute is an EMPTY array we should not match contains all', size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] @@ -722,7 +722,7 @@ test('PARSER / NEGATED if user.permissions ["read", "edit", "delete"] contains a size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'edit', 'delete'] @@ -755,7 +755,7 @@ test('PARSER / NEGATED if user.permissions ["read"] contains all of set ["read", size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read'] @@ -789,7 +789,7 @@ test('PARSER / NEGATED if attribute is not an array we should not match contains size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation.treatment).toBe('on'); // on @@ -826,7 +826,7 @@ test('PARSER / NEGATED if attribute is an EMPTY array we should not match contai size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] @@ -863,7 +863,7 @@ 
test('PARSER / if user.permissions ["read", "edit"] is part of set ["read", "edi size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'edit'] @@ -898,7 +898,7 @@ test('PARSER / if user.permissions ["edit", "read"] is part of set ["read", "edi size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['edit', 'read'] @@ -933,7 +933,7 @@ test('PARSER / if user.permissions [1, "edit"] is part of set ["1", "edit", "del size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [1, 'edit'] @@ -968,7 +968,7 @@ test('PARSER / if user.permissions ["admin", "magic"] is part of set ["read", "e size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'magic'] @@ -1002,7 +1002,7 @@ test('PARSER / if attribute is not an array we should not match part of', async size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation).toBe(undefined); // evaluator should not match @@ -1037,7 +1037,7 @@ test('PARSER / if attribute is an EMPTY array we should not match part of', asyn size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] @@ -1070,7 +1070,7 @@ test('PARSER / NEGATED if user.permissions ["read", "edit"] is part of set ["rea size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { 
permissions: ['read', 'edit'] @@ -1103,7 +1103,7 @@ test('PARSER / NEGATED if user.permissions ["admin", "magic"] is part of set ["r size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'magic'] @@ -1137,7 +1137,7 @@ test('PARSER / NEGATED if attribute is not an array we should not match part of, size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation.treatment).toBe('on'); // on @@ -1174,7 +1174,7 @@ test('PARSER / NEGATED if attribute is an EMPTY array we should not match part o size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] @@ -1211,7 +1211,7 @@ test('PARSER / if user.permissions ["admin", "edit"] contains any of set ["read" size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'edit'] @@ -1246,7 +1246,7 @@ test('PARSER / if user.permissions ["admin", 1] contains any of set ["read", "1" size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 1] @@ -1281,7 +1281,7 @@ test('PARSER / if user.permissions ["admin", "magic"] contains any of set ["read size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'magic'] @@ -1315,7 +1315,7 @@ test('PARSER / if attribute is not an array we should not match contains any', a size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await 
evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation).toBe(undefined); // evaluator should not match @@ -1350,7 +1350,7 @@ test('PARSER / if attribute is an EMPTY array we should not match contains any', size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] @@ -1383,7 +1383,7 @@ test('PARSER / NEGATED if user.permissions ["admin", "edit"] contains any of set size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'edit'] @@ -1416,7 +1416,7 @@ test('PARSER / NEGATED if user.permissions ["admin", "magic"] contains any of se size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'magic'] @@ -1450,7 +1450,7 @@ test('PARSER / NEGATED if attribute is not an array we should not match contains size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31); @@ -1482,7 +1482,7 @@ test('PARSER / NEGATED if attribute is an EMPTY array we should not match contai size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] diff --git a/src/evaluator/parser/__tests__/string.spec.ts b/src/evaluator/parser/__tests__/string.spec.ts index 81fe9b9d..6d59326c 100644 --- a/src/evaluator/parser/__tests__/string.spec.ts +++ b/src/evaluator/parser/__tests__/string.spec.ts @@ -1,6 +1,6 @@ // @ts-nocheck import { parser } from '..'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { keyParser } from '../../../utils/key'; import { loggerMock } 
from '../../../logger/__tests__/sdkLogger.mock'; @@ -30,7 +30,7 @@ test('PARSER / if user.email starts with ["nico"] then split 100:on', async () = size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -64,7 +64,7 @@ test('PARSER / if user.email = 123, starts with ["1"] then split 100:on should m size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 123 @@ -98,7 +98,7 @@ test('PARSER / if user.email starts with ["nico", "marcio", "facu"] then split 1 size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@split.io' @@ -132,7 +132,7 @@ test('PARSER / if user.email starts with ["nico", "marcio", "facu"] then split 1 size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'marciomisi@split.io' @@ -165,7 +165,7 @@ test('PARSER / if user.email does not start with ["nico"] then not match', async treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@split.io' @@ -197,7 +197,7 @@ test('PARSER / if user.email is an EMPTY string, start with ["nico"] should not treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -227,7 +227,7 @@ test('PARSER / if user.email is not a string, start with ["nico"] should not mat treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { 
email: {} @@ -261,7 +261,7 @@ test('PARSER / NEGATED if user.email starts with ["nico"] then split 100:on, so size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -294,7 +294,7 @@ test('PARSER / NEGATED if user.email does not start with ["nico"] should not mat size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@split.io' @@ -328,7 +328,7 @@ test('PARSER / NEGATED if user.email is an EMPTY string, start with ["nico"] sho size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -360,7 +360,7 @@ test('PARSER / NEGATED if user.email is not a string, start with ["nico"] should size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: /asd4?/ @@ -399,7 +399,7 @@ test('PARSER / if user.email ends with ["split.io"] then split 100:on', async () size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -433,7 +433,7 @@ test('PARSER / if user.email = 123, ends with ["3"] then split 100:on should mat size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 123 @@ -466,7 +466,7 @@ test('PARSER / if user.email ends with ["gmail.com", "split.io", "hotmail.com"] size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -500,7 +500,7 @@ 
test('PARSER / if user.email ends with ["gmail.com", "split.io", "hotmail.com"] size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@hotmail.com' @@ -534,7 +534,7 @@ test('PARSER / if user.email ends with ["gmail.com", "split.io", "hotmail.com"] size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -567,7 +567,7 @@ test('PARSER / if user.email does not end with ["split.io"] then not match', asy size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@gmail.io' @@ -599,7 +599,7 @@ test('PARSER / if user.email is an EMPTY string, end with ["nico"] should not ma treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -629,7 +629,7 @@ test('PARSER / if user.email is not a string, end with ["nico"] should not match treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: [] @@ -665,7 +665,7 @@ test('PARSER / NEGATED if user.email ends with ["split.io"] then split 100:on, s size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -697,7 +697,7 @@ test('PARSER / NEGATED if user.email does not end with ["split.io"] then no matc size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@gmail.io' @@ -730,7 +730,7 @@ test('PARSER / NEGATED if 
user.email is an EMPTY string, end with ["nico"] shoul size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -762,7 +762,7 @@ test('PARSER / NEGATED if user.email is not a string, end with ["nico"] should n size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: NaN @@ -801,7 +801,7 @@ test('PARSER / if user.email contains ["@split"] then split 100:on', async () => size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -835,7 +835,7 @@ test('PARSER / if user.email = 123, contains ["2"] then split 100:on should matc size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 123 @@ -869,7 +869,7 @@ test('PARSER / if user.email contains ["@split"] (beginning) then split 100:on', size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '@split.io.com.ar' @@ -903,7 +903,7 @@ test('PARSER / if user.email contains ["@split"] (end) then split 100:on', async size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split' @@ -937,7 +937,7 @@ test('PARSER / if user.email contains ["@split"] (whole string matches) then spl size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '@split' @@ -971,7 +971,7 @@ test('PARSER / if user.email contains ["@split", "@gmail", "@hotmail"] then 
spli size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nico@hotmail.com' @@ -1005,7 +1005,7 @@ test('PARSER / if user.email contains ["@split", "@gmail", "@hotmail"] then spli size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nico@gmail.com' @@ -1039,7 +1039,7 @@ test('PARSER / if user.email does not contain ["@split"] then not match', async size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@gmail.io' @@ -1071,7 +1071,7 @@ test('PARSER / if user.email is an EMPTY string, contains ["nico"] should not ma treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -1101,7 +1101,7 @@ test('PARSER / if user.email is not a string, contains ["nico"] should not match treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: null @@ -1137,7 +1137,7 @@ test('PARSER / NEGATED if user.email contains ["@split"] then split 100:on, then size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -1170,7 +1170,7 @@ test('PARSER / NEGATED if user.email does not contain ["@split"] then not match, size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@gmail.io' @@ -1203,7 +1203,7 @@ test('PARSER / NEGATED if user.email is an EMPTY string, contains ["nico"] shoul size: 
100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -1235,7 +1235,7 @@ test('PARSER / NEGATED if user.email is not a string, contains ["nico"] should n size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: () => { } diff --git a/src/evaluator/parser/__tests__/trafficAllocation.spec.ts b/src/evaluator/parser/__tests__/trafficAllocation.spec.ts index a71d6dee..2debd783 100644 --- a/src/evaluator/parser/__tests__/trafficAllocation.spec.ts +++ b/src/evaluator/parser/__tests__/trafficAllocation.spec.ts @@ -1,7 +1,7 @@ // @ts-nocheck import { parser } from '..'; import { keyParser } from '../../../utils/key'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { IEvaluation } from '../../types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; @@ -23,7 +23,7 @@ test('PARSER / if user is in segment all 100%:on but trafficAllocation is 0%', a size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); // @ts-ignore const evaluation = await evaluator(keyParser('a key'), 31, 0, 31) as IEvaluation; @@ -50,7 +50,7 @@ test('PARSER / if user is in segment all 100%:on but trafficAllocation is 99% wi size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); // @ts-ignore const evaluation = await evaluator(keyParser('a key'), 31, 99, 31) as IEvaluation; @@ -77,7 +77,7 @@ test('PARSER / if user is in segment all 100%:on but trafficAllocation is 99% an size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); // @ts-ignore const evaluation = await evaluator(keyParser('a48'), 31, 99, 14) as IEvaluation; // murmur3.bucket('a48', 14) === 100 @@ -124,7 +124,7 
@@ test('PARSER / if user is whitelisted and in segment all 100%:off with trafficAl size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); // @ts-ignore const evaluation = await evaluator(keyParser('a key'), 31, 0, 31) as IEvaluation; diff --git a/src/evaluator/parser/index.ts b/src/evaluator/parser/index.ts index d12edf1a..ce66d67d 100644 --- a/src/evaluator/parser/index.ts +++ b/src/evaluator/parser/index.ts @@ -6,14 +6,14 @@ import { conditionContext } from '../condition'; import { ifElseIfCombinerContext } from '../combiners/ifelseif'; import { andCombinerContext } from '../combiners/and'; import { thenable } from '../../utils/promise/thenable'; -import { IEvaluator, IMatcherDto, ISplitEvaluator } from '../types'; -import { ISplitCondition, MaybeThenable } from '../../dtos/types'; +import { IEvaluator, IMatcherDto, IDefinitionEvaluator } from '../types'; +import { IDefinitionCondition, MaybeThenable } from '../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../storages/types'; import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { ENGINE_MATCHER_ERROR, ENGINE_MATCHER_RESULT } from '../../logger/constants'; -export function parser(log: ILogger, conditions: ISplitCondition[], storage: IStorageSync | IStorageAsync): IEvaluator { +export function parser(log: ILogger, conditions: IDefinitionCondition[], storage: IStorageSync | IStorageAsync): IEvaluator { let predicates = []; for (let i = 0; i < conditions.length; i++) { @@ -37,7 +37,7 @@ export function parser(log: ILogger, conditions: ISplitCondition[], storage: ISt } // Evaluator function. 
- return (key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) => { + return (key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) => { const value = sanitizeValue(log, key, matcherDto, attributes); let result: MaybeThenable = false; diff --git a/src/evaluator/treatments/index.ts b/src/evaluator/treatments/index.ts index 35f7da28..d9812aa7 100644 --- a/src/evaluator/treatments/index.ts +++ b/src/evaluator/treatments/index.ts @@ -1,4 +1,4 @@ -import { ISplitPartition } from '../../dtos/types'; +import { IDefinitionPartition } from '../../dtos/types'; import { findIndex } from '../../utils/lang'; export class Treatments { @@ -15,7 +15,7 @@ export class Treatments { this._treatments = treatments; } - static parse(data: ISplitPartition[]) { + static parse(data: IDefinitionPartition[]) { let { ranges, treatments } = data.reduce((accum, value) => { let { size, treatment } = value; diff --git a/src/evaluator/types.ts b/src/evaluator/types.ts index ea976090..d87e2489 100644 --- a/src/evaluator/types.ts +++ b/src/evaluator/types.ts @@ -27,8 +27,8 @@ export interface IEvaluation { export type IEvaluationResult = IEvaluation & { treatment: string; impressionsDisabled?: boolean } -export type ISplitEvaluator = (log: ILogger, key: SplitIO.SplitKey, splitName: string, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync) => MaybeThenable +export type IDefinitionEvaluator = (log: ILogger, key: SplitIO.SplitKey, definitionName: string, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync) => MaybeThenable -export type IEvaluator = (key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) => MaybeThenable +export type IEvaluator = (key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, 
attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) => MaybeThenable -export type IMatcher = (value: string | number | boolean | string[] | IDependencyMatcherValue, splitEvaluator?: ISplitEvaluator) => MaybeThenable +export type IMatcher = (value: string | number | boolean | string[] | IDependencyMatcherValue, definitionEvaluator?: IDefinitionEvaluator) => MaybeThenable diff --git a/src/logger/constants.ts b/src/logger/constants.ts index 0a541f95..b7c11d89 100644 --- a/src/logger/constants.ts +++ b/src/logger/constants.ts @@ -19,9 +19,8 @@ export const RETRIEVE_CLIENT_DEFAULT = 27; export const RETRIEVE_CLIENT_EXISTING = 28; export const RETRIEVE_MANAGER = 29; export const SYNC_OFFLINE_DATA = 30; -export const SYNC_SPLITS_FETCH = 31; -export const SYNC_SPLITS_UPDATE = 32; -export const SYNC_RBS_UPDATE = 33; +export const SYNC_FETCH = 31; +export const SYNC_UPDATE = 32; export const STREAMING_NEW_MESSAGE = 35; export const SYNC_TASK_START = 36; export const SYNC_TASK_EXECUTE = 37; @@ -38,7 +37,6 @@ export const NEW_FACTORY = 105; export const POLLING_SMART_PAUSING = 106; export const POLLING_START = 107; export const POLLING_STOP = 108; -export const SYNC_SPLITS_FETCH_RETRY = 109; export const STREAMING_REFRESH_TOKEN = 110; export const STREAMING_RECONNECT = 111; export const STREAMING_CONNECTING = 112; @@ -60,8 +58,8 @@ export const ENGINE_VALUE_INVALID = 200; export const ENGINE_VALUE_NO_ATTRIBUTES = 201; export const CLIENT_NO_LISTENER = 202; export const CLIENT_NOT_READY_FROM_CACHE = 203; -export const SYNC_MYSEGMENTS_FETCH_RETRY = 204; -export const SYNC_SPLITS_FETCH_FAILS = 205; +export const SYNC_FETCH_RETRY = 204; +export const SYNC_FETCH_FAILS = 205; export const STREAMING_PARSING_ERROR_FAILS = 206; export const STREAMING_PARSING_MESSAGE_FAILS = 207; export const STREAMING_FALLBACK = 208; @@ -71,7 +69,7 @@ export const WARN_SETTING_NULL = 211; export const WARN_TRIMMING_PROPERTIES = 212; export const WARN_CONVERTING = 213; export const 
WARN_TRIMMING = 214; -export const WARN_NOT_EXISTENT_SPLIT = 215; +export const WARN_NOT_EXISTENT_DEFINITION = 215; export const WARN_LOWERCASE_TRAFFIC_TYPE = 216; export const WARN_NOT_EXISTENT_TT = 217; export const WARN_INTEGRATION_INVALID = 218; @@ -124,15 +122,12 @@ export const LOG_PREFIX_ENGINE = 'engine'; export const LOG_PREFIX_ENGINE_COMBINER = LOG_PREFIX_ENGINE + ':combiner: '; export const LOG_PREFIX_ENGINE_MATCHER = LOG_PREFIX_ENGINE + ':matcher: '; export const LOG_PREFIX_ENGINE_VALUE = LOG_PREFIX_ENGINE + ':value: '; -export const LOG_PREFIX_SYNC = 'sync'; -export const LOG_PREFIX_SYNC_MANAGER = LOG_PREFIX_SYNC + ':sync-manager: '; -export const LOG_PREFIX_SYNC_OFFLINE = LOG_PREFIX_SYNC + ':offline: '; -export const LOG_PREFIX_SYNC_STREAMING = LOG_PREFIX_SYNC + ':streaming: '; -export const LOG_PREFIX_SYNC_SPLITS = LOG_PREFIX_SYNC + ':featureflag-changes: '; -export const LOG_PREFIX_SYNC_SEGMENTS = LOG_PREFIX_SYNC + ':segment-changes: '; -export const LOG_PREFIX_SYNC_MYSEGMENTS = LOG_PREFIX_SYNC + ':my-segments: '; -export const LOG_PREFIX_SYNC_POLLING = LOG_PREFIX_SYNC + ':polling-manager: '; -export const LOG_PREFIX_SYNC_SUBMITTERS = LOG_PREFIX_SYNC + ':submitter: '; +export const LOG_PREFIX_SYNC = 'sync: '; +export const LOG_PREFIX_SYNC_MANAGER = 'sync:sync-manager: '; +export const LOG_PREFIX_SYNC_OFFLINE = 'sync:offline: '; +export const LOG_PREFIX_SYNC_STREAMING = 'sync:streaming: '; +export const LOG_PREFIX_SYNC_POLLING = 'sync:polling-manager: '; +export const LOG_PREFIX_SYNC_SUBMITTERS = 'sync:submitter: '; export const LOG_PREFIX_IMPRESSIONS_TRACKER = 'impressions-tracker: '; export const LOG_PREFIX_EVENTS_TRACKER = 'events-tracker: '; export const LOG_PREFIX_UNIQUE_KEYS_TRACKER = 'unique-keys-tracker: '; diff --git a/src/logger/messages/debug.ts b/src/logger/messages/debug.ts index c5e67dff..f5110029 100644 --- a/src/logger/messages/debug.ts +++ b/src/logger/messages/debug.ts @@ -21,13 +21,12 @@ export const codesDebug: [number, string][] 
= codesInfo.concat([ [c.RETRIEVE_MANAGER, 'Retrieving manager instance.'], // synchronizer [c.SYNC_OFFLINE_DATA, c.LOG_PREFIX_SYNC_OFFLINE + 'Feature flags data: \n%s'], - [c.SYNC_SPLITS_FETCH, c.LOG_PREFIX_SYNC_SPLITS + 'Spin up feature flags update using since = %s and rbSince = %s.'], - [c.SYNC_SPLITS_UPDATE, c.LOG_PREFIX_SYNC_SPLITS + 'New feature flags: %s. Removed feature flags: %s.'], - [c.SYNC_RBS_UPDATE, c.LOG_PREFIX_SYNC_SPLITS + 'New rule-based segments: %s. Removed rule-based segments: %s.'], + [c.SYNC_FETCH, c.LOG_PREFIX_SYNC + 'Spin up %s updates using since = %s and rbSince = %s.'], + [c.SYNC_UPDATE, c.LOG_PREFIX_SYNC + 'New %s: %s. Removed: %s.'], [c.STREAMING_NEW_MESSAGE, c.LOG_PREFIX_SYNC_STREAMING + 'New SSE message received, with data: %s.'], - [c.SYNC_TASK_START, c.LOG_PREFIX_SYNC + ': Starting %s. Running each %s millis'], - [c.SYNC_TASK_EXECUTE, c.LOG_PREFIX_SYNC + ': Running %s'], - [c.SYNC_TASK_STOP, c.LOG_PREFIX_SYNC + ': Stopping %s'], + [c.SYNC_TASK_START, c.LOG_PREFIX_SYNC + 'Starting %s. Running each %s millis'], + [c.SYNC_TASK_EXECUTE, c.LOG_PREFIX_SYNC + 'Running %s'], + [c.SYNC_TASK_STOP, c.LOG_PREFIX_SYNC + 'Stopping %s'], // initialization / settings validation [c.SETTINGS_SPLITS_FILTER, c.LOG_PREFIX_SETTINGS + ': feature flags filtering criteria is "%s".'] ]); diff --git a/src/logger/messages/info.ts b/src/logger/messages/info.ts index f8e230ac..fe75132d 100644 --- a/src/logger/messages/info.ts +++ b/src/logger/messages/info.ts @@ -21,7 +21,6 @@ export const codesInfo: [number, string][] = codesWarn.concat([ [c.POLLING_SMART_PAUSING, c.LOG_PREFIX_SYNC_POLLING + 'Turning segments data polling %s.'], [c.POLLING_START, c.LOG_PREFIX_SYNC_POLLING + 'Starting polling'], [c.POLLING_STOP, c.LOG_PREFIX_SYNC_POLLING + 'Stopping polling'], - [c.SYNC_SPLITS_FETCH_RETRY, c.LOG_PREFIX_SYNC_SPLITS + 'Retrying fetch of feature flags (attempt #%s). 
Reason: %s'], [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and resetting timer.'], [c.SUBMITTERS_PUSH, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Pushing %s.'], [c.SUBMITTERS_PUSH_PAGE_HIDDEN, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing %s because page became hidden.'], diff --git a/src/logger/messages/warn.ts b/src/logger/messages/warn.ts index 4bd74dd6..16fe06ca 100644 --- a/src/logger/messages/warn.ts +++ b/src/logger/messages/warn.ts @@ -6,24 +6,24 @@ export const codesWarn: [number, string][] = codesError.concat([ [c.ENGINE_VALUE_INVALID, c.LOG_PREFIX_ENGINE_VALUE + 'Value %s doesn\'t match with expected type.'], [c.ENGINE_VALUE_NO_ATTRIBUTES, c.LOG_PREFIX_ENGINE_VALUE + 'Defined attribute `%s`. No attributes received.'], // synchronizer - [c.SYNC_MYSEGMENTS_FETCH_RETRY, c.LOG_PREFIX_SYNC_MYSEGMENTS + 'Retrying fetch of memberships (attempt #%s). Reason: %s'], - [c.SYNC_SPLITS_FETCH_FAILS, c.LOG_PREFIX_SYNC_SPLITS + 'Error while doing fetch of feature flags. %s'], + [c.SYNC_FETCH_RETRY, c.LOG_PREFIX_SYNC + 'Retrying fetch of %s (attempt #%s). Reason: %s'], + [c.SYNC_FETCH_FAILS, c.LOG_PREFIX_SYNC + 'Error while doing fetch of %s. %s'], [c.STREAMING_PARSING_ERROR_FAILS, c.LOG_PREFIX_SYNC_STREAMING + 'Error parsing SSE error notification: %s'], [c.STREAMING_PARSING_MESSAGE_FAILS, c.LOG_PREFIX_SYNC_STREAMING + 'Error parsing SSE message notification: %s'], [c.STREAMING_FALLBACK, c.LOG_PREFIX_SYNC_STREAMING + 'Falling back to polling mode. Reason: %s'], [c.SUBMITTERS_PUSH_FAILS, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Dropping %s after retry. Reason: %s.'], [c.SUBMITTERS_PUSH_RETRY, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Failed to push %s, keeping data to retry on next iteration. Reason: %s.'], // client status - [c.CLIENT_NOT_READY_FROM_CACHE, '%s: the SDK is not ready to evaluate. Results may be incorrect%s. Make sure to wait for SDK readiness before using this method.'], + [c.CLIENT_NOT_READY_FROM_CACHE, '%s: the SDK is not ready to evaluate. 
Results may be incorrect. Make sure to wait for SDK readiness before using this method.'], [c.CLIENT_NO_LISTENER, 'No listeners for SDK_READY event detected. Incorrect control treatments could have been logged if you called getTreatment/s while the SDK was not yet synchronized with the backend.'], // input validation [c.WARN_SETTING_NULL, '%s: Property "%s" is of invalid type. Setting value to null.'], [c.WARN_TRIMMING_PROPERTIES, '%s: more than 300 properties were provided. Some of them will be trimmed when processed.'], [c.WARN_CONVERTING, '%s: %s "%s" is not of type string, converting.'], [c.WARN_TRIMMING, '%s: %s "%s" has extra whitespace, trimming.'], - [c.WARN_NOT_EXISTENT_SPLIT, '%s: feature flag "%s" does not exist in this environment. Please double check what feature flags exist in the Split user interface.'], + [c.WARN_NOT_EXISTENT_DEFINITION, '%s: "%s" does not exist in this environment. Please double check that it exists in Harness UI.'], [c.WARN_LOWERCASE_TRAFFIC_TYPE, '%s: traffic_type_name should be all lowercase - converting string to lowercase.'], - [c.WARN_NOT_EXISTENT_TT, '%s: traffic type "%s" does not have any corresponding feature flag in this environment, make sure you\'re tracking your events to a valid traffic type defined in the Split user interface.'], + [c.WARN_NOT_EXISTENT_TT, '%s: traffic type "%s" does not have any corresponding feature flag in this environment, make sure you\'re tracking your events to a valid traffic type defined in Harness UI.'], [c.WARN_FLAGSET_NOT_CONFIGURED, '%s: you passed %s which is not part of the configured FlagSetsFilter, ignoring Flag Set.'], // initialization / settings validation [c.WARN_INTEGRATION_INVALID, c.LOG_PREFIX_SETTINGS + ': %s integration item(s) at settings is invalid. 
%s'], @@ -36,5 +36,5 @@ export const codesWarn: [number, string][] = codesError.concat([ [c.STREAMING_PARSING_SPLIT_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching SplitChanges due to an error processing %s notification: %s'], [c.WARN_INVALID_FLAGSET, '%s: you passed %s, flag set must adhere to the regular expressions %s. This means a flag set must start with a letter or number, be in lowercase, alphanumeric and have a max length of 50 characters. %s was discarded.'], [c.WARN_LOWERCASE_FLAGSET, '%s: flag set %s should be all lowercase - converting string to lowercase.'], - [c.WARN_FLAGSET_WITHOUT_FLAGS, '%s: you passed %s flag set that does not contain cached feature flag names. Please double check what flag sets are in use in the Split user interface.'], + [c.WARN_FLAGSET_WITHOUT_FLAGS, '%s: you passed %s flag set that does not contain cached feature flag names. Please double check what flag sets are in use in Harness UI.'], ]); diff --git a/src/sdkClient/client.ts b/src/sdkClient/client.ts index 19f0c7dd..879b3a36 100644 --- a/src/sdkClient/client.ts +++ b/src/sdkClient/client.ts @@ -1,7 +1,7 @@ import { evaluateFeature, evaluateFeatures, evaluateFeaturesByFlagSets } from '../evaluator'; import { thenable } from '../utils/promise/thenable'; import { getMatching, getBucketing } from '../utils/key'; -import { validateSplitExistence } from '../utils/inputValidation/splitExistence'; +import { validateDefinitionExistence } from '../utils/inputValidation/definitionExistence'; import { validateTrafficTypeExistence } from '../utils/inputValidation/trafficTypeExistence'; import { SDK_NOT_READY } from '../utils/labels'; import { CONTROL, TREATMENT, TREATMENTS, TREATMENT_WITH_CONFIG, TREATMENTS_WITH_CONFIG, TRACK, TREATMENTS_WITH_CONFIG_BY_FLAGSETS, TREATMENTS_BY_FLAGSETS, TREATMENTS_BY_FLAGSET, TREATMENTS_WITH_CONFIG_BY_FLAGSET, GET_TREATMENTS_WITH_CONFIG, GET_TREATMENTS_BY_FLAG_SETS, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, GET_TREATMENTS_BY_FLAG_SET, 
GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SET, GET_TREATMENT_WITH_CONFIG, GET_TREATMENT, GET_TREATMENTS, TRACK_FN_LABEL } from '../utils/constants'; @@ -153,7 +153,7 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl config = fallbackTreatment.config; } - if (validateSplitExistence(log, readinessManager, featureFlagName, label, invokingMethodName)) { + if (validateDefinitionExistence(log, readinessManager, featureFlagName, label, invokingMethodName)) { log.info(IMPRESSION_QUEUEING, [featureFlagName, matchingKey, treatment, label]); queue.push({ imp: { diff --git a/src/sdkClient/clientInputValidation.ts b/src/sdkClient/clientInputValidation.ts index e8db5b0b..e2f88a32 100644 --- a/src/sdkClient/clientInputValidation.ts +++ b/src/sdkClient/clientInputValidation.ts @@ -46,7 +46,7 @@ export function clientInputValidationDecorator cond.conditionType === 'ROLLOUT'); @@ -20,7 +20,7 @@ function collectTreatments(splitObject: ISplit) { return allTreatmentsCondition ? allTreatmentsCondition.partitions!.map(v => v.treatment) : []; } -function objectToView(splitObject: ISplit | null): SplitIO.SplitView | null { +function objectToView(splitObject: IDefinition | null): SplitIO.SplitView | null { if (!splitObject) return null; return { @@ -37,7 +37,7 @@ function objectToView(splitObject: ISplit | null): SplitIO.SplitView | null { }; } -function objectsToViews(splitObjects: ISplit[]) { +function objectsToViews(splitObjects: IDefinition[]) { let views: SplitIO.SplitView[] = []; splitObjects.forEach(split => { @@ -74,12 +74,12 @@ export function sdkManagerFactory null).then(result => { // handle possible rejections when using pluggable storage - validateSplitExistence(log, readinessManager, splitName, result, SPLIT_FN_LABEL); + validateDefinitionExistence(log, readinessManager, splitName, result, SPLIT_FN_LABEL); return objectToView(result); }); } - validateSplitExistence(log, readinessManager, splitName, split, SPLIT_FN_LABEL); + 
validateDefinitionExistence(log, readinessManager, splitName, split, SPLIT_FN_LABEL); return objectToView(split); }, diff --git a/src/storages/AbstractSplitsCacheAsync.ts b/src/storages/AbstractSplitsCacheAsync.ts index 420b9202..407a35b4 100644 --- a/src/storages/AbstractSplitsCacheAsync.ts +++ b/src/storages/AbstractSplitsCacheAsync.ts @@ -1,5 +1,5 @@ import { ISplitsCacheAsync } from './types'; -import { ISplit } from '../dtos/types'; +import { IDefinition } from '../dtos/types'; import { objectAssign } from '../utils/lang/objectAssign'; /** @@ -8,11 +8,11 @@ import { objectAssign } from '../utils/lang/objectAssign'; */ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { - protected abstract addSplit(split: ISplit): Promise + protected abstract addSplit(split: IDefinition): Promise protected abstract removeSplit(name: string): Promise protected abstract setChangeNumber(changeNumber: number): Promise - update(toAdd: ISplit[], toRemove: ISplit[], changeNumber: number): Promise { + update(toAdd: IDefinition[], toRemove: IDefinition[], changeNumber: number): Promise { return Promise.all([ this.setChangeNumber(changeNumber), Promise.all(toAdd.map(addedFF => this.addSplit(addedFF))), @@ -22,10 +22,10 @@ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { }); } - abstract getSplit(name: string): Promise - abstract getSplits(names: string[]): Promise> + abstract getSplit(name: string): Promise + abstract getSplits(names: string[]): Promise> abstract getChangeNumber(): Promise - abstract getAll(): Promise + abstract getAll(): Promise abstract getSplitNames(): Promise abstract getNamesByFlagSets(flagSets: string[]): Promise[]> abstract trafficTypeExists(trafficType: string): Promise diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index 64194561..8c748dcc 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -1,5 +1,5 @@ import 
{ ISplitsCacheSync, IStorageSync } from './types'; -import { IRBSegment, ISplit } from '../dtos/types'; +import { IRBSegment, IDefinition } from '../dtos/types'; import { objectAssign } from '../utils/lang/objectAssign'; import { IN_SEGMENT, IN_LARGE_SEGMENT } from '../utils/constants'; @@ -9,21 +9,21 @@ import { IN_SEGMENT, IN_LARGE_SEGMENT } from '../utils/constants'; */ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { - protected abstract addSplit(split: ISplit): boolean + protected abstract addSplit(split: IDefinition): boolean protected abstract removeSplit(name: string): boolean protected abstract setChangeNumber(changeNumber: number): boolean | void - update(toAdd: ISplit[], toRemove: ISplit[], changeNumber: number): boolean { + update(toAdd: IDefinition[], toRemove: IDefinition[], changeNumber: number): boolean { let updated = toAdd.map(addedFF => this.addSplit(addedFF)).some(result => result); updated = toRemove.map(removedFF => this.removeSplit(removedFF.name)).some(result => result) || updated; this.setChangeNumber(changeNumber); return updated; } - abstract getSplit(name: string): ISplit | null + abstract getSplit(name: string): IDefinition | null - getSplits(names: string[]): Record { - const splits: Record = {}; + getSplits(names: string[]): Record { + const splits: Record = {}; names.forEach(name => { splits[name] = this.getSplit(name); }); @@ -32,8 +32,8 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { abstract getChangeNumber(): number - getAll(): ISplit[] { - return this.getSplitNames().map(key => this.getSplit(key) as ISplit); + getAll(): IDefinition[] { + return this.getSplitNames().map(key => this.getSplit(key) as IDefinition); } abstract getSplitNames(): string[] @@ -73,7 +73,7 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { * Given a parsed split, it returns a boolean flagging if its conditions use segments matchers (rules & whitelists). 
* This util is intended to simplify the implementation of `splitsCache::usesSegments` method */ -export function usesSegments(ruleEntity: ISplit | IRBSegment) { +export function usesSegments(ruleEntity: IDefinition | IRBSegment) { const conditions = ruleEntity.conditions || []; for (let i = 0; i < conditions.length; i++) { const matchers = conditions[i].matcherGroup.matchers; diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 3f1de562..3a0c93ab 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -2,7 +2,7 @@ import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage'; import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; -import { IRBSegment, ISplit } from '../../dtos/types'; +import { IRBSegment, IDefinition } from '../../dtos/types'; import { validateRolloutPlan, setRolloutPlan } from '../setRolloutPlan'; import { getRolloutPlan } from '../getRolloutPlan'; @@ -50,7 +50,7 @@ describe('validateRolloutPlan', () => { describe('getRolloutPlan & setRolloutPlan (client-side)', () => { // @ts-expect-error Load server-side storage const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); - serverStorage.splits.update([{ name: 'split1' } as ISplit], [], 123); + serverStorage.splits.update([{ name: 'split1' } as IDefinition], [], 123); serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); diff --git a/src/storages/__tests__/testUtils.ts b/src/storages/__tests__/testUtils.ts index b2ae79dc..39db48f8 100644 --- a/src/storages/__tests__/testUtils.ts +++ b/src/storages/__tests__/testUtils.ts @@ -1,4 +1,4 @@ -import { IRBSegment, ISplit } from 
'../../dtos/types'; +import { IRBSegment, IDefinition } from '../../dtos/types'; import { IStorageSync, IStorageAsync, IImpressionsCacheSync, IEventsCacheSync } from '../types'; // Assert that instances created by storage factories have the expected interface @@ -23,26 +23,26 @@ export function assertSyncRecorderCacheInterface(cache: IEventsCacheSync | IImpr // Split mocks -export const ALWAYS_ON_SPLIT: ISplit = { 'trafficTypeName': 'user', 'name': 'always-on', 'trafficAllocation': 100, 'trafficAllocationSeed': 1012950810, 'seed': -725161385, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'changeNumber': 1494364996459, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }], 'sets': [] }; -export const ALWAYS_OFF_SPLIT: ISplit = { 'trafficTypeName': 'user', 'name': 'always-off', 'trafficAllocation': 100, 'trafficAllocationSeed': -331690370, 'seed': 403891040, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'on', 'changeNumber': 1494365020316, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 0 }, { 'treatment': 'off', 'size': 100 }], 'label': 'in segment all' }], 'sets': [] }; //@ts-ignore -export const splitWithUserTT: ISplit = { name: 'user_ff', trafficTypeName: 'user_tt', conditions: [] }; //@ts-ignore -export const 
splitWithAccountTT: ISplit = { name: 'account_ff', trafficTypeName: 'account_tt', conditions: [] }; //@ts-ignore -export const splitWithAccountTTAndUsesSegments: ISplit = { trafficTypeName: 'account_tt', conditions: [{ matcherGroup: { matchers: [{ matcherType: 'IN_SEGMENT', userDefinedSegmentMatcherData: { segmentName: 'employees' } }] } }] }; //@ts-ignore -export const something: ISplit = { name: 'something' }; //@ts-ignore -export const somethingElse: ISplit = { name: 'something else' }; +export const ALWAYS_ON_SPLIT: IDefinition = { 'trafficTypeName': 'user', 'name': 'always-on', 'trafficAllocation': 100, 'trafficAllocationSeed': 1012950810, 'seed': -725161385, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'changeNumber': 1494364996459, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }], 'sets': [] }; +export const ALWAYS_OFF_SPLIT: IDefinition = { 'trafficTypeName': 'user', 'name': 'always-off', 'trafficAllocation': 100, 'trafficAllocationSeed': -331690370, 'seed': 403891040, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'on', 'changeNumber': 1494365020316, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 0 }, { 'treatment': 'off', 'size': 100 }], 'label': 'in segment all' }], 'sets': [] }; //@ts-ignore +export 
const splitWithUserTT: IDefinition = { name: 'user_ff', trafficTypeName: 'user_tt', conditions: [] }; //@ts-ignore +export const splitWithAccountTT: IDefinition = { name: 'account_ff', trafficTypeName: 'account_tt', conditions: [] }; //@ts-ignore +export const splitWithAccountTTAndUsesSegments: IDefinition = { trafficTypeName: 'account_tt', conditions: [{ matcherGroup: { matchers: [{ matcherType: 'IN_SEGMENT', userDefinedSegmentMatcherData: { segmentName: 'employees' } }] } }] }; //@ts-ignore +export const something: IDefinition = { name: 'something' }; //@ts-ignore +export const somethingElse: IDefinition = { name: 'something else' }; // - With flag sets //@ts-ignore -export const featureFlagWithEmptyFS: ISplit = { name: 'ff_empty', sets: [] }; +export const featureFlagWithEmptyFS: IDefinition = { name: 'ff_empty', sets: [] }; //@ts-ignore -export const featureFlagOne: ISplit = { name: 'ff_one', sets: ['o', 'n', 'e'] }; +export const featureFlagOne: IDefinition = { name: 'ff_one', sets: ['o', 'n', 'e'] }; //@ts-ignore -export const featureFlagTwo: ISplit = { name: 'ff_two', sets: ['t', 'w', 'o'] }; +export const featureFlagTwo: IDefinition = { name: 'ff_two', sets: ['t', 'w', 'o'] }; //@ts-ignore -export const featureFlagThree: ISplit = { name: 'ff_three', sets: ['t', 'h', 'r', 'e'] }; +export const featureFlagThree: IDefinition = { name: 'ff_three', sets: ['t', 'h', 'r', 'e'] }; //@ts-ignore -export const featureFlagWithoutFS: ISplit = { name: 'ff_four' }; +export const featureFlagWithoutFS: IDefinition = { name: 'ff_four' }; // Rule-based segments //@ts-ignore diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index 30945684..a1315de3 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -1,4 +1,4 @@ -import { ISplit } from '../../dtos/types'; +import { IDefinition } from '../../dtos/types'; import { AbstractSplitsCacheSync, 
usesSegments } from '../AbstractSplitsCacheSync'; import { isFiniteNumber, toNumber, isNaNNumber } from '../../utils/lang'; import { KeyBuilderCS } from '../KeyBuilderCS'; @@ -30,7 +30,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { else this.storage.removeItem(key); } - private _decrementCounts(split: ISplit) { + private _decrementCounts(split: IDefinition) { try { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); this._decrementCount(ttKey); @@ -44,7 +44,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { } } - private _incrementCounts(split: ISplit) { + private _incrementCounts(split: IDefinition) { try { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); this.storage.setItem(ttKey, (toNumber(this.storage.getItem(ttKey)) + 1) + ''); @@ -79,7 +79,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { this.hasSync = false; } - addSplit(split: ISplit) { + addSplit(split: IDefinition) { const name = split.name; const splitKey = this.keys.buildSplitKey(name); const splitFromStorage = this.storage.getItem(splitKey); @@ -110,7 +110,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { return true; } - getSplit(name: string): ISplit | null { + getSplit(name: string): IDefinition | null { const item = this.storage.getItem(this.keys.buildSplitKey(name)); return item && JSON.parse(item); } @@ -184,7 +184,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { }); } - private addToFlagSets(featureFlag: ISplit) { + private addToFlagSets(featureFlag: IDefinition) { if (!featureFlag.sets) return; featureFlag.sets.forEach(featureFlagSet => { diff --git a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts index 976baa72..c8c79c5e 100644 --- a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts 
@@ -1,7 +1,7 @@ import { SplitsCacheInLocal } from '../SplitsCacheInLocal'; import { KeyBuilderCS } from '../../KeyBuilderCS'; import { splitWithUserTT, splitWithAccountTT, splitWithAccountTTAndUsesSegments, something, somethingElse, featureFlagOne, featureFlagTwo, featureFlagThree, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { fullSettings } from '../../../utils/settingsValidation/__tests__/settings.mocks'; import { storages, PREFIX } from './wrapper.mock'; @@ -115,7 +115,7 @@ describe.each(storages)('SPLITS CACHE', (storage) => { // kill an existent split updated = cache.killLocally(something.name, 'some_treatment', 100); - let lol1Split = cache.getSplit(something.name) as ISplit; + let lol1Split = cache.getSplit(something.name) as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -125,7 +125,7 @@ describe.each(storages)('SPLITS CACHE', (storage) => { // not update if changeNumber is old updated = cache.killLocally(something.name, 'some_treatment_2', 90); - lol1Split = cache.getSplit(something.name) as ISplit; + lol1Split = cache.getSplit(something.name) as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older diff --git a/src/storages/inMemory/SplitsCacheInMemory.ts b/src/storages/inMemory/SplitsCacheInMemory.ts index 461d15e6..7b70ccc6 100644 --- a/src/storages/inMemory/SplitsCacheInMemory.ts +++ b/src/storages/inMemory/SplitsCacheInMemory.ts @@ -1,4 +1,4 @@ -import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; +import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; import { 
AbstractSplitsCacheSync, usesSegments } from '../AbstractSplitsCacheSync'; import { isFiniteNumber } from '../../utils/lang'; @@ -8,7 +8,7 @@ import { isFiniteNumber } from '../../utils/lang'; export class SplitsCacheInMemory extends AbstractSplitsCacheSync { private flagSetsFilter: string[]; - private splitsCache: Record = {}; + private splitsCache: Record = {}; private ttCache: Record = {}; private changeNumber: number = -1; private segmentsCount: number = 0; @@ -27,7 +27,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.flagSetsCache = {}; } - addSplit(split: ISplit): boolean { + addSplit(split: IDefinition): boolean { const name = split.name; const previousSplit = this.getSplit(name); if (previousSplit) { // We had this Split already @@ -73,7 +73,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { return true; } - getSplit(name: string): ISplit | null { + getSplit(name: string): IDefinition | null { return this.splitsCache[name] || null; } @@ -102,7 +102,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { return flagSets.map(flagSet => this.flagSetsCache[flagSet] || new Set()); } - private addToFlagSets(featureFlag: ISplit) { + private addToFlagSets(featureFlag: IDefinition) { if (!featureFlag.sets) return; featureFlag.sets.forEach(featureFlagSet => { diff --git a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts index 2ed4478b..f755a295 100644 --- a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts @@ -1,5 +1,5 @@ import { SplitsCacheInMemory } from '../SplitsCacheInMemory'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { splitWithUserTT, splitWithAccountTT, something, somethingElse, featureFlagWithEmptyFS, featureFlagWithoutFS, featureFlagOne, featureFlagTwo, featureFlagThree } from 
'../../__tests__/testUtils'; test('SPLITS CACHE / In Memory', () => { @@ -106,7 +106,7 @@ test('SPLITS CACHE / In Memory / killLocally', () => { // kill an existent split updated = cache.killLocally(something.name, 'some_treatment', 100); - let lol1Split = cache.getSplit(something.name) as ISplit; + let lol1Split = cache.getSplit(something.name) as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -116,7 +116,7 @@ test('SPLITS CACHE / In Memory / killLocally', () => { // not update if changeNumber is old updated = cache.killLocally(something.name, 'some_treatment_2', 90); - lol1Split = cache.getSplit(something.name) as ISplit; + lol1Split = cache.getSplit(something.name) as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older diff --git a/src/storages/inRedis/SplitsCacheInRedis.ts b/src/storages/inRedis/SplitsCacheInRedis.ts index 7258a770..539cdc80 100644 --- a/src/storages/inRedis/SplitsCacheInRedis.ts +++ b/src/storages/inRedis/SplitsCacheInRedis.ts @@ -2,7 +2,7 @@ import { isFiniteNumber, isNaNNumber } from '../../utils/lang'; import { KeyBuilderSS } from '../KeyBuilderSS'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; -import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; +import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; import { AbstractSplitsCacheAsync } from '../AbstractSplitsCacheAsync'; import { returnDifference } from '../../utils/lang/sets'; import type { RedisAdapter } from './RedisAdapter'; @@ -47,14 +47,14 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { }); } - private _decrementCounts(split: ISplit) { + private _decrementCounts(split: IDefinition) { const 
ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); return this.redis.decr(ttKey).then((count: number) => { if (count === 0) return this.redis.del(ttKey); }); } - private _incrementCounts(split: ISplit) { + private _incrementCounts(split: IDefinition) { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); return this.redis.incr(ttKey); } @@ -82,13 +82,13 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * The returned promise is resolved when the operation success * or rejected if it fails (e.g., redis operation fails) */ - addSplit(split: ISplit): Promise { + addSplit(split: IDefinition): Promise { const name = split.name; const splitKey = this.keys.buildSplitKey(name); return this.redis.get(splitKey).then((splitFromStorage: string | null) => { // handling parsing error - let parsedPreviousSplit: ISplit, stringifiedNewSplit; + let parsedPreviousSplit: IDefinition, stringifiedNewSplit; try { parsedPreviousSplit = splitFromStorage ? JSON.parse(splitFromStorage) : undefined; stringifiedNewSplit = JSON.stringify(split); @@ -127,7 +127,7 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * Get split definition or null if it's not defined. * Returned promise is rejected if redis operation fails. */ - getSplit(name: string): Promise { + getSplit(name: string): Promise { if (this.redisError) { this.log.error(LOG_PREFIX + this.redisError); @@ -171,7 +171,7 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * or rejected if redis operation fails. */ // @TODO we need to benchmark which is the maximun number of commands we could pipeline without kill redis performance. 
- getAll(): Promise { + getAll(): Promise { return this.redis.keys(this.keys.searchPatternForSplitKeys()) .then((listOfKeys: string[]) => this.redis.pipeline(listOfKeys.map((k: string) => ['get', k])).exec()) .then(processPipelineAnswer) @@ -242,7 +242,7 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * Fetches multiple splits definitions. * Returned promise is rejected if redis operation fails. */ - getSplits(names: string[]): Promise> { + getSplits(names: string[]): Promise> { if (this.redisError) { this.log.error(LOG_PREFIX + this.redisError); @@ -252,7 +252,7 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { const keys = names.map(name => this.keys.buildSplitKey(name)); return this.redis.mget(...keys) .then((splitDefinitions: (string | null)[]) => { - const splits: Record = {}; + const splits: Record = {}; names.forEach((name, idx) => { const split = splitDefinitions[idx]; splits[name] = split && JSON.parse(split); diff --git a/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts b/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts index 0cbc8914..d42143ff 100644 --- a/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts +++ b/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts @@ -2,7 +2,7 @@ import { SplitsCacheInRedis } from '../SplitsCacheInRedis'; import { KeyBuilderSS } from '../../KeyBuilderSS'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { splitWithUserTT, splitWithAccountTT, featureFlagOne, featureFlagThree, featureFlagTwo, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { metadata } from '../../__tests__/KeyBuilder.spec'; import { RedisAdapter } from '../RedisAdapter'; @@ -122,7 +122,7 @@ describe('SPLITS CACHE REDIS', () => { // kill an existent split updated = await cache.killLocally('user_ff', 'some_treatment', 
100); - let lol1Split = await cache.getSplit('user_ff') as ISplit; + let lol1Split = await cache.getSplit('user_ff') as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -132,7 +132,7 @@ describe('SPLITS CACHE REDIS', () => { // not update if changeNumber is old updated = await cache.killLocally('user_ff', 'some_treatment_2', 90); - lol1Split = await cache.getSplit('user_ff') as ISplit; + lol1Split = await cache.getSplit('user_ff') as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older diff --git a/src/storages/pluggable/SplitsCachePluggable.ts b/src/storages/pluggable/SplitsCachePluggable.ts index 9b53f3a9..48eba081 100644 --- a/src/storages/pluggable/SplitsCachePluggable.ts +++ b/src/storages/pluggable/SplitsCachePluggable.ts @@ -2,7 +2,7 @@ import { isFiniteNumber, isNaNNumber } from '../../utils/lang'; import { KeyBuilder } from '../KeyBuilder'; import { IPluggableStorageWrapper } from '../types'; import { ILogger } from '../../logger/types'; -import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; +import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; import { LOG_PREFIX } from './constants'; import { AbstractSplitsCacheAsync } from '../AbstractSplitsCacheAsync'; import { returnDifference } from '../../utils/lang/sets'; @@ -31,14 +31,14 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { this.flagSetsFilter = splitFiltersValidation ? 
splitFiltersValidation.groupedFilters.bySet : []; } - private _decrementCounts(split: ISplit) { + private _decrementCounts(split: IDefinition) { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); return this.wrapper.decr(ttKey).then(count => { if (count === 0) return this.wrapper.del(ttKey); }); } - private _incrementCounts(split: ISplit) { + private _incrementCounts(split: IDefinition) { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); return this.wrapper.incr(ttKey); } @@ -66,13 +66,13 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { * The returned promise is resolved when the operation success * or rejected if it fails (e.g., wrapper operation fails) */ - addSplit(split: ISplit): Promise { + addSplit(split: IDefinition): Promise { const name = split.name; const splitKey = this.keys.buildSplitKey(name); return this.wrapper.get(splitKey).then(splitFromStorage => { // handling parsing error - let parsedPreviousSplit: ISplit, stringifiedNewSplit; + let parsedPreviousSplit: IDefinition, stringifiedNewSplit; try { parsedPreviousSplit = splitFromStorage ? JSON.parse(splitFromStorage) : undefined; stringifiedNewSplit = JSON.stringify(split); @@ -112,7 +112,7 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { * The returned promise is resolved with the split definition or null if it's not defined, * or rejected if wrapper operation fails. */ - getSplit(name: string): Promise { + getSplit(name: string): Promise { return this.wrapper.get(this.keys.buildSplitKey(name)) .then(maybeSplit => maybeSplit && JSON.parse(maybeSplit)); } @@ -122,11 +122,11 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { * The returned promise is resolved with a map of split names to their split definition or null if it's not defined, * or rejected if wrapper operation fails. 
*/ - getSplits(names: string[]): Promise> { + getSplits(names: string[]): Promise> { const keys = names.map(name => this.keys.buildSplitKey(name)); return this.wrapper.getMany(keys).then(splitDefinitions => { - const splits: Record = {}; + const splits: Record = {}; names.forEach((name, idx) => { const split = splitDefinitions[idx]; splits[name] = split && JSON.parse(split); @@ -140,7 +140,7 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { * The returned promise is resolved with the list of split definitions, * or rejected if wrapper operation fails. */ - getAll(): Promise { + getAll(): Promise { return this.wrapper.getKeysByPrefix(this.keys.buildSplitKeyPrefix()) .then((listOfKeys) => this.wrapper.getMany(listOfKeys)) .then((splitDefinitions) => splitDefinitions.map((splitDefinition) => { diff --git a/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts b/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts index 03d1ee6e..8f9c966c 100644 --- a/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts +++ b/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts @@ -3,7 +3,7 @@ import { KeyBuilder } from '../../KeyBuilder'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { wrapperMockFactory } from './wrapper.mock'; import { splitWithUserTT, splitWithAccountTT, featureFlagOne, featureFlagThree, featureFlagTwo, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; const keysBuilder = new KeyBuilder(); @@ -20,7 +20,7 @@ describe('SPLITS CACHE PLUGGABLE', () => { // Assert getSplits let valuesObj = await cache.getSplits([splitWithUserTT.name, splitWithAccountTT.name]); - expect(valuesObj).toEqual(values.reduce>((acc, split) => { + expect(valuesObj).toEqual(values.reduce>((acc, split) => { acc[split.name] = split; return acc; }, {})); @@ -114,7 +114,7 @@ 
describe('SPLITS CACHE PLUGGABLE', () => { // kill an existent split updated = await cache.killLocally('user_ff', 'some_treatment', 100); - let lol1Split = await cache.getSplit('user_ff') as ISplit; + let lol1Split = await cache.getSplit('user_ff') as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -124,7 +124,7 @@ describe('SPLITS CACHE PLUGGABLE', () => { // not update if changeNumber is old updated = await cache.killLocally('user_ff', 'some_treatment_2', 90); - lol1Split = await cache.getSplit('user_ff') as ISplit; + lol1Split = await cache.getSplit('user_ff') as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older diff --git a/src/storages/types.ts b/src/storages/types.ts index fea0cc2b..222106d7 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -1,5 +1,5 @@ import SplitIO from '../../types/splitio'; -import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse, IMembershipsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; +import { MaybeThenable, IDefinition, IRBSegment, IMySegmentsResponse, IMembershipsResponse, ISegmentChangesResponse, IDefinitionChangesResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { ISettings } from '../types'; @@ -194,12 +194,12 @@ export interface 
IPluggableStorageWrapper { /** Splits cache */ export interface ISplitsCacheBase { - update(toAdd: ISplit[], toRemove: ISplit[], changeNumber: number): MaybeThenable, - getSplit(name: string): MaybeThenable, - getSplits(names: string[]): MaybeThenable>, // `fetchMany` in spec + update(toAdd: IDefinition[], toRemove: IDefinition[], changeNumber: number): MaybeThenable, + getSplit(name: string): MaybeThenable, + getSplits(names: string[]): MaybeThenable>, // `fetchMany` in spec // should never reject or throw an exception. Instead return -1 by default, assuming no splits are present in the storage. getChangeNumber(): MaybeThenable, - getAll(): MaybeThenable, + getAll(): MaybeThenable, getSplitNames(): MaybeThenable, // should never reject or throw an exception. Instead return true by default, asssuming the TT might exist. trafficTypeExists(trafficType: string): MaybeThenable, @@ -211,11 +211,11 @@ export interface ISplitsCacheBase { } export interface ISplitsCacheSync extends ISplitsCacheBase { - update(toAdd: ISplit[], toRemove: ISplit[], changeNumber: number): boolean, - getSplit(name: string): ISplit | null, - getSplits(names: string[]): Record, + update(toAdd: IDefinition[], toRemove: IDefinition[], changeNumber: number): boolean, + getSplit(name: string): IDefinition | null, + getSplits(names: string[]): Record, getChangeNumber(): number, - getAll(): ISplit[], + getAll(): IDefinition[], getSplitNames(): string[], trafficTypeExists(trafficType: string): boolean, usesSegments(): boolean, @@ -225,11 +225,11 @@ export interface ISplitsCacheSync extends ISplitsCacheBase { } export interface ISplitsCacheAsync extends ISplitsCacheBase { - update(toAdd: ISplit[], toRemove: ISplit[], changeNumber: number): Promise, - getSplit(name: string): Promise, - getSplits(names: string[]): Promise>, + update(toAdd: IDefinition[], toRemove: IDefinition[], changeNumber: number): Promise, + getSplit(name: string): Promise, + getSplits(names: string[]): Promise>, getChangeNumber(): 
Promise, - getAll(): Promise, + getAll(): Promise, getSplitNames(): Promise, trafficTypeExists(trafficType: string): Promise, usesSegments(): Promise, @@ -544,7 +544,7 @@ export type RolloutPlan = { /** * Feature flags and rule-based segments. */ - splitChanges: ISplitChangesResponse; + splitChanges: IDefinitionChangesResponse; /** * Optional map of matching keys to their memberships. */ diff --git a/src/sync/__tests__/syncManagerOnline.spec.ts b/src/sync/__tests__/syncManagerOnline.spec.ts index c78b9215..902a0d93 100644 --- a/src/sync/__tests__/syncManagerOnline.spec.ts +++ b/src/sync/__tests__/syncManagerOnline.spec.ts @@ -10,6 +10,11 @@ jest.mock('../submitters/submitterManager', () => { }; }); +// Mocked splitApi +const splitApiMock = { + fetchSplitChanges: jest.fn() +} as any; + // Mocked storageManager const storageManagerMock = { splits: { @@ -49,6 +54,7 @@ test('syncManagerOnline should start or not the submitter depending on user cons const syncManager = syncManagerOnlineFactory()({ settings, // @ts-ignore storage: {}, + splitApi: splitApiMock, }); const submitterManager = syncManager.submitterManager!; @@ -101,6 +107,7 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', asy const syncManager = syncManagerOnlineFactory(() => pollingManagerMock, pushManagerFactoryMock)({ settings, // @ts-ignore storage: { validateCache: () => { return Promise.resolve({ initialCacheLoad: true, lastUpdateTimestamp: undefined }); } }, + splitApi: splitApiMock, }); expect(pushManagerFactoryMock).not.toBeCalled(); @@ -170,6 +177,7 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', asy const testSyncManager = syncManagerOnlineFactory(() => pollingManagerMock, pushManagerFactoryMock)({ settings, // @ts-ignore storage: { validateCache: () => Promise.resolve({ initialCacheLoad: true, lastUpdateTimestamp: undefined }) }, + splitApi: splitApiMock, }); expect(pushManagerFactoryMock).toBeCalled(); @@ -188,7 +196,8 @@ 
test('syncManagerOnline should emit SDK_SPLITS_CACHE_LOADED if validateCache ret const params = { settings: fullSettings, storage: { validateCache: () => Promise.resolve({ initialCacheLoad: false, lastUpdateTimestamp }) }, - readiness: { splits: { emit: jest.fn() } } + readiness: { splits: { emit: jest.fn() } }, + splitApi: splitApiMock, }; // @ts-ignore const syncManager = syncManagerOnlineFactory()(params); diff --git a/src/sync/offline/splitsParser/parseCondition.ts b/src/sync/offline/splitsParser/parseCondition.ts index a2223bec..ffe0863d 100644 --- a/src/sync/offline/splitsParser/parseCondition.ts +++ b/src/sync/offline/splitsParser/parseCondition.ts @@ -1,4 +1,4 @@ -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { isString } from '../../../utils/lang'; export interface IMockSplitEntry { @@ -7,7 +7,7 @@ export interface IMockSplitEntry { config?: string } -export function parseCondition(data: IMockSplitEntry): ISplitCondition { +export function parseCondition(data: IMockSplitEntry): IDefinitionCondition { const treatment = data.treatment; if (data.keys) { diff --git a/src/sync/offline/splitsParser/splitsParserFromSettings.ts b/src/sync/offline/splitsParser/splitsParserFromSettings.ts index f242b26c..696a5ccb 100644 --- a/src/sync/offline/splitsParser/splitsParserFromSettings.ts +++ b/src/sync/offline/splitsParser/splitsParserFromSettings.ts @@ -1,4 +1,4 @@ -import { ISplitPartial } from '../../../dtos/types'; +import { IDefinitionPartial } from './types'; import SplitIO from '../../../../types/splitio'; import { isObject, forOwn, merge } from '../../../utils/lang'; import { parseCondition } from './parseCondition'; @@ -41,12 +41,12 @@ export function splitsParserFromSettingsFactory() { * * @param settings - validated object with mocked features mapping. 
*/ - return function splitsParserFromSettings(settings: Pick): false | Record { + return function splitsParserFromSettings(settings: Pick): false | Record { const features = settings.features as SplitIO.MockedFeaturesMap || {}; if (!mockUpdated(features)) return false; - const splitObjects: Record = {}; + const splitObjects: Record = {}; forOwn(features, (data, splitName) => { let treatment = data; diff --git a/src/sync/offline/splitsParser/types.ts b/src/sync/offline/splitsParser/types.ts index a5d93647..91fd084c 100644 --- a/src/sync/offline/splitsParser/types.ts +++ b/src/sync/offline/splitsParser/types.ts @@ -1,5 +1,8 @@ -import { ISplitPartial } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { ISettings } from '../../../types'; -// Analog to `ISplitChangesFetcher` used by `splitChangesUpdaterFactory` -export type ISplitsParser = (settings: ISettings) => false | Record +// Split definition used in offline mode +export type IDefinitionPartial = Pick + +// Analog to `IDefinitionChangesFetcher` used by `definitionChangesUpdaterFactory` +export type IDefinitionsParser = (settings: ISettings) => false | Record diff --git a/src/sync/offline/syncManagerOffline.ts b/src/sync/offline/syncManagerOffline.ts index 31ac6dd0..60c6d6e8 100644 --- a/src/sync/offline/syncManagerOffline.ts +++ b/src/sync/offline/syncManagerOffline.ts @@ -1,7 +1,7 @@ import { ISyncManagerCS } from '../types'; import { fromObjectSyncTaskFactory } from './syncTasks/fromObjectSyncTask'; import { objectAssign } from '../../utils/lang/objectAssign'; -import { ISplitsParser } from './splitsParser/types'; +import { IDefinitionsParser } from './splitsParser/types'; import { IReadinessManager } from '../../readiness/types'; import { SDK_SEGMENTS_ARRIVED } from '../../readiness/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; @@ -17,7 +17,7 @@ function flush() { * @param splitsParser - e.g., `splitsParserFromFile`, 
`splitsParserFromSettings`. */ export function syncManagerOfflineFactory( - splitsParserFactory: () => ISplitsParser + splitsParserFactory: () => IDefinitionsParser ): (params: ISdkFactoryContextSync) => ISyncManagerCS { /** diff --git a/src/sync/offline/syncTasks/fromObjectSyncTask.ts b/src/sync/offline/syncTasks/fromObjectSyncTask.ts index cc2ffcb7..14f431ea 100644 --- a/src/sync/offline/syncTasks/fromObjectSyncTask.ts +++ b/src/sync/offline/syncTasks/fromObjectSyncTask.ts @@ -1,8 +1,8 @@ import { forOwn } from '../../../utils/lang'; import { IReadinessManager } from '../../../readiness/types'; import { IStorageSync } from '../../../storages/types'; -import { ISplitsParser } from '../splitsParser/types'; -import { ISplit, ISplitPartial } from '../../../dtos/types'; +import { IDefinitionsParser, IDefinitionPartial } from '../splitsParser/types'; +import { IDefinition } from '../../../dtos/types'; import { syncTaskFactory } from '../../syncTask'; import { ISyncTask } from '../../types'; import { ISettings } from '../../../types'; @@ -14,7 +14,7 @@ import { SYNC_OFFLINE_DATA, ERROR_SYNC_OFFLINE_LOADING } from '../../../logger/c * Offline equivalent of `splitChangesUpdaterFactory` */ export function fromObjectUpdaterFactory( - splitsParser: ISplitsParser, + splitsParser: IDefinitionsParser, storage: Pick, readiness: IReadinessManager, settings: ISettings, @@ -24,9 +24,9 @@ export function fromObjectUpdaterFactory( let startingUp = true; return function objectUpdater() { - const splits: ISplit[] = []; + const splits: IDefinition[] = []; let loadError = null; - let splitsMock: false | Record = {}; + let splitsMock: false | Record = {}; try { splitsMock = splitsParser(settings); } catch (err) { @@ -80,7 +80,7 @@ export function fromObjectUpdaterFactory( * PollingManager in Offline mode */ export function fromObjectSyncTaskFactory( - splitsParser: ISplitsParser, + splitsParser: IDefinitionsParser, storage: Pick, readiness: IReadinessManager, settings: ISettings diff 
--git a/src/sync/polling/fetchers/__tests__/configsFetcher.spec.ts b/src/sync/polling/fetchers/__tests__/configsFetcher.spec.ts index 99c1f0b1..8ea9e469 100644 --- a/src/sync/polling/fetchers/__tests__/configsFetcher.spec.ts +++ b/src/sync/polling/fetchers/__tests__/configsFetcher.spec.ts @@ -1,4 +1,4 @@ -import { ISplitChangesResponse } from '../../../../dtos/types'; +import { IDefinitionChangesResponse } from '../../../../dtos/types'; import { convertConfigsResponseToDefinitionChangesResponse, IConfigsResponse } from '../configsFetcher'; const INPUT: IConfigsResponse = { @@ -12,7 +12,7 @@ const INPUT: IConfigsResponse = { }], }; -const EXPECTED_OUTPUT: ISplitChangesResponse = { +const EXPECTED_OUTPUT: IDefinitionChangesResponse = { ff: { s: 100, t: 200, diff --git a/src/sync/polling/fetchers/configsFetcher.ts b/src/sync/polling/fetchers/configsFetcher.ts index 84b0a57a..0bf281c6 100644 --- a/src/sync/polling/fetchers/configsFetcher.ts +++ b/src/sync/polling/fetchers/configsFetcher.ts @@ -1,7 +1,8 @@ -import { ISplit, ISplitChangesResponse, ISplitCondition, ISplitMatcher } from '../../../dtos/types'; -import { IFetchDefinitionChanges, IResponse } from '../../../services/types'; -import { ISplitChangesFetcher } from './types'; +import { IDefinition, IDefinitionChangesResponse, IDefinitionCondition, IDefinitionMatcher } from '../../../dtos/types'; +import { IResponse } from '../../../services/types'; +import { IDefinitionChangesFetcher } from './types'; import SplitIO from '../../../../types/splitio'; +import { ISdkFactoryContextSync } from '../../../sdkFactory/types'; type IConfigMatcher = { type: 'IS_EQUAL_TO'; @@ -54,16 +55,17 @@ export interface IConfigsResponse { * Factory of Configs fetcher. * Configs fetcher is a wrapper around `configs` API service that parses the response and handle errors. 
*/ -export function configsFetcherFactory(fetchConfigs: IFetchDefinitionChanges): ISplitChangesFetcher { +export function configsFetcherFactory(params: ISdkFactoryContextSync): IDefinitionChangesFetcher { + const fetchConfigs = params.splitApi.fetchConfigs; - return function configsFetcher( + function configsFetcher( since: number, noCache?: boolean, till?: number, rbSince?: number, // Optional decorator for `fetchConfigs` promise, such as timeout or time tracker decorator?: (promise: Promise) => Promise - ): Promise { + ): Promise { let configsPromise = fetchConfigs(since, noCache, till, rbSince); if (decorator) configsPromise = decorator(configsPromise); @@ -71,11 +73,14 @@ export function configsFetcherFactory(fetchConfigs: IFetchDefinitionChanges): IS return configsPromise .then((resp: IResponse) => resp.json()) .then(convertConfigsResponseToDefinitionChangesResponse); - }; + } + + configsFetcher.type = 'configs' as const; + return configsFetcher; } -function defaultCondition(treatment: string): ISplitCondition { +function defaultCondition(treatment: string): IDefinitionCondition { return { conditionType: 'ROLLOUT', matcherGroup: { @@ -91,7 +96,7 @@ function defaultCondition(treatment: string): ISplitCondition { }; } -function convertMatcher(matcher: IConfigMatcher): ISplitMatcher { +function convertMatcher(matcher: IConfigMatcher): IDefinitionMatcher { const keySelector = matcher.attribute ? 
{ trafficType: 'user', attribute: matcher.attribute } : null; switch (matcher.type) { @@ -112,13 +117,13 @@ function convertMatcher(matcher: IConfigMatcher): ISplitMatcher { } } -function convertConfigToDefinition(config: IConfig): ISplit { +function convertConfigToDefinition(config: IConfig): IDefinition { const defaultTreatment = config.targeting?.default || config.variants[0]?.name || 'control'; const configurations: Record = {}; config.variants.forEach(variant => configurations[variant.name] = variant.definition); - const conditions: ISplitCondition[] = config.targeting?.conditions?.map(condition => ({ + const conditions: IDefinitionCondition[] = config.targeting?.conditions?.map(condition => ({ conditionType: condition.matchers.some((m: IConfigMatcher) => m.type === 'WHITELIST') ? 'WHITELIST' : 'ROLLOUT', label: condition.label, matcherGroup: { @@ -143,7 +148,7 @@ function convertConfigToDefinition(config: IConfig): ISplit { }; } -export function convertConfigsResponseToDefinitionChangesResponse(configs: IConfigsResponse): ISplitChangesResponse { +export function convertConfigsResponseToDefinitionChangesResponse(configs: IConfigsResponse): IDefinitionChangesResponse { return { ff: { s: configs.since, diff --git a/src/sync/polling/fetchers/splitChangesFetcher.ts b/src/sync/polling/fetchers/splitChangesFetcher.ts index 422beadf..5f677a0a 100644 --- a/src/sync/polling/fetchers/splitChangesFetcher.ts +++ b/src/sync/polling/fetchers/splitChangesFetcher.ts @@ -1,12 +1,12 @@ import { ISettings } from '../../../types'; -import { ISplitChangesResponse } from '../../../dtos/types'; -import { IFetchDefinitionChanges, IResponse } from '../../../services/types'; -import { IStorageBase } from '../../../storages/types'; +import { IDefinitionChangesResponse } from '../../../dtos/types'; +import { IResponse } from '../../../services/types'; import { FLAG_SPEC_VERSION } from '../../../utils/constants'; import { base } from '../../../utils/settingsValidation'; -import { 
ISplitChangesFetcher } from './types'; -import { LOG_PREFIX_SYNC_SPLITS } from '../../../logger/constants'; +import { IDefinitionChangesFetcher } from './types'; +import { LOG_PREFIX_SYNC } from '../../../logger/constants'; import { checkIfServerSide } from '../../../utils/key'; +import { ISdkFactoryContextSync } from '../../../sdkFactory/types'; const PROXY_CHECK_INTERVAL_MILLIS_CS = 60 * 60 * 1000; // 1 hour in Client Side const PROXY_CHECK_INTERVAL_MILLIS_SS = 24 * PROXY_CHECK_INTERVAL_MILLIS_CS; // 24 hours in Server Side @@ -20,20 +20,21 @@ function sdkEndpointOverridden(settings: ISettings) { * SplitChanges fetcher is a wrapper around `splitChanges` API service that parses the response and handle errors. */ // @TODO breaking: drop support for Split Proxy below v5.10.0 and simplify the implementation -export function splitChangesFetcherFactory(fetchSplitChanges: IFetchDefinitionChanges, settings: ISettings, storage: Pick): ISplitChangesFetcher { +export function splitChangesFetcherFactory(params: ISdkFactoryContextSync): IDefinitionChangesFetcher { + const { splitApi: { fetchSplitChanges }, settings, storage } = params; const log = settings.log; const PROXY_CHECK_INTERVAL_MILLIS = checkIfServerSide(settings) ? 
PROXY_CHECK_INTERVAL_MILLIS_SS : PROXY_CHECK_INTERVAL_MILLIS_CS; let lastProxyCheckTimestamp: number | undefined; - return function splitChangesFetcher( + function splitChangesFetcher( since: number, noCache?: boolean, till?: number, rbSince?: number, // Optional decorator for `fetchSplitChanges` promise, such as timeout or time tracker decorator?: (promise: Promise) => Promise - ): Promise { + ): Promise { // Recheck proxy if (lastProxyCheckTimestamp && (Date.now() - lastProxyCheckTimestamp) > PROXY_CHECK_INTERVAL_MILLIS) { @@ -44,7 +45,7 @@ export function splitChangesFetcherFactory(fetchSplitChanges: IFetchDefinitionCh .catch((err) => { // Handle proxy error with spec 1.3 if ((!err.statusCode || err.statusCode === 400) && sdkEndpointOverridden(settings) && settings.sync.flagSpecVersion === FLAG_SPEC_VERSION) { - log.error(LOG_PREFIX_SYNC_SPLITS + 'Proxy error detected. Retrying with spec 1.2. If you are using Split Proxy, please upgrade to latest version'); + log.error(LOG_PREFIX_SYNC + 'Proxy error detected. Retrying with spec 1.2. 
If you are using Split Proxy, please upgrade to latest version'); lastProxyCheckTimestamp = Date.now(); settings.sync.flagSpecVersion = '1.2'; // fallback to 1.2 spec return fetchSplitChanges(since, noCache, till); // retry request without rbSince @@ -70,10 +71,10 @@ export function splitChangesFetcherFactory(fetchSplitChanges: IFetchDefinitionCh // Proxy recovery if (lastProxyCheckTimestamp) { - log.info(LOG_PREFIX_SYNC_SPLITS + 'Proxy error recovered'); + log.info(LOG_PREFIX_SYNC + 'Proxy error recovered'); lastProxyCheckTimestamp = undefined; return splitChangesFetcher(-1, undefined, undefined, -1) - .then((splitChangesResponse: ISplitChangesResponse) => + .then((splitChangesResponse: IDefinitionChangesResponse) => Promise.all([storage.splits.clear(), storage.rbSegments.clear()]) .then(() => splitChangesResponse) ); @@ -81,6 +82,8 @@ export function splitChangesFetcherFactory(fetchSplitChanges: IFetchDefinitionCh return data; }); - }; + } + splitChangesFetcher.type = 'feature flags' as const; + return splitChangesFetcher; } diff --git a/src/sync/polling/fetchers/types.ts b/src/sync/polling/fetchers/types.ts index 8fe922ce..86c92f0f 100644 --- a/src/sync/polling/fetchers/types.ts +++ b/src/sync/polling/fetchers/types.ts @@ -1,13 +1,15 @@ -import { ISplitChangesResponse, ISegmentChangesResponse, IMembershipsResponse } from '../../../dtos/types'; +import { IDefinitionChangesResponse, ISegmentChangesResponse, IMembershipsResponse } from '../../../dtos/types'; import { IResponse } from '../../../services/types'; -export type ISplitChangesFetcher = ( +export type IDefinitionChangesFetcher = (( since: number, noCache?: boolean, till?: number, rbSince?: number, decorator?: (promise: Promise) => Promise -) => Promise +) => Promise) & { + type: 'configs' | 'feature flags' +} export type ISegmentChangesFetcher = ( since: number, diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 5c1169d3..05895aac 100644 --- 
a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -3,26 +3,28 @@ import { forOwn } from '../../utils/lang'; import { IReadinessManager } from '../../readiness/types'; import { IStorageSync } from '../../storages/types'; import { mySegmentsSyncTaskFactory } from './syncTasks/mySegmentsSyncTask'; -import { splitsSyncTaskFactory } from './syncTasks/splitsSyncTask'; +import { definitionsSyncTaskFactory } from './syncTasks/definitionsSyncTask'; import { getMatching } from '../../utils/key'; import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../readiness/constants'; import { POLLING_SMART_PAUSING, POLLING_START, POLLING_STOP } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; import { usesSegmentsSync } from '../../storages/AbstractSplitsCacheSync'; import { SdkUpdateMetadata } from '../../../types/splitio'; +import { IDefinitionChangesFetcher } from './fetchers/types'; /** * Expose start / stop mechanism for polling data from services. * For client-side API with multiple clients. */ export function pollingManagerCSFactory( - params: ISdkFactoryContextSync + params: ISdkFactoryContextSync, + definitionChangesFetcher: IDefinitionChangesFetcher ): IPollingManagerCS { const { splitApi, storage, readiness, settings } = params; const log = settings.log; - const splitsSyncTask = splitsSyncTaskFactory(splitApi.fetchSplitChanges, storage, readiness, settings, true); + const definitionsSyncTask = definitionsSyncTaskFactory(definitionChangesFetcher, storage, readiness, settings, true); // Map of matching keys to their corresponding MySegmentsSyncTask. 
const mySegmentsSyncTasks: Record = {}; @@ -44,7 +46,7 @@ export function pollingManagerCSFactory( // smart pausing readiness.splits.on(SDK_SPLITS_ARRIVED, () => { - if (!splitsSyncTask.isRunning()) return; // noop if not doing polling + if (!definitionsSyncTask.isRunning()) return; // noop if not doing polling const usingSegments = usesSegmentsSync(storage); if (usingSegments !== mySegmentsSyncTask.isRunning()) { log.info(POLLING_SMART_PAUSING, [usingSegments ? 'ON' : 'OFF']); @@ -71,14 +73,14 @@ export function pollingManagerCSFactory( } return { - splitsSyncTask, + definitionsSyncTask, segmentsSyncTask: mySegmentsSyncTask, // Start periodic fetching (polling) start() { log.info(POLLING_START); - splitsSyncTask.start(); + definitionsSyncTask.start(); if (usesSegmentsSync(storage)) startMySegmentsSyncTasks(); }, @@ -86,16 +88,16 @@ export function pollingManagerCSFactory( stop() { log.info(POLLING_STOP); - if (splitsSyncTask.isRunning()) splitsSyncTask.stop(); + if (definitionsSyncTask.isRunning()) definitionsSyncTask.stop(); stopMySegmentsSyncTasks(); }, // Used by SyncManager to know if running in polling mode. 
- isRunning: splitsSyncTask.isRunning, + isRunning: definitionsSyncTask.isRunning, - // fetch splits and segments + // fetch definitions and segments syncAll() { - const promises = [splitsSyncTask.execute()]; + const promises = [definitionsSyncTask.execute()]; forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => { promises.push(mySegmentsSyncTask.execute()); }); diff --git a/src/sync/polling/pollingManagerSS.ts b/src/sync/polling/pollingManagerSS.ts index cea57dfe..028162ca 100644 --- a/src/sync/polling/pollingManagerSS.ts +++ b/src/sync/polling/pollingManagerSS.ts @@ -1,36 +1,38 @@ -import { splitsSyncTaskFactory } from './syncTasks/splitsSyncTask'; +import { definitionsSyncTaskFactory } from './syncTasks/definitionsSyncTask'; import { segmentsSyncTaskFactory } from './syncTasks/segmentsSyncTask'; -import { IPollingManager, ISegmentsSyncTask, ISplitsSyncTask } from './types'; +import { IPollingManager, ISegmentsSyncTask, IDefinitionsSyncTask } from './types'; import { POLLING_START, POLLING_STOP, LOG_PREFIX_SYNC_POLLING } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; +import { IDefinitionChangesFetcher } from './fetchers/types'; /** * Expose start / stop mechanism for pulling data from services. 
*/ export function pollingManagerSSFactory( - params: ISdkFactoryContextSync + params: ISdkFactoryContextSync, + definitionChangesFetcher: IDefinitionChangesFetcher ): IPollingManager { const { splitApi, storage, readiness, settings } = params; const log = settings.log; - const splitsSyncTask: ISplitsSyncTask = splitsSyncTaskFactory(splitApi.fetchSplitChanges, storage, readiness, settings); + const definitionsSyncTask: IDefinitionsSyncTask = definitionsSyncTaskFactory(definitionChangesFetcher, storage, readiness, settings); const segmentsSyncTask: ISegmentsSyncTask = segmentsSyncTaskFactory(splitApi.fetchSegmentChanges, storage, readiness, settings); return { - splitsSyncTask, + definitionsSyncTask, segmentsSyncTask, // Start periodic fetching (polling) start() { log.info(POLLING_START); - log.debug(LOG_PREFIX_SYNC_POLLING + `Splits will be refreshed each ${settings.scheduler.featuresRefreshRate} millis`); - log.debug(LOG_PREFIX_SYNC_POLLING + `Segments will be refreshed each ${settings.scheduler.segmentsRefreshRate} millis`); + log.debug(LOG_PREFIX_SYNC_POLLING + `${definitionChangesFetcher.type} will be refreshed each ${settings.scheduler.featuresRefreshRate} millis`); + log.debug(LOG_PREFIX_SYNC_POLLING + `segments will be refreshed each ${settings.scheduler.segmentsRefreshRate} millis`); - const startingUp = splitsSyncTask.start(); + const startingUp = definitionsSyncTask.start(); if (startingUp) { startingUp.then(() => { - if (splitsSyncTask.isRunning()) segmentsSyncTask.start(); + if (definitionsSyncTask.isRunning()) segmentsSyncTask.start(); }); } }, @@ -39,16 +41,16 @@ export function pollingManagerSSFactory( stop() { log.info(POLLING_STOP); - if (splitsSyncTask.isRunning()) splitsSyncTask.stop(); + if (definitionsSyncTask.isRunning()) definitionsSyncTask.stop(); if (segmentsSyncTask.isRunning()) segmentsSyncTask.stop(); }, // Used by SyncManager to know if running in polling mode. 
- isRunning: splitsSyncTask.isRunning, + isRunning: definitionsSyncTask.isRunning, syncAll() { - // fetch splits and segments. There is no need to catch this promise (`SplitChangesUpdater` is always resolved with a boolean value) - return splitsSyncTask.execute().then(() => { + // fetch definitions and segments. There is no need to catch this promise (`DefinitionChangesUpdater` is always resolved with a boolean value) + return definitionsSyncTask.execute().then(() => { return segmentsSyncTask.execute(); }); } diff --git a/src/sync/polling/syncTasks/splitsSyncTask.ts b/src/sync/polling/syncTasks/definitionsSyncTask.ts similarity index 52% rename from src/sync/polling/syncTasks/splitsSyncTask.ts rename to src/sync/polling/syncTasks/definitionsSyncTask.ts index 3cd9ecbf..6e107211 100644 --- a/src/sync/polling/syncTasks/splitsSyncTask.ts +++ b/src/sync/polling/syncTasks/definitionsSyncTask.ts @@ -1,27 +1,26 @@ import { IStorageSync } from '../../../storages/types'; import { IReadinessManager } from '../../../readiness/types'; import { syncTaskFactory } from '../../syncTask'; -import { ISplitsSyncTask } from '../types'; -import { splitChangesFetcherFactory } from '../fetchers/splitChangesFetcher'; -import { IFetchDefinitionChanges } from '../../../services/types'; +import { IDefinitionsSyncTask } from '../types'; import { ISettings } from '../../../types'; -import { splitChangesUpdaterFactory } from '../updaters/splitChangesUpdater'; +import { definitionChangesUpdaterFactory } from '../updaters/definitionChangesUpdater'; +import { IDefinitionChangesFetcher } from '../fetchers/types'; /** - * Creates a sync task that periodically executes a `splitChangesUpdater` task + * Creates a sync task that periodically executes a `definitionChangesUpdater` task */ -export function splitsSyncTaskFactory( - fetchSplitChanges: IFetchDefinitionChanges, +export function definitionsSyncTaskFactory( + definitionChangesFetcher: IDefinitionChangesFetcher, storage: IStorageSync, readiness: 
IReadinessManager, settings: ISettings, isClientSide?: boolean -): ISplitsSyncTask { +): IDefinitionsSyncTask { return syncTaskFactory( settings.log, - splitChangesUpdaterFactory( + definitionChangesUpdaterFactory( settings.log, - splitChangesFetcherFactory(fetchSplitChanges, settings, storage), + definitionChangesFetcher, storage, settings.sync.__splitFiltersValidation, readiness.splits, @@ -30,6 +29,6 @@ export function splitsSyncTaskFactory( isClientSide ), settings.scheduler.featuresRefreshRate, - 'splitChangesUpdater', + 'definitionChangesUpdater', ); } diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index 4ff29c83..2f8e0f0a 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -1,10 +1,10 @@ -import { IRBSegment, ISplit } from '../../dtos/types'; +import { IRBSegment, IDefinition } from '../../dtos/types'; import { IReadinessManager } from '../../readiness/types'; import { IStorageSync } from '../../storages/types'; import { MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from '../streaming/types'; import { ITask, ISyncTask } from '../types'; -export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: number, splitUpdateNotification?: { payload: ISplit | IRBSegment, changeNumber: number }], boolean> { } +export interface IDefinitionsSyncTask extends ISyncTask<[noCache?: boolean, till?: number, splitUpdateNotification?: { payload: IDefinition | IRBSegment, changeNumber: number }], boolean> { } export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> { } @@ -19,7 +19,7 @@ export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegment export interface IPollingManager extends ITask { syncAll(): Promise - splitsSyncTask: ISplitsSyncTask + definitionsSyncTask: IDefinitionsSyncTask segmentsSyncTask: ISyncTask } diff --git a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts 
b/src/sync/polling/updaters/__tests__/definitionChangesUpdater.spec.ts similarity index 78% rename from src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts rename to src/sync/polling/updaters/__tests__/definitionChangesUpdater.spec.ts index 5398e06b..757458f5 100644 --- a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts +++ b/src/sync/polling/updaters/__tests__/definitionChangesUpdater.spec.ts @@ -1,10 +1,10 @@ -import { IRBSegment, ISplit } from '../../../../dtos/types'; +import { IRBSegment, IDefinition } from '../../../../dtos/types'; import { readinessManagerFactory } from '../../../../readiness/readinessManager'; import { splitApiFactory } from '../../../../services/splitApi'; import { SegmentsCacheInMemory } from '../../../../storages/inMemory/SegmentsCacheInMemory'; import { SplitsCacheInMemory } from '../../../../storages/inMemory/SplitsCacheInMemory'; import { splitChangesFetcherFactory } from '../../fetchers/splitChangesFetcher'; -import { splitChangesUpdaterFactory, parseSegments, computeMutation } from '../splitChangesUpdater'; +import { definitionChangesUpdaterFactory, parseSegments, computeMutation } from '../definitionChangesUpdater'; import splitChangesMock1 from '../../../../__tests__/mocks/splitchanges.since.-1.json'; import fetchMock from '../../../../__tests__/testUtils/fetchMock'; import { fullSettings, settingsSplitApi } from '../../../../utils/settingsValidation/__tests__/settings.mocks'; @@ -45,7 +45,7 @@ const archivedSplit = { status: 'ARCHIVED' }; // @ts-ignore -const testFFSetsAB: ISplit = +const testFFSetsAB: IDefinition = { name: 'test', status: 'ACTIVE', @@ -54,7 +54,7 @@ const testFFSetsAB: ISplit = sets: ['set_a', 'set_b'] }; // @ts-ignore -const test2FFSetsX: ISplit = +const test2FFSetsX: IDefinition = { name: 'test2', status: 'ACTIVE', @@ -63,7 +63,7 @@ const test2FFSetsX: ISplit = sets: ['set_x'] }; // @ts-ignore -const testFFRemoveSetB: ISplit = +const testFFRemoveSetB: IDefinition = { name: 'test', 
status: 'ACTIVE', @@ -71,7 +71,7 @@ const testFFRemoveSetB: ISplit = sets: ['set_a'] }; // @ts-ignore -const testFFRemoveSetA: ISplit = +const testFFRemoveSetA: IDefinition = { name: 'test', status: 'ACTIVE', @@ -79,7 +79,7 @@ const testFFRemoveSetA: ISplit = sets: ['set_x'] }; // @ts-ignore -const testFFEmptySet: ISplit = +const testFFEmptySet: IDefinition = { name: 'test', status: 'ACTIVE', @@ -102,8 +102,8 @@ const rbsWithExcludedSegment: IRBSegment = { } }; -test('splitChangesUpdater / segments parser', () => { - let segments = parseSegments(activeSplitWithSegments as ISplit); +test('definitionChangesUpdater / segments parser', () => { + let segments = parseSegments(activeSplitWithSegments as IDefinition); expect(segments).toEqual(new Set(['A', 'B'])); segments = parseSegments(rbsWithExcludedSegment); @@ -113,11 +113,11 @@ test('splitChangesUpdater / segments parser', () => { expect(segments).toEqual(new Set(['D'])); }); -test('splitChangesUpdater / compute splits mutation', () => { +test('definitionChangesUpdater / compute splits mutation', () => { const splitFiltersValidation = { queryString: null, groupedFilters: { bySet: [], byName: [], byPrefix: [] }, validFilters: [] }; let segments = new Set(); - let splitsMutation = computeMutation([activeSplitWithSegments, archivedSplit] as ISplit[], segments, splitFiltersValidation); + let splitsMutation = computeMutation([activeSplitWithSegments, archivedSplit] as IDefinition[], segments, splitFiltersValidation); expect(splitsMutation.added).toEqual([activeSplitWithSegments]); expect(splitsMutation.removed).toEqual([archivedSplit]); @@ -127,7 +127,7 @@ test('splitChangesUpdater / compute splits mutation', () => { // SDK initialization without sets // should process all the notifications segments = new Set(); - splitsMutation = computeMutation([testFFSetsAB, test2FFSetsX] as ISplit[], segments, splitFiltersValidation); + splitsMutation = computeMutation([testFFSetsAB, test2FFSetsX] as IDefinition[], segments, 
splitFiltersValidation); expect(splitsMutation.added).toEqual([testFFSetsAB, test2FFSetsX]); expect(splitsMutation.removed).toEqual([]); @@ -135,7 +135,7 @@ test('splitChangesUpdater / compute splits mutation', () => { expect(Array.from(segments)).toEqual([]); }); -test('splitChangesUpdater / compute splits mutation with filters', () => { +test('definitionChangesUpdater / compute splits mutation with filters', () => { // SDK initialization with sets: [set_a, set_b] let splitFiltersValidation = { queryString: '&sets=set_a,set_b', groupedFilters: { bySet: ['set_a', 'set_b'], byName: ['name_1'], byPrefix: [] }, validFilters: [] }; @@ -183,7 +183,7 @@ test('splitChangesUpdater / compute splits mutation with filters', () => { expect(splitsMutation.names).toEqual([test2FFSetsX.name, testFFEmptySet.name]); }); -describe('splitChangesUpdater', () => { +describe('definitionChangesUpdater', () => { const splits = new SplitsCacheInMemory(); const updateSplits = jest.spyOn(splits, 'update'); @@ -197,22 +197,22 @@ describe('splitChangesUpdater', () => { fetchMock.once('*', { status: 200, body: splitChangesMock1 }); // @ts-ignore const splitApi = splitApiFactory(settingsSplitApi, { getFetch: () => fetchMock }, telemetryTrackerFactory()); - const fetchSplitChanges = jest.spyOn(splitApi, 'fetchSplitChanges'); - const splitChangesFetcher = splitChangesFetcherFactory(splitApi.fetchSplitChanges, fullSettings, storage); + const fetchSplitChanges = jest.spyOn(splitApi, 'fetchSplitChanges'); // @ts-ignore + const splitChangesFetcher = splitChangesFetcherFactory({ splitApi, settings: fullSettings, storage }); const readinessManager = readinessManagerFactory(EventEmitter, fullSettings); const splitsEmitSpy = jest.spyOn(readinessManager.splits, 'emit'); let splitFiltersValidation = { queryString: null, groupedFilters: { bySet: [], byName: [], byPrefix: [] }, validFilters: [] }; - let splitChangesUpdater = splitChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, 
splitFiltersValidation, readinessManager.splits, 1000, 1); + let definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1); afterEach(() => { jest.clearAllMocks(); }); test('test without payload', async () => { - const result = await splitChangesUpdater(); + const result = await definitionChangesUpdater(); const updatedFlags = splitChangesMock1.ff.d.map(ff => ff.name); expect(fetchSplitChanges).toBeCalledTimes(1); @@ -228,10 +228,10 @@ describe('splitChangesUpdater', () => { test('test with ff payload', async () => { let index = 0; for (const notification of splitNotifications) { - const payload = notification.decoded as Pick; + const payload = notification.decoded as Pick; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); // fetch and RBSegments.update not being called expect(fetchSplitChanges).toBeCalledTimes(0); @@ -255,7 +255,7 @@ describe('splitChangesUpdater', () => { const payload = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); // fetch and Splits.update not being called expect(fetchSplitChanges).toBeCalledTimes(0); @@ -269,7 +269,7 @@ describe('splitChangesUpdater', () => { }); test('flag sets splits-arrived emission', async () => { - const payload = splitNotifications[3].decoded as Pick; + 
const payload = splitNotifications[3].decoded as Pick; const setMocks = [ { sets: [], shouldEmit: false }, /* should not emit if flag does not have any set */ { sets: ['set_a'], shouldEmit: true }, /* should emit if flag is in configured sets */ @@ -279,13 +279,13 @@ describe('splitChangesUpdater', () => { { sets: ['set_a'], shouldEmit: true }, /* should emit if flag is back in configured sets */ ]; - splitChangesUpdater = splitChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); + definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); let index = 0; let calls = 0; // emit always if not configured sets for (const setMock of setMocks) { - await expect(splitChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); expect(splitsEmitSpy.mock.calls[index][0]).toBe(SDK_SPLITS_ARRIVED); expect(splitsEmitSpy.mock.calls[index][1]).toEqual({ type: FLAGS_UPDATE, names: [payload.name] }); index++; @@ -294,11 +294,11 @@ describe('splitChangesUpdater', () => { // @ts-ignore splitFiltersValidation = { queryString: null, groupedFilters: { bySet: ['set_a'], byName: [], byPrefix: [] }, validFilters: [] }; storage.splits.clear(); - splitChangesUpdater = splitChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); + definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); splitsEmitSpy.mockReset(); index = 0; for (const setMock 
of setMocks) { - await expect(splitChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); if (setMock.shouldEmit) calls++; expect(splitsEmitSpy.mock.calls.length).toBe(calls); index++; @@ -312,10 +312,10 @@ describe('splitChangesUpdater', () => { readinessManager.splits.splitsArrived = false; storage.splits.clear(); - const payload = splitNotifications[0].decoded as Pick; + const payload = splitNotifications[0].decoded as Pick; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: [payload.name] }); }); @@ -328,12 +328,12 @@ describe('splitChangesUpdater', () => { readinessManager.splits.splitsArrived = false; readinessManager.segments.segmentsArrived = true; // Segments ready - const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 100, conditions: [] } as unknown as ISplit; - const flag2 = { name: 'flag2', status: 'ACTIVE', changeNumber: 101, conditions: [] } as unknown as ISplit; - const flag3 = { name: 'flag3', status: 'ACTIVE', changeNumber: 102, conditions: [] } as unknown as ISplit; + const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 100, conditions: [] } as unknown as IDefinition; + const flag2 = { name: 'flag2', status: 'ACTIVE', changeNumber: 101, conditions: [] } as unknown as IDefinition; + const flag3 = { name: 'flag3', status: 'ACTIVE', changeNumber: 102, conditions: [] } as 
unknown as IDefinition; fetchMock.once('*', { status: 200, body: { ff: { d: [flag1, flag2, flag3], t: 102 } } }); - await splitChangesUpdater(); + await definitionChangesUpdater(); // Should emit with metadata when splitsArrived is false (first update) expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: ['flag1', 'flag2', 'flag3'] }); @@ -347,12 +347,12 @@ describe('splitChangesUpdater', () => { readinessManager.splits.splitsArrived = false; readinessManager.segments.segmentsArrived = true; // Segments ready - const archivedFlag = { name: 'archived-flag', status: ARCHIVED_FF, changeNumber: 200, conditions: [] } as unknown as ISplit; + const archivedFlag = { name: 'archived-flag', status: ARCHIVED_FF, changeNumber: 200, conditions: [] } as unknown as IDefinition; - const payload = archivedFlag as Pick; + const payload = archivedFlag as Pick; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); // Should emit with metadata when splitsArrived is false (first update) expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: [payload.name] }); @@ -366,7 +366,7 @@ describe('splitChangesUpdater', () => { const payload = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); // Should emit SEGMENTS_UPDATE (not FLAGS_UPDATE) when only RB segment is updated 
expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); @@ -381,7 +381,7 @@ describe('splitChangesUpdater', () => { // Simulate a scenario where only RB segments are updated (no flags) const rbSegment = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; fetchMock.once('*', { status: 200, body: { rbs: { d: [rbSegment], t: 1684329854385 } } }); - await splitChangesUpdater(); + await definitionChangesUpdater(); // When updatedFlags.length === 0, should emit SEGMENTS_UPDATE expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); @@ -395,12 +395,12 @@ describe('splitChangesUpdater', () => { storage.segments.clear(); // Simulate a scenario where both flags and RB segments are updated - const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 400, conditions: [] } as unknown as ISplit; - const flag2 = { name: 'flag2', status: 'ACTIVE', changeNumber: 401, conditions: [] } as unknown as ISplit; + const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 400, conditions: [] } as unknown as IDefinition; + const flag2 = { name: 'flag2', status: 'ACTIVE', changeNumber: 401, conditions: [] } as unknown as IDefinition; const rbSegment = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; fetchMock.once('*', { status: 200, body: { ff: { d: [flag1, flag2], t: 401 }, rbs: { d: [rbSegment], t: 1684329854385 } } }); - await splitChangesUpdater(); + await definitionChangesUpdater(); // When both flags and RB segments are updated, should emit FLAGS_UPDATE with flag names expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: ['flag1', 'flag2'] }); @@ -414,9 +414,9 @@ describe('splitChangesUpdater', () => { readinessManager.segments.segmentsArrived = false; // Segments not ready - client-side should still emit // Create client-side updater 
(isClientSide = true) - const clientSideUpdater = splitChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); + const clientSideUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); - const flag1 = { name: 'client-flag', status: 'ACTIVE', changeNumber: 300, conditions: [] } as unknown as ISplit; + const flag1 = { name: 'client-flag', status: 'ACTIVE', changeNumber: 300, conditions: [] } as unknown as IDefinition; fetchMock.once('*', { status: 200, body: { ff: { d: [flag1], t: 300 } } }); await clientSideUpdater(); diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/definitionChangesUpdater.ts similarity index 61% rename from src/sync/polling/updaters/splitChangesUpdater.ts rename to src/sync/polling/updaters/definitionChangesUpdater.ts index 0510a485..1bcea40f 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/definitionChangesUpdater.ts @@ -1,19 +1,19 @@ import { ISegmentsCacheBase, IStorageBase } from '../../../storages/types'; -import { ISplitChangesFetcher } from '../fetchers/types'; -import { IRBSegment, ISplit, ISplitChangesResponse, ISplitFiltersValidation, MaybeThenable } from '../../../dtos/types'; +import { IDefinitionChangesFetcher } from '../fetchers/types'; +import { IRBSegment, IDefinition, IDefinitionChangesResponse, ISplitFiltersValidation, MaybeThenable } from '../../../dtos/types'; import { ISplitsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; import { SDK_SPLITS_ARRIVED, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; -import { SYNC_SPLITS_FETCH, SYNC_SPLITS_UPDATE, SYNC_RBS_UPDATE, SYNC_SPLITS_FETCH_FAILS, SYNC_SPLITS_FETCH_RETRY } from '../../../logger/constants'; +import 
{ SYNC_FETCH, SYNC_UPDATE, SYNC_FETCH_FAILS, SYNC_FETCH_RETRY } from '../../../logger/constants'; import { startsWith } from '../../../utils/lang'; import { IN_RULE_BASED_SEGMENT, IN_SEGMENT, RULE_BASED_SEGMENT, STANDARD_SEGMENT } from '../../../utils/constants'; import { setToArray } from '../../../utils/lang/sets'; import { SPLIT_UPDATE } from '../../streaming/constants'; import { SdkUpdateMetadata } from '../../../../types/splitio'; -export type InstantUpdate = { payload: ISplit | IRBSegment, changeNumber: number, type: string }; -type SplitChangesUpdater = (noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) => Promise +export type InstantUpdate = { payload: IDefinition | IRBSegment, changeNumber: number, type: string }; +type DefinitionChangesUpdater = (noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) => Promise // Checks that all registered segments have been fetched (changeNumber !== -1 for every segment). // Returns a promise that could be rejected. @@ -30,7 +30,7 @@ function checkAllSegmentsExist(segments: ISegmentsCacheBase): Promise { * Collect segments from a raw FF or RBS definition. * Exported for testing purposes. 
*/ -export function parseSegments(ruleEntity: ISplit | IRBSegment, matcherType: typeof IN_SEGMENT | typeof IN_RULE_BASED_SEGMENT = IN_SEGMENT): Set { +export function parseSegments(ruleEntity: IDefinition | IRBSegment, matcherType: typeof IN_SEGMENT | typeof IN_RULE_BASED_SEGMENT = IN_SEGMENT): Set { const { conditions, excluded } = ruleEntity as IRBSegment; const segments = new Set(); @@ -55,42 +55,42 @@ export function parseSegments(ruleEntity: ISplit | IRBSegment, matcherType: type return segments; } -interface ISplitMutations { +interface IDefinitionMutations { added: T[], removed: T[], names: string[] } /** - * If there are defined filters and one feature flag doesn't match with them, its status is changed to 'ARCHIVE' to avoid storing it + * If there are defined filters and one definition doesn't match with them, its status is changed to 'ARCHIVE' to avoid storing it * If there is `bySet` filter, `byName` and `byPrefix` filters are ignored * - * @param featureFlag - feature flag to be evaluated + * @param definition - definition to be evaluated * @param filters - splitFiltersValidation bySet | byName */ -function matchFilters(featureFlag: ISplit, filters: ISplitFiltersValidation) { +function matchFilters(definition: IDefinition, filters: ISplitFiltersValidation) { const { bySet: setsFilter, byName: namesFilter, byPrefix: prefixFilter } = filters.groupedFilters; - if (setsFilter.length > 0) return featureFlag.sets && featureFlag.sets.some((featureFlagSet: string) => setsFilter.indexOf(featureFlagSet) > -1); + if (setsFilter.length > 0) return definition.sets && definition.sets.some((definitionSet: string) => setsFilter.indexOf(definitionSet) > -1); const namesFilterConfigured = namesFilter.length > 0; const prefixFilterConfigured = prefixFilter.length > 0; if (!namesFilterConfigured && !prefixFilterConfigured) return true; - const matchNames = namesFilterConfigured && namesFilter.indexOf(featureFlag.name) > -1; - const matchPrefix = prefixFilterConfigured && 
prefixFilter.some(prefix => startsWith(featureFlag.name, prefix)); + const matchNames = namesFilterConfigured && namesFilter.indexOf(definition.name) > -1; + const matchPrefix = prefixFilterConfigured && prefixFilter.some(prefix => startsWith(definition.name, prefix)); return matchNames || matchPrefix; } /** - * Given the list of splits from /splitChanges endpoint, it returns the mutations, - * i.e., an object with added splits, removed splits and used segments. + * Given the list of definitions from /splitChanges or /configs endpoint, it returns the mutations, + * i.e., an object with added definitions, removed definitions, and used segments. * Exported for testing purposes. */ -export function computeMutation(rules: Array, segments: Set, filters?: ISplitFiltersValidation): ISplitMutations { +export function computeMutation(rules: Array, segments: Set, filters?: ISplitFiltersValidation): IDefinitionMutations { return rules.reduce((accum, ruleEntity) => { - if (ruleEntity.status !== 'ARCHIVED' && (!filters || matchFilters(ruleEntity as ISplit, filters))) { + if (ruleEntity.status !== 'ARCHIVED' && (!filters || matchFilters(ruleEntity as IDefinition, filters))) { accum.added.push(ruleEntity); parseSegments(ruleEntity).forEach((segmentName: string) => { @@ -102,88 +102,88 @@ export function computeMutation(rules: Array, accum.names.push(ruleEntity.name); return accum; - }, { added: [], removed: [], names: [] } as ISplitMutations); + }, { added: [], removed: [], names: [] } as IDefinitionMutations); } /** - * factory of SplitChanges updater, a task that: - * - fetches split changes using `splitChangesFetcher` - * - updates `splitsCache` - * - uses `splitsEventEmitter` to emit events related to split data updates + * Factory of DefinitionChanges updater, a task that: + * - fetches definition changes using `definitionChangesFetcher` + * - updates definitions storage + * - uses `definitionsEventEmitter` to emit events related to definition data updates * * @param log - 
Logger instance - * @param splitChangesFetcher - Fetcher of `/splitChanges` - * @param splits - Splits storage, with sync or async methods + * @param definitionChangesFetcher - Fetcher of `/splitChanges` or `/configs` + * @param definitions - Definitions storage, with sync or async methods * @param segments - Segments storage, with sync or async methods - * @param splitsEventEmitter - Optional readiness manager. Not required for synchronizer or producer mode. + * @param splitsEventEmitter - Optional readiness manager. Not required for synchronizer or producer mode. * @param requestTimeoutBeforeReady - How long the updater will wait for the request to timeout. Default 0, i.e., never timeout. - * @param retriesOnFailureBeforeReady - How many retries on `/splitChanges` we the updater do in case of failure or timeout. Default 0, i.e., no retries. + * @param retriesOnFailureBeforeReady - How many retries on `/splitChanges` or `/configs` the updater will do in case of failure or timeout. Default 0, i.e., no retries. 
*/ -export function splitChangesUpdaterFactory( +export function definitionChangesUpdaterFactory( log: ILogger, - splitChangesFetcher: ISplitChangesFetcher, + definitionChangesFetcher: IDefinitionChangesFetcher, storage: Pick, splitFiltersValidation: ISplitFiltersValidation, splitsEventEmitter?: ISplitsEventEmitter, requestTimeoutBeforeReady = 0, retriesOnFailureBeforeReady = 0, isClientSide?: boolean -): SplitChangesUpdater { +): DefinitionChangesUpdater { const { splits, rbSegments, segments } = storage; let startingUp = true; - /** timeout decorator for `splitChangesFetcher` promise */ + /** timeout decorator for `definitionChangesFetcher` promise */ function _promiseDecorator(promise: Promise) { if (startingUp && requestTimeoutBeforeReady) promise = timeout(requestTimeoutBeforeReady, promise); return promise; } /** - * SplitChanges updater returns a promise that resolves with a `false` boolean value if it fails to fetch splits or synchronize them with the storage. + * DefinitionChanges updater returns a promise that resolves with a `false` boolean value if it fails to fetch definitions or synchronize them with the storage. * Returned promise will not be rejected. 
* * @param noCache - true to revalidate data to fetch * @param till - query param to bypass CDN requests */ - return function splitChangesUpdater(noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) { + return function definitionChangesUpdater(noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) { /** - * @param since - current changeNumber at splitsCache + * @param sinces - current changeNumbers [since, rbSince] at the definitions cache * @param retry - current number of retry attempts */ - function _splitChangesUpdater(sinces: [number, number], retry = 0): Promise { + function _definitionChangesUpdater(sinces: [number, number], retry = 0): Promise { const [since, rbSince] = sinces; - log.debug(SYNC_SPLITS_FETCH, sinces); + log.debug(SYNC_FETCH, [definitionChangesFetcher.type, since, rbSince]); return Promise.resolve( instantUpdate ? instantUpdate.type === SPLIT_UPDATE ? // IFFU edge case: a change to a flag that adds an IN_RULE_BASED_SEGMENT matcher that is not present yet Promise.resolve(rbSegments.contains(parseSegments(instantUpdate.payload, IN_RULE_BASED_SEGMENT))).then((contains) => { return contains ? 
- { ff: { d: [instantUpdate.payload as ISplit], t: instantUpdate.changeNumber } } : - splitChangesFetcher(since, noCache, till, rbSince, _promiseDecorator); + { ff: { d: [instantUpdate.payload as IDefinition], t: instantUpdate.changeNumber } } : + definitionChangesFetcher(since, noCache, till, rbSince, _promiseDecorator); }) : { rbs: { d: [instantUpdate.payload as IRBSegment], t: instantUpdate.changeNumber } } : - splitChangesFetcher(since, noCache, till, rbSince, _promiseDecorator) + definitionChangesFetcher(since, noCache, till, rbSince, _promiseDecorator) ) - .then((splitChanges: ISplitChangesResponse) => { + .then((definitionChanges: IDefinitionChangesResponse) => { const usedSegments = new Set(); let updatedFlags: string[] = []; let ffUpdate: MaybeThenable = false; - if (splitChanges.ff) { - const { added, removed, names } = computeMutation(splitChanges.ff.d, usedSegments, splitFiltersValidation); + if (definitionChanges.ff) { + const { added, removed, names } = computeMutation(definitionChanges.ff.d, usedSegments, splitFiltersValidation); updatedFlags = names; - log.debug(SYNC_SPLITS_UPDATE, [added.length, removed.length]); - ffUpdate = splits.update(added, removed, splitChanges.ff.t); + log.debug(SYNC_UPDATE, [definitionChangesFetcher.type, added.length, removed.length]); + ffUpdate = splits.update(added, removed, definitionChanges.ff.t); } let rbsUpdate: MaybeThenable = false; - if (splitChanges.rbs) { - const { added, removed } = computeMutation(splitChanges.rbs.d, usedSegments); - log.debug(SYNC_RBS_UPDATE, [added.length, removed.length]); - rbsUpdate = rbSegments.update(added, removed, splitChanges.rbs.t); + if (definitionChanges.rbs) { + const { added, removed } = computeMutation(definitionChanges.rbs.d, usedSegments); + log.debug(SYNC_UPDATE, ['rule-based segments', added.length, removed.length]); + rbsUpdate = rbSegments.update(added, removed, definitionChanges.rbs.t); } return Promise.all([ffUpdate, rbsUpdate, @@ -216,17 +216,17 @@ export function 
splitChangesUpdaterFactory( .catch(error => { if (startingUp && retriesOnFailureBeforeReady > retry) { retry += 1; - log.warn(SYNC_SPLITS_FETCH_RETRY, [retry, error]); - return _splitChangesUpdater(sinces, retry); + log.warn(SYNC_FETCH_RETRY, [definitionChangesFetcher.type, retry, error]); + return _definitionChangesUpdater(sinces, retry); } else { startingUp = false; - log.warn(SYNC_SPLITS_FETCH_FAILS, [error]); + log.warn(SYNC_FETCH_FAILS, [definitionChangesFetcher.type, error]); } return false; }); } // `getChangeNumber` never rejects or throws error - return Promise.all([splits.getChangeNumber(), rbSegments.getChangeNumber()]).then(_splitChangesUpdater); + return Promise.all([splits.getChangeNumber(), rbSegments.getChangeNumber()]).then(_definitionChangesUpdater); }; } diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 86f5cb0e..98679b6c 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -4,7 +4,7 @@ import { ISegmentsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; import { SDK_SEGMENTS_ARRIVED, SEGMENTS_UPDATE } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; -import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; +import { SYNC_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; import { IMembershipsResponse } from '../../../dtos/types'; import { MEMBERSHIPS_LS_UPDATE } from '../../streaming/constants'; @@ -76,7 +76,7 @@ export function mySegmentsUpdaterFactory( return updaterPromise.catch(error => { if (startingUp && retriesOnFailureBeforeReady > retry) { retry += 1; - log.warn(SYNC_MYSEGMENTS_FETCH_RETRY, [retry, error]); + log.warn(SYNC_FETCH_RETRY, ['memberships', retry, error]); return _mySegmentsUpdater(retry); // no need to forward `segmentList` and `noCache` params } else { 
startingUp = false; diff --git a/src/sync/polling/updaters/segmentChangesUpdater.ts b/src/sync/polling/updaters/segmentChangesUpdater.ts index 5b27b361..800694e0 100644 --- a/src/sync/polling/updaters/segmentChangesUpdater.ts +++ b/src/sync/polling/updaters/segmentChangesUpdater.ts @@ -3,7 +3,7 @@ import { ISegmentsCacheBase } from '../../../storages/types'; import { IReadinessManager } from '../../../readiness/types'; import { SDK_SEGMENTS_ARRIVED, SEGMENTS_UPDATE } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; -import { LOG_PREFIX_INSTANTIATION, LOG_PREFIX_SYNC_SEGMENTS } from '../../../logger/constants'; +import { LOG_PREFIX_INSTANTIATION, LOG_PREFIX_SYNC } from '../../../logger/constants'; import { timeout } from '../../../utils/promise/timeout'; import { SdkUpdateMetadata } from '../../../../types/splitio'; @@ -38,7 +38,7 @@ export function segmentChangesUpdaterFactory( } function updateSegment(segmentName: string, noCache?: boolean, till?: number, fetchOnlyNew?: boolean, retries?: number): Promise { - log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processing segment ${segmentName}`); + log.debug(`${LOG_PREFIX_SYNC}Processing segment ${segmentName}`); const sincePromise = Promise.resolve(segments.getChangeNumber(segmentName)); return sincePromise.then(since => { @@ -47,14 +47,14 @@ export function segmentChangesUpdaterFactory( false : segmentChangesFetcher(since || -1, segmentName, noCache, till, _promiseDecorator).then((changes) => { return Promise.all(changes.map(x => { - log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processing ${segmentName} with till = ${x.till}. Added: ${x.added.length}. Removed: ${x.removed.length}`); + log.debug(`${LOG_PREFIX_SYNC}Processing ${segmentName} with till = ${x.till}. Added: ${x.added.length}. 
Removed: ${x.removed.length}`); return segments.update(segmentName, x.added, x.removed, x.till); })).then((updates) => { return updates.some(update => update); }); }).catch(error => { if (retries) { - log.warn(`${LOG_PREFIX_SYNC_SEGMENTS}Retrying fetch of segment ${segmentName} (attempt #${retries}). Reason: ${error}`); + log.warn(`${LOG_PREFIX_SYNC}Retrying fetch of segment ${segmentName} (attempt #${retries}). Reason: ${error}`); return updateSegment(segmentName, noCache, till, fetchOnlyNew, retries - 1); } throw error; @@ -73,7 +73,7 @@ export function segmentChangesUpdaterFactory( * @param till - till target for the provided segmentName, for CDN bypass. */ return function segmentChangesUpdater(fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number) { - log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Started segments update`); + log.debug(`${LOG_PREFIX_SYNC}Started segments update`); // If not a segment name provided, read list of available segments names to be updated. let segmentsPromise = Promise.resolve(segmentName ? [segmentName] : segments.getRegisteredSegments()); @@ -102,9 +102,9 @@ export function segmentChangesUpdaterFactory( // If the operation is forbidden, it may be due to permissions. Destroy the SDK instance. // @TODO although factory status is destroyed, synchronization is not stopped if (readiness) readiness.setDestroyed(); - log.error(`${LOG_PREFIX_INSTANTIATION}: you passed a client-side type authorizationKey, please grab an SDK Key from the Split user interface that is of type server-side.`); + log.error(`${LOG_PREFIX_INSTANTIATION}: you passed a client-side type authorizationKey, please grab an SDK Key from Harness UI that is of type server-side.`); } else { - log.warn(`${LOG_PREFIX_SYNC_SEGMENTS}Error while doing fetch of segments. ${error}`); + log.warn(`${LOG_PREFIX_SYNC}Error while doing fetch of segments. 
${error}`); } return false; diff --git a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts index dc5cb7dc..58db2806 100644 --- a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts @@ -1,4 +1,4 @@ -import { IRBSegment, ISplit } from '../../../dtos/types'; +import { IRBSegment, IDefinition } from '../../../dtos/types'; import { STREAMING_PARSING_SPLIT_UPDATE } from '../../../logger/constants'; import { ILogger } from '../../../logger/types'; import { SDK_SPLITS_ARRIVED } from '../../../readiness/constants'; @@ -7,8 +7,8 @@ import { IRBSegmentsCacheSync, ISplitsCacheSync, IStorageSync } from '../../../s import { ITelemetryTracker } from '../../../trackers/types'; import { Backoff } from '../../../utils/Backoff'; import { SPLITS } from '../../../utils/constants'; -import { ISegmentsSyncTask, ISplitsSyncTask } from '../../polling/types'; -import { InstantUpdate } from '../../polling/updaters/splitChangesUpdater'; +import { ISegmentsSyncTask, IDefinitionsSyncTask } from '../../polling/types'; +import { InstantUpdate } from '../../polling/updaters/definitionChangesUpdater'; import { RB_SEGMENT_UPDATE } from '../constants'; import { parseFFUpdatePayload } from '../parseUtils'; import { ISplitKillData, ISplitUpdateData } from '../SSEHandler/types'; @@ -18,7 +18,7 @@ import { IUpdateWorker } from './types'; /** * SplitsUpdateWorker factory */ -export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData]> & { killSplit(event: ISplitKillData): void } { +export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, definitionsSyncTask: IDefinitionsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, 
segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData]> & { killSplit(event: ISplitKillData): void } { const ff = SplitsUpdateWorker(storage.splits); const rbs = SplitsUpdateWorker(storage.rbSegments); @@ -36,7 +36,7 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, splitsSy if (maxChangeNumber > cache.getChangeNumber()) { handleNewEvent = false; // fetch splits revalidating data if cached - splitsSyncTask.execute(true, cdnBypass ? maxChangeNumber : undefined, instantUpdate).then(() => { + definitionsSyncTask.execute(true, cdnBypass ? maxChangeNumber : undefined, instantUpdate).then(() => { if (!isHandlingEvent) return; // halt if `stop` has been called if (handleNewEvent) { __handleSplitUpdateCall(); @@ -79,7 +79,7 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, splitsSy * * @param changeNumber - change number of the notification */ - put({ changeNumber, pcn, type }: ISplitUpdateData, payload?: ISplit | IRBSegment) { + put({ changeNumber, pcn, type }: ISplitUpdateData, payload?: IDefinition | IRBSegment) { const currentChangeNumber = cache.getChangeNumber(); if (changeNumber <= currentChangeNumber || changeNumber <= maxChangeNumber) return; diff --git a/src/sync/streaming/parseUtils.ts b/src/sync/streaming/parseUtils.ts index a34f2dc9..0c24907e 100644 --- a/src/sync/streaming/parseUtils.ts +++ b/src/sync/streaming/parseUtils.ts @@ -2,7 +2,7 @@ import { algorithms } from '../../utils/decompress'; import { decodeFromBase64 } from '../../utils/base64'; import { hash } from '../../utils/murmur3/murmur3'; import { Compression, IMembershipMSUpdateData, KeyList } from './SSEHandler/types'; -import { IRBSegment, ISplit } from '../../dtos/types'; +import { IRBSegment, IDefinition } from '../../dtos/types'; const GZIP = 1; const ZLIB = 2; @@ -82,7 +82,7 @@ export function isInBitmap(bitmap: Uint8Array, hash64hex: string) { /** * Parse feature flags notifications for instant feature flag updates 
*/ -export function parseFFUpdatePayload(compression: Compression, data: string): ISplit | IRBSegment | undefined { +export function parseFFUpdatePayload(compression: Compression, data: string): IDefinition | IRBSegment | undefined { return compression > 0 ? parseKeyList(data, compression, false) : JSON.parse(decodeFromBase64(data)); diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index f0a5ac4e..945c886b 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -56,7 +56,7 @@ export function pushManagerFactory( // MySegmentsUpdateWorker (client-side) are initiated in `add` method const segmentsUpdateWorker = userKey ? undefined : SegmentsUpdateWorker(log, pollingManager.segmentsSyncTask as ISegmentsSyncTask, storage.segments); // For server-side we pass the segmentsSyncTask, used by SplitsUpdateWorker to fetch new segments - const splitsUpdateWorker = SplitsUpdateWorker(log, storage, pollingManager.splitsSyncTask, readiness.splits, telemetryTracker, userKey ? undefined : pollingManager.segmentsSyncTask as ISegmentsSyncTask); + const splitsUpdateWorker = SplitsUpdateWorker(log, storage, pollingManager.definitionsSyncTask, readiness.splits, telemetryTracker, userKey ? 
undefined : pollingManager.segmentsSyncTask as ISegmentsSyncTask); // [Only for client-side] map of hashes to user keys, to dispatch membership update events to the corresponding MySegmentsUpdateWorker const userKeyHashes: Record = {}; diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index df9ff152..f3709fa1 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -11,6 +11,8 @@ import { POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; import { ISdkFactoryContextSync } from '../sdkFactory/types'; import { SDK_SPLITS_CACHE_LOADED } from '../readiness/constants'; import { usesSegmentsSync } from '../storages/AbstractSplitsCacheSync'; +import { splitChangesFetcherFactory } from './polling/fetchers/splitChangesFetcher'; +import { IDefinitionChangesFetcher } from './polling/fetchers/types'; /** * Online SyncManager factory. @@ -19,10 +21,12 @@ import { usesSegmentsSync } from '../storages/AbstractSplitsCacheSync'; * @param pollingManagerFactory - allows to specialize the SyncManager for server-side or client-side API by passing * `pollingManagerSSFactory` or `pollingManagerCSFactory` respectively. 
* @param pushManagerFactory - optional to build a SyncManager with or without streaming support + * @param definitionChangesFetcherFactory - optional to replace the default split changes fetcher */ export function syncManagerOnlineFactory( - pollingManagerFactory?: (params: ISdkFactoryContextSync) => IPollingManager, + pollingManagerFactory?: (params: ISdkFactoryContextSync, definitionChangesFetcher: IDefinitionChangesFetcher) => IPollingManager, pushManagerFactory?: (params: ISdkFactoryContextSync, pollingManager: IPollingManager) => IPushManager | undefined, + definitionChangesFetcherFactory = splitChangesFetcherFactory ): (params: ISdkFactoryContextSync) => ISyncManagerCS { /** @@ -33,7 +37,7 @@ export function syncManagerOnlineFactory( const { settings, settings: { log, streamingEnabled, sync: { enabled: syncEnabled } }, telemetryTracker, storage, readiness } = params; /** Polling Manager */ - const pollingManager = pollingManagerFactory && pollingManagerFactory(params); + const pollingManager = pollingManagerFactory && pollingManagerFactory(params, definitionChangesFetcherFactory(params)); /** Push Manager */ const pushManager = syncEnabled && streamingEnabled && pollingManager && pushManagerFactory ? 
diff --git a/src/utils/inputValidation/__tests__/definitionExistence.spec.ts b/src/utils/inputValidation/__tests__/definitionExistence.spec.ts new file mode 100644 index 00000000..2a320e43 --- /dev/null +++ b/src/utils/inputValidation/__tests__/definitionExistence.spec.ts @@ -0,0 +1,47 @@ + +import * as LabelConstants from '../../labels'; + +import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; + +import { validateDefinitionExistence } from '../definitionExistence'; +import { IReadinessManager } from '../../../readiness/types'; +import { WARN_NOT_EXISTENT_DEFINITION } from '../../../logger/constants'; + +describe('Split existence (special case)', () => { + + afterEach(() => { loggerMock.mockClear(); }); + + test('Should return a boolean indicating if the SDK was ready and there was no Split object or "definition not found" label', () => { + // @ts-expect-error + let readinessManagerMock = { + isReady: jest.fn(() => false) // Fake the signal for the non ready SDK + } as IReadinessManager; + + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'some_split', {}, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'some_split', null, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'some_split', undefined, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'some_split', 'a label', 'test_method')).toBe(true); // Should always return true when the SDK is not ready. + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'some_split', LabelConstants.DEFINITION_NOT_FOUND, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. 
+ + expect(loggerMock.warn).not.toBeCalled(); // There should have been no warning logs since the SDK was not ready yet. + expect(loggerMock.error).not.toBeCalled(); // There should have been no error logs since the SDK was not ready yet. + + // Prepare the mock to fake that the SDK is ready now. + (readinessManagerMock.isReady as jest.Mock).mockImplementation(() => true); + + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'other_split', {}, 'other_method')).toBe(true); // Should return true if it receives a Split Object instead of null (when the object is not found, for manager). + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'other_split', 'a label', 'other_method')).toBe(true); // Should return true if it receives a Label and it is not the 'definition not found' one (when the Split was not found on the storage, for client). + + expect(loggerMock.warn).not.toBeCalled(); // There should have been no warning logs since the values we used so far were considered valid. + expect(loggerMock.error).not.toBeCalled(); // There should have been no error logs since the values we used so far were considered valid. + + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'other_split', null, 'other_method')).toBe(false); // Should return false if it receives a non-truthy value as a split object or label + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'other_split', undefined, 'other_method')).toBe(false); // Should return false if it receives a non-truthy value as a split object or label + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'other_split', LabelConstants.DEFINITION_NOT_FOUND, 'other_method')).toBe(false); // Should return false if it receives a label but it is the 'definition not found' one.
+ + expect(loggerMock.warn).toBeCalledTimes(3); // It should have logged 3 warnings, one per each time we called it + loggerMock.warn.mock.calls.forEach(call => expect(call).toEqual([WARN_NOT_EXISTENT_DEFINITION, ['other_method', 'other_split']])); // Warning logs should have the correct message. + + expect(loggerMock.error).not.toBeCalled(); // We log warnings, not errors. + }); +}); diff --git a/src/utils/inputValidation/__tests__/isOperational.spec.ts b/src/utils/inputValidation/__tests__/isOperational.spec.ts index 19c1373a..4b93096a 100644 --- a/src/utils/inputValidation/__tests__/isOperational.spec.ts +++ b/src/utils/inputValidation/__tests__/isOperational.spec.ts @@ -46,7 +46,7 @@ describe('validateIfReadyFromCache', () => { // @ts-ignore expect(validateIfReadyFromCache(loggerMock, readinessManagerMock, 'test_method')).toBe(false); // It should return true if SDK was ready. expect(readinessManagerMock.isReadyFromCache).toBeCalledTimes(1); // It checks for SDK_READY_FROM_CACHE status. - expect(loggerMock.warn).toBeCalledWith(CLIENT_NOT_READY_FROM_CACHE, ['test_method', '']); // It should log the expected warning. + expect(loggerMock.warn).toBeCalledWith(CLIENT_NOT_READY_FROM_CACHE, ['test_method']); // It should log the expected warning. expect(loggerMock.error).not.toBeCalled(); // But it should not log any errors. 
}); }); diff --git a/src/utils/inputValidation/__tests__/splitExistence.spec.ts b/src/utils/inputValidation/__tests__/splitExistence.spec.ts deleted file mode 100644 index 9d78df9e..00000000 --- a/src/utils/inputValidation/__tests__/splitExistence.spec.ts +++ /dev/null @@ -1,47 +0,0 @@ - -import * as LabelConstants from '../../labels'; - -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; - -import { validateSplitExistence } from '../splitExistence'; -import { IReadinessManager } from '../../../readiness/types'; -import { WARN_NOT_EXISTENT_SPLIT } from '../../../logger/constants'; - -describe('Split existence (special case)', () => { - - afterEach(() => { loggerMock.mockClear(); }); - - test('Should return a boolean indicating if the SDK was ready and there was no Split object or "definition not found" label', () => { - // @ts-expect-error - let readinessManagerMock = { - isReady: jest.fn(() => false) // Fake the signal for the non ready SDK - } as IReadinessManager; - - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'some_split', {}, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'some_split', null, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'some_split', undefined, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'some_split', 'a label', 'test_method')).toBe(true); // Should always return true when the SDK is not ready. - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'some_split', LabelConstants.SPLIT_NOT_FOUND, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. 
- - expect(loggerMock.warn).not.toBeCalled(); // There should have been no warning logs since the SDK was not ready yet. - expect(loggerMock.error).not.toBeCalled(); // There should have been no error logs since the SDK was not ready yet. - - // Prepare the mock to fake that the SDK is ready now. - (readinessManagerMock.isReady as jest.Mock).mockImplementation(() => true); - - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'other_split', {}, 'other_method')).toBe(true); // Should return true if it receives a Split Object instead of null (when the object is not found, for manager). - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'other_split', 'a label', 'other_method')).toBe(true); // Should return true if it receives a Label and it is not split not found (when the Split was not found on the storage, for client). - - expect(loggerMock.warn).not.toBeCalled(); // There should have been no warning logs since the values we used so far were considered valid. - expect(loggerMock.error).not.toBeCalled(); // There should have been no error logs since the values we used so far were considered valid. - - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'other_split', null, 'other_method')).toBe(false); // Should return false if it receives a non-truthy value as a split object or label - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'other_split', undefined, 'other_method')).toBe(false); // Should return false if it receives a non-truthy value as a split object or label - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'other_split', LabelConstants.SPLIT_NOT_FOUND, 'other_method')).toBe(false); // Should return false if it receives a label but it is the split not found one. 
- - expect(loggerMock.warn).toBeCalledTimes(3); // It should have logged 3 warnings, one per each time we called it - loggerMock.warn.mock.calls.forEach(call => expect(call).toEqual([WARN_NOT_EXISTENT_SPLIT, ['other_method', 'other_split']])); // Warning logs should have the correct message. - - expect(loggerMock.error).not.toBeCalled(); // We log warnings, not errors. - }); -}); diff --git a/src/utils/inputValidation/definitionExistence.ts b/src/utils/inputValidation/definitionExistence.ts new file mode 100644 index 00000000..290d8b59 --- /dev/null +++ b/src/utils/inputValidation/definitionExistence.ts @@ -0,0 +1,19 @@ +import { FALLBACK_DEFINITION_NOT_FOUND, DEFINITION_NOT_FOUND } from '../labels'; +import { IReadinessManager } from '../../readiness/types'; +import { ILogger } from '../../logger/types'; +import { WARN_NOT_EXISTENT_DEFINITION } from '../../logger/constants'; + +/** + * This is defined here and in this format mostly because of the logger and the fact that it's considered a validation at product level. + * But it's not going to run on the input validation layer. In any case, the most compelling reason to use it as we do is to avoid going to Redis and get a definition twice. 
+ */ +export function validateDefinitionExistence(log: ILogger, readinessManager: IReadinessManager, definitionName: string, labelOrDefinitionObj: any, method: string): boolean { + if (readinessManager.isReady()) { // Only if it's ready (synced with BE) we validate this, otherwise it may just be that the SDK is still syncing + if (labelOrDefinitionObj === DEFINITION_NOT_FOUND || labelOrDefinitionObj === FALLBACK_DEFINITION_NOT_FOUND || labelOrDefinitionObj == null) { + log.warn(WARN_NOT_EXISTENT_DEFINITION, [method, definitionName]); + return false; + } + } + + return true; +} diff --git a/src/utils/inputValidation/index.ts b/src/utils/inputValidation/index.ts index f6e06c5e..5e892ce8 100644 --- a/src/utils/inputValidation/index.ts +++ b/src/utils/inputValidation/index.ts @@ -8,6 +8,6 @@ export { validateSplit } from './split'; export { validateSplits } from './splits'; export { validateTrafficType } from './trafficType'; export { validateIfNotDestroyed, validateIfReadyFromCache, validateIfOperational } from './isOperational'; -export { validateSplitExistence } from './splitExistence'; +export { validateDefinitionExistence } from './definitionExistence'; export { validateTrafficTypeExistence } from './trafficTypeExistence'; export { validateEvaluationOptions } from './eventProperties'; diff --git a/src/utils/inputValidation/isOperational.ts b/src/utils/inputValidation/isOperational.ts index 5f122926..5fbec731 100644 --- a/src/utils/inputValidation/isOperational.ts +++ b/src/utils/inputValidation/isOperational.ts @@ -9,14 +9,14 @@ export function validateIfNotDestroyed(log: ILogger, readinessManager: IReadines return false; } -export function validateIfReadyFromCache(log: ILogger, readinessManager: IReadinessManager, method: string, featureFlagNameOrNames?: string | string[] | false) { +export function validateIfReadyFromCache(log: ILogger, readinessManager: IReadinessManager, method: string) { if (readinessManager.isReadyFromCache()) return true; - 
log.warn(CLIENT_NOT_READY_FROM_CACHE, [method, featureFlagNameOrNames ? ` for feature flag ${featureFlagNameOrNames.toString()}` : '']); + log.warn(CLIENT_NOT_READY_FROM_CACHE, [method]); return false; } // Operational means that the SDK is ready to evaluate (not destroyed and ready from cache) -export function validateIfOperational(log: ILogger, readinessManager: IReadinessManager, method: string, featureFlagNameOrNames?: string | string[] | false) { - return validateIfNotDestroyed(log, readinessManager, method) && validateIfReadyFromCache(log, readinessManager, method, featureFlagNameOrNames); +export function validateIfOperational(log: ILogger, readinessManager: IReadinessManager, method: string) { + return validateIfNotDestroyed(log, readinessManager, method) && validateIfReadyFromCache(log, readinessManager, method); } diff --git a/src/utils/inputValidation/splitExistence.ts b/src/utils/inputValidation/splitExistence.ts deleted file mode 100644 index c8559b2a..00000000 --- a/src/utils/inputValidation/splitExistence.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { FALLBACK_SPLIT_NOT_FOUND, SPLIT_NOT_FOUND } from '../labels'; -import { IReadinessManager } from '../../readiness/types'; -import { ILogger } from '../../logger/types'; -import { WARN_NOT_EXISTENT_SPLIT } from '../../logger/constants'; - -/** - * This is defined here and in this format mostly because of the logger and the fact that it's considered a validation at product level. - * But it's not going to run on the input validation layer. In any case, the most compelling reason to use it as we do is to avoid going to Redis and get a split twice. 
- */ -export function validateSplitExistence(log: ILogger, readinessManager: IReadinessManager, splitName: string, labelOrSplitObj: any, method: string): boolean { - if (readinessManager.isReady()) { // Only if it's ready (synced with BE) we validate this, otherwise it may just be that the SDK is still syncing - if (labelOrSplitObj === SPLIT_NOT_FOUND || labelOrSplitObj === FALLBACK_SPLIT_NOT_FOUND || labelOrSplitObj == null) { - log.warn(WARN_NOT_EXISTENT_SPLIT, [method, splitName]); - return false; - } - } - - return true; -} diff --git a/src/utils/labels/index.ts b/src/utils/labels/index.ts index 78117a1d..5eccf3a6 100644 --- a/src/utils/labels/index.ts +++ b/src/utils/labels/index.ts @@ -2,11 +2,11 @@ import { FALLBACK_PREFIX } from '../../evaluator/fallbackTreatmentsCalculator'; export const SPLIT_KILLED = 'killed'; export const NO_CONDITION_MATCH = 'default rule'; -export const SPLIT_NOT_FOUND = 'definition not found'; +export const DEFINITION_NOT_FOUND = 'definition not found'; export const SDK_NOT_READY = 'not ready'; export const EXCEPTION = 'exception'; export const SPLIT_ARCHIVED = 'archived'; export const NOT_IN_SPLIT = 'not in split'; export const UNSUPPORTED_MATCHER_TYPE = 'targeting rule type unsupported by sdk'; export const PREREQUISITES_NOT_MET = 'prerequisites not met'; -export const FALLBACK_SPLIT_NOT_FOUND = FALLBACK_PREFIX + SPLIT_NOT_FOUND; +export const FALLBACK_DEFINITION_NOT_FOUND = FALLBACK_PREFIX + DEFINITION_NOT_FOUND;