diff --git a/.eslintrc b/.eslintrc index cc505a94..e06dd99e 100644 --- a/.eslintrc +++ b/.eslintrc @@ -1,4 +1,5 @@ { + "root": true, "extends": [ "eslint:recommended" ], @@ -46,7 +47,7 @@ "overrides": [ { "env": { - // platform-agnostic code must not reference Node or Browser globals + // platform-agnostic code must not reference Node or Browser globals without feature detection "node": false, "browser": false, "es6": true diff --git a/package-lock.json b/package-lock.json index 4d1a6271..7fa9ff43 100644 --- a/package-lock.json +++ b/package-lock.json @@ -18,6 +18,7 @@ "@types/lodash": "^4.14.162", "@typescript-eslint/eslint-plugin": "^7.18.0", "@typescript-eslint/parser": "^7.18.0", + "bloom-filters": "^3.0.4", "cross-env": "^7.0.2", "eslint": "^8.56.0", "eslint-plugin-compat": "^6.0.1", @@ -35,11 +36,19 @@ "typescript": "4.7.4" }, "peerDependencies": { - "ioredis": "^4.28.0 || ^5.0.0" + "bloom-filters": "^3.0.0", + "ioredis": "^4.28.0 || ^5.0.0", + "node-fetch": "^2.7.0" }, "peerDependenciesMeta": { + "bloom-filters": { + "optional": true + }, "ioredis": { "optional": true + }, + "node-fetch": { + "optional": true } } }, @@ -93,6 +102,7 @@ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.22.5.tgz", "integrity": "sha512-SBuTAjg91A3eKOvD+bPEz3LlhHZRNu1nFOVts9lzDJTXshHTjII0BAtDS3Y2DAkdZdDKWVZGVwkDfc4Clxn1dg==", "dev": true, + "peer": true, "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.22.5", @@ -1562,6 +1572,7 @@ "resolved": "https://registry.npmjs.org/@types/jest/-/jest-27.0.2.tgz", "integrity": "sha512-4dRxkS/AFX0c5XW6IPMNOydLn2tEhNhJV7DnYK+0bjoJZ+QTmfucBlihX7aoEsh/ocYtkLC73UbnBXBXIxsULA==", "dev": true, + "peer": true, "dependencies": { "jest-diff": "^27.0.0", "pretty-format": "^27.0.0" @@ -1591,6 +1602,13 @@ "integrity": "sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA==", "dev": true }, + "node_modules/@types/seedrandom": { + "version": "3.0.8", + "resolved": 
"https://registry.npmjs.org/@types/seedrandom/-/seedrandom-3.0.8.tgz", + "integrity": "sha512-TY1eezMU2zH2ozQoAFAQFOPpvP15g+ZgSfTZt31AUUH/Rxtnz3H+A/Sv1Snw2/amp//omibc+AEkTaA8KUeOLQ==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/stack-utils": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", @@ -1652,6 +1670,7 @@ "integrity": "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==", "dev": true, "license": "BSD-2-Clause", + "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "7.18.0", "@typescript-eslint/types": "7.18.0", @@ -1765,9 +1784,9 @@ } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.1.0.tgz", + "integrity": "sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w==", "dev": true, "license": "MIT", "dependencies": { @@ -1863,6 +1882,7 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.0.tgz", "integrity": "sha512-+G7P8jJmCHr+S+cLfQxygbWhXy+8YTVGzAkpEbcLo2mLoL7tij/VG41QSHACSf5QgYRhMZYHuNc6drJaO0Da+w==", "dev": true, + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -2203,11 +2223,42 @@ "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", "dev": true }, + "node_modules/base64-arraybuffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", + "integrity": "sha512-I3yl4r9QB5ZRY3XuJVEPfc2XhZO6YweFPI+UovAzn+8/hb3oJ6lnysaFcjVpkCPfVWFUDvoZ8kmVDP7WyRtYtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/bloom-filters": { 
+ "version": "3.0.4", + "resolved": "https://registry.npmjs.org/bloom-filters/-/bloom-filters-3.0.4.tgz", + "integrity": "sha512-BdnPWo2OpYhlvuP2fRzJBdioMCkm7Zp0HCf8NJgF5Mbyqy7VQ/CnTiVWMMyq4EZCBHwj0Kq6098gW2/3RsZsrA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/seedrandom": "^3.0.8", + "base64-arraybuffer": "^1.0.2", + "is-buffer": "^2.0.5", + "lodash": "^4.17.21", + "long": "^5.2.0", + "reflect-metadata": "^0.1.13", + "seedrandom": "^3.0.5", + "xxhashjs": "^0.2.2" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.14.tgz", + "integrity": "sha512-MWPGfDxnyzKU7rNOW9SP/c50vi3xrmrua/+6hfPbCS2ABNWfx24vPidzvC7krjU/RTo235sV776ymlsMtGKj8g==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -2250,6 +2301,7 @@ "url": "https://github.com/sponsors/ai" } ], + "peer": true, "dependencies": { "caniuse-lite": "^1.0.30001663", "electron-to-chromium": "^1.5.28", @@ -2514,6 +2566,13 @@ "integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==", "dev": true }, + "node_modules/cuint": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/cuint/-/cuint-0.2.2.tgz", + "integrity": "sha512-d4ZVpCW31eWwCMe1YT3ur7mUDnTXbgwyzaL320DrcRT45rfjYxkt5QWLrmOJ+/UEAI2+fQgKe/fCjR8l4TpRgw==", + "dev": true, + "license": "MIT" + }, "node_modules/data-urls": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-2.0.0.tgz", @@ -2900,6 +2959,7 @@ "deprecated": "This version is no longer supported. 
Please see https://eslint.org/version-support for other options.", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", @@ -4284,6 +4344,30 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-buffer": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", + "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "engines": { + "node": ">=4" + } + }, "node_modules/is-callable": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", @@ -4605,6 +4689,7 @@ "resolved": "https://registry.npmjs.org/jest/-/jest-27.5.1.tgz", "integrity": "sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ==", "dev": true, + "peer": true, "dependencies": { "@jest/core": "^27.5.1", "import-local": "^3.0.2", @@ -6515,6 +6600,13 @@ "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", "dev": true }, + "node_modules/long": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", + "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==", + "dev": true, + "license": "Apache-2.0" + }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -6661,6 +6753,7 @@ "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", "integrity": 
"sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", "dev": true, + "peer": true, "dependencies": { "whatwg-url": "^5.0.0" }, @@ -7140,6 +7233,13 @@ "node": ">=4.0.0" } }, + "node_modules/reflect-metadata": { + "version": "0.1.14", + "resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.1.14.tgz", + "integrity": "sha512-ZhYeb6nRaXCfhnndflDK8qI6ZQ/YcWZCISRAWICW9XYqMUwjZM9Z0DveWX/ABN01oxSHwVxKQmxeYZSsm0jh5A==", + "dev": true, + "license": "Apache-2.0" + }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -7284,6 +7384,13 @@ "node": ">=10" } }, + "node_modules/seedrandom": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz", + "integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==", + "dev": true, + "license": "MIT" + }, "node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -7792,6 +7899,7 @@ "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", "dev": true, "license": "Apache-2.0", + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -8103,6 +8211,16 @@ "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", "dev": true }, + "node_modules/xxhashjs": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/xxhashjs/-/xxhashjs-0.2.2.tgz", + "integrity": "sha512-AkTuIuVTET12tpsVIQo+ZU6f/qDmKuRUcjaqR+OIvm+aCBsZ95i7UVY5WJ9TMsSaZ0DA2WxoZ4acu0sPH+OKAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "cuint": "^0.2.2" + } + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/package.json b/package.json index 
fdac034d..8248eec7 100644 --- a/package.json +++ b/package.json @@ -49,11 +49,19 @@ "tslib": "^2.3.1" }, "peerDependencies": { - "ioredis": "^4.28.0 || ^5.0.0" + "bloom-filters": "^3.0.0", + "ioredis": "^4.28.0 || ^5.0.0", + "node-fetch": "^2.7.0" }, "peerDependenciesMeta": { + "bloom-filters": { + "optional": true + }, "ioredis": { "optional": true + }, + "node-fetch": { + "optional": true } }, "devDependencies": { @@ -62,6 +70,7 @@ "@types/lodash": "^4.14.162", "@typescript-eslint/eslint-plugin": "^7.18.0", "@typescript-eslint/parser": "^7.18.0", + "bloom-filters": "^3.0.4", "cross-env": "^7.0.2", "eslint": "^8.56.0", "eslint-plugin-compat": "^6.0.1", diff --git a/src/dtos/types.ts b/src/dtos/types.ts index 2b0ee4ef..b2fc6068 100644 --- a/src/dtos/types.ts +++ b/src/dtos/types.ts @@ -2,8 +2,6 @@ import SplitIO from '../../types/splitio'; export type MaybeThenable = T | Promise -/** Split Matchers */ - export type IMatcherDataType = null | 'DATETIME' | 'NUMBER' export interface IUnaryNumericMatcherData { @@ -39,7 +37,7 @@ export interface IDependencyMatcherData { treatments: string[] } -interface ISplitMatcherBase { +interface IDefinitionMatcherBase { matcherType: string negate?: boolean keySelector?: null | { @@ -57,144 +55,143 @@ interface ISplitMatcherBase { betweenStringMatcherData?: null | IBetweenStringMatcherData } -interface IAllKeysMatcher extends ISplitMatcherBase { +interface IAllKeysMatcher extends IDefinitionMatcherBase { matcherType: 'ALL_KEYS' } -interface IInSegmentMatcher extends ISplitMatcherBase { +interface IInSegmentMatcher extends IDefinitionMatcherBase { matcherType: 'IN_SEGMENT', userDefinedSegmentMatcherData: IInSegmentMatcherData } -interface IInRBSegmentMatcher extends ISplitMatcherBase { +interface IInRBSegmentMatcher extends IDefinitionMatcherBase { matcherType: 'IN_RULE_BASED_SEGMENT', userDefinedSegmentMatcherData: IInSegmentMatcherData } -interface IInLargeSegmentMatcher extends ISplitMatcherBase { +interface IInLargeSegmentMatcher 
extends IDefinitionMatcherBase { matcherType: 'IN_LARGE_SEGMENT', userDefinedLargeSegmentMatcherData: IInLargeSegmentMatcherData } -interface IWhitelistMatcher extends ISplitMatcherBase { +interface IWhitelistMatcher extends IDefinitionMatcherBase { matcherType: 'WHITELIST', whitelistMatcherData: IWhitelistMatcherData } -interface IEqualToMatcher extends ISplitMatcherBase { +interface IEqualToMatcher extends IDefinitionMatcherBase { matcherType: 'EQUAL_TO', unaryNumericMatcherData: IUnaryNumericMatcherData } -interface IGreaterThanOrEqualToMatcher extends ISplitMatcherBase { +interface IGreaterThanOrEqualToMatcher extends IDefinitionMatcherBase { matcherType: 'GREATER_THAN_OR_EQUAL_TO', unaryNumericMatcherData: IUnaryNumericMatcherData } -interface ILessThanOrEqualToMatcher extends ISplitMatcherBase { +interface ILessThanOrEqualToMatcher extends IDefinitionMatcherBase { matcherType: 'LESS_THAN_OR_EQUAL_TO', unaryNumericMatcherData: IUnaryNumericMatcherData } -interface IBetweenMatcher extends ISplitMatcherBase { +interface IBetweenMatcher extends IDefinitionMatcherBase { matcherType: 'BETWEEN' betweenMatcherData: IBetweenMatcherData } -interface IEqualToSetMatcher extends ISplitMatcherBase { +interface IEqualToSetMatcher extends IDefinitionMatcherBase { matcherType: 'EQUAL_TO_SET', whitelistMatcherData: IWhitelistMatcherData } -interface IContainsAnyOfSetMatcher extends ISplitMatcherBase { +interface IContainsAnyOfSetMatcher extends IDefinitionMatcherBase { matcherType: 'CONTAINS_ANY_OF_SET', whitelistMatcherData: IWhitelistMatcherData } -interface IContainsAllOfSetMatcher extends ISplitMatcherBase { +interface IContainsAllOfSetMatcher extends IDefinitionMatcherBase { matcherType: 'CONTAINS_ALL_OF_SET', whitelistMatcherData: IWhitelistMatcherData } -interface IPartOfSetMatcher extends ISplitMatcherBase { +interface IPartOfSetMatcher extends IDefinitionMatcherBase { matcherType: 'PART_OF_SET', whitelistMatcherData: IWhitelistMatcherData } -interface 
IStartsWithMatcher extends ISplitMatcherBase { +interface IStartsWithMatcher extends IDefinitionMatcherBase { matcherType: 'STARTS_WITH', whitelistMatcherData: IWhitelistMatcherData } -interface IEndsWithMatcher extends ISplitMatcherBase { +interface IEndsWithMatcher extends IDefinitionMatcherBase { matcherType: 'ENDS_WITH', whitelistMatcherData: IWhitelistMatcherData } -interface IContainsStringMatcher extends ISplitMatcherBase { +interface IContainsStringMatcher extends IDefinitionMatcherBase { matcherType: 'CONTAINS_STRING', whitelistMatcherData: IWhitelistMatcherData } -interface IInSplitTreatmentMatcher extends ISplitMatcherBase { +interface IInSplitTreatmentMatcher extends IDefinitionMatcherBase { matcherType: 'IN_SPLIT_TREATMENT', dependencyMatcherData: IDependencyMatcherData, } -interface IEqualToBooleanMatcher extends ISplitMatcherBase { +interface IEqualToBooleanMatcher extends IDefinitionMatcherBase { matcherType: 'EQUAL_TO_BOOLEAN', booleanMatcherData: boolean } -interface IMatchesStringMatcher extends ISplitMatcherBase { +interface IMatchesStringMatcher extends IDefinitionMatcherBase { matcherType: 'MATCHES_STRING', stringMatcherData: string } -interface IEqualToSemverMatcher extends ISplitMatcherBase { +interface IEqualToSemverMatcher extends IDefinitionMatcherBase { matcherType: 'EQUAL_TO_SEMVER', stringMatcherData: string } -interface IGreaterThanOrEqualToSemverMatcher extends ISplitMatcherBase { +interface IGreaterThanOrEqualToSemverMatcher extends IDefinitionMatcherBase { matcherType: 'GREATER_THAN_OR_EQUAL_TO_SEMVER', stringMatcherData: string } -interface ILessThanOrEqualToSemverMatcher extends ISplitMatcherBase { +interface ILessThanOrEqualToSemverMatcher extends IDefinitionMatcherBase { matcherType: 'LESS_THAN_OR_EQUAL_TO_SEMVER', stringMatcherData: string } -interface IBetweenSemverMatcher extends ISplitMatcherBase { +interface IBetweenSemverMatcher extends IDefinitionMatcherBase { matcherType: 'BETWEEN_SEMVER' betweenStringMatcherData: 
IBetweenStringMatcherData } -interface IInListSemverMatcher extends ISplitMatcherBase { +interface IInListSemverMatcher extends IDefinitionMatcherBase { matcherType: 'IN_LIST_SEMVER', whitelistMatcherData: IWhitelistMatcherData } -export type ISplitMatcher = IAllKeysMatcher | IInSegmentMatcher | IWhitelistMatcher | IEqualToMatcher | IGreaterThanOrEqualToMatcher | +export type IDefinitionMatcher = IAllKeysMatcher | IInSegmentMatcher | IWhitelistMatcher | IEqualToMatcher | IGreaterThanOrEqualToMatcher | ILessThanOrEqualToMatcher | IBetweenMatcher | IEqualToSetMatcher | IContainsAnyOfSetMatcher | IContainsAllOfSetMatcher | IPartOfSetMatcher | IStartsWithMatcher | IEndsWithMatcher | IContainsStringMatcher | IInSplitTreatmentMatcher | IEqualToBooleanMatcher | IMatchesStringMatcher | IEqualToSemverMatcher | IGreaterThanOrEqualToSemverMatcher | ILessThanOrEqualToSemverMatcher | IBetweenSemverMatcher | IInListSemverMatcher | IInLargeSegmentMatcher | IInRBSegmentMatcher -/** Split object */ -export interface ISplitPartition { +export interface IDefinitionPartition { treatment: string size: number } -export interface ISplitCondition { +export interface IDefinitionCondition { matcherGroup: { combiner: 'AND', - matchers: ISplitMatcher[] + matchers: IDefinitionMatcher[] } - partitions?: ISplitPartition[] + partitions?: IDefinitionPartition[] label?: string conditionType?: 'ROLLOUT' | 'WHITELIST' } @@ -204,48 +201,44 @@ export interface IExcludedSegment { name: string, } -export interface IRBSegment { - name: string, - changeNumber: number, - status?: 'ACTIVE' | 'ARCHIVED', - conditions?: ISplitCondition[] | null, +export interface TargetingEntity { + name: string; + changeNumber: number; + status: 'ACTIVE' | 'ARCHIVED'; + conditions: IDefinitionCondition[]; +} + +export interface IRBSegment extends TargetingEntity { excluded?: { keys?: string[] | null, segments?: IExcludedSegment[] | null } | null } -export interface ISplit { - name: string, - changeNumber: number, - status?: 
'ACTIVE' | 'ARCHIVED', - conditions: ISplitCondition[], +export interface IDefinition extends TargetingEntity { + trafficTypeName: string; + sets?: string[]; + impressionsDisabled?: boolean; prerequisites?: null | { n: string, ts: string[] - }[] - killed: boolean, - defaultTreatment: string, - trafficTypeName: string, - seed: number, - trafficAllocation?: number, - trafficAllocationSeed?: number + }[]; + killed: boolean; + defaultTreatment: string; + seed: number; + trafficAllocation?: number; + trafficAllocationSeed?: number; configurations?: { - [treatmentName: string]: string - }, - sets?: string[], - impressionsDisabled?: boolean + [treatmentName: string]: string | SplitIO.JsonObject + }; } -// Split definition used in offline mode -export type ISplitPartial = Pick - /** Interface of the parsed JSON response of `/splitChanges` */ -export interface ISplitChangesResponse { +export interface IDefinitionChangesResponse { ff?: { t: number, s?: number, - d: ISplit[] + d: IDefinition[] }, rbs?: { t: number, diff --git a/src/evaluator/Engine.ts b/src/evaluator/Engine.ts index 4228316f..3ef683e5 100644 --- a/src/evaluator/Engine.ts +++ b/src/evaluator/Engine.ts @@ -4,10 +4,10 @@ import { keyParser } from '../utils/key'; import { thenable } from '../utils/promise/thenable'; import { NO_CONDITION_MATCH, SPLIT_ARCHIVED, SPLIT_KILLED, PREREQUISITES_NOT_MET } from '../utils/labels'; import { CONTROL } from '../utils/constants'; -import { ISplit, MaybeThenable } from '../dtos/types'; +import { IDefinition, MaybeThenable } from '../dtos/types'; import SplitIO from '../../types/splitio'; import { IStorageAsync, IStorageSync } from '../storages/types'; -import { IEvaluation, IEvaluationResult, ISplitEvaluator } from './types'; +import { IEvaluation, IEvaluationResult, IDefinitionEvaluator } from './types'; import { ILogger } from '../logger/types'; import { ENGINE_DEFAULT } from '../logger/constants'; import { prerequisitesMatcherContext } from './matchers/prerequisites'; @@ 
-19,7 +19,7 @@ function evaluationResult(result: IEvaluation | undefined, defaultTreatment: str }; } -export function engineParser(log: ILogger, split: ISplit, storage: IStorageSync | IStorageAsync) { +export function engineParser(log: ILogger, split: IDefinition, storage: IStorageSync | IStorageAsync) { const { killed, seed, trafficAllocation, trafficAllocationSeed, status, conditions, prerequisites } = split; const defaultTreatment = isString(split.defaultTreatment) ? split.defaultTreatment : CONTROL; @@ -29,7 +29,7 @@ export function engineParser(log: ILogger, split: ISplit, storage: IStorageSync return { - getTreatment(key: SplitIO.SplitKey, attributes: SplitIO.Attributes | undefined, splitEvaluator: ISplitEvaluator): MaybeThenable { + getTreatment(key: SplitIO.SplitKey, attributes: SplitIO.Attributes | undefined, splitEvaluator: IDefinitionEvaluator): MaybeThenable { const parsedKey = keyParser(key); diff --git a/src/evaluator/__tests__/evaluate-feature.spec.ts b/src/evaluator/__tests__/evaluate-feature.spec.ts index 85db31e7..500bf5ca 100644 --- a/src/evaluator/__tests__/evaluate-feature.spec.ts +++ b/src/evaluator/__tests__/evaluate-feature.spec.ts @@ -1,10 +1,10 @@ import { evaluateFeature } from '../index'; -import { EXCEPTION, NOT_IN_SPLIT, SPLIT_ARCHIVED, SPLIT_KILLED, SPLIT_NOT_FOUND } from '../../utils/labels'; +import { EXCEPTION, NOT_IN_SPLIT, SPLIT_ARCHIVED, SPLIT_KILLED, DEFINITION_NOT_FOUND } from '../../utils/labels'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; -import { ISplit } from '../../dtos/types'; +import { IDefinition } from '../../dtos/types'; import { IStorageSync } from '../../storages/types'; -const splitsMock: Record = { +const splitsMock: Record = { regular: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on', 'seed': 1684183541, 'configurations': {}, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 
'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, config: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on', 'seed': 1684183541, 'configurations': { 'on': "{color:'black'}" }, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, killed: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on2', 'seed': 1684183541, 'configurations': {}, 'status': 'ACTIVE', 'killed': true, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 
'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, @@ -53,7 +53,7 @@ test('EVALUATOR / should return right label, treatment and config if storage ret config: '{color:\'black\'}', changeNumber: 1487277320548 }; const expectedOutputControl = { - treatment: 'control', label: SPLIT_NOT_FOUND, config: null + treatment: 'control', label: DEFINITION_NOT_FOUND, config: null }; const evaluationWithConfig = evaluateFeature( diff --git a/src/evaluator/__tests__/evaluate-features.spec.ts b/src/evaluator/__tests__/evaluate-features.spec.ts index 45832bd0..d4225cc2 100644 --- a/src/evaluator/__tests__/evaluate-features.spec.ts +++ b/src/evaluator/__tests__/evaluate-features.spec.ts @@ -1,11 +1,11 @@ import { evaluateFeatures, evaluateFeaturesByFlagSets } from '../index'; -import { EXCEPTION, NOT_IN_SPLIT, SPLIT_ARCHIVED, SPLIT_KILLED, SPLIT_NOT_FOUND } from '../../utils/labels'; +import { EXCEPTION, NOT_IN_SPLIT, SPLIT_ARCHIVED, SPLIT_KILLED, DEFINITION_NOT_FOUND } from '../../utils/labels'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; import { WARN_FLAGSET_WITHOUT_FLAGS } from '../../logger/constants'; -import { ISplit } from '../../dtos/types'; +import { IDefinition } from '../../dtos/types'; import { IStorageSync } from '../../storages/types'; -const splitsMock: Record = { +const splitsMock: Record = { regular: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on', 'seed': 1684183541, 'configurations': {}, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 
'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, config: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on', 'seed': 1684183541, 'configurations': { 'on': "{color:'black'}" }, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, killed: { 'changeNumber': 1487277320548, 'trafficAllocationSeed': 1667452163, 'trafficAllocation': 100, 'trafficTypeName': 'user', 'name': 'always-on2', 'seed': 1684183541, 'configurations': {}, 'status': 'ACTIVE', 'killed': true, 'defaultTreatment': 'off', 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': '' }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': { 'segmentName': '' }, 'unaryNumericMatcherData': { 'dataType': null, 'value': 0 }, 'whitelistMatcherData': { 'whitelist': null }, 'betweenMatcherData': { 'dataType': null, 'start': 0, 'end': 0 } }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }] }, @@ -33,7 +33,7 @@ const 
mockStorage = { return names.reduce((acc, name) => { acc[name] = this.getSplit(name); return acc; - }, {} as Record); + }, {} as Record); }, getNamesByFlagSets(flagSets: string[]) { return flagSets.map(flagset => flagSetsMock[flagset] || new Set()); @@ -71,7 +71,7 @@ test('EVALUATOR - Multiple evaluations at once / should return right labels, tre config: '{color:\'black\'}', changeNumber: 1487277320548 }, not_existent_split: { - treatment: 'control', label: SPLIT_NOT_FOUND, config: null + treatment: 'control', label: DEFINITION_NOT_FOUND, config: null }, }; @@ -122,7 +122,7 @@ describe('EVALUATOR - Multiple evaluations at once by flag sets', () => { config: '{color:\'black\'}', changeNumber: 1487277320548 }, not_existent_split: { - treatment: 'control', label: SPLIT_NOT_FOUND, config: null + treatment: 'control', label: DEFINITION_NOT_FOUND, config: null }, }; diff --git a/src/evaluator/combiners/and.ts b/src/evaluator/combiners/and.ts index fd239753..a2547c25 100644 --- a/src/evaluator/combiners/and.ts +++ b/src/evaluator/combiners/and.ts @@ -2,11 +2,11 @@ import { findIndex } from '../../utils/lang'; import { ILogger } from '../../logger/types'; import { thenable } from '../../utils/promise/thenable'; import { MaybeThenable } from '../../dtos/types'; -import { ISplitEvaluator } from '../types'; +import { IDefinitionEvaluator } from '../types'; import { ENGINE_COMBINER_AND } from '../../logger/constants'; import SplitIO from '../../../types/splitio'; -export function andCombinerContext(log: ILogger, matchers: Array<(key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) => MaybeThenable>) { +export function andCombinerContext(log: ILogger, matchers: Array<(key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) => MaybeThenable>) { function andResults(results: boolean[]): boolean { // Array.prototype.every is supported by target environments @@ -16,7 +16,7 @@ export function 
andCombinerContext(log: ILogger, matchers: Array<(key: SplitIO.S return hasMatchedAll; } - return function andCombiner(key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator): MaybeThenable { + return function andCombiner(key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator): MaybeThenable { const matcherResults = matchers.map(matcher => matcher(key, attributes, splitEvaluator)); // If any matching result is a thenable we should use Promise.all diff --git a/src/evaluator/combiners/ifelseif.ts b/src/evaluator/combiners/ifelseif.ts index aaba4b27..a28013c6 100644 --- a/src/evaluator/combiners/ifelseif.ts +++ b/src/evaluator/combiners/ifelseif.ts @@ -4,7 +4,7 @@ import { thenable } from '../../utils/promise/thenable'; import { UNSUPPORTED_MATCHER_TYPE } from '../../utils/labels'; import { CONTROL } from '../../utils/constants'; import SplitIO from '../../../types/splitio'; -import { IEvaluation, IEvaluator, ISplitEvaluator } from '../types'; +import { IEvaluation, IEvaluator, IDefinitionEvaluator } from '../types'; import { ENGINE_COMBINER_IFELSEIF, ENGINE_COMBINER_IFELSEIF_NO_TREATMENT, ERROR_ENGINE_COMBINER_IFELSEIF } from '../../logger/constants'; export function ifElseIfCombinerContext(log: ILogger, predicates: IEvaluator[]): IEvaluator { @@ -33,7 +33,7 @@ export function ifElseIfCombinerContext(log: ILogger, predicates: IEvaluator[]): return undefined; } - function ifElseIfCombiner(key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) { + function ifElseIfCombiner(key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) { // In Async environments we are going to have async predicates. 
There is none way to know // before hand so we need to evaluate all the predicates, verify for thenables, and finally, // define how to return the treatment (wrap result into a Promise or not). diff --git a/src/evaluator/condition/index.ts b/src/evaluator/condition/index.ts index 5facaa5c..996a38f1 100644 --- a/src/evaluator/condition/index.ts +++ b/src/evaluator/condition/index.ts @@ -2,7 +2,7 @@ import { getTreatment, shouldApplyRollout } from './engineUtils'; import { thenable } from '../../utils/promise/thenable'; import { NOT_IN_SPLIT } from '../../utils/labels'; import { MaybeThenable } from '../../dtos/types'; -import { IEvaluation, IEvaluator, ISplitEvaluator } from '../types'; +import { IEvaluation, IEvaluator, IDefinitionEvaluator } from '../types'; import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; @@ -22,9 +22,9 @@ function match(log: ILogger, matchingResult: boolean, bucketingKey: string | und } // Condition factory -export function conditionContext(log: ILogger, matcherEvaluator: (key: SplitIO.SplitKeyObject, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) => MaybeThenable, treatments?: { getTreatmentFor: (x: number) => string }, label?: string, conditionType?: 'ROLLOUT' | 'WHITELIST'): IEvaluator { +export function conditionContext(log: ILogger, matcherEvaluator: (key: SplitIO.SplitKeyObject, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) => MaybeThenable, treatments?: { getTreatmentFor: (x: number) => string }, label?: string, conditionType?: 'ROLLOUT' | 'WHITELIST'): IEvaluator { - return function conditionEvaluator(key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) { + return function conditionEvaluator(key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, 
splitEvaluator?: IDefinitionEvaluator) { // Whitelisting has more priority than traffic allocation, so we don't apply this filtering to those conditions. if (conditionType === 'ROLLOUT' && !shouldApplyRollout(trafficAllocation!, key.bucketingKey, trafficAllocationSeed!)) { diff --git a/src/evaluator/fallbackConfigsCalculator/__tests__/fallback-calculator.spec.ts b/src/evaluator/fallbackConfigsCalculator/__tests__/fallback-calculator.spec.ts new file mode 100644 index 00000000..239185fb --- /dev/null +++ b/src/evaluator/fallbackConfigsCalculator/__tests__/fallback-calculator.spec.ts @@ -0,0 +1,50 @@ +import { FallbackConfigsCalculator } from '../'; +import SplitIO from '../../../../types/splitio'; +import { CONTROL } from '../../../utils/constants'; + +describe('FallbackConfigsCalculator', () => { + test('returns specific fallback if config name exists', () => { + const fallbacks: SplitIO.FallbackConfigs = { + byName: { + 'configA': { variant: 'VARIANT_A', value: { key: 1 } }, + }, + }; + const calculator = FallbackConfigsCalculator(fallbacks); + const result = calculator('configA', 'label by name'); + + expect(result).toEqual({ + treatment: 'VARIANT_A', + config: { key: 1 }, + label: 'fallback - label by name', + }); + }); + + test('returns global fallback if config name is missing and global exists', () => { + const fallbacks: SplitIO.FallbackConfigs = { + byName: {}, + global: { variant: 'GLOBAL_VARIANT', value: { global: true } }, + }; + const calculator = FallbackConfigsCalculator(fallbacks); + const result = calculator('missingConfig', 'label by global'); + + expect(result).toEqual({ + treatment: 'GLOBAL_VARIANT', + config: { global: true }, + label: 'fallback - label by global', + }); + }); + + test('returns control fallback if config name and global are missing', () => { + const fallbacks: SplitIO.FallbackConfigs = { + byName: {}, + }; + const calculator = FallbackConfigsCalculator(fallbacks); + const result = calculator('missingConfig', 'label by 
noFallback'); + + expect(result).toEqual({ + treatment: CONTROL, + config: null, + label: 'label by noFallback', + }); + }); +}); diff --git a/src/evaluator/fallbackConfigsCalculator/__tests__/fallback-sanitizer.spec.ts b/src/evaluator/fallbackConfigsCalculator/__tests__/fallback-sanitizer.spec.ts new file mode 100644 index 00000000..12e5807b --- /dev/null +++ b/src/evaluator/fallbackConfigsCalculator/__tests__/fallback-sanitizer.spec.ts @@ -0,0 +1,147 @@ +import { isValidConfigName, isValidConfig, sanitizeFallbacks } from '../fallbackSanitizer'; +import SplitIO from '../../../../types/splitio'; +import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; + +describe('FallbackConfigsSanitizer', () => { + const validConfig: SplitIO.Config = { variant: 'on', value: { color: 'blue' } }; + const invalidVariantConfig: SplitIO.Config = { variant: ' ', value: { color: 'blue' } }; + const invalidValueConfig = { variant: 'on', value: 'not_an_object' } as unknown as SplitIO.Config; + const fallbackMock = { + global: undefined, + byName: {} + }; + + beforeEach(() => { + loggerMock.mockClear(); + }); + + describe('isValidConfigName', () => { + test('returns true for a valid config name', () => { + expect(isValidConfigName('my_config')).toBe(true); + }); + + test('returns false for a name longer than 100 chars', () => { + const longName = 'a'.repeat(101); + expect(isValidConfigName(longName)).toBe(false); + }); + + test('returns false if the name contains spaces', () => { + expect(isValidConfigName('invalid config')).toBe(false); + }); + + test('returns false if the name is not a string', () => { + // @ts-ignore + expect(isValidConfigName(true)).toBe(false); + }); + }); + + describe('isValidConfig', () => { + test('returns true for a valid config', () => { + expect(isValidConfig(validConfig)).toBe(true); + }); + + test('returns false for null or undefined', () => { + expect(isValidConfig()).toBe(false); + expect(isValidConfig(undefined)).toBe(false); + }); + + 
test('returns false for a variant longer than 100 chars', () => { + const long: SplitIO.Config = { variant: 'a'.repeat(101), value: {} }; + expect(isValidConfig(long)).toBe(false); + }); + + test('returns false if variant does not match regex pattern', () => { + const invalid: SplitIO.Config = { variant: 'invalid variant!', value: {} }; + expect(isValidConfig(invalid)).toBe(false); + }); + + test('returns false if value is not an object', () => { + expect(isValidConfig(invalidValueConfig)).toBe(false); + }); + }); + + describe('sanitizeGlobal', () => { + test('returns the config if valid', () => { + expect(sanitizeFallbacks(loggerMock, { ...fallbackMock, global: validConfig })).toEqual({ ...fallbackMock, global: validConfig }); + expect(loggerMock.error).not.toHaveBeenCalled(); + }); + + test('returns undefined and logs error if variant is invalid', () => { + const result = sanitizeFallbacks(loggerMock, { ...fallbackMock, global: invalidVariantConfig }); + expect(result).toEqual(fallbackMock); + expect(loggerMock.error).toHaveBeenCalledWith( + expect.stringContaining('Fallback configs - Discarded fallback') + ); + }); + + test('returns undefined and logs error if value is invalid', () => { + const result = sanitizeFallbacks(loggerMock, { ...fallbackMock, global: invalidValueConfig }); + expect(result).toEqual(fallbackMock); + expect(loggerMock.error).toHaveBeenCalledWith( + expect.stringContaining('Fallback configs - Discarded fallback') + ); + }); + }); + + describe('sanitizeByName', () => { + test('returns a sanitized map with valid entries only', () => { + const input = { + valid_config: validConfig, + 'invalid config': validConfig, + bad_variant: invalidVariantConfig, + }; + + const result = sanitizeFallbacks(loggerMock, { ...fallbackMock, byName: input }); + + expect(result).toEqual({ ...fallbackMock, byName: { valid_config: validConfig } }); + expect(loggerMock.error).toHaveBeenCalledTimes(2); // invalid config name + bad_variant + }); + + test('returns empty 
object if all invalid', () => { + const input = { + 'invalid config': invalidVariantConfig, + }; + + const result = sanitizeFallbacks(loggerMock, { ...fallbackMock, byName: input }); + expect(result).toEqual(fallbackMock); + expect(loggerMock.error).toHaveBeenCalled(); + }); + + test('returns same object if all valid', () => { + const input = { + ...fallbackMock, + byName: { + config_one: validConfig, + config_two: { variant: 'valid_2', value: { key: 'val' } }, + } + }; + + const result = sanitizeFallbacks(loggerMock, input); + expect(result).toEqual(input); + expect(loggerMock.error).not.toHaveBeenCalled(); + }); + }); + + describe('sanitizeFallbacks', () => { + test('returns undefined and logs error if fallbacks is not an object', () => { // @ts-expect-error + const result = sanitizeFallbacks(loggerMock, 'invalid_fallbacks'); + expect(result).toBeUndefined(); + expect(loggerMock.error).toHaveBeenCalledWith( + 'Fallback configs - Discarded configuration: it must be an object with optional `global` and `byName` properties' + ); + }); + + test('returns undefined and logs error if fallbacks is not an object', () => { // @ts-expect-error + const result = sanitizeFallbacks(loggerMock, true); + expect(result).toBeUndefined(); + expect(loggerMock.error).toHaveBeenCalledWith( + 'Fallback configs - Discarded configuration: it must be an object with optional `global` and `byName` properties' + ); + }); + + test('sanitizes both global and byName fallbacks for empty object', () => { // @ts-expect-error + const result = sanitizeFallbacks(loggerMock, { global: {} }); + expect(result).toEqual({ global: undefined, byName: {} }); + }); + }); +}); diff --git a/src/evaluator/fallbackConfigsCalculator/fallbackSanitizer/index.ts b/src/evaluator/fallbackConfigsCalculator/fallbackSanitizer/index.ts new file mode 100644 index 00000000..d4ba9754 --- /dev/null +++ b/src/evaluator/fallbackConfigsCalculator/fallbackSanitizer/index.ts @@ -0,0 +1,76 @@ +import SplitIO from 
'../../../../types/splitio'; +import { ILogger } from '../../../logger/types'; +import { isObject, isString } from '../../../utils/lang'; + +const CONFIG_NAME_DISCARD_REASON = 'Invalid config name (max 100 chars, no spaces)'; +const VARIANT_DISCARD_REASON = 'Invalid variant (max 100 chars and must match pattern)'; +const VALUE_DISCARD_REASON = 'Invalid value (must be an object)'; + +const VARIANT_PATTERN = /^[0-9]+[.a-zA-Z0-9_-]*$|^[a-zA-Z]+[a-zA-Z0-9_-]*$/; + +export function isValidConfigName(name: string): boolean { + return name.length <= 100 && !name.includes(' '); +} + +export function isValidConfig(config?: SplitIO.Config): boolean { + if (!isObject(config)) return false; + if (!isString(config!.variant) || config!.variant.length > 100 || !VARIANT_PATTERN.test(config!.variant)) return false; + if (!isObject(config!.value)) return false; + return true; +} + +function sanitizeGlobal(logger: ILogger, config?: SplitIO.Config): SplitIO.Config | undefined { + if (config === undefined) return undefined; + if (!isValidConfig(config)) { + if (!isObject(config) || !isString(config!.variant) || config!.variant.length > 100 || !VARIANT_PATTERN.test(config!.variant)) { + logger.error(`Fallback configs - Discarded fallback: ${VARIANT_DISCARD_REASON}`); + } else { + logger.error(`Fallback configs - Discarded fallback: ${VALUE_DISCARD_REASON}`); + } + return undefined; + } + return config; +} + +function sanitizeByName( + logger: ILogger, + byNameFallbacks?: Record +): Record { + const sanitizedByName: Record = {}; + + if (!isObject(byNameFallbacks)) return sanitizedByName; + + Object.keys(byNameFallbacks!).forEach((configName) => { + const config = byNameFallbacks![configName]; + + if (!isValidConfigName(configName)) { + logger.error(`Fallback configs - Discarded config '${configName}': ${CONFIG_NAME_DISCARD_REASON}`); + return; + } + + if (!isValidConfig(config)) { + if (!isObject(config) || !isString(config.variant) || config.variant.length > 100 || 
!VARIANT_PATTERN.test(config.variant)) { + logger.error(`Fallback configs - Discarded config '${configName}': ${VARIANT_DISCARD_REASON}`); + } else { + logger.error(`Fallback configs - Discarded config '${configName}': ${VALUE_DISCARD_REASON}`); + } + return; + } + + sanitizedByName[configName] = config; + }); + + return sanitizedByName; +} + +export function sanitizeFallbacks(logger: ILogger, fallbacks: SplitIO.FallbackConfigs): SplitIO.FallbackConfigs | undefined { + if (!isObject(fallbacks)) { + logger.error('Fallback configs - Discarded configuration: it must be an object with optional `global` and `byName` properties'); + return; + } + + return { + global: sanitizeGlobal(logger, fallbacks.global), + byName: sanitizeByName(logger, fallbacks.byName) + }; +} diff --git a/src/evaluator/fallbackConfigsCalculator/index.ts b/src/evaluator/fallbackConfigsCalculator/index.ts new file mode 100644 index 00000000..fa80e9bd --- /dev/null +++ b/src/evaluator/fallbackConfigsCalculator/index.ts @@ -0,0 +1,24 @@ +import { IFallbackCalculator } from '../fallbackTreatmentsCalculator/index'; +import { CONTROL } from '../../utils/constants'; +import SplitIO from '../../../types/splitio'; + +export const FALLBACK_PREFIX = 'fallback - '; + +export function FallbackConfigsCalculator(fallbacks: SplitIO.FallbackConfigs = {}): IFallbackCalculator { + + return (configName: string, label = '') => { + const fallback = fallbacks.byName?.[configName] || fallbacks.global; + + return fallback ? 
+ { + treatment: fallback.variant, + config: fallback.value, + label: `${FALLBACK_PREFIX}${label}`, + } : + { + treatment: CONTROL, + config: null, + label, + }; + }; +} diff --git a/src/evaluator/fallbackTreatmentsCalculator/fallbackSanitizer/index.ts b/src/evaluator/fallbackTreatmentsCalculator/fallbackSanitizer/index.ts index 95ac25e1..db8aab26 100644 --- a/src/evaluator/fallbackTreatmentsCalculator/fallbackSanitizer/index.ts +++ b/src/evaluator/fallbackTreatmentsCalculator/fallbackSanitizer/index.ts @@ -2,10 +2,8 @@ import { FallbackTreatmentConfiguration, Treatment, TreatmentWithConfig } from ' import { ILogger } from '../../../logger/types'; import { isObject, isString } from '../../../utils/lang'; -enum FallbackDiscardReason { - FlagName = 'Invalid flag name (max 100 chars, no spaces)', - Treatment = 'Invalid treatment (max 100 chars and must match pattern)', -} +const FLAG_NAME_DISCARD_REASON = 'Invalid flag name (max 100 chars, no spaces)'; +const TREATMENT_DISCARD_REASON = 'Invalid treatment (max 100 chars and must match pattern)'; const TREATMENT_PATTERN = /^[0-9]+[.a-zA-Z0-9_-]*$|^[a-zA-Z]+[a-zA-Z0-9_-]*$/; @@ -25,7 +23,7 @@ export function isValidTreatment(t?: Treatment | TreatmentWithConfig): boolean { function sanitizeGlobal(logger: ILogger, treatment?: Treatment | TreatmentWithConfig): Treatment | TreatmentWithConfig | undefined { if (treatment === undefined) return undefined; if (!isValidTreatment(treatment)) { - logger.error(`Fallback treatments - Discarded fallback: ${FallbackDiscardReason.Treatment}`); + logger.error(`Fallback treatments - Discarded fallback: ${TREATMENT_DISCARD_REASON}`); return undefined; } return treatment; @@ -43,12 +41,12 @@ function sanitizeByFlag( const t = byFlagFallbacks![flag]; if (!isValidFlagName(flag)) { - logger.error(`Fallback treatments - Discarded flag '${flag}': ${FallbackDiscardReason.FlagName}`); + logger.error(`Fallback treatments - Discarded flag '${flag}': ${FLAG_NAME_DISCARD_REASON}`); return; } if 
(!isValidTreatment(t)) { - logger.error(`Fallback treatments - Discarded treatment for flag '${flag}': ${FallbackDiscardReason.Treatment}`); + logger.error(`Fallback treatments - Discarded treatment for flag '${flag}': ${TREATMENT_DISCARD_REASON}`); return; } diff --git a/src/evaluator/fallbackTreatmentsCalculator/index.ts b/src/evaluator/fallbackTreatmentsCalculator/index.ts index 5c2b4663..fb213d25 100644 --- a/src/evaluator/fallbackTreatmentsCalculator/index.ts +++ b/src/evaluator/fallbackTreatmentsCalculator/index.ts @@ -1,12 +1,16 @@ -import { FallbackTreatmentConfiguration, TreatmentWithConfig } from '../../../types/splitio'; import { CONTROL } from '../../utils/constants'; import { isString } from '../../utils/lang'; +import SplitIO from '../../../types/splitio'; -export type IFallbackTreatmentsCalculator = (flagName: string, label?: string) => TreatmentWithConfig & { label: string }; +export type IFallbackCalculator = (definitionName: string, label?: string) => { + treatment: string; + config: string | null | SplitIO.JsonObject; + label: string +}; export const FALLBACK_PREFIX = 'fallback - '; -export function FallbackTreatmentsCalculator(fallbacks: FallbackTreatmentConfiguration = {}): IFallbackTreatmentsCalculator { +export function FallbackTreatmentsCalculator(fallbacks: SplitIO.FallbackTreatmentConfiguration = {}): IFallbackCalculator { return (flagName: string, label = '') => { const fallback = fallbacks.byFlag?.[flagName] || fallbacks.global; diff --git a/src/evaluator/index.ts b/src/evaluator/index.ts index 574a8337..bc0a1e07 100644 --- a/src/evaluator/index.ts +++ b/src/evaluator/index.ts @@ -1,8 +1,8 @@ import { engineParser } from './Engine'; import { thenable } from '../utils/promise/thenable'; -import { EXCEPTION, SPLIT_NOT_FOUND } from '../utils/labels'; +import { EXCEPTION, NO_CONDITION_MATCH, DEFINITION_NOT_FOUND } from '../utils/labels'; import { CONTROL } from '../utils/constants'; -import { ISplit, MaybeThenable } from '../dtos/types'; 
+import { IDefinition, MaybeThenable } from '../dtos/types'; import { IStorageAsync, IStorageSync } from '../storages/types'; import { IEvaluationResult } from './types'; import SplitIO from '../../types/splitio'; @@ -10,16 +10,22 @@ import { ILogger } from '../logger/types'; import { returnSetsUnion, setToArray } from '../utils/lang/sets'; import { WARN_FLAGSET_WITHOUT_FLAGS } from '../logger/constants'; -const treatmentException = { +const EVALUATION_EXCEPTION = { treatment: CONTROL, label: EXCEPTION, config: null }; -function treatmentsException(splitNames: string[]) { +const EVALUATION_DEFINITION_NOT_FOUND = { + treatment: CONTROL, + label: DEFINITION_NOT_FOUND, + config: null +}; + +function treatmentsException(definitionNames: string[]) { const evaluations: Record = {}; - splitNames.forEach(splitName => { - evaluations[splitName] = treatmentException; + definitionNames.forEach(definitionName => { + evaluations[definitionName] = EVALUATION_EXCEPTION; }); return evaluations; } @@ -27,39 +33,39 @@ function treatmentsException(splitNames: string[]) { export function evaluateFeature( log: ILogger, key: SplitIO.SplitKey, - splitName: string, + definitionName: string, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync, options?: SplitIO.EvaluationOptions ): MaybeThenable { - let parsedSplit; + let definition; try { - parsedSplit = storage.splits.getSplit(splitName); + definition = storage.splits.getSplit(definitionName); } catch (e) { // Exception on sync `getSplit` storage. Not possible ATM with InMemory and InLocal storages. - return treatmentException; + return EVALUATION_EXCEPTION; } - if (thenable(parsedSplit)) { - return parsedSplit.then((split) => getEvaluation( + if (thenable(definition)) { + return definition.then((definition) => getEvaluation( log, key, - split, + definition, attributes, storage, options, )).catch( // Exception on async `getSplit` storage. 
For example, when the storage is redis or // pluggable and there is a connection issue and we can't retrieve the split to be evaluated - () => treatmentException + () => EVALUATION_EXCEPTION ); } return getEvaluation( log, key, - parsedSplit, + definition, attributes, storage, options, @@ -69,28 +75,28 @@ export function evaluateFeature( export function evaluateFeatures( log: ILogger, key: SplitIO.SplitKey, - splitNames: string[], + definitionNames: string[], attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync, options?: SplitIO.EvaluationOptions, ): MaybeThenable> { - let parsedSplits; + let definitions; try { - parsedSplits = storage.splits.getSplits(splitNames); + definitions = storage.splits.getSplits(definitionNames); } catch (e) { // Exception on sync `getSplits` storage. Not possible ATM with InMemory and InLocal storages. - return treatmentsException(splitNames); + return treatmentsException(definitionNames); } - return thenable(parsedSplits) ? - parsedSplits.then(splits => getEvaluations(log, key, splitNames, splits, attributes, storage, options)) + return thenable(definitions) ? + definitions.then(definitions => getEvaluations(log, key, definitionNames, definitions, attributes, storage, options)) .catch(() => { // Exception on async `getSplits` storage. 
For example, when the storage is redis or // pluggable and there is a connection issue and we can't retrieve the split to be evaluated - return treatmentsException(splitNames); + return treatmentsException(definitionNames); }) : - getEvaluations(log, key, splitNames, parsedSplits, attributes, storage, options); + getEvaluations(log, key, definitionNames, definitions, attributes, storage, options); } export function evaluateFeaturesByFlagSets( @@ -140,70 +146,99 @@ export function evaluateFeaturesByFlagSets( function getEvaluation( log: ILogger, key: SplitIO.SplitKey, - splitJSON: ISplit | null, + definition: IDefinition | null, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync, options?: SplitIO.EvaluationOptions, ): MaybeThenable { - let evaluation: MaybeThenable = { - treatment: CONTROL, - label: SPLIT_NOT_FOUND, - config: null - }; - if (splitJSON) { - const split = engineParser(log, splitJSON, storage); - evaluation = split.getTreatment(key, attributes, evaluateFeature); + if (definition) { + const split = engineParser(log, definition, storage); + const evaluation = split.getTreatment(key, attributes, evaluateFeature); // If the storage is async and the evaluated flag uses segments or dependencies, evaluation is thenable if (thenable(evaluation)) { return evaluation.then(result => { - result.changeNumber = splitJSON.changeNumber; - result.config = splitJSON.configurations && splitJSON.configurations[result.treatment] || null; + result.changeNumber = definition.changeNumber; + result.config = definition.configurations && definition.configurations[result.treatment] || null; // @ts-expect-error impressionsDisabled is not exposed in the public typings yet. 
- result.impressionsDisabled = options?.impressionsDisabled || splitJSON.impressionsDisabled; + result.impressionsDisabled = options?.impressionsDisabled || definition.impressionsDisabled; return result; }); } else { - evaluation.changeNumber = splitJSON.changeNumber; - evaluation.config = splitJSON.configurations && splitJSON.configurations[evaluation.treatment] || null; + evaluation.changeNumber = definition.changeNumber; + evaluation.config = definition.configurations && definition.configurations[evaluation.treatment] || null; // @ts-expect-error impressionsDisabled is not exposed in the public typings yet. - evaluation.impressionsDisabled = options?.impressionsDisabled || splitJSON.impressionsDisabled; + evaluation.impressionsDisabled = options?.impressionsDisabled || definition.impressionsDisabled; } + + return evaluation; } - return evaluation; + return EVALUATION_DEFINITION_NOT_FOUND; } function getEvaluations( log: ILogger, key: SplitIO.SplitKey, - splitNames: string[], - splits: Record, + definitionNames: string[], + splits: Record, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync, options?: SplitIO.EvaluationOptions, ): MaybeThenable> { const result: Record = {}; const thenables: Promise[] = []; - splitNames.forEach(splitName => { + definitionNames.forEach(definitionName => { const evaluation = getEvaluation( log, key, - splits[splitName], + splits[definitionName], attributes, storage, options ); if (thenable(evaluation)) { thenables.push(evaluation.then(res => { - result[splitName] = res; + result[definitionName] = res; })); } else { - result[splitName] = evaluation; + result[definitionName] = evaluation; } }); return thenables.length > 0 ? 
Promise.all(thenables).then(() => result) : result; } + +export function evaluateDefaultTreatment( + definitionName: string, + storage: IStorageSync | IStorageAsync, +): MaybeThenable { + let definition; + + try { + definition = storage.splits.getSplit(definitionName); + } catch (e) { + return EVALUATION_EXCEPTION; + } + + return thenable(definition) ? + definition.then(getDefaultTreatment).catch(() => EVALUATION_EXCEPTION) : + getDefaultTreatment(definition); +} + +function getDefaultTreatment( + definition: IDefinition | null, +): MaybeThenable { + if (definition) { + return { + treatment: definition.defaultTreatment, + label: NO_CONDITION_MATCH, // "default rule" + config: definition.configurations && definition.configurations[definition.defaultTreatment] || null, + changeNumber: definition.changeNumber + }; + } + + return EVALUATION_DEFINITION_NOT_FOUND; +} diff --git a/src/evaluator/matchers/__tests__/dependency.spec.ts b/src/evaluator/matchers/__tests__/dependency.spec.ts index 7cb184d6..c9463e18 100644 --- a/src/evaluator/matchers/__tests__/dependency.spec.ts +++ b/src/evaluator/matchers/__tests__/dependency.spec.ts @@ -4,10 +4,10 @@ import { evaluateFeature } from '../../index'; import { IMatcher, IMatcherDto } from '../../types'; import { IStorageSync } from '../../../storages/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { ALWAYS_ON_SPLIT, ALWAYS_OFF_SPLIT } from '../../../storages/__tests__/testUtils'; -const STORED_SPLITS: Record = { +const STORED_SPLITS: Record = { 'always-on': ALWAYS_ON_SPLIT, 'always-off': ALWAYS_OFF_SPLIT }; diff --git a/src/evaluator/matchers/__tests__/prerequisites.spec.ts b/src/evaluator/matchers/__tests__/prerequisites.spec.ts index 2d67eefa..7b1a55cd 100644 --- a/src/evaluator/matchers/__tests__/prerequisites.spec.ts +++ b/src/evaluator/matchers/__tests__/prerequisites.spec.ts @@ -1,11 +1,11 
@@ import { evaluateFeature } from '../../index'; import { IStorageSync } from '../../../storages/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { ALWAYS_ON_SPLIT, ALWAYS_OFF_SPLIT } from '../../../storages/__tests__/testUtils'; import { prerequisitesMatcherContext } from '../prerequisites'; -const STORED_SPLITS: Record = { +const STORED_SPLITS: Record = { 'always-on': ALWAYS_ON_SPLIT, 'always-off': ALWAYS_OFF_SPLIT }; diff --git a/src/evaluator/matchers/__tests__/rbsegment.spec.ts b/src/evaluator/matchers/__tests__/rbsegment.spec.ts index db597738..f1048d17 100644 --- a/src/evaluator/matchers/__tests__/rbsegment.spec.ts +++ b/src/evaluator/matchers/__tests__/rbsegment.spec.ts @@ -3,12 +3,12 @@ import { matcherFactory } from '..'; import { evaluateFeature } from '../../index'; import { IMatcherDto } from '../../types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { IRBSegment, ISplit } from '../../../dtos/types'; +import { IRBSegment, IDefinition } from '../../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../../storages/types'; import { thenable } from '../../../utils/promise/thenable'; import { ALWAYS_ON_SPLIT } from '../../../storages/__tests__/testUtils'; -const STORED_SPLITS: Record = { +const STORED_SPLITS: Record = { 'always-on': ALWAYS_ON_SPLIT }; diff --git a/src/evaluator/matchers/dependency.ts b/src/evaluator/matchers/dependency.ts index 68448a8c..2b1128f8 100644 --- a/src/evaluator/matchers/dependency.ts +++ b/src/evaluator/matchers/dependency.ts @@ -2,7 +2,7 @@ import { IDependencyMatcherData, MaybeThenable } from '../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../storages/types'; import { ILogger } from '../../logger/types'; import { thenable } from '../../utils/promise/thenable'; -import { IDependencyMatcherValue, IEvaluation, ISplitEvaluator } 
from '../types'; +import { IDependencyMatcherValue, IEvaluation, IDefinitionEvaluator } from '../types'; import { ENGINE_MATCHER_DEPENDENCY, ENGINE_MATCHER_DEPENDENCY_PRE } from '../../logger/constants'; export function dependencyMatcherContext({ split, treatments }: IDependencyMatcherData, storage: IStorageSync | IStorageAsync, log: ILogger) { @@ -19,7 +19,7 @@ export function dependencyMatcherContext({ split, treatments }: IDependencyMatch return matches; } - return function dependencyMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: ISplitEvaluator): MaybeThenable { + return function dependencyMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: IDefinitionEvaluator): MaybeThenable { log.debug(ENGINE_MATCHER_DEPENDENCY_PRE, [split, JSON.stringify(key), attributes ? '\n attributes: ' + JSON.stringify(attributes) : '']); const evaluation = splitEvaluator(log, key, split, attributes, storage); diff --git a/src/evaluator/matchers/prerequisites.ts b/src/evaluator/matchers/prerequisites.ts index 247e9948..2c4868f3 100644 --- a/src/evaluator/matchers/prerequisites.ts +++ b/src/evaluator/matchers/prerequisites.ts @@ -1,12 +1,12 @@ -import { ISplit, MaybeThenable } from '../../dtos/types'; +import { IDefinition, MaybeThenable } from '../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../storages/types'; import { ILogger } from '../../logger/types'; import { thenable } from '../../utils/promise/thenable'; -import { IDependencyMatcherValue, ISplitEvaluator } from '../types'; +import { IDependencyMatcherValue, IDefinitionEvaluator } from '../types'; -export function prerequisitesMatcherContext(prerequisites: ISplit['prerequisites'], storage: IStorageSync | IStorageAsync, log: ILogger) { +export function prerequisitesMatcherContext(prerequisites: IDefinition['prerequisites'], storage: IStorageSync | IStorageAsync, log: ILogger) { - return function prerequisitesMatcher({ key, attributes }: IDependencyMatcherValue, 
splitEvaluator: ISplitEvaluator): MaybeThenable { + return function prerequisitesMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: IDefinitionEvaluator): MaybeThenable { prerequisites = prerequisites == null ? [] : prerequisites; diff --git a/src/evaluator/matchers/rbsegment.ts b/src/evaluator/matchers/rbsegment.ts index f9cc12e4..01cfbed2 100644 --- a/src/evaluator/matchers/rbsegment.ts +++ b/src/evaluator/matchers/rbsegment.ts @@ -1,7 +1,7 @@ import { IExcludedSegment, IRBSegment, MaybeThenable } from '../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../storages/types'; import { ILogger } from '../../logger/types'; -import { IDependencyMatcherValue, ISplitEvaluator } from '../types'; +import { IDependencyMatcherValue, IDefinitionEvaluator } from '../types'; import { thenable } from '../../utils/promise/thenable'; import { getMatching, keyParser } from '../../utils/key'; import { parser } from '../parser'; @@ -10,7 +10,7 @@ import { STANDARD_SEGMENT, RULE_BASED_SEGMENT, LARGE_SEGMENT } from '../../utils export function ruleBasedSegmentMatcherContext(segmentName: string, storage: IStorageSync | IStorageAsync, log: ILogger) { - return function ruleBasedSegmentMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: ISplitEvaluator): MaybeThenable { + return function ruleBasedSegmentMatcher({ key, attributes }: IDependencyMatcherValue, splitEvaluator: IDefinitionEvaluator): MaybeThenable { const matchingKey = getMatching(key); function matchConditions(rbsegment: IRBSegment) { diff --git a/src/evaluator/matchersTransform/index.ts b/src/evaluator/matchersTransform/index.ts index 075ea9f0..c842790f 100644 --- a/src/evaluator/matchersTransform/index.ts +++ b/src/evaluator/matchersTransform/index.ts @@ -4,13 +4,13 @@ import { segmentTransform } from './segment'; import { whitelistTransform } from './whitelist'; import { numericTransform } from './unaryNumeric'; import { zeroSinceHH, zeroSinceSS, betweenDateTimeTransform } 
from '../convertions'; -import { IBetweenMatcherData, IInLargeSegmentMatcherData, IInSegmentMatcherData, ISplitMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; +import { IBetweenMatcherData, IInLargeSegmentMatcherData, IInSegmentMatcherData, IDefinitionMatcher, IUnaryNumericMatcherData } from '../../dtos/types'; import { IMatcherDto } from '../types'; /** * Flat the complex matcherGroup structure into something handy. */ -export function matchersTransform(matchers: ISplitMatcher[]): IMatcherDto[] { +export function matchersTransform(matchers: IDefinitionMatcher[]): IMatcherDto[] { let parsedMatchers = matchers.map(matcher => { let { diff --git a/src/evaluator/matchersTransform/whitelist.ts b/src/evaluator/matchersTransform/whitelist.ts index eab5966f..6eb64592 100644 --- a/src/evaluator/matchersTransform/whitelist.ts +++ b/src/evaluator/matchersTransform/whitelist.ts @@ -1,8 +1,8 @@ -import { ISplitMatcher } from '../../dtos/types'; +import { IDefinitionMatcher } from '../../dtos/types'; /** * Extract whitelist array. 
*/ -export function whitelistTransform(whitelistObject: ISplitMatcher['whitelistMatcherData']) { +export function whitelistTransform(whitelistObject: IDefinitionMatcher['whitelistMatcherData']) { return whitelistObject && whitelistObject.whitelist; } diff --git a/src/evaluator/parser/__tests__/boolean.spec.ts b/src/evaluator/parser/__tests__/boolean.spec.ts index 255a5cf6..06f397b2 100644 --- a/src/evaluator/parser/__tests__/boolean.spec.ts +++ b/src/evaluator/parser/__tests__/boolean.spec.ts @@ -1,6 +1,6 @@ import { parser } from '..'; import { keyParser } from '../../../utils/key'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { IEvaluation } from '../../types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; @@ -24,7 +24,7 @@ test('PARSER / if user.boolean is true then split 100%:on', async () => { treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('testing'), 31, 100, 31, { bool: false diff --git a/src/evaluator/parser/__tests__/index.spec.ts b/src/evaluator/parser/__tests__/index.spec.ts index c3829a3d..ba00f2df 100644 --- a/src/evaluator/parser/__tests__/index.spec.ts +++ b/src/evaluator/parser/__tests__/index.spec.ts @@ -1,7 +1,7 @@ // @ts-nocheck import { parser } from '..'; import { keyParser } from '../../../utils/key'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; test('PARSER / if user is in segment all 100%:on', async () => { @@ -21,7 +21,7 @@ test('PARSER / if user is in segment all 100%:on', async () => { size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31); @@ -50,7 +50,7 @@ test('PARSER / if user is in segment all 
100%:off', async () => { size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31); @@ -75,7 +75,7 @@ test('PARSER / NEGATED if user is in segment all 100%:on, then no match', async size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31); @@ -107,7 +107,7 @@ test('PARSER / if user is in segment ["u1", "u2", "u3", "u4"] then split 100%:on size: 100 }], label: 'whitelisted' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation === undefined).toBe(true); // evaluation should throw undefined @@ -146,7 +146,7 @@ test('PARSER / NEGATED if user is in segment ["u1", "u2", "u3", "u4"] then split size: 100 }], label: 'whitelisted' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation.treatment).toBe('on'); // on @@ -190,7 +190,7 @@ test('PARSER / if user.account is in list ["v1", "v2", "v3"] then split 100:on', size: 100 }], label: 'whitelisted' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { account: 'v1' @@ -236,7 +236,7 @@ test('PARSER / NEGATED if user.account is in list ["v1", "v2", "v3"] then split size: 100 }], label: 'whitelisted' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { account: 'v1' @@ -273,7 +273,7 @@ test('PARSER / if user.account is in segment all then split 100:on', async () => size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31); expect(evaluation.treatment === 
'on').toBe(true); // ALL_KEYS always matches @@ -305,7 +305,7 @@ test('PARSER / if user.attr is between 10 and 20 then split 100:on', async () => treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: 10 @@ -346,7 +346,7 @@ test('PARSER / NEGATED if user.attr is between 10 and 20 then split 100:on, nega treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: 10 @@ -387,7 +387,7 @@ test('PARSER / if user.attr <= datetime 1458240947021 then split 100:on', async treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: new Date('2016-03-17T18:55:47.021Z').getTime() @@ -432,7 +432,7 @@ test('PARSER / NEGATED if user.attr <= datetime 1458240947021 then split 100:on, treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: new Date('2016-03-17T18:55:47.021Z').getTime() @@ -478,7 +478,7 @@ test('PARSER / if user.attr >= datetime 1458240947021 then split 100:on', async treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: new Date('2016-03-17T18:55:47.021Z').getTime() @@ -523,7 +523,7 @@ test('PARSER / NEGATED if user.attr >= datetime 1458240947021 then split 100:on, treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: new Date('2016-03-17T18:55:47.021Z').getTime() @@ -569,7 +569,7 @@ test('PARSER / if user.attr = datetime 1458240947021 then split 100:on', async ( treatment: 'on', 
size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: 1458240947021 @@ -614,7 +614,7 @@ test('PARSER / NEGATED if user.attr = datetime 1458240947021 then split 100:on, treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('test@split.io'), 31, 100, 31, { attr: 1458240947021 @@ -656,7 +656,7 @@ test('PARSER / if user is in segment all then split 20%:A,20%:B,60%:A', async () treatment: 'A', size: 60 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('aa'), 31, 100, 31); expect(evaluation.treatment).toBe('A'); // 20%:A // bucket 6 with murmur3 diff --git a/src/evaluator/parser/__tests__/invalidMatcher.spec.ts b/src/evaluator/parser/__tests__/invalidMatcher.spec.ts index 87cfc422..9eabbbe8 100644 --- a/src/evaluator/parser/__tests__/invalidMatcher.spec.ts +++ b/src/evaluator/parser/__tests__/invalidMatcher.spec.ts @@ -1,6 +1,6 @@ // @ts-nocheck import { parser } from '..'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; test('PARSER / handle invalid matcher as control', async () => { @@ -24,7 +24,7 @@ test('PARSER / handle invalid matcher as control', async () => { treatment: 'A', size: 60 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator('aaaaa', 31); diff --git a/src/evaluator/parser/__tests__/regex.spec.ts b/src/evaluator/parser/__tests__/regex.spec.ts index 736e93d3..1775301b 100644 --- a/src/evaluator/parser/__tests__/regex.spec.ts +++ b/src/evaluator/parser/__tests__/regex.spec.ts @@ -1,6 +1,6 @@ import { parser } from '..'; import { keyParser } from '../../../utils/key'; -import { ISplitCondition } from '../../../dtos/types'; +import { 
IDefinitionCondition } from '../../../dtos/types'; import { IEvaluation } from '../../types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; @@ -23,7 +23,7 @@ test('PARSER / if user.string is true then split 100%:on', async () => { treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('testing'), 31, 100, 31, { string: 'ehllo dude' diff --git a/src/evaluator/parser/__tests__/set.spec.ts b/src/evaluator/parser/__tests__/set.spec.ts index 6a6d8c35..5e217237 100644 --- a/src/evaluator/parser/__tests__/set.spec.ts +++ b/src/evaluator/parser/__tests__/set.spec.ts @@ -1,7 +1,7 @@ // @ts-nocheck import { parser } from '..'; import { keyParser } from '../../../utils/key'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; // @@ -31,7 +31,7 @@ test('PARSER / if user.permissions ["read", "write"] equal to set ["read", "writ size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'write'] @@ -66,7 +66,7 @@ test('PARSER / if user.permissions ["write", "read"] equal to set ["read", "writ size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['write', 'read'] @@ -101,7 +101,7 @@ test('PARSER / if user.permissions ["1", 2] equal to set ["1", "2"] then split 1 size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['1', 2] @@ -136,7 +136,7 @@ test('PARSER / if user.permissions ["read", "write", "delete"] equal to set ["re size: 100 }], label: label - }] as ISplitCondition[]); + }] as 
IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'write', 'delete'] @@ -170,7 +170,7 @@ test('PARSER / if user.permissions ["read"] equal to set ["read", "write"] then size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read'] @@ -204,7 +204,7 @@ test('PARSER / if user.permissions ["read", "delete"] equal to set ["read", "wri size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'delete'] @@ -238,7 +238,7 @@ test('PARSER / if user.countries ["argentina", "usa"] equal to set ["usa","argen size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { countries: ['argentina', 'usa'] @@ -272,7 +272,7 @@ test('PARSER / if attribute is not an array we should not match equal to set', a size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation).toBe(undefined); // evaluator should not match @@ -307,7 +307,7 @@ test('PARSER / if attribute is an EMPTY array we should not match equal to set', size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { countries: [] @@ -340,7 +340,7 @@ test('PARSER / NEGATED if user.permissions ["read", "write"] equal to set ["read size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'write'] @@ -373,7 +373,7 @@ test('PARSER / NEGATED if user.permissions ["read"] equal to set ["read", "write size: 100 }], 
label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read'] @@ -407,7 +407,7 @@ test('PARSER / NEGATED if attribute is not an array we should not match equal to size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation.treatment).toBe('on'); // on @@ -444,7 +444,7 @@ test('PARSER / NEGATED if attribute is an EMPTY array we should not match equal size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { countries: [] @@ -481,7 +481,7 @@ test('PARSER / if user.permissions ["read", "edit", "delete"] contains all of se size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'edit', 'delete'] @@ -516,7 +516,7 @@ test('PARSER / if user.permissions ["edit", "read", "delete"] contains all of se size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['edit', 'read', 'delete'] @@ -551,7 +551,7 @@ test('PARSER / if user.permissions [1, "edit", "delete"] contains all of set ["1 size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [1, 'edit', 'delete'] @@ -586,7 +586,7 @@ test('PARSER / if user.permissions ["read"] contains all of set ["read", "edit"] size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read'] @@ -620,7 +620,7 @@ test('PARSER / if user.permissions ["read", "delete", "manage"] 
contains all of size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'delete', 'manage'] @@ -654,7 +654,7 @@ test('PARSER / if attribute is not an array we should not match contains all', a size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation).toBe(undefined); // evaluator should not match @@ -689,7 +689,7 @@ test('PARSER / if attribute is an EMPTY array we should not match contains all', size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] @@ -722,7 +722,7 @@ test('PARSER / NEGATED if user.permissions ["read", "edit", "delete"] contains a size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'edit', 'delete'] @@ -755,7 +755,7 @@ test('PARSER / NEGATED if user.permissions ["read"] contains all of set ["read", size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read'] @@ -789,7 +789,7 @@ test('PARSER / NEGATED if attribute is not an array we should not match contains size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation.treatment).toBe('on'); // on @@ -826,7 +826,7 @@ test('PARSER / NEGATED if attribute is an EMPTY array we should not match contai size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] @@ -863,7 +863,7 @@ 
test('PARSER / if user.permissions ["read", "edit"] is part of set ["read", "edi size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['read', 'edit'] @@ -898,7 +898,7 @@ test('PARSER / if user.permissions ["edit", "read"] is part of set ["read", "edi size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['edit', 'read'] @@ -933,7 +933,7 @@ test('PARSER / if user.permissions [1, "edit"] is part of set ["1", "edit", "del size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [1, 'edit'] @@ -968,7 +968,7 @@ test('PARSER / if user.permissions ["admin", "magic"] is part of set ["read", "e size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'magic'] @@ -1002,7 +1002,7 @@ test('PARSER / if attribute is not an array we should not match part of', async size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation).toBe(undefined); // evaluator should not match @@ -1037,7 +1037,7 @@ test('PARSER / if attribute is an EMPTY array we should not match part of', asyn size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] @@ -1070,7 +1070,7 @@ test('PARSER / NEGATED if user.permissions ["read", "edit"] is part of set ["rea size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { 
permissions: ['read', 'edit'] @@ -1103,7 +1103,7 @@ test('PARSER / NEGATED if user.permissions ["admin", "magic"] is part of set ["r size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'magic'] @@ -1137,7 +1137,7 @@ test('PARSER / NEGATED if attribute is not an array we should not match part of, size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation.treatment).toBe('on'); // on @@ -1174,7 +1174,7 @@ test('PARSER / NEGATED if attribute is an EMPTY array we should not match part o size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] @@ -1211,7 +1211,7 @@ test('PARSER / if user.permissions ["admin", "edit"] contains any of set ["read" size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'edit'] @@ -1246,7 +1246,7 @@ test('PARSER / if user.permissions ["admin", 1] contains any of set ["read", "1" size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 1] @@ -1281,7 +1281,7 @@ test('PARSER / if user.permissions ["admin", "magic"] contains any of set ["read size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'magic'] @@ -1315,7 +1315,7 @@ test('PARSER / if attribute is not an array we should not match contains any', a size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await 
evaluator(keyParser('a key'), 31, 100, 31); expect(evaluation).toBe(undefined); // evaluator should not match @@ -1350,7 +1350,7 @@ test('PARSER / if attribute is an EMPTY array we should not match contains any', size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] @@ -1383,7 +1383,7 @@ test('PARSER / NEGATED if user.permissions ["admin", "edit"] contains any of set size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'edit'] @@ -1416,7 +1416,7 @@ test('PARSER / NEGATED if user.permissions ["admin", "magic"] contains any of se size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: ['admin', 'magic'] @@ -1450,7 +1450,7 @@ test('PARSER / NEGATED if attribute is not an array we should not match contains size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31); @@ -1482,7 +1482,7 @@ test('PARSER / NEGATED if attribute is an EMPTY array we should not match contai size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { permissions: [] diff --git a/src/evaluator/parser/__tests__/string.spec.ts b/src/evaluator/parser/__tests__/string.spec.ts index 81fe9b9d..6d59326c 100644 --- a/src/evaluator/parser/__tests__/string.spec.ts +++ b/src/evaluator/parser/__tests__/string.spec.ts @@ -1,6 +1,6 @@ // @ts-nocheck import { parser } from '..'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { keyParser } from '../../../utils/key'; import { loggerMock } 
from '../../../logger/__tests__/sdkLogger.mock'; @@ -30,7 +30,7 @@ test('PARSER / if user.email starts with ["nico"] then split 100:on', async () = size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -64,7 +64,7 @@ test('PARSER / if user.email = 123, starts with ["1"] then split 100:on should m size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 123 @@ -98,7 +98,7 @@ test('PARSER / if user.email starts with ["nico", "marcio", "facu"] then split 1 size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@split.io' @@ -132,7 +132,7 @@ test('PARSER / if user.email starts with ["nico", "marcio", "facu"] then split 1 size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'marciomisi@split.io' @@ -165,7 +165,7 @@ test('PARSER / if user.email does not start with ["nico"] then not match', async treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@split.io' @@ -197,7 +197,7 @@ test('PARSER / if user.email is an EMPTY string, start with ["nico"] should not treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -227,7 +227,7 @@ test('PARSER / if user.email is not a string, start with ["nico"] should not mat treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { 
email: {} @@ -261,7 +261,7 @@ test('PARSER / NEGATED if user.email starts with ["nico"] then split 100:on, so size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -294,7 +294,7 @@ test('PARSER / NEGATED if user.email does not start with ["nico"] should not mat size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@split.io' @@ -328,7 +328,7 @@ test('PARSER / NEGATED if user.email is an EMPTY string, start with ["nico"] sho size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -360,7 +360,7 @@ test('PARSER / NEGATED if user.email is not a string, start with ["nico"] should size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: /asd4?/ @@ -399,7 +399,7 @@ test('PARSER / if user.email ends with ["split.io"] then split 100:on', async () size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -433,7 +433,7 @@ test('PARSER / if user.email = 123, ends with ["3"] then split 100:on should mat size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 123 @@ -466,7 +466,7 @@ test('PARSER / if user.email ends with ["gmail.com", "split.io", "hotmail.com"] size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -500,7 +500,7 @@ 
test('PARSER / if user.email ends with ["gmail.com", "split.io", "hotmail.com"] size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@hotmail.com' @@ -534,7 +534,7 @@ test('PARSER / if user.email ends with ["gmail.com", "split.io", "hotmail.com"] size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -567,7 +567,7 @@ test('PARSER / if user.email does not end with ["split.io"] then not match', asy size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@gmail.io' @@ -599,7 +599,7 @@ test('PARSER / if user.email is an EMPTY string, end with ["nico"] should not ma treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -629,7 +629,7 @@ test('PARSER / if user.email is not a string, end with ["nico"] should not match treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: [] @@ -665,7 +665,7 @@ test('PARSER / NEGATED if user.email ends with ["split.io"] then split 100:on, s size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -697,7 +697,7 @@ test('PARSER / NEGATED if user.email does not end with ["split.io"] then no matc size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@gmail.io' @@ -730,7 +730,7 @@ test('PARSER / NEGATED if 
user.email is an EMPTY string, end with ["nico"] shoul size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -762,7 +762,7 @@ test('PARSER / NEGATED if user.email is not a string, end with ["nico"] should n size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: NaN @@ -801,7 +801,7 @@ test('PARSER / if user.email contains ["@split"] then split 100:on', async () => size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -835,7 +835,7 @@ test('PARSER / if user.email = 123, contains ["2"] then split 100:on should matc size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 123 @@ -869,7 +869,7 @@ test('PARSER / if user.email contains ["@split"] (beginning) then split 100:on', size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '@split.io.com.ar' @@ -903,7 +903,7 @@ test('PARSER / if user.email contains ["@split"] (end) then split 100:on', async size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split' @@ -937,7 +937,7 @@ test('PARSER / if user.email contains ["@split"] (whole string matches) then spl size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '@split' @@ -971,7 +971,7 @@ test('PARSER / if user.email contains ["@split", "@gmail", "@hotmail"] then 
spli size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nico@hotmail.com' @@ -1005,7 +1005,7 @@ test('PARSER / if user.email contains ["@split", "@gmail", "@hotmail"] then spli size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nico@gmail.com' @@ -1039,7 +1039,7 @@ test('PARSER / if user.email does not contain ["@split"] then not match', async size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@gmail.io' @@ -1071,7 +1071,7 @@ test('PARSER / if user.email is an EMPTY string, contains ["nico"] should not ma treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -1101,7 +1101,7 @@ test('PARSER / if user.email is not a string, contains ["nico"] should not match treatment: 'on', size: 100 }] - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: null @@ -1137,7 +1137,7 @@ test('PARSER / NEGATED if user.email contains ["@split"] then split 100:on, then size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'nicolas.zelaya@split.io' @@ -1170,7 +1170,7 @@ test('PARSER / NEGATED if user.email does not contain ["@split"] then not match, size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: 'facundo@gmail.io' @@ -1203,7 +1203,7 @@ test('PARSER / NEGATED if user.email is an EMPTY string, contains ["nico"] shoul size: 
100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); const evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: '' @@ -1235,7 +1235,7 @@ test('PARSER / NEGATED if user.email is not a string, contains ["nico"] should n size: 100 }], label: label - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); let evaluation = await evaluator(keyParser('a key'), 31, 100, 31, { email: () => { } diff --git a/src/evaluator/parser/__tests__/trafficAllocation.spec.ts b/src/evaluator/parser/__tests__/trafficAllocation.spec.ts index a71d6dee..2debd783 100644 --- a/src/evaluator/parser/__tests__/trafficAllocation.spec.ts +++ b/src/evaluator/parser/__tests__/trafficAllocation.spec.ts @@ -1,7 +1,7 @@ // @ts-nocheck import { parser } from '..'; import { keyParser } from '../../../utils/key'; -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { IEvaluation } from '../../types'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; @@ -23,7 +23,7 @@ test('PARSER / if user is in segment all 100%:on but trafficAllocation is 0%', a size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); // @ts-ignore const evaluation = await evaluator(keyParser('a key'), 31, 0, 31) as IEvaluation; @@ -50,7 +50,7 @@ test('PARSER / if user is in segment all 100%:on but trafficAllocation is 99% wi size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); // @ts-ignore const evaluation = await evaluator(keyParser('a key'), 31, 99, 31) as IEvaluation; @@ -77,7 +77,7 @@ test('PARSER / if user is in segment all 100%:on but trafficAllocation is 99% an size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); // @ts-ignore const evaluation = await evaluator(keyParser('a48'), 31, 99, 14) as IEvaluation; // murmur3.bucket('a48', 14) === 100 @@ -124,7 +124,7 
@@ test('PARSER / if user is whitelisted and in segment all 100%:off with trafficAl size: 100 }], label: 'in segment all' - }] as ISplitCondition[]); + }] as IDefinitionCondition[]); // @ts-ignore const evaluation = await evaluator(keyParser('a key'), 31, 0, 31) as IEvaluation; diff --git a/src/evaluator/parser/index.ts b/src/evaluator/parser/index.ts index d12edf1a..ce66d67d 100644 --- a/src/evaluator/parser/index.ts +++ b/src/evaluator/parser/index.ts @@ -6,14 +6,14 @@ import { conditionContext } from '../condition'; import { ifElseIfCombinerContext } from '../combiners/ifelseif'; import { andCombinerContext } from '../combiners/and'; import { thenable } from '../../utils/promise/thenable'; -import { IEvaluator, IMatcherDto, ISplitEvaluator } from '../types'; -import { ISplitCondition, MaybeThenable } from '../../dtos/types'; +import { IEvaluator, IMatcherDto, IDefinitionEvaluator } from '../types'; +import { IDefinitionCondition, MaybeThenable } from '../../dtos/types'; import { IStorageAsync, IStorageSync } from '../../storages/types'; import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; import { ENGINE_MATCHER_ERROR, ENGINE_MATCHER_RESULT } from '../../logger/constants'; -export function parser(log: ILogger, conditions: ISplitCondition[], storage: IStorageSync | IStorageAsync): IEvaluator { +export function parser(log: ILogger, conditions: IDefinitionCondition[], storage: IStorageSync | IStorageAsync): IEvaluator { let predicates = []; for (let i = 0; i < conditions.length; i++) { @@ -37,7 +37,7 @@ export function parser(log: ILogger, conditions: ISplitCondition[], storage: ISt } // Evaluator function. 
- return (key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) => { + return (key: SplitIO.SplitKey, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) => { const value = sanitizeValue(log, key, matcherDto, attributes); let result: MaybeThenable = false; diff --git a/src/evaluator/treatments/index.ts b/src/evaluator/treatments/index.ts index 35f7da28..d9812aa7 100644 --- a/src/evaluator/treatments/index.ts +++ b/src/evaluator/treatments/index.ts @@ -1,4 +1,4 @@ -import { ISplitPartition } from '../../dtos/types'; +import { IDefinitionPartition } from '../../dtos/types'; import { findIndex } from '../../utils/lang'; export class Treatments { @@ -15,7 +15,7 @@ export class Treatments { this._treatments = treatments; } - static parse(data: ISplitPartition[]) { + static parse(data: IDefinitionPartition[]) { let { ranges, treatments } = data.reduce((accum, value) => { let { size, treatment } = value; diff --git a/src/evaluator/types.ts b/src/evaluator/types.ts index 42900f06..d87e2489 100644 --- a/src/evaluator/types.ts +++ b/src/evaluator/types.ts @@ -22,13 +22,13 @@ export interface IEvaluation { treatment?: string, label: string, changeNumber?: number, - config?: string | null + config?: string | null | SplitIO.JsonObject } export type IEvaluationResult = IEvaluation & { treatment: string; impressionsDisabled?: boolean } -export type ISplitEvaluator = (log: ILogger, key: SplitIO.SplitKey, splitName: string, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync) => MaybeThenable +export type IDefinitionEvaluator = (log: ILogger, key: SplitIO.SplitKey, definitionName: string, attributes: SplitIO.Attributes | undefined, storage: IStorageSync | IStorageAsync) => MaybeThenable -export type IEvaluator = (key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, splitEvaluator?: ISplitEvaluator) => MaybeThenable 
+export type IEvaluator = (key: SplitIO.SplitKeyObject, seed?: number, trafficAllocation?: number, trafficAllocationSeed?: number, attributes?: SplitIO.Attributes, splitEvaluator?: IDefinitionEvaluator) => MaybeThenable -export type IMatcher = (value: string | number | boolean | string[] | IDependencyMatcherValue, splitEvaluator?: ISplitEvaluator) => MaybeThenable +export type IMatcher = (value: string | number | boolean | string[] | IDependencyMatcherValue, definitionEvaluator?: IDefinitionEvaluator) => MaybeThenable diff --git a/src/listeners/__tests__/browser.spec.ts b/src/listeners/__tests__/browser.spec.ts index e1d532df..38a53769 100644 --- a/src/listeners/__tests__/browser.spec.ts +++ b/src/listeners/__tests__/browser.spec.ts @@ -161,7 +161,7 @@ function assertStop(listener: BrowserSignalListener) { test('Browser JS listener / consumer mode', () => { // No SyncManager ==> consumer mode // @ts-expect-error - const listener = new BrowserSignalListener(undefined, fullSettings, fakeStorageOptimized, fakeSplitApi); + const listener = new BrowserSignalListener({ syncManager: undefined, settings: fullSettings, storage: fakeStorageOptimized, splitApi: fakeSplitApi }); listener.start(); assertStart(listener); @@ -186,7 +186,7 @@ test('Browser JS listener / standalone mode / Impressions optimized mode with te const syncManagerMock = {}; // @ts-expect-error - const listener = new BrowserSignalListener(syncManagerMock, fullSettings, fakeStorageOptimized, fakeSplitApi); + const listener = new BrowserSignalListener({ syncManager: syncManagerMock, settings: fullSettings, storage: fakeStorageOptimized, splitApi: fakeSplitApi }); listener.start(); assertStart(listener); @@ -211,7 +211,7 @@ test('Browser JS listener / standalone mode / Impressions debug mode', () => { const syncManagerMock = {}; // @ts-expect-error - const listener = new BrowserSignalListener(syncManagerMock, fullSettings, fakeStorageDebug, fakeSplitApi); + const listener = new BrowserSignalListener({ 
syncManager: syncManagerMock, settings: fullSettings, storage: fakeStorageDebug, splitApi: fakeSplitApi }); listener.start(); assertStart(listener); @@ -240,7 +240,7 @@ test('Browser JS listener / standalone mode / Impressions debug mode', () => { test('Browser JS listener / standalone mode / Fallback to regular Fetch transport', () => { function runBrowserListener() { // @ts-expect-error - const listener = new BrowserSignalListener({}, fullSettings, fakeStorageDebug, fakeSplitApi); + const listener = new BrowserSignalListener({ syncManager: {}, settings: fullSettings, storage: fakeStorageDebug, splitApi: fakeSplitApi }); listener.start(); // Trigger data flush triggerEvent(VISIBILITYCHANGE_EVENT, 'hidden'); @@ -276,7 +276,7 @@ test('Browser JS listener / standalone mode / user consent status', () => { const settings = { ...fullSettings }; // @ts-expect-error - const listener = new BrowserSignalListener(syncManagerMock, settings, fakeStorageOptimized, fakeSplitApi); + const listener = new BrowserSignalListener({ syncManager: syncManagerMock, settings, storage: fakeStorageOptimized, splitApi: fakeSplitApi }); listener.start(); diff --git a/src/listeners/__tests__/node.spec.ts b/src/listeners/__tests__/node.spec.ts index 15fe6253..910b0021 100644 --- a/src/listeners/__tests__/node.spec.ts +++ b/src/listeners/__tests__/node.spec.ts @@ -8,7 +8,7 @@ const processKillSpy = jest.spyOn(process, 'kill').mockImplementation(() => true test('NodeSignalListener / Signal Listener class methods and start/stop functionality', () => { const syncManagerMock = { flush: jest.fn() }; // @ts-expect-error - const listener = new NodeSignalListener(syncManagerMock, fullSettings); + const listener = new NodeSignalListener({ syncManager: syncManagerMock, settings: fullSettings }); listener.start(); @@ -28,7 +28,7 @@ test('NodeSignalListener / Signal Listener class methods and start/stop function test('NodeSignalListener / Signal Listener SIGTERM callback with sync handler', () => { const 
syncManagerMock = { flush: jest.fn() }; // @ts-expect-error - const listener = new NodeSignalListener(syncManagerMock, fullSettings); + const listener = new NodeSignalListener({ syncManager: syncManagerMock, settings: fullSettings }); listener.start(); // Stub stop function since we don't want side effects on test. @@ -57,7 +57,7 @@ test('NodeSignalListener / Signal Listener SIGTERM callback with sync handler', test('NodeSignalListener / Signal Listener SIGTERM callback with sync handler that throws an error', () => { const syncManagerMock = { flush: jest.fn(() => { throw 'some error'; }) }; // @ts-expect-error - const listener = new NodeSignalListener(syncManagerMock, fullSettings); + const listener = new NodeSignalListener({ syncManager: syncManagerMock, settings: fullSettings }); listener.start(); // Stub stop function since we don't want side effects on test. @@ -94,7 +94,7 @@ test('NodeSignalListener / Signal Listener SIGTERM callback with async handler', }); const syncManagerMock = { flush: jest.fn(() => fakePromise) }; // @ts-expect-error - const listener = new NodeSignalListener(syncManagerMock, fullSettings); + const listener = new NodeSignalListener({ syncManager: syncManagerMock, settings: fullSettings }); // Stub stop function since we don't want side effects on test. jest.spyOn(listener, 'stop'); @@ -134,7 +134,7 @@ test('NodeSignalListener / Signal Listener SIGTERM callback with async handler t }); const syncManagerMock = { flush: jest.fn(() => fakePromise) }; // @ts-expect-error - const listener = new NodeSignalListener(syncManagerMock, fullSettings); + const listener = new NodeSignalListener({ syncManager: syncManagerMock, settings: fullSettings }); // Stub stop function since we don't want side effects on test. 
jest.spyOn(listener, 'stop'); diff --git a/src/listeners/browser.ts b/src/listeners/browser.ts index 0a1616d1..0792c3a6 100644 --- a/src/listeners/browser.ts +++ b/src/listeners/browser.ts @@ -1,5 +1,4 @@ /* eslint-disable no-undef */ -// @TODO eventually migrate to JS-Browser-SDK package. import { ISignalListener } from './types'; import { IRecorderCacheSync, IStorageSync } from '../storages/types'; import { fromImpressionsCollector } from '../sync/submitters/impressionsSubmitter'; @@ -12,6 +11,7 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { CLEANUP_REGISTERING, CLEANUP_DEREGISTERING, SUBMITTERS_PUSH_PAGE_HIDDEN } from '../logger/constants'; import { ISyncManager } from '../sync/types'; import { isConsentGranted } from '../consent'; +import { ISdkFactoryContextSync } from '../sdkFactory/types'; const VISIBILITYCHANGE_EVENT = 'visibilitychange'; const PAGEHIDE_EVENT = 'pagehide'; @@ -22,14 +22,17 @@ const EVENT_NAME = 'for visibilitychange and pagehide events.'; */ export class BrowserSignalListener implements ISignalListener { + private syncManager: ISyncManager | undefined; + private settings: ISettings; + private storage: IStorageSync; + private serviceApi: ISplitApi; private fromImpressionsCollector: (data: SplitIO.ImpressionDTO[]) => ImpressionsPayload; - constructor( - private syncManager: ISyncManager | undefined, - private settings: ISettings, - private storage: IStorageSync, - private serviceApi: ISplitApi, - ) { + constructor({ syncManager, settings, storage, splitApi }: ISdkFactoryContextSync) { + this.syncManager = syncManager; + this.settings = settings; + this.storage = storage; + this.serviceApi = splitApi; this.flushData = this.flushData.bind(this); this.flushDataIfHidden = this.flushDataIfHidden.bind(this); this.fromImpressionsCollector = fromImpressionsCollector.bind(undefined, settings.core.labelsEnabled); diff --git a/src/listeners/node.ts b/src/listeners/node.ts index cd957a40..ab382bc7 100644 --- 
a/src/listeners/node.ts +++ b/src/listeners/node.ts @@ -4,7 +4,7 @@ import { thenable } from '../utils/promise/thenable'; import { MaybeThenable } from '../dtos/types'; import { ISettings } from '../types'; import { LOG_PREFIX_CLEANUP, CLEANUP_REGISTERING, CLEANUP_DEREGISTERING } from '../logger/constants'; -import { ISyncManager } from '../sync/types'; +import { ISdkFactoryContext } from '../sdkFactory/types'; const SIGTERM = 'SIGTERM'; const EVENT_NAME = 'for SIGTERM signal.'; @@ -21,10 +21,7 @@ export class NodeSignalListener implements ISignalListener { private handler: () => MaybeThenable; private settings: ISettings; - constructor( - syncManager: ISyncManager | undefined, // private handler: () => MaybeThenable, - settings: ISettings - ) { + constructor({ syncManager, settings }: ISdkFactoryContext) { // @TODO review handler logic when implementing Node.js SDK this.handler = function () { if (syncManager) { diff --git a/src/logger/constants.ts b/src/logger/constants.ts index 0a541f95..b7c11d89 100644 --- a/src/logger/constants.ts +++ b/src/logger/constants.ts @@ -19,9 +19,8 @@ export const RETRIEVE_CLIENT_DEFAULT = 27; export const RETRIEVE_CLIENT_EXISTING = 28; export const RETRIEVE_MANAGER = 29; export const SYNC_OFFLINE_DATA = 30; -export const SYNC_SPLITS_FETCH = 31; -export const SYNC_SPLITS_UPDATE = 32; -export const SYNC_RBS_UPDATE = 33; +export const SYNC_FETCH = 31; +export const SYNC_UPDATE = 32; export const STREAMING_NEW_MESSAGE = 35; export const SYNC_TASK_START = 36; export const SYNC_TASK_EXECUTE = 37; @@ -38,7 +37,6 @@ export const NEW_FACTORY = 105; export const POLLING_SMART_PAUSING = 106; export const POLLING_START = 107; export const POLLING_STOP = 108; -export const SYNC_SPLITS_FETCH_RETRY = 109; export const STREAMING_REFRESH_TOKEN = 110; export const STREAMING_RECONNECT = 111; export const STREAMING_CONNECTING = 112; @@ -60,8 +58,8 @@ export const ENGINE_VALUE_INVALID = 200; export const ENGINE_VALUE_NO_ATTRIBUTES = 201; export const 
CLIENT_NO_LISTENER = 202; export const CLIENT_NOT_READY_FROM_CACHE = 203; -export const SYNC_MYSEGMENTS_FETCH_RETRY = 204; -export const SYNC_SPLITS_FETCH_FAILS = 205; +export const SYNC_FETCH_RETRY = 204; +export const SYNC_FETCH_FAILS = 205; export const STREAMING_PARSING_ERROR_FAILS = 206; export const STREAMING_PARSING_MESSAGE_FAILS = 207; export const STREAMING_FALLBACK = 208; @@ -71,7 +69,7 @@ export const WARN_SETTING_NULL = 211; export const WARN_TRIMMING_PROPERTIES = 212; export const WARN_CONVERTING = 213; export const WARN_TRIMMING = 214; -export const WARN_NOT_EXISTENT_SPLIT = 215; +export const WARN_NOT_EXISTENT_DEFINITION = 215; export const WARN_LOWERCASE_TRAFFIC_TYPE = 216; export const WARN_NOT_EXISTENT_TT = 217; export const WARN_INTEGRATION_INVALID = 218; @@ -124,15 +122,12 @@ export const LOG_PREFIX_ENGINE = 'engine'; export const LOG_PREFIX_ENGINE_COMBINER = LOG_PREFIX_ENGINE + ':combiner: '; export const LOG_PREFIX_ENGINE_MATCHER = LOG_PREFIX_ENGINE + ':matcher: '; export const LOG_PREFIX_ENGINE_VALUE = LOG_PREFIX_ENGINE + ':value: '; -export const LOG_PREFIX_SYNC = 'sync'; -export const LOG_PREFIX_SYNC_MANAGER = LOG_PREFIX_SYNC + ':sync-manager: '; -export const LOG_PREFIX_SYNC_OFFLINE = LOG_PREFIX_SYNC + ':offline: '; -export const LOG_PREFIX_SYNC_STREAMING = LOG_PREFIX_SYNC + ':streaming: '; -export const LOG_PREFIX_SYNC_SPLITS = LOG_PREFIX_SYNC + ':featureflag-changes: '; -export const LOG_PREFIX_SYNC_SEGMENTS = LOG_PREFIX_SYNC + ':segment-changes: '; -export const LOG_PREFIX_SYNC_MYSEGMENTS = LOG_PREFIX_SYNC + ':my-segments: '; -export const LOG_PREFIX_SYNC_POLLING = LOG_PREFIX_SYNC + ':polling-manager: '; -export const LOG_PREFIX_SYNC_SUBMITTERS = LOG_PREFIX_SYNC + ':submitter: '; +export const LOG_PREFIX_SYNC = 'sync: '; +export const LOG_PREFIX_SYNC_MANAGER = 'sync:sync-manager: '; +export const LOG_PREFIX_SYNC_OFFLINE = 'sync:offline: '; +export const LOG_PREFIX_SYNC_STREAMING = 'sync:streaming: '; +export const 
LOG_PREFIX_SYNC_POLLING = 'sync:polling-manager: '; +export const LOG_PREFIX_SYNC_SUBMITTERS = 'sync:submitter: '; export const LOG_PREFIX_IMPRESSIONS_TRACKER = 'impressions-tracker: '; export const LOG_PREFIX_EVENTS_TRACKER = 'events-tracker: '; export const LOG_PREFIX_UNIQUE_KEYS_TRACKER = 'unique-keys-tracker: '; diff --git a/src/logger/messages/debug.ts b/src/logger/messages/debug.ts index c5e67dff..f5110029 100644 --- a/src/logger/messages/debug.ts +++ b/src/logger/messages/debug.ts @@ -21,13 +21,12 @@ export const codesDebug: [number, string][] = codesInfo.concat([ [c.RETRIEVE_MANAGER, 'Retrieving manager instance.'], // synchronizer [c.SYNC_OFFLINE_DATA, c.LOG_PREFIX_SYNC_OFFLINE + 'Feature flags data: \n%s'], - [c.SYNC_SPLITS_FETCH, c.LOG_PREFIX_SYNC_SPLITS + 'Spin up feature flags update using since = %s and rbSince = %s.'], - [c.SYNC_SPLITS_UPDATE, c.LOG_PREFIX_SYNC_SPLITS + 'New feature flags: %s. Removed feature flags: %s.'], - [c.SYNC_RBS_UPDATE, c.LOG_PREFIX_SYNC_SPLITS + 'New rule-based segments: %s. Removed rule-based segments: %s.'], + [c.SYNC_FETCH, c.LOG_PREFIX_SYNC + 'Spin up %s updates using since = %s and rbSince = %s.'], + [c.SYNC_UPDATE, c.LOG_PREFIX_SYNC + 'New %s: %s. Removed: %s.'], [c.STREAMING_NEW_MESSAGE, c.LOG_PREFIX_SYNC_STREAMING + 'New SSE message received, with data: %s.'], - [c.SYNC_TASK_START, c.LOG_PREFIX_SYNC + ': Starting %s. Running each %s millis'], - [c.SYNC_TASK_EXECUTE, c.LOG_PREFIX_SYNC + ': Running %s'], - [c.SYNC_TASK_STOP, c.LOG_PREFIX_SYNC + ': Stopping %s'], + [c.SYNC_TASK_START, c.LOG_PREFIX_SYNC + 'Starting %s. 
Running each %s millis'], + [c.SYNC_TASK_EXECUTE, c.LOG_PREFIX_SYNC + 'Running %s'], + [c.SYNC_TASK_STOP, c.LOG_PREFIX_SYNC + 'Stopping %s'], // initialization / settings validation [c.SETTINGS_SPLITS_FILTER, c.LOG_PREFIX_SETTINGS + ': feature flags filtering criteria is "%s".'] ]); diff --git a/src/logger/messages/info.ts b/src/logger/messages/info.ts index f8e230ac..ae9c0ac4 100644 --- a/src/logger/messages/info.ts +++ b/src/logger/messages/info.ts @@ -8,7 +8,7 @@ export const codesInfo: [number, string][] = codesWarn.concat([ [c.CLIENT_READY_FROM_CACHE, READY_MSG + ' from cache'], [c.CLIENT_READY, READY_MSG], // SDK - [c.IMPRESSION_QUEUEING, c.LOG_PREFIX_IMPRESSIONS_TRACKER +'Queueing impression. Feature flag: %s. Key: %s. Evaluation: %s. Label: %s'], + [c.IMPRESSION_QUEUEING, c.LOG_PREFIX_IMPRESSIONS_TRACKER +'Queueing impression. %s: %s. Key: %s. Evaluation: %s. Label: %s'], [c.NEW_SHARED_CLIENT, 'New shared client instance created.'], [c.NEW_FACTORY, 'New Split SDK instance created. %s'], [c.EVENTS_TRACKER_SUCCESS, c.LOG_PREFIX_EVENTS_TRACKER + 'Successfully queued %s'], @@ -21,7 +21,6 @@ export const codesInfo: [number, string][] = codesWarn.concat([ [c.POLLING_SMART_PAUSING, c.LOG_PREFIX_SYNC_POLLING + 'Turning segments data polling %s.'], [c.POLLING_START, c.LOG_PREFIX_SYNC_POLLING + 'Starting polling'], [c.POLLING_STOP, c.LOG_PREFIX_SYNC_POLLING + 'Stopping polling'], - [c.SYNC_SPLITS_FETCH_RETRY, c.LOG_PREFIX_SYNC_SPLITS + 'Retrying fetch of feature flags (attempt #%s). 
Reason: %s'], [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and resetting timer.'], [c.SUBMITTERS_PUSH, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Pushing %s.'], [c.SUBMITTERS_PUSH_PAGE_HIDDEN, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing %s because page became hidden.'], diff --git a/src/logger/messages/warn.ts b/src/logger/messages/warn.ts index 4bd74dd6..16fe06ca 100644 --- a/src/logger/messages/warn.ts +++ b/src/logger/messages/warn.ts @@ -6,24 +6,24 @@ export const codesWarn: [number, string][] = codesError.concat([ [c.ENGINE_VALUE_INVALID, c.LOG_PREFIX_ENGINE_VALUE + 'Value %s doesn\'t match with expected type.'], [c.ENGINE_VALUE_NO_ATTRIBUTES, c.LOG_PREFIX_ENGINE_VALUE + 'Defined attribute `%s`. No attributes received.'], // synchronizer - [c.SYNC_MYSEGMENTS_FETCH_RETRY, c.LOG_PREFIX_SYNC_MYSEGMENTS + 'Retrying fetch of memberships (attempt #%s). Reason: %s'], - [c.SYNC_SPLITS_FETCH_FAILS, c.LOG_PREFIX_SYNC_SPLITS + 'Error while doing fetch of feature flags. %s'], + [c.SYNC_FETCH_RETRY, c.LOG_PREFIX_SYNC + 'Retrying fetch of %s (attempt #%s). Reason: %s'], + [c.SYNC_FETCH_FAILS, c.LOG_PREFIX_SYNC + 'Error while doing fetch of %s. %s'], [c.STREAMING_PARSING_ERROR_FAILS, c.LOG_PREFIX_SYNC_STREAMING + 'Error parsing SSE error notification: %s'], [c.STREAMING_PARSING_MESSAGE_FAILS, c.LOG_PREFIX_SYNC_STREAMING + 'Error parsing SSE message notification: %s'], [c.STREAMING_FALLBACK, c.LOG_PREFIX_SYNC_STREAMING + 'Falling back to polling mode. Reason: %s'], [c.SUBMITTERS_PUSH_FAILS, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Dropping %s after retry. Reason: %s.'], [c.SUBMITTERS_PUSH_RETRY, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Failed to push %s, keeping data to retry on next iteration. Reason: %s.'], // client status - [c.CLIENT_NOT_READY_FROM_CACHE, '%s: the SDK is not ready to evaluate. Results may be incorrect%s. Make sure to wait for SDK readiness before using this method.'], + [c.CLIENT_NOT_READY_FROM_CACHE, '%s: the SDK is not ready to evaluate. 
Results may be incorrect. Make sure to wait for SDK readiness before using this method.'], [c.CLIENT_NO_LISTENER, 'No listeners for SDK_READY event detected. Incorrect control treatments could have been logged if you called getTreatment/s while the SDK was not yet synchronized with the backend.'], // input validation [c.WARN_SETTING_NULL, '%s: Property "%s" is of invalid type. Setting value to null.'], [c.WARN_TRIMMING_PROPERTIES, '%s: more than 300 properties were provided. Some of them will be trimmed when processed.'], [c.WARN_CONVERTING, '%s: %s "%s" is not of type string, converting.'], [c.WARN_TRIMMING, '%s: %s "%s" has extra whitespace, trimming.'], - [c.WARN_NOT_EXISTENT_SPLIT, '%s: feature flag "%s" does not exist in this environment. Please double check what feature flags exist in the Split user interface.'], + [c.WARN_NOT_EXISTENT_DEFINITION, '%s: "%s" does not exist in this environment. Please double check that it exists in Harness UI.'], [c.WARN_LOWERCASE_TRAFFIC_TYPE, '%s: traffic_type_name should be all lowercase - converting string to lowercase.'], - [c.WARN_NOT_EXISTENT_TT, '%s: traffic type "%s" does not have any corresponding feature flag in this environment, make sure you\'re tracking your events to a valid traffic type defined in the Split user interface.'], + [c.WARN_NOT_EXISTENT_TT, '%s: traffic type "%s" does not have any corresponding feature flag in this environment, make sure you\'re tracking your events to a valid traffic type defined in Harness UI.'], [c.WARN_FLAGSET_NOT_CONFIGURED, '%s: you passed %s which is not part of the configured FlagSetsFilter, ignoring Flag Set.'], // initialization / settings validation [c.WARN_INTEGRATION_INVALID, c.LOG_PREFIX_SETTINGS + ': %s integration item(s) at settings is invalid. 
%s'], @@ -36,5 +36,5 @@ export const codesWarn: [number, string][] = codesError.concat([ [c.STREAMING_PARSING_SPLIT_UPDATE, c.LOG_PREFIX_SYNC_STREAMING + 'Fetching SplitChanges due to an error processing %s notification: %s'], [c.WARN_INVALID_FLAGSET, '%s: you passed %s, flag set must adhere to the regular expressions %s. This means a flag set must start with a letter or number, be in lowercase, alphanumeric and have a max length of 50 characters. %s was discarded.'], [c.WARN_LOWERCASE_FLAGSET, '%s: flag set %s should be all lowercase - converting string to lowercase.'], - [c.WARN_FLAGSET_WITHOUT_FLAGS, '%s: you passed %s flag set that does not contain cached feature flag names. Please double check what flag sets are in use in the Split user interface.'], + [c.WARN_FLAGSET_WITHOUT_FLAGS, '%s: you passed %s flag set that does not contain cached feature flag names. Please double check what flag sets are in use in Harness UI.'], ]); diff --git a/src/platform/browser.ts b/src/platform/browser.ts new file mode 100644 index 00000000..e2e78059 --- /dev/null +++ b/src/platform/browser.ts @@ -0,0 +1,14 @@ +import { EventEmitter } from '../utils/EventEmitter'; +import { getFetch } from '../platform/getFetch/browser'; +import { getEventSource } from '../platform/getEventSource/browser'; +import { BrowserSignalListener } from '../listeners/browser'; +import { now } from '../utils/timeTracker/now/browser'; +import { IPlatform } from '../sdkFactory/types'; + +export const platform: IPlatform = { + getFetch, + getEventSource, + EventEmitter, + now, + SignalListener: BrowserSignalListener as IPlatform['SignalListener'] +}; diff --git a/src/platform/getEventSource/__tests__/browser.spec.ts b/src/platform/getEventSource/__tests__/browser.spec.ts new file mode 100644 index 00000000..4526a6ee --- /dev/null +++ b/src/platform/getEventSource/__tests__/browser.spec.ts @@ -0,0 +1,15 @@ +import { getEventSource } from '../browser'; + +test('getEventSource returns global EventSource in 
Browser', () => { + const EventSourceMock = jest.fn(); + const originalEventSource = global.EventSource; + global.EventSource = EventSourceMock as any; + + expect(getEventSource()).toBe(EventSourceMock); + + global.EventSource = originalEventSource; +}); + +test('getEventSource returns undefined when EventSource is not available', () => { + expect(getEventSource()).toBeUndefined(); +}); diff --git a/src/platform/getEventSource/__tests__/node.spec.ts b/src/platform/getEventSource/__tests__/node.spec.ts new file mode 100644 index 00000000..f0f235e3 --- /dev/null +++ b/src/platform/getEventSource/__tests__/node.spec.ts @@ -0,0 +1,5 @@ +import { getEventSource } from '../node'; + +test('getEventSource returns eventsource module in Node', () => { + expect(getEventSource()).toBe(require('../eventsource')); +}); diff --git a/src/platform/getEventSource/browser.ts b/src/platform/getEventSource/browser.ts new file mode 100644 index 00000000..aee4b510 --- /dev/null +++ b/src/platform/getEventSource/browser.ts @@ -0,0 +1,4 @@ +export function getEventSource() { + // eslint-disable-next-line no-undef -- Feature detection for EventSource + return typeof EventSource === 'function' ? EventSource : undefined; +} diff --git a/src/platform/getEventSource/eventsource.js b/src/platform/getEventSource/eventsource.js new file mode 100644 index 00000000..25316e74 --- /dev/null +++ b/src/platform/getEventSource/eventsource.js @@ -0,0 +1,519 @@ +/* eslint-disable no-prototype-builtins */ +/* eslint-disable no-restricted-syntax */ +/* +Modified version of "eventsource" v1.1.2 package (https://www.npmjs.com/package/eventsource/v/1.1.2) that accepts a custom agent. +Requires CommonJS and Node.js globals (require, process, Buffer). 
+ +The MIT License + +Copyright (c) EventSource GitHub organization + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ +var parse = require('url').parse; +var events = require('events'); +var https = require('https'); +var http = require('http'); +var util = require('util'); + +var httpsOptions = [ + 'pfx', 'key', 'passphrase', 'cert', 'ca', 'ciphers', + 'rejectUnauthorized', 'secureProtocol', 'servername', 'checkServerIdentity' +]; + +var bom = [239, 187, 191]; +var colon = 58; +var space = 32; +var lineFeed = 10; +var carriageReturn = 13; + +function hasBom(buf) { + return bom.every(function (charCode, index) { + return buf[index] === charCode; + }); +} + +/** + * Creates a new EventSource object + * + * @param {String} url the URL to which to connect + * @param {Object} [eventSourceInitDict] extra init params. See README for details. 
+ * @api public + **/ +function EventSource(url, eventSourceInitDict) { + var readyState = EventSource.CONNECTING; + var headers = eventSourceInitDict && eventSourceInitDict.headers; + var hasNewOrigin = false; + Object.defineProperty(this, 'readyState', { + get: function () { + return readyState; + } + }); + + Object.defineProperty(this, 'url', { + get: function () { + return url; + } + }); + + var self = this; + self.reconnectInterval = 1000; + self.connectionInProgress = false; + + var reconnectUrl = null; + + function onConnectionClosed(message) { + if (readyState === EventSource.CLOSED) return; + readyState = EventSource.CONNECTING; + _emit('error', new Event('error', { message: message })); + + // The url may have been changed by a temporary redirect. If that's the case, + // revert it now, and flag that we are no longer pointing to a new origin + if (reconnectUrl) { + url = reconnectUrl; + reconnectUrl = null; + hasNewOrigin = false; + } + setTimeout(function () { + if (readyState !== EventSource.CONNECTING || self.connectionInProgress) { + return; + } + self.connectionInProgress = true; + connect(); + }, self.reconnectInterval); + } + + var req; + var lastEventId = ''; + if (headers && headers['Last-Event-ID']) { + lastEventId = headers['Last-Event-ID']; + delete headers['Last-Event-ID']; + } + + var discardTrailingNewline = false; + var data = ''; + var eventName = ''; + + function connect() { + var options = parse(url); + var isSecure = options.protocol === 'https:'; + options.headers = { 'Cache-Control': 'no-cache', 'Accept': 'text/event-stream' }; + if (lastEventId) options.headers['Last-Event-ID'] = lastEventId; + if (headers) { + var reqHeaders = hasNewOrigin ? 
removeUnsafeHeaders(headers) : headers; + for (var i in reqHeaders) { + var header = reqHeaders[i]; + if (header) { + options.headers[i] = header; + } + } + } + + // Legacy: this should be specified as `eventSourceInitDict.https.rejectUnauthorized`, + // but for now exists as a backwards-compatibility layer + options.rejectUnauthorized = !(eventSourceInitDict && !eventSourceInitDict.rejectUnauthorized); + + if (eventSourceInitDict && eventSourceInitDict.createConnection !== undefined) { + options.createConnection = eventSourceInitDict.createConnection; + } + + // If specify agent, use it. + if (eventSourceInitDict && eventSourceInitDict.agent !== undefined) { + options.agent = eventSourceInitDict.agent; + } + + // If specify http proxy, make the request to sent to the proxy server, + // and include the original url in path and Host headers + var useProxy = eventSourceInitDict && eventSourceInitDict.proxy; + if (useProxy) { + var proxy = parse(eventSourceInitDict.proxy); + isSecure = proxy.protocol === 'https:'; + + options.protocol = isSecure ? 'https:' : 'http:'; + options.path = url; + options.headers.Host = options.host; + options.hostname = proxy.hostname; + options.host = proxy.host; + options.port = proxy.port; + } + + // If https options are specified, merge them into the request options + if (eventSourceInitDict && eventSourceInitDict.https) { + for (var optName in eventSourceInitDict.https) { + if (httpsOptions.indexOf(optName) === -1) { + continue; + } + + var option = eventSourceInitDict.https[optName]; + if (option !== undefined) { + options[optName] = option; + } + } + } + + // Pass this on to the XHR + if (eventSourceInitDict && eventSourceInitDict.withCredentials !== undefined) { + options.withCredentials = eventSourceInitDict.withCredentials; + } + + req = (isSecure ? 
https : http).request(options, function (res) { + self.connectionInProgress = false; + // Handle HTTP errors + if (res.statusCode === 500 || res.statusCode === 502 || res.statusCode === 503 || res.statusCode === 504) { + _emit('error', new Event('error', { status: res.statusCode, message: res.statusMessage })); + onConnectionClosed(); + return; + } + + // Handle HTTP redirects + if (res.statusCode === 301 || res.statusCode === 302 || res.statusCode === 307) { + var location = res.headers.location; + if (!location) { + // Server sent redirect response without Location header. + _emit('error', new Event('error', { status: res.statusCode, message: res.statusMessage })); + return; + } + var prevOrigin = getOrigin(url); + var nextOrigin = getOrigin(location); + hasNewOrigin = prevOrigin !== nextOrigin; + if (res.statusCode === 307) reconnectUrl = url; + url = location; + process.nextTick(connect); + return; + } + + if (res.statusCode !== 200) { + _emit('error', new Event('error', { status: res.statusCode, message: res.statusMessage })); + return self.close(); + } + + readyState = EventSource.OPEN; + res.on('close', function () { + res.removeAllListeners('close'); + res.removeAllListeners('end'); + onConnectionClosed(); + }); + + res.on('end', function () { + res.removeAllListeners('close'); + res.removeAllListeners('end'); + onConnectionClosed(); + }); + _emit('open', new Event('open')); + + // text/event-stream parser adapted from webkit's + // Source/WebCore/page/EventSource.cpp + var isFirst = true; + var buf; + var startingPos = 0; + var startingFieldLength = -1; + res.on('data', function (chunk) { + buf = buf ? 
Buffer.concat([buf, chunk]) : chunk; + if (isFirst && hasBom(buf)) { + buf = buf.slice(bom.length); + } + + isFirst = false; + var pos = 0; + var length = buf.length; + + while (pos < length) { + if (discardTrailingNewline) { + if (buf[pos] === lineFeed) { + ++pos; + } + discardTrailingNewline = false; + } + + var lineLength = -1; + var fieldLength = startingFieldLength; + var c; + + for (var i = startingPos; lineLength < 0 && i < length; ++i) { + c = buf[i]; + if (c === colon) { + if (fieldLength < 0) { + fieldLength = i - pos; + } + } else if (c === carriageReturn) { + discardTrailingNewline = true; + lineLength = i - pos; + } else if (c === lineFeed) { + lineLength = i - pos; + } + } + + if (lineLength < 0) { + startingPos = length - pos; + startingFieldLength = fieldLength; + break; + } else { + startingPos = 0; + startingFieldLength = -1; + } + + parseEventStreamLine(buf, pos, fieldLength, lineLength); + + pos += lineLength + 1; + } + + if (pos === length) { + buf = void 0; + } else if (pos > 0) { + buf = buf.slice(pos); + } + }); + }); + + req.on('error', function (err) { + self.connectionInProgress = false; + onConnectionClosed(err.message); + }); + + if (req.setNoDelay) req.setNoDelay(true); + req.end(); + } + + connect(); + + function _emit() { + if (self.listeners(arguments[0]).length > 0) { + self.emit.apply(self, arguments); + } + } + + this._close = function () { + if (readyState === EventSource.CLOSED) return; + readyState = EventSource.CLOSED; + if (req.abort) req.abort(); + if (req.xhr && req.xhr.abort) req.xhr.abort(); + }; + + function parseEventStreamLine(buf, pos, fieldLength, lineLength) { + if (lineLength === 0) { + if (data.length > 0) { + var type = eventName || 'message'; + _emit(type, new MessageEvent(type, { + data: data.slice(0, -1), // remove trailing newline + lastEventId: lastEventId, + origin: getOrigin(url) + })); + data = ''; + } + eventName = void 0; + } else if (fieldLength > 0) { + var noValue = fieldLength < 0; + var step = 0; 
+ var field = buf.slice(pos, pos + (noValue ? lineLength : fieldLength)).toString(); + + if (noValue) { + step = lineLength; + } else if (buf[pos + fieldLength + 1] !== space) { + step = fieldLength + 1; + } else { + step = fieldLength + 2; + } + pos += step; + + var valueLength = lineLength - step; + var value = buf.slice(pos, pos + valueLength).toString(); + + if (field === 'data') { + data += value + '\n'; + } else if (field === 'event') { + eventName = value; + } else if (field === 'id') { + lastEventId = value; + } else if (field === 'retry') { + var retry = parseInt(value, 10); + if (!Number.isNaN(retry)) { + self.reconnectInterval = retry; + } + } + } + } +} + +module.exports = EventSource; + +util.inherits(EventSource, events.EventEmitter); +EventSource.prototype.constructor = EventSource; // make stacktraces readable + +['open', 'error', 'message'].forEach(function (method) { + Object.defineProperty(EventSource.prototype, 'on' + method, { + /** + * Returns the current listener + * + * @return {Mixed} the set function or undefined + * @api private + */ + get: function get() { + var listener = this.listeners(method)[0]; + return listener ? (listener._listener ? 
listener._listener : listener) : undefined; + }, + + /** + * Start listening for events + * + * @param {Function} listener the listener + * @return {Mixed} the set function or undefined + * @api private + */ + set: function set(listener) { + this.removeAllListeners(method); + this.addEventListener(method, listener); + } + }); +}); + +/** + * Ready states + */ +Object.defineProperty(EventSource, 'CONNECTING', { enumerable: true, value: 0 }); +Object.defineProperty(EventSource, 'OPEN', { enumerable: true, value: 1 }); +Object.defineProperty(EventSource, 'CLOSED', { enumerable: true, value: 2 }); + +EventSource.prototype.CONNECTING = 0; +EventSource.prototype.OPEN = 1; +EventSource.prototype.CLOSED = 2; + +/** + * Closes the connection, if one is made, and sets the readyState attribute to 2 (closed) + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/EventSource/close + * @api public + */ +EventSource.prototype.close = function () { + this._close(); +}; + +/** + * Emulates the W3C Browser based WebSocket interface using addEventListener. + * + * @param {String} type A string representing the event type to listen out for + * @param {Function} listener callback + * @see https://developer.mozilla.org/en/DOM/element.addEventListener + * @see http://dev.w3.org/html5/websockets/#the-websocket-interface + * @api public + */ +EventSource.prototype.addEventListener = function addEventListener(type, listener) { + if (typeof listener === 'function') { + // store a reference so we can return the original function again + listener._listener = listener; + this.on(type, listener); + } +}; + +/** + * Emulates the W3C Browser based WebSocket interface using dispatchEvent. 
+ * + * @param {Event} event An event to be dispatched + * @see https://developer.mozilla.org/en-US/docs/Web/API/EventTarget/dispatchEvent + * @api public + */ +EventSource.prototype.dispatchEvent = function dispatchEvent(event) { + if (!event.type) { + throw new Error('UNSPECIFIED_EVENT_TYPE_ERR'); + } + // if event is instance of an CustomEvent (or has 'details' property), + // send the detail object as the payload for the event + this.emit(event.type, event.detail); +}; + +/** + * Emulates the W3C Browser based WebSocket interface using removeEventListener. + * + * @param {String} type A string representing the event type to remove + * @param {Function} listener callback + * @see https://developer.mozilla.org/en/DOM/element.removeEventListener + * @see http://dev.w3.org/html5/websockets/#the-websocket-interface + * @api public + */ +EventSource.prototype.removeEventListener = function removeEventListener(type, listener) { + if (typeof listener === 'function') { + listener._listener = undefined; + this.removeListener(type, listener); + } +}; + +/** + * W3C Event + * + * @see http://www.w3.org/TR/DOM-Level-3-Events/#interface-Event + * @api private + */ +function Event(type, optionalProperties) { + Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }); + if (optionalProperties) { + for (var f in optionalProperties) { + if (optionalProperties.hasOwnProperty(f)) { + Object.defineProperty(this, f, { writable: false, value: optionalProperties[f], enumerable: true }); + } + } + } +} + +/** + * W3C MessageEvent + * + * @see http://www.w3.org/TR/webmessaging/#event-definitions + * @api private + */ +function MessageEvent(type, eventInitDict) { + Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }); + for (var f in eventInitDict) { + if (eventInitDict.hasOwnProperty(f)) { + Object.defineProperty(this, f, { writable: false, value: eventInitDict[f], enumerable: true }); + } + } +} + +/** + * Returns a new 
object of headers that does not include any authorization and cookie headers + * + * @param {Object} headers An object of headers ({[headerName]: headerValue}) + * @return {Object} a new object of headers + * @api private + */ +function removeUnsafeHeaders(headers) { + var safe = {}; + for (var key in headers) { + if (/^(cookie|authorization)$/i.test(key)) { + continue; + } + + safe[key] = headers[key]; + } + + return safe; +} + +/** + * Transform an URL to a valid origin value. + * + * @param {String|Object} url URL to transform to it's origin. + * @returns {String} The origin. + * @api private + */ +function getOrigin(url) { + if (typeof url === 'string') url = parse(url); + if (!url.protocol || !url.hostname) return 'null'; + return (url.protocol + '//' + url.host).toLowerCase(); +} diff --git a/src/platform/getEventSource/node.ts b/src/platform/getEventSource/node.ts new file mode 100644 index 00000000..7df59b86 --- /dev/null +++ b/src/platform/getEventSource/node.ts @@ -0,0 +1,23 @@ +import { IEventSourceConstructor } from '../../services/types'; + +let __isCustom = false; +let __eventSource: IEventSourceConstructor | undefined; + +// This function is only exposed for testing purposes. +export function __setEventSource(eventSource: IEventSourceConstructor) { + __eventSource = eventSource; + __isCustom = true; +} +export function __restore() { + __isCustom = false; +} + +export function getEventSource() { + // returns EventSource at `eventsource` package. If not available, return global EventSource or undefined + try { + return __isCustom ? __eventSource : require('./eventsource'); + } catch (error) { + // eslint-disable-next-line no-undef -- Feature detection for EventSource + return typeof EventSource === 'function' ? 
EventSource : undefined; + } +} diff --git a/src/platform/getFetch/__tests__/browser.spec.ts b/src/platform/getFetch/__tests__/browser.spec.ts new file mode 100644 index 00000000..a3caad3e --- /dev/null +++ b/src/platform/getFetch/__tests__/browser.spec.ts @@ -0,0 +1,15 @@ +import { getFetch } from '../browser'; + +test('getFetch returns global fetch in Browser', () => { + const fetchMock = jest.fn(); + const originalFetch = global.fetch; + global.fetch = fetchMock; + + expect(getFetch()).toBe(fetchMock); + + global.fetch = originalFetch; +}); + +test('getFetch returns undefined when fetch is not available', () => { + expect(getFetch()).toBeUndefined(); +}); diff --git a/src/platform/getFetch/__tests__/node.spec.ts b/src/platform/getFetch/__tests__/node.spec.ts new file mode 100644 index 00000000..20fbd7e3 --- /dev/null +++ b/src/platform/getFetch/__tests__/node.spec.ts @@ -0,0 +1,5 @@ +import { getFetch } from '../node'; + +test('getFetch returns node-fetch module in Node', () => { + expect(getFetch()).toBe(require('node-fetch')); +}); diff --git a/src/platform/getFetch/browser.ts b/src/platform/getFetch/browser.ts new file mode 100644 index 00000000..e4baee2f --- /dev/null +++ b/src/platform/getFetch/browser.ts @@ -0,0 +1,5 @@ +// It doesn't return a ponyfill if global fetch is not available +export function getFetch() { + // eslint-disable-next-line no-undef + return typeof fetch === 'function' ? 
fetch : undefined; +} diff --git a/src/platform/getFetch/node.ts b/src/platform/getFetch/node.ts new file mode 100644 index 00000000..e85e3c20 --- /dev/null +++ b/src/platform/getFetch/node.ts @@ -0,0 +1,27 @@ +import { IFetch } from '../../services/types'; + +let nodeFetch: IFetch | undefined; + +try { + nodeFetch = require('node-fetch'); + + // Handle node-fetch issue https://github.com/node-fetch/node-fetch/issues/1037 + if (typeof nodeFetch !== 'function') nodeFetch = (nodeFetch as any).default; + +} catch (error) { + // Try to access global fetch if `node-fetch` package couldn't be imported (e.g., not in a Node environment) + // eslint-disable-next-line no-undef + nodeFetch = typeof fetch === 'function' ? fetch : undefined; +} + +// This function is only exposed for testing purposes. +export function __setFetch(fetch: IFetch) { + nodeFetch = fetch; +} + +/** + * Retrieves 'node-fetch', a Fetch API polyfill for Node.js, with fallback to global 'fetch' if available. + */ +export function getFetch() { + return nodeFetch; +} diff --git a/src/platform/getOptions/__tests__/node.spec.ts b/src/platform/getOptions/__tests__/node.spec.ts new file mode 100644 index 00000000..c7b841d3 --- /dev/null +++ b/src/platform/getOptions/__tests__/node.spec.ts @@ -0,0 +1,21 @@ +import { fullSettings } from '../../../utils/settingsValidation/__tests__/settings.mocks'; +import { getOptions } from '../node'; + +describe('getOptions', () => { + + test('returns an object with a custom agent if all urls are https', () => { + expect(typeof (getOptions(fullSettings) as any).agent).toBe('object'); + }); + + test('returns undefined if some url is not https', () => { + const settings = { ...fullSettings, urls: { ...fullSettings.urls, sdk: 'http://sdk.split.io' } }; + expect(getOptions(settings)).toBeUndefined(); + }); + + test('returns the provided options from settings', () => { + const customRequestOptions = { agent: false }; + const settings = { ...fullSettings, sync: { 
...fullSettings.sync, requestOptions: customRequestOptions } }; + expect(getOptions(settings)).toBe(customRequestOptions); + }); + +}); diff --git a/src/platform/getOptions/node.ts b/src/platform/getOptions/node.ts new file mode 100644 index 00000000..407e3471 --- /dev/null +++ b/src/platform/getOptions/node.ts @@ -0,0 +1,24 @@ +// @TODO +// 1- handle multiple protocols automatically +// 2- destroy it once the sdk is destroyed +import https from 'https'; + +import { find } from '../../utils/lang'; +import { ISettings } from '../../types'; + +const agent = new https.Agent({ + keepAlive: true, + keepAliveMsecs: 1500 +}); + +export function getOptions(settings: ISettings) { + // User provided options take precedence + if (settings.sync.requestOptions) return settings.sync.requestOptions; + + // If some URL is not HTTPS, we don't use the agent, to let the SDK connect to HTTP endpoints + if (find(settings.urls, url => !url.startsWith('https:'))) return; + + return { + agent + }; +} diff --git a/src/platform/node.ts b/src/platform/node.ts new file mode 100644 index 00000000..4ca911a6 --- /dev/null +++ b/src/platform/node.ts @@ -0,0 +1,16 @@ +import { EventEmitter } from 'events'; +import { getFetch } from '../platform/getFetch/node'; +import { getEventSource } from '../platform/getEventSource/node'; +import { getOptions } from '../platform/getOptions/node'; +import { NodeSignalListener } from '../listeners/node'; +import { now } from '../utils/timeTracker/now/node'; +import { IPlatform } from '../sdkFactory/types'; + +export const platform: IPlatform = { + getFetch, + getEventSource, + getOptions, + EventEmitter, + now, + SignalListener: NodeSignalListener +}; diff --git a/src/readiness/constants.ts b/src/readiness/constants.ts index f08cf546..022100cc 100644 --- a/src/readiness/constants.ts +++ b/src/readiness/constants.ts @@ -14,3 +14,4 @@ export const SDK_UPDATE = 'state::update'; // SdkUpdateMetadata types: export const FLAGS_UPDATE = 'FLAGS_UPDATE'; export const 
SEGMENTS_UPDATE = 'SEGMENTS_UPDATE'; +export const CONFIGS_UPDATE = 'CONFIGS_UPDATE'; diff --git a/src/sdkClient/__tests__/clientInputValidation.spec.ts b/src/sdkClient/__tests__/clientInputValidation.spec.ts index e4de8f28..3f87782b 100644 --- a/src/sdkClient/__tests__/clientInputValidation.spec.ts +++ b/src/sdkClient/__tests__/clientInputValidation.spec.ts @@ -4,7 +4,7 @@ import { clientInputValidationDecorator } from '../clientInputValidation'; // Mocks import { DebugLogger } from '../../logger/browser/DebugLogger'; import { createClientMock } from './testUtils'; -import { FallbackTreatmentsCalculator, IFallbackTreatmentsCalculator } from '../../evaluator/fallbackTreatmentsCalculator'; +import { FallbackTreatmentsCalculator } from '../../evaluator/fallbackTreatmentsCalculator'; const settings: any = { log: DebugLogger(), @@ -14,7 +14,7 @@ const settings: any = { const EVALUATION_RESULT = 'on'; const client: any = createClientMock(EVALUATION_RESULT); -const fallbackTreatmentsCalculator: IFallbackTreatmentsCalculator = FallbackTreatmentsCalculator(); +const fallbackTreatmentsCalculator = FallbackTreatmentsCalculator(); const readinessManager: any = { isReadyFromCache: () => true, diff --git a/src/sdkClient/__tests__/sdkClientMethod.spec.ts b/src/sdkClient/__tests__/sdkClientMethod.spec.ts index e3cf4807..2cc468c0 100644 --- a/src/sdkClient/__tests__/sdkClientMethod.spec.ts +++ b/src/sdkClient/__tests__/sdkClientMethod.spec.ts @@ -8,13 +8,18 @@ import { FallbackTreatmentsCalculator } from '../../evaluator/fallbackTreatments const errorMessage = 'Shared Client not supported by the storage mechanism. 
Create isolated instances instead.'; +const signalListenerMock = { + start: jest.fn(), + stop: jest.fn() +}; + const paramMocks = [ // No SyncManager (i.e., Async SDK) and No signal listener { + platform: {}, storage: { destroy: jest.fn(() => Promise.resolve()) }, syncManager: undefined, sdkReadinessManager: { sdkStatus: jest.fn(), readinessManager: { destroy: jest.fn() } }, - signalListener: undefined, settings: { mode: CONSUMER_MODE, log: loggerMock, core: { authorizationKey: 'sdk key '} }, telemetryTracker: telemetryTrackerFactory(), clients: {}, @@ -23,10 +28,10 @@ const paramMocks = [ }, // SyncManager (i.e., Sync SDK) and Signal listener { + platform: { SignalListener: jest.fn(() => signalListenerMock) }, storage: { destroy: jest.fn() }, syncManager: { stop: jest.fn(), flush: jest.fn(() => Promise.resolve()) }, sdkReadinessManager: { sdkStatus: jest.fn(), readinessManager: { destroy: jest.fn() } }, - signalListener: { stop: jest.fn() }, settings: { mode: STANDALONE_MODE, log: loggerMock, core: { authorizationKey: 'sdk key '} }, telemetryTracker: telemetryTrackerFactory(), clients: {}, @@ -81,7 +86,7 @@ test.each(paramMocks)('sdkClientMethodFactory', (params, done: any) => { expect(params.syncManager.stop).toBeCalledTimes(1); expect(params.syncManager.flush).toBeCalledTimes(3); } - if (params.signalListener) expect(params.signalListener.stop).toBeCalledTimes(1); + if (params.platform.SignalListener) expect(signalListenerMock.stop).toBeCalledTimes(1); done(); }); diff --git a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts index 1209d766..0b4b8344 100644 --- a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts +++ b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts @@ -38,11 +38,18 @@ const syncManagerMock = { }) }; +const signalListenerMock = { + start: jest.fn(), + stop: jest.fn() +}; + const params = { + platform:{ + SignalListener: jest.fn(() => signalListenerMock) + }, storage: storageMock, 
sdkReadinessManager: sdkReadinessManagerMock, syncManager: syncManagerMock, - signalListener: { stop: jest.fn() }, settings: settingsWithKey, telemetryTracker: telemetryTrackerFactory(), clients: {}, @@ -95,8 +102,8 @@ describe('sdkClientMethodCSFactory', () => { expect(params.storage.destroy).toBeCalledTimes(1); expect(params.syncManager.stop).toBeCalledTimes(1); expect(params.syncManager.flush).toBeCalledTimes(1); - expect(params.signalListener.stop).toBeCalledTimes(1); expect(params.impressionsTracker.stop).toBeCalledTimes(1); + expect(signalListenerMock.stop).toBeCalledTimes(1); }); }); @@ -140,7 +147,7 @@ describe('sdkClientMethodCSFactory', () => { expect(params.storage.destroy).not.toBeCalled(); expect(params.syncManager.stop).not.toBeCalled(); expect(params.syncManager.flush).not.toBeCalled(); - expect(params.signalListener.stop).not.toBeCalled(); + expect(signalListenerMock.stop).not.toBeCalled(); }); diff --git a/src/sdkClient/__tests__/trackMethod.spec.ts b/src/sdkClient/__tests__/trackMethod.spec.ts new file mode 100644 index 00000000..d64be4b9 --- /dev/null +++ b/src/sdkClient/__tests__/trackMethod.spec.ts @@ -0,0 +1,88 @@ +import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; +import { trackMethodFactory } from '../trackMethod'; +import { STANDALONE_MODE } from '../../utils/constants'; +import { ISdkFactoryContext } from '../../sdkFactory/types'; + +const readinessManagerMock = { + isReady: jest.fn(() => true), + isReadyFromCache: jest.fn(() => true), + isDestroyed: jest.fn(() => false), + isTimedout: jest.fn(() => false), + hasTimedout: jest.fn(() => false), + destroy: jest.fn(), +}; + +const eventTrackerMock = { + track: jest.fn(() => true), +}; + +const telemetryTrackerMock = { + trackEval: jest.fn(() => jest.fn()), +}; + +const definitionsMock = { + getAll: jest.fn(() => []), + getSplit: jest.fn(() => null), + trafficTypeExists: jest.fn(() => true), +}; + +const trackMethodParams = { + settings: { log: loggerMock, mode: 
STANDALONE_MODE } as any, + eventTracker: eventTrackerMock as any, + telemetryTracker: telemetryTrackerMock as any, + sdkReadinessManager: { readinessManager: readinessManagerMock }, + storage: { splits: definitionsMock }, +} as unknown as ISdkFactoryContext; + +describe('trackMethodFactory', () => { + + beforeEach(() => { + loggerMock.mockClear(); + eventTrackerMock.track.mockClear(); + telemetryTrackerMock.trackEval.mockClear(); + readinessManagerMock.isDestroyed.mockReturnValue(false); + }); + + test('Should return true when the event was successfully tracked', () => { + const track = trackMethodFactory(trackMethodParams); + + const result = track('validKey', 'user', 'my.event', 10, { prop: 'value' }); + + expect(result).toBe(true); + expect(eventTrackerMock.track).toBeCalledTimes(1); + expect(eventTrackerMock.track).toBeCalledWith( + expect.objectContaining({ + eventTypeId: 'my.event', + trafficTypeName: 'user', + value: 10, + key: 'validKey', + properties: { prop: 'value' }, + }), + expect.any(Number), + ); + expect(telemetryTrackerMock.trackEval).toBeCalledTimes(1); + expect(loggerMock.error).not.toBeCalled(); + }); + + test('Should return false when SDK is destroyed', () => { + readinessManagerMock.isDestroyed.mockReturnValue(true); + const track = trackMethodFactory(trackMethodParams); + + const result = track('validKey', 'user', 'my.event'); + + expect(result).toBe(false); + expect(eventTrackerMock.track).not.toBeCalled(); + expect(loggerMock.error).toBeCalled(); + }); + + test('Should return false when no key is provided', () => { + const track = trackMethodFactory(trackMethodParams); + + // @ts-expect-error testing invalid input + const result = track(undefined, 'user', 'my.event'); + + expect(result).toBe(false); + expect(eventTrackerMock.track).not.toBeCalled(); + expect(loggerMock.error).toBeCalled(); + }); +}); diff --git a/src/sdkClient/client.ts b/src/sdkClient/client.ts index 6eded6c3..9eeb4f41 100644 --- a/src/sdkClient/client.ts +++ 
b/src/sdkClient/client.ts @@ -1,10 +1,9 @@ import { evaluateFeature, evaluateFeatures, evaluateFeaturesByFlagSets } from '../evaluator'; import { thenable } from '../utils/promise/thenable'; import { getMatching, getBucketing } from '../utils/key'; -import { validateSplitExistence } from '../utils/inputValidation/splitExistence'; -import { validateTrafficTypeExistence } from '../utils/inputValidation/trafficTypeExistence'; +import { validateDefinitionExistence } from '../utils/inputValidation/definitionExistence'; import { SDK_NOT_READY } from '../utils/labels'; -import { CONTROL, TREATMENT, TREATMENTS, TREATMENT_WITH_CONFIG, TREATMENTS_WITH_CONFIG, TRACK, TREATMENTS_WITH_CONFIG_BY_FLAGSETS, TREATMENTS_BY_FLAGSETS, TREATMENTS_BY_FLAGSET, TREATMENTS_WITH_CONFIG_BY_FLAGSET, GET_TREATMENTS_WITH_CONFIG, GET_TREATMENTS_BY_FLAG_SETS, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, GET_TREATMENTS_BY_FLAG_SET, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SET, GET_TREATMENT_WITH_CONFIG, GET_TREATMENT, GET_TREATMENTS, TRACK_FN_LABEL } from '../utils/constants'; +import { CONTROL, TREATMENT, TREATMENTS, TREATMENT_WITH_CONFIG, TREATMENTS_WITH_CONFIG, TREATMENTS_WITH_CONFIG_BY_FLAGSETS, TREATMENTS_BY_FLAGSETS, TREATMENTS_BY_FLAGSET, TREATMENTS_WITH_CONFIG_BY_FLAGSET, GET_TREATMENTS_WITH_CONFIG, GET_TREATMENTS_BY_FLAG_SETS, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, GET_TREATMENTS_BY_FLAG_SET, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SET, GET_TREATMENT_WITH_CONFIG, GET_TREATMENT, GET_TREATMENTS } from '../utils/constants'; import { IEvaluationResult } from '../evaluator/types'; import SplitIO from '../../types/splitio'; import { IMPRESSION_QUEUEING } from '../logger/constants'; @@ -12,6 +11,7 @@ import { ISdkFactoryContext } from '../sdkFactory/types'; import { isConsumerMode } from '../utils/settingsValidation/mode'; import { Method } from '../sync/submitters/types'; import { ImpressionDecorated } from '../trackers/types'; +import { trackMethodFactory } from './trackMethod'; const treatmentNotReady = { 
treatment: CONTROL, label: SDK_NOT_READY }; @@ -23,7 +23,7 @@ function treatmentsNotReady(featureFlagNames: string[]) { return evaluations; } -function stringify(options?: SplitIO.EvaluationOptions) { +export function stringify(options?: SplitIO.EvaluationOptions) { if (options && options.properties) { try { return JSON.stringify(options.properties); @@ -35,7 +35,7 @@ function stringify(options?: SplitIO.EvaluationOptions) { * Creator of base client with getTreatments and track methods. */ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | SplitIO.IAsyncClient { - const { sdkReadinessManager: { readinessManager }, storage, settings, impressionsTracker, eventTracker, telemetryTracker, fallbackTreatmentsCalculator } = params; + const { sdkReadinessManager: { readinessManager }, storage, settings, impressionsTracker, telemetryTracker, fallbackCalculator } = params; const { log, mode } = settings; const isAsync = isConsumerMode(mode); @@ -147,14 +147,14 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl let { treatment, label, config = null } = evaluation; if (treatment === CONTROL) { - const fallbackTreatment = fallbackTreatmentsCalculator(featureFlagName, label); + const fallbackTreatment = fallbackCalculator(featureFlagName, label); treatment = fallbackTreatment.treatment; label = fallbackTreatment.label; config = fallbackTreatment.config; } - if (validateSplitExistence(log, readinessManager, featureFlagName, label, invokingMethodName)) { - log.info(IMPRESSION_QUEUEING, [featureFlagName, matchingKey, treatment, label]); + if (validateDefinitionExistence(log, readinessManager, featureFlagName, label, invokingMethodName)) { + log.info(IMPRESSION_QUEUEING, ['Feature flag', featureFlagName, matchingKey, treatment, label]); queue.push({ imp: { feature: featureFlagName, @@ -173,42 +173,14 @@ export function clientFactory(params: ISdkFactoryContext): SplitIO.IClient | Spl if (withConfig) { return { treatment, - 
config + config: config as string | null }; } return treatment; } - function track(key: SplitIO.SplitKey, trafficTypeName: string, eventTypeId: string, value?: number, properties?: SplitIO.Properties, size = 1024) { - const stopTelemetryTracker = telemetryTracker.trackEval(TRACK); - - const matchingKey = getMatching(key); - const timestamp = Date.now(); - const eventData: SplitIO.EventData = { - eventTypeId, - trafficTypeName, - value, - timestamp, - key: matchingKey, - properties - }; - - // This may be async but we only warn, we don't actually care if it is valid or not in terms of queueing the event. - validateTrafficTypeExistence(log, readinessManager, storage.splits, mode, trafficTypeName, TRACK_FN_LABEL); - - const result = eventTracker.track(eventData, size); - - if (thenable(result)) { - return result.then((result) => { - stopTelemetryTracker(); - return result; - }); - } else { - stopTelemetryTracker(); - return result; - } - } + const track = trackMethodFactory(params); return { getTreatment, diff --git a/src/sdkClient/clientInputValidation.ts b/src/sdkClient/clientInputValidation.ts index 9ed2a722..68996b9a 100644 --- a/src/sdkClient/clientInputValidation.ts +++ b/src/sdkClient/clientInputValidation.ts @@ -1,31 +1,27 @@ import { validateAttributes, - validateEvent, - validateEventValue, - validateEventProperties, validateKey, - validateSplit, - validateSplits, - validateTrafficType, + validateDefinition, + validateDefinitions, validateIfNotDestroyed, validateIfReadyFromCache, validateEvaluationOptions } from '../utils/inputValidation'; import { startsWith } from '../utils/lang'; -import { GET_TREATMENT, GET_TREATMENTS, GET_TREATMENTS_BY_FLAG_SET, GET_TREATMENTS_BY_FLAG_SETS, GET_TREATMENTS_WITH_CONFIG, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SET, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, GET_TREATMENT_WITH_CONFIG, TRACK_FN_LABEL } from '../utils/constants'; +import { GET_TREATMENT, GET_TREATMENTS, GET_TREATMENTS_BY_FLAG_SET, GET_TREATMENTS_BY_FLAG_SETS, 
GET_TREATMENTS_WITH_CONFIG, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SET, GET_TREATMENTS_WITH_CONFIG_BY_FLAG_SETS, GET_TREATMENT_WITH_CONFIG } from '../utils/constants'; import { IReadinessManager } from '../readiness/types'; import { MaybeThenable } from '../dtos/types'; import { ISettings } from '../types'; import SplitIO from '../../types/splitio'; import { isConsumerMode } from '../utils/settingsValidation/mode'; import { validateFlagSets } from '../utils/settingsValidation/splitFilters'; -import { IFallbackTreatmentsCalculator } from '../evaluator/fallbackTreatmentsCalculator'; +import { IFallbackCalculator } from '../evaluator/fallbackTreatmentsCalculator'; /** * Decorator that validates the input before actually executing the client methods. * We should "guard" the client here, while not polluting the "real" implementation of those methods. */ -export function clientInputValidationDecorator(settings: ISettings, client: TClient, readinessManager: IReadinessManager, fallbackTreatmentsCalculator: IFallbackTreatmentsCalculator): TClient { +export function clientInputValidationDecorator(settings: ISettings, client: TClient, readinessManager: IReadinessManager, fallbackCalculator: IFallbackCalculator): TClient { const { log, mode } = settings; const isAsync = isConsumerMode(mode); @@ -39,14 +35,14 @@ export function clientInputValidationDecorator -1 ? validateFlagSets(log, methodName, maybeNameOrNames as string[], settings.sync.__splitFiltersValidation.groupedFilters.bySet) : startsWith(methodName, GET_TREATMENTS) ? 
- validateSplits(log, maybeNameOrNames, methodName) : - validateSplit(log, maybeNameOrNames, methodName); + validateDefinitions(log, maybeNameOrNames, methodName) : + validateDefinition(log, maybeNameOrNames, methodName); const attributes = validateAttributes(log, maybeAttributes, methodName); const isNotDestroyed = validateIfNotDestroyed(log, readinessManager, methodName); const options = validateEvaluationOptions(log, maybeOptions, methodName); - validateIfReadyFromCache(log, readinessManager, methodName, nameOrNames); + validateIfReadyFromCache(log, readinessManager, methodName); const valid = isNotDestroyed && key && nameOrNames && attributes !== false; @@ -66,7 +62,7 @@ export function clientInputValidationDecorator { - // Cleanup storage - return storage.destroy(); - }); - } - } + sdkLifecycleFactory(params, isSharedClient) ); } diff --git a/src/sdkClient/sdkLifecycle.ts b/src/sdkClient/sdkLifecycle.ts new file mode 100644 index 00000000..1322460c --- /dev/null +++ b/src/sdkClient/sdkLifecycle.ts @@ -0,0 +1,78 @@ +import { releaseApiKey, validateAndTrackApiKey } from '../utils/inputValidation/apiKey'; +import { ISdkFactoryContext } from '../sdkFactory/types'; + +const COOLDOWN_TIME_IN_MILLIS = 1000; + +/** + * Creates an Sdk client, i.e., a base client with status, init, flush and destroy interface + */ +export function sdkLifecycleFactory(params: ISdkFactoryContext, isSharedClient?: boolean): { init(): void; flush(): Promise; destroy(): Promise } { + const { sdkReadinessManager, syncManager, storage, settings, telemetryTracker, impressionsTracker, platform } = params; + + let hasInit = false; + let lastActionTime = 0; + + const signalListener = platform.SignalListener && new platform.SignalListener(params); + + function __cooldown(func: Function, time: number) { + const now = Date.now(); + //get the actual time elapsed in ms + const timeElapsed = now - lastActionTime; + //check if the time elapsed is less than desired cooldown + if (timeElapsed < time) { + 
//if yes, return message with remaining time in seconds + settings.log.warn(`Flush cooldown, remaining time ${(time - timeElapsed) / 1000} seconds`); + return Promise.resolve(); + } else { + //Do the requested action and re-assign the lastActionTime + lastActionTime = now; + return func(); + } + } + + function __flush() { + return syncManager ? syncManager.flush() : Promise.resolve(); + } + + return { + init() { + if (hasInit) return; + hasInit = true; + + if (!isSharedClient) { + validateAndTrackApiKey(settings.log, settings.core.authorizationKey); + sdkReadinessManager.readinessManager.init(); + impressionsTracker.start(); + syncManager && syncManager.start(); + signalListener && signalListener.start(); + } + }, + + flush() { + // @TODO define cooldown time + return __cooldown(__flush, COOLDOWN_TIME_IN_MILLIS); + }, + + destroy() { + hasInit = false; + // Mark the SDK as destroyed immediately + sdkReadinessManager.readinessManager.destroy(); + + // For main client, cleanup the SDK Key, listeners and scheduled jobs, and record stat before flushing data + if (!isSharedClient) { + releaseApiKey(settings.core.authorizationKey); + telemetryTracker.sessionLength(); + signalListener && signalListener.stop(); + impressionsTracker.stop(); + } + + // Stop background jobs + syncManager && syncManager.stop(); + + return __flush().then(() => { + // Cleanup storage + return storage.destroy(); + }); + } + }; +} diff --git a/src/sdkClient/trackMethod.ts b/src/sdkClient/trackMethod.ts new file mode 100644 index 00000000..99464a67 --- /dev/null +++ b/src/sdkClient/trackMethod.ts @@ -0,0 +1,59 @@ +import { thenable } from '../utils/promise/thenable'; +import { getMatching } from '../utils/key'; +import { validateTrafficTypeExistence } from '../utils/inputValidation/trafficTypeExistence'; +import { validateKey, validateTrafficType, validateEvent, validateEventValue, validateEventProperties, validateIfNotDestroyed } from '../utils/inputValidation'; +import { TRACK, TRACK_FN_LABEL } 
from '../utils/constants'; +import { isConsumerMode } from '../utils/settingsValidation/mode'; +import SplitIO from '../../types/splitio'; +import { ISdkFactoryContext } from '../sdkFactory/types'; + +/** + * Creates a standalone `track` function with input validation. + * Reusable by FF SDK client, Configs SDK, and thin-client SDK. + */ +export function trackMethodFactory(params: Pick) { + const { settings, storage: { splits }, telemetryTracker, eventTracker, sdkReadinessManager: { readinessManager } } = params; + const { log, mode } = settings; + const isAsync = isConsumerMode(mode); + + return function track(maybeKey: SplitIO.SplitKey, maybeTT: string, maybeEvent: string, maybeEventValue?: number, maybeProperties?: SplitIO.Properties) { + // Input validation + const key = validateKey(log, maybeKey, TRACK_FN_LABEL); + const trafficTypeName = validateTrafficType(log, maybeTT, TRACK_FN_LABEL); + const eventTypeId = validateEvent(log, maybeEvent, TRACK_FN_LABEL); + const value = validateEventValue(log, maybeEventValue, TRACK_FN_LABEL); + const { properties, size } = validateEventProperties(log, maybeProperties, TRACK_FN_LABEL); + const isNotDestroyed = validateIfNotDestroyed(log, readinessManager, TRACK_FN_LABEL); + + if (!(isNotDestroyed && key && trafficTypeName && eventTypeId && value !== false && properties !== false)) { + return isAsync ? Promise.resolve(false) : false; + } + + // Core logic + const stopTelemetryTracker = telemetryTracker.trackEval(TRACK); + + const eventData: SplitIO.EventData = { + eventTypeId, + trafficTypeName, + value, + timestamp: Date.now(), + key: getMatching(key), + properties: properties as SplitIO.Properties | undefined + }; + + // This may be async but we only warn, we don't actually care if it is valid or not in terms of queueing the event. 
+ validateTrafficTypeExistence(log, readinessManager, splits, mode, trafficTypeName, TRACK_FN_LABEL); + + const result = eventTracker.track(eventData, size); + + if (thenable(result)) { + return result.then((result) => { + stopTelemetryTracker(); + return result; + }); + } else { + stopTelemetryTracker(); + return result; + } + }; +} diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 32c86949..2e7b47e9 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -18,8 +18,7 @@ import { FallbackTreatmentsCalculator } from '../evaluator/fallbackTreatmentsCal */ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IAsyncSDK | SplitIO.IBrowserSDK | SplitIO.IBrowserAsyncSDK { - const { settings, platform, storageFactory, splitApiFactory, extraProps, - syncManagerFactory, SignalListener, + const { settings, platform, storageFactory, splitApiFactory, extraProps, syncManagerFactory, integrationsManagerFactory, sdkManagerFactory, sdkClientMethodFactory, lazyInit } = params; const { log, initialRolloutPlan, core: { key } } = settings; @@ -45,7 +44,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA } }); - const fallbackTreatmentsCalculator = FallbackTreatmentsCalculator(settings.fallbackTreatments); + const fallbackCalculator = FallbackTreatmentsCalculator(settings.fallbackTreatments); if (initialRolloutPlan) { setRolloutPlan(log, initialRolloutPlan, storage as IStorageSync, key && getMatching(key)); @@ -62,14 +61,8 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA // splitApi is used by SyncManager and Browser signal listener const splitApi = splitApiFactory && splitApiFactory(settings, platform, telemetryTracker); - const ctx: ISdkFactoryContext = { clients, splitApi, eventTracker, impressionsTracker, telemetryTracker, sdkReadinessManager, readiness, settings, storage, platform, fallbackTreatmentsCalculator }; - - const syncManager = syncManagerFactory 
&& syncManagerFactory(ctx as ISdkFactoryContextSync); - ctx.syncManager = syncManager; - - // @TODO: move into platform, and call inside sdkClientFactory (if it's used only there) - const signalListener = SignalListener && new SignalListener(syncManager, settings, storage, splitApi); - ctx.signalListener = signalListener; + const ctx: ISdkFactoryContext = { clients, splitApi, eventTracker, impressionsTracker, telemetryTracker, sdkReadinessManager, readiness, settings, storage, platform, fallbackCalculator }; + ctx.syncManager = syncManagerFactory && syncManagerFactory(ctx as ISdkFactoryContextSync); // SDK client and manager const clientMethod = sdkClientMethodFactory(ctx); diff --git a/src/sdkFactory/types.ts b/src/sdkFactory/types.ts index dcccc2bf..2b822dd0 100644 --- a/src/sdkFactory/types.ts +++ b/src/sdkFactory/types.ts @@ -3,7 +3,7 @@ import { ISignalListener } from '../listeners/types'; import { IReadinessManager, ISdkReadinessManager } from '../readiness/types'; import type { sdkManagerFactory } from '../sdkManager'; import type { splitApiFactory } from '../services/splitApi'; -import type { IFallbackTreatmentsCalculator } from '../evaluator/fallbackTreatmentsCalculator'; +import type { IFallbackCalculator } from '../evaluator/fallbackTreatmentsCalculator'; import { IFetch, ISplitApi, IEventSourceConstructor } from '../services/types'; import { IStorageAsync, IStorageSync, IStorageFactoryParams } from '../storages/types'; import { ISyncManager } from '../sync/types'; @@ -35,7 +35,12 @@ export interface IPlatform { /** * Function used to track latencies for telemetry. */ - now?: () => number + now?: () => number, + /** + * Optional signal listener constructor. Used to listen and handle runtime environment states, like server shutdown, app paused or resumed. 
+ */ + // eslint-disable-next-line no-use-before-define + SignalListener?: new (params: ISdkFactoryContext) => ISignalListener, // Used by BrowserSignalListener } export interface ISdkFactoryContext { @@ -47,11 +52,10 @@ export interface ISdkFactoryContext { eventTracker: IEventTracker, telemetryTracker: ITelemetryTracker, storage: IStorageSync | IStorageAsync, - signalListener?: ISignalListener - splitApi?: ISplitApi + splitApi?: ISplitApi, syncManager?: ISyncManager, clients: Record, - fallbackTreatmentsCalculator: IFallbackTreatmentsCalculator + fallbackCalculator: IFallbackCalculator } export interface ISdkFactoryContextSync extends ISdkFactoryContext { @@ -108,14 +112,6 @@ export interface ISdkFactoryParams { filterAdapterFactory?: () => IFilterAdapter - // Optional signal listener constructor. Used to handle special app states, like shutdown, app paused or resumed. - // Pass only if `syncManager` (used by NodeSignalListener) and `splitApi` (used by Browser listener) are passed. - SignalListener?: new ( - syncManager: ISyncManager | undefined, // Used by NodeSignalListener to flush data, and by BrowserSignalListener to close streaming connection. - settings: ISettings, // Used by BrowserSignalListener - storage: IStorageSync | IStorageAsync, // Used by BrowserSignalListener - serviceApi: ISplitApi | undefined) => ISignalListener, // Used by BrowserSignalListener - // @TODO review impressionListener and integrations interfaces. What about handling impressionListener as an integration ? 
integrationsManagerFactory?: (params: IIntegrationFactoryParams) => IIntegrationManager | undefined, diff --git a/src/sdkManager/index.ts b/src/sdkManager/index.ts index 5260170c..90bf59ff 100644 --- a/src/sdkManager/index.ts +++ b/src/sdkManager/index.ts @@ -1,16 +1,16 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { thenable } from '../utils/promise/thenable'; import { find } from '../utils/lang'; -import { validateSplit, validateSplitExistence, validateIfOperational } from '../utils/inputValidation'; +import { validateDefinition, validateDefinitionExistence, validateIfOperational } from '../utils/inputValidation'; import { ISplitsCacheAsync, ISplitsCacheSync } from '../storages/types'; import { ISdkReadinessManager } from '../readiness/types'; -import { ISplit } from '../dtos/types'; +import { IDefinition } from '../dtos/types'; import { ISettings } from '../types'; import SplitIO from '../../types/splitio'; import { isConsumerMode } from '../utils/settingsValidation/mode'; import { SPLIT_FN_LABEL, SPLITS_FN_LABEL, NAMES_FN_LABEL } from '../utils/constants'; -function collectTreatments(splitObject: ISplit) { +function collectTreatments(splitObject: IDefinition) { const conditions = splitObject.conditions; // Rollout conditions are supposed to have the entire partitions list, so we find the first one. let allTreatmentsCondition = find(conditions, (cond) => cond.conditionType === 'ROLLOUT'); @@ -20,7 +20,7 @@ function collectTreatments(splitObject: ISplit) { return allTreatmentsCondition ? 
allTreatmentsCondition.partitions!.map(v => v.treatment) : []; } -function objectToView(splitObject: ISplit | null): SplitIO.SplitView | null { +function objectToView(splitObject: IDefinition | null): SplitIO.SplitView | null { if (!splitObject) return null; return { @@ -29,7 +29,7 @@ function objectToView(splitObject: ISplit | null): SplitIO.SplitView | null { killed: splitObject.killed, changeNumber: splitObject.changeNumber || 0, treatments: collectTreatments(splitObject), - configs: splitObject.configurations || {}, + configs: splitObject.configurations as SplitIO.SplitView['configs'] || {}, sets: splitObject.sets || [], defaultTreatment: splitObject.defaultTreatment, impressionsDisabled: splitObject.impressionsDisabled === true, @@ -37,7 +37,7 @@ function objectToView(splitObject: ISplit | null): SplitIO.SplitView | null { }; } -function objectsToViews(splitObjects: ISplit[]) { +function objectsToViews(splitObjects: IDefinition[]) { let views: SplitIO.SplitView[] = []; splitObjects.forEach(split => { @@ -65,7 +65,7 @@ export function sdkManagerFactory null).then(result => { // handle possible rejections when using pluggable storage - validateSplitExistence(log, readinessManager, splitName, result, SPLIT_FN_LABEL); + validateDefinitionExistence(log, readinessManager, splitName, result, SPLIT_FN_LABEL); return objectToView(result); }); } - validateSplitExistence(log, readinessManager, splitName, split, SPLIT_FN_LABEL); + validateDefinitionExistence(log, readinessManager, splitName, split, SPLIT_FN_LABEL); return objectToView(split); }, diff --git a/src/services/__tests__/splitApi.spec.ts b/src/services/__tests__/splitApi.spec.ts index 196266a3..9d869409 100644 --- a/src/services/__tests__/splitApi.spec.ts +++ b/src/services/__tests__/splitApi.spec.ts @@ -45,20 +45,25 @@ describe('splitApi', () => { assertHeaders(settings, headers); expect(url).toBe(expectedFlagsUrl(-1, 100, settings.validateFilters || false, settings, -1)); + splitApi.fetchConfigs(-1, false, 
100, -1); + [url, { headers }] = fetchMock.mock.calls[4]; + assertHeaders(settings, headers); + expect(url).toBe(expectedConfigsUrl(-1, 100, settings.validateFilters || false, settings, -1)); + splitApi.postEventsBulk('fake-body'); - assertHeaders(settings, fetchMock.mock.calls[4][1].headers); + assertHeaders(settings, fetchMock.mock.calls[5][1].headers); splitApi.postTestImpressionsBulk('fake-body'); - assertHeaders(settings, fetchMock.mock.calls[5][1].headers); - expect(fetchMock.mock.calls[5][1].headers['SplitSDKImpressionsMode']).toBe(settings.sync.impressionsMode); + assertHeaders(settings, fetchMock.mock.calls[6][1].headers); + expect(fetchMock.mock.calls[6][1].headers['SplitSDKImpressionsMode']).toBe(settings.sync.impressionsMode); splitApi.postTestImpressionsCount('fake-body'); - assertHeaders(settings, fetchMock.mock.calls[6][1].headers); + assertHeaders(settings, fetchMock.mock.calls[7][1].headers); splitApi.postMetricsConfig('fake-body'); - assertHeaders(settings, fetchMock.mock.calls[7][1].headers); - splitApi.postMetricsUsage('fake-body'); assertHeaders(settings, fetchMock.mock.calls[8][1].headers); + splitApi.postMetricsUsage('fake-body'); + assertHeaders(settings, fetchMock.mock.calls[9][1].headers); expect(telemetryTrackerMock.trackHttp).toBeCalledTimes(9); @@ -70,6 +75,11 @@ describe('splitApi', () => { const filterQueryString = settings.sync.__splitFiltersValidation && settings.sync.__splitFiltersValidation.queryString; return `sdk/splitChanges?s=1.1&since=${since}${rbSince ? '&rbSince=' + rbSince : ''}${usesFilter ? filterQueryString : ''}${till ? '&till=' + till : ''}`; } + + function expectedConfigsUrl(since: number, till: number, usesFilter: boolean, settings: ISettings, rbSince?: number) { + const filterQueryString = settings.sync.__splitFiltersValidation && settings.sync.__splitFiltersValidation.queryString; + return `sdk/v1/configs?${settings.sync.flagSpecVersion ? `s=${settings.sync.flagSpecVersion}&` : ''}since=${since}${rbSince ? 
'&rbSince=' + rbSince : ''}${usesFilter ? filterQueryString : ''}${till ? '&till=' + till : ''}`; + } }); test('rejects requests if fetch Api is not provided', (done) => { diff --git a/src/services/splitApi.ts b/src/services/splitApi.ts index 6860b022..09926c1d 100644 --- a/src/services/splitApi.ts +++ b/src/services/splitApi.ts @@ -61,6 +61,11 @@ export function splitApiFactory( }); }, + fetchConfigs(since: number, noCache?: boolean, till?: number, rbSince?: number) { + const url = `${urls.sdk}/v1/configs?${settings.sync.flagSpecVersion ? `s=${settings.sync.flagSpecVersion}&` : ''}since=${since}${rbSince ? '&rbSince=' + rbSince : ''}${filterQueryString || ''}${till ? '&till=' + till : ''}`; + return splitHttpClient(url, noCache ? noCacheHeaderOptions : undefined); + }, + fetchSegmentChanges(since: number, segmentName: string, noCache?: boolean, till?: number) { const url = `${urls.sdk}/segmentChanges/${segmentName}?since=${since}${till ? '&till=' + till : ''}`; return splitHttpClient(url, noCache ? 
noCacheHeaderOptions : undefined, telemetryTracker.trackHttp(SEGMENT)); diff --git a/src/services/types.ts b/src/services/types.ts index b747dbb5..01595280 100644 --- a/src/services/types.ts +++ b/src/services/types.ts @@ -35,7 +35,7 @@ export type ISplitHttpClient = (url: string, options?: IRequestOptions, latencyT export type IFetchAuth = (userKeys?: string[]) => Promise -export type IFetchSplitChanges = (since: number, noCache?: boolean, till?: number, rbSince?: number) => Promise +export type IFetchDefinitionChanges = (since: number, noCache?: boolean, till?: number, rbSince?: number) => Promise export type IFetchSegmentChanges = (since: number, segmentName: string, noCache?: boolean, till?: number) => Promise @@ -59,7 +59,8 @@ export interface ISplitApi { getSdkAPIHealthCheck: IHealthCheckAPI getEventsAPIHealthCheck: IHealthCheckAPI fetchAuth: IFetchAuth - fetchSplitChanges: IFetchSplitChanges + fetchSplitChanges: IFetchDefinitionChanges + fetchConfigs: IFetchDefinitionChanges fetchSegmentChanges: IFetchSegmentChanges fetchMemberships: IFetchMemberships postEventsBulk: IPostEventsBulk diff --git a/src/storages/AbstractSplitsCacheAsync.ts b/src/storages/AbstractSplitsCacheAsync.ts index 420b9202..407a35b4 100644 --- a/src/storages/AbstractSplitsCacheAsync.ts +++ b/src/storages/AbstractSplitsCacheAsync.ts @@ -1,5 +1,5 @@ import { ISplitsCacheAsync } from './types'; -import { ISplit } from '../dtos/types'; +import { IDefinition } from '../dtos/types'; import { objectAssign } from '../utils/lang/objectAssign'; /** @@ -8,11 +8,11 @@ import { objectAssign } from '../utils/lang/objectAssign'; */ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { - protected abstract addSplit(split: ISplit): Promise + protected abstract addSplit(split: IDefinition): Promise protected abstract removeSplit(name: string): Promise protected abstract setChangeNumber(changeNumber: number): Promise - update(toAdd: ISplit[], toRemove: ISplit[], changeNumber: 
number): Promise { + update(toAdd: IDefinition[], toRemove: IDefinition[], changeNumber: number): Promise { return Promise.all([ this.setChangeNumber(changeNumber), Promise.all(toAdd.map(addedFF => this.addSplit(addedFF))), @@ -22,10 +22,10 @@ export abstract class AbstractSplitsCacheAsync implements ISplitsCacheAsync { }); } - abstract getSplit(name: string): Promise - abstract getSplits(names: string[]): Promise> + abstract getSplit(name: string): Promise + abstract getSplits(names: string[]): Promise> abstract getChangeNumber(): Promise - abstract getAll(): Promise + abstract getAll(): Promise abstract getSplitNames(): Promise abstract getNamesByFlagSets(flagSets: string[]): Promise[]> abstract trafficTypeExists(trafficType: string): Promise diff --git a/src/storages/AbstractSplitsCacheSync.ts b/src/storages/AbstractSplitsCacheSync.ts index 64194561..8c748dcc 100644 --- a/src/storages/AbstractSplitsCacheSync.ts +++ b/src/storages/AbstractSplitsCacheSync.ts @@ -1,5 +1,5 @@ import { ISplitsCacheSync, IStorageSync } from './types'; -import { IRBSegment, ISplit } from '../dtos/types'; +import { IRBSegment, IDefinition } from '../dtos/types'; import { objectAssign } from '../utils/lang/objectAssign'; import { IN_SEGMENT, IN_LARGE_SEGMENT } from '../utils/constants'; @@ -9,21 +9,21 @@ import { IN_SEGMENT, IN_LARGE_SEGMENT } from '../utils/constants'; */ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { - protected abstract addSplit(split: ISplit): boolean + protected abstract addSplit(split: IDefinition): boolean protected abstract removeSplit(name: string): boolean protected abstract setChangeNumber(changeNumber: number): boolean | void - update(toAdd: ISplit[], toRemove: ISplit[], changeNumber: number): boolean { + update(toAdd: IDefinition[], toRemove: IDefinition[], changeNumber: number): boolean { let updated = toAdd.map(addedFF => this.addSplit(addedFF)).some(result => result); updated = toRemove.map(removedFF => 
this.removeSplit(removedFF.name)).some(result => result) || updated; this.setChangeNumber(changeNumber); return updated; } - abstract getSplit(name: string): ISplit | null + abstract getSplit(name: string): IDefinition | null - getSplits(names: string[]): Record { - const splits: Record = {}; + getSplits(names: string[]): Record { + const splits: Record = {}; names.forEach(name => { splits[name] = this.getSplit(name); }); @@ -32,8 +32,8 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { abstract getChangeNumber(): number - getAll(): ISplit[] { - return this.getSplitNames().map(key => this.getSplit(key) as ISplit); + getAll(): IDefinition[] { + return this.getSplitNames().map(key => this.getSplit(key) as IDefinition); } abstract getSplitNames(): string[] @@ -73,7 +73,7 @@ export abstract class AbstractSplitsCacheSync implements ISplitsCacheSync { * Given a parsed split, it returns a boolean flagging if its conditions use segments matchers (rules & whitelists). 
* This util is intended to simplify the implementation of `splitsCache::usesSegments` method */ -export function usesSegments(ruleEntity: ISplit | IRBSegment) { +export function usesSegments(ruleEntity: IDefinition | IRBSegment) { const conditions = ruleEntity.conditions || []; for (let i = 0; i < conditions.length; i++) { const matchers = conditions[i].matcherGroup.matchers; diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts index 3f1de562..3a0c93ab 100644 --- a/src/storages/__tests__/dataLoader.spec.ts +++ b/src/storages/__tests__/dataLoader.spec.ts @@ -2,7 +2,7 @@ import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage'; import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS'; import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks'; import { loggerMock } from '../../logger/__tests__/sdkLogger.mock'; -import { IRBSegment, ISplit } from '../../dtos/types'; +import { IRBSegment, IDefinition } from '../../dtos/types'; import { validateRolloutPlan, setRolloutPlan } from '../setRolloutPlan'; import { getRolloutPlan } from '../getRolloutPlan'; @@ -50,7 +50,7 @@ describe('validateRolloutPlan', () => { describe('getRolloutPlan & setRolloutPlan (client-side)', () => { // @ts-expect-error Load server-side storage const serverStorage = InMemoryStorageFactory({ settings: fullSettings }); - serverStorage.splits.update([{ name: 'split1' } as ISplit], [], 123); + serverStorage.splits.update([{ name: 'split1' } as IDefinition], [], 123); serverStorage.rbSegments.update([{ name: 'rbs1' } as IRBSegment], [], 321); serverStorage.segments.update('segment1', [fullSettings.core.key as string, otherKey], [], 123); diff --git a/src/storages/__tests__/testUtils.ts b/src/storages/__tests__/testUtils.ts index b2ae79dc..39db48f8 100644 --- a/src/storages/__tests__/testUtils.ts +++ b/src/storages/__tests__/testUtils.ts @@ -1,4 +1,4 @@ -import { IRBSegment, ISplit } from 
'../../dtos/types'; +import { IRBSegment, IDefinition } from '../../dtos/types'; import { IStorageSync, IStorageAsync, IImpressionsCacheSync, IEventsCacheSync } from '../types'; // Assert that instances created by storage factories have the expected interface @@ -23,26 +23,26 @@ export function assertSyncRecorderCacheInterface(cache: IEventsCacheSync | IImpr // Split mocks -export const ALWAYS_ON_SPLIT: ISplit = { 'trafficTypeName': 'user', 'name': 'always-on', 'trafficAllocation': 100, 'trafficAllocationSeed': 1012950810, 'seed': -725161385, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'changeNumber': 1494364996459, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }], 'sets': [] }; -export const ALWAYS_OFF_SPLIT: ISplit = { 'trafficTypeName': 'user', 'name': 'always-off', 'trafficAllocation': 100, 'trafficAllocationSeed': -331690370, 'seed': 403891040, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'on', 'changeNumber': 1494365020316, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 0 }, { 'treatment': 'off', 'size': 100 }], 'label': 'in segment all' }], 'sets': [] }; //@ts-ignore -export const splitWithUserTT: ISplit = { name: 'user_ff', trafficTypeName: 'user_tt', conditions: [] }; //@ts-ignore -export const 
splitWithAccountTT: ISplit = { name: 'account_ff', trafficTypeName: 'account_tt', conditions: [] }; //@ts-ignore -export const splitWithAccountTTAndUsesSegments: ISplit = { trafficTypeName: 'account_tt', conditions: [{ matcherGroup: { matchers: [{ matcherType: 'IN_SEGMENT', userDefinedSegmentMatcherData: { segmentName: 'employees' } }] } }] }; //@ts-ignore -export const something: ISplit = { name: 'something' }; //@ts-ignore -export const somethingElse: ISplit = { name: 'something else' }; +export const ALWAYS_ON_SPLIT: IDefinition = { 'trafficTypeName': 'user', 'name': 'always-on', 'trafficAllocation': 100, 'trafficAllocationSeed': 1012950810, 'seed': -725161385, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'off', 'changeNumber': 1494364996459, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 100 }, { 'treatment': 'off', 'size': 0 }], 'label': 'in segment all' }], 'sets': [] }; +export const ALWAYS_OFF_SPLIT: IDefinition = { 'trafficTypeName': 'user', 'name': 'always-off', 'trafficAllocation': 100, 'trafficAllocationSeed': -331690370, 'seed': 403891040, 'status': 'ACTIVE', 'killed': false, 'defaultTreatment': 'on', 'changeNumber': 1494365020316, 'conditions': [{ 'conditionType': 'ROLLOUT', 'matcherGroup': { 'combiner': 'AND', 'matchers': [{ 'keySelector': { 'trafficType': 'user', 'attribute': null }, 'matcherType': 'ALL_KEYS', 'negate': false, 'userDefinedSegmentMatcherData': null, 'whitelistMatcherData': null, 'unaryNumericMatcherData': null, 'betweenMatcherData': null }] }, 'partitions': [{ 'treatment': 'on', 'size': 0 }, { 'treatment': 'off', 'size': 100 }], 'label': 'in segment all' }], 'sets': [] }; //@ts-ignore +export 
const splitWithUserTT: IDefinition = { name: 'user_ff', trafficTypeName: 'user_tt', conditions: [] }; //@ts-ignore +export const splitWithAccountTT: IDefinition = { name: 'account_ff', trafficTypeName: 'account_tt', conditions: [] }; //@ts-ignore +export const splitWithAccountTTAndUsesSegments: IDefinition = { trafficTypeName: 'account_tt', conditions: [{ matcherGroup: { matchers: [{ matcherType: 'IN_SEGMENT', userDefinedSegmentMatcherData: { segmentName: 'employees' } }] } }] }; //@ts-ignore +export const something: IDefinition = { name: 'something' }; //@ts-ignore +export const somethingElse: IDefinition = { name: 'something else' }; // - With flag sets //@ts-ignore -export const featureFlagWithEmptyFS: ISplit = { name: 'ff_empty', sets: [] }; +export const featureFlagWithEmptyFS: IDefinition = { name: 'ff_empty', sets: [] }; //@ts-ignore -export const featureFlagOne: ISplit = { name: 'ff_one', sets: ['o', 'n', 'e'] }; +export const featureFlagOne: IDefinition = { name: 'ff_one', sets: ['o', 'n', 'e'] }; //@ts-ignore -export const featureFlagTwo: ISplit = { name: 'ff_two', sets: ['t', 'w', 'o'] }; +export const featureFlagTwo: IDefinition = { name: 'ff_two', sets: ['t', 'w', 'o'] }; //@ts-ignore -export const featureFlagThree: ISplit = { name: 'ff_three', sets: ['t', 'h', 'r', 'e'] }; +export const featureFlagThree: IDefinition = { name: 'ff_three', sets: ['t', 'h', 'r', 'e'] }; //@ts-ignore -export const featureFlagWithoutFS: ISplit = { name: 'ff_four' }; +export const featureFlagWithoutFS: IDefinition = { name: 'ff_four' }; // Rule-based segments //@ts-ignore diff --git a/src/storages/inLocalStorage/SplitsCacheInLocal.ts b/src/storages/inLocalStorage/SplitsCacheInLocal.ts index 30945684..a1315de3 100644 --- a/src/storages/inLocalStorage/SplitsCacheInLocal.ts +++ b/src/storages/inLocalStorage/SplitsCacheInLocal.ts @@ -1,4 +1,4 @@ -import { ISplit } from '../../dtos/types'; +import { IDefinition } from '../../dtos/types'; import { AbstractSplitsCacheSync, 
usesSegments } from '../AbstractSplitsCacheSync'; import { isFiniteNumber, toNumber, isNaNNumber } from '../../utils/lang'; import { KeyBuilderCS } from '../KeyBuilderCS'; @@ -30,7 +30,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { else this.storage.removeItem(key); } - private _decrementCounts(split: ISplit) { + private _decrementCounts(split: IDefinition) { try { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); this._decrementCount(ttKey); @@ -44,7 +44,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { } } - private _incrementCounts(split: ISplit) { + private _incrementCounts(split: IDefinition) { try { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); this.storage.setItem(ttKey, (toNumber(this.storage.getItem(ttKey)) + 1) + ''); @@ -79,7 +79,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { this.hasSync = false; } - addSplit(split: ISplit) { + addSplit(split: IDefinition) { const name = split.name; const splitKey = this.keys.buildSplitKey(name); const splitFromStorage = this.storage.getItem(splitKey); @@ -110,7 +110,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { return true; } - getSplit(name: string): ISplit | null { + getSplit(name: string): IDefinition | null { const item = this.storage.getItem(this.keys.buildSplitKey(name)); return item && JSON.parse(item); } @@ -184,7 +184,7 @@ export class SplitsCacheInLocal extends AbstractSplitsCacheSync { }); } - private addToFlagSets(featureFlag: ISplit) { + private addToFlagSets(featureFlag: IDefinition) { if (!featureFlag.sets) return; featureFlag.sets.forEach(featureFlagSet => { diff --git a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts index 976baa72..c8c79c5e 100644 --- a/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts +++ b/src/storages/inLocalStorage/__tests__/SplitsCacheInLocal.spec.ts 
@@ -1,7 +1,7 @@ import { SplitsCacheInLocal } from '../SplitsCacheInLocal'; import { KeyBuilderCS } from '../../KeyBuilderCS'; import { splitWithUserTT, splitWithAccountTT, splitWithAccountTTAndUsesSegments, something, somethingElse, featureFlagOne, featureFlagTwo, featureFlagThree, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { fullSettings } from '../../../utils/settingsValidation/__tests__/settings.mocks'; import { storages, PREFIX } from './wrapper.mock'; @@ -115,7 +115,7 @@ describe.each(storages)('SPLITS CACHE', (storage) => { // kill an existent split updated = cache.killLocally(something.name, 'some_treatment', 100); - let lol1Split = cache.getSplit(something.name) as ISplit; + let lol1Split = cache.getSplit(something.name) as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -125,7 +125,7 @@ describe.each(storages)('SPLITS CACHE', (storage) => { // not update if changeNumber is old updated = cache.killLocally(something.name, 'some_treatment_2', 90); - lol1Split = cache.getSplit(something.name) as ISplit; + lol1Split = cache.getSplit(something.name) as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older diff --git a/src/storages/inMemory/SplitsCacheInMemory.ts b/src/storages/inMemory/SplitsCacheInMemory.ts index 461d15e6..7b70ccc6 100644 --- a/src/storages/inMemory/SplitsCacheInMemory.ts +++ b/src/storages/inMemory/SplitsCacheInMemory.ts @@ -1,4 +1,4 @@ -import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; +import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; import { 
AbstractSplitsCacheSync, usesSegments } from '../AbstractSplitsCacheSync'; import { isFiniteNumber } from '../../utils/lang'; @@ -8,7 +8,7 @@ import { isFiniteNumber } from '../../utils/lang'; export class SplitsCacheInMemory extends AbstractSplitsCacheSync { private flagSetsFilter: string[]; - private splitsCache: Record = {}; + private splitsCache: Record = {}; private ttCache: Record = {}; private changeNumber: number = -1; private segmentsCount: number = 0; @@ -27,7 +27,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { this.flagSetsCache = {}; } - addSplit(split: ISplit): boolean { + addSplit(split: IDefinition): boolean { const name = split.name; const previousSplit = this.getSplit(name); if (previousSplit) { // We had this Split already @@ -73,7 +73,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { return true; } - getSplit(name: string): ISplit | null { + getSplit(name: string): IDefinition | null { return this.splitsCache[name] || null; } @@ -102,7 +102,7 @@ export class SplitsCacheInMemory extends AbstractSplitsCacheSync { return flagSets.map(flagSet => this.flagSetsCache[flagSet] || new Set()); } - private addToFlagSets(featureFlag: ISplit) { + private addToFlagSets(featureFlag: IDefinition) { if (!featureFlag.sets) return; featureFlag.sets.forEach(featureFlagSet => { diff --git a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts index 2ed4478b..f755a295 100644 --- a/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts +++ b/src/storages/inMemory/__tests__/SplitsCacheInMemory.spec.ts @@ -1,5 +1,5 @@ import { SplitsCacheInMemory } from '../SplitsCacheInMemory'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { splitWithUserTT, splitWithAccountTT, something, somethingElse, featureFlagWithEmptyFS, featureFlagWithoutFS, featureFlagOne, featureFlagTwo, featureFlagThree } from 
'../../__tests__/testUtils'; test('SPLITS CACHE / In Memory', () => { @@ -106,7 +106,7 @@ test('SPLITS CACHE / In Memory / killLocally', () => { // kill an existent split updated = cache.killLocally(something.name, 'some_treatment', 100); - let lol1Split = cache.getSplit(something.name) as ISplit; + let lol1Split = cache.getSplit(something.name) as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -116,7 +116,7 @@ test('SPLITS CACHE / In Memory / killLocally', () => { // not update if changeNumber is old updated = cache.killLocally(something.name, 'some_treatment_2', 90); - lol1Split = cache.getSplit(something.name) as ISplit; + lol1Split = cache.getSplit(something.name) as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older diff --git a/src/storages/inRedis/SplitsCacheInRedis.ts b/src/storages/inRedis/SplitsCacheInRedis.ts index 7258a770..539cdc80 100644 --- a/src/storages/inRedis/SplitsCacheInRedis.ts +++ b/src/storages/inRedis/SplitsCacheInRedis.ts @@ -2,7 +2,7 @@ import { isFiniteNumber, isNaNNumber } from '../../utils/lang'; import { KeyBuilderSS } from '../KeyBuilderSS'; import { ILogger } from '../../logger/types'; import { LOG_PREFIX } from './constants'; -import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; +import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; import { AbstractSplitsCacheAsync } from '../AbstractSplitsCacheAsync'; import { returnDifference } from '../../utils/lang/sets'; import type { RedisAdapter } from './RedisAdapter'; @@ -47,14 +47,14 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { }); } - private _decrementCounts(split: ISplit) { + private _decrementCounts(split: IDefinition) { const 
ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); return this.redis.decr(ttKey).then((count: number) => { if (count === 0) return this.redis.del(ttKey); }); } - private _incrementCounts(split: ISplit) { + private _incrementCounts(split: IDefinition) { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); return this.redis.incr(ttKey); } @@ -82,13 +82,13 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * The returned promise is resolved when the operation success * or rejected if it fails (e.g., redis operation fails) */ - addSplit(split: ISplit): Promise { + addSplit(split: IDefinition): Promise { const name = split.name; const splitKey = this.keys.buildSplitKey(name); return this.redis.get(splitKey).then((splitFromStorage: string | null) => { // handling parsing error - let parsedPreviousSplit: ISplit, stringifiedNewSplit; + let parsedPreviousSplit: IDefinition, stringifiedNewSplit; try { parsedPreviousSplit = splitFromStorage ? JSON.parse(splitFromStorage) : undefined; stringifiedNewSplit = JSON.stringify(split); @@ -127,7 +127,7 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * Get split definition or null if it's not defined. * Returned promise is rejected if redis operation fails. */ - getSplit(name: string): Promise { + getSplit(name: string): Promise { if (this.redisError) { this.log.error(LOG_PREFIX + this.redisError); @@ -171,7 +171,7 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * or rejected if redis operation fails. */ // @TODO we need to benchmark which is the maximun number of commands we could pipeline without kill redis performance. 
- getAll(): Promise { + getAll(): Promise { return this.redis.keys(this.keys.searchPatternForSplitKeys()) .then((listOfKeys: string[]) => this.redis.pipeline(listOfKeys.map((k: string) => ['get', k])).exec()) .then(processPipelineAnswer) @@ -242,7 +242,7 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { * Fetches multiple splits definitions. * Returned promise is rejected if redis operation fails. */ - getSplits(names: string[]): Promise> { + getSplits(names: string[]): Promise> { if (this.redisError) { this.log.error(LOG_PREFIX + this.redisError); @@ -252,7 +252,7 @@ export class SplitsCacheInRedis extends AbstractSplitsCacheAsync { const keys = names.map(name => this.keys.buildSplitKey(name)); return this.redis.mget(...keys) .then((splitDefinitions: (string | null)[]) => { - const splits: Record = {}; + const splits: Record = {}; names.forEach((name, idx) => { const split = splitDefinitions[idx]; splits[name] = split && JSON.parse(split); diff --git a/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts b/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts index 0cbc8914..d42143ff 100644 --- a/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts +++ b/src/storages/inRedis/__tests__/SplitsCacheInRedis.spec.ts @@ -2,7 +2,7 @@ import { SplitsCacheInRedis } from '../SplitsCacheInRedis'; import { KeyBuilderSS } from '../../KeyBuilderSS'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { splitWithUserTT, splitWithAccountTT, featureFlagOne, featureFlagThree, featureFlagTwo, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { metadata } from '../../__tests__/KeyBuilder.spec'; import { RedisAdapter } from '../RedisAdapter'; @@ -122,7 +122,7 @@ describe('SPLITS CACHE REDIS', () => { // kill an existent split updated = await cache.killLocally('user_ff', 'some_treatment', 
100); - let lol1Split = await cache.getSplit('user_ff') as ISplit; + let lol1Split = await cache.getSplit('user_ff') as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -132,7 +132,7 @@ describe('SPLITS CACHE REDIS', () => { // not update if changeNumber is old updated = await cache.killLocally('user_ff', 'some_treatment_2', 90); - lol1Split = await cache.getSplit('user_ff') as ISplit; + lol1Split = await cache.getSplit('user_ff') as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older diff --git a/src/storages/pluggable/SplitsCachePluggable.ts b/src/storages/pluggable/SplitsCachePluggable.ts index 9b53f3a9..48eba081 100644 --- a/src/storages/pluggable/SplitsCachePluggable.ts +++ b/src/storages/pluggable/SplitsCachePluggable.ts @@ -2,7 +2,7 @@ import { isFiniteNumber, isNaNNumber } from '../../utils/lang'; import { KeyBuilder } from '../KeyBuilder'; import { IPluggableStorageWrapper } from '../types'; import { ILogger } from '../../logger/types'; -import { ISplit, ISplitFiltersValidation } from '../../dtos/types'; +import { IDefinition, ISplitFiltersValidation } from '../../dtos/types'; import { LOG_PREFIX } from './constants'; import { AbstractSplitsCacheAsync } from '../AbstractSplitsCacheAsync'; import { returnDifference } from '../../utils/lang/sets'; @@ -31,14 +31,14 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { this.flagSetsFilter = splitFiltersValidation ? 
splitFiltersValidation.groupedFilters.bySet : []; } - private _decrementCounts(split: ISplit) { + private _decrementCounts(split: IDefinition) { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); return this.wrapper.decr(ttKey).then(count => { if (count === 0) return this.wrapper.del(ttKey); }); } - private _incrementCounts(split: ISplit) { + private _incrementCounts(split: IDefinition) { const ttKey = this.keys.buildTrafficTypeKey(split.trafficTypeName); return this.wrapper.incr(ttKey); } @@ -66,13 +66,13 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { * The returned promise is resolved when the operation success * or rejected if it fails (e.g., wrapper operation fails) */ - addSplit(split: ISplit): Promise { + addSplit(split: IDefinition): Promise { const name = split.name; const splitKey = this.keys.buildSplitKey(name); return this.wrapper.get(splitKey).then(splitFromStorage => { // handling parsing error - let parsedPreviousSplit: ISplit, stringifiedNewSplit; + let parsedPreviousSplit: IDefinition, stringifiedNewSplit; try { parsedPreviousSplit = splitFromStorage ? JSON.parse(splitFromStorage) : undefined; stringifiedNewSplit = JSON.stringify(split); @@ -112,7 +112,7 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { * The returned promise is resolved with the split definition or null if it's not defined, * or rejected if wrapper operation fails. */ - getSplit(name: string): Promise { + getSplit(name: string): Promise { return this.wrapper.get(this.keys.buildSplitKey(name)) .then(maybeSplit => maybeSplit && JSON.parse(maybeSplit)); } @@ -122,11 +122,11 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { * The returned promise is resolved with a map of split names to their split definition or null if it's not defined, * or rejected if wrapper operation fails. 
*/ - getSplits(names: string[]): Promise> { + getSplits(names: string[]): Promise> { const keys = names.map(name => this.keys.buildSplitKey(name)); return this.wrapper.getMany(keys).then(splitDefinitions => { - const splits: Record = {}; + const splits: Record = {}; names.forEach((name, idx) => { const split = splitDefinitions[idx]; splits[name] = split && JSON.parse(split); @@ -140,7 +140,7 @@ export class SplitsCachePluggable extends AbstractSplitsCacheAsync { * The returned promise is resolved with the list of split definitions, * or rejected if wrapper operation fails. */ - getAll(): Promise { + getAll(): Promise { return this.wrapper.getKeysByPrefix(this.keys.buildSplitKeyPrefix()) .then((listOfKeys) => this.wrapper.getMany(listOfKeys)) .then((splitDefinitions) => splitDefinitions.map((splitDefinition) => { diff --git a/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts b/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts index 03d1ee6e..8f9c966c 100644 --- a/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts +++ b/src/storages/pluggable/__tests__/SplitsCachePluggable.spec.ts @@ -3,7 +3,7 @@ import { KeyBuilder } from '../../KeyBuilder'; import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; import { wrapperMockFactory } from './wrapper.mock'; import { splitWithUserTT, splitWithAccountTT, featureFlagOne, featureFlagThree, featureFlagTwo, featureFlagWithEmptyFS, featureFlagWithoutFS } from '../../__tests__/testUtils'; -import { ISplit } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; const keysBuilder = new KeyBuilder(); @@ -20,7 +20,7 @@ describe('SPLITS CACHE PLUGGABLE', () => { // Assert getSplits let valuesObj = await cache.getSplits([splitWithUserTT.name, splitWithAccountTT.name]); - expect(valuesObj).toEqual(values.reduce>((acc, split) => { + expect(valuesObj).toEqual(values.reduce>((acc, split) => { acc[split.name] = split; return acc; }, {})); @@ -114,7 +114,7 @@ 
describe('SPLITS CACHE PLUGGABLE', () => { // kill an existent split updated = await cache.killLocally('user_ff', 'some_treatment', 100); - let lol1Split = await cache.getSplit('user_ff') as ISplit; + let lol1Split = await cache.getSplit('user_ff') as IDefinition; expect(updated).toBe(true); // killLocally resolves with update if split is changed expect(lol1Split.killed).toBe(true); // existing split must be killed @@ -124,7 +124,7 @@ describe('SPLITS CACHE PLUGGABLE', () => { // not update if changeNumber is old updated = await cache.killLocally('user_ff', 'some_treatment_2', 90); - lol1Split = await cache.getSplit('user_ff') as ISplit; + lol1Split = await cache.getSplit('user_ff') as IDefinition; expect(updated).toBe(false); // killLocally resolves without update if changeNumber is old expect(lol1Split.defaultTreatment).not.toBe('some_treatment_2'); // existing split is not updated if given changeNumber is older diff --git a/src/storages/types.ts b/src/storages/types.ts index fea0cc2b..222106d7 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -1,5 +1,5 @@ import SplitIO from '../../types/splitio'; -import { MaybeThenable, ISplit, IRBSegment, IMySegmentsResponse, IMembershipsResponse, ISegmentChangesResponse, ISplitChangesResponse } from '../dtos/types'; +import { MaybeThenable, IDefinition, IRBSegment, IMySegmentsResponse, IMembershipsResponse, ISegmentChangesResponse, IDefinitionChangesResponse } from '../dtos/types'; import { MySegmentsData } from '../sync/polling/types'; import { EventDataType, HttpErrors, HttpLatencies, ImpressionDataType, LastSync, Method, MethodExceptions, MethodLatencies, MultiMethodExceptions, MultiMethodLatencies, MultiConfigs, OperationType, StoredEventWithMetadata, StoredImpressionWithMetadata, StreamingEvent, UniqueKeysPayloadCs, UniqueKeysPayloadSs, TelemetryUsageStatsPayload, UpdatesFromSSEEnum } from '../sync/submitters/types'; import { ISettings } from '../types'; @@ -194,12 +194,12 @@ export interface 
IPluggableStorageWrapper { /** Splits cache */ export interface ISplitsCacheBase { - update(toAdd: ISplit[], toRemove: ISplit[], changeNumber: number): MaybeThenable, - getSplit(name: string): MaybeThenable, - getSplits(names: string[]): MaybeThenable>, // `fetchMany` in spec + update(toAdd: IDefinition[], toRemove: IDefinition[], changeNumber: number): MaybeThenable, + getSplit(name: string): MaybeThenable, + getSplits(names: string[]): MaybeThenable>, // `fetchMany` in spec // should never reject or throw an exception. Instead return -1 by default, assuming no splits are present in the storage. getChangeNumber(): MaybeThenable, - getAll(): MaybeThenable, + getAll(): MaybeThenable, getSplitNames(): MaybeThenable, // should never reject or throw an exception. Instead return true by default, asssuming the TT might exist. trafficTypeExists(trafficType: string): MaybeThenable, @@ -211,11 +211,11 @@ export interface ISplitsCacheBase { } export interface ISplitsCacheSync extends ISplitsCacheBase { - update(toAdd: ISplit[], toRemove: ISplit[], changeNumber: number): boolean, - getSplit(name: string): ISplit | null, - getSplits(names: string[]): Record, + update(toAdd: IDefinition[], toRemove: IDefinition[], changeNumber: number): boolean, + getSplit(name: string): IDefinition | null, + getSplits(names: string[]): Record, getChangeNumber(): number, - getAll(): ISplit[], + getAll(): IDefinition[], getSplitNames(): string[], trafficTypeExists(trafficType: string): boolean, usesSegments(): boolean, @@ -225,11 +225,11 @@ export interface ISplitsCacheSync extends ISplitsCacheBase { } export interface ISplitsCacheAsync extends ISplitsCacheBase { - update(toAdd: ISplit[], toRemove: ISplit[], changeNumber: number): Promise, - getSplit(name: string): Promise, - getSplits(names: string[]): Promise>, + update(toAdd: IDefinition[], toRemove: IDefinition[], changeNumber: number): Promise, + getSplit(name: string): Promise, + getSplits(names: string[]): Promise>, getChangeNumber(): 
Promise, - getAll(): Promise, + getAll(): Promise, getSplitNames(): Promise, trafficTypeExists(trafficType: string): Promise, usesSegments(): Promise, @@ -544,7 +544,7 @@ export type RolloutPlan = { /** * Feature flags and rule-based segments. */ - splitChanges: ISplitChangesResponse; + splitChanges: IDefinitionChangesResponse; /** * Optional map of matching keys to their memberships. */ diff --git a/src/sync/__tests__/syncManagerOnline.spec.ts b/src/sync/__tests__/syncManagerOnline.spec.ts index c78b9215..902a0d93 100644 --- a/src/sync/__tests__/syncManagerOnline.spec.ts +++ b/src/sync/__tests__/syncManagerOnline.spec.ts @@ -10,6 +10,11 @@ jest.mock('../submitters/submitterManager', () => { }; }); +// Mocked splitApi +const splitApiMock = { + fetchSplitChanges: jest.fn() +} as any; + // Mocked storageManager const storageManagerMock = { splits: { @@ -49,6 +54,7 @@ test('syncManagerOnline should start or not the submitter depending on user cons const syncManager = syncManagerOnlineFactory()({ settings, // @ts-ignore storage: {}, + splitApi: splitApiMock, }); const submitterManager = syncManager.submitterManager!; @@ -101,6 +107,7 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', asy const syncManager = syncManagerOnlineFactory(() => pollingManagerMock, pushManagerFactoryMock)({ settings, // @ts-ignore storage: { validateCache: () => { return Promise.resolve({ initialCacheLoad: true, lastUpdateTimestamp: undefined }); } }, + splitApi: splitApiMock, }); expect(pushManagerFactoryMock).not.toBeCalled(); @@ -170,6 +177,7 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', asy const testSyncManager = syncManagerOnlineFactory(() => pollingManagerMock, pushManagerFactoryMock)({ settings, // @ts-ignore storage: { validateCache: () => Promise.resolve({ initialCacheLoad: true, lastUpdateTimestamp: undefined }) }, + splitApi: splitApiMock, }); expect(pushManagerFactoryMock).toBeCalled(); @@ -188,7 +196,8 @@ 
test('syncManagerOnline should emit SDK_SPLITS_CACHE_LOADED if validateCache ret const params = { settings: fullSettings, storage: { validateCache: () => Promise.resolve({ initialCacheLoad: false, lastUpdateTimestamp }) }, - readiness: { splits: { emit: jest.fn() } } + readiness: { splits: { emit: jest.fn() } }, + splitApi: splitApiMock, }; // @ts-ignore const syncManager = syncManagerOnlineFactory()(params); diff --git a/src/sync/offline/splitsParser/parseCondition.ts b/src/sync/offline/splitsParser/parseCondition.ts index a2223bec..ffe0863d 100644 --- a/src/sync/offline/splitsParser/parseCondition.ts +++ b/src/sync/offline/splitsParser/parseCondition.ts @@ -1,4 +1,4 @@ -import { ISplitCondition } from '../../../dtos/types'; +import { IDefinitionCondition } from '../../../dtos/types'; import { isString } from '../../../utils/lang'; export interface IMockSplitEntry { @@ -7,7 +7,7 @@ export interface IMockSplitEntry { config?: string } -export function parseCondition(data: IMockSplitEntry): ISplitCondition { +export function parseCondition(data: IMockSplitEntry): IDefinitionCondition { const treatment = data.treatment; if (data.keys) { diff --git a/src/sync/offline/splitsParser/splitsParserFromSettings.ts b/src/sync/offline/splitsParser/splitsParserFromSettings.ts index f242b26c..696a5ccb 100644 --- a/src/sync/offline/splitsParser/splitsParserFromSettings.ts +++ b/src/sync/offline/splitsParser/splitsParserFromSettings.ts @@ -1,4 +1,4 @@ -import { ISplitPartial } from '../../../dtos/types'; +import { IDefinitionPartial } from './types'; import SplitIO from '../../../../types/splitio'; import { isObject, forOwn, merge } from '../../../utils/lang'; import { parseCondition } from './parseCondition'; @@ -41,12 +41,12 @@ export function splitsParserFromSettingsFactory() { * * @param settings - validated object with mocked features mapping. 
*/ - return function splitsParserFromSettings(settings: Pick): false | Record { + return function splitsParserFromSettings(settings: Pick): false | Record { const features = settings.features as SplitIO.MockedFeaturesMap || {}; if (!mockUpdated(features)) return false; - const splitObjects: Record = {}; + const splitObjects: Record = {}; forOwn(features, (data, splitName) => { let treatment = data; diff --git a/src/sync/offline/splitsParser/types.ts b/src/sync/offline/splitsParser/types.ts index a5d93647..91fd084c 100644 --- a/src/sync/offline/splitsParser/types.ts +++ b/src/sync/offline/splitsParser/types.ts @@ -1,5 +1,8 @@ -import { ISplitPartial } from '../../../dtos/types'; +import { IDefinition } from '../../../dtos/types'; import { ISettings } from '../../../types'; -// Analog to `ISplitChangesFetcher` used by `splitChangesUpdaterFactory` -export type ISplitsParser = (settings: ISettings) => false | Record +// Split definition used in offline mode +export type IDefinitionPartial = Pick + +// Analog to `IDefinitionChangesFetcher` used by `definitionChangesUpdaterFactory` +export type IDefinitionsParser = (settings: ISettings) => false | Record diff --git a/src/sync/offline/syncManagerOffline.ts b/src/sync/offline/syncManagerOffline.ts index 31ac6dd0..60c6d6e8 100644 --- a/src/sync/offline/syncManagerOffline.ts +++ b/src/sync/offline/syncManagerOffline.ts @@ -1,7 +1,7 @@ import { ISyncManagerCS } from '../types'; import { fromObjectSyncTaskFactory } from './syncTasks/fromObjectSyncTask'; import { objectAssign } from '../../utils/lang/objectAssign'; -import { ISplitsParser } from './splitsParser/types'; +import { IDefinitionsParser } from './splitsParser/types'; import { IReadinessManager } from '../../readiness/types'; import { SDK_SEGMENTS_ARRIVED } from '../../readiness/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; @@ -17,7 +17,7 @@ function flush() { * @param splitsParser - e.g., `splitsParserFromFile`, 
`splitsParserFromSettings`. */ export function syncManagerOfflineFactory( - splitsParserFactory: () => ISplitsParser + splitsParserFactory: () => IDefinitionsParser ): (params: ISdkFactoryContextSync) => ISyncManagerCS { /** diff --git a/src/sync/offline/syncTasks/fromObjectSyncTask.ts b/src/sync/offline/syncTasks/fromObjectSyncTask.ts index cc2ffcb7..14f431ea 100644 --- a/src/sync/offline/syncTasks/fromObjectSyncTask.ts +++ b/src/sync/offline/syncTasks/fromObjectSyncTask.ts @@ -1,8 +1,8 @@ import { forOwn } from '../../../utils/lang'; import { IReadinessManager } from '../../../readiness/types'; import { IStorageSync } from '../../../storages/types'; -import { ISplitsParser } from '../splitsParser/types'; -import { ISplit, ISplitPartial } from '../../../dtos/types'; +import { IDefinitionsParser, IDefinitionPartial } from '../splitsParser/types'; +import { IDefinition } from '../../../dtos/types'; import { syncTaskFactory } from '../../syncTask'; import { ISyncTask } from '../../types'; import { ISettings } from '../../../types'; @@ -14,7 +14,7 @@ import { SYNC_OFFLINE_DATA, ERROR_SYNC_OFFLINE_LOADING } from '../../../logger/c * Offline equivalent of `splitChangesUpdaterFactory` */ export function fromObjectUpdaterFactory( - splitsParser: ISplitsParser, + splitsParser: IDefinitionsParser, storage: Pick, readiness: IReadinessManager, settings: ISettings, @@ -24,9 +24,9 @@ export function fromObjectUpdaterFactory( let startingUp = true; return function objectUpdater() { - const splits: ISplit[] = []; + const splits: IDefinition[] = []; let loadError = null; - let splitsMock: false | Record = {}; + let splitsMock: false | Record = {}; try { splitsMock = splitsParser(settings); } catch (err) { @@ -80,7 +80,7 @@ export function fromObjectUpdaterFactory( * PollingManager in Offline mode */ export function fromObjectSyncTaskFactory( - splitsParser: ISplitsParser, + splitsParser: IDefinitionsParser, storage: Pick, readiness: IReadinessManager, settings: ISettings diff 
--git a/src/sync/polling/fetchers/splitChangesFetcher.ts b/src/sync/polling/fetchers/splitChangesFetcher.ts index 7d133ba2..5f677a0a 100644 --- a/src/sync/polling/fetchers/splitChangesFetcher.ts +++ b/src/sync/polling/fetchers/splitChangesFetcher.ts @@ -1,12 +1,12 @@ import { ISettings } from '../../../types'; -import { ISplitChangesResponse } from '../../../dtos/types'; -import { IFetchSplitChanges, IResponse } from '../../../services/types'; -import { IStorageBase } from '../../../storages/types'; +import { IDefinitionChangesResponse } from '../../../dtos/types'; +import { IResponse } from '../../../services/types'; import { FLAG_SPEC_VERSION } from '../../../utils/constants'; import { base } from '../../../utils/settingsValidation'; -import { ISplitChangesFetcher } from './types'; -import { LOG_PREFIX_SYNC_SPLITS } from '../../../logger/constants'; +import { IDefinitionChangesFetcher } from './types'; +import { LOG_PREFIX_SYNC } from '../../../logger/constants'; import { checkIfServerSide } from '../../../utils/key'; +import { ISdkFactoryContextSync } from '../../../sdkFactory/types'; const PROXY_CHECK_INTERVAL_MILLIS_CS = 60 * 60 * 1000; // 1 hour in Client Side const PROXY_CHECK_INTERVAL_MILLIS_SS = 24 * PROXY_CHECK_INTERVAL_MILLIS_CS; // 24 hours in Server Side @@ -20,20 +20,21 @@ function sdkEndpointOverridden(settings: ISettings) { * SplitChanges fetcher is a wrapper around `splitChanges` API service that parses the response and handle errors. */ // @TODO breaking: drop support for Split Proxy below v5.10.0 and simplify the implementation -export function splitChangesFetcherFactory(fetchSplitChanges: IFetchSplitChanges, settings: ISettings, storage: Pick): ISplitChangesFetcher { +export function splitChangesFetcherFactory(params: ISdkFactoryContextSync): IDefinitionChangesFetcher { + const { splitApi: { fetchSplitChanges }, settings, storage } = params; const log = settings.log; const PROXY_CHECK_INTERVAL_MILLIS = checkIfServerSide(settings) ? 
PROXY_CHECK_INTERVAL_MILLIS_SS : PROXY_CHECK_INTERVAL_MILLIS_CS; let lastProxyCheckTimestamp: number | undefined; - return function splitChangesFetcher( + function splitChangesFetcher( since: number, noCache?: boolean, till?: number, rbSince?: number, // Optional decorator for `fetchSplitChanges` promise, such as timeout or time tracker decorator?: (promise: Promise) => Promise - ): Promise { + ): Promise { // Recheck proxy if (lastProxyCheckTimestamp && (Date.now() - lastProxyCheckTimestamp) > PROXY_CHECK_INTERVAL_MILLIS) { @@ -44,7 +45,7 @@ export function splitChangesFetcherFactory(fetchSplitChanges: IFetchSplitChanges .catch((err) => { // Handle proxy error with spec 1.3 if ((!err.statusCode || err.statusCode === 400) && sdkEndpointOverridden(settings) && settings.sync.flagSpecVersion === FLAG_SPEC_VERSION) { - log.error(LOG_PREFIX_SYNC_SPLITS + 'Proxy error detected. Retrying with spec 1.2. If you are using Split Proxy, please upgrade to latest version'); + log.error(LOG_PREFIX_SYNC + 'Proxy error detected. Retrying with spec 1.2. 
If you are using Split Proxy, please upgrade to latest version'); lastProxyCheckTimestamp = Date.now(); settings.sync.flagSpecVersion = '1.2'; // fallback to 1.2 spec return fetchSplitChanges(since, noCache, till); // retry request without rbSince @@ -70,10 +71,10 @@ export function splitChangesFetcherFactory(fetchSplitChanges: IFetchSplitChanges // Proxy recovery if (lastProxyCheckTimestamp) { - log.info(LOG_PREFIX_SYNC_SPLITS + 'Proxy error recovered'); + log.info(LOG_PREFIX_SYNC + 'Proxy error recovered'); lastProxyCheckTimestamp = undefined; return splitChangesFetcher(-1, undefined, undefined, -1) - .then((splitChangesResponse: ISplitChangesResponse) => + .then((splitChangesResponse: IDefinitionChangesResponse) => Promise.all([storage.splits.clear(), storage.rbSegments.clear()]) .then(() => splitChangesResponse) ); @@ -81,6 +82,8 @@ export function splitChangesFetcherFactory(fetchSplitChanges: IFetchSplitChanges return data; }); - }; + } + splitChangesFetcher.type = 'feature flags' as const; + return splitChangesFetcher; } diff --git a/src/sync/polling/fetchers/types.ts b/src/sync/polling/fetchers/types.ts index 8fe922ce..86c92f0f 100644 --- a/src/sync/polling/fetchers/types.ts +++ b/src/sync/polling/fetchers/types.ts @@ -1,13 +1,15 @@ -import { ISplitChangesResponse, ISegmentChangesResponse, IMembershipsResponse } from '../../../dtos/types'; +import { IDefinitionChangesResponse, ISegmentChangesResponse, IMembershipsResponse } from '../../../dtos/types'; import { IResponse } from '../../../services/types'; -export type ISplitChangesFetcher = ( +export type IDefinitionChangesFetcher = (( since: number, noCache?: boolean, till?: number, rbSince?: number, decorator?: (promise: Promise) => Promise -) => Promise +) => Promise) & { + type: 'configs' | 'feature flags' +} export type ISegmentChangesFetcher = ( since: number, diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 5c1169d3..05895aac 100644 --- 
a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -3,26 +3,28 @@ import { forOwn } from '../../utils/lang'; import { IReadinessManager } from '../../readiness/types'; import { IStorageSync } from '../../storages/types'; import { mySegmentsSyncTaskFactory } from './syncTasks/mySegmentsSyncTask'; -import { splitsSyncTaskFactory } from './syncTasks/splitsSyncTask'; +import { definitionsSyncTaskFactory } from './syncTasks/definitionsSyncTask'; import { getMatching } from '../../utils/key'; import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../readiness/constants'; import { POLLING_SMART_PAUSING, POLLING_START, POLLING_STOP } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; import { usesSegmentsSync } from '../../storages/AbstractSplitsCacheSync'; import { SdkUpdateMetadata } from '../../../types/splitio'; +import { IDefinitionChangesFetcher } from './fetchers/types'; /** * Expose start / stop mechanism for polling data from services. * For client-side API with multiple clients. */ export function pollingManagerCSFactory( - params: ISdkFactoryContextSync + params: ISdkFactoryContextSync, + definitionChangesFetcher: IDefinitionChangesFetcher ): IPollingManagerCS { const { splitApi, storage, readiness, settings } = params; const log = settings.log; - const splitsSyncTask = splitsSyncTaskFactory(splitApi.fetchSplitChanges, storage, readiness, settings, true); + const definitionsSyncTask = definitionsSyncTaskFactory(definitionChangesFetcher, storage, readiness, settings, true); // Map of matching keys to their corresponding MySegmentsSyncTask. 
const mySegmentsSyncTasks: Record = {}; @@ -44,7 +46,7 @@ export function pollingManagerCSFactory( // smart pausing readiness.splits.on(SDK_SPLITS_ARRIVED, () => { - if (!splitsSyncTask.isRunning()) return; // noop if not doing polling + if (!definitionsSyncTask.isRunning()) return; // noop if not doing polling const usingSegments = usesSegmentsSync(storage); if (usingSegments !== mySegmentsSyncTask.isRunning()) { log.info(POLLING_SMART_PAUSING, [usingSegments ? 'ON' : 'OFF']); @@ -71,14 +73,14 @@ export function pollingManagerCSFactory( } return { - splitsSyncTask, + definitionsSyncTask, segmentsSyncTask: mySegmentsSyncTask, // Start periodic fetching (polling) start() { log.info(POLLING_START); - splitsSyncTask.start(); + definitionsSyncTask.start(); if (usesSegmentsSync(storage)) startMySegmentsSyncTasks(); }, @@ -86,16 +88,16 @@ export function pollingManagerCSFactory( stop() { log.info(POLLING_STOP); - if (splitsSyncTask.isRunning()) splitsSyncTask.stop(); + if (definitionsSyncTask.isRunning()) definitionsSyncTask.stop(); stopMySegmentsSyncTasks(); }, // Used by SyncManager to know if running in polling mode. 
- isRunning: splitsSyncTask.isRunning, + isRunning: definitionsSyncTask.isRunning, - // fetch splits and segments + // fetch definitions and segments syncAll() { - const promises = [splitsSyncTask.execute()]; + const promises = [definitionsSyncTask.execute()]; forOwn(mySegmentsSyncTasks, (mySegmentsSyncTask) => { promises.push(mySegmentsSyncTask.execute()); }); diff --git a/src/sync/polling/pollingManagerSS.ts b/src/sync/polling/pollingManagerSS.ts index cea57dfe..028162ca 100644 --- a/src/sync/polling/pollingManagerSS.ts +++ b/src/sync/polling/pollingManagerSS.ts @@ -1,36 +1,38 @@ -import { splitsSyncTaskFactory } from './syncTasks/splitsSyncTask'; +import { definitionsSyncTaskFactory } from './syncTasks/definitionsSyncTask'; import { segmentsSyncTaskFactory } from './syncTasks/segmentsSyncTask'; -import { IPollingManager, ISegmentsSyncTask, ISplitsSyncTask } from './types'; +import { IPollingManager, ISegmentsSyncTask, IDefinitionsSyncTask } from './types'; import { POLLING_START, POLLING_STOP, LOG_PREFIX_SYNC_POLLING } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; +import { IDefinitionChangesFetcher } from './fetchers/types'; /** * Expose start / stop mechanism for pulling data from services. 
*/ export function pollingManagerSSFactory( - params: ISdkFactoryContextSync + params: ISdkFactoryContextSync, + definitionChangesFetcher: IDefinitionChangesFetcher ): IPollingManager { const { splitApi, storage, readiness, settings } = params; const log = settings.log; - const splitsSyncTask: ISplitsSyncTask = splitsSyncTaskFactory(splitApi.fetchSplitChanges, storage, readiness, settings); + const definitionsSyncTask: IDefinitionsSyncTask = definitionsSyncTaskFactory(definitionChangesFetcher, storage, readiness, settings); const segmentsSyncTask: ISegmentsSyncTask = segmentsSyncTaskFactory(splitApi.fetchSegmentChanges, storage, readiness, settings); return { - splitsSyncTask, + definitionsSyncTask, segmentsSyncTask, // Start periodic fetching (polling) start() { log.info(POLLING_START); - log.debug(LOG_PREFIX_SYNC_POLLING + `Splits will be refreshed each ${settings.scheduler.featuresRefreshRate} millis`); - log.debug(LOG_PREFIX_SYNC_POLLING + `Segments will be refreshed each ${settings.scheduler.segmentsRefreshRate} millis`); + log.debug(LOG_PREFIX_SYNC_POLLING + `${definitionChangesFetcher.type} will be refreshed each ${settings.scheduler.featuresRefreshRate} millis`); + log.debug(LOG_PREFIX_SYNC_POLLING + `segments will be refreshed each ${settings.scheduler.segmentsRefreshRate} millis`); - const startingUp = splitsSyncTask.start(); + const startingUp = definitionsSyncTask.start(); if (startingUp) { startingUp.then(() => { - if (splitsSyncTask.isRunning()) segmentsSyncTask.start(); + if (definitionsSyncTask.isRunning()) segmentsSyncTask.start(); }); } }, @@ -39,16 +41,16 @@ export function pollingManagerSSFactory( stop() { log.info(POLLING_STOP); - if (splitsSyncTask.isRunning()) splitsSyncTask.stop(); + if (definitionsSyncTask.isRunning()) definitionsSyncTask.stop(); if (segmentsSyncTask.isRunning()) segmentsSyncTask.stop(); }, // Used by SyncManager to know if running in polling mode. 
- isRunning: splitsSyncTask.isRunning, + isRunning: definitionsSyncTask.isRunning, syncAll() { - // fetch splits and segments. There is no need to catch this promise (`SplitChangesUpdater` is always resolved with a boolean value) - return splitsSyncTask.execute().then(() => { + // fetch definitions and segments. There is no need to catch this promise (`DefinitionChangesUpdater` is always resolved with a boolean value) + return definitionsSyncTask.execute().then(() => { return segmentsSyncTask.execute(); }); } diff --git a/src/sync/polling/syncTasks/splitsSyncTask.ts b/src/sync/polling/syncTasks/definitionsSyncTask.ts similarity index 53% rename from src/sync/polling/syncTasks/splitsSyncTask.ts rename to src/sync/polling/syncTasks/definitionsSyncTask.ts index d385bf77..6e107211 100644 --- a/src/sync/polling/syncTasks/splitsSyncTask.ts +++ b/src/sync/polling/syncTasks/definitionsSyncTask.ts @@ -1,27 +1,26 @@ import { IStorageSync } from '../../../storages/types'; import { IReadinessManager } from '../../../readiness/types'; import { syncTaskFactory } from '../../syncTask'; -import { ISplitsSyncTask } from '../types'; -import { splitChangesFetcherFactory } from '../fetchers/splitChangesFetcher'; -import { IFetchSplitChanges } from '../../../services/types'; +import { IDefinitionsSyncTask } from '../types'; import { ISettings } from '../../../types'; -import { splitChangesUpdaterFactory } from '../updaters/splitChangesUpdater'; +import { definitionChangesUpdaterFactory } from '../updaters/definitionChangesUpdater'; +import { IDefinitionChangesFetcher } from '../fetchers/types'; /** - * Creates a sync task that periodically executes a `splitChangesUpdater` task + * Creates a sync task that periodically executes a `definitionChangesUpdater` task */ -export function splitsSyncTaskFactory( - fetchSplitChanges: IFetchSplitChanges, +export function definitionsSyncTaskFactory( + definitionChangesFetcher: IDefinitionChangesFetcher, storage: IStorageSync, readiness: 
IReadinessManager, settings: ISettings, isClientSide?: boolean -): ISplitsSyncTask { +): IDefinitionsSyncTask { return syncTaskFactory( settings.log, - splitChangesUpdaterFactory( + definitionChangesUpdaterFactory( settings.log, - splitChangesFetcherFactory(fetchSplitChanges, settings, storage), + definitionChangesFetcher, storage, settings.sync.__splitFiltersValidation, readiness.splits, @@ -30,6 +29,6 @@ export function splitsSyncTaskFactory( isClientSide ), settings.scheduler.featuresRefreshRate, - 'splitChangesUpdater', + 'definitionChangesUpdater', ); } diff --git a/src/sync/polling/types.ts b/src/sync/polling/types.ts index 4ff29c83..2f8e0f0a 100644 --- a/src/sync/polling/types.ts +++ b/src/sync/polling/types.ts @@ -1,10 +1,10 @@ -import { IRBSegment, ISplit } from '../../dtos/types'; +import { IRBSegment, IDefinition } from '../../dtos/types'; import { IReadinessManager } from '../../readiness/types'; import { IStorageSync } from '../../storages/types'; import { MEMBERSHIPS_LS_UPDATE, MEMBERSHIPS_MS_UPDATE } from '../streaming/types'; import { ITask, ISyncTask } from '../types'; -export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: number, splitUpdateNotification?: { payload: ISplit | IRBSegment, changeNumber: number }], boolean> { } +export interface IDefinitionsSyncTask extends ISyncTask<[noCache?: boolean, till?: number, splitUpdateNotification?: { payload: IDefinition | IRBSegment, changeNumber: number }], boolean> { } export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> { } @@ -19,7 +19,7 @@ export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegment export interface IPollingManager extends ITask { syncAll(): Promise - splitsSyncTask: ISplitsSyncTask + definitionsSyncTask: IDefinitionsSyncTask segmentsSyncTask: ISyncTask } diff --git a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts 
b/src/sync/polling/updaters/__tests__/definitionChangesUpdater.spec.ts similarity index 78% rename from src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts rename to src/sync/polling/updaters/__tests__/definitionChangesUpdater.spec.ts index 5398e06b..757458f5 100644 --- a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts +++ b/src/sync/polling/updaters/__tests__/definitionChangesUpdater.spec.ts @@ -1,10 +1,10 @@ -import { IRBSegment, ISplit } from '../../../../dtos/types'; +import { IRBSegment, IDefinition } from '../../../../dtos/types'; import { readinessManagerFactory } from '../../../../readiness/readinessManager'; import { splitApiFactory } from '../../../../services/splitApi'; import { SegmentsCacheInMemory } from '../../../../storages/inMemory/SegmentsCacheInMemory'; import { SplitsCacheInMemory } from '../../../../storages/inMemory/SplitsCacheInMemory'; import { splitChangesFetcherFactory } from '../../fetchers/splitChangesFetcher'; -import { splitChangesUpdaterFactory, parseSegments, computeMutation } from '../splitChangesUpdater'; +import { definitionChangesUpdaterFactory, parseSegments, computeMutation } from '../definitionChangesUpdater'; import splitChangesMock1 from '../../../../__tests__/mocks/splitchanges.since.-1.json'; import fetchMock from '../../../../__tests__/testUtils/fetchMock'; import { fullSettings, settingsSplitApi } from '../../../../utils/settingsValidation/__tests__/settings.mocks'; @@ -45,7 +45,7 @@ const archivedSplit = { status: 'ARCHIVED' }; // @ts-ignore -const testFFSetsAB: ISplit = +const testFFSetsAB: IDefinition = { name: 'test', status: 'ACTIVE', @@ -54,7 +54,7 @@ const testFFSetsAB: ISplit = sets: ['set_a', 'set_b'] }; // @ts-ignore -const test2FFSetsX: ISplit = +const test2FFSetsX: IDefinition = { name: 'test2', status: 'ACTIVE', @@ -63,7 +63,7 @@ const test2FFSetsX: ISplit = sets: ['set_x'] }; // @ts-ignore -const testFFRemoveSetB: ISplit = +const testFFRemoveSetB: IDefinition = { name: 'test', 
status: 'ACTIVE', @@ -71,7 +71,7 @@ const testFFRemoveSetB: ISplit = sets: ['set_a'] }; // @ts-ignore -const testFFRemoveSetA: ISplit = +const testFFRemoveSetA: IDefinition = { name: 'test', status: 'ACTIVE', @@ -79,7 +79,7 @@ const testFFRemoveSetA: ISplit = sets: ['set_x'] }; // @ts-ignore -const testFFEmptySet: ISplit = +const testFFEmptySet: IDefinition = { name: 'test', status: 'ACTIVE', @@ -102,8 +102,8 @@ const rbsWithExcludedSegment: IRBSegment = { } }; -test('splitChangesUpdater / segments parser', () => { - let segments = parseSegments(activeSplitWithSegments as ISplit); +test('definitionChangesUpdater / segments parser', () => { + let segments = parseSegments(activeSplitWithSegments as IDefinition); expect(segments).toEqual(new Set(['A', 'B'])); segments = parseSegments(rbsWithExcludedSegment); @@ -113,11 +113,11 @@ test('splitChangesUpdater / segments parser', () => { expect(segments).toEqual(new Set(['D'])); }); -test('splitChangesUpdater / compute splits mutation', () => { +test('definitionChangesUpdater / compute splits mutation', () => { const splitFiltersValidation = { queryString: null, groupedFilters: { bySet: [], byName: [], byPrefix: [] }, validFilters: [] }; let segments = new Set(); - let splitsMutation = computeMutation([activeSplitWithSegments, archivedSplit] as ISplit[], segments, splitFiltersValidation); + let splitsMutation = computeMutation([activeSplitWithSegments, archivedSplit] as IDefinition[], segments, splitFiltersValidation); expect(splitsMutation.added).toEqual([activeSplitWithSegments]); expect(splitsMutation.removed).toEqual([archivedSplit]); @@ -127,7 +127,7 @@ test('splitChangesUpdater / compute splits mutation', () => { // SDK initialization without sets // should process all the notifications segments = new Set(); - splitsMutation = computeMutation([testFFSetsAB, test2FFSetsX] as ISplit[], segments, splitFiltersValidation); + splitsMutation = computeMutation([testFFSetsAB, test2FFSetsX] as IDefinition[], segments, 
splitFiltersValidation); expect(splitsMutation.added).toEqual([testFFSetsAB, test2FFSetsX]); expect(splitsMutation.removed).toEqual([]); @@ -135,7 +135,7 @@ test('splitChangesUpdater / compute splits mutation', () => { expect(Array.from(segments)).toEqual([]); }); -test('splitChangesUpdater / compute splits mutation with filters', () => { +test('definitionChangesUpdater / compute splits mutation with filters', () => { // SDK initialization with sets: [set_a, set_b] let splitFiltersValidation = { queryString: '&sets=set_a,set_b', groupedFilters: { bySet: ['set_a', 'set_b'], byName: ['name_1'], byPrefix: [] }, validFilters: [] }; @@ -183,7 +183,7 @@ test('splitChangesUpdater / compute splits mutation with filters', () => { expect(splitsMutation.names).toEqual([test2FFSetsX.name, testFFEmptySet.name]); }); -describe('splitChangesUpdater', () => { +describe('definitionChangesUpdater', () => { const splits = new SplitsCacheInMemory(); const updateSplits = jest.spyOn(splits, 'update'); @@ -197,22 +197,22 @@ describe('splitChangesUpdater', () => { fetchMock.once('*', { status: 200, body: splitChangesMock1 }); // @ts-ignore const splitApi = splitApiFactory(settingsSplitApi, { getFetch: () => fetchMock }, telemetryTrackerFactory()); - const fetchSplitChanges = jest.spyOn(splitApi, 'fetchSplitChanges'); - const splitChangesFetcher = splitChangesFetcherFactory(splitApi.fetchSplitChanges, fullSettings, storage); + const fetchSplitChanges = jest.spyOn(splitApi, 'fetchSplitChanges'); // @ts-ignore + const splitChangesFetcher = splitChangesFetcherFactory({ splitApi, settings: fullSettings, storage }); const readinessManager = readinessManagerFactory(EventEmitter, fullSettings); const splitsEmitSpy = jest.spyOn(readinessManager.splits, 'emit'); let splitFiltersValidation = { queryString: null, groupedFilters: { bySet: [], byName: [], byPrefix: [] }, validFilters: [] }; - let splitChangesUpdater = splitChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, 
splitFiltersValidation, readinessManager.splits, 1000, 1); + let definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1); afterEach(() => { jest.clearAllMocks(); }); test('test without payload', async () => { - const result = await splitChangesUpdater(); + const result = await definitionChangesUpdater(); const updatedFlags = splitChangesMock1.ff.d.map(ff => ff.name); expect(fetchSplitChanges).toBeCalledTimes(1); @@ -228,10 +228,10 @@ describe('splitChangesUpdater', () => { test('test with ff payload', async () => { let index = 0; for (const notification of splitNotifications) { - const payload = notification.decoded as Pick; + const payload = notification.decoded as Pick; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); // fetch and RBSegments.update not being called expect(fetchSplitChanges).toBeCalledTimes(0); @@ -255,7 +255,7 @@ describe('splitChangesUpdater', () => { const payload = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); // fetch and Splits.update not being called expect(fetchSplitChanges).toBeCalledTimes(0); @@ -269,7 +269,7 @@ describe('splitChangesUpdater', () => { }); test('flag sets splits-arrived emission', async () => { - const payload = splitNotifications[3].decoded as Pick; + 
const payload = splitNotifications[3].decoded as Pick; const setMocks = [ { sets: [], shouldEmit: false }, /* should not emit if flag does not have any set */ { sets: ['set_a'], shouldEmit: true }, /* should emit if flag is in configured sets */ @@ -279,13 +279,13 @@ describe('splitChangesUpdater', () => { { sets: ['set_a'], shouldEmit: true }, /* should emit if flag is back in configured sets */ ]; - splitChangesUpdater = splitChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); + definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); let index = 0; let calls = 0; // emit always if not configured sets for (const setMock of setMocks) { - await expect(splitChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); expect(splitsEmitSpy.mock.calls[index][0]).toBe(SDK_SPLITS_ARRIVED); expect(splitsEmitSpy.mock.calls[index][1]).toEqual({ type: FLAGS_UPDATE, names: [payload.name] }); index++; @@ -294,11 +294,11 @@ describe('splitChangesUpdater', () => { // @ts-ignore splitFiltersValidation = { queryString: null, groupedFilters: { bySet: ['set_a'], byName: [], byPrefix: [] }, validFilters: [] }; storage.splits.clear(); - splitChangesUpdater = splitChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); + definitionChangesUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); splitsEmitSpy.mockReset(); index = 0; for (const setMock 
of setMocks) { - await expect(splitChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); if (setMock.shouldEmit) calls++; expect(splitsEmitSpy.mock.calls.length).toBe(calls); index++; @@ -312,10 +312,10 @@ describe('splitChangesUpdater', () => { readinessManager.splits.splitsArrived = false; storage.splits.clear(); - const payload = splitNotifications[0].decoded as Pick; + const payload = splitNotifications[0].decoded as Pick; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: [payload.name] }); }); @@ -328,12 +328,12 @@ describe('splitChangesUpdater', () => { readinessManager.splits.splitsArrived = false; readinessManager.segments.segmentsArrived = true; // Segments ready - const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 100, conditions: [] } as unknown as ISplit; - const flag2 = { name: 'flag2', status: 'ACTIVE', changeNumber: 101, conditions: [] } as unknown as ISplit; - const flag3 = { name: 'flag3', status: 'ACTIVE', changeNumber: 102, conditions: [] } as unknown as ISplit; + const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 100, conditions: [] } as unknown as IDefinition; + const flag2 = { name: 'flag2', status: 'ACTIVE', changeNumber: 101, conditions: [] } as unknown as IDefinition; + const flag3 = { name: 'flag3', status: 'ACTIVE', changeNumber: 102, conditions: [] } as 
unknown as IDefinition; fetchMock.once('*', { status: 200, body: { ff: { d: [flag1, flag2, flag3], t: 102 } } }); - await splitChangesUpdater(); + await definitionChangesUpdater(); // Should emit with metadata when splitsArrived is false (first update) expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: ['flag1', 'flag2', 'flag3'] }); @@ -347,12 +347,12 @@ describe('splitChangesUpdater', () => { readinessManager.splits.splitsArrived = false; readinessManager.segments.segmentsArrived = true; // Segments ready - const archivedFlag = { name: 'archived-flag', status: ARCHIVED_FF, changeNumber: 200, conditions: [] } as unknown as ISplit; + const archivedFlag = { name: 'archived-flag', status: ARCHIVED_FF, changeNumber: 200, conditions: [] } as unknown as IDefinition; - const payload = archivedFlag as Pick; + const payload = archivedFlag as Pick; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); // Should emit with metadata when splitsArrived is false (first update) expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: [payload.name] }); @@ -366,7 +366,7 @@ describe('splitChangesUpdater', () => { const payload = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; const changeNumber = payload.changeNumber; - await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); + await expect(definitionChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); // Should emit SEGMENTS_UPDATE (not FLAGS_UPDATE) when only RB segment is updated 
expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); @@ -381,7 +381,7 @@ describe('splitChangesUpdater', () => { // Simulate a scenario where only RB segments are updated (no flags) const rbSegment = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; fetchMock.once('*', { status: 200, body: { rbs: { d: [rbSegment], t: 1684329854385 } } }); - await splitChangesUpdater(); + await definitionChangesUpdater(); // When updatedFlags.length === 0, should emit SEGMENTS_UPDATE expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); @@ -395,12 +395,12 @@ describe('splitChangesUpdater', () => { storage.segments.clear(); // Simulate a scenario where both flags and RB segments are updated - const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 400, conditions: [] } as unknown as ISplit; - const flag2 = { name: 'flag2', status: 'ACTIVE', changeNumber: 401, conditions: [] } as unknown as ISplit; + const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 400, conditions: [] } as unknown as IDefinition; + const flag2 = { name: 'flag2', status: 'ACTIVE', changeNumber: 401, conditions: [] } as unknown as IDefinition; const rbSegment = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; fetchMock.once('*', { status: 200, body: { ff: { d: [flag1, flag2], t: 401 }, rbs: { d: [rbSegment], t: 1684329854385 } } }); - await splitChangesUpdater(); + await definitionChangesUpdater(); // When both flags and RB segments are updated, should emit FLAGS_UPDATE with flag names expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: ['flag1', 'flag2'] }); @@ -414,9 +414,9 @@ describe('splitChangesUpdater', () => { readinessManager.segments.segmentsArrived = false; // Segments not ready - client-side should still emit // Create client-side updater 
(isClientSide = true) - const clientSideUpdater = splitChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); + const clientSideUpdater = definitionChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); - const flag1 = { name: 'client-flag', status: 'ACTIVE', changeNumber: 300, conditions: [] } as unknown as ISplit; + const flag1 = { name: 'client-flag', status: 'ACTIVE', changeNumber: 300, conditions: [] } as unknown as IDefinition; fetchMock.once('*', { status: 200, body: { ff: { d: [flag1], t: 300 } } }); await clientSideUpdater(); diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/definitionChangesUpdater.ts similarity index 56% rename from src/sync/polling/updaters/splitChangesUpdater.ts rename to src/sync/polling/updaters/definitionChangesUpdater.ts index 0510a485..d7587eff 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/definitionChangesUpdater.ts @@ -1,19 +1,19 @@ import { ISegmentsCacheBase, IStorageBase } from '../../../storages/types'; -import { ISplitChangesFetcher } from '../fetchers/types'; -import { IRBSegment, ISplit, ISplitChangesResponse, ISplitFiltersValidation, MaybeThenable } from '../../../dtos/types'; +import { IDefinitionChangesFetcher } from '../fetchers/types'; +import { IRBSegment, IDefinition, IDefinitionChangesResponse, ISplitFiltersValidation, MaybeThenable } from '../../../dtos/types'; import { ISplitsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; -import { SDK_SPLITS_ARRIVED, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../../../readiness/constants'; +import { SDK_SPLITS_ARRIVED, FLAGS_UPDATE, SEGMENTS_UPDATE, CONFIGS_UPDATE } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; -import { SYNC_SPLITS_FETCH, 
SYNC_SPLITS_UPDATE, SYNC_RBS_UPDATE, SYNC_SPLITS_FETCH_FAILS, SYNC_SPLITS_FETCH_RETRY } from '../../../logger/constants'; +import { SYNC_FETCH, SYNC_UPDATE, SYNC_FETCH_FAILS, SYNC_FETCH_RETRY } from '../../../logger/constants'; import { startsWith } from '../../../utils/lang'; import { IN_RULE_BASED_SEGMENT, IN_SEGMENT, RULE_BASED_SEGMENT, STANDARD_SEGMENT } from '../../../utils/constants'; import { setToArray } from '../../../utils/lang/sets'; import { SPLIT_UPDATE } from '../../streaming/constants'; import { SdkUpdateMetadata } from '../../../../types/splitio'; -export type InstantUpdate = { payload: ISplit | IRBSegment, changeNumber: number, type: string }; -type SplitChangesUpdater = (noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) => Promise +export type InstantUpdate = { payload: IDefinition | IRBSegment, changeNumber: number, type: string }; +type DefinitionChangesUpdater = (noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) => Promise // Checks that all registered segments have been fetched (changeNumber !== -1 for every segment). // Returns a promise that could be rejected. @@ -30,7 +30,7 @@ function checkAllSegmentsExist(segments: ISegmentsCacheBase): Promise { * Collect segments from a raw FF or RBS definition. * Exported for testing purposes. 
*/ -export function parseSegments(ruleEntity: ISplit | IRBSegment, matcherType: typeof IN_SEGMENT | typeof IN_RULE_BASED_SEGMENT = IN_SEGMENT): Set { +export function parseSegments(ruleEntity: IDefinition | IRBSegment, matcherType: typeof IN_SEGMENT | typeof IN_RULE_BASED_SEGMENT = IN_SEGMENT): Set { const { conditions, excluded } = ruleEntity as IRBSegment; const segments = new Set(); @@ -55,42 +55,42 @@ export function parseSegments(ruleEntity: ISplit | IRBSegment, matcherType: type return segments; } -interface ISplitMutations { +interface IDefinitionMutations { added: T[], removed: T[], names: string[] } /** - * If there are defined filters and one feature flag doesn't match with them, its status is changed to 'ARCHIVE' to avoid storing it + * If there are defined filters and one definition doesn't match with them, its status is changed to 'ARCHIVE' to avoid storing it * If there is `bySet` filter, `byName` and `byPrefix` filters are ignored * - * @param featureFlag - feature flag to be evaluated + * @param definition - definition to be evaluated * @param filters - splitFiltersValidation bySet | byName */ -function matchFilters(featureFlag: ISplit, filters: ISplitFiltersValidation) { +function matchFilters(definition: IDefinition, filters: ISplitFiltersValidation) { const { bySet: setsFilter, byName: namesFilter, byPrefix: prefixFilter } = filters.groupedFilters; - if (setsFilter.length > 0) return featureFlag.sets && featureFlag.sets.some((featureFlagSet: string) => setsFilter.indexOf(featureFlagSet) > -1); + if (setsFilter.length > 0) return definition.sets && definition.sets.some((definitionSet: string) => setsFilter.indexOf(definitionSet) > -1); const namesFilterConfigured = namesFilter.length > 0; const prefixFilterConfigured = prefixFilter.length > 0; if (!namesFilterConfigured && !prefixFilterConfigured) return true; - const matchNames = namesFilterConfigured && namesFilter.indexOf(featureFlag.name) > -1; - const matchPrefix = prefixFilterConfigured && 
prefixFilter.some(prefix => startsWith(featureFlag.name, prefix)); + const matchNames = namesFilterConfigured && namesFilter.indexOf(definition.name) > -1; + const matchPrefix = prefixFilterConfigured && prefixFilter.some(prefix => startsWith(definition.name, prefix)); return matchNames || matchPrefix; } /** - * Given the list of splits from /splitChanges endpoint, it returns the mutations, - * i.e., an object with added splits, removed splits and used segments. + * Given the list of definitions from /splitChanges or /configs endpoint, it returns the mutations, + * i.e., an object with added definitions, removed definitions, and used segments. * Exported for testing purposes. */ -export function computeMutation(rules: Array, segments: Set, filters?: ISplitFiltersValidation): ISplitMutations { +export function computeMutation(rules: Array, segments: Set, filters?: ISplitFiltersValidation): IDefinitionMutations { return rules.reduce((accum, ruleEntity) => { - if (ruleEntity.status !== 'ARCHIVED' && (!filters || matchFilters(ruleEntity as ISplit, filters))) { + if (ruleEntity.status !== 'ARCHIVED' && (!filters || matchFilters(ruleEntity as IDefinition, filters))) { accum.added.push(ruleEntity); parseSegments(ruleEntity).forEach((segmentName: string) => { @@ -102,88 +102,88 @@ export function computeMutation(rules: Array, accum.names.push(ruleEntity.name); return accum; - }, { added: [], removed: [], names: [] } as ISplitMutations); + }, { added: [], removed: [], names: [] } as IDefinitionMutations); } /** - * factory of SplitChanges updater, a task that: - * - fetches split changes using `splitChangesFetcher` - * - updates `splitsCache` - * - uses `splitsEventEmitter` to emit events related to split data updates + * Factory of DefinitionChanges updater, a task that: + * - fetches definition changes using `definitionChangesFetcher` + * - updates definitions storage + * - uses `definitionsEventEmitter` to emit events related to definition data updates * * @param log - 
Logger instance - * @param splitChangesFetcher - Fetcher of `/splitChanges` - * @param splits - Splits storage, with sync or async methods + * @param definitionChangesFetcher - Fetcher of `/splitChanges` or `/configs` + * @param definitions - Definitions storage, with sync or async methods * @param segments - Segments storage, with sync or async methods - * @param splitsEventEmitter - Optional readiness manager. Not required for synchronizer or producer mode. + * @param definitionsEventEmitter - Optional readiness manager. Not required for synchronizer or producer mode. * @param requestTimeoutBeforeReady - How long the updater will wait for the request to timeout. Default 0, i.e., never timeout. - * @param retriesOnFailureBeforeReady - How many retries on `/splitChanges` we the updater do in case of failure or timeout. Default 0, i.e., no retries. + * @param retriesOnFailureBeforeReady - How many retries on `/splitChanges` or `/configs` we the updater do in case of failure or timeout. Default 0, i.e., no retries. 
*/ -export function splitChangesUpdaterFactory( +export function definitionChangesUpdaterFactory( log: ILogger, - splitChangesFetcher: ISplitChangesFetcher, + definitionChangesFetcher: IDefinitionChangesFetcher, storage: Pick, splitFiltersValidation: ISplitFiltersValidation, splitsEventEmitter?: ISplitsEventEmitter, requestTimeoutBeforeReady = 0, retriesOnFailureBeforeReady = 0, isClientSide?: boolean -): SplitChangesUpdater { +): DefinitionChangesUpdater { const { splits, rbSegments, segments } = storage; let startingUp = true; - /** timeout decorator for `splitChangesFetcher` promise */ + /** timeout decorator for `definitionChangesFetcher` promise */ function _promiseDecorator(promise: Promise) { if (startingUp && requestTimeoutBeforeReady) promise = timeout(requestTimeoutBeforeReady, promise); return promise; } /** - * SplitChanges updater returns a promise that resolves with a `false` boolean value if it fails to fetch splits or synchronize them with the storage. + * DefinitionChanges updater returns a promise that resolves with a `false` boolean value if it fails to fetch definitions or synchronize them with the storage. * Returned promise will not be rejected. 
* * @param noCache - true to revalidate data to fetch * @param till - query param to bypass CDN requests */ - return function splitChangesUpdater(noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) { + return function definitionChangesUpdater(noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) { /** - * @param since - current changeNumber at splitsCache + * @param since - current changeNumber at definitionsCache * @param retry - current number of retry attempts */ - function _splitChangesUpdater(sinces: [number, number], retry = 0): Promise { + function _definitionChangesUpdater(sinces: [number, number], retry = 0): Promise { const [since, rbSince] = sinces; - log.debug(SYNC_SPLITS_FETCH, sinces); + log.debug(SYNC_FETCH, [definitionChangesFetcher.type, since, rbSince]); return Promise.resolve( instantUpdate ? instantUpdate.type === SPLIT_UPDATE ? - // IFFU edge case: a change to a flag that adds an IN_RULE_BASED_SEGMENT matcher that is not present yet + // IFFU edge case: a change to definition that adds an IN_RULE_BASED_SEGMENT matcher that is not present yet Promise.resolve(rbSegments.contains(parseSegments(instantUpdate.payload, IN_RULE_BASED_SEGMENT))).then((contains) => { return contains ? 
- { ff: { d: [instantUpdate.payload as ISplit], t: instantUpdate.changeNumber } } : - splitChangesFetcher(since, noCache, till, rbSince, _promiseDecorator); + { ff: { d: [instantUpdate.payload as IDefinition], t: instantUpdate.changeNumber } } : + definitionChangesFetcher(since, noCache, till, rbSince, _promiseDecorator); }) : { rbs: { d: [instantUpdate.payload as IRBSegment], t: instantUpdate.changeNumber } } : - splitChangesFetcher(since, noCache, till, rbSince, _promiseDecorator) + definitionChangesFetcher(since, noCache, till, rbSince, _promiseDecorator) ) - .then((splitChanges: ISplitChangesResponse) => { + .then((definitionChanges: IDefinitionChangesResponse) => { const usedSegments = new Set(); - let updatedFlags: string[] = []; + let updatedDefinitions: string[] = []; let ffUpdate: MaybeThenable = false; - if (splitChanges.ff) { - const { added, removed, names } = computeMutation(splitChanges.ff.d, usedSegments, splitFiltersValidation); - updatedFlags = names; - log.debug(SYNC_SPLITS_UPDATE, [added.length, removed.length]); - ffUpdate = splits.update(added, removed, splitChanges.ff.t); + if (definitionChanges.ff) { + const { added, removed, names } = computeMutation(definitionChanges.ff.d, usedSegments, splitFiltersValidation); + updatedDefinitions = names; + log.debug(SYNC_UPDATE, [definitionChangesFetcher.type, added.length, removed.length]); + ffUpdate = splits.update(added, removed, definitionChanges.ff.t); } let rbsUpdate: MaybeThenable = false; - if (splitChanges.rbs) { - const { added, removed } = computeMutation(splitChanges.rbs.d, usedSegments); - log.debug(SYNC_RBS_UPDATE, [added.length, removed.length]); - rbsUpdate = rbSegments.update(added, removed, splitChanges.rbs.t); + if (definitionChanges.rbs) { + const { added, removed } = computeMutation(definitionChanges.rbs.d, usedSegments); + log.debug(SYNC_UPDATE, ['rule-based segments', added.length, removed.length]); + rbsUpdate = rbSegments.update(added, removed, definitionChanges.rbs.t); } return 
Promise.all([ffUpdate, rbsUpdate, @@ -202,8 +202,8 @@ export function splitChangesUpdaterFactory( // emit SDK events if (emitSplitsArrivedEvent) { const metadata: SdkUpdateMetadata = { - type: updatedFlags.length > 0 ? FLAGS_UPDATE : SEGMENTS_UPDATE, - names: updatedFlags.length > 0 ? updatedFlags : [] + type: updatedDefinitions.length > 0 ? definitionChangesFetcher.type === 'configs' ? CONFIGS_UPDATE : FLAGS_UPDATE : SEGMENTS_UPDATE, + names: updatedDefinitions.length > 0 ? updatedDefinitions : [] }; splitsEventEmitter.emit(SDK_SPLITS_ARRIVED, metadata); } @@ -216,17 +216,17 @@ export function splitChangesUpdaterFactory( .catch(error => { if (startingUp && retriesOnFailureBeforeReady > retry) { retry += 1; - log.warn(SYNC_SPLITS_FETCH_RETRY, [retry, error]); - return _splitChangesUpdater(sinces, retry); + log.warn(SYNC_FETCH_RETRY, [definitionChangesFetcher.type, retry, error]); + return _definitionChangesUpdater(sinces, retry); } else { startingUp = false; - log.warn(SYNC_SPLITS_FETCH_FAILS, [error]); + log.warn(SYNC_FETCH_FAILS, [definitionChangesFetcher.type, error]); } return false; }); } // `getChangeNumber` never rejects or throws error - return Promise.all([splits.getChangeNumber(), rbSegments.getChangeNumber()]).then(_splitChangesUpdater); + return Promise.all([splits.getChangeNumber(), rbSegments.getChangeNumber()]).then(_definitionChangesUpdater); }; } diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 86f5cb0e..98679b6c 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -4,7 +4,7 @@ import { ISegmentsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; import { SDK_SEGMENTS_ARRIVED, SEGMENTS_UPDATE } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; -import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; +import { 
SYNC_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; import { IMembershipsResponse } from '../../../dtos/types'; import { MEMBERSHIPS_LS_UPDATE } from '../../streaming/constants'; @@ -76,7 +76,7 @@ export function mySegmentsUpdaterFactory( return updaterPromise.catch(error => { if (startingUp && retriesOnFailureBeforeReady > retry) { retry += 1; - log.warn(SYNC_MYSEGMENTS_FETCH_RETRY, [retry, error]); + log.warn(SYNC_FETCH_RETRY, ['memberships', retry, error]); return _mySegmentsUpdater(retry); // no need to forward `segmentList` and `noCache` params } else { startingUp = false; diff --git a/src/sync/polling/updaters/segmentChangesUpdater.ts b/src/sync/polling/updaters/segmentChangesUpdater.ts index 5b27b361..800694e0 100644 --- a/src/sync/polling/updaters/segmentChangesUpdater.ts +++ b/src/sync/polling/updaters/segmentChangesUpdater.ts @@ -3,7 +3,7 @@ import { ISegmentsCacheBase } from '../../../storages/types'; import { IReadinessManager } from '../../../readiness/types'; import { SDK_SEGMENTS_ARRIVED, SEGMENTS_UPDATE } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; -import { LOG_PREFIX_INSTANTIATION, LOG_PREFIX_SYNC_SEGMENTS } from '../../../logger/constants'; +import { LOG_PREFIX_INSTANTIATION, LOG_PREFIX_SYNC } from '../../../logger/constants'; import { timeout } from '../../../utils/promise/timeout'; import { SdkUpdateMetadata } from '../../../../types/splitio'; @@ -38,7 +38,7 @@ export function segmentChangesUpdaterFactory( } function updateSegment(segmentName: string, noCache?: boolean, till?: number, fetchOnlyNew?: boolean, retries?: number): Promise { - log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processing segment ${segmentName}`); + log.debug(`${LOG_PREFIX_SYNC}Processing segment ${segmentName}`); const sincePromise = Promise.resolve(segments.getChangeNumber(segmentName)); return sincePromise.then(since => { @@ -47,14 +47,14 @@ export function segmentChangesUpdaterFactory( false 
: segmentChangesFetcher(since || -1, segmentName, noCache, till, _promiseDecorator).then((changes) => { return Promise.all(changes.map(x => { - log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Processing ${segmentName} with till = ${x.till}. Added: ${x.added.length}. Removed: ${x.removed.length}`); + log.debug(`${LOG_PREFIX_SYNC}Processing ${segmentName} with till = ${x.till}. Added: ${x.added.length}. Removed: ${x.removed.length}`); return segments.update(segmentName, x.added, x.removed, x.till); })).then((updates) => { return updates.some(update => update); }); }).catch(error => { if (retries) { - log.warn(`${LOG_PREFIX_SYNC_SEGMENTS}Retrying fetch of segment ${segmentName} (attempt #${retries}). Reason: ${error}`); + log.warn(`${LOG_PREFIX_SYNC}Retrying fetch of segment ${segmentName} (attempt #${retries}). Reason: ${error}`); return updateSegment(segmentName, noCache, till, fetchOnlyNew, retries - 1); } throw error; @@ -73,7 +73,7 @@ export function segmentChangesUpdaterFactory( * @param till - till target for the provided segmentName, for CDN bypass. */ return function segmentChangesUpdater(fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number) { - log.debug(`${LOG_PREFIX_SYNC_SEGMENTS}Started segments update`); + log.debug(`${LOG_PREFIX_SYNC}Started segments update`); // If not a segment name provided, read list of available segments names to be updated. let segmentsPromise = Promise.resolve(segmentName ? [segmentName] : segments.getRegisteredSegments()); @@ -102,9 +102,9 @@ export function segmentChangesUpdaterFactory( // If the operation is forbidden, it may be due to permissions. Destroy the SDK instance. 
// @TODO although factory status is destroyed, synchronization is not stopped if (readiness) readiness.setDestroyed(); - log.error(`${LOG_PREFIX_INSTANTIATION}: you passed a client-side type authorizationKey, please grab an SDK Key from the Split user interface that is of type server-side.`); + log.error(`${LOG_PREFIX_INSTANTIATION}: you passed a client-side type authorizationKey, please grab an SDK Key from Harness UI that is of type server-side.`); } else { - log.warn(`${LOG_PREFIX_SYNC_SEGMENTS}Error while doing fetch of segments. ${error}`); + log.warn(`${LOG_PREFIX_SYNC}Error while doing fetch of segments. ${error}`); } return false; diff --git a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts index dc5cb7dc..58db2806 100644 --- a/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts +++ b/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts @@ -1,4 +1,4 @@ -import { IRBSegment, ISplit } from '../../../dtos/types'; +import { IRBSegment, IDefinition } from '../../../dtos/types'; import { STREAMING_PARSING_SPLIT_UPDATE } from '../../../logger/constants'; import { ILogger } from '../../../logger/types'; import { SDK_SPLITS_ARRIVED } from '../../../readiness/constants'; @@ -7,8 +7,8 @@ import { IRBSegmentsCacheSync, ISplitsCacheSync, IStorageSync } from '../../../s import { ITelemetryTracker } from '../../../trackers/types'; import { Backoff } from '../../../utils/Backoff'; import { SPLITS } from '../../../utils/constants'; -import { ISegmentsSyncTask, ISplitsSyncTask } from '../../polling/types'; -import { InstantUpdate } from '../../polling/updaters/splitChangesUpdater'; +import { ISegmentsSyncTask, IDefinitionsSyncTask } from '../../polling/types'; +import { InstantUpdate } from '../../polling/updaters/definitionChangesUpdater'; import { RB_SEGMENT_UPDATE } from '../constants'; import { parseFFUpdatePayload } from '../parseUtils'; import { ISplitKillData, ISplitUpdateData } from 
'../SSEHandler/types'; @@ -18,7 +18,7 @@ import { IUpdateWorker } from './types'; /** * SplitsUpdateWorker factory */ -export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData]> & { killSplit(event: ISplitKillData): void } { +export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, definitionsSyncTask: IDefinitionsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData]> & { killSplit(event: ISplitKillData): void } { const ff = SplitsUpdateWorker(storage.splits); const rbs = SplitsUpdateWorker(storage.rbSegments); @@ -36,7 +36,7 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, splitsSy if (maxChangeNumber > cache.getChangeNumber()) { handleNewEvent = false; // fetch splits revalidating data if cached - splitsSyncTask.execute(true, cdnBypass ? maxChangeNumber : undefined, instantUpdate).then(() => { + definitionsSyncTask.execute(true, cdnBypass ? 
maxChangeNumber : undefined, instantUpdate).then(() => { if (!isHandlingEvent) return; // halt if `stop` has been called if (handleNewEvent) { __handleSplitUpdateCall(); @@ -79,7 +79,7 @@ export function SplitsUpdateWorker(log: ILogger, storage: IStorageSync, splitsSy * * @param changeNumber - change number of the notification */ - put({ changeNumber, pcn, type }: ISplitUpdateData, payload?: ISplit | IRBSegment) { + put({ changeNumber, pcn, type }: ISplitUpdateData, payload?: IDefinition | IRBSegment) { const currentChangeNumber = cache.getChangeNumber(); if (changeNumber <= currentChangeNumber || changeNumber <= maxChangeNumber) return; diff --git a/src/sync/streaming/__tests__/parseUtils.spec.ts b/src/sync/streaming/__tests__/parseUtils.spec.ts index a12a0a4e..3279c343 100644 --- a/src/sync/streaming/__tests__/parseUtils.spec.ts +++ b/src/sync/streaming/__tests__/parseUtils.spec.ts @@ -1,13 +1,13 @@ import { hash64 } from '../../../utils/murmur3/murmur3_64'; import { keylists, bitmaps, splitNotifications } from './dataMocks'; -import { parseKeyList, parseBitmap, isInBitmap, parseFFUpdatePayload, getDelay } from '../parseUtils'; +import { parseCompressedData, parseBitmap, isInBitmap, parseFFUpdatePayload, getDelay } from '../parseUtils'; -test('parseKeyList', () => { +test('parseCompressedData', () => { keylists.forEach(keylist => { const { compression, keyListData, keyListDataCompressed, addedUserKeys, removedUserKeys, otherUserKeys } = keylist; - expect(parseKeyList(keyListDataCompressed, compression)).toEqual(keyListData); // decompress KeyList + expect(parseCompressedData(keyListDataCompressed, compression)).toEqual(keyListData); // decompress KeyList const added = new Set(keyListData.a); const removed = new Set(keyListData.r); @@ -51,7 +51,7 @@ test('parseBitmap & isInBitmap', () => { }); }); -test('split notification - parseKeyList', () => { +test('split notification - parseCompressedData', () => { splitNotifications.forEach(notification => { let { 
compression, data, decoded } = notification; diff --git a/src/sync/streaming/parseUtils.ts b/src/sync/streaming/parseUtils.ts index a34f2dc9..eff154ea 100644 --- a/src/sync/streaming/parseUtils.ts +++ b/src/sync/streaming/parseUtils.ts @@ -1,8 +1,8 @@ import { algorithms } from '../../utils/decompress'; import { decodeFromBase64 } from '../../utils/base64'; import { hash } from '../../utils/murmur3/murmur3'; -import { Compression, IMembershipMSUpdateData, KeyList } from './SSEHandler/types'; -import { IRBSegment, ISplit } from '../../dtos/types'; +import { Compression, IMembershipMSUpdateData } from './SSEHandler/types'; +import { IRBSegment, IDefinition } from '../../dtos/types'; const GZIP = 1; const ZLIB = 2; @@ -35,20 +35,20 @@ function decompress(data: string, compression: Compression) { } /** - * Decode, decompress and parse the provided 'data' into a KeyList object + * Decode, decompress and parse the provided 'data' into an object of type T * * @param data - base64 encoded string * @param compression - 1 GZIP, 2 ZLIB * @param avoidPrecisionLoss - true as default, set it as false if dont need to avoid precission loss - * @returns keyList + * @returns parsed object * @throws if data string cannot be decoded, decompressed or parsed */ -export function parseKeyList(data: string, compression: Compression, avoidPrecisionLoss = true): KeyList { - const binKeyList = decompress(data, compression); - let strKeyList = Uint8ArrayToString(binKeyList); - // replace numbers to strings, to avoid losing precision - if (avoidPrecisionLoss) strKeyList = strKeyList.replace(/\d+/g, '"$&"'); - return JSON.parse(strKeyList); +export function parseCompressedData(data: string, compression: Compression, avoidPrecisionLoss = true): T { + const binData = decompress(data, compression); + let str = Uint8ArrayToString(binData); + // replace numbers to strings, to avoid losing precision (e.g., 64-bit IDs in KeyList) + if (avoidPrecisionLoss) str = str.replace(/\d+/g, '"$&"'); + return 
JSON.parse(str) as T; } /** @@ -82,9 +82,9 @@ export function isInBitmap(bitmap: Uint8Array, hash64hex: string) { /** * Parse feature flags notifications for instant feature flag updates */ -export function parseFFUpdatePayload(compression: Compression, data: string): ISplit | IRBSegment | undefined { +export function parseFFUpdatePayload(compression: Compression, data: string): IDefinition | IRBSegment | undefined { return compression > 0 ? - parseKeyList(data, compression, false) : + parseCompressedData(data, compression, false) : JSON.parse(decodeFromBase64(data)); } diff --git a/src/sync/streaming/pushManager.ts b/src/sync/streaming/pushManager.ts index f0a5ac4e..b1d4b730 100644 --- a/src/sync/streaming/pushManager.ts +++ b/src/sync/streaming/pushManager.ts @@ -14,7 +14,7 @@ import { checkIfServerSide, getMatching } from '../../utils/key'; import { MEMBERSHIPS_MS_UPDATE, MEMBERSHIPS_LS_UPDATE, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, RB_SEGMENT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType } from './constants'; import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MEMBERSHIPS_UPDATE } from '../../logger/constants'; import { IMembershipMSUpdateData, IMembershipLSUpdateData, KeyList, UpdateStrategy } from './SSEHandler/types'; -import { getDelay, isInBitmap, parseBitmap, parseKeyList } from './parseUtils'; +import { getDelay, isInBitmap, parseBitmap, parseCompressedData } from './parseUtils'; import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64'; import { IAuthTokenPushEnabled } from './AuthClient/types'; import { TOKEN_REFRESH, AUTH_REJECTION } from '../../utils/constants'; @@ -56,7 +56,7 @@ export function pushManagerFactory( // MySegmentsUpdateWorker (client-side) are initiated in `add` method const segmentsUpdateWorker = userKey ? 
undefined : SegmentsUpdateWorker(log, pollingManager.segmentsSyncTask as ISegmentsSyncTask, storage.segments); // For server-side we pass the segmentsSyncTask, used by SplitsUpdateWorker to fetch new segments - const splitsUpdateWorker = SplitsUpdateWorker(log, storage, pollingManager.splitsSyncTask, readiness.splits, telemetryTracker, userKey ? undefined : pollingManager.segmentsSyncTask as ISegmentsSyncTask); + const splitsUpdateWorker = SplitsUpdateWorker(log, storage, pollingManager.definitionsSyncTask, readiness.splits, telemetryTracker, userKey ? undefined : pollingManager.segmentsSyncTask as ISegmentsSyncTask); // [Only for client-side] map of hashes to user keys, to dispatch membership update events to the corresponding MySegmentsUpdateWorker const userKeyHashes: Record = {}; @@ -243,7 +243,7 @@ export function pushManagerFactory( case UpdateStrategy.KeyList: { let keyList: KeyList, added: Set, removed: Set; try { - keyList = parseKeyList(parsedData.d!, parsedData.c!); + keyList = parseCompressedData(parsedData.d!, parsedData.c!); added = new Set(keyList.a); removed = new Set(keyList.r); } catch (e) { diff --git a/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts b/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts index 57a368c5..0776600d 100644 --- a/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts +++ b/src/sync/submitters/__tests__/telemetrySubmitter.spec.ts @@ -77,7 +77,7 @@ describe('Telemetry submitter', () => { expect(recordTimeUntilReadySpy).toBeCalledTimes(1); expect(postMetricsConfig).toBeCalledWith(JSON.stringify({ - oM: 0, st: 'memory', aF: 0, rF: 0, sE: true, rR: { sp: 0.001, se: 0.001, im: 0.001, ev: 0.001, te: 0.1 }, uO: { s: true, e: true, a: true, st: true, t: true }, iQ: 1, eQ: 1, iM: 0, iL: false, hP: false, tR: 0, tC: 0, nR: 0, t: [], i: ['NoopIntegration'], uC: 0, fsT: 0, fsI: 0 + oM: 0, st: 'memory', aF: 0, rF: 0, sE: true, rR: { sp: 0.001, se: 0.001, im: 0.001, ev: 0.001, te: 0.1 }, uO: { s: false, e: false, a: 
false, st: false, t: false }, iQ: 1, eQ: 1, iM: 0, iL: false, hP: false, tR: 0, tC: 0, nR: 0, t: [], i: ['NoopIntegration'], uC: 0, fsT: 0, fsI: 0 })); // Stop submitter, to not execute the 1st periodic metrics/usage POST diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index df9ff152..f3709fa1 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -11,6 +11,8 @@ import { POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants'; import { ISdkFactoryContextSync } from '../sdkFactory/types'; import { SDK_SPLITS_CACHE_LOADED } from '../readiness/constants'; import { usesSegmentsSync } from '../storages/AbstractSplitsCacheSync'; +import { splitChangesFetcherFactory } from './polling/fetchers/splitChangesFetcher'; +import { IDefinitionChangesFetcher } from './polling/fetchers/types'; /** * Online SyncManager factory. @@ -19,10 +21,12 @@ import { usesSegmentsSync } from '../storages/AbstractSplitsCacheSync'; * @param pollingManagerFactory - allows to specialize the SyncManager for server-side or client-side API by passing * `pollingManagerSSFactory` or `pollingManagerCSFactory` respectively. 
* @param pushManagerFactory - optional to build a SyncManager with or without streaming support + * @param definitionChangesFetcherFactory - optional to replace the default split changes fetcher */ export function syncManagerOnlineFactory( - pollingManagerFactory?: (params: ISdkFactoryContextSync) => IPollingManager, + pollingManagerFactory?: (params: ISdkFactoryContextSync, definitionChangesFetcher: IDefinitionChangesFetcher) => IPollingManager, pushManagerFactory?: (params: ISdkFactoryContextSync, pollingManager: IPollingManager) => IPushManager | undefined, + definitionChangesFetcherFactory = splitChangesFetcherFactory ): (params: ISdkFactoryContextSync) => ISyncManagerCS { /** @@ -33,7 +37,7 @@ export function syncManagerOnlineFactory( const { settings, settings: { log, streamingEnabled, sync: { enabled: syncEnabled } }, telemetryTracker, storage, readiness } = params; /** Polling Manager */ - const pollingManager = pollingManagerFactory && pollingManagerFactory(params); + const pollingManager = pollingManagerFactory && pollingManagerFactory(params, definitionChangesFetcherFactory(params)); /** Push Manager */ const pushManager = syncEnabled && streamingEnabled && pollingManager && pushManagerFactory ? diff --git a/src/utils/EventEmitter.js b/src/utils/EventEmitter.js new file mode 100644 index 00000000..2f173c20 --- /dev/null +++ b/src/utils/EventEmitter.js @@ -0,0 +1,444 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// Adaptation of "events" library (https://www.npmjs.com/package/events) +// exported as an ES module instead of CommonJS, to avoid extra configuration steps when using +// the ESM build of the SDK with tools that doesn't support CommonJS by default (e.g. Rollup). + +var R = typeof Reflect === 'object' ? Reflect : null; +var ReflectApply = R && typeof R.apply === 'function' + ? 
R.apply + : function ReflectApply(target, receiver, args) { + return Function.prototype.apply.call(target, receiver, args); + }; + +var ReflectOwnKeys; +if (R && typeof R.ownKeys === 'function') { + ReflectOwnKeys = R.ownKeys; +} else if (Object.getOwnPropertySymbols) { + ReflectOwnKeys = function ReflectOwnKeys(target) { + return Object.getOwnPropertyNames(target) + .concat(Object.getOwnPropertySymbols(target)); + }; +} else { + ReflectOwnKeys = function ReflectOwnKeys(target) { + return Object.getOwnPropertyNames(target); + }; +} + +function ProcessEmitWarning(warning) { + if (console && console.warn) console.warn(warning); +} + +// eslint-disable-next-line compat/compat +var NumberIsNaN = Number.isNaN || function NumberIsNaN(value) { + return value !== value; +}; + +export function EventEmitter() { + EventEmitter.init.call(this); +} + +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._eventsCount = 0; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +var defaultMaxListeners = 10; + +function checkListener(listener) { + if (typeof listener !== 'function') { + throw new TypeError('The "listener" argument must be of type Function. Received type ' + typeof listener); + } +} + +Object.defineProperty(EventEmitter, 'defaultMaxListeners', { + enumerable: true, + get: function () { + return defaultMaxListeners; + }, + set: function (arg) { + if (typeof arg !== 'number' || arg < 0 || NumberIsNaN(arg)) { + throw new RangeError('The value of "defaultMaxListeners" is out of range. It must be a non-negative number. 
Received ' + arg + '.'); + } + defaultMaxListeners = arg; + } +}); + +EventEmitter.init = function () { + + if (this._events === undefined || + this._events === Object.getPrototypeOf(this)._events) { + this._events = Object.create(null); + this._eventsCount = 0; + } + + this._maxListeners = this._maxListeners || undefined; +}; + +// Obviously not all Emitters should be limited to 10. This function allows +// that to be increased. Set to zero for unlimited. +EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { + if (typeof n !== 'number' || n < 0 || NumberIsNaN(n)) { + throw new RangeError('The value of "n" is out of range. It must be a non-negative number. Received ' + n + '.'); + } + this._maxListeners = n; + return this; +}; + +function _getMaxListeners(that) { + if (that._maxListeners === undefined) + return EventEmitter.defaultMaxListeners; + return that._maxListeners; +} + +EventEmitter.prototype.getMaxListeners = function getMaxListeners() { + return _getMaxListeners(this); +}; + +EventEmitter.prototype.emit = function emit(type) { + var args = []; + for (var i = 1; i < arguments.length; i++) args.push(arguments[i]); + var doError = (type === 'error'); + + var events = this._events; + if (events !== undefined) + doError = (doError && events.error === undefined); + else if (!doError) + return false; + + // If there is no 'error' event listener then throw. + if (doError) { + var er; + if (args.length > 0) + er = args[0]; + if (er instanceof Error) { + // Note: The comments on the `throw` lines are intentional, they show + // up in Node.js output if this results in an unhandled exception. + throw er; // Unhandled 'error' event + } + // At least give some kind of context to the user + var err = new Error('Unhandled error.' + (er ? 
' (' + er.message + ')' : '')); + err.context = er; + throw err; // Unhandled 'error' event + } + + var handler = events[type]; + + if (handler === undefined) + return false; + + if (typeof handler === 'function') { + ReflectApply(handler, this, args); + } else { + var len = handler.length; + var listeners = arrayClone(handler, len); // eslint-disable-next-line no-redeclare + for (var i = 0; i < len; ++i) + ReflectApply(listeners[i], this, args); + } + + return true; +}; + +function _addListener(target, type, listener, prepend) { + var m; + var events; + var existing; + + checkListener(listener); + + events = target._events; + if (events === undefined) { + events = target._events = Object.create(null); + target._eventsCount = 0; + } else { + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". + if (events.newListener !== undefined) { + target.emit('newListener', type, listener.listener ? listener.listener : listener); + + // Re-assign `events` because a newListener handler could have caused the + // this._events to be assigned to a new object + events = target._events; + } + existing = events[type]; + } + + if (existing === undefined) { + // Optimize the case of one listener. Don't need the extra array object. + existing = events[type] = listener; + ++target._eventsCount; + } else { + if (typeof existing === 'function') { + // Adding the second element, need to change to array. + existing = events[type] = + prepend ? [listener, existing] : [existing, listener]; + // If we've already got an array, just append. 
+ } else if (prepend) { + existing.unshift(listener); + } else { + existing.push(listener); + } + + // Check for listener leak + m = _getMaxListeners(target); + if (m > 0 && existing.length > m && !existing.warned) { + existing.warned = true; + // No error code for this since it is a Warning + // eslint-disable-next-line no-restricted-syntax + var w = new Error('Possible EventEmitter memory leak detected. ' + + existing.length + ' ' + String(type) + ' listeners ' + + 'added. Use emitter.setMaxListeners() to ' + + 'increase limit'); + w.name = 'MaxListenersExceededWarning'; + w.emitter = target; + w.type = type; + w.count = existing.length; + ProcessEmitWarning(w); + } + } + + return target; +} + +EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +EventEmitter.prototype.prependListener = + function prependListener(type, listener) { + return _addListener(this, type, listener, true); + }; + +function onceWrapper() { + if (!this.fired) { + this.target.removeListener(this.type, this.wrapFn); + this.fired = true; + if (arguments.length === 0) + return this.listener.call(this.target); + return this.listener.apply(this.target, arguments); + } +} + +function _onceWrap(target, type, listener) { + var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener }; + var wrapped = onceWrapper.bind(state); + wrapped.listener = listener; + state.wrapFn = wrapped; + return wrapped; +} + +EventEmitter.prototype.once = function once(type, listener) { + checkListener(listener); + this.on(type, _onceWrap(this, type, listener)); + return this; +}; + +EventEmitter.prototype.prependOnceListener = + function prependOnceListener(type, listener) { + checkListener(listener); + this.prependListener(type, _onceWrap(this, type, listener)); + return this; + }; + +// Emits a 'removeListener' event if and only if the 
listener was removed. +EventEmitter.prototype.removeListener = + function removeListener(type, listener) { + var list, events, position, i, originalListener; + + checkListener(listener); + + events = this._events; + if (events === undefined) + return this; + + list = events[type]; + if (list === undefined) + return this; + + if (list === listener || list.listener === listener) { + if (--this._eventsCount === 0) + this._events = Object.create(null); + else { + delete events[type]; + if (events.removeListener) + this.emit('removeListener', type, list.listener || listener); + } + } else if (typeof list !== 'function') { + position = -1; + + for (i = list.length - 1; i >= 0; i--) { + if (list[i] === listener || list[i].listener === listener) { + originalListener = list[i].listener; + position = i; + break; + } + } + + if (position < 0) + return this; + + if (position === 0) + list.shift(); + else { + spliceOne(list, position); + } + + if (list.length === 1) + events[type] = list[0]; + + if (events.removeListener !== undefined) + this.emit('removeListener', type, originalListener || listener); + } + + return this; + }; + +EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + +EventEmitter.prototype.removeAllListeners = + function removeAllListeners(type) { + var listeners, events, i; + + events = this._events; + if (events === undefined) + return this; + + // not listening for removeListener, no need to emit + if (events.removeListener === undefined) { + if (arguments.length === 0) { + this._events = Object.create(null); + this._eventsCount = 0; + } else if (events[type] !== undefined) { + if (--this._eventsCount === 0) + this._events = Object.create(null); + else + delete events[type]; + } + return this; + } + + // emit removeListener for all listeners on all events + if (arguments.length === 0) { + var keys = Object.keys(events); + var key; + for (i = 0; i < keys.length; ++i) { + key = keys[i]; + if (key === 'removeListener') continue; + 
this.removeAllListeners(key); + } + this.removeAllListeners('removeListener'); + this._events = Object.create(null); + this._eventsCount = 0; + return this; + } + + listeners = events[type]; + + if (typeof listeners === 'function') { + this.removeListener(type, listeners); + } else if (listeners !== undefined) { + // LIFO order + for (i = listeners.length - 1; i >= 0; i--) { + this.removeListener(type, listeners[i]); + } + } + + return this; + }; + +function _listeners(target, type, unwrap) { + var events = target._events; + + if (events === undefined) + return []; + + var evlistener = events[type]; + if (evlistener === undefined) + return []; + + if (typeof evlistener === 'function') + return unwrap ? [evlistener.listener || evlistener] : [evlistener]; + + return unwrap ? + unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length); +} + +EventEmitter.prototype.listeners = function listeners(type) { + return _listeners(this, type, true); +}; + +EventEmitter.prototype.rawListeners = function rawListeners(type) { + return _listeners(this, type, false); +}; + +EventEmitter.listenerCount = function (emitter, type) { + if (typeof emitter.listenerCount === 'function') { + return emitter.listenerCount(type); + } else { + return listenerCount.call(emitter, type); + } +}; + +EventEmitter.prototype.listenerCount = listenerCount; +function listenerCount(type) { + var events = this._events; + + if (events !== undefined) { + var evlistener = events[type]; + + if (typeof evlistener === 'function') { + return 1; + } else if (evlistener !== undefined) { + return evlistener.length; + } + } + + return 0; +} + +EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? 
ReflectOwnKeys(this._events) : []; +}; + +function arrayClone(arr, n) { + var copy = new Array(n); + for (var i = 0; i < n; ++i) + copy[i] = arr[i]; + return copy; +} + +function spliceOne(list, index) { + for (; index + 1 < list.length; index++) + list[index] = list[index + 1]; + list.pop(); +} + +function unwrapListeners(arr) { + var ret = new Array(arr.length); + for (var i = 0; i < ret.length; ++i) { + ret[i] = arr[i].listener || arr[i]; + } + return ret; +} diff --git a/src/utils/filter/__tests__/bloomFilter.spec.ts b/src/utils/filter/__tests__/bloomFilter.spec.ts new file mode 100644 index 00000000..91b891a7 --- /dev/null +++ b/src/utils/filter/__tests__/bloomFilter.spec.ts @@ -0,0 +1,33 @@ +import { bloomFilterFactory } from '../bloomFilter'; + +describe('Bloom filter', () => { + + test('should add and check membership correctly', () => { + const bloomFilter = bloomFilterFactory(); + + expect(bloomFilter.add('feature', 'key')).toBe(true); + expect(bloomFilter.contains('feature1', 'key')).toBe(false); + expect(bloomFilter.contains('feature', 'key')).toBe(true); + }); + + test('should clear the filter', () => { + const bloomFilter = bloomFilterFactory(); + + bloomFilter.add('feature', 'key'); + bloomFilter.clear(); + + expect(bloomFilter.contains('feature', 'key')).toBe(false); + }); + + test('should work after clear with new entries', () => { + const bloomFilter = bloomFilterFactory(); + + bloomFilter.add('feature', 'key'); + bloomFilter.clear(); + + expect(bloomFilter.add('feature2', 'key')).toBe(true); + expect(bloomFilter.contains('feature3', 'key')).toBe(false); + expect(bloomFilter.contains('feature2', 'key')).toBe(true); + }); + +}); diff --git a/src/utils/filter/bloomFilter.ts b/src/utils/filter/bloomFilter.ts new file mode 100644 index 00000000..ec2211f3 --- /dev/null +++ b/src/utils/filter/bloomFilter.ts @@ -0,0 +1,38 @@ +// bloom-filters is supported on the following platforms: +// - Node.js: v4.0.0 or higher +// - Google Chrome: v41 or higher +// - 
Mozilla Firefox: v34 or higher +// - Microsoft Edge: v12 or higher +import { BloomFilter } from 'bloom-filters'; + +const EXPECTED_INSERTIONS = 10000000; +const ERROR_RATE = 0.01; +const REFRESH_RATE = 24 * 60 * 60000; // 24HS + +export function bloomFilterFactory(expectedInsertions = EXPECTED_INSERTIONS, errorRate = ERROR_RATE, refreshRate = REFRESH_RATE) { + let filter = BloomFilter.create(expectedInsertions, errorRate); + + return { + + refreshRate: refreshRate, + + add(key: string, value: string) { + const data = `${key}:${value}`; + if (filter.has(data)) { + return false; + } + filter.add(data); + return true; + }, + + contains(key: string, value: string) { + const data = `${key}:${value}`; + return filter.has(data); + }, + + clear() { + filter = BloomFilter.create(expectedInsertions, errorRate); + } + + }; +} diff --git a/src/utils/inputValidation/__tests__/definition.spec.ts b/src/utils/inputValidation/__tests__/definition.spec.ts new file mode 100644 index 00000000..0b3fe53c --- /dev/null +++ b/src/utils/inputValidation/__tests__/definition.spec.ts @@ -0,0 +1,71 @@ +import { ERROR_INVALID, ERROR_NULL, ERROR_EMPTY, WARN_TRIMMING } from '../../../logger/constants'; +import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; + +import { validateDefinition } from '../definition'; + +const invalidDefinitions = [ + { definition: [], msg: ERROR_INVALID }, + { definition: () => { }, msg: ERROR_INVALID }, + { definition: Object.create({}), msg: ERROR_INVALID }, + { definition: {}, msg: ERROR_INVALID }, + { definition: true, msg: ERROR_INVALID }, + { definition: false, msg: ERROR_INVALID }, + { definition: 10, msg: ERROR_INVALID }, + { definition: 0, msg: ERROR_INVALID }, + { definition: NaN, msg: ERROR_INVALID }, + { definition: Infinity, msg: ERROR_INVALID }, + { definition: null, msg: ERROR_NULL }, + { definition: undefined, msg: ERROR_NULL }, + { definition: new Promise(res => res), msg: ERROR_INVALID }, + { definition: Symbol('asd'), msg: 
ERROR_INVALID }, + { definition: '', msg: ERROR_EMPTY } +]; + +const trimmableDefinitions = [ + ' splitName ', + 'split_name2 \n ', + ' split_name3' +]; + +describe('INPUT VALIDATION for definition name', () => { + + afterEach(() => { loggerMock.mockClear(); }); + + test('Should return the provided definition name if it is a valid string without logging any errors', () => { + expect(validateDefinition(loggerMock, 'definitionName', 'some_method')).toBe('definitionName'); + expect(loggerMock.error.mock.calls[0]).not.toEqual('some_method'); + expect(validateDefinition(loggerMock, 'definition_name', 'some_method')).toBe('definition_name'); + expect(loggerMock.error.mock.calls[0]).not.toEqual('some_method'); + expect(validateDefinition(loggerMock, 'A_definition-name_29', 'some_method')).toBe('A_definition-name_29'); + expect(loggerMock.error.mock.calls[0]).not.toEqual('some_method'); + + expect(loggerMock.warn).not.toBeCalled(); + }); + + test('Should trim definition name if it is a valid string with trimmable spaces and log a warning', () => { + for (let i = 0; i < trimmableDefinitions.length; i++) { + const trimmableDefinition = trimmableDefinitions[i]; + expect(validateDefinition(loggerMock, trimmableDefinition, 'some_method')).toBe(trimmableDefinition.trim()); + expect(loggerMock.warn).toBeCalledWith(WARN_TRIMMING, ['some_method', 'feature flag name', trimmableDefinition]); + + loggerMock.warn.mockClear(); + } + + expect(loggerMock.error).not.toBeCalled(); + }); + + test('Should return false and log error if definition name is not a valid string', () => { + for (let i = 0; i < invalidDefinitions.length; i++) { + const invalidValue = invalidDefinitions[i]['definition']; + // @ts-ignore + const expectedLog = invalidDefinitions[i]['msg']; + + expect(validateDefinition(loggerMock, invalidValue, 'test_method')).toBe(false); + expect(loggerMock.error).toBeCalledWith(expectedLog, ['test_method', 'feature flag name']); + + loggerMock.error.mockClear(); + } + + 
expect(loggerMock.warn).not.toBeCalled(); + }); +}); diff --git a/src/utils/inputValidation/__tests__/definitionExistence.spec.ts b/src/utils/inputValidation/__tests__/definitionExistence.spec.ts new file mode 100644 index 00000000..2a320e43 --- /dev/null +++ b/src/utils/inputValidation/__tests__/definitionExistence.spec.ts @@ -0,0 +1,47 @@ + +import * as LabelConstants from '../../labels'; + +import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; + +import { validateDefinitionExistence } from '../definitionExistence'; +import { IReadinessManager } from '../../../readiness/types'; +import { WARN_NOT_EXISTENT_DEFINITION } from '../../../logger/constants'; + +describe('Split existence (special case)', () => { + + afterEach(() => { loggerMock.mockClear(); }); + + test('Should return a boolean indicating if the SDK was ready and there was no Split object or "definition not found" label', () => { + // @ts-expect-error + let readinessManagerMock = { + isReady: jest.fn(() => false) // Fake the signal for the non ready SDK + } as IReadinessManager; + + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'some_split', {}, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'some_split', null, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'some_split', undefined, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'some_split', 'a label', 'test_method')).toBe(true); // Should always return true when the SDK is not ready. + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'some_split', LabelConstants.DEFINITION_NOT_FOUND, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. 
+ + expect(loggerMock.warn).not.toBeCalled(); // There should have been no warning logs since the SDK was not ready yet. + expect(loggerMock.error).not.toBeCalled(); // There should have been no error logs since the SDK was not ready yet. + + // Prepare the mock to fake that the SDK is ready now. + (readinessManagerMock.isReady as jest.Mock).mockImplementation(() => true); + + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'other_split', {}, 'other_method')).toBe(true); // Should return true if it receives a Split Object instead of null (when the object is not found, for manager). + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'other_split', 'a label', 'other_method')).toBe(true); // Should return true if it receives a Label and it is not split not found (when the Split was not found on the storage, for client). + + expect(loggerMock.warn).not.toBeCalled(); // There should have been no warning logs since the values we used so far were considered valid. + expect(loggerMock.error).not.toBeCalled(); // There should have been no error logs since the values we used so far were considered valid. + + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'other_split', null, 'other_method')).toBe(false); // Should return false if it receives a non-truthy value as a split object or label + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'other_split', undefined, 'other_method')).toBe(false); // Should return false if it receives a non-truthy value as a split object or label + expect(validateDefinitionExistence(loggerMock, readinessManagerMock, 'other_split', LabelConstants.DEFINITION_NOT_FOUND, 'other_method')).toBe(false); // Should return false if it receives a label but it is the split not found one. 
+ + expect(loggerMock.warn).toBeCalledTimes(3); // It should have logged 3 warnings, one per each time we called it + loggerMock.warn.mock.calls.forEach(call => expect(call).toEqual([WARN_NOT_EXISTENT_DEFINITION, ['other_method', 'other_split']])); // Warning logs should have the correct message. + + expect(loggerMock.error).not.toBeCalled(); // We log warnings, not errors. + }); +}); diff --git a/src/utils/inputValidation/__tests__/definitions.spec.ts b/src/utils/inputValidation/__tests__/definitions.spec.ts new file mode 100644 index 00000000..56197955 --- /dev/null +++ b/src/utils/inputValidation/__tests__/definitions.spec.ts @@ -0,0 +1,87 @@ +import uniq from 'lodash/uniq'; +import startsWith from 'lodash/startsWith'; + +// mocks sdkLogger +import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; +import { ERROR_EMPTY_ARRAY } from '../../../logger/constants'; + +// mocks validateDefinition +jest.mock('../definition'); +import { validateDefinition } from '../definition'; +const validateDefinitionMock = validateDefinition as jest.Mock; +validateDefinitionMock.mockImplementation((_, maybeDefinition) => maybeDefinition); + +// test target +import { validateDefinitions } from '../definitions'; + +const invalidDefinitions = [ + [], + {}, + Object.create({}), + () => { }, + false, + true, + 5, + 'something', + NaN, + -Infinity, + new Promise(res => res), + Symbol('asd'), + null, + undefined, + NaN +]; + +describe('INPUT VALIDATION for definition names', () => { + + afterEach(() => { + loggerMock.mockClear(); + validateDefinitionMock.mockClear(); + }); + + test('Should return the provided array if it is a valid definition names array without logging any errors', () => { + const validArr = ['definitionName1', 'definition_name_2', 'definition-name-3']; + + expect(validateDefinitions(loggerMock, validArr, 'some_method')).toEqual(validArr); + expect(validateDefinitionMock).toBeCalledTimes(validArr.length); + expect(loggerMock.error).not.toBeCalled(); + + 
expect(loggerMock.warn).not.toBeCalled(); + }); + + test('Should return the provided array if it is a valid definition names array removing duplications, without logging any errors', () => { + const validArr = ['definition_name', 'definition_name', 'definition-name']; + + expect(validateDefinitions(loggerMock, validArr, 'some_method')).toEqual(uniq(validArr)); + expect(validateDefinitionMock).toBeCalledTimes(validArr.length); + expect(loggerMock.error).not.toBeCalled(); + + expect(loggerMock.warn).not.toBeCalled(); + }); + + test('Should return false and log an error for the array if it is invalid', () => { + for (let i = 0; i < invalidDefinitions.length; i++) { + expect(validateDefinitions(loggerMock, invalidDefinitions[i], 'test_method')).toBe(false); + expect(loggerMock.error).toBeCalledWith(ERROR_EMPTY_ARRAY, ['test_method', 'feature flag names']); + expect(validateDefinitionMock).not.toBeCalled(); + + loggerMock.error.mockClear(); + } + + expect(loggerMock.warn).not.toBeCalled(); + }); + + test('Should strip out any invalid value from the array', () => { + validateDefinitionMock.mockImplementation((_, value) => startsWith(value, 'invalid') ? 
false : value); + const myArr = ['valid_name', 'invalid_name', 'invalid_val_2', 'something_valid']; + + expect(validateDefinitions(loggerMock, myArr, 'test_method')).toEqual(['valid_name', 'something_valid']); + + for (let i = 0; i < myArr.length; i++) { + expect(validateDefinitionMock.mock.calls[i]).toEqual([loggerMock, myArr[i], 'test_method', 'feature flag name']); + } + + expect(loggerMock.error).not.toBeCalled(); + expect(loggerMock.warn).not.toBeCalled(); + }); +}); diff --git a/src/utils/inputValidation/__tests__/isOperational.spec.ts b/src/utils/inputValidation/__tests__/isOperational.spec.ts index 19c1373a..4b93096a 100644 --- a/src/utils/inputValidation/__tests__/isOperational.spec.ts +++ b/src/utils/inputValidation/__tests__/isOperational.spec.ts @@ -46,7 +46,7 @@ describe('validateIfReadyFromCache', () => { // @ts-ignore expect(validateIfReadyFromCache(loggerMock, readinessManagerMock, 'test_method')).toBe(false); // It should return true if SDK was ready. expect(readinessManagerMock.isReadyFromCache).toBeCalledTimes(1); // It checks for SDK_READY_FROM_CACHE status. - expect(loggerMock.warn).toBeCalledWith(CLIENT_NOT_READY_FROM_CACHE, ['test_method', '']); // It should log the expected warning. + expect(loggerMock.warn).toBeCalledWith(CLIENT_NOT_READY_FROM_CACHE, ['test_method']); // It should log the expected warning. expect(loggerMock.error).not.toBeCalled(); // But it should not log any errors. 
}); }); diff --git a/src/utils/inputValidation/__tests__/split.spec.ts b/src/utils/inputValidation/__tests__/split.spec.ts deleted file mode 100644 index 01bb381b..00000000 --- a/src/utils/inputValidation/__tests__/split.spec.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { ERROR_INVALID, ERROR_NULL, ERROR_EMPTY, WARN_TRIMMING } from '../../../logger/constants'; -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; - -import { validateSplit } from '../split'; - -const invalidSplits = [ - { split: [], msg: ERROR_INVALID }, - { split: () => { }, msg: ERROR_INVALID }, - { split: Object.create({}), msg: ERROR_INVALID }, - { split: {}, msg: ERROR_INVALID }, - { split: true, msg: ERROR_INVALID }, - { split: false, msg: ERROR_INVALID }, - { split: 10, msg: ERROR_INVALID }, - { split: 0, msg: ERROR_INVALID }, - { split: NaN, msg: ERROR_INVALID }, - { split: Infinity, msg: ERROR_INVALID }, - { split: null, msg: ERROR_NULL }, - { split: undefined, msg: ERROR_NULL }, - { split: new Promise(res => res), msg: ERROR_INVALID }, - { split: Symbol('asd'), msg: ERROR_INVALID }, - { split: '', msg: ERROR_EMPTY } -]; - -const trimmableSplits = [ - ' splitName ', - 'split_name2 \n ', - ' split_name3' -]; - -describe('INPUT VALIDATION for feature flag name', () => { - - afterEach(() => { loggerMock.mockClear(); }); - - test('Should return the provided feature flag name if it is a valid string without logging any errors', () => { - expect(validateSplit(loggerMock, 'splitName', 'some_method_splitName')).toBe('splitName'); // It should return the provided string if it is valid. - expect(loggerMock.error.mock.calls[0]).not.toEqual('some_method_splitName'); // Should not log any errors. - expect(validateSplit(loggerMock, 'split_name', 'some_method_splitName')).toBe('split_name'); // It should return the provided string if it is valid. - expect(loggerMock.error.mock.calls[0]).not.toEqual('some_method_splitName'); // Should not log any errors. 
- expect(validateSplit(loggerMock, 'A_split-name_29', 'some_method_splitName')).toBe('A_split-name_29'); // It should return the provided string if it is valid. - expect(loggerMock.error.mock.calls[0]).not.toEqual('some_method_splitName'); // Should not log any errors. - - expect(loggerMock.warn).not.toBeCalled(); // It should have not logged any warnings. - }); - - test('Should trim feature flag name if it is a valid string with trimmable spaces and log a warning (if those are enabled)', () => { - for (let i = 0; i < trimmableSplits.length; i++) { - const trimmableSplit = trimmableSplits[i]; - expect(validateSplit(loggerMock, trimmableSplit, 'some_method_splitName')).toBe(trimmableSplit.trim()); // It should return the trimmed version of the feature flag name received. - expect(loggerMock.warn).toBeCalledWith(WARN_TRIMMING, ['some_method_splitName', 'feature flag name', trimmableSplit]); // Should log a warning if those are enabled. - - loggerMock.warn.mockClear(); - } - - expect(loggerMock.error).not.toBeCalled(); // It should have not logged any errors. - }); - - test('Should return false and log error if feature flag name is not a valid string', () => { - for (let i = 0; i < invalidSplits.length; i++) { - const invalidValue = invalidSplits[i]['split']; - // @ts-ignore - const expectedLog = invalidSplits[i]['msg']; - - expect(validateSplit(loggerMock, invalidValue, 'test_method')).toBe(false); // Invalid event types should always return false. - expect(loggerMock.error).toBeCalledWith(expectedLog, ['test_method', 'feature flag name']); // Should log the error for the invalid event type. - - loggerMock.error.mockClear(); - } - - expect(loggerMock.warn).not.toBeCalled(); // It should have not logged any warnings. 
- }); -}); diff --git a/src/utils/inputValidation/__tests__/splitExistence.spec.ts b/src/utils/inputValidation/__tests__/splitExistence.spec.ts deleted file mode 100644 index 9d78df9e..00000000 --- a/src/utils/inputValidation/__tests__/splitExistence.spec.ts +++ /dev/null @@ -1,47 +0,0 @@ - -import * as LabelConstants from '../../labels'; - -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; - -import { validateSplitExistence } from '../splitExistence'; -import { IReadinessManager } from '../../../readiness/types'; -import { WARN_NOT_EXISTENT_SPLIT } from '../../../logger/constants'; - -describe('Split existence (special case)', () => { - - afterEach(() => { loggerMock.mockClear(); }); - - test('Should return a boolean indicating if the SDK was ready and there was no Split object or "definition not found" label', () => { - // @ts-expect-error - let readinessManagerMock = { - isReady: jest.fn(() => false) // Fake the signal for the non ready SDK - } as IReadinessManager; - - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'some_split', {}, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'some_split', null, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'some_split', undefined, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'some_split', 'a label', 'test_method')).toBe(true); // Should always return true when the SDK is not ready. - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'some_split', LabelConstants.SPLIT_NOT_FOUND, 'test_method')).toBe(true); // Should always return true when the SDK is not ready. 
- - expect(loggerMock.warn).not.toBeCalled(); // There should have been no warning logs since the SDK was not ready yet. - expect(loggerMock.error).not.toBeCalled(); // There should have been no error logs since the SDK was not ready yet. - - // Prepare the mock to fake that the SDK is ready now. - (readinessManagerMock.isReady as jest.Mock).mockImplementation(() => true); - - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'other_split', {}, 'other_method')).toBe(true); // Should return true if it receives a Split Object instead of null (when the object is not found, for manager). - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'other_split', 'a label', 'other_method')).toBe(true); // Should return true if it receives a Label and it is not split not found (when the Split was not found on the storage, for client). - - expect(loggerMock.warn).not.toBeCalled(); // There should have been no warning logs since the values we used so far were considered valid. - expect(loggerMock.error).not.toBeCalled(); // There should have been no error logs since the values we used so far were considered valid. - - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'other_split', null, 'other_method')).toBe(false); // Should return false if it receives a non-truthy value as a split object or label - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'other_split', undefined, 'other_method')).toBe(false); // Should return false if it receives a non-truthy value as a split object or label - expect(validateSplitExistence(loggerMock, readinessManagerMock, 'other_split', LabelConstants.SPLIT_NOT_FOUND, 'other_method')).toBe(false); // Should return false if it receives a label but it is the split not found one. 
- - expect(loggerMock.warn).toBeCalledTimes(3); // It should have logged 3 warnings, one per each time we called it - loggerMock.warn.mock.calls.forEach(call => expect(call).toEqual([WARN_NOT_EXISTENT_SPLIT, ['other_method', 'other_split']])); // Warning logs should have the correct message. - - expect(loggerMock.error).not.toBeCalled(); // We log warnings, not errors. - }); -}); diff --git a/src/utils/inputValidation/__tests__/splits.spec.ts b/src/utils/inputValidation/__tests__/splits.spec.ts deleted file mode 100644 index 923b31b3..00000000 --- a/src/utils/inputValidation/__tests__/splits.spec.ts +++ /dev/null @@ -1,88 +0,0 @@ -import uniq from 'lodash/uniq'; -import startsWith from 'lodash/startsWith'; - -// mocks sdkLogger -import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; -import { ERROR_EMPTY_ARRAY } from '../../../logger/constants'; - -// mocks validateSplit -jest.mock('../split'); -import { validateSplit } from '../split'; -const validateSplitMock = validateSplit as jest.Mock; -validateSplitMock.mockImplementation((_, maybeSplit) => maybeSplit); - -// test target -import { validateSplits } from '../splits'; - -const invalidSplits = [ - [], - {}, - Object.create({}), - () => { }, - false, - true, - 5, - 'something', - NaN, - -Infinity, - new Promise(res => res), - Symbol('asd'), - null, - undefined, - NaN -]; - -describe('INPUT VALIDATION for feature flag names', () => { - - afterEach(() => { - loggerMock.mockClear(); - validateSplitMock.mockClear(); - }); - - test('Should return the provided array if it is a valid splits names array without logging any errors', () => { - const validArr = ['splitName1', 'split_name_2', 'split-name-3']; - - expect(validateSplits(loggerMock, validArr, 'some_method_splits')).toEqual(validArr); // It should return the provided array without changes if it is valid. - expect(validateSplitMock).toBeCalledTimes(validArr.length); // Should have validated each value independently. 
- expect(loggerMock.error).not.toBeCalled(); // Should not log any errors on the collection. - - expect(loggerMock.warn).not.toBeCalled(); // It should have not logged any warnings. - }); - - test('Should return the provided array if it is a valid splits names array removing duplications, without logging any errors', () => { - const validArr = ['split_name', 'split_name', 'split-name']; - - expect(validateSplits(loggerMock, validArr, 'some_method_splits')).toEqual(uniq(validArr)); // It should return the provided array without changes if it is valid. - expect(validateSplitMock).toBeCalledTimes(validArr.length); // Should have validated each value independently. - expect(loggerMock.error).not.toBeCalled(); // Should not log any errors on the collection. - - expect(loggerMock.warn).not.toBeCalled(); // It should have not logged any warnings. - }); - - test('Should return false and log an error for the array if it is invalid', () => { - for (let i = 0; i < invalidSplits.length; i++) { - expect(validateSplits(loggerMock, invalidSplits[i], 'test_method')).toBe(false); // It will return false as the array is of an incorrect type. - expect(loggerMock.error).toBeCalledWith(ERROR_EMPTY_ARRAY, ['test_method', 'feature flag names']); // Should log the error for the collection. - expect(validateSplitMock).not.toBeCalled(); // Should not try to validate any inner value if there is no valid array. - - loggerMock.error.mockClear(); - } - - expect(loggerMock.warn).not.toBeCalled(); // It should have not logged any warnings. - }); - - test('Should strip out any invalid value from the array', () => { - // We use a mock function for individual validation. - validateSplitMock.mockImplementation((_, value) => startsWith(value, 'invalid') ? 
false : value); - const myArr = ['valid_name', 'invalid_name', 'invalid_val_2', 'something_valid']; - - expect(validateSplits(loggerMock, myArr, 'test_method')).toEqual(['valid_name', 'something_valid']); // It will return the array without the invalid values. - - for (let i = 0; i < myArr.length; i++) { - expect(validateSplitMock.mock.calls[i]).toEqual([loggerMock, myArr[i], 'test_method', 'feature flag name']); // Should validate any inner value independently. - } - - expect(loggerMock.error).not.toBeCalled(); // Should not log any error for the collection. - expect(loggerMock.warn).not.toBeCalled(); // It should have not logged any warnings for the collection. - }); -}); diff --git a/src/utils/inputValidation/__tests__/target.spec.ts b/src/utils/inputValidation/__tests__/target.spec.ts new file mode 100644 index 00000000..cf09935a --- /dev/null +++ b/src/utils/inputValidation/__tests__/target.spec.ts @@ -0,0 +1,62 @@ +import { ERROR_NOT_PLAIN_OBJECT } from '../../../logger/constants'; +import { loggerMock } from '../../../logger/__tests__/sdkLogger.mock'; + +import { validateTarget } from '../target'; + +const invalidTargets = [ + [], + () => { }, + false, + true, + 5, + 'something', + NaN, + -Infinity, + new Promise(res => res), + Symbol('asd'), + null, + undefined, +]; + +describe('INPUT VALIDATION for Target', () => { + + afterEach(() => { loggerMock.mockClear(); }); + + test('Should return the validated target if it is a valid object with key and attributes', () => { + const validTarget = { key: 'validKey', attributes: { attr1: 'value1' } }; + + expect(validateTarget(loggerMock, validTarget, 'test_method')).toEqual(validTarget); + expect(loggerMock.error).not.toBeCalled(); + expect(loggerMock.warn).not.toBeCalled(); + }); + + test('Should return the validated target if attributes are undefined', () => { + const validTarget = { key: 'validKey' }; + + expect(validateTarget(loggerMock, validTarget, 'test_method')).toEqual({ key: 'validKey', attributes: 
undefined }); + expect(loggerMock.error).not.toBeCalled(); + }); + + test('Should return false and log error if target is not a plain object', () => { + for (let i = 0; i < invalidTargets.length; i++) { + expect(validateTarget(loggerMock, invalidTargets[i], 'test_method')).toBe(false); + expect(loggerMock.error).toBeCalledWith(ERROR_NOT_PLAIN_OBJECT, ['test_method', 'target']); + + loggerMock.error.mockClear(); + } + }); + + test('Should return false if key is invalid', () => { + expect(validateTarget(loggerMock, { key: '', attributes: { attr1: 'value1' } }, 'test_method')).toBe(false); + expect(validateTarget(loggerMock, { key: null }, 'test_method')).toBe(false); + expect(validateTarget(loggerMock, { key: true }, 'test_method')).toBe(false); + expect(loggerMock.error).toBeCalled(); + }); + + test('Should return false if attributes are invalid', () => { + expect(validateTarget(loggerMock, { key: 'validKey', attributes: 'invalid' }, 'test_method')).toBe(false); + expect(validateTarget(loggerMock, { key: 'validKey', attributes: true }, 'test_method')).toBe(false); + expect(validateTarget(loggerMock, { key: 'validKey', attributes: 5 }, 'test_method')).toBe(false); + expect(loggerMock.error).toBeCalled(); + }); +}); diff --git a/src/utils/inputValidation/definition.ts b/src/utils/inputValidation/definition.ts new file mode 100644 index 00000000..e6b9e29f --- /dev/null +++ b/src/utils/inputValidation/definition.ts @@ -0,0 +1,27 @@ +import { ERROR_NULL, ERROR_INVALID, WARN_TRIMMING, ERROR_EMPTY } from '../../logger/constants'; +import { ILogger } from '../../logger/types'; +import { isString } from '../lang'; + +// include BOM and nbsp +const TRIMMABLE_SPACES_REGEX = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/; + +export function validateDefinition(log: ILogger, maybeDefinition: any, method: string, item = 'feature flag name'): string | false { + if (maybeDefinition == undefined) { // eslint-disable-line eqeqeq + log.error(ERROR_NULL, [method, item]); + } else if 
(!isString(maybeDefinition)) { + log.error(ERROR_INVALID, [method, item]); + } else { + if (TRIMMABLE_SPACES_REGEX.test(maybeDefinition)) { + log.warn(WARN_TRIMMING, [method, item, maybeDefinition]); + maybeDefinition = maybeDefinition.trim(); + } + + if (maybeDefinition.length > 0) { + return maybeDefinition; + } else { + log.error(ERROR_EMPTY, [method, item]); + } + } + + return false; +} diff --git a/src/utils/inputValidation/definitionExistence.ts b/src/utils/inputValidation/definitionExistence.ts new file mode 100644 index 00000000..290d8b59 --- /dev/null +++ b/src/utils/inputValidation/definitionExistence.ts @@ -0,0 +1,19 @@ +import { FALLBACK_DEFINITION_NOT_FOUND, DEFINITION_NOT_FOUND } from '../labels'; +import { IReadinessManager } from '../../readiness/types'; +import { ILogger } from '../../logger/types'; +import { WARN_NOT_EXISTENT_DEFINITION } from '../../logger/constants'; + +/** + * This is defined here and in this format mostly because of the logger and the fact that it's considered a validation at product level. + * But it's not going to run on the input validation layer. In any case, the most compelling reason to use it as we do is to avoid going to Redis and get a definition twice. 
+ */ +export function validateDefinitionExistence(log: ILogger, readinessManager: IReadinessManager, definitionName: string, labelOrDefinitionObj: any, method: string): boolean { + if (readinessManager.isReady()) { // Only if it's ready (synced with BE) we validate this, otherwise it may just be that the SDK is still syncing + if (labelOrDefinitionObj === DEFINITION_NOT_FOUND || labelOrDefinitionObj === FALLBACK_DEFINITION_NOT_FOUND || labelOrDefinitionObj == null) { + log.warn(WARN_NOT_EXISTENT_DEFINITION, [method, definitionName]); + return false; + } + } + + return true; +} diff --git a/src/utils/inputValidation/definitions.ts b/src/utils/inputValidation/definitions.ts new file mode 100644 index 00000000..c008ed87 --- /dev/null +++ b/src/utils/inputValidation/definitions.ts @@ -0,0 +1,21 @@ +import { ERROR_EMPTY_ARRAY } from '../../logger/constants'; +import { ILogger } from '../../logger/types'; +import { uniq } from '../lang'; +import { validateDefinition } from './definition'; + +export function validateDefinitions(log: ILogger, maybeDefinitions: any, method: string, listName = 'feature flag names', item = 'feature flag name'): string[] | false { + if (Array.isArray(maybeDefinitions) && maybeDefinitions.length > 0) { + let validatedArray: string[] = []; + // Remove invalid values + maybeDefinitions.forEach(maybeDefinition => { + const definitionName = validateDefinition(log, maybeDefinition, method, item); + if (definitionName) validatedArray.push(definitionName); + }); + + // Strip off duplicated values if we have valid definition names then return + if (validatedArray.length) return uniq(validatedArray); + } + + log.error(ERROR_EMPTY_ARRAY, [method, listName]); + return false; +} diff --git a/src/utils/inputValidation/index.ts b/src/utils/inputValidation/index.ts index f6e06c5e..7e6752ce 100644 --- a/src/utils/inputValidation/index.ts +++ b/src/utils/inputValidation/index.ts @@ -4,10 +4,10 @@ export { validateEvent } from './event'; export { 
validateEventValue } from './eventValue'; export { validateEventProperties } from './eventProperties'; export { validateKey } from './key'; -export { validateSplit } from './split'; -export { validateSplits } from './splits'; +export { validateDefinition } from './definition'; +export { validateDefinitions } from './definitions'; export { validateTrafficType } from './trafficType'; export { validateIfNotDestroyed, validateIfReadyFromCache, validateIfOperational } from './isOperational'; -export { validateSplitExistence } from './splitExistence'; +export { validateDefinitionExistence } from './definitionExistence'; export { validateTrafficTypeExistence } from './trafficTypeExistence'; export { validateEvaluationOptions } from './eventProperties'; diff --git a/src/utils/inputValidation/isOperational.ts b/src/utils/inputValidation/isOperational.ts index 5f122926..5fbec731 100644 --- a/src/utils/inputValidation/isOperational.ts +++ b/src/utils/inputValidation/isOperational.ts @@ -9,14 +9,14 @@ export function validateIfNotDestroyed(log: ILogger, readinessManager: IReadines return false; } -export function validateIfReadyFromCache(log: ILogger, readinessManager: IReadinessManager, method: string, featureFlagNameOrNames?: string | string[] | false) { +export function validateIfReadyFromCache(log: ILogger, readinessManager: IReadinessManager, method: string) { if (readinessManager.isReadyFromCache()) return true; - log.warn(CLIENT_NOT_READY_FROM_CACHE, [method, featureFlagNameOrNames ? 
` for feature flag ${featureFlagNameOrNames.toString()}` : '']); + log.warn(CLIENT_NOT_READY_FROM_CACHE, [method]); return false; } // Operational means that the SDK is ready to evaluate (not destroyed and ready from cache) -export function validateIfOperational(log: ILogger, readinessManager: IReadinessManager, method: string, featureFlagNameOrNames?: string | string[] | false) { - return validateIfNotDestroyed(log, readinessManager, method) && validateIfReadyFromCache(log, readinessManager, method, featureFlagNameOrNames); +export function validateIfOperational(log: ILogger, readinessManager: IReadinessManager, method: string) { + return validateIfNotDestroyed(log, readinessManager, method) && validateIfReadyFromCache(log, readinessManager, method); } diff --git a/src/utils/inputValidation/split.ts b/src/utils/inputValidation/split.ts deleted file mode 100644 index d0e659e0..00000000 --- a/src/utils/inputValidation/split.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { ERROR_NULL, ERROR_INVALID, WARN_TRIMMING, ERROR_EMPTY } from '../../logger/constants'; -import { ILogger } from '../../logger/types'; -import { isString } from '../lang'; - -// include BOM and nbsp -const TRIMMABLE_SPACES_REGEX = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/; - -export function validateSplit(log: ILogger, maybeSplit: any, method: string, item = 'feature flag name'): string | false { - if (maybeSplit == undefined) { // eslint-disable-line eqeqeq - log.error(ERROR_NULL, [method, item]); - } else if (!isString(maybeSplit)) { - log.error(ERROR_INVALID, [method, item]); - } else { - if (TRIMMABLE_SPACES_REGEX.test(maybeSplit)) { - log.warn(WARN_TRIMMING, [method, item, maybeSplit]); - maybeSplit = maybeSplit.trim(); - } - - if (maybeSplit.length > 0) { - return maybeSplit; - } else { - log.error(ERROR_EMPTY, [method, item]); - } - } - - return false; -} diff --git a/src/utils/inputValidation/splitExistence.ts b/src/utils/inputValidation/splitExistence.ts deleted file mode 100644 index 
c8559b2a..00000000 --- a/src/utils/inputValidation/splitExistence.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { FALLBACK_SPLIT_NOT_FOUND, SPLIT_NOT_FOUND } from '../labels'; -import { IReadinessManager } from '../../readiness/types'; -import { ILogger } from '../../logger/types'; -import { WARN_NOT_EXISTENT_SPLIT } from '../../logger/constants'; - -/** - * This is defined here and in this format mostly because of the logger and the fact that it's considered a validation at product level. - * But it's not going to run on the input validation layer. In any case, the most compelling reason to use it as we do is to avoid going to Redis and get a split twice. - */ -export function validateSplitExistence(log: ILogger, readinessManager: IReadinessManager, splitName: string, labelOrSplitObj: any, method: string): boolean { - if (readinessManager.isReady()) { // Only if it's ready (synced with BE) we validate this, otherwise it may just be that the SDK is still syncing - if (labelOrSplitObj === SPLIT_NOT_FOUND || labelOrSplitObj === FALLBACK_SPLIT_NOT_FOUND || labelOrSplitObj == null) { - log.warn(WARN_NOT_EXISTENT_SPLIT, [method, splitName]); - return false; - } - } - - return true; -} diff --git a/src/utils/inputValidation/splits.ts b/src/utils/inputValidation/splits.ts deleted file mode 100644 index d23e7bd1..00000000 --- a/src/utils/inputValidation/splits.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { ERROR_EMPTY_ARRAY } from '../../logger/constants'; -import { ILogger } from '../../logger/types'; -import { uniq } from '../lang'; -import { validateSplit } from './split'; - -export function validateSplits(log: ILogger, maybeSplits: any, method: string, listName = 'feature flag names', item = 'feature flag name'): string[] | false { - if (Array.isArray(maybeSplits) && maybeSplits.length > 0) { - let validatedArray: string[] = []; - // Remove invalid values - maybeSplits.forEach(maybeSplit => { - const splitName = validateSplit(log, maybeSplit, method, item); - if (splitName) 
validatedArray.push(splitName); - }); - - // Strip off duplicated values if we have valid split names then return - if (validatedArray.length) return uniq(validatedArray); - } - - log.error(ERROR_EMPTY_ARRAY, [method, listName]); - return false; -} diff --git a/src/utils/inputValidation/target.ts b/src/utils/inputValidation/target.ts new file mode 100644 index 00000000..393e65d7 --- /dev/null +++ b/src/utils/inputValidation/target.ts @@ -0,0 +1,21 @@ +import { isObject } from '../lang'; +import SplitIO from '../../../types/splitio'; +import { ILogger } from '../../logger/types'; +import { validateKey } from './key'; +import { validateAttributes } from './attributes'; +import { ERROR_NOT_PLAIN_OBJECT } from '../../logger/constants'; + +export function validateTarget(log: ILogger, maybeTarget: any, method: string): SplitIO.Target | false { + if (!isObject(maybeTarget)) { + log.error(ERROR_NOT_PLAIN_OBJECT, [method, 'target']); + return false; + } + + const key = validateKey(log, maybeTarget.key, method); + if (key === false) return false; + + const attributes = validateAttributes(log, maybeTarget.attributes, method); + if (attributes === false) return false; + + return { ...maybeTarget, key, attributes }; +} diff --git a/src/utils/labels/index.ts b/src/utils/labels/index.ts index 78117a1d..5eccf3a6 100644 --- a/src/utils/labels/index.ts +++ b/src/utils/labels/index.ts @@ -2,11 +2,11 @@ import { FALLBACK_PREFIX } from '../../evaluator/fallbackTreatmentsCalculator'; export const SPLIT_KILLED = 'killed'; export const NO_CONDITION_MATCH = 'default rule'; -export const SPLIT_NOT_FOUND = 'definition not found'; +export const DEFINITION_NOT_FOUND = 'definition not found'; export const SDK_NOT_READY = 'not ready'; export const EXCEPTION = 'exception'; export const SPLIT_ARCHIVED = 'archived'; export const NOT_IN_SPLIT = 'not in split'; export const UNSUPPORTED_MATCHER_TYPE = 'targeting rule type unsupported by sdk'; export const PREREQUISITES_NOT_MET = 'prerequisites not 
met'; -export const FALLBACK_SPLIT_NOT_FOUND = FALLBACK_PREFIX + SPLIT_NOT_FOUND; +export const FALLBACK_DEFINITION_NOT_FOUND = FALLBACK_PREFIX + DEFINITION_NOT_FOUND; diff --git a/src/utils/settingsValidation/__tests__/settings.mocks.ts b/src/utils/settingsValidation/__tests__/settings.mocks.ts index f850f0bf..7bce6249 100644 --- a/src/utils/settingsValidation/__tests__/settings.mocks.ts +++ b/src/utils/settingsValidation/__tests__/settings.mocks.ts @@ -76,11 +76,11 @@ export const fullSettings: ISettings = { hostname: false }, urls: { - events: 'events', - sdk: 'sdk', - auth: 'auth', - streaming: 'streaming', - telemetry: 'telemetry' + sdk: 'https://sdk.split.io/api', + events: 'https://events.split.io/api', + auth: 'https://auth.split.io/api', + streaming: 'https://streaming.split.io', + telemetry: 'https://telemetry.split.io/api' }, log: loggerMock, userConsent: undefined diff --git a/src/utils/settingsValidation/splitFilters.ts b/src/utils/settingsValidation/splitFilters.ts index 455d3ee1..facd81a3 100644 --- a/src/utils/settingsValidation/splitFilters.ts +++ b/src/utils/settingsValidation/splitFilters.ts @@ -1,4 +1,4 @@ -import { validateSplits } from '../inputValidation/splits'; +import { validateDefinitions } from '../inputValidation/definitions'; import { ISplitFiltersValidation } from '../../dtos/types'; import SplitIO from '../../../types/splitio'; import { ILogger } from '../../logger/types'; @@ -49,7 +49,7 @@ function validateFilterType(maybeFilterType: any): maybeFilterType is SplitIO.Sp */ function validateSplitFilter(log: ILogger, type: SplitIO.SplitFilterType, values: string[], maxLength: number) { // validate and remove invalid and duplicated values - let result = validateSplits(log, values, LOG_PREFIX_SETTINGS, `${type} filter`, `${type} filter value`); + let result = validateDefinitions(log, values, LOG_PREFIX_SETTINGS, `${type} filter`, `${type} filter value`); if (result) { @@ -181,7 +181,7 @@ export function validateSplitFilters(log: ILogger, 
maybeSplitFilters: any, mode: } export function validateFlagSets(log: ILogger, method: string, flagSets: string[], flagSetsInConfig: string[]): string[] | false { - const sets = validateSplits(log, flagSets, method, 'flag sets', 'flag set'); + const sets = validateDefinitions(log, flagSets, method, 'flag sets', 'flag set'); let toReturn = sets ? sanitizeFlagSets(log, sets, method) : []; if (flagSetsInConfig.length > 0) { toReturn = toReturn.filter(flagSet => { diff --git a/tsconfig.json b/tsconfig.json index 20433c1a..028de601 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -5,7 +5,7 @@ "target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ "module": "es2015", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ // "lib": [], /* Specify library files to be included in the compilation. */ - // "allowJs": true, /* Allow javascript files to be compiled. */ + "allowJs": true, /* Allow javascript files to be compiled. */ // "checkJs": true, /* Report errors in .js files. */ // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ "declaration": false, /* Generates corresponding '.d.ts' file. */ diff --git a/types/splitio.d.ts b/types/splitio.d.ts index b8753566..30149a84 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -73,10 +73,12 @@ interface ISharedSettings { * const factory = SplitFactory({ * ... * sync: { - * getHeaderOverrides: (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' + * requestOptions: { + * getHeaderOverrides: (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; * }; * } * } @@ -509,7 +511,7 @@ declare namespace SplitIO { /** * Metadata type for SDK update events. 
*/ - type SdkUpdateMetadataType = 'FLAGS_UPDATE' | 'SEGMENTS_UPDATE'; + type SdkUpdateMetadataType = 'CONFIGS_UPDATE' | 'FLAGS_UPDATE' | 'SEGMENTS_UPDATE'; /** * Metadata for the ready events emitted when the SDK is ready to evaluate feature flags. @@ -1590,10 +1592,12 @@ declare namespace SplitIO { * const factory = SplitFactory({ * ... * sync: { - * getHeaderOverrides: (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' + * requestOptions: { + * getHeaderOverrides: (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; * }; * } * } @@ -2284,4 +2288,180 @@ declare namespace SplitIO { */ split(featureFlagName: string): SplitViewAsync; } + + /** + * Fallback configuration objects returned by the `client.getConfig` method when the SDK is not ready or the provided config name is not found. + */ + type FallbackConfigs = { + /** + * Fallback config for all config names. + */ + global?: Config; + /** + * Fallback configs for specific config names. It takes precedence over the global fallback config. + */ + byName?: { + [configName: string]: Config; + }; + } + + /** + * Configs SDK settings. + */ + interface ConfigsClientSettings { + /** + * Your SDK key. + * + * @see {@link https://developer.harness.io/docs/feature-management-experimentation/management-and-administration/account-settings/api-keys/} + */ + authorizationKey: string; + /** + * Configs definitions refresh rate for polling, in seconds. + * + * @defaultValue `60` + */ + configsRefreshRate?: number; + /** + * Logging level. + * + * @defaultValue `'NONE'` + */ + logLevel?: LogLevel; + /** + * Time in seconds until SDK ready timeout is emitted. + * + * @defaultValue `10` + */ + timeout?: number; + /** + * Custom endpoints to replace the default ones used by the SDK. 
+ */ + urls?: Pick<UrlSettings, 'sdk' | 'events' | 'auth' | 'streaming' | 'telemetry'>; + /** + * Fallback configuration objects returned by the `client.getConfig` method when the SDK is not ready or the provided config name is not found. + */ + fallbackConfigs?: FallbackConfigs; + /** + * Custom options object for HTTP(S) requests. + * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. + */ + requestOptions?: { + /** + * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. + * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. + * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` + * Or provide keys with different cases since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` + * + * @defaultValue `undefined` + * + * @param context - The context for the request, which contains the `headers` property object representing the current headers in the request. + * @returns An object representing a set of headers to be merged with the current headers. + * + * @example + * ``` + * const client = ConfigsClient({ + * ... + * requestOptions: { + * getHeaderOverrides: (context) => { + * return { + * 'Authorization': context.headers['Authorization'] + ', other-value', + * 'custom-header': 'custom-value' + * }; + * } + * } + * }); + * ``` + */ + getHeaderOverrides?: (context: { headers: Record<string, string>; }) => Record<string, string>; + /** + * Custom Node.js HTTP(S) Agent used by the SDK for HTTP(S) requests. + * + * You can use it, for example, for certificate pinning or setting a network proxy: + * + * ``` + * const { HttpsProxyAgent } = require('https-proxy-agent'); + * + * const proxyAgent = new HttpsProxyAgent(process.env.HTTPS_PROXY || 'http://10.10.1.10:1080'); + * + * const client = ConfigsClient({ + * ...
+ * requestOptions: { + * agent: proxyAgent + * } + * }) + * ``` + * + * @see {@link https://nodejs.org/api/https.html#class-httpsagent} + * + * @defaultValue `undefined` + */ + agent?: RequestOptions['agent']; + }; + } + + /** + * Target for a config evaluation. + */ + interface Target { + /** + * The key of the target. + */ + key: SplitKey; + /** + * The attributes of the target. + * + * @defaultValue `undefined` + */ + attributes?: Attributes; + } + + type JsonValue = string | number | boolean | null | JsonObject | JsonArray; + type JsonArray = JsonValue[]; + type JsonObject = { [key: string]: JsonValue; }; + + /** + * Config object returned by getConfig. + */ + type Config = { + /** + * The name of the variant. + */ + variant: string; + /** + * The config value, a raw JSON object. + */ + value: JsonObject; + } + + /** + * Configs SDK client interface. + */ + interface ConfigsClient extends IStatusInterface { + /** + * Destroys the client. + * + * @returns A promise that resolves once all clients are destroyed. + */ + destroy(): Promise<void>; + /** + * Gets the config object for a given config name and optional target. If no target is provided, the default variant of the config is returned. + * + * @param configName - The name of the config we want to get. + * @param target - The target of the config evaluation. + * @param options - An object of type EvaluationOptions for advanced evaluation options. + * @returns The config object. + */ + getConfig(configName: string, target?: Target, options?: EvaluationOptions): Config; + /** + * Tracks an event to be fed to the results product on Harness FME user interface. + * + * @param trafficKey - The key that identifies the entity related to this event. + * @param trafficType - The traffic type of the entity related to this event.
See {@link https://developer.harness.io/docs/feature-management-experimentation/management-and-administration/fme-settings/traffic-types/} + * @param eventType - The event type corresponding to this event. + * @param value - The value of this event. + * @param properties - The properties of this event. Values can be string, number, boolean or null. + * @returns Whether the event was added to the queue successfully or not. + */ + track(trafficKey: SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean; + } }