diff --git a/.eslintrc b/.eslintrc index 1d7f59384..b410eddeb 100644 --- a/.eslintrc +++ b/.eslintrc @@ -46,7 +46,7 @@ ], "rules": { "no-restricted-syntax": ["error", "ForOfStatement", "ForInStatement", "ArrayPattern"], - "compat/compat": ["error", "defaults, ie 10, node 6"], + "compat/compat": ["error", "defaults, node >=14"], "no-throw-literal": "error", "import/no-default-export": "error", "import/no-self-import": "error" diff --git a/.gitignore b/.gitignore index 376800e23..f26a161f7 100644 --- a/.gitignore +++ b/.gitignore @@ -19,8 +19,8 @@ dump.rdb /stats ## transpiled code -/lib -/es +/cjs +/esm /umd ## TS tests compilated files diff --git a/CHANGES.txt b/CHANGES.txt index d609813a6..4dfe7bf8d 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,3 +1,15 @@ +11.0.0 (November 1, 2024) + - Added support for targeting rules based on large segments for browsers (client-side API). + - Added `factory.destroy()` method, which invokes the `destroy` method of all clients created by the factory. + - Updated @splitsoftware/splitio-commons package to version 2.0.0, which includes major updates, and updated some transitive dependencies for vulnerability fixes. + - Renamed distribution folders from `/lib` to `/cjs` for CommonJS build, and `/es` to `/esm` for ECMAScript Modules build. + - BREAKING CHANGES: + - Dropped support for Split Proxy below version 5.9.0 when used in the browser (client-side API). The SDK now requires Split Proxy 5.9.0 or above. + - Dropped support for NodeJS v6. The SDK now requires NodeJS v14 or above. + - Removed internal ponyfills for the `Map` and `Set` global objects, dropping support for IE and other outdated browsers. The SDK now requires the runtime environment to support these features natively or provide a polyfill. + - Removed the deprecated `GOOGLE_ANALYTICS_TO_SPLIT` and `SPLIT_TO_GOOGLE_ANALYTICS` integrations. The `integrations` configuration option has been removed from the SDK factory configuration, along with the associated interfaces in the TypeScript definitions. + - Removed the `core.trafficType` configuration option (`SplitIO.IBrowserSettings['core']['trafficType']`) and the `trafficType` parameter from the SDK `client()` method in Browser (`SplitIO.IBrowserSDK['client']`). As a result, traffic types can no longer be bound to SDK clients, and the traffic type must be provided in the `track` method. + 10.28.0 (September 6, 2024) - Updated @splitsoftware/splitio-commons package to version 1.17.0 that includes minor updates: - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. @@ -507,7 +519,7 @@ - Bugfixing - Return correct label when consulted Split is not found.
9.1.1 (May 03, 2017) - - Bugfixing - Fixed invalid behaviour when using native Fetch API and comparing statusText + - Bugfixing - Fixed invalid behavior when using native Fetch API and comparing statusText instead of resp.ok 9.1.0 (April 21, 2017) diff --git a/client/package.json b/client/package.json index d1f660668..f99b69ceb 100644 --- a/client/package.json +++ b/client/package.json @@ -1,5 +1,5 @@ { - "main": "../lib/factory/browser.js", - "module": "../es/factory/browser.js", + "main": "../cjs/factory/browser.js", + "module": "../esm/factory/browser.js", "types": "../types/client/index.d.ts" } diff --git a/karma/e2e.gaIntegration.karma.conf.js b/karma/e2e.gaIntegration.karma.conf.js deleted file mode 100644 index f6b03110f..000000000 --- a/karma/e2e.gaIntegration.karma.conf.js +++ /dev/null @@ -1,20 +0,0 @@ -const assign = require('lodash/assign'); - -module.exports = function(config) { - 'use strict'; - - config.set(assign({}, require('./config'), { - // list of files / patterns to load in the browser - files: [ - '__tests__/gaIntegration/browser.spec.js' - ], - // prepare code for the browser using webpack - preprocessors: { - '__tests__/gaIntegration/browser.spec.js': ['webpack'] - }, - - // level of logging - // possible values: LOG_DISABLE || LOG_ERROR || LOG_WARN || LOG_INFO || LOG_DEBUG - logLevel: config.LOG_WARN - })); -}; diff --git a/package-lock.json b/package-lock.json index 7c9c69de8..d5457999a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,17 +1,15 @@ { "name": "@splitsoftware/splitio", - "version": "10.28.0", + "version": "11.0.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio", - "version": "10.28.0", + "version": "11.0.0", "license": "Apache-2.0", "dependencies": { - "@splitsoftware/splitio-commons": "1.17.0", - "@types/google.analytics": "0.0.40", - "@types/ioredis": "^4.28.0", + "@splitsoftware/splitio-commons": "2.0.0", "bloom-filters": "^3.0.0", "ioredis": "^4.28.0", "js-yaml": "^3.13.1", @@ -53,8 +51,7 @@ "webpack-merge": "^5.8.0" }, "engines": { - "node": ">=6", - "npm": ">=3" + "node": ">=14.0.0" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -872,10 +869,11 @@ "dev": true }, "node_modules/@splitsoftware/splitio-commons": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/@splitsoftware/splitio-commons/-/splitio-commons-1.17.0.tgz", - "integrity": "sha512-rvP+0LGUN92bcTytiqyVxq9UzBG5kTkIYjU7b7AU2awBUYgM0bqT3xhQ9/MJ/2fsBbqC6QIsxoKDOz9pMgbAQw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@splitsoftware/splitio-commons/-/splitio-commons-2.0.0.tgz", + "integrity": "sha512-Sz4+vFacl29xw3451z9IUgB4zBFKUWZdCnmOB0DDXA803YKPqjXphdAwN6nV+1vsX9pXV/OS6UaNC4oUICa6PA==", "dependencies": { + "@types/ioredis": "^4.28.0", "tslib": "^2.3.1" }, "peerDependencies": { @@ -932,11 +930,6 @@ "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", "dev": true }, - "node_modules/@types/google.analytics": { - "version": "0.0.40", - "resolved": "https://registry.npmjs.org/@types/google.analytics/-/google.analytics-0.0.40.tgz", - "integrity": "sha512-R3HpnLkqmKxhUAf8kIVvDVGJqPtaaZlW4yowNwjOZUTmYUQEgHh8Nh5wkSXKMroNAuQM8gbXJHmNbbgA8tdb7Q==" - }, "node_modules/@types/ioredis": { "version": "4.28.10", "resolved": "https://registry.npmjs.org/@types/ioredis/-/ioredis-4.28.10.tgz", @@ -1357,15 +1350,14 @@ } }, "node_modules/asn1.js": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz", - 
"integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==", + "version": "4.10.1", + "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz", + "integrity": "sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw==", "dev": true, "dependencies": { "bn.js": "^4.0.0", "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0", - "safer-buffer": "^2.1.0" + "minimalistic-assert": "^1.0.0" } }, "node_modules/asn1.js/node_modules/bn.js": { @@ -1695,25 +1687,75 @@ } }, "node_modules/browserify-sign": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.2.tgz", - "integrity": "sha512-1rudGyeYY42Dk6texmv7c4VcQ0EsvVbLwZkA+AQB7SxvXxmcD93jcHie8bzecJ+ChDlmAm2Qyu0+Ccg5uhZXCg==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.3.tgz", + "integrity": "sha512-JWCZW6SKhfhjJxO8Tyiiy+XYB7cqd2S5/+WeYHsKdNKFlCBhKbblba1A/HN/90YwtxKc8tCErjffZl++UNmGiw==", "dev": true, "dependencies": { "bn.js": "^5.2.1", "browserify-rsa": "^4.1.0", "create-hash": "^1.2.0", "create-hmac": "^1.1.7", - "elliptic": "^6.5.4", + "elliptic": "^6.5.5", + "hash-base": "~3.0", "inherits": "^2.0.4", - "parse-asn1": "^5.1.6", - "readable-stream": "^3.6.2", + "parse-asn1": "^5.1.7", + "readable-stream": "^2.3.8", "safe-buffer": "^5.2.1" }, "engines": { - "node": ">= 4" + "node": ">= 0.12" + } + }, + "node_modules/browserify-sign/node_modules/hash-base": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", + "integrity": "sha512-EeeoJKjTyt868liAlVmcv2ZsUfGHlE3Q+BICOXcZiwN3osr5Q/zFGYmTJpoIzuaSTAwndFy+GqhEwlU4L3j4Ow==", + "dev": true, + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/browserify-sign/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" } }, + "node_modules/browserify-sign/node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/browserify-sign/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/browserify-sign/node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, "node_modules/browserify-zlib": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", @@ -4473,9 
+4515,9 @@ "dev": true }, "node_modules/karma": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/karma/-/karma-6.4.1.tgz", - "integrity": "sha512-Cj57NKOskK7wtFWSlMvZf459iX+kpYIPXmkNUzP2WAFcA7nhr/ALn5R7sw3w+1udFDcpMx/tuB8d5amgm3ijaA==", + "version": "6.4.4", + "resolved": "https://registry.npmjs.org/karma/-/karma-6.4.4.tgz", + "integrity": "sha512-LrtUxbdvt1gOpo3gxG+VAJlJAEMhbWlM4YrFQgql98FwF7+K8K12LYO4hnDdUkNjeztYrOXEMqgTajSWgmtI/w==", "dev": true, "dependencies": { "@colors/colors": "1.5.0", @@ -4497,7 +4539,7 @@ "qjobs": "^1.2.0", "range-parser": "^1.2.1", "rimraf": "^3.0.2", - "socket.io": "^4.4.1", + "socket.io": "^4.7.2", "source-map": "^0.6.1", "tmp": "^0.2.1", "ua-parser-js": "^0.7.30", @@ -4951,9 +4993,9 @@ "dev": true }, "node_modules/nise/node_modules/path-to-regexp": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", - "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.9.0.tgz", + "integrity": "sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g==", "dev": true, "dependencies": { "isarray": "0.0.1" @@ -5284,16 +5326,33 @@ } }, "node_modules/parse-asn1": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz", - "integrity": "sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==", + "version": "5.1.7", + "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.7.tgz", + "integrity": "sha512-CTM5kuWR3sx9IFamcl5ErfPl6ea/N8IYwiJ+vpeB2g+1iknv7zBl5uPwbMbRVznRVbrNY6lGuDoE5b30grmbqg==", "dev": true, "dependencies": { - "asn1.js": "^5.2.0", - "browserify-aes": "^1.0.0", - "evp_bytestokey": "^1.0.0", - "pbkdf2": "^3.0.3", - "safe-buffer": "^5.1.1" + "asn1.js": "^4.10.1", + "browserify-aes": "^1.2.0", + "evp_bytestokey": "^1.0.3", + "hash-base": "~3.0", + "pbkdf2": "^3.1.2", + "safe-buffer": "^5.2.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/parse-asn1/node_modules/hash-base": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", + "integrity": "sha512-EeeoJKjTyt868liAlVmcv2ZsUfGHlE3Q+BICOXcZiwN3osr5Q/zFGYmTJpoIzuaSTAwndFy+GqhEwlU4L3j4Ow==", + "dev": true, + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": ">=4" } }, "node_modules/parse-ms": { @@ -8528,10 +8587,11 @@ "dev": true }, "@splitsoftware/splitio-commons": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/@splitsoftware/splitio-commons/-/splitio-commons-1.17.0.tgz", - "integrity": "sha512-rvP+0LGUN92bcTytiqyVxq9UzBG5kTkIYjU7b7AU2awBUYgM0bqT3xhQ9/MJ/2fsBbqC6QIsxoKDOz9pMgbAQw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@splitsoftware/splitio-commons/-/splitio-commons-2.0.0.tgz", + "integrity": "sha512-Sz4+vFacl29xw3451z9IUgB4zBFKUWZdCnmOB0DDXA803YKPqjXphdAwN6nV+1vsX9pXV/OS6UaNC4oUICa6PA==", "requires": { + "@types/ioredis": "^4.28.0", "tslib": "^2.3.1" } }, @@ -8580,11 +8640,6 @@ "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", "dev": true }, - "@types/google.analytics": { - "version": "0.0.40", - "resolved": "https://registry.npmjs.org/@types/google.analytics/-/google.analytics-0.0.40.tgz", - "integrity": 
"sha512-R3HpnLkqmKxhUAf8kIVvDVGJqPtaaZlW4yowNwjOZUTmYUQEgHh8Nh5wkSXKMroNAuQM8gbXJHmNbbgA8tdb7Q==" - }, "@types/ioredis": { "version": "4.28.10", "resolved": "https://registry.npmjs.org/@types/ioredis/-/ioredis-4.28.10.tgz", @@ -8943,15 +8998,14 @@ } }, "asn1.js": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz", - "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==", + "version": "4.10.1", + "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz", + "integrity": "sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw==", "dev": true, "requires": { "bn.js": "^4.0.0", "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0", - "safer-buffer": "^2.1.0" + "minimalistic-assert": "^1.0.0" }, "dependencies": { "bn.js": { @@ -9234,20 +9288,73 @@ } }, "browserify-sign": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.2.tgz", - "integrity": "sha512-1rudGyeYY42Dk6texmv7c4VcQ0EsvVbLwZkA+AQB7SxvXxmcD93jcHie8bzecJ+ChDlmAm2Qyu0+Ccg5uhZXCg==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.3.tgz", + "integrity": "sha512-JWCZW6SKhfhjJxO8Tyiiy+XYB7cqd2S5/+WeYHsKdNKFlCBhKbblba1A/HN/90YwtxKc8tCErjffZl++UNmGiw==", "dev": true, "requires": { "bn.js": "^5.2.1", "browserify-rsa": "^4.1.0", "create-hash": "^1.2.0", "create-hmac": "^1.1.7", - "elliptic": "^6.5.4", + "elliptic": "^6.5.5", + "hash-base": "~3.0", "inherits": "^2.0.4", - "parse-asn1": "^5.1.6", - "readable-stream": "^3.6.2", + "parse-asn1": "^5.1.7", + "readable-stream": "^2.3.8", "safe-buffer": "^5.2.1" + }, + "dependencies": { + "hash-base": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", + "integrity": "sha512-EeeoJKjTyt868liAlVmcv2ZsUfGHlE3Q+BICOXcZiwN3osr5Q/zFGYmTJpoIzuaSTAwndFy+GqhEwlU4L3j4Ow==", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + } + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + } + } + } } }, "browserify-zlib": { @@ -11305,9 +11412,9 @@ "dev": true }, "karma": { - "version": "6.4.1", - "resolved": 
"https://registry.npmjs.org/karma/-/karma-6.4.1.tgz", - "integrity": "sha512-Cj57NKOskK7wtFWSlMvZf459iX+kpYIPXmkNUzP2WAFcA7nhr/ALn5R7sw3w+1udFDcpMx/tuB8d5amgm3ijaA==", + "version": "6.4.4", + "resolved": "https://registry.npmjs.org/karma/-/karma-6.4.4.tgz", + "integrity": "sha512-LrtUxbdvt1gOpo3gxG+VAJlJAEMhbWlM4YrFQgql98FwF7+K8K12LYO4hnDdUkNjeztYrOXEMqgTajSWgmtI/w==", "dev": true, "requires": { "@colors/colors": "1.5.0", @@ -11329,7 +11436,7 @@ "qjobs": "^1.2.0", "range-parser": "^1.2.1", "rimraf": "^3.0.2", - "socket.io": "^4.4.1", + "socket.io": "^4.7.2", "source-map": "^0.6.1", "tmp": "^0.2.1", "ua-parser-js": "^0.7.30", @@ -11715,9 +11822,9 @@ "dev": true }, "path-to-regexp": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", - "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.9.0.tgz", + "integrity": "sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g==", "dev": true, "requires": { "isarray": "0.0.1" @@ -11977,16 +12084,29 @@ } }, "parse-asn1": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz", - "integrity": "sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==", + "version": "5.1.7", + "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.7.tgz", + "integrity": "sha512-CTM5kuWR3sx9IFamcl5ErfPl6ea/N8IYwiJ+vpeB2g+1iknv7zBl5uPwbMbRVznRVbrNY6lGuDoE5b30grmbqg==", "dev": true, "requires": { - "asn1.js": "^5.2.0", - "browserify-aes": "^1.0.0", - "evp_bytestokey": "^1.0.0", - "pbkdf2": "^3.0.3", - "safe-buffer": "^5.1.1" + "asn1.js": "^4.10.1", + "browserify-aes": "^1.2.0", + "evp_bytestokey": "^1.0.3", + "hash-base": "~3.0", + "pbkdf2": "^3.1.2", + "safe-buffer": "^5.2.1" + }, + "dependencies": { + "hash-base": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", + "integrity": "sha512-EeeoJKjTyt868liAlVmcv2ZsUfGHlE3Q+BICOXcZiwN3osr5Q/zFGYmTJpoIzuaSTAwndFy+GqhEwlU4L3j4Ow==", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + } } }, "parse-ms": { diff --git a/package.json b/package.json index ed966a4f2..f6083e0b2 100644 --- a/package.json +++ b/package.json @@ -1,17 +1,16 @@ { "name": "@splitsoftware/splitio", - "version": "10.28.0", + "version": "11.0.0", "description": "Split SDK", "files": [ "README.md", "CONTRIBUTORS-GUIDE.md", "LICENSE", "CHANGES.txt", - "lib", + "cjs", "types", - "es", + "esm", "src", - "scripts/ga-to-split-autorequire.js", "client", "server" ], @@ -32,17 +31,14 @@ "sdk", "javascript" ], - "main": "lib/index.js", - "module": "es/index.js", + "main": "cjs/index.js", + "module": "esm/index.js", "types": "types", "engines": { - "npm": ">=3", - "node": ">=6" + "node": ">=14.0.0" }, "dependencies": { - "@splitsoftware/splitio-commons": "1.17.0", - "@types/google.analytics": "0.0.40", - "@types/ioredis": "^4.28.0", + "@splitsoftware/splitio-commons": "2.0.0", "bloom-filters": "^3.0.0", "ioredis": "^4.28.0", "js-yaml": "^3.13.1", @@ -84,14 +80,13 @@ "webpack-merge": "^5.8.0" }, "scripts": { - "build-esm": "rimraf es && tsc -outDir es", - "postbuild-esm": "cross-env NODE_ENV=es node scripts/copy.packages.json.js && ./scripts/build_esm_replace_imports.sh", + "build-esm": "rimraf esm && tsc -outDir esm", + "postbuild-esm": 
"cross-env NODE_ENV=esm node scripts/copy.packages.json.js && ./scripts/build_esm_replace_imports.sh", "build-umd:stats": "webpack --progress --env production --json > ./stats/stat_results.json", - "build-cjs": "rimraf lib && tsc -outDir lib -m CommonJS", + "build-cjs": "rimraf cjs && tsc -outDir cjs -m CommonJS", "postbuild-cjs": "cross-env NODE_ENV=cjs node scripts/copy.packages.json.js && ./scripts/build_cjs_replace_imports.sh", "build-umd": "rimraf umd && webpack --config webpack.dev.js --env branch=$BUILD_BRANCH && webpack --config webpack.prod.js --env branch=$BUILD_BRANCH && ./scripts/clean_umd_build.sh", "build:npm": "npm run build-cjs && npm run build-esm", - "build:ga-to-split-autorequire": "terser ./node_modules/@splitsoftware/splitio-commons/src/integrations/ga/autoRequire.js --mangle --output ./scripts/ga-to-split-autorequire.js && cp ./scripts/ga-to-split-autorequire.js umd/ga-to-split-autorequire.js", "build": "npm run build-cjs && npm run build-esm && npm run build-umd", "check": "npm run check:lint && npm run check:version", "check:lint": "eslint src", @@ -104,7 +99,6 @@ "test-browser-e2e-destroy": "cross-env NODE_ENV=test karma start karma/e2e.destroy.karma.conf.js", "test-browser-e2e-errorCatching": "cross-env NODE_ENV=test karma start karma/e2e.errorCatching.karma.conf.js", "test-browser-e2e-push": "cross-env NODE_ENV=test karma start karma/e2e.push.karma.conf.js", - "test-browser-e2e-gaIntegration": "cross-env NODE_ENV=test karma start karma/e2e.gaIntegration.karma.conf.js", "test-node": "npm run test-node-unit && npm run test-node-e2e", "test-node-unit": "cross-env NODE_ENV=test tape -r ./ts-node.register \"src/*/**/__tests__/**/!(browser).spec.js\" | tap-min", "test-node-e2e": "npm run test-node-e2e-online && npm run test-node-e2e-offline && npm run test-node-e2e-destroy && npm run test-node-e2e-errorCatching && npm run test-node-e2e-push && npm run test-node-e2e-redis", @@ -114,9 +108,7 @@ "test-node-e2e-errorCatching": "cross-env NODE_ENV=test tape -r ./ts-node.register src/__tests__/errorCatching/node.spec.js | tap-min", "test-node-e2e-push": "cross-env NODE_ENV=test tape -r ./ts-node.register src/__tests__/push/node.spec.js | tap-min", "test-node-e2e-redis": "cross-env NODE_ENV=test tape -r ./ts-node.register src/__tests__/consumer/node_redis.spec.js | tap-min", - "pretest-ts-decls": "npm run build-esm && npm run build-cjs && npm link", - "test-ts-decls": "./scripts/ts-tests.sh", - "posttest-ts-decls": "npm rm --location=global @splitsoftware/splitio && npm install", + "test-ts-decls": "tsc --build ts-tests", "test": "npm run test-node && npm run test-browser", "all": "npm run check && npm run build && npm run test-ts-decls && npm run test", "publish:rc": "npm run check && npm run build && npm publish --tag canary", diff --git a/scripts/build_cjs_replace_imports.sh b/scripts/build_cjs_replace_imports.sh index 85490429f..fd29a4a7e 100755 --- a/scripts/build_cjs_replace_imports.sh +++ b/scripts/build_cjs_replace_imports.sh @@ -1,7 +1,7 @@ #!/bin/bash -# replace splitio-commons imports to use ES modules -replace '@splitsoftware/splitio-commons/src' '@splitsoftware/splitio-commons/cjs' ./lib -r +# replace splitio-commons imports to use CommonJS +replace '@splitsoftware/splitio-commons/src' '@splitsoftware/splitio-commons/cjs' ./cjs -r if [ $? 
-eq 0 ] then diff --git a/scripts/build_esm_replace_imports.sh b/scripts/build_esm_replace_imports.sh index 81cfb81b0..16155e206 100755 --- a/scripts/build_esm_replace_imports.sh +++ b/scripts/build_esm_replace_imports.sh @@ -1,7 +1,7 @@ #!/bin/bash -# replace splitio-commons imports to use ES modules -replace '@splitsoftware/splitio-commons/src' '@splitsoftware/splitio-commons/esm' ./es -r +# replace splitio-commons imports to use EcmaScript Modules +replace '@splitsoftware/splitio-commons/src' '@splitsoftware/splitio-commons/esm' ./esm -r if [ $? -eq 0 ] then diff --git a/scripts/copy.packages.json.js b/scripts/copy.packages.json.js index 3ce08513b..68683ee6c 100644 --- a/scripts/copy.packages.json.js +++ b/scripts/copy.packages.json.js @@ -3,8 +3,8 @@ const copyfiles = require('copyfiles'); const input = './src/**/package.json'; -const outputCjsDir = './lib'; -const outputEsmDir = './es'; +const outputCjsDir = './cjs'; +const outputEsmDir = './esm'; copyfiles([input, process.env.NODE_ENV === 'cjs' ? outputCjsDir : outputEsmDir], { up: 1, diff --git a/scripts/ga-to-split-autorequire.js b/scripts/ga-to-split-autorequire.js deleted file mode 100644 index 7f8b88597..000000000 --- a/scripts/ga-to-split-autorequire.js +++ /dev/null @@ -1 +0,0 @@ -(function(n,t,e){n[e]=n[e]||t;n[t]=n[t]||function(){n[t].q.push(arguments)};n[t].q=n[t].q||[];var r={};function i(n){return typeof n==="object"&&typeof n.name==="string"&&n.name}function o(e){if(e&&e[0]==="create"){var o=i(e[1])||i(e[2])||i(e[3])||(typeof e[3]==="string"?e[3]:undefined);if(!r[o]){r[o]=true;n[t]((o?o+".":"")+"require","splitTracker")}}}n[t].q.forEach(o);var u=n[t].q.push;n[t].q.push=function(n){var t=u.apply(this,arguments);o(n);return t}})(window,"ga","GoogleAnalyticsObject"); \ No newline at end of file diff --git a/scripts/ts-tests.sh b/scripts/ts-tests.sh deleted file mode 100755 index 2263e923d..000000000 --- a/scripts/ts-tests.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash - -cd ts-tests ## Go to typescript tests folder -echo "Installing dependencies for TypeScript declarations testing..." -npm install ## Install dependencies -npm install @types/node@6.0.31 ## Install type definitions for Node.js v6.x (the oldest supported version) -echo "Dependencies installed, linking the package." -npm link @splitsoftware/splitio ## Link to the cloned code -echo "Running tsc compiler." -./node_modules/.bin/tsc ## Run typescript compiler. No need for flags as we have a tsconfig.json file - -echo "Testing again with the latest @types/node version..." -npm install @types/node@14 ## Install latest type definitions for Node.js -echo "Dependencies installed, linking the package." -npm link @splitsoftware/splitio ## Link to the cloned code -echo "Running tsc compiler." -./node_modules/.bin/tsc ## Run typescript compiler. No need for flags as we have a tsconfig.json file - -if [ $? -eq 0 ] -then - echo "✅ Successfully compiled TS tests." - npm unlink @splitsoftware/splitio - exit 0 -else - echo "☠️ Error compiling TS tests." 
- npm unlink @splitsoftware/splitio - exit 1 -fi diff --git a/server/package.json b/server/package.json index be8f299ed..21aeb6f1f 100644 --- a/server/package.json +++ b/server/package.json @@ -1,5 +1,5 @@ { - "main": "../lib/factory/node.js", - "module": "../es/factory/node.js", + "main": "../cjs/factory/node.js", + "module": "../esm/factory/node.js", "types": "../types/server/index.d.ts" } diff --git a/src/__tests__/browserSuites/evaluations-semver.spec.js b/src/__tests__/browserSuites/evaluations-semver.spec.js index 29d2540fe..716c5467d 100644 --- a/src/__tests__/browserSuites/evaluations-semver.spec.js +++ b/src/__tests__/browserSuites/evaluations-semver.spec.js @@ -25,10 +25,10 @@ const config = { export default async function (fetchMock, assert) { - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=1675259356568', { status: 200, body: { splits: [], since: 1675259356568, till: 1675259356568 } }); - fetchMock.getOnce(config.urls.sdk + '/mySegments/emi%40split.io', { status: 200, body: { mySegments: [] } }); - fetchMock.getOnce(config.urls.sdk + '/mySegments/2nd', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=1675259356568', { status: 200, body: { splits: [], since: 1675259356568, till: 1675259356568 } }); + fetchMock.getOnce(config.urls.sdk + '/memberships/emi%40split.io', { status: 200, body: { ms: {} } }); + fetchMock.getOnce(config.urls.sdk + '/memberships/2nd', { status: 200, body: { ms: {} } }); const splitio = SplitFactory(config); const client = splitio.client(); diff --git a/src/__tests__/browserSuites/evaluations.spec.js b/src/__tests__/browserSuites/evaluations.spec.js index cc8bee2cc..b40b0b1d3 100644 --- a/src/__tests__/browserSuites/evaluations.spec.js +++ b/src/__tests__/browserSuites/evaluations.spec.js @@ -365,7 +365,7 @@ export default function (config, fetchMock, assert) { for (i; i < SDK_INSTANCES_TO_TEST; i++) { let splitio = SplitFactory(config); - fetchMock.getOnce('https://sdk.split.io/api/mySegments/aaaaaaklmnbv', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce('https://sdk.split.io/api/memberships/aaaaaaklmnbv', { status: 200, body: { ms: {} } }); // on TA tests, this is going to return one against the mocked seed. 
let clientTABucket1 = splitio.client('aaaaaaklmnbv'); diff --git a/src/__tests__/browserSuites/fetch-specific-splits.spec.js b/src/__tests__/browserSuites/fetch-specific-splits.spec.js index fb64a4b82..a9f6b383e 100644 --- a/src/__tests__/browserSuites/fetch-specific-splits.spec.js +++ b/src/__tests__/browserSuites/fetch-specific-splits.spec.js @@ -25,15 +25,15 @@ export function fetchSpecificSplits(fetchMock, assert) { const queryString = queryStrings[i] || ''; let factory; - fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.1&since=-1' + queryString, { status: 200, body: { splits: [], since: -1, till: 1457552620999 } }); - fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.1&since=1457552620999' + queryString, { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.1&since=1457552620999' + queryString, function () { + fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.2&since=-1' + queryString, { status: 200, body: { splits: [], since: -1, till: 1457552620999 } }); + fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.2&since=1457552620999' + queryString, { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.2&since=1457552620999' + queryString, function () { factory.client().destroy().then(() => { assert.pass(`splitFilters #${i}`); }); return { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }; }); - fetchMock.get(urls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { 'mySegments': [] } }); + fetchMock.get(urls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { 'ms': {} } }); factory = SplitFactory(config); @@ -68,10 +68,10 @@ export function fetchSpecificSplitsForFlagSets(fetchMock, assert) { let factory; const queryString = '&sets=4_valid,set_2,set_3,set_ww,set_x'; - fetchMock.get(baseUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { 'mySegments': [] } }); + fetchMock.get(baseUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { 'ms': {} } }); - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=-1' + queryString, { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 }}); - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=1457552620999' + queryString, async function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=-1' + queryString, { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 }}); + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=1457552620999' + queryString, async function () { t.pass('flag set query correctly formed'); t.true(logSpy.calledWithExactly('[WARN] splitio => settings: bySet filter value "set_x " has extra whitespace, trimming.')); t.true(logSpy.calledWithExactly('[WARN] splitio => settings: you passed invalid+, flag set must adhere to the regular expressions /^[a-z0-9][_a-z0-9]{0,49}$/. This means a flag set must start with a letter or number, be in lowercase, alphanumeric and have a max length of 50 characters. 
invalid+ was discarded.')); diff --git a/src/__tests__/browserSuites/flag-sets.spec.js b/src/__tests__/browserSuites/flag-sets.spec.js index 60396eb81..214c1a1fb 100644 --- a/src/__tests__/browserSuites/flag-sets.spec.js +++ b/src/__tests__/browserSuites/flag-sets.spec.js @@ -17,19 +17,19 @@ const baseConfig = { }; export default function flagSets(fetchMock, t) { - fetchMock.get(baseUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { 'mySegments': [] } }); + fetchMock.get(baseUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { 'ms': {} } }); t.test(async (assert) => { let factory; let manager; // Receive split change with 1 split belonging to set_1 & set_2 and one belonging to set_3 - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=-1&sets=set_1,set_2', function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=-1&sets=set_1,set_2', function () { return { status: 200, body: splitChange2}; }); // Receive split change with 1 split belonging to set_1 only - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=1602796638344&sets=set_1,set_2', function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=1602796638344&sets=set_1,set_2', function () { // stored feature flags before update const storedFlags = manager.splits(); assert.true(storedFlags.length === 1, 'only one feature flag should be added'); @@ -41,7 +41,7 @@ export default function flagSets(fetchMock, t) { }); // Receive split change with 1 split belonging to set_3 only - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=1602797638344&sets=set_1,set_2', function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=1602797638344&sets=set_1,set_2', function () { // stored feature flags before update const storedFlags = manager.splits(); assert.true(storedFlags.length === 1); @@ -52,7 +52,7 @@ export default function flagSets(fetchMock, t) { return { status: 200, body: splitChange0}; }); - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=1602798638344&sets=set_1,set_2', async function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=1602798638344&sets=set_1,set_2', async function () { // stored feature flags before update const storedFlags = manager.splits(); assert.true(storedFlags.length === 0, 'the feature flag should be removed'); @@ -75,12 +75,12 @@ export default function flagSets(fetchMock, t) { let manager; // Receive split change with 1 split belonging to set_1 & set_2 and one belonging to set_3 - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return { status: 200, body: splitChange2}; }); // Receive split change with 1 split belonging to set_1 only - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=1602796638344', function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=1602796638344', function () { // stored feature flags before update const storedFlags = manager.splits(); assert.true(storedFlags.length === 2, 'every feature flag should be added'); @@ -94,7 +94,7 @@ export default function flagSets(fetchMock, t) { }); // Receive split change with 1 split belonging to set_3 only - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=1602797638344', function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=1602797638344', function () { // stored feature flags before update const storedFlags = manager.splits(); 
assert.true(storedFlags.length === 2); @@ -107,7 +107,7 @@ export default function flagSets(fetchMock, t) { return { status: 200, body: splitChange0}; }); - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=1602798638344', async function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=1602798638344', async function () { // stored feature flags before update const storedFlags = manager.splits(); assert.true(storedFlags.length === 2); @@ -135,13 +135,13 @@ export default function flagSets(fetchMock, t) { let factory, client = []; - fetchMock.get(baseUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { 'mySegments': [] } }); + fetchMock.get(baseUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { 'ms': {} } }); // Receive split change with 1 split belonging to set_1 & set_2 and one belonging to set_3 - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=-1&sets=set_1', function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=-1&sets=set_1', function () { return { status: 200, body: splitChange2}; }); - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=1602796638344&sets=set_1', async function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=1602796638344&sets=set_1', async function () { // stored feature flags before update assert.deepEqual(client.getTreatmentsByFlagSet('set_1'), {workm: 'on'}, 'only the flag in set_1 can be evaluated'); assert.deepEqual(client.getTreatmentsByFlagSet('set_2'), {}, 'only the flag in set_1 can be evaluated'); @@ -172,13 +172,13 @@ export default function flagSets(fetchMock, t) { let factory, client = []; - fetchMock.get(baseUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { 'mySegments': [] } }); + fetchMock.get(baseUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { 'ms': {} } }); // Receive split change with 1 split belonging to set_1 & set_2 and one belonging to set_3 - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return { status: 200, body: splitChange2}; }); - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=1602796638344', async function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=1602796638344', async function () { // stored feature flags before update assert.deepEqual(client.getTreatmentsByFlagSet('set_1'), {workm: 'on'}, 'all flags can be evaluated'); assert.deepEqual(client.getTreatmentsByFlagSet('set_2'), {workm: 'on'}, 'all flags can be evaluated'); diff --git a/src/__tests__/browserSuites/ignore-ip-addresses-setting.spec.js b/src/__tests__/browserSuites/ignore-ip-addresses-setting.spec.js index c2c391d52..60108065a 100644 --- a/src/__tests__/browserSuites/ignore-ip-addresses-setting.spec.js +++ b/src/__tests__/browserSuites/ignore-ip-addresses-setting.spec.js @@ -101,9 +101,9 @@ export default function (fetchMock, assert) { // Mock GET endpoints before creating the client const settings = settingsFactory(config); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(url(settings, `/mySegments/${encodeURIComponent(config.core.key)}`), { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(url(settings, 
'/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(url(settings, `/memberships/${encodeURIComponent(config.core.key)}`), { status: 200, body: { ms: {} } }); // Init Split client const splitio = SplitFactory(config); diff --git a/src/__tests__/browserSuites/impressions.debug.spec.js b/src/__tests__/browserSuites/impressions.debug.spec.js index 3a2eeb488..18d875b4e 100644 --- a/src/__tests__/browserSuites/impressions.debug.spec.js +++ b/src/__tests__/browserSuites/impressions.debug.spec.js @@ -2,7 +2,7 @@ import { SplitFactory } from '../../'; import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; import { DEBUG } from '@splitsoftware/splitio-commons/src/utils/constants'; import { url } from '../testUtils'; @@ -21,9 +21,9 @@ const settings = settingsFactory({ export default function (fetchMock, assert) { // Mocking this specific route to make sure we only get the items we want to test from the handlers. - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); const splitio = SplitFactory({ core: { diff --git a/src/__tests__/browserSuites/impressions.none.spec.js b/src/__tests__/browserSuites/impressions.none.spec.js index 89aee3ab0..d90c407a7 100644 --- a/src/__tests__/browserSuites/impressions.none.spec.js +++ b/src/__tests__/browserSuites/impressions.none.spec.js @@ -2,7 +2,7 @@ import { SplitFactory } from '../..'; import { settingsFactory } from '../../settings/node'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; import { NONE } from '@splitsoftware/splitio-commons/src/utils/constants'; import { truncateTimeFrame } from '@splitsoftware/splitio-commons/src/utils/time'; import { url } from '../testUtils'; @@ -41,10 +41,10 @@ const config = { export default async function (fetchMock, assert) { // Mocking this specific route to make sure we only get the items we want to test from the handlers. 
- fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); - fetchMock.get(url(settings, '/mySegments/emma%40split.io'), { status: 200, body: mySegmentsFacundo }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); + fetchMock.get(url(settings, '/memberships/emma%40split.io'), { status: 200, body: membershipsFacundo }); const splitio = SplitFactory(config); const client = splitio.client(); diff --git a/src/__tests__/browserSuites/impressions.spec.js b/src/__tests__/browserSuites/impressions.spec.js index 42c85809b..aa4cff4c0 100644 --- a/src/__tests__/browserSuites/impressions.spec.js +++ b/src/__tests__/browserSuites/impressions.spec.js @@ -2,7 +2,7 @@ import { SplitFactory } from '../../'; import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; import { OPTIMIZED } from '@splitsoftware/splitio-commons/src/utils/constants'; import { truncateTimeFrame } from '@splitsoftware/splitio-commons/src/utils/time'; import { url } from '../testUtils'; @@ -24,9 +24,9 @@ let truncatedTimeFrame; export default function (fetchMock, assert) { // Mocking this specific route to make sure we only get the items we want to test from the handlers. 
- fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); const splitio = SplitFactory({ core: { diff --git a/src/__tests__/browserSuites/manager.spec.js b/src/__tests__/browserSuites/manager.spec.js index 15b250bab..a5a09ff6d 100644 --- a/src/__tests__/browserSuites/manager.spec.js +++ b/src/__tests__/browserSuites/manager.spec.js @@ -4,7 +4,7 @@ import map from 'lodash/map'; import { url } from '../testUtils'; export default async function (settings, fetchMock, assert) { - fetchMock.getOnce({ url: url(settings, '/splitChanges?s=1.1&since=-1'), overwriteRoutes: true }, { status: 200, body: splitChangesMockReal }); + fetchMock.getOnce({ url: url(settings, '/splitChanges?s=1.2&since=-1'), overwriteRoutes: true }, { status: 200, body: splitChangesMockReal }); const mockSplits = splitChangesMockReal; diff --git a/src/__tests__/browserSuites/push-corner-cases.spec.js b/src/__tests__/browserSuites/push-corner-cases.spec.js index bc7e8bb09..a07d1c162 100644 --- a/src/__tests__/browserSuites/push-corner-cases.spec.js +++ b/src/__tests__/browserSuites/push-corner-cases.spec.js @@ -37,8 +37,8 @@ const MILLIS_SPLIT_CHANGES_RESPONSE = 400; /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*), auth, SSE connection, SDK_READY_FROM_CACHE - * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*), auth, SSE connection, SDK_READY_FROM_CACHE + * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /memberships/*) * 0.2 secs: SPLIT_KILL event -> /splitChanges * 0.4 secs: /splitChanges response --> SDK_READY */ @@ -71,13 +71,13 @@ export function testSplitKillOnReadyFromCache(fetchMock, assert) { }); // 1 auth request - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushEnabledNicolas }); - // 2 mySegments requests: initial sync and after SSE opened - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 2 }, { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushEnabledNicolas }); + // 2 memberships requests: initial sync and after SSE opened + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 2 }, { status: 200, body: { ms: {} } }); // 2 splitChanges request: initial sync and after SSE opened. 
Sync after SPLIT_KILL is not performed because SplitsSyncTask is "executing" - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=25'), { status: 200, body: splitChangesMock1 }, { delay: MILLIS_SPLIT_CHANGES_RESPONSE, /* delay response */ }); - fetchMock.getOnce(url(settings, `/splitChanges?s=1.1&since=${splitChangesMock1.till}`), { status: 200, body: { splits: [], since: splitChangesMock1.till, till: splitChangesMock1.till } }, { delay: MILLIS_SPLIT_CHANGES_RESPONSE - 100, /* delay response */ }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=25'), { status: 200, body: splitChangesMock1 }, { delay: MILLIS_SPLIT_CHANGES_RESPONSE, /* delay response */ }); + fetchMock.getOnce(url(settings, `/splitChanges?s=1.2&since=${splitChangesMock1.till}`), { status: 200, body: { splits: [], since: splitChangesMock1.till, till: splitChangesMock1.till } }, { delay: MILLIS_SPLIT_CHANGES_RESPONSE - 100, /* delay response */ }); fetchMock.get(new RegExp('.*'), function (url) { assert.fail('unexpected GET request with url: ' + url); diff --git a/src/__tests__/browserSuites/push-fallbacking.spec.js b/src/__tests__/browserSuites/push-fallback.spec.js similarity index 67% rename from src/__tests__/browserSuites/push-fallbacking.spec.js rename to src/__tests__/browserSuites/push-fallback.spec.js index 83832c0d6..dc8df88b9 100644 --- a/src/__tests__/browserSuites/push-fallbacking.spec.js +++ b/src/__tests__/browserSuites/push-fallback.spec.js @@ -5,9 +5,9 @@ import splitChangesMock1 from '../mocks/splitchanges.real.withSegments.json'; // since: -1, till: 1457552620999 (for initial fetch) import splitChangesMock2 from '../mocks/splitchanges.real.updateWithSegments.json'; // since: 1457552620999, till: 1457552649999 (for SPLIT_UPDATE event) import splitChangesMock3 from '../mocks/splitchanges.real.updateWithoutSegments.json'; // since: 1457552649999, till: 1457552669999 (for second polling fetch) -import mySegmentsNicolasMock1 from '../mocks/mysegments.nicolas@split.io.json'; -import mySegmentsNicolasMock2 from '../mocks/mysegments.nicolas@split.io.mock2.json'; -import mySegmentsMarcio from '../mocks/mysegments.marcio@split.io.json'; +import membershipsNicolasMock1 from '../mocks/memberships.nicolas@split.io.json'; +import membershipsNicolasMock2 from '../mocks/memberships.nicolas@split.io.mock2.json'; +import membershipsMarcio from '../mocks/memberships.marcio@split.io.json'; import occupancy0ControlPriMessage from '../mocks/message.OCCUPANCY.0.control_pri.1586987434550.json'; import occupancy1ControlPriMessage from '../mocks/message.OCCUPANCY.1.control_pri.1586987434450.json'; @@ -20,7 +20,7 @@ import streamingPausedControlPriMessage2 from '../mocks/message.CONTROL.STREAMIN import streamingDisabledControlPriMessage from '../mocks/message.CONTROL.STREAMING_DISABLED.control_pri.1586987434950.json'; import splitUpdateMessage from '../mocks/message.SPLIT_UPDATE.1457552649999.json'; -import mySegmentsUpdateMessage from '../mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json'; +import mySegmentsUpdateMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552640000.json'; import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json'; import authPushEnabledNicolasAndMarcio from '../mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json'; @@ -39,9 +39,9 @@ const userKey = 'nicolas@split.io'; const secondUserKey = 'marcio@split.io'; const baseUrls = { - sdk: 'https://sdk.push-fallbacking/api', - events: 'https://events.push-fallbacking/api', - 
auth: 'https://auth.push-fallbacking/api' + sdk: 'https://sdk.push-fallback/api', + events: 'https://events.push-fallback/api', + auth: 'https://auth.push-fallback/api' }; const config = { core: { @@ -55,7 +55,6 @@ const config = { }, urls: baseUrls, streamingEnabled: true, - // debug: true, }; const settings = settingsFactory(config); @@ -67,11 +66,11 @@ const MILLIS_CREATE_CLIENT_DURING_PUSH = MILLIS_STREAMING_UP_OCCUPANCY + 50; const MILLIS_SPLIT_UPDATE_EVENT_DURING_PUSH = MILLIS_STREAMING_UP_OCCUPANCY + 100; const MILLIS_STREAMING_PAUSED_CONTROL = MILLIS_SPLIT_UPDATE_EVENT_DURING_PUSH + 100; -const MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_POLLING = MILLIS_STREAMING_PAUSED_CONTROL + 100; +const MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_POLLING = MILLIS_STREAMING_PAUSED_CONTROL + 100; const MILLIS_STREAMING_RESUMED_CONTROL = MILLIS_STREAMING_PAUSED_CONTROL + settings.scheduler.featuresRefreshRate + 100; -const MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_PUSH = MILLIS_STREAMING_RESUMED_CONTROL + 100; +const MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_PUSH = MILLIS_STREAMING_RESUMED_CONTROL + 100; -const MILLIS_STREAMING_PAUSED_CONTROL_2 = MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_PUSH + 100; +const MILLIS_STREAMING_PAUSED_CONTROL_2 = MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_PUSH + 100; const MILLIS_STREAMING_RESET_WHILE_PUSH_DOWN = MILLIS_STREAMING_PAUSED_CONTROL_2 + 100; const MILLIS_STREAMING_RESET_WHILE_PUSH_UP = MILLIS_STREAMING_RESET_WHILE_PUSH_DOWN + settings.scheduler.featuresRefreshRate; const MILLIS_STREAMING_DISABLED_CONTROL = MILLIS_STREAMING_RESET_WHILE_PUSH_UP + 100; @@ -79,30 +78,30 @@ const MILLIS_DESTROY = MILLIS_STREAMING_DISABLED_CONTROL + settings.scheduler.fe /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*), auth, SSE connection - * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/nicolas) - * 0.2 secs: Streaming down (OCCUPANCY event) -> fetch due to fallback to polling (/splitChanges, /mySegments/nicolas) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/nicolas), auth, SSE connection + * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /memberships/nicolas) + * 0.2 secs: Streaming down (OCCUPANCY event) -> fetch due to fallback to polling (/splitChanges, /memberships/nicolas) * 0.3 secs: SPLIT_UPDATE event ignored * 0.4 secs: periodic fetch due to polling (/splitChanges) - * 0.45 secs: periodic fetch due to polling (/mySegments/*) - * 0.5 secs: Streaming up (OCCUPANCY event) -> syncAll (/splitChanges, /mySegments/nicolas) - * 0.55 secs: create a new client while streaming -> initial fetch (/mySegments/marcio), auth, SSE connection and syncAll (/splitChanges, /mySegments/nicolas, /mySegments/marcio) + * 0.45 secs: periodic fetch due to polling (/memberships/nicolas) + * 0.5 secs: Streaming up (OCCUPANCY event) -> syncAll (/splitChanges, /memberships/nicolas) + * 0.55 secs: create a new client while streaming -> initial fetch (/memberships/marcio), auth, SSE connection and syncAll (/splitChanges, /memberships/nicolas, /memberships/marcio) * 0.6 secs: SPLIT_UPDATE event -> /splitChanges - * 0.7 secs: Streaming down (CONTROL event) -> fetch due to fallback to polling (/splitChanges, /mySegments/nicolas, /mySegments/marcio) - * 0.8 secs: MY_SEGMENTS_UPDATE event ignored + * 0.7 secs: Streaming down (CONTROL event) -> fetch due to fallback to polling (/splitChanges, /memberships/nicolas, /memberships/marcio) + * 0.8 secs: MEMBERSHIPS_MS_UPDATE event ignored * 0.9 secs: periodic fetch due to polling 
(/splitChanges) - * 0.95 secs: periodic fetch due to polling (/mySegments/nicolas, /mySegments/marcio, /mySegments/facundo) - * 1.0 secs: Streaming up (CONTROL event) -> syncAll (/splitChanges, /mySegments/nicolas, /mySegments/marcio, /mySegments/facundo) - * 1.1 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas - * 1.2 secs: Streaming down (CONTROL event) -> fetch due to fallback to polling (/splitChanges, /mySegments/nicolas, /mySegments/marcio, /mySegments/facundo) + * 0.95 secs: periodic fetch due to polling (/memberships/nicolas, /memberships/marcio) + * 1.0 secs: Streaming up (CONTROL event) -> syncAll (/splitChanges, /memberships/nicolas, /memberships/marcio) + * 1.1 secs: Unbounded MEMBERSHIPS_MS_UPDATE event -> /memberships/nicolas, /memberships/marcio + * 1.2 secs: Streaming down (CONTROL event) -> fetch due to fallback to polling (/splitChanges, /memberships/nicolas, /memberships/marcio) * 1.3 secs: STREAMING_RESET control event -> auth, SSE connection, syncAll and stop polling * 1.5 secs: STREAMING_RESET control event -> auth, SSE connection, syncAll - * 1.6 secs: Streaming closed (CONTROL STREAMING_DISABLED event) -> fetch due to fallback to polling (/splitChanges, /mySegments/nicolas, /mySegments/marcio, /mySegments/facundo) - * 1.8 secs: periodic fetch due to polling (/splitChanges): due to update without segments, mySegments are not fetched + * 1.6 secs: Streaming closed (CONTROL STREAMING_DISABLED event) -> fetch due to fallback to polling (/splitChanges, /memberships/nicolas, /memberships/marcio) + * 1.8 secs: periodic fetch due to polling (/splitChanges): due to update without segments, memberships are not fetched * 2.0 secs: periodic fetch due to polling (/splitChanges) * 2.1 secs: destroy client */ -export function testFallbacking(fetchMock, assert) { +export function testFallback(fetchMock, assert) { assert.plan(20); fetchMock.reset(); @@ -111,7 +110,7 @@ export function testFallbacking(fetchMock, assert) { // mock SSE open and message events setMockListener((eventSourceInstance) => { - const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; assert.equals(eventSourceInstance.url, expectedSSEurl, 'EventSource URL is the expected'); setTimeout(() => { @@ -137,7 +136,7 @@ export function testFallbacking(fetchMock, assert) { secondClient = splitio.client(secondUserKey); setMockListener((eventSourceInstance) => { - const expectedSSEurl = `${url(settings, 
'/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_MjE0MTkxOTU2Mg%3D%3D_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolasAndMarcio.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolasAndMarcio.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; assert.equals(eventSourceInstance.url, expectedSSEurl, 'new EventSource URL is the expected'); eventSourceInstance.emitOpen(); @@ -156,7 +155,7 @@ export function testFallbacking(fetchMock, assert) { setTimeout(() => { assert.equal(eventSourceInstance.readyState, EventSourceMock.OPEN, 'EventSource connection keeps opened after PUSH_SUBSYSTEM_DOWN (STREAMING_PAUSED event)'); eventSourceInstance.emitMessage(mySegmentsUpdateMessage); - }, MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_POLLING - MILLIS_CREATE_CLIENT_DURING_PUSH); // send a MY_SEGMENTS_UPDATE event while polling, to check that we are ignoring it + }, MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_POLLING - MILLIS_CREATE_CLIENT_DURING_PUSH); // send a MEMBERSHIPS_MS_UPDATE event while polling, to check that we are ignoring it setTimeout(() => { eventSourceInstance.emitMessage(streamingResumedControlPriMessage); @@ -168,7 +167,7 @@ export function testFallbacking(fetchMock, assert) { assert.equal(client.getTreatment('real_split'), 'on', 'evaluation with updated segment'); }); eventSourceInstance.emitMessage(mySegmentsUpdateMessage); - }, MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_PUSH - MILLIS_CREATE_CLIENT_DURING_PUSH); // send a MY_SEGMENTS_UPDATE event + }, MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_PUSH - MILLIS_CREATE_CLIENT_DURING_PUSH); // send a MEMBERSHIPS_MS_UPDATE event setTimeout(() => { eventSourceInstance.emitMessage(streamingPausedControlPriMessage2); @@ -207,90 +206,91 @@ export function testFallbacking(fetchMock, assert) { }); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth success'); return { status: 200, body: authPushEnabledNicolas }; }); - // initial split and mySegment sync - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + // initial split and memberships sync + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // split and segment sync after SSE opened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(url(settings, 
'/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // fetches due to first fallback to polling - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_STREAMING_DOWN_OCCUPANCY + settings.scheduler.featuresRefreshRate), 'fetch due to first fallback to polling'); return { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // split and segment sync due to streaming up (OCCUPANCY event) - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); - // creating of second client during streaming: initial mysegment sync, reauth and syncAll due to new client - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); - fetchMock.get({ url: url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}&users=${encodeURIComponent(secondUserKey)}`), repeat: 3 /* initial + 2 STREAMING_RESET */ }, (url, opts) => { + // creation of a second client during streaming: initial memberships sync, reauth and syncAll due to new client + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); + fetchMock.get({ url: url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}&users=${encodeURIComponent(secondUserKey)}`), repeat: 3 /* initial + 2 STREAMING_RESET */ }, (url, opts) => { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('second auth success'); return { status: 200, body: authPushEnabledNicolasAndMarcio }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(url(settings, 
'/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); // fetch due to SPLIT_UPDATE event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_SPLIT_UPDATE_EVENT_DURING_PUSH), 'sync due to SPLIT_UPDATE event'); return { status: 200, body: splitChangesMock2 }; }); // fetches due to second fallback to polling - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); // continue fetches due to second fallback to polling - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_STREAMING_PAUSED_CONTROL + settings.scheduler.featuresRefreshRate), 'fetch due to second fallback to polling'); return { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); // split and segment sync due to streaming up (CONTROL event) - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + 
fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); - // fetch due to MY_SEGMENTS_UPDATE event - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function () { + // fetch due to MEMBERSHIPS_MS_UPDATE event + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function () { const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_PUSH), 'sync due to MY_SEGMENTS_UPDATE event'); - return { status: 200, body: mySegmentsNicolasMock2 }; + assert.true(nearlyEqual(lapse, MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_PUSH), 'sync due to MEMBERSHIPS_MS_UPDATE event'); + return { status: 200, body: membershipsNicolasMock2 }; }); + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); // fetches due to third fallback to polling (STREAMING_PAUSED), two sync all (two STREAMING_RESET events) and fourth fallback (STREAMING_DISABLED) - fetchMock.get({ url: url(settings, '/splitChanges?s=1.1&since=1457552649999'), repeat: 4 }, { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 4 }, { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.get({ url: url(settings, '/mySegments/marcio%40split.io'), repeat: 4 }, { status: 200, body: mySegmentsMarcio }); + fetchMock.get({ url: url(settings, '/splitChanges?s=1.2&since=1457552649999'), repeat: 4 }, { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 4 }, { status: 200, body: membershipsNicolasMock1 }); + fetchMock.get({ url: url(settings, '/memberships/marcio%40split.io'), repeat: 4 }, { status: 200, body: membershipsMarcio }); - // Periodic fetch due to polling (mySegments is not fetched due to smart pausing) - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), function () { + // Periodic fetch due to polling (memberships is not fetched due to smart pausing) + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_STREAMING_DISABLED_CONTROL + settings.scheduler.featuresRefreshRate), 'fetch due to fourth fallback to polling'); return { status: 200, body: splitChangesMock3 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552669999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552669999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_STREAMING_DISABLED_CONTROL + settings.scheduler.featuresRefreshRate * 2), 'fetch due to fourth fallback to polling'); return { status: 200, body: { splits: [], since: 1457552669999, till: 1457552669999 } }; diff --git a/src/__tests__/browserSuites/push-flag-sets.spec.js b/src/__tests__/browserSuites/push-flag-sets.spec.js index 8898f9759..df3d502f2 100644 --- a/src/__tests__/browserSuites/push-flag-sets.spec.js +++ b/src/__tests__/browserSuites/push-flag-sets.spec.js @@ -36,21 +36,21 @@ const MILLIS_FIFTH_SPLIT_UPDATE_EVENT = 500; export function testFlagSets(fetchMock, t) { fetchMock.reset(); - fetchMock.get(baseUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { 'mySegments': [] } }); + fetchMock.get(baseUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { 'ms': 
{} } }); - fetchMock.get(baseUrls.auth + '/v2/auth?s=1.1&users=nicolas%40split.io', function () { + fetchMock.get(baseUrls.auth + '/v2/auth?s=1.2&users=nicolas%40split.io', function () { return { status: 200, body: authPushEnabled }; }); - fetchMock.get(baseUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.get(baseUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return { status: 200, body: { splits: [], since: -1, till: 0}}; }); - fetchMock.get(baseUrls.sdk + '/splitChanges?s=1.1&since=0', function () { + fetchMock.get(baseUrls.sdk + '/splitChanges?s=1.2&since=0', function () { return { status: 200, body: { splits: [], since: 0, till: 1 }}; }); - fetchMock.get(baseUrls.sdk + '/splitChanges?s=1.1&since=-1&sets=set_1,set_2', function () { + fetchMock.get(baseUrls.sdk + '/splitChanges?s=1.2&since=-1&sets=set_1,set_2', function () { return { status: 200, body: { splits: [], since: -1, till: 0 }}; }); - fetchMock.get(baseUrls.sdk + '/splitChanges?s=1.1&since=0&sets=set_1,set_2', function () { + fetchMock.get(baseUrls.sdk + '/splitChanges?s=1.2&since=0&sets=set_1,set_2', function () { return { status: 200, body: { splits: [], since: 0, till: 1 }}; }); @@ -189,7 +189,7 @@ export function testFlagSets(fetchMock, t) { t.test(async (assert) => { - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=2&sets=set_1,set_2', function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=2&sets=set_1,set_2', function () { assert.pass('4 - A fetch is triggered due to the SPLIT_KILL'); return { status: 200, body: { splits: [], since: 2, till: 3 }}; }); @@ -229,7 +229,7 @@ export function testFlagSets(fetchMock, t) { t.test(async (assert) => { - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=1&sets=set_1,set_2', function () { + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=1&sets=set_1,set_2', function () { assert.pass('5 - A fetch is triggered due to the SPLIT_KILL'); return { status: 200, body: { splits: [], since: 1, till: 5 }}; }); diff --git a/src/__tests__/browserSuites/push-initialization-nopush.spec.js b/src/__tests__/browserSuites/push-initialization-nopush.spec.js index 568739970..1df52e58b 100644 --- a/src/__tests__/browserSuites/push-initialization-nopush.spec.js +++ b/src/__tests__/browserSuites/push-initialization-nopush.spec.js @@ -2,7 +2,7 @@ import { SplitFactory } from '../../'; import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsNicolas from '../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolas from '../mocks/memberships.nicolas@split.io.json'; import authPushDisabled from '../mocks/auth.pushDisabled.json'; import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json'; import authInvalidCredentials from '../mocks/auth.invalidCredentials.txt'; @@ -38,22 +38,22 @@ const settings = settingsFactory(config); /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*) and auth (success but push disabled) - * 0.0 secs: syncAll if falling back to polling (/splitChanges, /mySegments/*) - * 0.1 secs: polling (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*) and auth (success but push disabled) + * 0.0 secs: syncAll if falling back to polling (/splitChanges, /memberships/*) + * 0.1 secs: polling (/splitChanges, /memberships/*) */ function 
testInitializationFail(fetchMock, assert, fallbackToPolling) { let start, splitio, client, ready = false; - fetchMock.get(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolas }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function () { + fetchMock.get(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolas }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'initial sync'); return { status: 200, body: splitChangesMock1 }; }); if (fallbackToPolling) { - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { assert.true(ready, 'client ready'); const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'polling (first fetch)'); @@ -61,7 +61,7 @@ function testInitializationFail(fetchMock, assert, fallbackToPolling) { }); } - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { assert.true(ready, 'client ready'); const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, settings.scheduler.featuresRefreshRate), 'polling (second fetch)'); @@ -83,7 +83,7 @@ function testInitializationFail(fetchMock, assert, fallbackToPolling) { export function testAuthWithPushDisabled(fetchMock, assert) { assert.plan(6); - fetchMock.getOnce(`https://auth.push-initialization-nopush/api/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`, function (url, opts) { + fetchMock.getOnce(`https://auth.push-initialization-nopush/api/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`, function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth'); return { status: 200, body: authPushDisabled }; @@ -96,7 +96,7 @@ export function testAuthWithPushDisabled(fetchMock, assert) { export function testAuthWith401(fetchMock, assert) { assert.plan(6); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth'); return { status: 401, body: authInvalidCredentials }; @@ -122,7 +122,7 @@ export function testSSEWithNonRetryableError(fetchMock, assert) { assert.plan(7); // Auth successes - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth successes'); return { status: 200, body: authPushEnabledNicolas }; diff --git a/src/__tests__/browserSuites/push-initialization-retries.spec.js b/src/__tests__/browserSuites/push-initialization-retries.spec.js index d907d21e8..027ceceaa 100644 --- a/src/__tests__/browserSuites/push-initialization-retries.spec.js +++ b/src/__tests__/browserSuites/push-initialization-retries.spec.js @@ -3,7 +3,7 @@ import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; import 
authPushDisabled from '../mocks/auth.pushDisabled.json'; import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json'; import authPushBadToken from '../mocks/auth.pushBadToken.json'; -import mySegmentsNicolasMock from '../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolasMock from '../mocks/memberships.nicolas@split.io.json'; import { nearlyEqual, url } from '../testUtils'; @@ -40,49 +40,49 @@ const settings = settingsFactory(config); /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*) and first auth attempt (fail due to bad token) - * 0.0 secs: polling (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*) and first auth attempt (fail due to bad token) + * 0.0 secs: polling (/splitChanges, /memberships/*) * 0.1 secs: second push connect attempt (auth fail due to network error) - * 0.2 secs: polling (/splitChanges, /mySegments/*) + * 0.2 secs: polling (/splitChanges, /memberships/*) * 0.3 secs: third push connect attempt (auth success but push disabled) - * 0.4 secs: polling (/splitChanges, /mySegments/*) + * 0.4 secs: polling (/splitChanges, /memberships/*) */ export function testPushRetriesDueToAuthErrors(fetchMock, assert) { let start, splitio, client, ready = false; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('first auth attempt'); return { status: 200, body: authPushBadToken }; }); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { throws: new TypeError('Network error') }); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { throws: new TypeError('Network error') }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); const lapse = Date.now() - start; const expected = (settings.scheduler.pushRetryBackoffBase * Math.pow(2, 0) + settings.scheduler.pushRetryBackoffBase * Math.pow(2, 1)); assert.true(nearlyEqual(lapse, expected), 'third auth attempt (approximately in 0.3 seconds from first attempt)'); return { status: 200, body: authPushDisabled }; }); - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 4 }, { status: 200, body: mySegmentsNicolasMock }); + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 4 }, { status: 200, body: membershipsNicolasMock }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'initial sync'); return { status: 200, body: splitChangesMock1 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { assert.true(ready, 'client ready before first polling fetch'); const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'fallback 
to polling'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, settings.scheduler.featuresRefreshRate), 'polling'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, settings.scheduler.featuresRefreshRate * 2), 'keep polling since auth success buth with push disabled'); client.destroy().then(() => { @@ -102,11 +102,11 @@ export function testPushRetriesDueToAuthErrors(fetchMock, assert) { /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*), auth successes and sse fails - * 0.0 secs: polling (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*), auth successes and sse fails + * 0.0 secs: polling (/splitChanges, /memberships/*) * 0.1 secs: second push connect attempt (auth successes and sse fails again) - * 0.2 secs: polling (/splitChanges, /mySegments/*) - * 0.3 secs: third push connect attempt (auth and sse success), syncAll (/splitChanges, /mySegments/*) + * 0.2 secs: polling (/splitChanges, /memberships/*) + * 0.3 secs: third push connect attempt (auth and sse success), syncAll (/splitChanges, /memberships/*) */ export function testPushRetriesDueToSseErrors(fetchMock, assert) { window.EventSource = EventSourceMock; @@ -114,7 +114,7 @@ export function testPushRetriesDueToSseErrors(fetchMock, assert) { let start, splitio, client, ready = false; const expectedTimeToSSEsuccess = (settings.scheduler.pushRetryBackoffBase * Math.pow(2, 0) + settings.scheduler.pushRetryBackoffBase * Math.pow(2, 1)); - const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; let sseattempts = 0; setMockListener(function (eventSourceInstance) { assert.equal(eventSourceInstance.url, expectedSSEurl, 'SSE url is correct'); @@ -130,30 +130,30 @@ export function testPushRetriesDueToSseErrors(fetchMock, assert) { sseattempts++; }); - fetchMock.get({ url: url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), repeat: 3 /* 3 push attempts */ }, function (url, opts) { + fetchMock.get({ url: url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), repeat: 3 /* 3 push attempts */ }, function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth success'); return { status: 200, body: authPushEnabledNicolas }; }); - 
fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 4 }, { status: 200, body: mySegmentsNicolasMock }); + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 4 }, { status: 200, body: membershipsNicolasMock }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'initial sync'); return { status: 200, body: splitChangesMock1 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { assert.true(ready, 'client ready before first polling fetch'); const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'fallback to polling'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, settings.scheduler.featuresRefreshRate), 'polling'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, expectedTimeToSSEsuccess), 'sync due to success SSE connection'); client.destroy().then(() => { @@ -176,7 +176,7 @@ export function testPushRetriesDueToSseErrors(fetchMock, assert) { * Assert that if the main client is destroyed while authentication request is in progress and successes, the SDK doesn't open the SSE connection * * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*) and first auth attempt + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*) and first auth attempt * 0.05 secs: client destroyed * 0.1 secs: auth success but not SSE connection opened since push was closed * 0.2 secs: test finished @@ -189,10 +189,10 @@ export function testSdkDestroyWhileAuthSuccess(fetchMock, assert) { let splitio, client, ready = false; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushEnabledNicolas }, { delay: 100 }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushEnabledNicolas }, { delay: 100 }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); setTimeout(() => { client.destroy().then(() => { @@ -224,9 +224,9 @@ export function testSdkDestroyWhileConnDelay(fetchMock, assert) { assert.fail('unexpected EventSource request with url: ' + eventSourceInstance.url); }); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { status: 200, body: { ...authPushEnabledNicolas, connDelay: 0.1 } }); - fetchMock.getOnce(url(settings, 
'/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { status: 200, body: { ...authPushEnabledNicolas, connDelay: 0.1 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); const client = SplitFactory(config).client(); setTimeout(() => { @@ -243,8 +243,8 @@ export function testSdkDestroyWhileConnDelay(fetchMock, assert) { * Asserts that if the client is destroyed while authentication request is in progress and fails, the SDK doesn't schedule an auth retry * * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*) and first auth attempt (fail due to bad token) - * 0.0 secs: polling (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*) and first auth attempt (fail due to bad token) + * 0.0 secs: polling (/splitChanges, /memberships/*) * 0.1 secs: second auth attempt request * 0.15 secs: client destroyed * 0.2 secs: second auth attempt response (fail due to network error) @@ -255,12 +255,12 @@ export function testSdkDestroyWhileAuthRetries(fetchMock, assert) { let splitio, client, ready = false; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushBadToken }); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { throws: new TypeError('Network error') }, { delay: 100 }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushBadToken }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { throws: new TypeError('Network error') }, { delay: 100 }); - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 2 }, { status: 200, body: mySegmentsNicolasMock }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 2 }, { status: 200, body: membershipsNicolasMock }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); fetchMock.get(new RegExp('.*'), function (url) { assert.fail('unexpected GET request with url: ' + url); diff --git a/src/__tests__/browserSuites/push-refresh-token.spec.js b/src/__tests__/browserSuites/push-refresh-token.spec.js index cecfcb1fe..8e22592c9 100644 --- a/src/__tests__/browserSuites/push-refresh-token.spec.js +++ b/src/__tests__/browserSuites/push-refresh-token.spec.js @@ -1,6 +1,6 @@ import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsNicolasMock1 from '../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolasMock1 from '../mocks/memberships.nicolas@split.io.json'; import authPushEnabledNicolas from 
'../mocks/auth.pushEnabled.nicolas@split.io.601secs.json'; import authPushDisabled from '../mocks/auth.pushDisabled.json'; @@ -57,7 +57,7 @@ export function testRefreshToken(fetchMock, assert) { sseCount++; switch (sseCount) { case 1: - assert.true(nearlyEqual(Date.now() - start, 0), 'first connection is created inmediatelly'); + assert.true(nearlyEqual(Date.now() - start, 0), 'first connection is created immediately'); break; case 2: assert.true(nearlyEqual(Date.now() - start, MILLIS_REFRESH_TOKEN + MILLIS_CONNDELAY), 'second connection is created with a delay'); @@ -77,22 +77,22 @@ export function testRefreshToken(fetchMock, assert) { }); // initial sync - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // first auth - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth success'); return { status: 200, body: authPushEnabledNicolas }; }); // sync after SSE opened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // re-auth due to refresh token, with connDelay of 0.5 seconds - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_REFRESH_TOKEN), 'reauthentication for token refresh'); if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); @@ -100,15 +100,15 @@ export function testRefreshToken(fetchMock, assert) { }); // sync after SSE reopened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_REFRESH_TOKEN + MILLIS_CONNDELAY), 'sync after SSE connection is reopened'); return { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // second re-auth due to refresh token, this time responding with pushEnabled false - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + 
fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_REFRESH_TOKEN * 2), 'second reauthentication for token refresh'); if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); @@ -116,7 +116,7 @@ }); // split sync after SSE closed due to push disabled - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_REFRESH_TOKEN * 2), 'sync after SSE connection is reopened a second time'); setTimeout(() => { @@ -126,7 +126,7 @@ }, 200); // destroy the client a little bit later, to assert that there weren't new requests return { status: 500, body: 'server error' }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); fetchMock.get(new RegExp('.*'), function (url) { assert.fail('unexpected GET request with url: ' + url); diff --git a/src/__tests__/browserSuites/push-synchronization-retries.spec.js b/src/__tests__/browserSuites/push-synchronization-retries.spec.js index c7031b7c9..db0374aa7 100644 --- a/src/__tests__/browserSuites/push-synchronization-retries.spec.js +++ b/src/__tests__/browserSuites/push-synchronization-retries.spec.js @@ -1,13 +1,13 @@ import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; import splitChangesMock3 from '../mocks/splitchanges.since.1457552620999.till.1457552649999.SPLIT_UPDATE.json'; -import mySegmentsNicolasMock1 from '../mocks/mysegments.nicolas@split.io.json'; -import mySegmentsNicolasMock2 from '../mocks/mysegments.nicolas@split.io.mock2.json'; -import mySegmentsMarcio from '../mocks/mysegments.marcio@split.io.json'; +import membershipsNicolasMock1 from '../mocks/memberships.nicolas@split.io.json'; +import membershipsNicolasMock2 from '../mocks/memberships.nicolas@split.io.mock2.json'; +import membershipsMarcio from '../mocks/memberships.marcio@split.io.json'; import splitUpdateMessage from '../mocks/message.SPLIT_UPDATE.1457552649999.json'; import oldSplitUpdateMessage from '../mocks/message.SPLIT_UPDATE.1457552620999.json'; -import mySegmentsUpdateMessage from '../mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json'; +import mySegmentsUpdateMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552640000.json'; import splitKillMessage from '../mocks/message.SPLIT_KILL.1457552650000.json'; import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json'; @@ -48,26 +48,26 @@ const MILLIS_RETRY_FOR_FIRST_SPLIT_UPDATE_EVENT = 300; const MILLIS_SECOND_SPLIT_UPDATE_EVENT = 400; -const MILLIS_MYSEGMENT_UPDATE_EVENT = 500; -const MILLIS_THIRD_RETRY_FOR_MYSEGMENT_UPDATE_EVENT = 1200; +const MILLIS_MEMBERSHIPS_MS_UPDATE = 500; +const MILLIS_THIRD_RETRY_FOR_MEMBERSHIPS_MS_UPDATE = 1200; const MILLIS_SPLIT_KILL_EVENT = 1300; const MILLIS_THIRD_RETRY_FOR_SPLIT_KILL_EVENT = 2000; /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*), auth, 
SSE connection - * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*), auth, SSE connection + * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /memberships/*) * * 0.2 secs: SPLIT_UPDATE event -> /splitChanges: bad response -> SDK_UPDATE triggered * 0.3 secs: SPLIT_UPDATE event -> /splitChanges retry: success * * 0.4 secs: SPLIT_UPDATE event with old changeNumber -> SDK_UPDATE not triggered * - * 0.5 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io: network error - * 0.6 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io retry: invalid JSON response - * 0.8 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io: server error - * 1.2 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io retry: success -> SDK_UPDATE triggered + * 0.5 secs: Unbounded MEMBERSHIPS_MS_UPDATE event -> /memberships/marcio@split.io OK, /memberships/nicolas@split.io: network error + * 0.6 secs: Unbounded MEMBERSHIPS_MS_UPDATE event -> /memberships/nicolas@split.io retry: invalid JSON response + * 0.8 secs: Unbounded MEMBERSHIPS_MS_UPDATE event -> /memberships/nicolas@split.io: server error + * 1.2 secs: Unbounded MEMBERSHIPS_MS_UPDATE event -> /memberships/nicolas@split.io retry: success -> SDK_UPDATE triggered * * 1.3 secs: SPLIT_KILL event -> /splitChanges: outdated response -> SDK_UPDATE triggered although fetches fail * 1.4 secs: SPLIT_KILL event -> /splitChanges retry: network error @@ -76,7 +76,7 @@ const MILLIS_THIRD_RETRY_FOR_SPLIT_KILL_EVENT = 2000; * (we destroy the client here, to assert that all scheduled tasks are clean) */ export function testSynchronizationRetries(fetchMock, assert) { - // Force the backoff base of UpdateWorkers, from 10 secs to 100 ms, to reduce test time + // Force the backoff base of UpdateWorkers, from 1 sec to 100 ms, to reduce test time Backoff.__TEST__BASE_MILLIS = 100; assert.plan(17); @@ -88,7 +88,7 @@ export function testSynchronizationRetries(fetchMock, assert) { setMockListener(function (eventSourceInstance) { start = Date.now(); - const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; assert.equals(eventSourceInstance.url, expectedSSEurl, 'EventSource URL is the expected'); /* events on first SSE connection */ @@ -114,11 +114,11 @@ export function testSynchronizationRetries(fetchMock, assert) { assert.equal(client.getTreatment('splitters'), 'off', 'evaluation with initial MySegments list'); client.once(client.Event.SDK_UPDATE, () => { const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_THIRD_RETRY_FOR_MYSEGMENT_UPDATE_EVENT), 'SDK_UPDATE due to MY_SEGMENTS_UPDATE event'); + assert.true(nearlyEqual(lapse, MILLIS_THIRD_RETRY_FOR_MEMBERSHIPS_MS_UPDATE), 'SDK_UPDATE due to 
MEMBERSHIPS_MS_UPDATE event'); assert.equal(client.getTreatment('splitters'), 'on', 'evaluation with updated MySegments list'); }); eventSourceInstance.emitMessage(mySegmentsUpdateMessage); - }, MILLIS_MYSEGMENT_UPDATE_EVENT); // send a MY_SEGMENTS_UPDATE event with a new changeNumber after 0.4 seconds + }, MILLIS_MEMBERSHIPS_MS_UPDATE); // send a MEMBERSHIPS_MS_UPDATE event with a new changeNumber after 0.4 seconds setTimeout(() => { client.once(client.Event.SDK_UPDATE, () => { @@ -135,60 +135,60 @@ export function testSynchronizationRetries(fetchMock, assert) { }); // initial auth - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}&users=${encodeURIComponent(otherUserKeySync)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}&users=${encodeURIComponent(otherUserKeySync)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth success'); return { status: 200, body: authPushEnabledNicolas }; }); - // initial split and mySegments sync - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.get({ url: url(settings, '/mySegments/marcio%40split.io'), repeat: 2 }, { status: 200, body: mySegmentsMarcio }); + // initial split and memberships sync + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + fetchMock.get({ url: url(settings, '/memberships/marcio%40split.io'), repeat: 3 }, { status: 200, body: membershipsMarcio }); // split and segment sync after SSE opened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_SSE_OPEN), 'sync after SSE connection is opened'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // fetch due to SPLIT_UPDATE event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); // fetch retry for SPLIT_UPDATE event, due to previous unexpected response (response till minor than SPLIT_UPDATE changeNumber) - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_RETRY_FOR_FIRST_SPLIT_UPDATE_EVENT), 'fetch retry due to SPLIT_UPDATE event'); return { status: 200, body: splitChangesMock3 }; }); - // fetch due to first MY_SEGMENTS_UPDATE event - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { throws: new TypeError('Network error') }); - // fetch retry for MY_SEGMENTS_UPDATE event, due to previous fail 
- fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: '{ "since": 1457552620999, "til' }); // invalid JSON response - // fetch retry for MY_SEGMENTS_UPDATE event, due to previous fail - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 500, body: 'server error' }); - // second fetch retry for MY_SEGMENTS_UPDATE event, due to previous fail - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function () { + // fetch due to first MEMBERSHIPS_MS_UPDATE event + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { throws: new TypeError('Network error') }); + // fetch retry for MEMBERSHIPS_MS_UPDATE event, due to previous fail + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: '{ "since": 1457552620999, "til' }); // invalid JSON response + // fetch retry for MEMBERSHIPS_MS_UPDATE event, due to previous fail + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 500, body: 'server error' }); + // second fetch retry for MEMBERSHIPS_MS_UPDATE event, due to previous fail + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function () { const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_THIRD_RETRY_FOR_MYSEGMENT_UPDATE_EVENT), 'sync second retry for MY_SEGMENTS_UPDATE event'); - return { status: 200, body: mySegmentsNicolasMock2 }; + assert.true(nearlyEqual(lapse, MILLIS_THIRD_RETRY_FOR_MEMBERSHIPS_MS_UPDATE), 'sync second retry for MEMBERSHIPS_MS_UPDATE event'); + return { status: 200, body: membershipsNicolasMock2 }; }); // fetch due to SPLIT_KILL event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), function () { assert.equal(client.getTreatment('whitelist'), 'not_allowed', 'evaluation with split killed immediately, before fetch is done'); const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_SPLIT_KILL_EVENT), 'sync due to SPLIT_KILL event'); return { status: 200, body: { since: 1457552649999, till: 1457552649999, splits: [] } }; // returning old state }); // first fetch retry for SPLIT_KILL event, due to previous unexpected response (response till minor than SPLIT_KILL changeNumber) - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), { throws: new TypeError('Network error') }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), { throws: new TypeError('Network error') }); // second fetch retry for SPLIT_KILL event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), { status: 200, body: '{ "since": 1457552620999, "til' }); // invalid JSON response + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), { status: 200, body: '{ "since": 1457552620999, "til' }); // invalid JSON response // third fetch retry for SPLIT_KILL event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_THIRD_RETRY_FOR_SPLIT_KILL_EVENT), 'third fetch retry due to SPLIT_KILL event'); diff --git a/src/__tests__/browserSuites/push-synchronization.spec.js b/src/__tests__/browserSuites/push-synchronization.spec.js index 4ca030726..4086050fd 100644 --- 
a/src/__tests__/browserSuites/push-synchronization.spec.js +++ b/src/__tests__/browserSuites/push-synchronization.spec.js @@ -2,27 +2,25 @@ import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; import splitChangesMock3 from '../mocks/splitchanges.since.1457552620999.till.1457552649999.SPLIT_UPDATE.json'; import splitChangesMock4 from '../mocks/splitchanges.since.1457552649999.till.1457552650000.SPLIT_KILL.json'; -import mySegmentsNicolasMock1 from '../mocks/mysegments.nicolas@split.io.json'; -import mySegmentsNicolasMock2 from '../mocks/mysegments.nicolas@split.io.mock2.json'; -import mySegmentsMarcio from '../mocks/mysegments.marcio@split.io.json'; +import membershipsNicolasMock2 from '../mocks/memberships.nicolas@split.io.mock2.json'; +import membershipsMarcio from '../mocks/memberships.marcio@split.io.json'; import splitUpdateMessage from '../mocks/message.SPLIT_UPDATE.1457552649999.json'; import oldSplitUpdateMessage from '../mocks/message.SPLIT_UPDATE.1457552620999.json'; -import mySegmentsUpdateMessageNoPayload from '../mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json'; -import mySegmentsUpdateMessageWithPayload from '../mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552645000.json'; -import mySegmentsUpdateMessageWithEmptyPayload from '../mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552646000.json'; import splitKillMessage from '../mocks/message.SPLIT_KILL.1457552650000.json'; -import unboundedMessage from '../mocks/message.V2.UNBOUNDED.1457552650000.json'; -import boundedZlibMessage from '../mocks/message.V2.BOUNDED.ZLIB.1457552651000.json'; -import keylistGzipMessage from '../mocks/message.V2.KEYLIST.GZIP.1457552652000.json'; -import segmentRemovalMessage from '../mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json'; +import unboundedMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json'; +import boundedZlibMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.ZLIB.1457552651000.json'; +import keylistGzipMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json'; +import segmentRemovalMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; +import unboundedLSMessage from '../mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.DELAY.1457552650000.json'; +import segmentRemovalLSMessage from '../mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json'; import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json'; import authPushEnabledNicolasAndMarcio from '../mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json'; +import { Backoff } from '@splitsoftware/splitio-commons/src/utils/Backoff'; import { nearlyEqual, url, hasNoCacheHeader } from '../testUtils'; -import includes from 'lodash/includes'; // Replace original EventSource with mock import EventSourceMock, { setMockListener } from '../testUtils/eventSourceMock'; @@ -55,49 +53,52 @@ const settings = settingsFactory(config); const MILLIS_SSE_OPEN = 100; const MILLIS_FIRST_SPLIT_UPDATE_EVENT = 200; const MILLIS_SECOND_SPLIT_UPDATE_EVENT = 300; -const MILLIS_MY_SEGMENTS_UPDATE_EVENT_NO_PAYLOAD = 400; -const MILLIS_SPLIT_KILL_EVENT = 500; -const MILLIS_NEW_CLIENT = 600; -const MILLIS_SECOND_SSE_OPEN = 700; -const MILLIS_MY_SEGMENTS_UPDATE_WITH_PAYLOAD = 800; -const MILLIS_MY_SEGMENTS_UPDATE_WITH_EMPTY_PAYLOAD = 900; -const MILLIS_MORE_CLIENTS = 1000; -const 
MILLIS_UNBOUNDED_FETCH = 1100; -const MILLIS_BOUNDED_FALLBACK = 1200; -const MILLIS_KEYLIST_FALLBACK = 1300; -const MILLIS_BOUNDED = 1400; -const MILLIS_KEYLIST = 1500; -const MILLIS_SEGMENT_REMOVAL = 1600; +const MILLIS_SPLIT_KILL_EVENT = 400; +const MILLIS_NEW_CLIENT = 500; +const MILLIS_SECOND_SSE_OPEN = 600; +const MILLIS_MORE_CLIENTS = 700; +const MILLIS_MEMBERSHIPS_MS_UPDATE_UNBOUNDED_FETCH = 800; +const MILLIS_MEMBERSHIPS_MS_UPDATE_BOUNDED_FALLBACK = 900; +const MILLIS_MEMBERSHIPS_MS_UPDATE_KEYLIST_FALLBACK = 1000; +const MILLIS_MEMBERSHIPS_MS_UPDATE_BOUNDED = 1100; +const MILLIS_MEMBERSHIPS_MS_UPDATE_KEYLIST = 1200; +const MILLIS_MEMBERSHIPS_MS_UPDATE_SEGMENT_REMOVAL = 1300; +const MILLIS_MEMBERSHIPS_LS_UPDATE_UNBOUNDED_FETCH = 1400; +const MILLIS_MEMBERSHIPS_LS_UPDATE_SEGMENT_REMOVAL = 1900; +const EXPECTED_DELAY_AND_BACKOFF = 241 + 100; /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*), auth, SSE connection - * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*), auth, SSE connection + * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /memberships/*) * 0.2 secs: SPLIT_UPDATE event -> /splitChanges * 0.3 secs: SPLIT_UPDATE event with old changeNumber - * 0.4 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io - * 0.5 secs: SPLIT_KILL event -> /splitChanges - * 0.6 secs: creates a new client -> new auth and SSE connection - * 0.7 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*) - * 0.8 secs: MY_SEGMENTS_UPDATE event for new client (with payload). - * 0.9 secs: MY_SEGMENTS_UPDATE event for new client (with empty payload). - * 1.0 secs: creates more clients - * 1.1 secs: MY_SEGMENTS_UPDATE_V2 UnboundedFetchRequest event. - * 1.2 secs: MY_SEGMENTS_UPDATE_V2 BoundedFetchRequest event error --> UnboundedFetchRequest. - * 1.3 secs: MY_SEGMENTS_UPDATE_V2 KeyList event error --> UnboundedFetchRequest. - * 1.4 secs: MY_SEGMENTS_UPDATE_V2 BoundedFetchRequest event. - * 1.5 secs: MY_SEGMENTS_UPDATE_V2 KeyList event. - * 1.6 secs: MY_SEGMENTS_UPDATE_V2 SegmentRemoval event. + * 0.4 secs: SPLIT_KILL event -> /splitChanges + * 0.5 secs: creates a new client -> new auth and SSE connection + * 0.6 secs: SSE connection opened -> syncAll (/splitChanges, /memberships/*) + * 0.7 secs: creates more clients + * 0.8 secs: MEMBERSHIPS_MS_UPDATE UnboundedFetchRequest event. + * 0.9 secs: MEMBERSHIPS_MS_UPDATE BoundedFetchRequest event error --> UnboundedFetchRequest. + * 1.0 secs: MEMBERSHIPS_MS_UPDATE KeyList event error --> UnboundedFetchRequest. + * 1.1 secs: MEMBERSHIPS_MS_UPDATE BoundedFetchRequest event. + * 1.2 secs: MEMBERSHIPS_MS_UPDATE KeyList event. + * 1.3 secs: MEMBERSHIPS_MS_UPDATE SegmentRemoval event. 
+ * 1.4 secs: MEMBERSHIPS_LS_UPDATE UnboundedFetchRequest event, with 241 ms delay for 'nicolas@split.io' (hash('nicolas@split.io') % 300) + * 1.641 secs: /memberships/* fetch due to unbounded MEMBERSHIPS_LS_UPDATE event, with an old changeNumber + * 1.741 secs: /memberships/* fetch due to unbounded MEMBERSHIPS_LS_UPDATE event, with the target changeNumber -> SDK_UPDATE event + * 1.9 secs: MEMBERSHIPS_LS_UPDATE SegmentRemoval event -> SPLIT_UPDATE event */ export function testSynchronization(fetchMock, assert) { - assert.plan(38); + // Force the backoff base of UpdateWorkers to reduce test time + Backoff.__TEST__BASE_MILLIS = 100; + assert.plan(34); fetchMock.reset(); let start, splitio, client, otherClient, keylistAddClient, keylistRemoveClient, bitmapTrueClient, sharedClients = []; // mock SSE open and message events setMockListener((eventSourceInstance) => { - const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; assert.equals(eventSourceInstance.url, expectedSSEurl, 'EventSource URL is the expected'); /* events on first SSE connection */ @@ -119,16 +120,6 @@ export function testSynchronization(fetchMock, assert) { eventSourceInstance.emitMessage(oldSplitUpdateMessage); }, MILLIS_SECOND_SPLIT_UPDATE_EVENT); // send a SPLIT_UPDATE event with an old changeNumber after 0.3 seconds - setTimeout(() => { - assert.equal(client.getTreatment('splitters'), 'off', 'evaluation with initial MySegments list'); - client.once(client.Event.SDK_UPDATE, () => { - const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_MY_SEGMENTS_UPDATE_EVENT_NO_PAYLOAD), 'SDK_UPDATE due to MY_SEGMENTS_UPDATE event'); - assert.equal(client.getTreatment('splitters'), 'on', 'evaluation with updated MySegments list'); - }); - eventSourceInstance.emitMessage(mySegmentsUpdateMessageNoPayload); - }, MILLIS_MY_SEGMENTS_UPDATE_EVENT_NO_PAYLOAD); // send a MY_SEGMENTS_UPDATE event with a new changeNumber after 0.4 seconds - setTimeout(() => { assert.equal(client.getTreatment('whitelist'), 'allowed', 'evaluation with not killed Split'); const onUpdateCb = () => { @@ -146,7 +137,7 @@ export function testSynchronization(fetchMock, assert) { otherClient = splitio.client(otherUserKey); setMockListener((eventSourceInstance) => { - const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_MjE0MTkxOTU2Mg%3D%3D_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolasAndMarcio.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, 
'/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolasAndMarcio.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; assert.equals(eventSourceInstance.url, expectedSSEurl, 'new EventSource URL is the expected'); /* events on second SSE connection */ @@ -154,41 +145,6 @@ export function testSynchronization(fetchMock, assert) { eventSourceInstance.emitOpen(); }, MILLIS_SECOND_SSE_OPEN - MILLIS_NEW_CLIENT); // open new SSE connection - setTimeout(() => { - assert.equal(otherClient.getTreatment('qc_team'), 'no', 'evaluation with initial MySegments list (shared client)'); - otherClient.once(otherClient.Event.SDK_UPDATE, () => { - const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_MY_SEGMENTS_UPDATE_WITH_PAYLOAD), 'SDK_UPDATE due to MY_SEGMENTS_UPDATE event (with payload)'); - assert.equal(otherClient.getTreatment('qc_team'), 'yes', 'evaluation with updated MySegments list (shared client)'); - }); - eventSourceInstance.emitMessage(mySegmentsUpdateMessageWithPayload); - }, MILLIS_MY_SEGMENTS_UPDATE_WITH_PAYLOAD - MILLIS_NEW_CLIENT); // send a MY_SEGMENTS_UPDATE event with payload after 0.1 seconds from new SSE connection opened - - setTimeout(() => { - assert.equal(otherClient.getTreatment('qc_team'), 'yes', 'evaluation with updated MySegments list (shared client)'); - otherClient.once(otherClient.Event.SDK_UPDATE, () => { - const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_MY_SEGMENTS_UPDATE_WITH_EMPTY_PAYLOAD), 'SDK_UPDATE due to MY_SEGMENTS_UPDATE event (with empty payload)'); - assert.equal(otherClient.getTreatment('qc_team'), 'no', 'evaluation with re-updated MySegments list (shared client)'); - }); - - // assert that user error on callback is an Uncaught Exception - otherClient.once(otherClient.Event.SDK_UPDATE, () => { - const previousErrorHandler = window.onerror; - const exceptionHandler = err => { - if (includes(err, 'willThrowFor')) { - assert.pass(`User error on SDK_UPDATE callback should throw as Uncaught Exception: ${err}`); - } else { - assert.fail(err); - } - window.onerror = previousErrorHandler; - }; - window.onerror = exceptionHandler; - null.willThrowForUpdate(); - }); - eventSourceInstance.emitMessage(mySegmentsUpdateMessageWithEmptyPayload); - }, MILLIS_MY_SEGMENTS_UPDATE_WITH_EMPTY_PAYLOAD - MILLIS_NEW_CLIENT); // send a MY_SEGMENTS_UPDATE event with payload after 0.1 seconds from new SSE connection opened - setTimeout(() => { keylistAddClient = splitio.client(keylistAddKey); keylistRemoveClient = splitio.client(keylistRemoveKey); @@ -200,17 +156,17 @@ export function testSynchronization(fetchMock, assert) { setTimeout(() => { eventSourceInstance.emitMessage(unboundedMessage); - }, MILLIS_UNBOUNDED_FETCH - MILLIS_MORE_CLIENTS); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_UNBOUNDED_FETCH - MILLIS_MORE_CLIENTS); setTimeout(() => { const malformedMessage = { ...boundedZlibMessage, data: boundedZlibMessage.data.replace('eJxiGAX4AMd', '').replace('1457552651000', '1457552650100') }; eventSourceInstance.emitMessage(malformedMessage); - }, MILLIS_BOUNDED_FALLBACK - MILLIS_MORE_CLIENTS); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_BOUNDED_FALLBACK - MILLIS_MORE_CLIENTS); setTimeout(() => { const malformedMessage = { ...keylistGzipMessage, data: keylistGzipMessage.data.replace('H4sIAAAAAAA', 
'').replace('1457552652000', '1457552650200') }; eventSourceInstance.emitMessage(malformedMessage); - }, MILLIS_KEYLIST_FALLBACK - MILLIS_MORE_CLIENTS); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_KEYLIST_FALLBACK - MILLIS_MORE_CLIENTS); setTimeout(() => { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'off', 'on', 'off'], 'evaluation before bounded fetch'); @@ -218,7 +174,7 @@ export function testSynchronization(fetchMock, assert) { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'off', 'on', 'on'], 'evaluation after bounded fetch'); }); eventSourceInstance.emitMessage(boundedZlibMessage); - }, MILLIS_BOUNDED - MILLIS_MORE_CLIENTS); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_BOUNDED - MILLIS_MORE_CLIENTS); setTimeout(() => { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'off', 'on', 'on'], 'evaluation before keylist message'); @@ -230,30 +186,56 @@ export function testSynchronization(fetchMock, assert) { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'on', 'off', 'on'], 'evaluation after keylist message (removed key)'); }); eventSourceInstance.emitMessage(keylistGzipMessage); - }, MILLIS_KEYLIST - MILLIS_MORE_CLIENTS); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_KEYLIST - MILLIS_MORE_CLIENTS); setTimeout(() => { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'on', 'off', 'on'], 'evaluation before segment removal'); bitmapTrueClient.once(bitmapTrueClient.Event.SDK_UPDATE, () => { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'off', 'off', 'off'], 'evaluation after segment removal'); + }); + + eventSourceInstance.emitMessage(segmentRemovalMessage); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_SEGMENT_REMOVAL - MILLIS_MORE_CLIENTS); + + setTimeout(() => { + assert.equal(client.getTreatment('in_large_segment'), 'no', 'evaluation before myLargeSegment fetch'); + + const timestampUnboundEvent = Date.now(); + + client.once(client.Event.SDK_UPDATE, () => { + assert.true(nearlyEqual(Date.now() - timestampUnboundEvent, EXPECTED_DELAY_AND_BACKOFF), 'SDK_UPDATE after fetching memberships with a delay'); + assert.equal(client.getTreatment('in_large_segment'), 'yes', 'evaluation after myLargeSegment fetch'); + }); + + eventSourceInstance.emitMessage(unboundedLSMessage); + }, MILLIS_MEMBERSHIPS_LS_UPDATE_UNBOUNDED_FETCH - MILLIS_MORE_CLIENTS); + + setTimeout(() => { + assert.equal(client.getTreatment('in_large_segment'), 'yes', 'evaluation before large segment removal'); + assert.deepEqual(sharedClients.map(c => c.getTreatment('in_large_segment')), ['no', 'no', 'no', 'no'], 'evaluation before large segment removal'); + + client.once(client.Event.SDK_UPDATE, () => { + assert.equal(client.getTreatment('in_large_segment'), 'no', 'evaluation after large segment removal'); // destroy shared clients and then main client Promise.all(sharedClients.map(c => c.destroy())) .then(() => { assert.equal(otherClient.getTreatment('whitelist'), 'control', 'evaluation returns control for shared client if it is destroyed'); - assert.equal(client.getTreatment('whitelist'), 'not_allowed', 'evaluation returns correct tratment for main client'); + assert.equal(client.getTreatment('whitelist'), 'not_allowed', 'evaluation returns correct treatment for main client'); assert.equal(eventSourceInstance.readyState, EventSourceMock.OPEN, 'streaming is still open'); client.destroy().then(() => { assert.equal(client.getTreatment('whitelist'), 'control', 'evaluation returns control for main 
client if it is destroyed'); assert.equal(eventSourceInstance.readyState, EventSourceMock.CLOSED, 'streaming is closed after destroy'); + + Backoff.__TEST__BASE_MILLIS = undefined; assert.end(); }); }); }); - eventSourceInstance.emitMessage(segmentRemovalMessage); - }, MILLIS_SEGMENT_REMOVAL - MILLIS_MORE_CLIENTS); + eventSourceInstance.emitMessage(segmentRemovalLSMessage); + }, MILLIS_MEMBERSHIPS_LS_UPDATE_SEGMENT_REMOVAL - MILLIS_MORE_CLIENTS); }); }, MILLIS_MORE_CLIENTS - MILLIS_NEW_CLIENT); @@ -265,7 +247,7 @@ export function testSynchronization(fetchMock, assert) { // initial auth let authParams = `users=${encodeURIComponent(userKey)}`; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&${authParams}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&${authParams}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth success'); return { status: 200, body: authPushEnabledNicolas }; @@ -273,7 +255,7 @@ export function testSynchronization(fetchMock, assert) { // reauth due to new client authParams += `&users=${encodeURIComponent(otherUserKey)}`; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&${authParams}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&${authParams}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('second auth success'); return { status: 200, body: authPushEnabledNicolasAndMarcio }; @@ -281,90 +263,87 @@ export function testSynchronization(fetchMock, assert) { // reauth due to more clients authParams += `&users=${encodeURIComponent(keylistAddKey)}&users=${encodeURIComponent(keylistRemoveKey)}&users=${encodeURIComponent(bitmapTrueKey)}`; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&${authParams}`), { status: 200, body: authPushEnabledNicolasAndMarcio }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&${authParams}`), { status: 200, body: authPushEnabledNicolasAndMarcio }); - // initial split and mySegments sync - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function (url, opts) { + // initial sync + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), function (url, opts) { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'initial sync'); if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); return { status: 200, body: splitChangesMock1 }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function (url, opts) { + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function (url, opts) { if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsNicolasMock1 }; + return { status: 200, body: membershipsNicolasMock2 }; }); - // split and segment sync after SSE opened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function (url, opts) { + // sync all after SSE opened + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function (url, opts) { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_SSE_OPEN), 'sync after SSE connection is opened'); if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), 
function (url, opts) { + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function (url, opts) { if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsNicolasMock1 }; + return { status: 200, body: membershipsNicolasMock2 }; }); // fetch due to SPLIT_UPDATE event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function (url, opts) { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function (url, opts) { if (!hasNoCacheHeader(opts)) assert.fail('request must include `Cache-Control` header'); return { status: 200, body: splitChangesMock3 }; }); - // fetch due to first MY_SEGMENTS_UPDATE event - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function (url, opts) { - if (!hasNoCacheHeader(opts)) assert.fail('request must include `Cache-Control` header'); - return { status: 200, body: mySegmentsNicolasMock2 }; - }); - // fetch due to SPLIT_KILL event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), function (url, opts) { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), function (url, opts) { if (!hasNoCacheHeader(opts)) assert.fail('request must include `Cache-Control` header'); assert.equal(client.getTreatment('whitelist'), 'not_allowed', 'evaluation with split killed immediately, before fetch is done'); return { status: 200, body: splitChangesMock4 }; }); - // initial fetch of mySegments for new client - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), function (url, opts) { + // initial fetch of memberships for new client + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), function (url, opts) { if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsMarcio }; + return { status: 200, body: membershipsMarcio }; }); - // split and mySegment sync after second SSE opened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552650000'), function (url, opts) { + // sync all after second SSE opened + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552650000'), function (url, opts) { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_SECOND_SSE_OPEN), 'sync after second SSE connection is opened'); if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); return { status: 200, body: { splits: [], since: 1457552650000, till: 1457552650000 } }; }); - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 2 }, function (url, opts) { + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 2 }, function (url, opts) { if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsNicolasMock2 }; + return { status: 200, body: membershipsNicolasMock2 }; }); - fetchMock.get({ url: url(settings, '/mySegments/marcio%40split.io'), repeat: 2 }, function (url, opts) { + fetchMock.get({ url: url(settings, '/memberships/marcio%40split.io'), repeat: 2 }, function (url, opts) { if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsMarcio }; + return { status: 200, body: membershipsMarcio }; }); - // 3 unbounded fetch requests - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 3 }, 
function (url, opts) { + + // 3 unbounded fetch for MEMBERSHIPS_MS_UPDATE + 1 unbounded fetch for MEMBERSHIPS_LS_UPDATE + fetchMock.get({ url: url(settings, '/memberships/marcio%40split.io'), repeat: 4 }, function (url, opts) { if (!hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsNicolasMock2 }; + return { status: 200, body: membershipsMarcio }; }); - fetchMock.get({ url: url(settings, '/mySegments/marcio%40split.io'), repeat: 3 }, function (url, opts) { + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 3 }, function (url, opts) { if (!hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsMarcio }; + return { status: 200, body: membershipsNicolasMock2 }; }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: { ms: { k: [{ n: 'developers' }, { n: 'engineers' }] }, ls: { k: [], cn: 1457552640000 } } }); // not target changeNumber + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: { ms: { k: [{ n: 'developers' }, { n: 'engineers' }] }, ls: { k: [{ n: 'employees' }, { n: 'splitters' }], cn: 1457552650000 } } }); // target changeNumber - // initial fetch of mySegments for other clients + sync after third SSE opened + 3 unbounded fetch requests - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552650000'), { status: 200, body: { splits: [], since: 1457552650000, till: 1457552650000 } }); - fetchMock.get({ url: url(settings, '/mySegments/key1'), repeat: 5 }, { status: 200, body: { mySegments: [] } }); - fetchMock.get({ url: url(settings, '/mySegments/key3'), repeat: 5 }, { status: 200, body: { mySegments: [{ name: 'splitters' }] } }); - fetchMock.get({ url: url(settings, `/mySegments/${bitmapTrueKey}`), repeat: 5 }, { status: 200, body: { mySegments: [] } }); + // initial fetch of memberships for other clients + sync all after third SSE opened + 3 unbounded fetch for MEMBERSHIPS_MS_UPDATE + 1 unbounded fetch for MEMBERSHIPS_LS_UPDATE + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552650000'), { status: 200, body: { splits: [], since: 1457552650000, till: 1457552650000 } }); + fetchMock.get({ url: url(settings, '/memberships/key1'), repeat: 6 }, { status: 200, body: { ms: {} } }); + fetchMock.get({ url: url(settings, '/memberships/key3'), repeat: 6 }, { status: 200, body: { ms: { k: [{ n: 'splitters' }] } } }); + fetchMock.get({ url: url(settings, `/memberships/${bitmapTrueKey}`), repeat: 5 }, { status: 200, body: { ms: { k: [] } } }); // bounded fetch request - fetchMock.get(url(settings, `/mySegments/${bitmapTrueKey}`), { status: 200, body: { mySegments: [{ name: 'splitters' }] } }); + fetchMock.get(url(settings, `/memberships/${bitmapTrueKey}`), { status: 200, body: { ms: { k: [{ n: 'splitters' }] } } }); fetchMock.get(new RegExp('.*'), function (url) { assert.fail('unexpected GET request with url: ' + url); diff --git a/src/__tests__/browserSuites/readiness.spec.js b/src/__tests__/browserSuites/readiness.spec.js index 3b98d5132..f81fafccd 100644 --- a/src/__tests__/browserSuites/readiness.spec.js +++ b/src/__tests__/browserSuites/readiness.spec.js @@ -2,9 +2,9 @@ import { SplitFactory } from '../../'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsNicolas from 
'../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolas from '../mocks/memberships.nicolas@split.io.json'; -// mocks for mySegments readiness tests +// mocks for memberships readiness tests import splitChangesStartWithoutSegmentsMock from '../mocks/splitchanges.real.json'; import splitChangesUpdateWithSegmentsMock from '../mocks/splitchanges.real.updateWithSegments.json'; import splitChangesUpdateWithoutSegmentsMock from '../mocks/splitchanges.real.updateWithoutSegments.json'; @@ -38,13 +38,13 @@ export default function (fetchMock, assert) { sdk: 'https://sdk.baseurl/readinessSuite1', events: 'https://events.baseurl/readinessSuite1' }; - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return new Promise((res) => { setTimeout(() => { res({ status: 200, body: splitChangesMock1, headers: {} }); }, requestTimeoutBeforeReady * 1000 + 50); }); }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise((res) => { setTimeout(() => { res({ status: 200, body: mySegmentsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise((res) => { setTimeout(() => { res({ status: 200, body: membershipsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); const splitio = SplitFactory({ ...baseConfig, urls: testUrls @@ -62,18 +62,18 @@ export default function (fetchMock, assert) { }); }); - assert.test(t => { // Timeout test, we have retries but mySegmnets takes too long + assert.test(t => { // Timeout test, we have retries but memberships takes too long const testUrls = { sdk: 'https://sdk.baseurl/readinessSuite2', events: 'https://events.baseurl/readinessSuite2' }; - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return new Promise((res) => { setTimeout(() => { res({ status: 200, body: splitChangesMock1, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise((res) => { setTimeout(() => { res({ status: 200, body: mySegmentsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 + 50); }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise((res) => { setTimeout(() => { res({ status: 200, body: membershipsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 + 50); }); }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); const splitio = SplitFactory({ ...baseConfig, urls: testUrls }); const client = splitio.client(); @@ -95,16 +95,16 @@ export default function (fetchMock, assert) { events: 'https://events.baseurl/readinessSuite3' }; - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return new Promise((res) => { 
setTimeout(() => { res({ status: 200, body: splitChangesMock1, headers: {} }); }, requestTimeoutBeforeReady * 1000 + 50); }); }); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return new Promise((res) => { setTimeout(() => { res({ status: 200, body: splitChangesMock1, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); // Faster, it should get ready on the retry. }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise((res) => { setTimeout(() => { res({ status: 200, body: mySegmentsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise((res) => { setTimeout(() => { res({ status: 200, body: membershipsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); const splitio = SplitFactory({ ...baseConfig, urls: testUrls }); const client = splitio.client(); @@ -119,40 +119,40 @@ export default function (fetchMock, assert) { }); }); - /************** Now we will validate the intelligent mySegments pausing, which requires lots of code. Related code below. **************/ + /************** Now we will validate the intelligent memberships pausing, which requires lots of code. Related code below. **************/ localStorage.clear(); - const mySegmentsEndpointDelay = 450; + const membershipsEndpointDelay = 450; function mockForSegmentsPauseTest(testUrls, startWithSegments = false) { - let mySegmentsHits = 0; + let membershipsHits = 0; - fetchMock.get(new RegExp(`${testUrls.sdk}/mySegments/nicolas\\d?%40split.io`), function () { // Mock any mySegments call, so we can test with multiple clients. - mySegmentsHits++; - return new Promise((res) => { setTimeout(() => { res({ status: 200, body: { mySegments: [] } }); }, mySegmentsEndpointDelay); }); + fetchMock.get(new RegExp(`${testUrls.sdk}/memberships/nicolas\\d?%40split.io`), function () { // Mock any memberships call, so we can test with multiple clients. + membershipsHits++; + return new Promise((res) => { setTimeout(() => { res({ status: 200, body: { ms: {} } }); }, membershipsEndpointDelay); }); }); // Now mock the no more updates state - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552669999', { status: 200, body: { splits: [], since: 1457552669999, till: 1457552669999 } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552669999', { status: 200, body: { splits: [], since: 1457552669999, till: 1457552669999 } }); if (startWithSegments) { // Adjust since and till so the order is inverted. 
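    // (Hedged aside on the memberships mock above: inferred from the mock bodies in this diff, the new
    // /memberships payload looks like
    //   { ms: { k: [{ n: 'developers' }] }, ls: { k: [{ n: 'employees' }], cn: 1457552650000 } }
    // where `ms` holds the standard ("my") segment memberships, `ls` the large segment memberships,
    // `k` the list of segments (each with a name `n`), and `cn` a changeNumber. It replaces the previous
    // `{ mySegments: [{ name: 'developers' }] }` body used by the /mySegments mocks. Field meanings are
    // the editor's reading of these mocks, not an official schema.)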
- fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: splitChangesStartWithSegmentsMock }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: { ...splitChangesUpdateWithoutSegmentsMock, since: 1457552620999, till: 1457552649999 } }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552649999', { status: 200, body: { ...splitChangesUpdateWithSegmentsMock, since: 1457552649999, till: 1457552669999 } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: splitChangesStartWithSegmentsMock }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: { ...splitChangesUpdateWithoutSegmentsMock, since: 1457552620999, till: 1457552649999 } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552649999', { status: 200, body: { ...splitChangesUpdateWithSegmentsMock, since: 1457552649999, till: 1457552669999 } }); } else { - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: splitChangesStartWithoutSegmentsMock }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesUpdateWithSegmentsMock }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552649999', { status: 200, body: splitChangesUpdateWithoutSegmentsMock }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: splitChangesStartWithoutSegmentsMock }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesUpdateWithSegmentsMock }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552649999', { status: 200, body: splitChangesUpdateWithoutSegmentsMock }); } - return () => mySegmentsHits; + return () => membershipsHits; } assert.test(t => { // Testing how the SDK pauses/resumes segments synchronization. const testUrls = { - sdk: 'https://sdk.baseurl/readinessMySegmentsSuite', - events: 'https://events.baseurl/readinessMySegmentsSuite' + sdk: 'https://sdk.baseurl/readinessMembershipsSuite', + events: 'https://events.baseurl/readinessMembershipsSuite' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); const start = Date.now(); const splitio = SplitFactory({ @@ -174,47 +174,42 @@ export default function (fetchMock, assert) { let readyCount = 0; client2.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; }); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'It should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'It should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; // create a client on a different event-loop tick than client and client2. 
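      // (Hedged aside: client3 is created only after SDK_READY, so it misses the first memberships round;
      // that is why the hit-count assertions in these suites read `N * CLIENTS_COUNT - 1`, roughly
      // syncRounds * clients minus the one round client3 was not yet around for. This is the editor's
      // reading of the assertion messages, not a statement about the SDK internals.)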
client3 = splitio.client('nicolas3@split.io'); client3.once(client3.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; }); setTimeout(() => { - t.equal(getMySegmentsHits(), 1 * CLIENTS_COUNT - 1, 'mySegments should had been hit once per client on the first attempt (excluding client3), but it stopped syncing afterwards.'); + t.equal(getMembershipsHits(), 1 * CLIENTS_COUNT - 1, 'memberships should had been hit once per client on the first attempt (excluding client3), but it stopped syncing afterwards.'); }, 2500); // Now we will wait until it picks up Splits, using the SDK_UPDATE event. Features are refreshed every 3s, but segments every 1s. client.once(client.Event.SDK_UPDATE, () => { - // This update came with segments, it should have tried to fetch mySegments for all used keys. + // This update came with segments, it should have tried to fetch memberships for all used keys. setTimeout(() => { - t.equal(getMySegmentsHits(), 2 * CLIENTS_COUNT - 1, 'It should have tried to synchronize mySegments as soon as it received a new Split with segments.'); + t.equal(getMembershipsHits(), 2 * CLIENTS_COUNT - 1, 'It should have tried to synchronize memberships as soon as it received a new Split with segments.'); }, 0); - setTimeout(() => { // Nasty ugly crap to avoid listening to the update coming from mySegment calls. + setTimeout(() => { // Nasty ugly code to avoid listening to the update coming from membership calls. client.once(client.Event.SDK_UPDATE, () => { setTimeout(() => { // This update left us in an state with no segments (removed the matcher we fetched on the previous one), it should stop the producer and not trigger more requests. - t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize mySegments periodically.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize memberships periodically.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT - 1, 'It should have not tried to synchronize segments again after the last update that left us in a no segment state.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT - 1, 'It should have not tried to synchronize segments again after the last update that left us in a no segment state.'); t.equal(readyCount, CLIENTS_COUNT, 'all clients must be ready'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); - + splitio.destroy().then(() => { t.end(); }); }, 10000); }, 0); }); @@ -229,10 +224,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing how the SDK pauses/resumes segments synchronization in localstorage from scratch (no SDK_READY_FROM_CACHE). 
const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite', - events: 'https://events.baseurl/readinessLSMySegmentsSuite' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite', + events: 'https://events.baseurl/readinessLSMembershipsSuite' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); const start = Date.now(); const splitio = SplitFactory({ @@ -258,47 +253,42 @@ export default function (fetchMock, assert) { let readyCount = 0; client2.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; }); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'It should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'It should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; // create a client on a different event-loop tick than client and client2. client3 = splitio.client('nicolas3@split.io'); client3.once(client3.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; }); setTimeout(() => { - t.equal(getMySegmentsHits(), 1 * CLIENTS_COUNT -1, 'mySegments should had been hit once per client on the first attempt (excluding client3), but it stopped syncing afterwards.'); + t.equal(getMembershipsHits(), 1 * CLIENTS_COUNT -1, 'memberships should had been hit once per client on the first attempt (excluding client3), but it stopped syncing afterwards.'); }, 2500); // Now we will wait until it picks up Splits, using the SDK_UPDATE event. Features are refreshed every 3s, but segments every 1s. client.once(client.Event.SDK_UPDATE, () => { - // This update came with segments, it should have tried to fetch mySegments for all used keys. + // This update came with segments, it should have tried to fetch memberships for all used keys. setTimeout(() => { - t.equal(getMySegmentsHits(), 2 * CLIENTS_COUNT - 1, 'It should have tried to synchronize mySegments as soon as it received a new Split with segments.'); + t.equal(getMembershipsHits(), 2 * CLIENTS_COUNT - 1, 'It should have tried to synchronize memberships as soon as it received a new Split with segments.'); }, 0); - setTimeout(() => { // Nasty ugly crap to avoid listening to the update coming from mySegment calls. + setTimeout(() => { // Nasty ugly code to avoid listening to the update coming from membership calls. client.once(client.Event.SDK_UPDATE, () => { setTimeout(() => { // This update left us in an state with no segments (removed the matcher we fetched on the previous one), it should stop the producer and not trigger more requests. 
- t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize mySegments periodically.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize memberships periodically.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT - 1, 'It should have not tried to synchronize segments again after the last update that left us in a no segment state.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT - 1, 'It should have not tried to synchronize segments again after the last update that left us in a no segment state.'); t.equal(readyCount, CLIENTS_COUNT, 'all clients must be ready'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); - + splitio.destroy().then(() => { t.end(); }); }, 10000); }, 0); }); @@ -313,10 +303,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing how the SDK pauses/resumes segments synchronization. const testUrls = { - sdk: 'https://sdk.baseurl/readinessMySegmentsSuite2', - events: 'https://events.baseurl/readinessMySegmentsSuite2' + sdk: 'https://sdk.baseurl/readinessMembershipsSuite2', + events: 'https://events.baseurl/readinessMembershipsSuite2' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, true); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, true); const start = Date.now(); const splitio = SplitFactory({ @@ -338,47 +328,42 @@ export default function (fetchMock, assert) { let readyCount = 0; client2.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'Shared client should not be ready without waiting for mySegments, as there are segments in the first splits payload.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'Shared client should not be ready without waiting for memberships, as there are segments in the first splits payload.'); readyCount++; }); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'It should not be ready without waiting for mySegments, as there are segments in the first splits payload.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'It should not be ready without waiting for memberships, as there are segments in the first splits payload.'); readyCount++; // create a client on a different event-loop tick than client and client2. client3 = splitio.client('nicolas3@split.io'); client3.once(client3.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'Shared client should not be ready without waiting for mySegments, as there are segments in the first splits payload.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'Shared client should not be ready without waiting for memberships, as there are segments in the first splits payload.'); readyCount++; }); setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT - 1, 'mySegments should had been hit once per client on the first attempt (excluding one for client3) and keep syncing afterwards.'); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT - 1, 'memberships should had been hit once per client on the first attempt (excluding one for client3) and keep syncing afterwards.'); }, 2500); // Now we will wait until it picks up splits, using the SDK_UPDATE event. Features are refreshed every 3s, but segments every 1s (plus sync time). client.once(client.Event.SDK_UPDATE, () => { // This update came without segments, it should not trigger an extra fetch. 
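        // (Hedged summary of what these pause/resume suites assert: /memberships polling appears to run only
        // while at least one cached feature flag references segments; it stops when an update leaves the cache
        // with no segment matchers, and it restarts with an immediate fetch as soon as segments reappear.
        // This is the editor's reading of the assertions and comments in this file, not documented SDK behavior.)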
setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT - 1, 'It should have stopped synchronizing mySegments since it transitioned to no segments state.'); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT - 1, 'It should have stopped synchronizing memberships since it transitioned to no segments state.'); }, 0); setTimeout(() => { client.once(client.Event.SDK_UPDATE, () => { setTimeout(() => { // This update left us in an state with segments again, it should trigger a request ASAP and restart the producer. - t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize mySegments periodically.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize memberships periodically.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 6 * CLIENTS_COUNT - 1, 'It should keep the producer synchronizing periodically..'); + t.equal(getMembershipsHits(), 6 * CLIENTS_COUNT - 1, 'It should keep the producer synchronizing periodically..'); t.equal(readyCount, CLIENTS_COUNT, 'all clients must be ready'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); - + splitio.destroy().then(() => { t.end(); }); }, 3000); }, 0); }); @@ -393,10 +378,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from scratch const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite2', - events: 'https://events.baseurl/readinessLSMySegmentsSuite2' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite2', + events: 'https://events.baseurl/readinessLSMembershipsSuite2' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, true); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, true); const start = Date.now(); const splitio = SplitFactory({ @@ -417,37 +402,32 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'It should not be ready without waiting for mySegments, as there are segments in the first splits payload.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'It should not be ready without waiting for memberships, as there are segments in the first splits payload.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt and keep syncing afterwards.'); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt and keep syncing afterwards.'); }, 2500); // Now we will wait until it picks up splits, using the SDK_UPDATE event. Features are refreshed every 3s, but segments every 1s (plus sync time). client.once(client.Event.SDK_UPDATE, () => { // This update came without segments, it should not trigger an extra fetch. 
setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT, 'It should have stopped synchronizing mySegments since it transitioned to no segments state.'); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT, 'It should have stopped synchronizing memberships since it transitioned to no segments state.'); }, 0); setTimeout(() => { client.once(client.Event.SDK_UPDATE, () => { setTimeout(() => { // This update left us in an state with segments again, it should trigger a request ASAP and restart the producer. - t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT, 'It should have tried to synchronize mySegments periodically.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT, 'It should have tried to synchronize memberships periodically.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 6 * CLIENTS_COUNT, 'It should keep the producer synchronizing periodically..'); - - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 6 * CLIENTS_COUNT, 'It should keep the producer synchronizing periodically..'); + splitio.destroy().then(() => { t.end(); }); }, 3000); }, 0); }); @@ -462,10 +442,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from scratch with segments being previously used const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite3', - events: 'https://events.baseurl/readinessLSMySegmentsSuite3' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite3', + events: 'https://events.baseurl/readinessLSMembershipsSuite3' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, true); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, true); const start = Date.now(); const splitio = SplitFactory({ @@ -486,19 +466,15 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'It should not be ready without waiting for mySegments, when we start from cache it might be stale.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'It should not be ready without waiting for memberships, when we start from cache it might be stale.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt and keep syncing afterwards.'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt and keep syncing afterwards.'); + splitio.destroy().then(() => { t.end(); }); }, 2500); }); client.once(client.Event.SDK_READY_TIMED_OUT, () => { @@ -509,10 +485,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from cache without segments being previously used, and first update has no segments. 
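    // (Hedged aside: the remaining localStorage suites vary two things, whether the cached flags referenced
    // segments and whether the first splitChanges update leaves segments referenced. Per their assertions,
    // readiness waits on /memberships, and polling continues, only while segments are still referenced after
    // that first update; this is the editor's reading of the test comments.)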
const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite4', - events: 'https://events.baseurl/readinessLSMySegmentsSuite4' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite4', + events: 'https://events.baseurl/readinessLSMembershipsSuite4' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); const start = Date.now(); const splitio = SplitFactory({ @@ -533,19 +509,15 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { t.ok(Date.now() - start < 50, 'It should be ready quickly, since it had no segments and update has no segments either.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 1 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt but stopped syncing afterwards'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 1 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt but stopped syncing afterwards'); + splitio.destroy().then(() => { t.end(); }); }, 4500); }); client.once(client.Event.SDK_READY_TIMED_OUT, () => { @@ -556,14 +528,14 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from cache without segments being previously used, and first update HAS segments. const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite5', - events: 'https://events.baseurl/readinessLSMySegmentsSuite5' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite5', + events: 'https://events.baseurl/readinessLSMembershipsSuite5' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); - // I'm having the first update of Splits come with segments. In this scenario it'll wait for mySegments to download before being ready. - fetchMock.get({ url: testUrls.sdk + '/splitChanges?s=1.1&since=1457552669999', overwriteRoutes: true }, { status: 200, body: { ...splitChangesUpdateWithSegmentsMock, since: 1457552669999, till: 1457552679999 } }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552679999', { status: 200, body: { splits: [], since: 1457552679999, till: 1457552679999 } }); + // I'm having the first update of Splits come with segments. In this scenario it'll wait for memberships to download before being ready. + fetchMock.get({ url: testUrls.sdk + '/splitChanges?s=1.2&since=1457552669999', overwriteRoutes: true }, { status: 200, body: { ...splitChangesUpdateWithSegmentsMock, since: 1457552669999, till: 1457552679999 } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552679999', { status: 200, body: { splits: [], since: 1457552679999, till: 1457552679999 } }); const start = Date.now(); const splitio = SplitFactory({ @@ -584,19 +556,15 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. 
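    // (Hedged aside: these suites no longer keep references to the shared clients and finish with a single
    // factory-level call, splitio.destroy().then(() => { t.end(); }), instead of the previous
    // Promise.all([client2.destroy(), client3.destroy(), client.destroy()]). The assumption, consistent with
    // the replaced blocks in this diff, is that the factory's destroy() resolves once every client it created
    // has been destroyed.)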
const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { const delay = Date.now() - start; - t.ok(delay >= mySegmentsEndpointDelay, 'It should not be ready without waiting for mySegments, when we start from cache it might be stale.'); + t.ok(delay >= membershipsEndpointDelay, 'It should not be ready without waiting for memberships, when we start from cache it might be stale.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt but stopped syncing afterwards'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt but stopped syncing afterwards'); + splitio.destroy().then(() => { t.end(); }); }, 3000); }); client.once(client.Event.SDK_READY_TIMED_OUT, () => { @@ -607,10 +575,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from cache with segments being previously used, and update is empty. const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite6', - events: 'https://events.baseurl/readinessLSMySegmentsSuite6' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite6', + events: 'https://events.baseurl/readinessLSMembershipsSuite6' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); const start = Date.now(); const splitio = SplitFactory({ @@ -631,19 +599,15 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. 
const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'It should not be ready without waiting for mySegments, when we start from cache it might be stale and we had segments even though the update has nothing.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'It should not be ready without waiting for memberships, when we start from cache it might be stale and we had segments even though the update has nothing.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt and kept syncing afterwards'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt and kept syncing afterwards'); + splitio.destroy().then(() => { t.end(); }); }, 3000); }); client.once(client.Event.SDK_READY_TIMED_OUT, () => { @@ -654,13 +618,13 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from cache with segments being previously used and first update removes segments const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite7', - events: 'https://events.baseurl/readinessLSMySegmentsSuite7' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite7', + events: 'https://events.baseurl/readinessLSMembershipsSuite7' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); - // I'm having the first update of Splits come without segments. In this scenario it'll NOT wait for mySegments to download before being ready. - fetchMock.get({ url: testUrls.sdk + '/splitChanges?s=1.1&since=1457552669999', overwriteRoutes: true }, { status: 200, body: { ...splitChangesUpdateWithoutSegmentsMock, since: 1457552669999, till: 1457552679999 } }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552679999', { status: 200, body: { splits: [], since: 1457552679999, till: 1457552679999 } }); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); + // I'm having the first update of Splits come without segments. In this scenario it'll NOT wait for memberships to download before being ready. + fetchMock.get({ url: testUrls.sdk + '/splitChanges?s=1.2&since=1457552669999', overwriteRoutes: true }, { status: 200, body: { ...splitChangesUpdateWithoutSegmentsMock, since: 1457552669999, till: 1457552679999 } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552679999', { status: 200, body: { splits: [], since: 1457552679999, till: 1457552679999 } }); const start = Date.now(); const splitio = SplitFactory({ @@ -681,19 +645,15 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. 
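    // (Hedged aside: the `s=1.1` to `s=1.2` change on every /splitChanges and /v2/auth querystring in these
    // suites appears to carry the bumped flags spec version. In the ready-from-cache.spec.js hunk further
    // below, the expected localStorage hash values change for the same reason: per the comments there, the
    // hash covers the SDK key, the filter query and the flags spec version, so conceptually something like
    // hash(sdkKey + filterQuery + flagsSpecVersion). The exact derivation is not shown in this diff and is
    // sketched here only as an assumption.)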
const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'It should be ready without waiting for mySegments, since when it downloads changes it will have no more use for them.'); + t.ok(Date.now() - start < 50, 'It should be ready without waiting for memberships, since when it downloads changes it will have no more use for them.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 1 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt and stopped syncing afterwards'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 1 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt and stopped syncing afterwards'); + splitio.destroy().then(() => { t.end(); }); }, 3000); }); client.once(client.Event.SDK_READY_TIMED_OUT, () => { diff --git a/src/__tests__/browserSuites/ready-from-cache.spec.js b/src/__tests__/browserSuites/ready-from-cache.spec.js index 70eb2cda9..b0ffd6706 100644 --- a/src/__tests__/browserSuites/ready-from-cache.spec.js +++ b/src/__tests__/browserSuites/ready-from-cache.spec.js @@ -3,7 +3,7 @@ import { SplitFactory } from '../../'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsNicolas from '../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolas from '../mocks/memberships.nicolas@split.io.json'; import { nearlyEqual } from '../testUtils'; @@ -83,8 +83,8 @@ const baseConfig = { streamingEnabled: false }; -const expectedHashNullFilter = '2a2c20bb'; // for SDK key '', filter query null, and flags spec version '1.1' -const expectedHashWithFilter = 'fdf7bd89'; // for SDK key '', filter query '&names=p1__split,p2__split', and flags spec version '1.1' +const expectedHashNullFilter = 'db8943b4'; // for SDK key '', filter query null, and flags spec version '1.2' +const expectedHashWithFilter = '7ccd6b31'; // for SDK key '', filter query '&names=p1__split,p2__split', and flags spec version '1.2' export default function (fetchMock, assert) { @@ -96,11 +96,11 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(3); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: splitChangesMock1 }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: mySegmentsNicolas }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas2%40split.io', { status: 200, body: { 'mySegments': [] } }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas3%40split.io', { status: 200, body: { 'mySegments': [] } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: splitChangesMock1 }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: membershipsNicolas }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas2%40split.io', { status: 200, body: { 'ms': {} } }); + fetchMock.get(testUrls.sdk + 
'/memberships/nicolas3%40split.io', { status: 200, body: { 'ms': {} } }); const splitio = SplitFactory({ ...baseConfig, @@ -147,18 +147,18 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(12 * 2 + 3); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=25', function () { + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=25', function () { return new Promise(res => { setTimeout(() => res({ status: 200, body: { ...splitChangesMock1, since: 25 }, headers: {} }), 200); }); // 400ms is how long it'll take to reply with Splits, no SDK_READY should be emitted before that. }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: mySegmentsNicolas, headers: {} }), 400); }); // First client gets segments before splits. No segment cache loading (yet) + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: membershipsNicolas, headers: {} }), 400); }); // First client gets segments before splits. No segment cache loading (yet) }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas2%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 700); }); // Second client gets segments after 700ms + fetchMock.get(testUrls.sdk + '/memberships/nicolas2%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 700); }); // Second client gets segments after 700ms }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas3%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 1000); }); // Third client mySegments will come after 1s + fetchMock.get(testUrls.sdk + '/memberships/nicolas3%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 1000); }); // Third client memberships will come after 1s }); fetchMock.postOnce(testUrls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(testUrls.events + '/testImpressions/count', 200); @@ -231,7 +231,7 @@ export default function (fetchMock, assert) { Promise.all([client3.destroy(), client2.destroy(), client.destroy()]).then(() => { t.equal(localStorage.getItem('some_user_item'), 'user_item', 'user items at localStorage must not be changed'); t.equal(localStorage.getItem('readyFromCache_2.SPLITIO.splits.till'), '1457552620999', 'splits.till must correspond to the till of the last successfully fetched Splits'); - t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_2.SPLITIO.splits.lastUpdated')), Date.now() - 800 /* 800 ms between last Split and MySegments fetch */), 'lastUpdated is added and must correspond to the timestamp of the last successfully fetched Splits'); + t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_2.SPLITIO.splits.lastUpdated')), Date.now() - 800 /* 800 ms between last splitChanges and memberships fetch */), 'lastUpdated is added and must correspond to the timestamp of the last successfully fetched Splits'); }); }); t.true(Date.now() - startTime >= 1000, 'It 
should emit SDK_READY too but after syncing with the cloud.'); @@ -239,11 +239,11 @@ export default function (fetchMock, assert) { }); client3.on(client3.Event.SDK_READY_TIMED_OUT, () => { client3.ready().catch(() => { - t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with memberships data from cache.'); }); - t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with memberships data from cache.'); }); }); @@ -255,21 +255,21 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(12 * 2 + 5); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=25', function () { + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=25', function () { t.equal(localStorage.getItem('readyFromCache_3.SPLITIO.split.always_on'), alwaysOnSplitInverted, 'feature flags must not be cleaned from cache'); return new Promise(res => { setTimeout(() => res({ status: 200, body: { ...splitChangesMock1, since: 25 }, headers: {} }), 200); }); // 400ms is how long it'll take to reply with Splits, no SDK_READY should be emitted before that. }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: mySegmentsNicolas, headers: {} }), 400); }); // First client gets segments before splits. No segment cache loading (yet) + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: membershipsNicolas, headers: {} }), 400); }); // First client gets segments before splits. 
No segment cache loading (yet) }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas2%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 700); }); // Second client gets segments after 700ms + fetchMock.get(testUrls.sdk + '/memberships/nicolas2%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 700); }); // Second client gets segments after 700ms }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas3%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 1000); }); // Third client mySegments will come after 1s + fetchMock.get(testUrls.sdk + '/memberships/nicolas3%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 1000); }); // Third client memberships will come after 1s }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas4%40split.io', { 'mySegments': [] }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas4%40split.io', { 'ms': {} }); fetchMock.postOnce(testUrls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(testUrls.events + '/testImpressions/count', 200); @@ -349,7 +349,7 @@ export default function (fetchMock, assert) { Promise.all([client3.destroy(), client2.destroy(), client.destroy()]).then(() => { t.equal(localStorage.getItem('some_user_item'), 'user_item', 'user items at localStorage must not be changed'); t.equal(localStorage.getItem('readyFromCache_3.SPLITIO.splits.till'), '1457552620999', 'splits.till must correspond to the till of the last successfully fetched Splits'); - t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_3.SPLITIO.splits.lastUpdated')), Date.now() - 800 /* 800 ms between last Split and MySegments fetch */), 'lastUpdated must correspond to the timestamp of the last successfully fetched Splits'); + t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_3.SPLITIO.splits.lastUpdated')), Date.now() - 800 /* 800 ms between last Split and memberships fetch */), 'lastUpdated must correspond to the timestamp of the last successfully fetched Splits'); }); }); t.true(Date.now() - startTime >= 1000, 'It should emit SDK_READY too but after syncing with the cloud.'); @@ -357,11 +357,11 @@ export default function (fetchMock, assert) { }); client3.on(client3.Event.SDK_READY_TIMED_OUT, () => { client3.ready().catch(() => { - t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with memberships data from cache.'); }); - t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with memberships data from cache.'); }); }); @@ -372,21 +372,21 @@ 
export default function (fetchMock, assert) { }; localStorage.clear(); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { t.equal(localStorage.getItem('some_user_item'), 'user_item', 'user items at localStorage must not be changed'); t.equal(localStorage.getItem('readyFromCache_4.SPLITIO.hash'), expectedHashNullFilter, 'storage hash must not be changed'); t.equal(localStorage.length, 2, 'feature flags cache data must be cleaned from localStorage'); return { status: 200, body: splitChangesMock1 }; }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: mySegmentsNicolas, headers: {} }), 400); }); // First client gets segments before splits. No segment cache loading (yet) + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: membershipsNicolas, headers: {} }), 400); }); // First client gets segments before splits. No segment cache loading (yet) }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas2%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 700); }); // Second client gets segments after 700ms + fetchMock.get(testUrls.sdk + '/memberships/nicolas2%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 700); }); // Second client gets segments after 700ms }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas3%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 1000); }); // Third client mySegments will come after 1s + fetchMock.get(testUrls.sdk + '/memberships/nicolas3%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 1000); }); // Third client memberships will come after 1s }); fetchMock.postOnce(testUrls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(testUrls.events + '/testImpressions/count', 200); @@ -460,7 +460,7 @@ export default function (fetchMock, assert) { Promise.all([client3.destroy(), client2.destroy(), client.destroy()]).then(() => { t.equal(localStorage.getItem('some_user_item'), 'user_item', 'user items at localStorage must not be changed'); t.equal(localStorage.getItem('readyFromCache_4.SPLITIO.splits.till'), '1457552620999', 'splits.till must correspond to the till of the last successfully fetched Splits'); - t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_4.SPLITIO.splits.lastUpdated')), Date.now() - 1000 /* 1000 ms between last Split and MySegments fetch */), 'lastUpdated must correspond to the timestamp of the last successfully fetched Splits'); + t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_4.SPLITIO.splits.lastUpdated')), Date.now() - 1000 /* 1000 ms between last Split and memberships fetch */), 'lastUpdated must correspond to the timestamp of the last successfully fetched Splits'); t.end(); }); @@ -470,11 +470,11 @@ export default function (fetchMock, assert) { }); 
client3.on(client3.Event.SDK_READY_TIMED_OUT, () => { client3.ready().catch(() => { - t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'control', 'It should not evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'control', 'It should not evaluate treatments with memberships data from cache.'); }); - t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'control', 'It should evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'control', 'It should evaluate treatments with memberships data from cache.'); }); }); @@ -488,9 +488,8 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(7); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1&names=p1__split,p2__split', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE - // fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999&names=p1__split', { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1&names=p1__split,p2__split', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); localStorage.setItem('some_user_item', 'user_item'); localStorage.setItem('readyFromCache_5.SPLITIO.splits.till', 25); @@ -541,8 +540,8 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(5); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1&names=p1__split,p2__split', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1&names=p1__split,p2__split', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); const splitio = SplitFactory({ ...baseConfig, @@ -585,10 +584,10 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(7); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=25&names=p2__split&prefixes=p1', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: 25, till: 1457552620999 } }, { delay: 10 }); // 
short delay to let emit SDK_READY_FROM_CACHE - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=25&names=p2__split&prefixes=p1', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: 25, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); - const expectedHash = getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=p2__split&prefixes=p1' }, flagSpecVersion: '1.1' } }); + const expectedHash = getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=p2__split&prefixes=p1' }, flagSpecVersion: '1.2' } }); localStorage.setItem('some_user_item', 'user_item'); localStorage.setItem('readyFromCache_6.SPLITIO.splits.till', 25); localStorage.setItem('readyFromCache_6.SPLITIO.split.p1__split', JSON.stringify(splitDeclarations.p1__split)); @@ -636,10 +635,10 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(6); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1&prefixes=p1,p2', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1&prefixes=p1,p2', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); - const expectedHash = getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&prefixes=p1,p2' }, flagSpecVersion: '1.1' } }); + const expectedHash = getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&prefixes=p1,p2' }, flagSpecVersion: '1.2' } }); localStorage.setItem('some_user_item', 'user_item'); localStorage.setItem('readyFromCache_7.SPLITIO.splits.till', 25); localStorage.setItem('readyFromCache_7.SPLITIO.split.p1__split', JSON.stringify(splitDeclarations.p1__split)); @@ -702,8 +701,8 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(7); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split, splitDeclarations.p3__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split, splitDeclarations.p3__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); localStorage.setItem('some_user_item', 'user_item'); localStorage.setItem('readyFromCache_8.SPLITIO.splits.till', 25); @@ -756,14 +755,14 @@ 
export default function (fetchMock, assert) { localStorage.clear(); t.plan(6); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1&names=no%20exist%20trim,no_exist,p3__split&prefixes=no%20exist%20trim,p2', { status: 200, body: { splits: [splitDeclarations.p2__split, splitDeclarations.p3__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1&names=no%20exist%20trim,no_exist,p3__split&prefixes=no%20exist%20trim,p2', { status: 200, body: { splits: [splitDeclarations.p2__split, splitDeclarations.p3__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); localStorage.setItem('some_user_item', 'user_item'); localStorage.setItem('readyFromCache_9.SPLITIO.splits.till', 25); localStorage.setItem('readyFromCache_9.SPLITIO.split.p1__split', JSON.stringify(splitDeclarations.p1__split)); localStorage.setItem('readyFromCache_9.SPLITIO.split.p2__split', JSON.stringify(splitDeclarations.p2__split)); - localStorage.setItem('readyFromCache_9.SPLITIO.hash', getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=p2__split&prefixes=p1' }, flagSpecVersion: '1.1' } })); + localStorage.setItem('readyFromCache_9.SPLITIO.hash', getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=p2__split&prefixes=p1' }, flagSpecVersion: '1.2' } })); const splitio = SplitFactory({ ...baseConfig, @@ -788,7 +787,7 @@ export default function (fetchMock, assert) { t.equal(localStorage.getItem('readyFromCache_9.SPLITIO.splits.till'), '1457552620999', 'splits.till must correspond to the till of the last successfully fetched Splits'); t.equal(localStorage.getItem('readyFromCache_9.SPLITIO.split.p2__split'), JSON.stringify(splitDeclarations.p2__split), 'feature flag declarations must be cached'); t.equal(localStorage.getItem('readyFromCache_9.SPLITIO.split.p3__split'), JSON.stringify(splitDeclarations.p3__split), 'feature flag declarations must be cached'); - t.equal(localStorage.getItem('readyFromCache_9.SPLITIO.hash'), getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=no%20exist%20trim,no_exist,p3__split&prefixes=no%20exist%20trim,p2' }, flagSpecVersion: '1.1' } }), 'Storage hash must correspond to the split filter query and SDK key'); + t.equal(localStorage.getItem('readyFromCache_9.SPLITIO.hash'), getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=no%20exist%20trim,no_exist,p3__split&prefixes=no%20exist%20trim,p2' }, flagSpecVersion: '1.2' } }), 'Storage hash must correspond to the split filter query and SDK key'); t.end(); }); }); diff --git a/src/__tests__/browserSuites/ready-promise.spec.js b/src/__tests__/browserSuites/ready-promise.spec.js index 56ac7922e..cb13c8107 100644 --- a/src/__tests__/browserSuites/ready-promise.spec.js +++ b/src/__tests__/browserSuites/ready-promise.spec.js @@ -13,7 +13,7 @@ const consoleSpy = { import { SplitFactory } from '../../'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; const 
baseConfig = { core: { @@ -59,9 +59,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' in both attempts - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -107,9 +107,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' only for the first attempt - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -157,9 +157,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' only for the first attempt - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + 
fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -226,16 +226,16 @@ export default function readyPromiseAssertions(fetchMock, assert) { config.scheduler.featuresRefreshRate) - config.startup.readyTimeout) + refreshTimeMillis; // /splitChanges takes longer than 'requestTimeoutBeforeReady' in both initial attempts - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: refreshTimeMillis }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: refreshTimeMillis }); // main client endpoint configured to fetch segments before request timeout - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/splitChanges?s=1.1&since=1457552620999', { splits: [], since: 1457552620999, till: 1457552620999 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/splitChanges?s=1.2&since=1457552620999', { splits: [], since: 1457552620999, till: 1457552620999 }); // shared client endpoint configured to fetch segments immediately, in order to emit SDK_READY as soon as splits arrives - fetchMock.get(config.urls.sdk + '/mySegments/nicolas%40split.io', mySegmentsFacundo); + fetchMock.get(config.urls.sdk + '/memberships/nicolas%40split.io', membershipsFacundo); // shared client endpoint configured to emit SDK_READY_TIMED_OUT - fetchMock.get(config.urls.sdk + '/mySegments/emiliano%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.readyTimeout) + 20 }); + fetchMock.get(config.urls.sdk + '/memberships/emiliano%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.readyTimeout) + 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -264,7 +264,7 @@ export default function readyPromiseAssertions(fetchMock, assert) { const timeoutClient = splitio.client('emiliano@split.io'); timeoutClient.ready().then(undefined, () => { // setting onRejected handler via `then` method - t.pass('### Shared client TIMED OUT - promise rejected since mySegments fetch took more time than readyTimeout'); + t.pass('### Shared client 
TIMED OUT - promise rejected since memberships fetch took more time than readyTimeout'); timeoutClient.ready().catch(() => { // setting onRejected handler via `catch` method t.pass('### Shared client TIMED OUT - promise keeps being rejected'); timeoutClient.on(timeoutClient.Event.SDK_READY, () => { @@ -310,8 +310,8 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' - fetchMock.get(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -359,9 +359,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { } }; - // Both /splitChanges and /mySegments take less than 'requestTimeoutBeforeReady' - fetchMock.get(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + // Both /splitChanges and /memberships take less than 'requestTimeoutBeforeReady' + fetchMock.get(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -407,9 +407,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' only for the first attempt - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); 
fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -496,11 +496,11 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' only for the first attempt - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/nicolas%40split.io', mySegmentsFacundo); - fetchMock.get(config.urls.sdk + '/mySegments/emiliano%40split.io', mySegmentsFacundo); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/nicolas%40split.io', membershipsFacundo); + fetchMock.get(config.urls.sdk + '/memberships/emiliano%40split.io', membershipsFacundo); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -561,7 +561,7 @@ export default function readyPromiseAssertions(fetchMock, assert) { }); }, 0); }); - }, fromSecondsToMillis(0.2)); + }, fromSecondsToMillis(0.25)); }, 'Validate that warning messages are properly sent'); @@ -581,9 +581,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' - fetchMock.get(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/nicolas%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/nicolas%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); diff --git a/src/__tests__/browserSuites/shared-instantiation.spec.js b/src/__tests__/browserSuites/shared-instantiation.spec.js index dddbe9fc6..9f9423ac9 100644 --- 
a/src/__tests__/browserSuites/shared-instantiation.spec.js +++ b/src/__tests__/browserSuites/shared-instantiation.spec.js @@ -14,9 +14,9 @@ const settings = settingsFactory({ * @param {boolean} sdkIgnoredTT whether the SDK ignores TT (i.e, clients without bound TT) or not (client with optional bound TT) */ export default function sharedInstantiationSuite(startWithTT, sdkIgnoresTT, fetchMock, assert) { - // mocking mySegments endpoints with delays for new clients - fetchMock.get(url(settings, '/mySegments/emiliano%2Fsplit.io'), { status: 200, body: { mySegments: [] } }, { delay: 100 }); - fetchMock.get(url(settings, '/mySegments/matias%25split.io'), { status: 200, body: { mySegments: [] } }, { delay: 200 }); + // mocking memberships endpoints with delays for new clients + fetchMock.get(url(settings, '/memberships/emiliano%2Fsplit.io'), { status: 200, body: { ms: {} } }, { delay: 100 }); + fetchMock.get(url(settings, '/memberships/matias%25split.io'), { status: 200, body: { ms: {} } }, { delay: 200 }); const factory = SplitFactory({ core: { diff --git a/src/__tests__/browserSuites/single-sync.spec.js b/src/__tests__/browserSuites/single-sync.spec.js index 19e46db52..a2d7c16a9 100644 --- a/src/__tests__/browserSuites/single-sync.spec.js +++ b/src/__tests__/browserSuites/single-sync.spec.js @@ -4,7 +4,7 @@ import { url } from '../testUtils'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsNicolasMock2 from '../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolasMock2 from '../mocks/memberships.nicolas@split.io.json'; const baseUrls = { sdk: 'https://sdk.single-sync/api', @@ -36,7 +36,7 @@ const settings = settingsFactory(config); export default function singleSync(fetchMock, assert) { - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), function () { assert.pass('first splitChanges fetch'); return { status: 200, body: splitChangesMock1 }; }); @@ -45,13 +45,13 @@ export default function singleSync(fetchMock, assert) { return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function () { - assert.pass('first mySegments fetch'); - return { status: 200, body: mySegmentsNicolasMock2 }; + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function () { + assert.pass('first memberships fetch'); + return { status: 200, body: membershipsNicolasMock2 }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function () { - assert.fail('mySegments should not be called again'); - return { status: 200, body: mySegmentsNicolasMock2 }; + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function () { + assert.fail('memberships should not be called again'); + return { status: 200, body: membershipsNicolasMock2 }; }); let splitio, client = false; diff --git a/src/__tests__/browserSuites/telemetry.spec.js b/src/__tests__/browserSuites/telemetry.spec.js index d1f77d129..5ed7c331d 100644 --- a/src/__tests__/browserSuites/telemetry.spec.js +++ b/src/__tests__/browserSuites/telemetry.spec.js @@ -33,10 +33,10 @@ const SplitFactoryForTest = (config) => { export default async function telemetryBrowserSuite(fetchMock, t) { t.test(async (assert) => { - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=-1', 500); - fetchMock.getOnce(baseUrls.sdk + 
'/splitChanges?s=1.1&since=-1', { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(baseUrls.sdk + '/mySegments/user-key', 500); - fetchMock.getOnce(baseUrls.sdk + '/mySegments/user-key', { status: 200, body: { 'mySegments': [ 'one_segment'] } }); + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=-1', 500); + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(baseUrls.sdk + '/memberships/user-key', 500); + fetchMock.getOnce(baseUrls.sdk + '/memberships/user-key', { status: 200, body: { 'ms': { k: [{ n: 'one_segment' }] } } }); // We need to handle all requests properly fetchMock.postOnce(baseUrls.events + '/testImpressions/bulk', 200); @@ -59,13 +59,13 @@ export default async function telemetryBrowserSuite(fetchMock, t) { const data = JSON.parse(opts.body); // Validate last successful sync - assert.deepEqual(Object.keys(data.lS), ['ms', 'sp', 'te'], 'Successful splitChanges, mySegments and metrics/config requests'); + assert.deepEqual(Object.keys(data.lS), ['ms', 'sp', 'te'], 'Successful splitChanges, memberships and metrics/config requests'); lastSync = data.lS; delete data.lS; // Validate http and method latencies const getLatencyCount = buckets => buckets ? buckets.reduce((accum, entry) => accum + entry, 0) : 0; assert.equal(getLatencyCount(data.hL.sp), 2, 'Two latency metrics for splitChanges GET request'); - assert.equal(getLatencyCount(data.hL.ms), 2, 'Two latency metrics for mySegments GET request'); + assert.equal(getLatencyCount(data.hL.ms), 2, 'Two latency metrics for memberships GET request'); assert.equal(getLatencyCount(data.hL.te), 1, 'One latency metric for telemetry config POST request'); assert.equal(getLatencyCount(data.mL.t), 2, 'Two latency metrics for getTreatment (one not ready usage'); assert.equal(getLatencyCount(data.mL.ts), 1, 'One latency metric for getTreatments'); @@ -76,7 +76,7 @@ export default async function telemetryBrowserSuite(fetchMock, t) { // @TODO check if iDe value is correct assert.deepEqual(data, { - mE: {}, hE: { sp: { 500: 1 }, ms: { 500: 1 } }, tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 31, seC: 1, skC: 1, eQ: 1, eD: 0, sE: [], t: [], ufs: { sp: 0, ms: 0 } + mE: {}, hE: { sp: { 500: 1 }, ms: { 500: 1 } }, tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 32, seC: 1, skC: 1, eQ: 1, eD: 0, sE: [], t: [], ufs: {} }, 'metrics/usage JSON payload should be the expected'); finish.next(); @@ -96,7 +96,7 @@ export default async function telemetryBrowserSuite(fetchMock, t) { // @TODO check if iDe value is correct assert.deepEqual(data, { mL: {}, mE: {}, hE: {}, hL: {}, // errors and latencies were popped - tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 31, seC: 1, skC: 1, eQ: 1, eD: 0, sE: [], t: [], ufs: { sp: 0, ms: 0 } + tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 32, seC: 1, skC: 1, eQ: 1, eD: 0, sE: [], t: [], ufs: {} }, '2nd metrics/usage JSON payload should be the expected'); return 200; }); @@ -111,7 +111,7 @@ export default async function telemetryBrowserSuite(fetchMock, t) { oM: 0, st: 'memory', aF: 1, rF: 0, sE: false, rR: { sp: 99999, ms: 60, im: 300, ev: 60, te: 1 } /* override featuresRefreshRate */, uO: { s: true, e: true, a: false, st: false, t: true } /* override sdk, events and telemetry URLs */, - iQ: 30000, eQ: 500, iM: 0, iL: false, hP: false, nR: 1 /* 1 non ready usage */, t: [], i: [], uC: 2 /* Default GRANTED */, + iQ: 30000, eQ: 500, iM: 0, iL: false, hP: false, nR: 1 /* 1 non ready usage */, t: [], uC: 2 /* Default GRANTED */, fsT: 0, fsI: 0 }, 
'metrics/config JSON payload should be the expected'); @@ -187,8 +187,8 @@ export default async function telemetryBrowserSuite(fetchMock, t) { let factory; const splitFilters = [{ type: 'bySet', values: ['a', '_b', 'a', 'a', 'c', 'd', '_d'] }]; - fetchMock.get(baseUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { 'mySegments': [] } }); - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=-1&sets=a,c,d', { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.get(baseUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { 'ms': {} } }); + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=-1&sets=a,c,d', { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); fetchMock.postOnce(baseUrls.telemetry + '/v1/metrics/config', (url, opts) => { const data = JSON.parse(opts.body); @@ -202,10 +202,10 @@ export default async function telemetryBrowserSuite(fetchMock, t) { fetchMock.postOnce(baseUrls.telemetry + '/v1/metrics/usage', (url, opts) => { const data = JSON.parse(opts.body); - assert.deepEqual(data.mL.tf, [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], 'Latencies stats'); - assert.deepEqual(data.mL.tfs, [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], 'Latencies stats'); - assert.deepEqual(data.mL.tcf, [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], 'Latencies stats'); - assert.deepEqual(data.mL.tcfs, [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], 'Latencies stats'); + assert.deepEqual(data.mL.tf, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'Latencies stats'); + assert.deepEqual(data.mL.tfs, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'Latencies stats'); + assert.deepEqual(data.mL.tcf, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'Latencies stats'); + assert.deepEqual(data.mL.tcfs, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'Latencies stats'); factory.client().destroy().then(() => { assert.end(); @@ -213,14 +213,14 @@ export default async function telemetryBrowserSuite(fetchMock, t) { return 200; }); - fetchMock.postOnce(baseUrls.telemetry + '/v1/metrics/usage', 200); + fetchMock.postOnce(baseUrls.telemetry + '/v1/metrics/usage', 200); - factory = SplitFactoryForTest({...baseConfig, sync: {splitFilters}}); + factory = SplitFactoryForTest({ ...baseConfig, sync: { splitFilters } }); const client = factory.client(); - assert.deepEqual(client.getTreatmentsByFlagSet('a'),[]); - assert.deepEqual(client.getTreatmentsByFlagSets(['a']),[]); - assert.deepEqual(client.getTreatmentsWithConfigByFlagSet('a'),[]); - assert.deepEqual(client.getTreatmentsWithConfigByFlagSets(['a']),[]); + assert.deepEqual(client.getTreatmentsByFlagSet('a'), []); + assert.deepEqual(client.getTreatmentsByFlagSets(['a']), []); + assert.deepEqual(client.getTreatmentsWithConfigByFlagSet('a'), []); + assert.deepEqual(client.getTreatmentsWithConfigByFlagSets(['a']), []); }, 'SDK with sets configured has sets information in config POST and evaluation by sets telemetry in stats POST'); diff --git a/src/__tests__/browserSuites/use-beacon-api.debug.spec.js b/src/__tests__/browserSuites/use-beacon-api.debug.spec.js index 2dede6e28..7de576e6c 100644 --- a/src/__tests__/browserSuites/use-beacon-api.debug.spec.js +++ b/src/__tests__/browserSuites/use-beacon-api.debug.spec.js @@ -2,7 +2,7 @@ import sinon from 'sinon'; import { SplitFactory } from '../../'; import { settingsFactory } from '../../settings'; import 
splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; import { DEBUG } from '@splitsoftware/splitio-commons/src/utils/constants'; import { url } from '../testUtils'; import { triggerPagehideEvent, triggerVisibilitychange } from '../testUtils/browser'; @@ -66,9 +66,9 @@ function beaconApiNotSendTestDebug(fetchMock, assert) { sendBeaconSpyDebug = sinon.spy(window.navigator, 'sendBeacon'); // Mocking this specific route to make sure we only get the items we want to test from the handlers. - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); // Init and run Split client const splitio = SplitFactory(config); diff --git a/src/__tests__/browserSuites/use-beacon-api.spec.js b/src/__tests__/browserSuites/use-beacon-api.spec.js index 9065cfd61..bde426077 100644 --- a/src/__tests__/browserSuites/use-beacon-api.spec.js +++ b/src/__tests__/browserSuites/use-beacon-api.spec.js @@ -2,7 +2,7 @@ import sinon from 'sinon'; import { SplitFactory } from '../../'; import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; import { url } from '../testUtils'; import { OPTIMIZED } from '@splitsoftware/splitio-commons/src/utils/constants'; import { triggerPagehideEvent, triggerVisibilitychange } from '../testUtils/browser'; @@ -78,9 +78,9 @@ function beaconApiNotSendTest(fetchMock, assert) { sendBeaconSpy = sinon.spy(window.navigator, 'sendBeacon'); // Mocking this specific route to make sure we only get the items we want to test from the handlers. 
- fetchMock.get(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); // Init and run Split client const splitio = SplitFactory(config); diff --git a/src/__tests__/browserSuites/user-consent.spec.js b/src/__tests__/browserSuites/user-consent.spec.js index 024fcb44d..ed1caba2c 100644 --- a/src/__tests__/browserSuites/user-consent.spec.js +++ b/src/__tests__/browserSuites/user-consent.spec.js @@ -72,7 +72,7 @@ function mockSubmittersRequests(fetchMock, assert, impressionFeature, eventTypeI export default function userConsent(fetchMock, t) { - // Validate trackers, submitters and browser listener behaviour on different consent status transitions + // Validate trackers, submitters and browser listener behavior on different consent status transitions t.test(async (assert) => { const sendBeaconSpy = sinon.spy(window.navigator, 'sendBeacon'); let expectedTrackedImpressions = 0; @@ -97,7 +97,7 @@ export default function userConsent(fetchMock, t) { ], ['on', 'on', 'on', 'on', 'on', 'on', 'on', 'on'], 'evaluating on SDK ready'); if (isTracking) expectedTrackedImpressions += 8; - // Trigger pagehide event to validate browser listener behaviour + // Trigger pagehide event to validate browser listener behavior // Beacon API is used only if user consent is GRANTED triggerPagehideEvent(); if (factory.UserConsent.getStatus() === factory.UserConsent.Status.GRANTED) { @@ -137,9 +137,9 @@ export default function userConsent(fetchMock, t) { assert.equal(trackedImpressions.length, expectedTrackedImpressions, 'Tracked impressions are the expected'); sendBeaconSpy.restore(); assert.end(); - }, 'Validate trackers, submitters and browser listener behaviour on different consent status transitions'); + }, 'Validate trackers, submitters and browser listener behavior on different consent status transitions'); - // Validate submitter's behaviour with full queues and with events first push window + // Validate submitter's behavior with full queues and with events first push window t.test(async (assert) => { const config = { ...baseConfig, @@ -182,6 +182,6 @@ export default function userConsent(fetchMock, t) { await client.destroy(); assert.end(); - }, 'Validate submitter\'s behaviour with full queues and with events first push window'); + }, 'Validate submitter\'s behavior with full queues and with events first push window'); } diff --git a/src/__tests__/destroy/browser.spec.js b/src/__tests__/destroy/browser.spec.js index 5cbf88fa5..88afc744d 100644 --- a/src/__tests__/destroy/browser.spec.js +++ b/src/__tests__/destroy/browser.spec.js @@ -8,7 +8,7 @@ import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitChanges.since.-1.till.1500492097547.json'; import splitChangesMock2 from '../mocks/splitChanges.since.1500492097547.json'; -import mySegmentsMock from '../mocks/mySegmentsEmpty.json'; +import membershipsMock from 
'../mocks/membershipsEmpty.json'; import impressionsMock from '../mocks/impressions.json'; const settings = settingsFactory({ @@ -18,12 +18,12 @@ const settings = settingsFactory({ streamingEnabled: false }); -fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); -fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1500492097547'), { status: 200, body: splitChangesMock2 }); -fetchMock.getOnce(url(settings, '/mySegments/ut1'), { status: 200, body: mySegmentsMock }); -fetchMock.getOnce(url(settings, '/mySegments/ut2'), { status: 200, body: mySegmentsMock }); -fetchMock.getOnce(url(settings, '/mySegments/ut3'), { status: 200, body: mySegmentsMock }); -fetchMock.getOnce(url(settings, '/mySegments/ut4'), { status: 200, body: mySegmentsMock }); +fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); +fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1500492097547'), { status: 200, body: splitChangesMock2 }); +fetchMock.getOnce(url(settings, '/memberships/ut1'), { status: 200, body: membershipsMock }); +fetchMock.getOnce(url(settings, '/memberships/ut2'), { status: 200, body: membershipsMock }); +fetchMock.getOnce(url(settings, '/memberships/ut3'), { status: 200, body: membershipsMock }); +fetchMock.getOnce(url(settings, '/memberships/ut4'), { status: 200, body: membershipsMock }); fetchMock.postOnce(url(settings, '/v1/metrics/config'), 200); // 0.1% sample rate tape('SDK destroy for BrowserJS', async function (assert) { diff --git a/src/__tests__/errorCatching/browser.spec.js b/src/__tests__/errorCatching/browser.spec.js index c5719ed3b..4ad0c8222 100644 --- a/src/__tests__/errorCatching/browser.spec.js +++ b/src/__tests__/errorCatching/browser.spec.js @@ -5,7 +5,7 @@ import includes from 'lodash/includes'; import fetchMock from '../testUtils/fetchMock'; import { url } from '../testUtils'; import splitChangesMock1 from '../mocks/splitChanges.since.-1.till.1500492097547.json'; -import mySegmentsMock from '../mocks/mySegmentsEmpty.json'; +import membershipsMock from '../mocks/membershipsEmpty.json'; import splitChangesMock2 from '../mocks/splitChanges.since.1500492097547.till.1500492297547.json'; import splitChangesMock3 from '../mocks/splitChanges.since.1500492297547.json'; import { SplitFactory } from '../../'; @@ -21,14 +21,14 @@ const settings = settingsFactory({ // prepare localstorage to emit SDK_READY_FROM_CACHE localStorage.clear(); localStorage.setItem('SPLITIO.splits.till', 25); -localStorage.setItem('SPLITIO.hash', getStorageHash({ core: { authorizationKey: '' }, sync: { __splitFiltersValidation: { queryString: null }, flagSpecVersion: '1.1' } })); +localStorage.setItem('SPLITIO.hash', getStorageHash({ core: { authorizationKey: '' }, sync: { __splitFiltersValidation: { queryString: null }, flagSpecVersion: '1.2' } })); -fetchMock.get(url(settings, '/splitChanges?s=1.1&since=25'), function () { +fetchMock.get(url(settings, '/splitChanges?s=1.2&since=25'), function () { return new Promise((res) => { setTimeout(() => res({ status: 200, body: splitChangesMock1 }), 1000); }); }); -fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1500492097547'), { status: 200, body: splitChangesMock2 }); -fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1500492297547'), { status: 200, body: splitChangesMock3 }); -fetchMock.get(url(settings, '/mySegments/nico%40split.io'), { status: 200, body: mySegmentsMock }); +fetchMock.get(url(settings, 
'/splitChanges?s=1.2&since=1500492097547'), { status: 200, body: splitChangesMock2 }); +fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1500492297547'), { status: 200, body: splitChangesMock3 }); +fetchMock.get(url(settings, '/memberships/nico%40split.io'), { status: 200, body: membershipsMock }); fetchMock.post('*', 200); const assertionsPlanned = 4; diff --git a/src/__tests__/gaIntegration/both-integrations.spec.js b/src/__tests__/gaIntegration/both-integrations.spec.js deleted file mode 100644 index 17b4ae736..000000000 --- a/src/__tests__/gaIntegration/both-integrations.spec.js +++ /dev/null @@ -1,138 +0,0 @@ -import { SplitFactory } from '../../'; -import { settingsFactory } from '../../settings'; -import { gaSpy, gaTag } from './gaTestUtils'; -import includes from 'lodash/includes'; -import { DEBUG } from '@splitsoftware/splitio-commons/src/utils/constants'; -import { url } from '../testUtils'; - -function countImpressions(parsedImpressionsBulkPayload) { - return parsedImpressionsBulkPayload - .reduce((accumulator, currentValue) => { return accumulator + currentValue.i.length; }, 0); -} - -const config = { - core: { - key: 'facundo@split.io', - trafficType: 'user', - }, - integrations: [{ - type: 'GOOGLE_ANALYTICS_TO_SPLIT', - }, { - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - }], - streamingEnabled: false, - sync: { - impressionsMode: DEBUG, - } -}; -const settings = settingsFactory(config); - -export default function (fetchMock, assert) { - - let client; - - // test default behavior of both integrations - assert.test(t => { - const customHits = [{ hitType: 'pageview' }, { hitType: 'event' }]; - - /* [eventType] */ - const splitTrackParams = [['some_event'], ['other_event'], ['another_event']]; - /* [splitName] */ - const splitGetTreatmentParams = [['hierarchical_splits_test']]; - - // Generator to synchronize the call of t.end() when both impressions and events endpoints were invoked. - const finish = (function* () { - yield; - const totalHits = customHits.length + splitTrackParams.length + splitGetTreatmentParams.length; - - t.equal(window.gaSpy.getHits().length, totalHits, 'Total hits'); - setTimeout(() => { - client.destroy(); - t.end(); - }, 0); - })(); - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - // we can assert payload and ga hits, once ga is ready and after `SplitToGa.queue`, that is timeout wrapped, make to the queue stack. 
- window.ga(() => { - setTimeout(() => { - try { - const resp = JSON.parse(opts.body); - const numberOfSentImpressions = countImpressions(resp); - const sentImpressionHits = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-impression'); - - t.equal(numberOfSentImpressions, splitGetTreatmentParams.length, 'Number of impressions'); - t.equal(sentImpressionHits.length, splitGetTreatmentParams.length, `Number of sent impression hits must be equal to the number of impressions (${splitGetTreatmentParams.length})`); - - finish.next(); - } catch (err) { - console.error(err); - } - }); - }); - return 200; - }); - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - window.ga(() => { - setTimeout(() => { - try { - const sentEvents = JSON.parse(opts.body); - const sentEventsFromSplitToGa = sentEvents.filter(event => { - return event.properties && event.properties.eventCategory && includes(event.properties.eventCategory, 'split'); - }); - - t.equal(sentEvents.length, splitTrackParams.length + customHits.length, 'Number of sent events is equal to custom events plus hits tracked as events'); - t.equal(sentEventsFromSplitToGa.length, 0, 'GA hits comming from Split-to-GA integration must not be tracked again as Split events'); - - const sentHitsNoSplitData = window.gaSpy.getHits().filter(hit => !hit.eventCategory || !includes(hit.eventCategory, 'split')); - const sentHitsSplitEvents = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-event'); - - t.equal(sentHitsNoSplitData.length, customHits.length, 'Number of custom hits'); - t.equal(sentHitsSplitEvents.length, splitTrackParams.length, 'Number of Split event hits'); - finish.next(); - } catch (err) { - console.error(err); - } - }); - }); - return 200; - }); - - gaTag(); - - // siteSpeedSampleRate set to 0 to never send a site speed timing hit - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - window.ga('require', 'splitTracker'); - customHits.forEach(hit => { - window.ga('send', hit); - }); - - const factory = SplitFactory({ - ...config, - startup: { - eventsFirstPushWindow: 0, - }, - scheduler: { - impressionsRefreshRate: 1, - // @TODO eventsPushRate is too high, but using eventsQueueSize don't let us assert `filterSplitToGaHits` - eventsPushRate: 10, - // eventsQueueSize: splitTrackParams.length + customHits.length, - }, - }); - client = factory.client(); - - client.ready().then(() => { - splitTrackParams.forEach(trackParams => { - client.track.apply(client, trackParams); - }); - splitGetTreatmentParams.forEach(getTreatmentParams => { - client.getTreatment.apply(client, getTreatmentParams); - }); - }); - }); - -} diff --git a/src/__tests__/gaIntegration/browser.spec.js b/src/__tests__/gaIntegration/browser.spec.js deleted file mode 100644 index c97a2922b..000000000 --- a/src/__tests__/gaIntegration/browser.spec.js +++ /dev/null @@ -1,31 +0,0 @@ -import tape from 'tape-catch'; -import fetchMock from '../testUtils/fetchMock'; -import { url } from '../testUtils'; -import gaToSplitSuite from './ga-to-split.spec'; -import splitToGaSuite from './split-to-ga.spec'; -import bothIntegrationsSuite from './both-integrations.spec'; - -import { settingsFactory } from '../../settings'; - -import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; - -const settings = settingsFactory({ - core: { - key: 'facundo@split.io' - } -}); - -tape('## E2E CI Tests ##', function (assert) { - - 
fetchMock.get(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); - fetchMock.post(/\/v1\/metrics/, 200); // 0.1% sample rate - - /* Validate GA integration */ - assert.test('E2E / GA-to-Split', gaToSplitSuite.bind(null, fetchMock)); - assert.test('E2E / Split-to-GA', splitToGaSuite.bind(null, fetchMock)); - assert.test('E2E / Both GA integrations', bothIntegrationsSuite.bind(null, fetchMock)); - - assert.end(); -}); diff --git a/src/__tests__/gaIntegration/ga-to-split.spec.js b/src/__tests__/gaIntegration/ga-to-split.spec.js deleted file mode 100644 index cc8e8764f..000000000 --- a/src/__tests__/gaIntegration/ga-to-split.spec.js +++ /dev/null @@ -1,461 +0,0 @@ -import sinon from 'sinon'; -import { SplitFactory } from '../../'; -import { settingsFactory } from '../../settings'; -import { gaSpy, gaTag, addGaTag, removeGaTag } from './gaTestUtils'; -import { url } from '../testUtils'; - -const config = { - core: { - key: 'facundo@split.io', - trafficType: 'user', - }, - integrations: [{ - type: 'GOOGLE_ANALYTICS_TO_SPLIT', - }], - startup: { - eventsFirstPushWindow: 0.2, - }, - streamingEnabled: false -}; -const settings = settingsFactory(config); - -export default function (fetchMock, assert) { - - let client; - - // test default behavior on default tracker - assert.test(t => { - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHits = window.gaSpy.getHits(); - - t.equal(resp.length, sentHits.length, `Number of sent hits must be equal to sent events (${resp.length})`); - t.equal(resp[0].key, settings.core.key, 'Event key is same that SDK config key'); - t.equal(resp[0].trafficTypeName, settings.core.trafficType, 'Event trafficTypeName is same that SDK config key'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - // siteSpeedSampleRate set to 0 to never send a site speed timing hit - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - window.ga('require', 'splitTracker'); - window.ga('send', 'pageview'); - - const factory = SplitFactory(config); - client = factory.client(); - - }); - - // test default behavior on named tracker, tracking N events, and GA in a different global variable - assert.test(t => { - const numberOfCustomEvents = 5; - let client; - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHits = window.gaSpy.getHits('myTracker'); - - t.equal(resp.length, sentHits.length, `Number of sent hits must be equal to sent events (${resp.length})`); - t.equal(resp[0].key, settings.core.key, 'Event key is same that SDK config key'); - t.equal(resp[0].trafficTypeName, settings.core.trafficType, 'Event trafficTypeName is same that SDK config key'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag('other_location_for_ga'); - - window.other_location_for_ga('create', 'UA-00000001-1', 'example1.com', 'myTracker', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker']); - - const factory = SplitFactory(config); - client = factory.client(); - - window.other_location_for_ga('myTracker.require', 'splitTracker'); - // this second 'require' is not applied (does not overwrite previous command) - window.other_location_for_ga('myTracker.require', 'splitTracker', { mapper: function () { throw 'error'; } 
}); - - for (let i = 0; i < numberOfCustomEvents; i++) - window.other_location_for_ga('myTracker.send', 'pageview'); - - }); - - // test error: no TT in SDK config - assert.test(t => { - const numberOfCustomEvents = 5; - - gaTag(); - - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - const logSpy = sinon.spy(console, 'log'); - - const factory = SplitFactory({ - ...config, - core: { key: config.core.key }, - debug: true, - }); - - window.ga('require', 'splitTracker'); - for (let i = 0; i < numberOfCustomEvents; i++) - window.ga('send', 'pageview'); - - // We must wait until ga is ready to get SplitTracker required and invoked, and to assert the test - window.ga(() => { - t.ok(logSpy.calledWith('[WARN] splitio => ga-to-split: No valid identities were provided. Please check that you are passing a valid list of identities or providing a traffic type at the SDK configuration.')); - t.equal(window.gaSpy.getHits().length, numberOfCustomEvents, `Number of sent hits must be equal to ${numberOfCustomEvents}`); - - logSpy.restore(); - t.end(); - }); - - factory.client().destroy(); - - }); - - // test default behavior, providing a list of identities as SDK options - assert.test(t => { - const numberOfCustomEvents = 3; - const identities = [{ key: 'user1', trafficType: 'user' }, { key: 'user2', trafficType: 'user' }]; - let client; - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHits = window.gaSpy.getHits('myTracker3'); - - t.equal(sentHits.length, numberOfCustomEvents, `Number of sent hits must be equal to sent custom events (${numberOfCustomEvents})`); - t.equal(resp.length, numberOfCustomEvents * identities.length, 'The number of sent events must be equal to the number of sent hits multiply by the number of identities'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - window.ga('create', 'UA-00000003-1', 'example3.com', 'myTracker3', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker3']); - - const factory = SplitFactory({ - ...config, - core: { key: config.core.key }, - integrations: [{ - type: 'GOOGLE_ANALYTICS_TO_SPLIT', - identities: identities, - }], - }); - client = factory.client(); - - window.ga('myTracker3.require', 'splitTracker'); - for (let i = 0; i < numberOfCustomEvents; i++) - window.ga('myTracker3.send', 'pageview'); - - }); - - - // test default behavior in multiple trackers, providing a list of identities in plugin options for one tracker and in sdk options for another - assert.test(t => { - const identitiesPluginOpts = [{ key: 'user1', trafficType: 'user' }, { key: 'user2', trafficType: 'user' }]; - const identitiesSdkOpts = [{ key: 'user3', trafficType: 'user' }]; - const gaSendIterations = 3; - const expectedNumberOfSplitEvents = gaSendIterations * (identitiesPluginOpts.length + identitiesSdkOpts.length); - - let client; - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - t.equal(resp.length, expectedNumberOfSplitEvents, 'The number of sent Split events must be equal to the number of sent hits multiply by the number of identities'); - - const sentHitsTracker4 = window.gaSpy.getHits('myTracker4'); - const sentHitsTracker5 = window.gaSpy.getHits('myTracker5'); - - t.equal(sentHitsTracker4.length, gaSendIterations, `Number of sent hits must be equal to the times 'send' command was invoked (${gaSendIterations})`); - t.equal(sentHitsTracker5.length, 
gaSendIterations, `Number of sent hits must be equal to the times 'send' command was invoked (${gaSendIterations})`); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - window.ga('create', 'UA-00000004-1', 'example4.com', 'myTracker4', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000005-1', 'example5.com', 'myTracker5', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker4', 'myTracker5']); - - const factory = SplitFactory({ - ...config, - core: { key: config.core.key }, - integrations: [{ - type: 'GOOGLE_ANALYTICS_TO_SPLIT', - identities: identitiesSdkOpts, - }], - }); - client = factory.client(); - - window.ga('myTracker4.require', 'splitTracker', { identities: identitiesPluginOpts }); - window.ga('myTracker5.require', 'splitTracker'); - - for (let i = 0; i < gaSendIterations; i++) { - window.ga('myTracker4.send', 'pageview'); - window.ga('myTracker5.send', 'event', 'mycategory', 'myaction'); - } - - }); - - // test custom filter and mapper in multiple trackers, passed as plugin options for one tracker and as sdk options for another - assert.test(t => { - const gaSendIterations = 3; - const prefixPluginOpts = 'plugin'; - const prefixSdkOpts = 'sdk'; - - let client; - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - t.equal(resp.length, gaSendIterations * 2, 'The number of sent Split events must be equal to the number of no filtered sent hits'); - t.equal(resp.filter(event => event.eventTypeId === prefixSdkOpts + '.mapperSdkOpts').length, gaSendIterations, 'Custom Split events'); - t.equal(resp.filter(event => event.eventTypeId === prefixPluginOpts + '.mapperPluginOpts').length, gaSendIterations, 'Custom Split events'); - - const sentHitsTracker4 = window.gaSpy.getHits('myTracker4'); - const sentHitsTracker5 = window.gaSpy.getHits('myTracker5'); - - t.equal(sentHitsTracker4.length, gaSendIterations * 2, 'Number of sent hits must be equal to the times `send` command was invoked'); - t.equal(sentHitsTracker5.length, gaSendIterations * 2, 'Number of sent hits must be equal to the times `send` command was invoked'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - window.ga('create', 'UA-00000004-1', 'example4.com', 'myTracker4', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000005-1', 'example5.com', 'myTracker5', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker4', 'myTracker5']); - - const factory = SplitFactory({ - ...config, - integrations: [{ - type: 'GOOGLE_ANALYTICS_TO_SPLIT', - filter: model => model.get('hitType') === 'pageview', // accepts only pageviews - mapper: () => ({ eventTypeId: 'mapperSdkOpts' }), // return a fixed event instance - prefix: prefixSdkOpts, - }], - }); - client = factory.client(); - - window.ga('myTracker4.require', 'splitTracker', { - filter: model => model.get('hitType') === 'event', // accepts only events - mapper: (model, defaultEvent) => ({ ...defaultEvent, eventTypeId: 'mapperPluginOpts' }), // updates the eventTypeId of default event - prefix: prefixPluginOpts, - }); - window.ga('myTracker5.require', 'splitTracker'); - - for (let i = 0; i < gaSendIterations; i++) { - window.ga('myTracker4.send', 'pageview'); - window.ga('myTracker5.send', 'pageview'); - window.ga('myTracker4.send', 'event', 'mycategory', 'myaction'); - window.ga('myTracker5.send', 'event', 'mycategory', 'myaction'); - } - - }); - - // exception in custom mapper or invalid mapper result must not block sending hits 
- assert.test(t => { - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - t.equal(resp.length, 1, 'only a custom event is sent. no events associated to ga hit'); - return 200; - }); - - gaTag(); - - // siteSpeedSampleRate set to 0 to never send a site speed timing hit - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000001-1', 'example1.com', 'myTracker', { siteSpeedSampleRate: 0 }); - - gaSpy(['t0', 'myTracker']); - - window.ga('require', 'splitTracker', { mapper: function () { throw 'error'; } }); - // this second 'require' is not applied (it does not overwrite previous command) - window.ga('require', 'splitTracker'); - - window.ga('myTracker.require', 'splitTracker', { mapper: function () { return { value: 'invalid value' }; } }); - - const logSpy = sinon.spy(console, 'log'); - - window.ga('send', 'pageview'); - window.ga('myTracker.send', 'pageview'); - - const factory = SplitFactory(config); - factory.Logger.enable(); // Enable debug logs. Equivalent to `config.debug` true - client = factory.client(); - client.track('some_event'); - - setTimeout(() => { - const sentHitsT0 = window.gaSpy.getHits('t0'); - const sentHitsMyTracker = window.gaSpy.getHits('myTracker'); - t.equal(sentHitsT0.length, 1, 'Hits must be sent even if a custom mapper throw an exception'); - t.equal(sentHitsMyTracker.length, 1, 'Hits must be sent even if a custom mapper return an invalid event instance'); - t.ok(logSpy.calledWith('[ERROR] splitio => ga-to-split:mapper: value must be a finite number.')); - client.destroy(); - logSpy.restore(); - t.end(); - }); - - }); - - // test default behavior on default tracker: Split ready before GA init, and keep sending hits after Split destroyed - assert.test(t => { - const hits = [{ hitType: 'pageview' }, { hitType: 'event' }]; - const hitsAfterDestroyed = [{ hitType: 'screenview' }]; - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHits = window.gaSpy.getHits(); - - t.equal(resp.length, sentHits.length, `Number of sent hits must be equal to sent events (${hits.length})`); - t.equal(resp.length, hits.length, `Number of sent hits must be equal to sent events (${hits.length})`); - - setTimeout(() => { - client.destroy().then(() => { - hitsAfterDestroyed.forEach(hit => window.ga('send', hit)); - setTimeout(() => { - t.equal(sentHits.length, hits.length + hitsAfterDestroyed.length, 'sending hits must not be bloqued if Split SDK is destroyed'); - t.end(); - }); - }); - }); - return 200; - }); - - removeGaTag(); - - const factory = SplitFactory({ - ...config, - startup: { - eventsFirstPushWindow: 1000, - }, - scheduler: { - eventsQueueSize: hits.length, - } - }); - client = factory.client(); - - client.ready().then(() => { - addGaTag(); - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - gaSpy(); - - window.ga('require', 'splitTracker'); - hits.forEach(hit => window.ga('send', hit)); - - }); - }); - - // test `hits` flag - assert.test(t => { - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHits = window.gaSpy.getHits(); - - t.equal(resp.filter(event => event.eventTypeId === 'ga.pageview').length, 0, 'No events associated to GA hits must be sent'); - t.equal(resp.filter(event => event.eventTypeId === 'some_event').length, 1, 'Tracked events must be sent to Split'); - t.equal(sentHits.length, 1, 'Hits must be 
sent to GA'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - // siteSpeedSampleRate set to 0 to never send a site speed timing hit - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - window.ga('require', 'splitTracker', { hits: false }); - window.ga('send', 'pageview'); - - const factory = SplitFactory(config); - client = factory.client(); - client.track('some_event'); - - }); - - // test 'autoRequire' script placed right after GA script tag. - // We get same result if it is placed right before, and also applies for Universal Analytics configured with GTM and gtag.js tags. - // If it is executed asynchronously, trackers creation might be missed. - assert.test(t => { - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHitsTracker1 = window.gaSpy.getHits('tracker1'); - const sentHitsTracker2 = window.gaSpy.getHits('tracker2'); - - t.equal(resp.length, sentHitsTracker1.length + sentHitsTracker2.length, 'All hits of all trackers are captured as Split events'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - // Run autoRequire iife: - // require('@splitsoftware/splitio-commons/src/integrations/ga/autoRequire'); - require('../../../scripts/ga-to-split-autorequire'); - - window.ga('create', 'UA-00000000-1', { name: 'tracker1', cookieDomain: 'auto', siteSpeedSampleRate: 0 }); - - gaSpy(['tracker1']); - - window.ga('tracker1.send', 'event', 'mycategory', 'myaction1'); // Captured - - const factory = SplitFactory({ - ...config, - integrations: [{ - type: 'GOOGLE_ANALYTICS_TO_SPLIT', - autoRequire: true - }], - }); - - window.ga('tracker1.send', 'event', 'mycategory', 'myaction2'); // Captured - window.ga('create', 'UA-00000001-1', 'auto', 'tracker2', { siteSpeedSampleRate: 0 }); // New tracker - gaSpy(['tracker2'], false); - window.ga('tracker2.send', 'event', 'mycategory', 'myaction3'); // Captured - - client = factory.client(); - - }); - -} diff --git a/src/__tests__/gaIntegration/gaTestUtils.js b/src/__tests__/gaIntegration/gaTestUtils.js deleted file mode 100644 index e0aac43e3..000000000 --- a/src/__tests__/gaIntegration/gaTestUtils.js +++ /dev/null @@ -1,94 +0,0 @@ -export const DEFAULT_TRACKER = 't0'; - -const HIT_FIELDS = ['hitType', 'nonInteraction']; -const EVENT_FIELDS = ['eventCategory', 'eventAction', 'eventLabel', 'eventValue']; -const FIELDS = [...HIT_FIELDS, ...EVENT_FIELDS]; // List of hit fields to spy, which are the ones set by the default SplitToGa mapper. - -let hits = {}; - -/** - * Spy ga hits per tracker. - * - * @param {string[]} trackerNames names of the trackers to spy. If not provided, it spies the default tracker. i.e., `gaSpy()` is equivalent to `gaSpy(['t0'])`. - * @param {boolean} resetSpy true to reset the list of captured hits. 
- * - * @see {@link https://developers.google.com/analytics/devguides/collection/analyticsjs/field-reference} - */ -export function gaSpy(trackerNames = [DEFAULT_TRACKER], resetSpy = true) { - - if (resetSpy) hits = {}; - - // access ga via its gaAlias, accounting for the possibility that the global command queue - // has been renamed or not yet defined (analytics.js mutates window[gaAlias] reference) - const gaAlias = window['GoogleAnalyticsObject'] || 'ga'; - - if (typeof window[gaAlias] === 'function') { - window[gaAlias](function () { - // We try-catch the following code, since errors are catched by `ga` and thus cannot be traced for debugging. - try { - trackerNames.forEach(trackerName => { - const trackerToSniff = window[gaAlias].getByName(trackerName); - hits[trackerName] = []; - const originalSendHitTask = trackerToSniff.get('sendHitTask'); - trackerToSniff.set('sendHitTask', function (model) { - originalSendHitTask(model); - const hit = {}; - FIELDS.forEach(fieldName => { - hit[fieldName] = model.get(fieldName); - }); - hits[trackerName].push(hit); - }); - }); - } catch (err) { - console.log(err); - } - }); - } else { - console.error('GA command queue was not found'); - } - - window.gaSpy = { - // getHits may return `undefined` if `ga` is not ready or `trackerName` is not in the list of `trackerNames` - getHits: function (trackerName = DEFAULT_TRACKER) { - const trackerHits = hits[trackerName]; - return trackerHits; - } - }; - - return window.gaSpy; -} - -/** - * Add Google Analytics tag, removing previous one if exists. - * - * @see {@link https://developers.google.com/analytics/devguides/collection/analyticsjs#the_google_analytics_tag} - */ -export function gaTag(gaAlias = 'ga') { - removeGaTag(gaAlias); - addGaTag(gaAlias); -} - -/** - * Add Google Analytics tag. - */ -export function addGaTag(gaAlias = 'ga') { - (function (i, s, o, g, r, a, m) { - i['GoogleAnalyticsObject'] = r; - i[r] = i[r] || function () { - (i[r].q = i[r].q || []).push(arguments); - }, - i[r].l = 1 * new Date(); - a = s.createElement(o), - m = s.getElementsByTagName(o)[0]; - a.async = 1; - a.src = g; - m.parentNode.insertBefore(a, m); - })(window, document, 'script', 'https://www.google-analytics.com/analytics.js', gaAlias); -} - -/** - * Remove Google Analytics command queue. 
- */ -export function removeGaTag(gaAlias = 'ga') { - window[window['GoogleAnalyticsObject'] || gaAlias] = undefined; -} diff --git a/src/__tests__/gaIntegration/split-to-ga.spec.js b/src/__tests__/gaIntegration/split-to-ga.spec.js deleted file mode 100644 index 1285d6f01..000000000 --- a/src/__tests__/gaIntegration/split-to-ga.spec.js +++ /dev/null @@ -1,438 +0,0 @@ -import sinon from 'sinon'; -import { SplitFactory } from '../../'; -import { settingsFactory } from '../../settings'; -import { gaSpy, gaTag, removeGaTag, addGaTag } from './gaTestUtils'; -import { SPLIT_IMPRESSION, SPLIT_EVENT, DEBUG } from '@splitsoftware/splitio-commons/src/utils/constants'; -import { url } from '../testUtils'; - -function countImpressions(parsedImpressionsBulkPayload) { - return parsedImpressionsBulkPayload - .reduce((accumulator, currentValue) => { return accumulator + currentValue.i.length; }, 0); -} - -const config = { - core: { - authorizationKey: '', - key: 'facundo@split.io', - trafficType: 'user', - }, - integrations: [{ - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - }], - scheduler: { - impressionsRefreshRate: 0.2, - eventsQueueSize: 1, - }, - streamingEnabled: false, - sync: { - impressionsMode: DEBUG, - } -}; - -const settings = settingsFactory(config); - -export default function (fetchMock, assert) { - - // test default behavior - assert.test(t => { - - let client; - - // Generator to synchronize the call of t.end() when both impressions and events endpoints were invoked. - const finish = (function* () { - yield; - t.equal(window.gaSpy.getHits().length, 3, 'Total hits are 3: pageview, split event and impression'); - setTimeout(() => { - client.destroy(); - t.end(); - }); - })(); - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - // we can assert payload and ga hits, once ga is ready and after `SplitToGa.queue`, that is timeout wrapped, make to the queue stack. 
- setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentImpressionHits = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-impression'); - - t.equal(sentImpressions, 1, 'Number of impressions'); - t.equal(sentImpressions, sentImpressionHits.length, `Number of sent impression hits must be equal to the number of impressions (${sentImpressions})`); - - finish.next(); - }); - }); - return 200; - }); - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentEvents = resp.length; - const sentEventHits = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-event'); - - t.equal(sentEvents, 1, 'Number of events'); - t.equal(sentEvents, sentEventHits.length, `Number of sent event hits must be equal to sent events: (${sentEvents})`); - - finish.next(); - }); - }, 10); - return 200; - }); - - gaTag(); - - // siteSpeedSampleRate set to 0 to never send a site speed timing hit - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - window.ga('send', 'pageview'); - - const factory = SplitFactory(config); - client = factory.client(); - client.ready().then(() => { - client.track('some_event'); - client.getTreatment('hierarchical_splits_test'); - }); - - }); - - // test default behavior in multiple trackers, with multiple impressions, and GA in a different global variable - assert.test(t => { - - let client; - const numOfEvaluations = 4; - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - setTimeout(() => { - window.other_location_for_ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentHitsTracker1 = window.gaSpy.getHits('myTracker1'); - const sentHitsTracker2 = window.gaSpy.getHits('myTracker2'); - - t.equal(sentImpressions, numOfEvaluations, 'Number of impressions equals the number of evaluations'); - t.equal(sentImpressions, sentHitsTracker1.length, 'Number of sent hits must be equal to the number of impressions'); - t.equal(sentImpressions, sentHitsTracker2.length, 'Number of sent hits must be equal to the number of impressions'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - }); - }); - return 200; - }); - - gaTag('other_location_for_ga'); - - window.other_location_for_ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - window.other_location_for_ga('create', 'UA-00000001-1', 'example1.com', 'myTracker1', { siteSpeedSampleRate: 0 }); - window.other_location_for_ga('create', 'UA-00000002-1', 'example2.com', 'myTracker2', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker1', 'myTracker2']); - - const factory = SplitFactory({ - ...config, - core: { - ...config.core, - authorizationKey: '', - }, - integrations: [{ - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - trackerNames: ['myTracker1'], - }, { - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - trackerNames: ['myTracker2'], - }], - }); - client = factory.client(); - client.ready().then(() => { - for (let i = 0; i < numOfEvaluations; i++) - client.getTreatment('split_with_config'); - }); - - }); - - // test several SplitToGa integration items, with custom filter and mapper - assert.test(t => { - - let client; - const numOfEvaluations = 4; - const numOfEvents = 3; - - // Generator to synchronize the call of t.end() when both impressions and events endpoints were invoked. 
- const finish = (function* () { - yield; - setTimeout(() => { - client.destroy(); - t.end(); - }); - })(); - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentImpressionHitsTracker3 = window.gaSpy.getHits('myTracker3').filter(hit => hit.eventCategory === 'split-impression'); - const sentImpressionHitsTracker4 = window.gaSpy.getHits('myTracker4').filter(hit => hit.eventCategory === 'split-impression'); - - t.equal(sentImpressionHitsTracker3.length, sentImpressions, 'For tracker3, no impressions are filtered'); - t.equal(sentImpressionHitsTracker4.length, 0, 'For tracker4, all impressions are filtered'); - - finish.next(); - }); - }); - return 200; - }); - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentEvents = resp.length; - const sentEventHitsTracker3 = window.gaSpy.getHits('myTracker3').filter(hit => hit.eventCategory === 'mycategory'); - const sentEventHitsTracker4 = window.gaSpy.getHits('myTracker4').filter(hit => hit.eventCategory === 'mycategory'); - - t.equal(sentEventHitsTracker3.length, 0, 'For tracker3, all events are filtered'); - t.equal(sentEventHitsTracker4.length, sentEvents, 'For tracker4, no events are filtered'); - - finish.next(); - }); - }, 10); - return 200; - }); - - gaTag(); - - window.ga('create', 'UA-00000003-1', 'example3.com', 'myTracker3', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000004-1', 'example4.com', 'myTracker4', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker3', 'myTracker4']); - - const onlyImpressionsFilter = ({ type }) => type === SPLIT_IMPRESSION; - const onlyEventsMapper = function ({ payload, type }) { - return type === SPLIT_EVENT ? 
- { hitType: 'event', eventCategory: 'mycategory', eventAction: payload.eventTypeId } : - undefined; - }; - const factory = SplitFactory({ - ...config, - core: { - ...config.core, - authorizationKey: '', - }, - scheduler: { - impressionsRefreshRate: 0.2, - eventsQueueSize: numOfEvents, - }, - integrations: [{ - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - trackerNames: ['myTracker3'], - filter: onlyImpressionsFilter, - }, { - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - trackerNames: ['myTracker4'], - mapper: onlyEventsMapper, - }], - }); - client = factory.client(); - client.ready().then(() => { - for (let i = 0; i < numOfEvaluations; i++) { - client.getTreatment('split_with_config'); - } - for (let i = 0; i < numOfEvents; i++) { - client.track('eventType'); - } - }); - - }); - - // exception in custom mapper or invalid mapper result must not send a hit - assert.test(t => { - - const logSpy = sinon.spy(console, 'log'); - const error = 'some error'; - let client; - const numOfEvaluations = 1; - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentHitsDefault = window.gaSpy.getHits(); - const sentHitsTracker1 = window.gaSpy.getHits('myTracker1'); - const sentHitsTracker2 = window.gaSpy.getHits('myTracker2'); - - t.equal(sentImpressions, numOfEvaluations, 'Number of impressions equals the number of evaluations'); - t.equal(sentHitsDefault.length, 0, 'No hits sent if custom mapper throws error'); - t.equal(sentHitsTracker1.length, 0, 'No hits sent if custom mapper returns invalid result'); - t.equal(sentHitsTracker2.length, numOfEvaluations, 'Number of sent hits must be equal to the number of impressions'); - - setTimeout(() => { - t.ok(logSpy.calledWith(`[WARN] splitio => split-to-ga: queue method threw: ${error}. No hit was sent.`)); - t.ok(logSpy.calledWith('[WARN] splitio => split-to-ga: your custom mapper returned an invalid FieldsObject instance. 
It must be an object with at least a `hitType` field.')); - client.destroy(); - logSpy.restore(); - t.end(); - }); - }); - }); - return 200; - }); - - gaTag(); - - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000001-1', 'example1.com', 'myTracker1', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000002-1', 'example2.com', 'myTracker2', { siteSpeedSampleRate: 0 }); - - gaSpy(['t0', 'myTracker1', 'myTracker2']); - - const factory = SplitFactory({ - ...config, - debug: true, - integrations: [{ - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - mapper: function () { throw error; }, - }, { - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - trackerNames: ['myTracker1'], - mapper: function () { return {}; }, - }, { - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - trackerNames: ['myTracker2'], - mapper: function () { return { hitType: 'event', eventCategory: 'my-split-impression', eventAction: 'some-action' }; }, - }], - }); - client = factory.client(); - client.ready().then(() => { - for (let i = 0; i < numOfEvaluations; i++) - client.getTreatment('split_with_config'); - }); - - }); - - // Split created before GA initialized - assert.test(t => { - - const logSpy = sinon.spy(console, 'log'); - let client; - const numOfEvaluations = 1; - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentHitsDefault = window.gaSpy.getHits(); - - t.equal(sentImpressions, numOfEvaluations, 'Number of impressions equals the number of evaluations'); - t.equal(sentHitsDefault.length, numOfEvaluations, 'Hits sent if ga initialized before Split evaluation (client.getTreatment***)'); - - setTimeout(() => { - client.destroy().then(() => { - logSpy.restore(); - t.end(); - }); - }); - }); - }); - return 200; - }); - - removeGaTag(); - - const factory = SplitFactory({ - ...config, - debug: true, - }); - t.ok(logSpy.calledWith('[WARN] splitio => split-to-ga: `ga` command queue not found. No hits will be sent until it is available.'), 'warning GA not found'); - - client = factory.client(); - client.ready().then(() => { - for (let i = 0; i < numOfEvaluations; i++) - client.getTreatment('split_with_config'); - }); - - addGaTag(); - - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - }); - - // test `events` and `impressions` flags - assert.test(t => { - - let client; - - // Generator to synchronize the call of t.end() when both impressions and events endpoints were invoked. - const finish = (function* () { - yield; - t.equal(window.gaSpy.getHits().length, 1, 'Total hits are 1: pageview'); - setTimeout(() => { - client.destroy(); - t.end(); - }); - })(); - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - // we can assert payload and ga hits, once ga is ready and after `SplitToGa.queue`, that is timeout wrapped, make to the queue stack. 
- setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentImpressionHits = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-impression'); - - t.equal(sentImpressions, 1, 'Number of impressions'); - t.equal(sentImpressionHits.length, 0, 'No hits associated to Split impressions must be sent'); - - finish.next(); - }); - }); - return 200; - }); - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentEvents = resp.length; - const sentEventHits = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-event'); - - t.equal(sentEvents, 1, 'Number of events'); - t.equal(sentEventHits.length, 0, 'No hits associated to Split events must be sent'); - - finish.next(); - }); - }); - return 200; - }); - - gaTag(); - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - gaSpy(); - window.ga('send', 'pageview'); - - const factory = SplitFactory({ - ...config, - integrations: [{ - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - events: false, - impressions: false, - }] - }); - client = factory.client(); - client.ready().then(() => { - client.track('some_event'); - client.getTreatment('hierarchical_splits_test'); - }); - - }); -} diff --git a/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.json b/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.json index 3930e1d09..d7b68e966 100644 --- a/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.json +++ b/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.json @@ -1,5 +1,5 @@ { "pushEnabled": true, - "token": "eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X05UY3dPVGMzTURReF9teVNlZ21lbnRzXCI6W1wic3Vic2NyaWJlXCJdLFwiTnpNMk1ESTVNemMwX05ERXpNalExTXpBME53PT1fc3BsaXRzXCI6W1wic3Vic2NyaWJlXCJdLFwiY29udHJvbF9wcmlcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXSxcImNvbnRyb2xfc2VjXCI6W1wic3Vic2NyaWJlXCIsXCJjaGFubmVsLW1ldGFkYXRhOnB1Ymxpc2hlcnNcIl19IiwieC1hYmx5LWNsaWVudElkIjoiY2xpZW50SWQiLCJleHAiOjE1ODY5MTU3NjksImlhdCI6MTU4NjkxMjE2OX0.iq6k65WcCx8s-yqDj4FpIOUEP6-G3VdB-NLhR0fXQUw", + "token": "eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US5MZzMtZWciLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X2NvbnRyb2xcIjpbXCJzdWJzY3JpYmVcIl0sXCJOek0yTURJNU16YzBfTkRFek1qUTFNekEwTnc9PV9mbGFnc1wiOltcInN1YnNjcmliZVwiXSxcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X21lbWJlcnNoaXBzXCI6W1wic3Vic2NyaWJlXCJdLFwiY29udHJvbF9wcmlcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXSxcImNvbnRyb2xfc2VjXCI6W1wic3Vic2NyaWJlXCIsXCJjaGFubmVsLW1ldGFkYXRhOnB1Ymxpc2hlcnNcIl19IiwieC1hYmx5LWNsaWVudElkIjoiY2xpZW50SWQiLCJleHAiOjE3MjUzODM2NDEsImlhdCI6MTcyNTM4MDA0MX0.Qqyixo2ZG-2tAkxjad7O-iphK3DVK5_xICypbIDh3IM", "connDelay": 0 } \ No newline at end of file diff --git a/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json b/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json index 8ed677bbe..e266b5928 100644 --- a/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json +++ b/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json @@ -1,5 +1,5 @@ { "pushEnabled": true, - "token": 
"eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X01qRTBNVGt4T1RVMk1nPT1fbXlTZWdtZW50c1wiOltcInN1YnNjcmliZVwiXSxcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X05UY3dPVGMzTURReF9teVNlZ21lbnRzXCI6W1wic3Vic2NyaWJlXCJdLFwiTnpNMk1ESTVNemMwX05ERXpNalExTXpBME53PT1fc3BsaXRzXCI6W1wic3Vic2NyaWJlXCJdLFwiY29udHJvbF9wcmlcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXSxcImNvbnRyb2xfc2VjXCI6W1wic3Vic2NyaWJlXCIsXCJjaGFubmVsLW1ldGFkYXRhOnB1Ymxpc2hlcnNcIl19IiwieC1hYmx5LWNsaWVudElkIjoiY2xpZW50SWQiLCJleHAiOjE1ODY5MTYyMDAsImlhdCI6MTU4NjkxMjYwMH0.iq6k65WcCx8s-yqDj4FpIOUEP6-G3VdB-NLhR0fXQUw", + "token": "eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US5MZzMtZWciLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X2NvbnRyb2xcIjpbXCJzdWJzY3JpYmVcIl0sXCJOek0yTURJNU16YzBfTkRFek1qUTFNekEwTnc9PV9mbGFnc1wiOltcInN1YnNjcmliZVwiXSxcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X21lbWJlcnNoaXBzXCI6W1wic3Vic2NyaWJlXCJdLFwiY29udHJvbF9wcmlcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXSxcImNvbnRyb2xfc2VjXCI6W1wic3Vic2NyaWJlXCIsXCJjaGFubmVsLW1ldGFkYXRhOnB1Ymxpc2hlcnNcIl19IiwieC1hYmx5LWNsaWVudElkIjoiY2xpZW50SWQiLCJleHAiOjE3MjUzODk4MjgsImlhdCI6MTcyNTM4NjIyOH0.KaEa6CjNM489dLgHxDbL8RP1DUFCMtkGLI6W3JZcTTs", "connDelay": 0 } \ No newline at end of file diff --git a/src/__tests__/mocks/memberships.emmanuel@split.io.json b/src/__tests__/mocks/memberships.emmanuel@split.io.json new file mode 100644 index 000000000..aec8b8705 --- /dev/null +++ b/src/__tests__/mocks/memberships.emmanuel@split.io.json @@ -0,0 +1,15 @@ +{ + "ms": { + "k": [ + { + "n": "developers" + }, + { + "n": "engineers" + }, + { + "n": "employees" + } + ] + } +} \ No newline at end of file diff --git a/src/__tests__/mocks/memberships.facundo@split.io.json b/src/__tests__/mocks/memberships.facundo@split.io.json new file mode 100644 index 000000000..d64b10ee3 --- /dev/null +++ b/src/__tests__/mocks/memberships.facundo@split.io.json @@ -0,0 +1,9 @@ +{ + "ms": { + "k": [ + { + "n": "splitters" + } + ] + } +} \ No newline at end of file diff --git a/src/__tests__/mocks/memberships.marcio@split.io.json b/src/__tests__/mocks/memberships.marcio@split.io.json new file mode 100644 index 000000000..476ddc3f4 --- /dev/null +++ b/src/__tests__/mocks/memberships.marcio@split.io.json @@ -0,0 +1,3 @@ +{ + "ms": {} +} diff --git a/src/__tests__/mocks/memberships.nicolas@split.io.json b/src/__tests__/mocks/memberships.nicolas@split.io.json new file mode 100644 index 000000000..aec8b8705 --- /dev/null +++ b/src/__tests__/mocks/memberships.nicolas@split.io.json @@ -0,0 +1,15 @@ +{ + "ms": { + "k": [ + { + "n": "developers" + }, + { + "n": "engineers" + }, + { + "n": "employees" + } + ] + } +} \ No newline at end of file diff --git a/src/__tests__/mocks/memberships.nicolas@split.io.mock2.json b/src/__tests__/mocks/memberships.nicolas@split.io.mock2.json new file mode 100644 index 000000000..9de7489d1 --- /dev/null +++ b/src/__tests__/mocks/memberships.nicolas@split.io.mock2.json @@ -0,0 +1,15 @@ +{ + "ms": { + "k": [ + { + "n": "developers" + }, + { + "n": "engineers" + }, + { + "n": "splitters" + } + ] + } +} \ No newline at end of file diff --git a/src/__tests__/mocks/membershipsEmpty.json b/src/__tests__/mocks/membershipsEmpty.json new file mode 100644 index 000000000..7473a4b01 --- /dev/null +++ b/src/__tests__/mocks/membershipsEmpty.json @@ -0,0 +1,5 @@ +{ + "ms": { + "k": [] + } +} \ No newline at end of file diff --git 
a/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 000000000..baa24fe3f --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_LS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"n\\\":[\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.DELAY.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.DELAY.1457552650000.json new file mode 100644 index 000000000..99c839cb0 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.DELAY.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_LS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"n\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":1,\\\"s\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json new file mode 100644 index 000000000..59da6ea6f --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552651000,\\\"n\\\":[],\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\",\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.ZLIB.1457552651000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.ZLIB.1457552651000.json new file mode 100644 index 000000000..fe4ff5a38 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.ZLIB.1457552651000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552651000,\\\"n\\\":[],\\\"c\\\": 2,\\\"u\\\": 1,\\\"d\\\":\\\"eJxiGAX4AMdAO2AU4AeMA+2AAQACA+0AuoORGMvDBDANtAPoDBQG2gGDGQz16pRloB0wCkbBKBgFo4As0EBYyZCqoojwDwEACAAA//+W/QFR\\\",\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json new file mode 100644 index 000000000..320522553 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552652000,\\\"n\\\":[\\\"splitters\\\"],\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\",\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 000000000..23e582775 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + 
"type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"n\\\":[\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\",\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552640000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552640000.json new file mode 100644 index 000000000..dcce251be --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552640000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552640000,\\\"u\\\": 0,\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json new file mode 100644 index 000000000..a69df9dd9 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"n\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552645000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552645000.json deleted file mode 100644 index 415bc2b09..000000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552645000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"id\":\"mc4i3NENoA:0:0\",\"clientId\":\"NDEzMTY5Mzg0MA==:MTM2ODE2NDMxNA==\",\"timestamp\":1457552645900,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_MjE0MTkxOTU2Mg==_mySegments\",\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552645000,\\\"segmentList\\\":[\\\"employees\\\"],\\\"includesPayload\\\":true}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552646000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552646000.json deleted file mode 100644 index 18daa83bd..000000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552646000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"id\":\"mc4i3NENoA:0:0\",\"clientId\":\"NDEzMTY5Mzg0MA==:MTM2ODE2NDMxNA==\",\"timestamp\":1457552646900,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_MjE0MTkxOTU2Mg==_mySegments\",\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552646000,\\\"includesPayload\\\":true}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json deleted file mode 100644 index 951d8a31d..000000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"id\":\"mc4i3NENoA:0:0\",\"clientId\":\"NDEzMTY5Mzg0MA==:MTM2ODE2NDMxNA==\",\"timestamp\":1457552640900,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_NTcwOTc3MDQx_mySegments\",\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552640000,\\\"includesPayload\\\":false}\"}" -} \ No newline at end of 
file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552641000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552641000.json deleted file mode 100644 index 1c659961e..000000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552641000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"id\":\"mc4i3NENoA:0:0\",\"clientId\":\"NDEzMTY5Mzg0MA==:MTM2ODE2NDMxNA==\",\"timestamp\":1457552641900,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_NTcwOTc3MDQx_mySegments\",\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552641000,\\\"includesPayload\\\":false}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json deleted file mode 100644 index 97c2a73c9..000000000 --- a/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552651000,\\\"segmentName\\\":\\\"\\\",\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.BOUNDED.ZLIB.1457552651000.json b/src/__tests__/mocks/message.V2.BOUNDED.ZLIB.1457552651000.json deleted file mode 100644 index 44c008928..000000000 --- a/src/__tests__/mocks/message.V2.BOUNDED.ZLIB.1457552651000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552651000,\\\"segmentName\\\":\\\"\\\",\\\"c\\\": 2,\\\"u\\\": 1,\\\"d\\\":\\\"eJxiGAX4AMdAO2AU4AeMA+2AAQACA+0AuoORGMvDBDANtAPoDBQG2gGDGQz16pRloB0wCkbBKBgFo4As0EBYyZCqoojwDwEACAAA//+W/QFR\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json deleted file mode 100644 index c44ee3ac3..000000000 --- a/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552652000,\\\"segmentName\\\":\\\"splitters\\\",\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json deleted file mode 100644 index aaf1a3f33..000000000 --- a/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552653000,\\\"segmentName\\\":\\\"splitters\\\",\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json deleted file mode 100644 index a7a2e793e..000000000 --- a/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": 
"{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552650000,\\\"segmentName\\\":\\\"\\\",\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/mySegmentsEmpty.json b/src/__tests__/mocks/mySegmentsEmpty.json deleted file mode 100644 index 619bab21b..000000000 --- a/src/__tests__/mocks/mySegmentsEmpty.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "mySegments": [] -} diff --git a/src/__tests__/mocks/mysegments.emmanuel@split.io.json b/src/__tests__/mocks/mysegments.emmanuel@split.io.json deleted file mode 100644 index 6d53eb431..000000000 --- a/src/__tests__/mocks/mysegments.emmanuel@split.io.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "mySegments": [ - { - "id": "482df151-e63f-11e9-9275-924a43dg782b", - "name": "developers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc783e", - "name": "engineers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc7840", - "name": "employees" - } - ] -} diff --git a/src/__tests__/mocks/mysegments.facundo@split.io.json b/src/__tests__/mocks/mysegments.facundo@split.io.json deleted file mode 100644 index de85cc2f9..000000000 --- a/src/__tests__/mocks/mysegments.facundo@split.io.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "mySegments": [ - { - "id": "482df150-e62f-11e5-9265-924a43db712b", - "name": "splitters" - } - ] -} diff --git a/src/__tests__/mocks/mysegments.marcio@split.io.json b/src/__tests__/mocks/mysegments.marcio@split.io.json deleted file mode 100644 index 619bab21b..000000000 --- a/src/__tests__/mocks/mysegments.marcio@split.io.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "mySegments": [] -} diff --git a/src/__tests__/mocks/mysegments.nicolas@split.io.json b/src/__tests__/mocks/mysegments.nicolas@split.io.json deleted file mode 100644 index 6d53eb431..000000000 --- a/src/__tests__/mocks/mysegments.nicolas@split.io.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "mySegments": [ - { - "id": "482df151-e63f-11e9-9275-924a43dg782b", - "name": "developers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc783e", - "name": "engineers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc7840", - "name": "employees" - } - ] -} diff --git a/src/__tests__/mocks/mysegments.nicolas@split.io.mock2.json b/src/__tests__/mocks/mysegments.nicolas@split.io.mock2.json deleted file mode 100644 index 98a1538ae..000000000 --- a/src/__tests__/mocks/mysegments.nicolas@split.io.mock2.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "mySegments": [ - { - "id": "482df151-e63f-11e9-9275-924a43dg782b", - "name": "developers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc783e", - "name": "engineers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc783f", - "name": "splitters" - } - ] -} \ No newline at end of file diff --git a/src/__tests__/mocks/splitchanges.since.-1.json b/src/__tests__/mocks/splitchanges.since.-1.json index 6198d41cc..ee21cf9cc 100644 --- a/src/__tests__/mocks/splitchanges.since.-1.json +++ b/src/__tests__/mocks/splitchanges.since.-1.json @@ -1,5 +1,75 @@ { "splits": [ + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "in_large_segment", + "seed": -1984784937, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "no", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "IN_LARGE_SEGMENT", + "negate": false, + "userDefinedLargeSegmentMatcherData": { + "largeSegmentName": "harnessians" + }, + "whitelistMatcherData": 
null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "unaryStringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "yes", + "size": 100 + } + ] + }, + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "IN_LARGE_SEGMENT", + "negate": false, + "userDefinedLargeSegmentMatcherData": { + "largeSegmentName": "splitters" + }, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "unaryStringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "yes", + "size": 100 + } + ] + } + ], + "configurations": {} + }, { "orgId": null, "environment": null, diff --git a/src/__tests__/nodeSuites/lazy-init.spec.js b/src/__tests__/nodeSuites/lazy-init.spec.js new file mode 100644 index 000000000..68e92e8f5 --- /dev/null +++ b/src/__tests__/nodeSuites/lazy-init.spec.js @@ -0,0 +1,89 @@ +import { SplitFactory as SplitFactorySS } from '../../factory/node'; +import { SplitFactory as SplitFactoryCS } from '../../factory/browser'; + +// Tests should finish without dangling timers or requests +export default function (settings, fetchMock, t) { + + t.test('Server-side', async (assert) => { + let splitio; + + for (let i = 0; i < 100; i++) { + splitio = SplitFactorySS({ + core: { + authorizationKey: 'fake-token-' + i, + }, + urls: { + sdk: 'https://not-called/api', + events: 'https://not-called/api', + auth: 'https://not-called/api', + } + }, (modules) => { + modules.lazyInit = true; + }); + + const manager = splitio.manager(); + assert.deepEqual(manager.names(), [], 'We should not have done any request yet'); + + const client = splitio.client(); + assert.equal(client.getTreatment('user-1', 'split_test'), 'control', 'We should get control'); + assert.equal(client.track('user-1', 'user', 'my_event'), true, 'We should track the event'); + } + + fetchMock.getOnce('https://not-called/api/splitChanges?s=1.1&since=-1', { status: 200, body: { splits: [], since: -1, till: 1457552620999 } }); + fetchMock.getOnce('https://not-called/api/splitChanges?s=1.1&since=1457552620999', { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.postOnce('https://not-called/api/testImpressions/bulk', 200); + fetchMock.postOnce('https://not-called/api/events/bulk', 200); + + splitio.init(); + await splitio.client().ready(); + assert.true(splitio.client().__getStatus().isReady, 'Split SDK is ready'); + await splitio.destroy(); + + assert.end(); + }); + + t.test('Client-side', async (assert) => { + let splitio; + + for (let i = 0; i < 100; i++) { + splitio = SplitFactoryCS({ + core: { + authorizationKey: 'fake-token-' + i, + key: 'user-' + i, + }, + urls: { + sdk: 'https://not-called/api', + events: 'https://not-called/api', + auth: 'https://not-called/api', + } + }, (modules) => { + modules.lazyInit = true; + }); + + const manager = splitio.manager(); + assert.deepEqual(manager.names(), [], 'We should not have done any request yet'); + + const client = splitio.client(); + assert.equal(client.getTreatment('split_test'), 'control', 'We should get control'); + assert.equal(client.track('user', 'my_event'), true, 'We should track the event'); + + const otherClient = splitio.client('other-user'); + assert.equal(otherClient.getTreatment('split_test'), 'control', 'We should get control'); + assert.equal(otherClient.track('user', 'my_event'), true, 'We should track the event'); + } + + 
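// A minimal sketch of the lazy-init flow this suite exercises, using the SplitFactoryCS import
// declared at the top of the file. Note that `lazyInit` and `init()` are internal hooks driven
// through the factory's second argument in these tests, not documented public configuration,
// so treat this strictly as an illustration:
const lazyFactory = SplitFactoryCS({
  core: { authorizationKey: 'fake-token', key: 'user-1' }
}, (modules) => { modules.lazyInit = true; });

const lazyClient = lazyFactory.client();
lazyClient.getTreatment('split_test');                     // 'control', and no HTTP request has been made yet
lazyFactory.init();                                        // synchronization (and network activity) starts here
lazyClient.ready().then(() => lazyFactory.destroy());      // destroy() shuts down every client of the factory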
fetchMock.getOnce('https://not-called/api/splitChanges?s=1.2&since=-1', { status: 200, body: { splits: [], since: -1, till: 1457552620999 } }); + fetchMock.getOnce('https://not-called/api/splitChanges?s=1.2&since=1457552620999', { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce('https://not-called/api/memberships/user-99', { status: 200, body: {} }); + fetchMock.getOnce('https://not-called/api/memberships/other-user', { status: 200, body: {} }); + fetchMock.postOnce('https://not-called/api/testImpressions/bulk', 200); + fetchMock.postOnce('https://not-called/api/events/bulk', 200); + + splitio.init(); + await splitio.client().ready(); + assert.true(splitio.client().__getStatus().isReady, 'Split SDK is ready'); + await splitio.destroy(); + + assert.end(); + }); +} diff --git a/src/__tests__/nodeSuites/push-fallbacking.spec.js b/src/__tests__/nodeSuites/push-fallback.spec.js similarity index 98% rename from src/__tests__/nodeSuites/push-fallbacking.spec.js rename to src/__tests__/nodeSuites/push-fallback.spec.js index 5f330634e..7789236a1 100644 --- a/src/__tests__/nodeSuites/push-fallbacking.spec.js +++ b/src/__tests__/nodeSuites/push-fallback.spec.js @@ -35,9 +35,9 @@ import { settingsFactory } from '../../settings'; const key = 'nicolas@split.io'; const baseUrls = { - sdk: 'https://sdk.push-fallbacking/api', - events: 'https://events.push-fallbacking/api', - auth: 'https://auth.push-fallbacking/api' + sdk: 'https://sdk.push-fallback/api', + events: 'https://events.push-fallback/api', + auth: 'https://auth.push-fallback/api' }; const config = { core: { @@ -96,7 +96,7 @@ const MILLIS_DESTROY = MILLIS_STREAMING_DISABLED_CONTROL + settings.scheduler.fe * 2.1 secs: periodic fetch due to polling (/segmentChanges/*) * 2.1 secs: destroy client */ -export function testFallbacking(fetchMock, assert) { +export function testFallback(fetchMock, assert) { assert.plan(17); fetchMock.reset(); __setEventSource(EventSourceMock); diff --git a/src/__tests__/nodeSuites/push-initialization-retries.spec.js b/src/__tests__/nodeSuites/push-initialization-retries.spec.js index 54c681f18..88cc30af8 100644 --- a/src/__tests__/nodeSuites/push-initialization-retries.spec.js +++ b/src/__tests__/nodeSuites/push-initialization-retries.spec.js @@ -168,7 +168,7 @@ export function testPushRetriesDueToSseErrors(fetchMock, assert) { * Assert that if the main client is destroyed while authentication request is in progress and successes, the SDK doesn't open the SSE connection * * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*) and first auth attempt + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*) and first auth attempt * 0.05 secs: client destroyed * 0.1 secs: first auth attempt response (success) but not SSE connection opened since push was closed * 0.2 secs: test finished @@ -207,8 +207,8 @@ export function testSdkDestroyWhileAuthSuccess(fetchMock, assert) { * Asserts that if the client is destroyed while authentication request is in progress and fails, the SDK doesn't schedule an auth retry * * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*) and first auth attempt (fail due to bad token) - * 0.0 secs: polling (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*) and first auth attempt (fail due to bad token) + * 0.0 secs: polling (/splitChanges, /memberships/*) * 0.1 secs: second auth attempt request * 0.15 secs: client destroyed * 0.2 secs: second auth 
attempt response (fail due to network error) diff --git a/src/__tests__/nodeSuites/push-refresh-token.spec.js b/src/__tests__/nodeSuites/push-refresh-token.spec.js index 0cb2c5424..b3246a2c4 100644 --- a/src/__tests__/nodeSuites/push-refresh-token.spec.js +++ b/src/__tests__/nodeSuites/push-refresh-token.spec.js @@ -57,7 +57,7 @@ export function testRefreshToken(fetchMock, assert) { sseCount++; switch (sseCount) { case 1: - assert.true(nearlyEqual(Date.now() - start, 0), 'first connection is created inmediatelly'); + assert.true(nearlyEqual(Date.now() - start, 0), 'first connection is created immediately'); break; case 2: assert.true(nearlyEqual(Date.now() - start, MILLIS_REFRESH_TOKEN + MILLIS_CONNDELAY), 'second connection is created with a delay'); diff --git a/src/__tests__/nodeSuites/push-synchronization.spec.js b/src/__tests__/nodeSuites/push-synchronization.spec.js index 51c17724d..4ee3a20de 100644 --- a/src/__tests__/nodeSuites/push-synchronization.spec.js +++ b/src/__tests__/nodeSuites/push-synchronization.spec.js @@ -44,7 +44,6 @@ const config = { }, urls: baseUrls, streamingEnabled: true, - // debug: true, }; const settings = settingsFactory(config); diff --git a/src/__tests__/nodeSuites/readiness.spec.js b/src/__tests__/nodeSuites/readiness.spec.js index 519571ea3..33e377eb3 100644 --- a/src/__tests__/nodeSuites/readiness.spec.js +++ b/src/__tests__/nodeSuites/readiness.spec.js @@ -49,7 +49,7 @@ export default function (fetchMock, assert) { try { await client.ready(); } catch (e) { - await client.destroy(); + await splitio.destroy(); t.end(); } }); diff --git a/src/__tests__/nodeSuites/telemetry.spec.js b/src/__tests__/nodeSuites/telemetry.spec.js index 22824cd28..a6a6bb66f 100644 --- a/src/__tests__/nodeSuites/telemetry.spec.js +++ b/src/__tests__/nodeSuites/telemetry.spec.js @@ -66,7 +66,7 @@ export default async function telemetryNodejsSuite(key, fetchMock, assert) { // @TODO check if iDe value is correct assert.deepEqual(data, { - mE: {}, hE: { sp: { 500: 1 } }, tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 31, seC: 3, skC: 3, eQ: 1, eD: 0, sE: [], t: [], ufs: { sp: 0, ms: 0 } + mE: {}, hE: { sp: { 500: 1 } }, tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 32, seC: 3, skC: 3, eQ: 1, eD: 0, sE: [], t: [], ufs: {} }, 'metrics/usage JSON payload should be the expected'); finish.next(); @@ -85,7 +85,7 @@ export default async function telemetryNodejsSuite(key, fetchMock, assert) { // @TODO check if iDe value is correct assert.deepEqual(data, { mL: {}, mE: {}, hE: {}, hL: {}, // errors and latencies were popped - tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 31, seC: 3, skC: 3, eQ: 1, eD: 0, sE: [], t: [], ufs: { sp: 0, ms: 0 } + tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 32, seC: 3, skC: 3, eQ: 1, eD: 0, sE: [], t: [], ufs: {} }, '2nd metrics/usage JSON payload should be the expected'); return 200; }); diff --git a/src/__tests__/offline/browser.spec.js b/src/__tests__/offline/browser.spec.js index 10ef846d5..bce2020eb 100644 --- a/src/__tests__/offline/browser.spec.js +++ b/src/__tests__/offline/browser.spec.js @@ -9,7 +9,7 @@ const settings = settingsFactory({ core: { key: 'facundo@split.io' } }); const spySplitChanges = sinon.spy(); const spySegmentChanges = sinon.spy(); -const spyMySegments = sinon.spy(); +const spyMemberships = sinon.spy(); const spyEventsBulk = sinon.spy(); const spyTestImpressionsBulk = sinon.spy(); const spyTestImpressionsCount = sinon.spy(); @@ -27,7 +27,7 @@ const replySpy = spy => { const configMocks = () => { fetchMock.mock(new RegExp(`${url(settings, 
'/splitChanges/')}.*`), () => replySpy(spySplitChanges)); fetchMock.mock(new RegExp(`${url(settings, '/segmentChanges/')}.*`), () => replySpy(spySegmentChanges)); - fetchMock.mock(new RegExp(`${url(settings, '/mySegments/')}.*`), () => replySpy(spyMySegments)); + fetchMock.mock(new RegExp(`${url(settings, '/memberships/')}.*`), () => replySpy(spyMemberships)); fetchMock.mock(url(settings, '/events/bulk'), () => replySpy(spyEventsBulk)); fetchMock.mock(url(settings, '/testImpressions/bulk'), () => replySpy(spyTestImpressionsBulk)); fetchMock.mock(url(settings, '/testImpressions/count'), () => replySpy(spyTestImpressionsCount)); @@ -336,7 +336,7 @@ tape('Browser offline mode', function (assert) { // We test the breakdown instead of just the misc because it's faster to spot where the issue is assert.notOk(spySplitChanges.called, 'On offline mode we should not call the splitChanges endpoint.'); assert.notOk(spySegmentChanges.called, 'On offline mode we should not call the segmentChanges endpoint.'); - assert.notOk(spyMySegments.called, 'On offline mode we should not call the mySegments endpoint.'); + assert.notOk(spyMemberships.called, 'On offline mode we should not call the Memberships endpoint.'); assert.notOk(spyEventsBulk.called, 'On offline mode we should not call the events endpoint.'); assert.notOk(spyTestImpressionsBulk.called, 'On offline mode we should not call the impressions endpoint.'); assert.notOk(spyTestImpressionsCount.called, 'On offline mode we should not call the impressions count endpoint.'); diff --git a/src/__tests__/offline/node.spec.js b/src/__tests__/offline/node.spec.js index 5d1d52e8c..4d0b8e562 100644 --- a/src/__tests__/offline/node.spec.js +++ b/src/__tests__/offline/node.spec.js @@ -11,7 +11,7 @@ const settings = settingsFactory({ core: { key: 'facundo@split.io' } }); const spySplitChanges = sinon.spy(); const spySegmentChanges = sinon.spy(); -const spyMySegments = sinon.spy(); +const spyMemberships = sinon.spy(); const spyEventsBulk = sinon.spy(); const spyTestImpressionsBulk = sinon.spy(); const spyTestImpressionsCount = sinon.spy(); @@ -29,7 +29,7 @@ const replySpy = spy => { const configMocks = () => { fetchMock.mock(new RegExp(`${url(settings, '/splitChanges/')}.*`), () => replySpy(spySplitChanges)); fetchMock.mock(new RegExp(`${url(settings, '/segmentChanges/')}.*`), () => replySpy(spySegmentChanges)); - fetchMock.mock(new RegExp(`${url(settings, '/mySegments/')}.*`), () => replySpy(spyMySegments)); + fetchMock.mock(new RegExp(`${url(settings, '/memberships/')}.*`), () => replySpy(spyMemberships)); fetchMock.mock(url(settings, '/events/bulk'), () => replySpy(spyEventsBulk)); fetchMock.mock(url(settings, '/testImpressions/bulk'), () => replySpy(spyTestImpressionsBulk)); fetchMock.mock(url(settings, '/testImpressions/count'), () => replySpy(spyTestImpressionsCount)); @@ -98,7 +98,7 @@ function networkAssertions(client, assert) { // We test the breakdown instead of just the misc because it's faster to spot where the issue is assert.notOk(spySplitChanges.called, 'On offline mode we should not call the splitChanges endpoint.'); assert.notOk(spySegmentChanges.called, 'On offline mode we should not call the segmentChanges endpoint.'); - assert.notOk(spyMySegments.called, 'On offline mode we should not call the mySegments endpoint.'); + assert.notOk(spyMemberships.called, 'On offline mode we should not call the memberships endpoint.'); assert.notOk(spyEventsBulk.called, 'On offline mode we should not call the events endpoint.'); 
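// For context, a minimal sketch of the offline ("localhost") mode that this suite exercises,
// assuming the documented server-side configuration (the features file path is illustrative):
import { SplitFactory } from '../../factory/node';

const offlineFactory = SplitFactory({
  core: { authorizationKey: 'localhost' },
  features: '/path/to/.split',            // mocked feature flags file; no Split endpoints are hit
  scheduler: { offlineRefreshRate: 15 }   // seconds between re-reads of the file
});
offlineFactory.client().ready().then(() => {
  offlineFactory.client().getTreatment('some-key', 'my_feature'); // treatment comes from the file
});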
assert.notOk(spyTestImpressionsBulk.called, 'On offline mode we should not call the impressions endpoint.'); assert.notOk(spyTestImpressionsCount.called, 'On offline mode we should not call the impressions count endpoint.'); diff --git a/src/__tests__/online/browser.spec.js b/src/__tests__/online/browser.spec.js index f862a3ea8..05fd60de2 100644 --- a/src/__tests__/online/browser.spec.js +++ b/src/__tests__/online/browser.spec.js @@ -10,7 +10,7 @@ import telemetrySuite from '../browserSuites/telemetry.spec'; import impressionsListenerSuite from '../browserSuites/impressions-listener.spec'; import readinessSuite from '../browserSuites/readiness.spec'; import readyFromCache from '../browserSuites/ready-from-cache.spec'; -import { withoutBindingTT, bindingTT } from '../browserSuites/events.spec'; +import { withoutBindingTT } from '../browserSuites/events.spec'; import sharedInstantiationSuite from '../browserSuites/shared-instantiation.spec'; import managerSuite from '../browserSuites/manager.spec'; import ignoreIpAddressesSettingSuite from '../browserSuites/ignore-ip-addresses-setting.spec'; @@ -25,10 +25,10 @@ import flagSets from '../browserSuites/flag-sets.spec'; import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; -import mySegmentsNicolas from '../mocks/mysegments.nicolas@split.io.json'; -import mySegmentsMarcio from '../mocks/mysegments.marcio@split.io.json'; -import mySegmentsEmmanuel from '../mocks/mysegments.emmanuel@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; +import membershipsNicolas from '../mocks/memberships.nicolas@split.io.json'; +import membershipsMarcio from '../mocks/memberships.marcio@split.io.json'; +import membershipsEmmanuel from '../mocks/memberships.emmanuel@split.io.json'; const settings = settingsFactory({ core: { @@ -87,12 +87,12 @@ tape('## E2E CI Tests ##', function (assert) { //If we change the mocks, we need to clear localstorage. Cleaning up after testing ensures "fresh data". 
localStorage.clear(); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); - fetchMock.get(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolas }); - fetchMock.get(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); - fetchMock.get(url(settings, '/mySegments/emmanuel%40split.io'), { status: 200, body: mySegmentsEmmanuel }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); + fetchMock.get(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolas }); + fetchMock.get(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); + fetchMock.get(url(settings, '/memberships/emmanuel%40split.io'), { status: 200, body: membershipsEmmanuel }); fetchMock.post(url(settings, '/testImpressions/bulk'), 200); fetchMock.post(url(settings, '/testImpressions/count'), 200); Math.random = () => 0.5; // SDKs without telemetry @@ -112,10 +112,8 @@ tape('## E2E CI Tests ##', function (assert) { assert.test('E2E / Telemetry', telemetrySuite.bind(null, fetchMock)); /* Check events */ assert.test('E2E / Events', withoutBindingTT.bind(null, fetchMock)); - assert.test('E2E / Events with TT bound', bindingTT.bind(null, fetchMock)); /* Check shared clients */ - assert.test('E2E / Shared instances', sharedInstantiationSuite.bind(null, false, false, fetchMock)); - assert.test('E2E / Shared instances with Traffic Type on factory settings', sharedInstantiationSuite.bind(null, true, false, fetchMock)); + assert.test('E2E / Shared instances', sharedInstantiationSuite.bind(null, false, true, fetchMock)); /* Validate user consent */ assert.test('E2E / User consent', userConsent.bind(null, fetchMock)); /* Check basic manager functionality */ @@ -127,7 +125,7 @@ tape('## E2E CI Tests ##', function (assert) { /* Check that impressions and events are sended to backend via Beacon API or Fetch when pagehide/visibilitychange events are triggered. 
*/ assert.test('E2E / Use Beacon API (or Fetch if not available) to send remaining impressions and events when browser page is unload or hidden', useBeaconApiSuite.bind(null, fetchMock)); assert.test('E2E / Use Beacon API DEBUG (or Fetch if not available) to send remaining impressions and events when browser page is unload or hidden', useBeaconDebugApiSuite.bind(null, fetchMock)); - /* Validate ready from cache behaviour (might be merged into another suite if we end up having simple behavior around it as expected) */ + /* Validate ready from cache behavior (might be merged into another suite if we end up having simple behavior around it as expected) */ assert.test('E2E / Readiness from cache', readyFromCache.bind(null, fetchMock)); /* Validate readiness with ready promises */ assert.test('E2E / Ready promise', readyPromiseSuite.bind(null, fetchMock)); diff --git a/src/__tests__/online/node.spec.js b/src/__tests__/online/node.spec.js index 94ca9d830..a3e4fcac7 100644 --- a/src/__tests__/online/node.spec.js +++ b/src/__tests__/online/node.spec.js @@ -3,6 +3,9 @@ import fetchMock from '../testUtils/nodeFetchMock'; import { url } from '../testUtils'; import { settingsFactory } from '../../settings/node'; +import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; +import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; + import evaluationsSuite from '../nodeSuites/evaluations.spec'; import evaluationsSemverSuite from '../nodeSuites/evaluations-semver.spec'; import eventsSuite from '../nodeSuites/events.spec'; @@ -18,10 +21,8 @@ import ipAddressesSettingDebug from '../nodeSuites/ip-addresses-setting.debug.sp import readinessSuite from '../nodeSuites/readiness.spec'; import readyPromiseSuite from '../nodeSuites/ready-promise.spec'; import { fetchSpecificSplits, fetchSpecificSplitsForFlagSets } from '../nodeSuites/fetch-specific-splits.spec'; - -import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; -import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; import flagSets from '../nodeSuites/flag-sets.spec'; +import lazyInitSuite from '../nodeSuites/lazy-init.spec'; const config = { core: { @@ -94,5 +95,7 @@ tape('## Node JS - E2E CI Tests ##', async function (assert) { /* Validate flag sets */ assert.test('E2E / Flag sets', flagSets.bind(null, fetchMock)); + assert.test('E2E / SplitFactory with lazy init', lazyInitSuite.bind(null, settings, fetchMock)); + assert.end(); }); diff --git a/src/__tests__/push/browser.spec.js b/src/__tests__/push/browser.spec.js index 2ff84728d..0d8dbeee1 100644 --- a/src/__tests__/push/browser.spec.js +++ b/src/__tests__/push/browser.spec.js @@ -4,7 +4,7 @@ import { testAuthWithPushDisabled, testAuthWith401, testNoEventSource, testSSEWi import { testPushRetriesDueToAuthErrors, testPushRetriesDueToSseErrors, testSdkDestroyWhileAuthRetries, testSdkDestroyWhileAuthSuccess, testSdkDestroyWhileConnDelay } from '../browserSuites/push-initialization-retries.spec'; import { testSynchronization } from '../browserSuites/push-synchronization.spec'; import { testSynchronizationRetries } from '../browserSuites/push-synchronization-retries.spec'; -import { testFallbacking } from '../browserSuites/push-fallbacking.spec'; +import { testFallback } from '../browserSuites/push-fallback.spec'; import { testRefreshToken } from '../browserSuites/push-refresh-token.spec'; import { testSplitKillOnReadyFromCache } from '../browserSuites/push-corner-cases.spec'; import { testFlagSets } from 
'../browserSuites/push-flag-sets.spec'; @@ -32,7 +32,7 @@ tape('## Browser JS - E2E CI Tests for PUSH ##', function (assert) { assert.test('E2E / PUSH synchronization: happy paths', testSynchronization.bind(null, fetchMock)); assert.test('E2E / PUSH synchronization: retries', testSynchronizationRetries.bind(null, fetchMock)); - assert.test('E2E / PUSH fallbacking, CONTROL, OCCUPANCY and STREAMING_RESET messages', testFallbacking.bind(null, fetchMock)); + assert.test('E2E / PUSH fallback, CONTROL, OCCUPANCY and STREAMING_RESET messages', testFallback.bind(null, fetchMock)); assert.test('E2E / PUSH refresh token and connection delay', testRefreshToken.bind(null, fetchMock)); diff --git a/src/__tests__/push/node.spec.js b/src/__tests__/push/node.spec.js index 6345e15db..ddb756a00 100644 --- a/src/__tests__/push/node.spec.js +++ b/src/__tests__/push/node.spec.js @@ -4,7 +4,7 @@ import { testAuthWithPushDisabled, testAuthWith401, testAuthWith400, testNoEvent import { testPushRetriesDueToAuthErrors, testPushRetriesDueToSseErrors, testSdkDestroyWhileAuthRetries, testSdkDestroyWhileAuthSuccess } from '../nodeSuites/push-initialization-retries.spec'; import { testSynchronization } from '../nodeSuites/push-synchronization.spec'; import { testSynchronizationRetries } from '../nodeSuites/push-synchronization-retries.spec'; -import { testFallbacking } from '../nodeSuites/push-fallbacking.spec'; +import { testFallback } from '../nodeSuites/push-fallback.spec'; import { testRefreshToken } from '../nodeSuites/push-refresh-token.spec'; import { testFlagSets } from '../nodeSuites/push-flag-sets.spec'; @@ -33,7 +33,7 @@ tape('## Node JS - E2E CI Tests for PUSH ##', async function (assert) { assert.test('E2E / PUSH synchronization: happy paths', testSynchronization.bind(null, fetchMock)); assert.test('E2E / PUSH synchronization: retries', testSynchronizationRetries.bind(null, fetchMock)); - assert.test('E2E / PUSH fallbacking, CONTROL and OCCUPANCY messages', testFallbacking.bind(null, fetchMock)); + assert.test('E2E / PUSH fallback, CONTROL and OCCUPANCY messages', testFallback.bind(null, fetchMock)); assert.test('E2E / PUSH refresh token and connection delay', testRefreshToken.bind(null, fetchMock)); diff --git a/src/__tests__/testUtils/index.js b/src/__tests__/testUtils/index.js index 7f01fc5e4..5994a3c30 100644 --- a/src/__tests__/testUtils/index.js +++ b/src/__tests__/testUtils/index.js @@ -14,7 +14,7 @@ export function nearlyEqual(actual, expected, epsilon = DEFAULT_ERROR_MARGIN) { } /** - * mock the basic behaviour for `/segmentChanges` endpoint: + * mock the basic behavior for `/segmentChanges` endpoint: * - when `?since=-1`, it returns the given segment `keys` in `added` list. * - otherwise, it returns empty `added` and `removed` lists, and the same since and till values. 
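// A hedged sketch of that behavior using fetch-mock's function responses (helper name, segment
// name and change number below are illustrative, not the actual implementation that follows):
function segmentChangesMockSketch(fetchMock, matcher, keys, changeNumber) {
  fetchMock.get(matcher, (requestUrl) => {
    const since = Number(new URL(requestUrl).searchParams.get('since'));
    const added = since === -1 ? keys : [];
    return {
      status: 200,
      body: { name: 'mocked_segment', added, removed: [], since, till: since === -1 ? changeNumber : since }
    };
  });
}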
* diff --git a/src/factory/browser.js b/src/factory/browser.js index 9b877a36d..8b71ed235 100644 --- a/src/factory/browser.js +++ b/src/factory/browser.js @@ -5,13 +5,13 @@ import { pollingManagerCSFactory } from '@splitsoftware/splitio-commons/src/sync import { InLocalStorage } from '@splitsoftware/splitio-commons/src/storages/inLocalStorage'; import { InMemoryStorageCSFactory } from '@splitsoftware/splitio-commons/src/storages/inMemory/InMemoryStorageCS'; import { sdkManagerFactory } from '@splitsoftware/splitio-commons/src/sdkManager'; -import { sdkClientMethodCSFactory } from '@splitsoftware/splitio-commons/src/sdkClient/sdkClientMethodCSWithTT'; +import { sdkClientMethodCSFactory } from '@splitsoftware/splitio-commons/src/sdkClient/sdkClientMethodCS'; import { impressionObserverCSFactory } from '@splitsoftware/splitio-commons/src/trackers/impressionObserver/impressionObserverCS'; -import { integrationsManagerFactory } from '@splitsoftware/splitio-commons/src/integrations/browser'; import { __InLocalStorageMockFactory } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/storage/storageCS'; import { sdkFactory } from '@splitsoftware/splitio-commons/src/sdkFactory'; import { LOCALHOST_MODE, STORAGE_LOCALSTORAGE } from '@splitsoftware/splitio-commons/src/utils/constants'; import { createUserConsentAPI } from '@splitsoftware/splitio-commons/src/consent/sdkUserConsent'; +import { localhostFromObjectFactory } from '@splitsoftware/splitio-commons/src/sync/offline/LocalhostFromObject'; import { settingsFactory } from '../settings/browser'; import { platform, SignalListener } from '../platform'; @@ -49,8 +49,6 @@ function getModules(settings) { SignalListener, - integrationsManagerFactory: settings.integrations && settings.integrations.length > 0 ? 
integrationsManagerFactory.bind(null, settings.integrations) : undefined, - impressionsObserverFactory: impressionObserverCSFactory, extraProps: (params) => { @@ -63,7 +61,7 @@ function getModules(settings) { switch (settings.mode) { case LOCALHOST_MODE: modules.splitApiFactory = undefined; - modules.syncManagerFactory = settings.sync.localhostMode; + modules.syncManagerFactory = localhostFromObjectFactory; modules.SignalListener = undefined; break; } diff --git a/src/factory/node.js b/src/factory/node.js index d8141037a..4142741b9 100644 --- a/src/factory/node.js +++ b/src/factory/node.js @@ -10,6 +10,7 @@ import { impressionObserverSSFactory } from '@splitsoftware/splitio-commons/src/ import { sdkFactory } from '@splitsoftware/splitio-commons/src/sdkFactory'; import { CONSUMER_MODE, LOCALHOST_MODE } from '@splitsoftware/splitio-commons/src/utils/constants'; +import { localhostFromFileFactory } from '../sync/offline/LocalhostFromFile'; import { settingsFactory } from '../settings/node'; import { platform, SignalListener } from '../platform'; import { bloomFilterFactory } from '../platform/filter/bloomFilter'; @@ -53,7 +54,7 @@ function getModules(settings) { switch (settings.mode) { case LOCALHOST_MODE: modules.splitApiFactory = undefined; - modules.syncManagerFactory = settings.sync.localhostMode; + modules.syncManagerFactory = localhostFromFileFactory; modules.SignalListener = undefined; break; case CONSUMER_MODE: diff --git a/src/settings/__tests__/browser.spec.js b/src/settings/__tests__/browser.spec.js index 0a37e0772..481b9e95e 100644 --- a/src/settings/__tests__/browser.spec.js +++ b/src/settings/__tests__/browser.spec.js @@ -1,36 +1,6 @@ import tape from 'tape-catch'; import { settingsFactory } from '../browser'; -tape('SETTINGS / Integrations should be properly parsed', assert => { - const settings = settingsFactory({ - core: { - authorizationKey: 'dummy token' - }, - integrations: [ - { type: 'GOOGLE_ANALYTICS_TO_SPLIT', prefix: 'prefix1' }, - { type: 'INVALID_INTEGRATION', prefix: 'prefix2' }, - { type: 'SPLIT_TO_GOOGLE_ANALYTICS', prefix: 'prefix3' }, - { type: 'INVALID_INTEGRATION_2', prefix: 'prefix4' }, - {}, - 'INVALID' - ] - }); - - assert.deepEqual(settings.integrations, [ - { type: 'GOOGLE_ANALYTICS_TO_SPLIT', prefix: 'prefix1' }, - { type: 'SPLIT_TO_GOOGLE_ANALYTICS', prefix: 'prefix3' } - ], 'Filters invalid integrations from `integrations` array'); - - assert.deepEqual(settingsFactory({ - core: { - authorizationKey: 'dummy token' - }, - integrations: 'INVALID' - }).integrations, [], 'Returns an empty array if `integrations` is an invalid object'); - - assert.end(); -}); - tape('SETTINGS / Consent is overwritable and "GRANTED" by default in client-side', assert => { let settings = settingsFactory({}); assert.equal(settings.userConsent, 'GRANTED', 'userConsent defaults to granted if not provided.'); diff --git a/src/settings/__tests__/node.spec.js b/src/settings/__tests__/node.spec.js index 54418b874..4e0942e0a 100644 --- a/src/settings/__tests__/node.spec.js +++ b/src/settings/__tests__/node.spec.js @@ -134,8 +134,8 @@ tape('SETTINGS / Log error and fallback to InMemory storage if no valid storage ]; assert.deepEqual(logSpy.args, [ - ['[ERROR] splitio => The provided REDIS storage is invalid for this mode. It requires consumer mode. Fallbacking into default MEMORY storage.'], - ['[ERROR] splitio => The provided \'INVALID\' storage type is invalid. Fallbacking into default MEMORY storage.'] + ['[ERROR] splitio => The provided REDIS storage is invalid for this mode. 
It requires consumer mode. Fallback into default MEMORY storage.'], + ['[ERROR] splitio => The provided \'INVALID\' storage type is invalid. Fallback into default MEMORY storage.'] ], 'logs error message'); settings.forEach(setting => { assert.equal(setting.storage.type, 'MEMORY', 'fallbacks to memory storage'); }); diff --git a/src/settings/browser.js b/src/settings/browser.js index d3f04443e..584b11a23 100644 --- a/src/settings/browser.js +++ b/src/settings/browser.js @@ -1,21 +1,17 @@ import { settingsValidation } from '@splitsoftware/splitio-commons/src/utils/settingsValidation'; import { validateRuntime } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/runtime'; import { validateLogger } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/logger/builtinLogger'; -import { LocalhostFromObject } from '@splitsoftware/splitio-commons/src/sync/offline/LocalhostFromObject'; import { validateConsent } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/consent'; import { defaults } from './defaults/browser'; import { validateStorage } from './storage/browser'; -import { validateIntegrations } from './integrations/browser'; const params = { defaults, - acceptKey: true, acceptTT: true, // Client with bound key and optional TT + acceptKey: true, // Client with bound key runtime: validateRuntime, storage: validateStorage, - integrations: validateIntegrations, logger: validateLogger, - localhost: () => LocalhostFromObject(), consent: validateConsent, }; diff --git a/src/settings/defaults/version.js b/src/settings/defaults/version.js index 271ef6cd7..087efdda2 100644 --- a/src/settings/defaults/version.js +++ b/src/settings/defaults/version.js @@ -1 +1 @@ -export const packageVersion = '10.28.0'; +export const packageVersion = '11.0.0'; diff --git a/src/settings/node.js b/src/settings/node.js index 4d65e46bd..5fe7d5535 100644 --- a/src/settings/node.js +++ b/src/settings/node.js @@ -1,18 +1,18 @@ import { settingsValidation } from '@splitsoftware/splitio-commons/src/utils/settingsValidation'; import { validateLogger } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/logger/builtinLogger'; -import { LocalhostFromFile } from '../sync/offline/LocalhostFromFile'; import { defaults } from './defaults/node'; import { validateStorage } from './storage/node'; import { validateRuntime } from './runtime/node'; +const FLAG_SPEC_VERSION = '1.1'; + const params = { defaults, runtime: validateRuntime, storage: validateStorage, logger: validateLogger, - localhost: () => LocalhostFromFile(), - consent: () => undefined, // resets settings.userConsent to the default + flagSpec: () => FLAG_SPEC_VERSION // In Node.js the SDK ignores `config.integrations`, so a validator for integrations is not required }; diff --git a/src/settings/storage/browser.js b/src/settings/storage/browser.js index 92daa2cd2..10a9e3eea 100644 --- a/src/settings/storage/browser.js +++ b/src/settings/storage/browser.js @@ -31,7 +31,7 @@ export function validateStorage(settings) { if (type !== STORAGE_MEMORY && type !== STORAGE_LOCALSTORAGE || type === STORAGE_LOCALSTORAGE && !isLocalStorageAvailable()) { fallbackToMemory(); - log.error('Invalid or unavailable storage. Fallbacking into MEMORY storage'); + log.error('Invalid or unavailable storage. 
Fallback into MEMORY storage'); } return { diff --git a/src/settings/storage/node.js b/src/settings/storage/node.js index ea85045df..b5296889f 100644 --- a/src/settings/storage/node.js +++ b/src/settings/storage/node.js @@ -16,7 +16,7 @@ export function validateStorage(settings) { case STORAGE_REDIS: { // If passing REDIS storage in localhost or standalone mode, we log an error and fallback to MEMORY storage if (mode === STANDALONE_MODE || mode === LOCALHOST_MODE) { - log.error('The provided REDIS storage is invalid for this mode. It requires consumer mode. Fallbacking into default MEMORY storage.'); + log.error('The provided REDIS storage is invalid for this mode. It requires consumer mode. Fallback into default MEMORY storage.'); return { type: STORAGE_MEMORY, prefix @@ -74,7 +74,7 @@ export function validateStorage(settings) { // If passing MEMORY storage in consumer mode, throw an error (no way to fallback to REDIS storage) if (mode === CONSUMER_MODE) throw new Error('A REDIS storage is required on consumer mode'); // If passing an invalid storage type, log an error - if (type !== STORAGE_MEMORY) log.error(`The provided '${type}' storage type is invalid. Fallbacking into default MEMORY storage.`); + if (type !== STORAGE_MEMORY) log.error(`The provided '${type}' storage type is invalid. Fallback into default MEMORY storage.`); return { type: STORAGE_MEMORY, prefix diff --git a/src/sync/offline/LocalhostFromFile.js b/src/sync/offline/LocalhostFromFile.js index 3f63173ac..3dca10929 100644 --- a/src/sync/offline/LocalhostFromFile.js +++ b/src/sync/offline/LocalhostFromFile.js @@ -1,11 +1,6 @@ import { splitsParserFromFileFactory } from './splitsParserFromFile'; import { syncManagerOfflineFactory } from '@splitsoftware/splitio-commons/src/sync/offline/syncManagerOffline'; -// Singleton instance of the factory function for offline SyncManager from YAML file (a.k.a. localhostFromFile) +// Singleton instance of the factory function for offline SyncManager from YAML file // It uses NodeJS APIs. -const localhostFromFile = syncManagerOfflineFactory(splitsParserFromFileFactory); -localhostFromFile.type = 'LocalhostFromFile'; - -export function LocalhostFromFile() { - return localhostFromFile; -} +export const localhostFromFileFactory = syncManagerOfflineFactory(splitsParserFromFileFactory); diff --git a/src/sync/offline/splitsParserFromFile.js b/src/sync/offline/splitsParserFromFile.js index aef992544..13f2f1c29 100644 --- a/src/sync/offline/splitsParserFromFile.js +++ b/src/sync/offline/splitsParserFromFile.js @@ -31,7 +31,7 @@ function configFilesPath(configFilePath) { } // This function is not pure nor meant to be. Here we apply modifications to cover -// for behaviour that's ensured by the BE. +// for behavior that's ensured by the BE. 
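// For reference, a hedged sketch of the storage rules validated above (the Redis options shown
// are illustrative): REDIS storage is only accepted in consumer mode, while in standalone or
// localhost mode the validator falls back to in-memory storage and logs the error asserted in
// the settings tests.
import { SplitFactory } from '@splitsoftware/splitio';

const consumerFactory = SplitFactory({
  mode: 'consumer',
  core: { authorizationKey: 'YOUR_SDK_KEY' },
  storage: { type: 'REDIS', prefix: 'SPLITIO', options: { url: 'redis://localhost:6379/0' } }
});
const consumerClient = consumerFactory.client(); // evaluates against the data kept in Redis
// With the default 'standalone' mode, the same storage block is ignored and MEMORY is used.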
function arrangeConditions(mocksData) { // Iterate through each feature flag data forOwn(mocksData, data => { diff --git a/ts-tests/index.ts b/ts-tests/index.ts index a37c52808..2e2588d9f 100644 --- a/ts-tests/index.ts +++ b/ts-tests/index.ts @@ -11,7 +11,9 @@ * @author Nico Zelaya */ -import { SplitFactory } from '@splitsoftware/splitio'; +import { SplitFactory } from '../types/index'; +import { SplitFactory as SplitFactoryCS } from '../types/client'; +import { SplitFactory as SplitFactorySS } from '../types/server'; let stringPromise: Promise; let splitNamesPromise: Promise; @@ -163,28 +165,24 @@ browserSettings = { } }; // With sync settings should return ISDK, if settings have async storage it should return IAsyncSDK -SDK = SplitFactory(browserSettings); SDK = SplitFactory(nodeSettings); AsyncSDK = SplitFactory(asyncSettings); BrowserSDK = SplitFactory(browserSettings); +SDK = SplitFactorySS(nodeSettings); +AsyncSDK = SplitFactorySS(asyncSettings); +BrowserSDK = SplitFactoryCS(browserSettings); // The settings values the SDK expose. const instantiatedSettingsCore: { authorizationKey: string, key: SplitIO.SplitKey, - trafficType: string, labelsEnabled: boolean, IPAddressesEnabled: boolean } = SDK.settings.core; -const instantiatedSettingsMode: ('standalone' | 'consumer') = SDK.settings.mode; +const instantiatedSettingsMode: ('standalone' | 'consumer' | 'consumer_partial' | 'localhost') = SDK.settings.mode; const instantiatedSettingsScheduler: { [key: string]: number } = SDK.settings.scheduler; const instantiatedSettingsStartup: { [key: string]: number } = SDK.settings.startup; -const instantiatedSettingsStorage: { - prefix: string, - options: Object, - // It can have any of the storages. - type: SplitIO.NodeSyncStorage | SplitIO.NodeAsyncStorage | SplitIO.BrowserStorage -} = SDK.settings.storage; +const instantiatedSettingsStorage = SDK.settings.storage as SplitIO.StorageOptions; const instantiatedSettingsUrls: { [key: string]: string } = SDK.settings.urls; const instantiatedSettingsVersion: string = SDK.settings.version; let instantiatedSettingsFeatures = SDK.settings.features as SplitIO.MockedFeaturesMap; @@ -195,9 +193,8 @@ SDK.settings.features = { 'split_x': 'on' }; // Browser // Client and Manager client = SDK.client(); -client = SDK.client('a customer key'); -client = SDK.client('a customer key', 'a traffic type'); manager = SDK.manager(); +manager = BrowserSDK.manager(); // Today async clients are only possible on Node. Shared client creation not available here. asyncClient = AsyncSDK.client(); asyncManager = AsyncSDK.manager(); @@ -240,79 +237,77 @@ const b: number = client.listenerCount(splitEvent); let nodeEventEmitter: NodeJS.EventEmitter = client; // Ready, destroy and flush -const readyPromise: Promise = client.ready(); -const destroyPromise: Promise = client.destroy(); -// @ts-ignore -const flushPromise: Promise = client.flush(); +let promise: Promise = client.ready(); +promise = client.destroy(); +promise = SDK.destroy(); +// @TODO not public yet +// promise = client.flush(); // We can call getTreatment with or without a key. treatment = client.getTreatment(splitKey, 'mySplit'); -treatment = client.getTreatment('mySplit'); +treatment = browserClient.getTreatment('mySplit'); // Attributes parameter is optional on both signatures. treatment = client.getTreatment(splitKey, 'mySplit', attributes); -treatment = client.getTreatment('mySplit', attributes); +treatment = browserClient.getTreatment('mySplit', attributes); // We can call getTreatments with or without a key. 
treatments = client.getTreatments(splitKey, ['mySplit']); -treatments = client.getTreatments(['mySplit']); +treatments = browserClient.getTreatments(['mySplit']); // Attributes parameter is optional on both signatures. treatments = client.getTreatments(splitKey, ['mySplit'], attributes); -treatments = client.getTreatments(['mySplit'], attributes); +treatments = browserClient.getTreatments(['mySplit'], attributes); // We can call getTreatmentWithConfig with or without a key. treatmentWithConfig = client.getTreatmentWithConfig(splitKey, 'mySplit'); -treatmentWithConfig = client.getTreatmentWithConfig('mySplit'); +treatmentWithConfig = browserClient.getTreatmentWithConfig('mySplit'); // Attributes parameter is optional on both signatures. treatmentWithConfig = client.getTreatmentWithConfig(splitKey, 'mySplit', attributes); -treatmentWithConfig = client.getTreatmentWithConfig('mySplit', attributes); +treatmentWithConfig = browserClient.getTreatmentWithConfig('mySplit', attributes); // We can call getTreatmentsWithConfig with or without a key. treatmentsWithConfig = client.getTreatmentsWithConfig(splitKey, ['mySplit']); -treatmentsWithConfig = client.getTreatmentsWithConfig(['mySplit']); +treatmentsWithConfig = browserClient.getTreatmentsWithConfig(['mySplit']); // Attributes parameter is optional on both signatures. treatmentsWithConfig = client.getTreatmentsWithConfig(splitKey, ['mySplit'], attributes); -treatmentsWithConfig = client.getTreatmentsWithConfig(['mySplit'], attributes); +treatmentsWithConfig = browserClient.getTreatmentsWithConfig(['mySplit'], attributes); -// We can call getTreatmentsByFlagSet without a key. +// We can call getTreatmentsByFlagSet with or without a key. treatments = client.getTreatmentsByFlagSet(splitKey, 'set_a'); -treatments = client.getTreatmentsByFlagSet('set_a'); +treatments = browserClient.getTreatmentsByFlagSet('set_a'); // Attributes parameter is optional. treatments = client.getTreatmentsByFlagSet(splitKey, 'set_a', attributes); -treatments = client.getTreatmentsByFlagSet('set_a', attributes); +treatments = browserClient.getTreatmentsByFlagSet('set_a', attributes); -// We can call getTreatmentsByFlagSets without a key. +// We can call getTreatmentsByFlagSets with or without a key. treatments = client.getTreatmentsByFlagSets(splitKey, ['set_a']); -treatments = client.getTreatmentsByFlagSets(['set_a']); +treatments = browserClient.getTreatmentsByFlagSets(['set_a']); // Attributes parameter is optional. treatments = client.getTreatmentsByFlagSets(splitKey, ['set_a'], attributes); -treatments = client.getTreatmentsByFlagSets(['set_a'], attributes); +treatments = browserClient.getTreatmentsByFlagSets(['set_a'], attributes); -// We can call getTreatmentsWithConfigByFlagSet without a key. +// We can call getTreatmentsWithConfigByFlagSet with or without a key. treatmentsWithConfig = client.getTreatmentsWithConfigByFlagSet(splitKey, 'set_a'); -treatmentsWithConfig = client.getTreatmentsWithConfigByFlagSet('set_a'); +treatmentsWithConfig = browserClient.getTreatmentsWithConfigByFlagSet('set_a'); // Attributes parameter is optional. treatmentsWithConfig = client.getTreatmentsWithConfigByFlagSet(splitKey, 'set_a', attributes); -treatmentsWithConfig = client.getTreatmentsWithConfigByFlagSet('set_a', attributes); +treatmentsWithConfig = browserClient.getTreatmentsWithConfigByFlagSet('set_a', attributes); -// We can call getTreatmentsWithConfigByFlagSets without a key. +// We can call getTreatmentsWithConfigByFlagSets with or without a key. 
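// A short illustration of the flag set evaluation calls being checked here, using the `client`
// (server-side, key passed per call) and `browserClient` (key bound at the factory) instances
// declared in this file; flag set, key and attribute names are illustrative:
const setTreatments = client.getTreatmentsByFlagSet('some-key', 'set_a');                   // { flag_1: 'on', ... }
const setTreatmentsCS = browserClient.getTreatmentsByFlagSet('set_a', { plan: 'premium' }); // attributes are optional
const setTreatmentsWithConfig = browserClient.getTreatmentsWithConfigByFlagSet('set_a');    // values carry treatment and config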
treatmentsWithConfig = client.getTreatmentsWithConfigByFlagSets(splitKey, ['set_a']); -treatmentsWithConfig = client.getTreatmentsWithConfigByFlagSets(['set_a']); +treatmentsWithConfig = browserClient.getTreatmentsWithConfigByFlagSets(['set_a']); // Attributes parameter is optional. treatmentsWithConfig = client.getTreatmentsWithConfigByFlagSets(splitKey, ['set_a'], attributes); -treatmentsWithConfig = client.getTreatmentsWithConfigByFlagSets(['set_a'], attributes); +treatmentsWithConfig = browserClient.getTreatmentsWithConfigByFlagSets(['set_a'], attributes); -// We can call track with or without a key. Traffic type can also be bound to the client. +// We can call track with or without a key. tracked = client.track(splitKey, 'myTrafficType', 'myEventType'); // all params -tracked = client.track('myTrafficType', 'myEventType'); // key bound, tt provided. -tracked = client.track('myEventType'); // key and tt bound. +tracked = browserClient.track('myTrafficType', 'myEventType'); // key bound, tt provided. // Value parameter is optional on all signatures. tracked = client.track(splitKey, 'myTrafficType', 'myEventType', 10); -tracked = client.track('myTrafficType', 'myEventType', 10); -tracked = client.track('myEventType', 10); +tracked = browserClient.track('myTrafficType', 'myEventType', 10); // Properties parameter is optional on all signatures. tracked = client.track(splitKey, 'myTrafficType', 'myEventType', 10, { prop1: 1, prop2: '2', prop3: false, prop4: null }); -tracked = client.track('myTrafficType', 'myEventType', null, { prop1: 1, prop2: '2', prop3: false, prop4: null }); -tracked = client.track('myEventType', undefined, { prop1: 1, prop2: '2', prop3: false, prop4: null }); +tracked = browserClient.track('myTrafficType', 'myEventType', undefined, { prop1: 1, prop2: '2', prop3: false, prop4: null }); /*** Repeating tests for Async Client ***/ @@ -332,10 +327,11 @@ const b1: number = asyncClient.listenerCount(splitEvent); nodeEventEmitter = asyncClient; // Ready, destroy and flush (same as for sync client, just for interface checking) -const readyPromise1: Promise = asyncClient.ready(); -asyncClient.destroy(); -// @ts-ignore -asyncClient.flush(); +promise = asyncClient.ready(); +promise = asyncClient.destroy(); +promise = AsyncSDK.destroy(); +// @TODO not public yet +// promise = asyncClient.flush(); // We can call getTreatment but always with a key. asyncTreatment = asyncClient.getTreatment(splitKey, 'mySplit'); @@ -391,7 +387,7 @@ splitView = manager.split('mySplit'); splitViews = manager.splits(); // Manager implements ready promise. -const managerReadyPromise: Promise = manager.ready(); +promise = manager.ready(); // Manager implements methods from NodeJS.Events. Testing a few. manager = manager.on(splitEvent, () => { }); @@ -415,7 +411,7 @@ splitViewAsync = asyncManager.split('mySplit'); splitViewsAsync = asyncManager.splits(); // asyncManager implements ready promise. -const asyncManagerReadyPromise: Promise = asyncManager.ready(); +promise = asyncManager.ready(); // asyncManager implements methods from NodeJS.Events. Testing a few. 
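// A short illustration of the v11 track and shutdown calls covered above: traffic types can no
// longer be bound to a client, so the browser client takes the traffic type as its first argument
// and the server-side client additionally takes the key (event names and values are illustrative):
const trackedNode = client.track('some-key', 'user', 'page_view', 10, { plan: 'premium' });
const trackedBrowser = browserClient.track('user', 'page_view');
SDK.destroy().then(() => { /* every client created by this factory has been destroyed */ });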
asyncManager = asyncManager.on(splitEvent, () => { }); @@ -492,40 +488,10 @@ userConsent = BrowserSDK.UserConsent.Status.UNKNOWN; // Split filters let splitFilters: SplitIO.SplitFilter[] = [{ type: 'bySet', values: ['set_a', 'set_b'] }, { type: 'byName', values: ['my_split_1', 'my_split_1'] }, { type: 'byPrefix', values: ['my_split', 'test_split_'] }] -// Browser integrations -let fieldsObjectSample: UniversalAnalytics.FieldsObject = { hitType: 'event', eventAction: 'action' }; -let eventDataSample: SplitIO.EventData = { eventTypeId: 'someEventTypeId', value: 10, properties: {} } - -let googleAnalyticsToSplitConfig: SplitIO.IGoogleAnalyticsToSplitConfig = { - type: 'GOOGLE_ANALYTICS_TO_SPLIT', -}; -let splitToGoogleAnalyticsConfig: SplitIO.ISplitToGoogleAnalyticsConfig = { - type: 'SPLIT_TO_GOOGLE_ANALYTICS', -}; - -let customGoogleAnalyticsToSplitConfig: SplitIO.IGoogleAnalyticsToSplitConfig = { - type: 'GOOGLE_ANALYTICS_TO_SPLIT', - hits: false, - filter: function (model: UniversalAnalytics.Model): boolean { return true; }, - mapper: function (model: UniversalAnalytics.Model, defaultMapping: SplitIO.EventData): SplitIO.EventData { return eventDataSample; }, - prefix: 'PREFIX', - identities: [{ key: 'key1', trafficType: 'tt1' }, { key: 'key2', trafficType: 'tt2' }], - autoRequire: true -}; -let customSplitToGoogleAnalyticsConfig: SplitIO.ISplitToGoogleAnalyticsConfig = { - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - events: false, - impressions: true, - filter: function (model: SplitIO.IntegrationData): boolean { return true; }, - mapper: function (model: SplitIO.IntegrationData, defaultMapping: UniversalAnalytics.FieldsObject): UniversalAnalytics.FieldsObject { return fieldsObjectSample; }, - trackerNames: ['t0', 'myTracker'], -} - let fullBrowserSettings: SplitIO.IBrowserSettings = { core: { authorizationKey: 'asd', key: 'asd', - trafficType: 'myTT', labelsEnabled: false }, scheduler: { @@ -560,7 +526,6 @@ let fullBrowserSettings: SplitIO.IBrowserSettings = { }, impressionListener: impressionListener, debug: true, - integrations: [googleAnalyticsToSplitConfig, splitToGoogleAnalyticsConfig, customGoogleAnalyticsToSplitConfig, customSplitToGoogleAnalyticsConfig], streamingEnabled: true, sync: { splitFilters: splitFilters, @@ -573,7 +538,6 @@ let fullBrowserSettings: SplitIO.IBrowserSettings = { userConsent: 'GRANTED' }; fullBrowserSettings.storage.type = 'MEMORY'; -fullBrowserSettings.integrations[0].type = 'GOOGLE_ANALYTICS_TO_SPLIT'; fullBrowserSettings.userConsent = 'DECLINED'; fullBrowserSettings.userConsent = 'UNKNOWN'; @@ -671,6 +635,7 @@ let fullAsyncSettings: SplitIO.INodeAsyncSettings = { debug: true, sync: { splitFilters: splitFilters, + impressionsMode: 'DEBUG', } }; diff --git a/ts-tests/package.json b/ts-tests/package.json deleted file mode 100644 index d359b9e25..000000000 --- a/ts-tests/package.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "ts-tests", - "version": "1.0.0", - "description": "SDK tests for TypeScript declaration files", - "author": "Nico Zelaya", - "license": "Apache-2.0", - "repository": "splitio/javascript-client", - "dependencies": { - "@types/node": "^14.18.63", - "typescript": "^3.7.4" - } -} diff --git a/ts-tests/tsconfig.json b/ts-tests/tsconfig.json index 4adc23925..2c1ab20a8 100644 --- a/ts-tests/tsconfig.json +++ b/ts-tests/tsconfig.json @@ -2,7 +2,8 @@ "compilerOptions": { "noImplicitAny": true, "target": "es5", - "module": "commonjs" + "module": "commonjs", + "noEmit": true, }, "files": [ "index" diff --git a/types/client/index.d.ts 
b/types/client/index.d.ts index d94090d91..efa3a8f03 100644 --- a/types/client/index.d.ts +++ b/types/client/index.d.ts @@ -1,12 +1,13 @@ // Declaration file for JavaScript Split Software SDK // Project: http://www.split.io/ -/// +import '@splitsoftware/splitio-commons'; + export = JsSdk; declare module JsSdk { /** - * Split.io sdk factory function. + * Split.io SDK factory function. * The settings parameter should be an object that complies with the SplitIO.IBrowserSettings. * For more information read the corresponding article: @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} */ diff --git a/types/index.d.ts b/types/index.d.ts index 78c5d824f..e3f81c3e4 100644 --- a/types/index.d.ts +++ b/types/index.d.ts @@ -2,25 +2,25 @@ // Project: http://www.split.io/ // Definitions by: Nico Zelaya -/// +import '@splitsoftware/splitio-commons'; export = JsSdk; declare module JsSdk { /** - * Split.io sdk factory function. + * Split.io SDK factory function. * The settings parameter should be an object that complies with the SplitIO.INodeAsyncSettings. * For more information read the corresponding article: @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ export function SplitFactory(settings: SplitIO.INodeAsyncSettings): SplitIO.IAsyncSDK; /** - * Split.io sdk factory function. + * Split.io SDK factory function. * The settings parameter should be an object that complies with the SplitIO.INodeSettings. * For more information read the corresponding article: @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ export function SplitFactory(settings: SplitIO.INodeSettings): SplitIO.ISDK; /** - * Split.io sdk factory function. + * Split.io SDK factory function. * The settings parameter should be an object that complies with the SplitIO.IBrowserSettings. * For more information read the corresponding article: @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} */ diff --git a/types/server/index.d.ts b/types/server/index.d.ts index a9a145a2f..fcbc2bd59 100644 --- a/types/server/index.d.ts +++ b/types/server/index.d.ts @@ -1,18 +1,19 @@ // Declaration file for JavaScript Split Software SDK // Project: http://www.split.io/ -/// +import '@splitsoftware/splitio-commons'; + export = JsSdk; declare module JsSdk { /** - * Split.io sdk factory function. + * Split.io SDK factory function. * The settings parameter should be an object that complies with the SplitIO.INodeAsyncSettings. * For more information read the corresponding article: @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ export function SplitFactory(settings: SplitIO.INodeAsyncSettings): SplitIO.IAsyncSDK; /** - * Split.io sdk factory function. + * Split.io SDK factory function. * The settings parameter should be an object that complies with the SplitIO.INodeSettings. 
* For more information read the corresponding article: @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} */ diff --git a/types/splitio.d.ts b/types/splitio.d.ts deleted file mode 100644 index bb88476bc..000000000 --- a/types/splitio.d.ts +++ /dev/null @@ -1,1825 +0,0 @@ -// Type definitions for JavaScript and NodeJS Split Software SDK -// Project: http://www.split.io/ -// Definitions by: Nico Zelaya - -/// -import { RedisOptions } from "ioredis"; -import { RequestOptions } from "http"; - -export as namespace SplitIO; -export = SplitIO; - -/** - * NodeJS.EventEmitter interface - * @see {@link https://nodejs.org/api/events.html} - */ -interface EventEmitter { - addListener(event: string | symbol, listener: (...args: any[]) => void): this; - on(event: string | symbol, listener: (...args: any[]) => void): this; - once(event: string | symbol, listener: (...args: any[]) => void): this; - removeListener(event: string | symbol, listener: (...args: any[]) => void): this; - off(event: string | symbol, listener: (...args: any[]) => void): this; - removeAllListeners(event?: string | symbol): this; - setMaxListeners(n: number): this; - getMaxListeners(): number; - listeners(event: string | symbol): Function[]; - rawListeners(event: string | symbol): Function[]; - emit(event: string | symbol, ...args: any[]): boolean; - listenerCount(type: string | symbol): number; - // Added in Node 6... - prependListener(event: string | symbol, listener: (...args: any[]) => void): this; - prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; - eventNames(): Array; -} -/** - * @typedef {Object} EventConsts - * @property {string} SDK_READY The ready event. - * @property {string} SDK_READY_FROM_CACHE The ready event when fired with cached data. - * @property {string} SDK_READY_TIMED_OUT The timeout event. - * @property {string} SDK_UPDATE The update event. - */ -type EventConsts = { - SDK_READY: 'init::ready', - SDK_READY_FROM_CACHE: 'init::cache-ready', - SDK_READY_TIMED_OUT: 'init::timeout', - SDK_UPDATE: 'state::update' -}; -/** - * SDK Modes. - * @typedef {string} SDKMode - */ -type SDKMode = 'standalone' | 'consumer'; -/** - * Storage types. - * @typedef {string} StorageType - */ -type StorageType = 'MEMORY' | 'LOCALSTORAGE' | 'REDIS'; -/** - * Settings interface. This is a representation of the settings the SDK expose, that's why - * most of it's props are readonly. Only features should be rewritten when localhost mode is active. 
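// For context, a brief sketch of reading (and, in localhost mode, rewriting) the exposed settings
// at runtime, assuming `factory` is an SDK instance created with SplitFactory (the values shown
// in comments are illustrative):
const { version, mode } = factory.settings;                       // e.g. 'nodejs-11.0.0', 'standalone'
const pollRate = factory.settings.scheduler.featuresRefreshRate;  // seconds between splitChanges polls
factory.settings.features = { my_feature: 'off' };                // only meaningful in localhost mode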
- * @interface ISettings - */ -interface ISettings { - readonly core: { - authorizationKey: string, - key: SplitIO.SplitKey, - trafficType: string, - labelsEnabled: boolean, - IPAddressesEnabled: boolean - }, - readonly mode: SDKMode, - readonly scheduler: { - featuresRefreshRate: number, - impressionsRefreshRate: number, - impressionsQueueSize: number, - /** - * @deprecated - */ - metricsRefreshRate?: number, - telemetryRefreshRate: number, - segmentsRefreshRate: number, - offlineRefreshRate: number, - eventsPushRate: number, - eventsQueueSize: number, - pushRetryBackoffBase: number - }, - readonly startup: { - readyTimeout: number, - requestTimeoutBeforeReady: number, - retriesOnFailureBeforeReady: number, - eventsFirstPushWindow: number - }, - readonly storage: { - prefix: string, - options: Object, - type: StorageType - }, - readonly urls: { - events: string, - sdk: string, - auth: string, - streaming: string, - telemetry: string - }, - readonly debug: boolean | LogLevel, - readonly version: string, - /** - * Mocked features map if using in browser, or mocked features file path string if using in NodeJS. - */ - features: SplitIO.MockedFeaturesMap | SplitIO.MockedFeaturesFilePath, - readonly streamingEnabled: boolean, - readonly sync: { - splitFilters: SplitIO.SplitFilter[], - impressionsMode: SplitIO.ImpressionsMode, - enabled: boolean, - flagSpecVersion: string, - requestOptions?: { - getHeaderOverrides?: (context: { headers: Record }) => Record - } - } - /** - * User consent status if using in browser. Undefined if using in NodeJS. - */ - readonly userConsent?: SplitIO.ConsentStatus -} -/** - * Log levels. - * @typedef {string} LogLevel - */ -type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE'; -/** - * Logger API - * @interface ILoggerAPI - */ -interface ILoggerAPI { - /** - * Enables SDK logging to the console. - * @function enable - * @returns {void} - */ - enable(): void, - /** - * Disables SDK logging. - * @function disable - * @returns {void} - */ - disable(): void, - /** - * Sets a log level for the SDK logs. - * @function setLogLevel - * @returns {void} - */ - setLogLevel(logLevel: LogLevel): void, - /** - * Log level constants. Use this to pass them to setLogLevel function. - */ - LogLevel: { - [level in LogLevel]: LogLevel - } -} -/** - * User consent API - * @interface IUserConsentAPI - */ -interface IUserConsentAPI { - /** - * Sets or updates the user consent status. Possible values are `true` and `false`, which represent user consent `'GRANTED'` and `'DECLINED'` respectively. - * - `true ('GRANTED')`: the user has granted consent for tracking events and impressions. The SDK will send them to Split cloud. - * - `false ('DECLINED')`: the user has declined consent for tracking events and impressions. The SDK will not send them to Split cloud. - * - * NOTE: calling this method updates the user consent at a factory level, affecting all clients of the same factory. - * - * @function setStatus - * @param {boolean} userConsent The user consent status, true for 'GRANTED' and false for 'DECLINED'. - * @returns {boolean} Whether the provided param is a valid value (i.e., a boolean value) or not. - */ - setStatus(userConsent: boolean): boolean; - /** - * Gets the user consent status. - * - * @function getStatus - * @returns {ConsentStatus} The user consent status. - */ - getStatus(): SplitIO.ConsentStatus; - /** - * Consent status constants. Use this to compare with the getStatus function result. 
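// For context, a brief sketch of the logger and user consent APIs documented here, assuming
// `factory` is a client-side SDK instance (the log level and statuses used are illustrative):
factory.Logger.setLogLevel('WARN');            // 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE'
factory.UserConsent.setStatus(false);          // 'DECLINED': stop sending events and impressions
const declined = factory.UserConsent.getStatus() === factory.UserConsent.Status.DECLINED;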
- */ - Status: { - [status in SplitIO.ConsentStatus]: SplitIO.ConsentStatus - } -} -/** - * Common settings between Browser and NodeJS settings interface. - * @interface ISharedSettings - */ -interface ISharedSettings { - /** - * Boolean value to indicate whether the logger should be enabled or disabled, or a log level string. - * - * Examples: - * ```javascript - * config.debug = true - * config.debug = 'WARN' - * ``` - * @property {boolean | LogLevel} debug - * @default false - */ - debug?: boolean | LogLevel, - /** - * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, - * which will check for the logImpression method. - * @property {IImpressionListener} impressionListener - * @default undefined - */ - impressionListener?: SplitIO.IImpressionListener, - /** - * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, - * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. - * @property {boolean} streamingEnabled - * @default true - */ - streamingEnabled?: boolean, - /** - * SDK synchronization settings. - * @property {Object} sync - */ - sync?: { - /** - * List of feature flag filters. These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. - * This configuration is only meaningful when the SDK is working in "standalone" mode. - * - * Example: - * `splitFilter: [ - * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' - * ]` - * @property {SplitIO.SplitFilter[]} splitFilters - */ - splitFilters?: SplitIO.SplitFilter[] - /** - * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. - * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. - * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). - * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. - * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. - * - * @property {string} impressionsMode - * @default 'OPTIMIZED' - */ - impressionsMode?: SplitIO.ImpressionsMode, - /** - * Controls the SDK continuous synchronization flags. - * - * When `true` a running SDK will process rollout plan updates performed on the UI (default). - * When false it'll just fetch all data upon init - * - * @property {boolean} enabled - * @default true - */ - enabled?: boolean - } -} -/** - * Common settings interface for SDK instances on NodeJS. - * @interface INodeBasicSettings - * @extends ISharedSettings - */ -interface INodeBasicSettings extends ISharedSettings { - /** - * SDK Startup settings for NodeJS. - * @property {Object} startup - */ - startup?: { - /** - * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 15 - */ - readyTimeout?: number, - /** - * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. 
- * @property {number} requestTimeoutBeforeReady - * @default 15 - */ - requestTimeoutBeforeReady?: number, - /** - * How many quick retries we will do while starting up the SDK. - * @property {number} retriesOnFailureBeforeReady - * @default 1 - */ - retriesOnFailureBeforeReady?: number, - /** - * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers. This number defines that window before the first events push. - * - * @property {number} eventsFirstPushWindow - * @default 0 - */ - eventsFirstPushWindow?: number, - }, - /** - * SDK scheduler settings. - * @property {Object} scheduler - */ - scheduler?: { - /** - * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. - * @property {number} featuresRefreshRate - * @default 60 - */ - featuresRefreshRate?: number, - /** - * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. - * @property {number} impressionsRefreshRate - * @default 300 - */ - impressionsRefreshRate?: number, - /** - * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} impressionsQueueSize - * @default 30000 - */ - impressionsQueueSize?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} metricsRefreshRate - * @default 120 - * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. - */ - metricsRefreshRate?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameters controls this metric flush period in seconds. - * @property {number} telemetryRefreshRate - * @default 3600 - */ - telemetryRefreshRate?: number, - /** - * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. - * @property {number} segmentsRefreshRate - * @default 60 - */ - segmentsRefreshRate?: number, - /** - * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * @property {number} eventsPushRate - * @default 60 - */ - eventsPushRate?: number, - /** - * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} eventsQueueSize - * @default 500 - */ - eventsQueueSize?: number, - /** - * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {number} offlineRefreshRate - * @default 15 - */ - offlineRefreshRate?: number - /** - * When using streaming mode, seconds to wait before re attempting to connect for push notifications. - * Next attempts follow intervals in power of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * @property {number} pushRetryBackoffBase - * @default 1 - */ - pushRetryBackoffBase?: number, - }, - /** - * SDK Core settings for NodeJS. 
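As a reference point for the NodeJS startup and scheduler options documented above, the following configuration sketch simply repeats the documented defaults explicitly; the SDK key is a placeholder.
```typescript
import { SplitFactory } from '@splitsoftware/splitio';

// Example NodeJS standalone configuration using the startup/scheduler knobs
// documented above; the numeric values mirror the documented defaults.
const factory = SplitFactory({
  core: { authorizationKey: 'YOUR_SERVER_SIDE_SDK_KEY' },
  startup: {
    readyTimeout: 15,               // seconds before SDK_READY_TIMED_OUT fires
    requestTimeoutBeforeReady: 15,
    retriesOnFailureBeforeReady: 1
  },
  scheduler: {
    featuresRefreshRate: 60,        // poll feature flag definitions every 60s
    segmentsRefreshRate: 60,
    impressionsRefreshRate: 300,
    eventsPushRate: 60,
    eventsQueueSize: 500
  }
});
```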
- * @property {Object} core - */ - core: { - /** - * Your SDK key. More information: @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey - */ - authorizationKey: string, - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true - */ - labelsEnabled?: boolean - /** - * Disable machine IP and Name from being sent to Split backend. - * @property {boolean} IPAddressesEnabled - * @default true - */ - IPAddressesEnabled?: boolean - }, - /** - * Defines which kind of storage we should instantiate. - * @property {Object} storage - */ - storage?: { - /** - * Storage type to be instantiated by the SDK. - * @property {StorageType} type - * @default 'MEMORY' - */ - type?: StorageType, - /** - * Options to be passed to the selected storage. - * @property {Object} options - */ - options?: Object, - /** - * Optional prefix to prevent any kind of data collision between SDK versions. - * @property {string} prefix - * @default 'SPLITIO' - */ - prefix?: string - }, - /** - * The SDK mode. Possible values are "standalone", which is the default when using a synchronous storage, like 'MEMORY' and 'LOCALSTORAGE', - * and "consumer", which must be set when using an asynchronous storage, like 'REDIS'. For "localhost" mode, use "localhost" as authorizationKey. - * @property {SDKMode} mode - * @default 'standalone' - */ - mode?: SDKMode, - /** - * Mocked features file path. For testing purposses only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#localhost-mode} - * @property {MockedFeaturesFilePath} features - * @default '$HOME/.split' - */ - features?: SplitIO.MockedFeaturesFilePath, -} -/** - * Common API for entities that expose status handlers. - * @interface IStatusInterface - * @extends EventEmitter - */ -interface IStatusInterface extends EventEmitter { - /** - * Constant object containing the SDK events for you to use. - * @property {EventConsts} Event - */ - Event: EventConsts, - /** - * Returns a promise that resolves once the SDK has finished loading (SDK_READY event emitted) or rejected if the SDK has timedout (SDK_READY_TIMED_OUT event emitted). - * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, calling the `ready` method after the - * SDK had timed out will return a new promise that should eventually resolve if the SDK gets ready. - * - * Caveats: the method was designed to avoid an unhandled Promise rejection if the rejection case is not handled, so that `onRejected` handler is optional when using promises. - * However, when using async/await syntax, the rejection should be explicitly propagated like in the following example: - * ``` - * try { - * await client.ready().catch((e) => { throw e; }); - * // SDK is ready - * } catch(e) { - * // SDK has timedout - * } - * ``` - * - * @function ready - * @returns {Promise} - */ - ready(): Promise -} -/** - * Common definitions between clients for different environments interface. - * @interface IBasicClient - * @extends IStatusInterface - */ -interface IBasicClient extends IStatusInterface { - /** - * Destroys the client instance. - * In 'standalone' mode, this method will flush any pending impressions and events, and stop the synchronization of feature flag definitions with the backend. 
- * In 'consumer' mode, this method will disconnect the SDK from the Redis or Pluggable storage. - * - * @function destroy - * @returns {Promise} A promise that resolves once the client is destroyed. - */ - destroy(): Promise -} -/** - * Common definitions between SDK instances for different environments interface. - * @interface IBasicSDK - */ -interface IBasicSDK { - /** - * Current settings of the SDK instance. - * @property settings - */ - settings: ISettings, - /** - * Logger API. - * @property Logger - */ - Logger: ILoggerAPI -} -/****** Exposed namespace ******/ -/** - * Types and interfaces for @splitsoftware/splitio package for usage when integrating javascript sdk on typescript apps. - * For the SDK package information - * @see {@link https://www.npmjs.com/package/@splitsoftware/splitio} - */ -declare namespace SplitIO { - /** - * Feature flag treatment value, returned by getTreatment. - * @typedef {string} Treatment - */ - type Treatment = string; - /** - * Feature flag treatment promise that resolves to actual treatment value. - * @typedef {Promise} AsyncTreatment - */ - type AsyncTreatment = Promise; - /** - * An object with the treatments for a bulk of feature flags, returned by getTreatments. For example: - * { - * feature1: 'on', - * feature2: 'off - * } - * @typedef {Object.} Treatments - */ - type Treatments = { - [featureName: string]: Treatment - }; - /** - * Feature flag treatments promise that resolves to the actual SplitIO.Treatments object. - * @typedef {Promise} AsyncTreatments - */ - type AsyncTreatments = Promise; - /** - * Feature flag evaluation result with treatment and configuration, returned by getTreatmentWithConfig. - * @typedef {Object} TreatmentWithConfig - * @property {string} treatment The treatment string - * @property {string | null} config The stringified version of the JSON config defined for that treatment, null if there is no config for the resulting treatment. - */ - type TreatmentWithConfig = { - treatment: string, - config: string | null - }; - /** - * Feature flag treatment promise that resolves to actual treatment with config value. - * @typedef {Promise} AsyncTreatmentWithConfig - */ - type AsyncTreatmentWithConfig = Promise; - /** - * An object with the treatments with configs for a bulk of feature flags, returned by getTreatmentsWithConfig. - * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. For example: - * { - * feature1: { treatment: 'on', config: null } - * feature2: { treatment: 'off', config: '{"bannerText":"Click here."}' } - * } - * @typedef {Object.} Treatments - */ - type TreatmentsWithConfig = { - [featureName: string]: TreatmentWithConfig - }; - /** - * Feature flag treatments promise that resolves to the actual SplitIO.TreatmentsWithConfig object. - * @typedef {Promise} AsyncTreatmentsWithConfig - */ - type AsyncTreatmentsWithConfig = Promise; - /** - * Possible Split SDK events. - * @typedef {string} Event - */ - type Event = 'init::timeout' | 'init::ready' | 'init::cache-ready' | 'state::update'; - /** - * Attributes should be on object with values of type string, boolean, number (dates should be sent as millis since epoch) or array of strings or numbers. 
- * @typedef {Object.} Attributes - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#attribute-syntax} - */ - type Attributes = { - [attributeName: string]: AttributeType - }; - /** - * Type of an attribute value - * @typedef {string | number | boolean | Array} AttributeType - */ - type AttributeType = string | number | boolean | Array; - /** - * Properties should be an object with values of type string, number, boolean or null. Size limit of ~31kb. - * @typedef {Object.} Properties - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#track - */ - type Properties = { - [propertyName: string]: string | number | boolean | null - }; - /** - * The SplitKey object format. - * @typedef {Object.} SplitKeyObject - */ - type SplitKeyObject = { - matchingKey: string, - bucketingKey: string - }; - /** - * The customer identifier. Could be a SplitKeyObject or a string. - * @typedef {SplitKeyObject|string} SplitKey - */ - type SplitKey = SplitKeyObject | string; - /** - * Path to file with mocked features (for node). - * @typedef {string} MockedFeaturesFilePath - */ - type MockedFeaturesFilePath = string; - /** - * Object with mocked features mapping (for browser). We need to specify the featureName as key, and the mocked treatment as value. - * @typedef {Object} MockedFeaturesMap - */ - type MockedFeaturesMap = { - [featureName: string]: string | TreatmentWithConfig - }; - /** - * Object with information about an impression. It contains the generated impression DTO as well as - * complementary information around where and how it was generated in that way. - * @typedef {Object} ImpressionData - */ - type ImpressionData = { - impression: { - feature: string, - keyName: string, - treatment: string, - time: number, - bucketingKey?: string, - label: string, - changeNumber: number, - pt?: number, - }, - attributes?: SplitIO.Attributes, - ip: string, - hostname: string, - sdkLanguageVersion: string - }; - /** - * Data corresponding to one feature flag view. - * @typedef {Object} SplitView - */ - type SplitView = { - /** - * The name of the feature flag. - * @property {string} name - */ - name: string, - /** - * The traffic type of the feature flag. - * @property {string} trafficType - */ - trafficType: string, - /** - * Whether the feature flag is killed or not. - * @property {boolean} killed - */ - killed: boolean, - /** - * The list of treatments available for the feature flag. - * @property {Array} treatments - */ - treatments: Array, - /** - * Current change number of the feature flag. - * @property {number} changeNumber - */ - changeNumber: number, - /** - * Map of configurations per treatment. - * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. - * @property {Object.} configs - */ - configs: { - [treatmentName: string]: string - }, - /** - * List of sets of the feature flag. - * @property {string[]} sets - */ - sets: string[], - /** - * The default treatment of the feature flag. - * @property {string} defaultTreatment - */ - defaultTreatment: string, - }; - /** - * A promise that resolves to a feature flag view. - * @typedef {Promise} SplitView - */ - type SplitViewAsync = Promise; - /** - * An array containing the SplitIO.SplitView elements. - */ - type SplitViews = Array; - /** - * A promise that resolves to an SplitIO.SplitViews array. - * @typedef {Promise} SplitViewsAsync - */ - type SplitViewsAsync = Promise; - /** - * An array of feature flag names. 
- * @typedef {Array} SplitNames - */ - type SplitNames = Array; - /** - * A promise that resolves to an array of feature flag names. - * @typedef {Promise} SplitNamesAsync - */ - type SplitNamesAsync = Promise; - /** - * Synchronous storage valid types for NodeJS. - * @typedef {string} NodeSyncStorage - */ - type NodeSyncStorage = 'MEMORY'; - /** - * Asynchronous storages valid types for NodeJS. - * @typedef {string} NodeAsyncStorage - */ - type NodeAsyncStorage = 'REDIS'; - /** - * Storage valid types for the browser. - * @typedef {string} BrowserStorage - */ - type BrowserStorage = 'MEMORY' | 'LOCALSTORAGE'; - /** - * Impression listener interface. This is the interface that needs to be implemented - * by the element you provide to the SDK as impression listener. - * @interface IImpressionListener - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#listener} - */ - interface IImpressionListener { - logImpression(data: SplitIO.ImpressionData): void - } - /** - * A pair of user key and it's trafficType, required for tracking valid Split events. - * @typedef {Object} Identity - * @property {string} key The user key. - * @property {string} trafficType The key traffic type. - */ - type Identity = { - key: string; - trafficType: string; - }; - /** - * Object with information about a Split event. - * @typedef {Object} EventData - */ - type EventData = { - eventTypeId: string; - value?: number; - properties?: Properties; - trafficTypeName?: string; - key?: string; - timestamp?: number; - }; - /** - * Enable 'Google Analytics to Split' integration, to track Google Analytics hits as Split events. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#google-analytics-to-split} - */ - interface IGoogleAnalyticsToSplitConfig { - type: 'GOOGLE_ANALYTICS_TO_SPLIT', - /** - * Optional flag to filter GA hits from being tracked as Split events. - * @property {boolean} hits - * @default true - */ - hits?: boolean, - /** - * Optional predicate used to define a custom filter for tracking GA hits as Split events. - * For example, the following filter allows to track only 'event' hits: - * `(model) => model.get('hitType') === 'event'` - * By default, all hits are tracked as Split events. - */ - filter?: (model: UniversalAnalytics.Model) => boolean, - /** - * Optional function useful when you need to modify the Split event before tracking it. - * This function is invoked with two arguments: - * 1. the GA model object representing the hit. - * 2. the default format of the mapped Split event instance. - * The return value must be a Split event, that can be the second argument or a new object. - * - * For example, the following mapper adds a custom property to events: - * `(model, defaultMapping) => { - * defaultMapping.properties.someProperty = SOME_VALUE; - * return defaultMapping; - * }` - */ - mapper?: (model: UniversalAnalytics.Model, defaultMapping: SplitIO.EventData) => SplitIO.EventData, - /** - * Optional prefix for EventTypeId, to prevent any kind of data collision between events. - * @property {string} prefix - * @default 'ga' - */ - prefix?: string, - /** - * List of Split identities (key & traffic type pairs) used to track events. - * If not provided, events are sent using the key and traffic type provided at SDK config - */ - identities?: Identity[], - /** - * Optional flag to log an error if the `auto-require` script is not detected. 
- * The auto-require script automatically requires the `splitTracker` plugin for created trackers, - * and should be placed right after your Google Analytics, Google Tag Manager or gtag.js script tag. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#set-up-with-gtm-and-gtag.js} - * - * @property {boolean} autoRequire - * @default false - */ - autoRequire?: boolean, - } - /** - * Object representing the data sent by Split (events and impressions). - * @typedef {Object} IntegrationData - * @property {string} type The type of Split data, either 'IMPRESSION' or 'EVENT'. - * @property {ImpressionData | EventData} payload The data instance itself. - */ - type IntegrationData = { type: 'IMPRESSION', payload: SplitIO.ImpressionData } | { type: 'EVENT', payload: SplitIO.EventData }; - /** - * Enable 'Split to Google Analytics' integration, to track Split impressions and events as Google Analytics hits. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#split-to-google-analytics} - */ - interface ISplitToGoogleAnalyticsConfig { - type: 'SPLIT_TO_GOOGLE_ANALYTICS', - /** - * Optional flag to filter Split impressions from being tracked as GA hits. - * @property {boolean} impressions - * @default true - */ - impressions?: boolean, - /** - * Optional flag to filter Split events from being tracked as GA hits. - * @property {boolean} events - * @default true - */ - events?: boolean, - /** - * Optional predicate used to define a custom filter for tracking Split data (events and impressions) as GA hits. - * For example, the following filter allows to track only impressions, equivalent to setting events to false: - * `(data) => data.type === 'IMPRESSION'` - */ - filter?: (data: SplitIO.IntegrationData) => boolean, - /** - * Optional function useful when you need to modify the GA hit before sending it. - * This function is invoked with two arguments: - * 1. the input data (Split event or impression). - * 2. the default format of the mapped FieldsObject instance (GA hit). - * The return value must be a FieldsObject, that can be the second argument or a new object. - * - * For example, the following mapper adds a custom dimension to hits: - * `(data, defaultMapping) => { - * defaultMapping.dimension1 = SOME_VALUE; - * return defaultMapping; - * }` - * - * Default FieldsObject instance for data.type === 'IMPRESSION': - * `{ - * hitType: 'event', - * eventCategory: 'split-impression', - * eventAction: 'Evaluate ' + data.payload.impression.feature, - * eventLabel: 'Treatment: ' + data.payload.impression.treatment + '. Targeting rule: ' + data.payload.impression.label + '.', - * nonInteraction: true, - * }` - * Default FieldsObject instance for data.type === 'EVENT': - * `{ - * hitType: 'event', - * eventCategory: 'split-event', - * eventAction: data.payload.eventTypeId, - * eventValue: data.payload.value, - * nonInteraction: true, - * }` - */ - mapper?: (data: SplitIO.IntegrationData, defaultMapping: UniversalAnalytics.FieldsObject) => UniversalAnalytics.FieldsObject, - /** - * List of tracker names to send the hit. An empty string represents the default tracker. - * If not provided, hits are only sent to default tracker. - */ - trackerNames?: string[], - } - /** - * Available URL settings for the SDKs. - */ - type UrlSettings = { - /** - * String property to override the base URL where the SDK will get rollout plan related data, like feature flags and segments definitions. 
- * @property {string} sdk - * @default 'https://sdk.split.io/api' - */ - sdk?: string, - /** - * String property to override the base URL where the SDK will post event-related information like impressions. - * @property {string} events - * @default 'https://events.split.io/api' - */ - events?: string, - /** - * String property to override the base URL where the SDK will get authorization tokens to be used with functionality that requires it, like streaming. - * @property {string} auth - * @default 'https://auth.split.io/api' - */ - auth?: string, - /** - * String property to override the base URL where the SDK will connect to receive streaming updates. - * @property {string} streaming - * @default 'https://streaming.split.io' - */ - streaming?: string, - /** - * String property to override the base URL where the SDK will post telemetry data. - * @property {string} telemetry - * @default 'https://telemetry.split.io/api' - */ - telemetry?: string - }; - - /** - * Available integration options for the browser - */ - type BrowserIntegration = ISplitToGoogleAnalyticsConfig | IGoogleAnalyticsToSplitConfig; - /** - * SplitFilter type. - * - * @typedef {string} SplitFilterType - */ - type SplitFilterType = 'bySet' | 'byName' | 'byPrefix'; - /** - * Defines a feature flag filter, described by a type and list of values. - */ - interface SplitFilter { - /** - * Type of the filter. - * - * @property {SplitFilterType} type - */ - type: SplitFilterType, - /** - * List of values: feature flag names for 'byName' filter type, and feature flag name prefixes for 'byPrefix' type. - * - * @property {string[]} values - */ - values: string[], - } - /** - * ImpressionsMode type - * @typedef {string} ImpressionsMode - */ - type ImpressionsMode = 'OPTIMIZED' | 'DEBUG' | 'NONE'; - /** - * User consent status. - * @typedef {string} ConsentStatus - */ - type ConsentStatus = 'GRANTED' | 'DECLINED' | 'UNKNOWN'; - /** - * Settings interface for SDK instances created on the browser - * @interface IBrowserSettings - * @extends ISharedSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#configuration} - */ - interface IBrowserSettings extends ISharedSettings { - /** - * SDK Startup settings for the Browser. - * @property {Object} startup - */ - startup?: { - /** - * Maximum amount of time used before notify a timeout. - * @property {number} readyTimeout - * @default 1.5 - */ - readyTimeout?: number, - /** - * Time to wait for a request before the SDK is ready. If this time expires, JS Sdk will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'. - * @property {number} requestTimeoutBeforeReady - * @default 1.5 - */ - requestTimeoutBeforeReady?: number, - /** - * How many quick retries we will do while starting up the SDK. - * @property {number} retriesOnFailureBeforeReady - * @default 1 - */ - retriesOnFailureBeforeReady?: number, - /** - * For SDK posts the queued events data in bulks with a given rate, but the first push window is defined separately, - * to better control on browsers. This number defines that window before the first events push. - * - * @property {number} eventsFirstPushWindow - * @default 10 - */ - eventsFirstPushWindow?: number, - }, - /** - * SDK scheduler settings. - * @property {Object} scheduler - */ - scheduler?: { - /** - * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds. 
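To tie the SplitFilter, ImpressionsMode and browser startup options together, here is a hypothetical client-side configuration sketch; the flag set name, user key and SDK key are placeholders.
```typescript
import { SplitFactory } from '@splitsoftware/splitio';

// Hypothetical browser configuration combining options documented above:
// a 'bySet' split filter, impressions mode, and a longer ready timeout.
const factory = SplitFactory({
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user-123' },
  startup: { readyTimeout: 5 },     // seconds; the browser default is 1.5
  sync: {
    splitFilters: [{ type: 'bySet', values: ['frontend'] }],
    impressionsMode: 'OPTIMIZED'
  }
});
```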
- * @property {number} featuresRefreshRate - * @default 60 - */ - featuresRefreshRate?: number, - /** - * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds. - * @property {number} impressionsRefreshRate - * @default 60 - */ - impressionsRefreshRate?: number, - /** - * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} impressionsQueueSize - * @default 30000 - */ - impressionsQueueSize?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameter controls this metric flush period in seconds. - * @property {number} metricsRefreshRate - * @default 120 - * @deprecated This parameter is ignored now. Use `telemetryRefreshRate` instead. - */ - metricsRefreshRate?: number, - /** - * The SDK sends diagnostic metrics to Split servers. This parameter controls this metric flush period in seconds. - * @property {number} telemetryRefreshRate - * @default 3600 - */ - telemetryRefreshRate?: number, - /** - * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds. - * @property {number} segmentsRefreshRate - * @default 60 - */ - segmentsRefreshRate?: number, - /** - * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds. - * @property {number} eventsPushRate - * @default 60 - */ - eventsPushRate?: number, - /** - * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer. - * If you use a 0 here, the queue will have no maximum size. - * @property {number} eventsQueueSize - * @default 500 - */ - eventsQueueSize?: number, - /** - * For mocking/testing only. The SDK will refresh the features mocked data when mode is set to "localhost" by defining the key. - * For more information @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} - * @property {number} offlineRefreshRate - * @default 15 - */ - offlineRefreshRate?: number, - /** - * When using streaming mode, seconds to wait before re-attempting to connect for push notifications. - * Next attempts follow intervals in powers of two: base seconds, base x 2 seconds, base x 4 seconds, ... - * @property {number} pushRetryBackoffBase - * @default 1 - */ - pushRetryBackoffBase?: number, - }, - /** - * SDK Core settings for the browser. - * @property {Object} core - */ - core: { - /** - * Your SDK key. More information: @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey - */ - authorizationKey: string, - /** - * Customer identifier. Whatever this means to you. @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @property {SplitKey} key - */ - key: SplitKey, - /** - * Traffic type associated with the customer identifier. @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * If not provided as a setting, it will be required on the client.track() calls. - * @property {string} trafficType - */ - trafficType?: string, - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. 
- * @property {boolean} labelsEnabled - * @default true - */ - labelsEnabled?: boolean - }, - /** - * Mocked features map. For testing purposses only. For using this you should specify "localhost" as authorizationKey on core settings. - * @see {@link https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#localhost-mode} - */ - features?: MockedFeaturesMap, - /** - * Defines which kind of storage we can instantiate on the browser. - * Possible storage types are 'MEMORY', which is the default, and 'LOCALSTORAGE'. - * @property {Object} storage - */ - storage?: { - /** - * Storage type to be instantiated by the SDK. - * @property {BrowserStorage} type - * @default 'MEMORY' - */ - type?: BrowserStorage, - /** - * Optional prefix to prevent any kind of data collision between SDK versions. - * @property {string} prefix - * @default 'SPLITIO' - */ - prefix?: string - }, - /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls - */ - urls?: UrlSettings, - /** - * SDK integration settings for the Browser. - * @property {Object} integrations - */ - integrations?: BrowserIntegration[], - /** - * User consent status. Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`. - * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud. - * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud. - * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends - * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method. - * - * @typedef {string} userConsent - * @default 'GRANTED' - */ - userConsent?: ConsentStatus, - sync?: ISharedSettings['sync'] & { - /** - * Custom options object for HTTP(S) requests in the Browser. - * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. - */ - requestOptions?: { - /** - * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. - * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. - * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` - * - * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, - * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. - * - * @property getHeaderOverrides - * @default undefined - * - * @param context - The context for the request. - * @param context.headers - The current headers in the request. - * @returns A set of headers to be merged with the current headers. 
- * - * @example - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; - */ - getHeaderOverrides?: (context: { headers: Record }) => Record - }, - } - } - /** - * Settings interface for SDK instances created on NodeJS. - * If your storage is asynchronous (Redis for example) use SplitIO.INodeAsyncSettings instead. - * @interface INodeSettings - * @extends INodeBasicSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} - */ - interface INodeSettings extends INodeBasicSettings { - /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls - */ - urls?: UrlSettings, - /** - * Defines which kind of storage we can instantiate on NodeJS for 'standalone' mode. - * The only possible storage type is 'MEMORY', which is the default. - * @property {Object} storage - */ - storage?: { - /** - * Synchronous storage type to be instantiated by the SDK. - * @property {NodeSyncStorage} type - * @default 'MEMORY' - */ - type?: NodeSyncStorage, - /** - * Optional prefix to prevent any kind of data collision between SDK versions. - * @property {string} prefix - * @default 'SPLITIO' - */ - prefix?: string - }, - /** - * The SDK mode. When using the default 'MEMORY' storage, the only possible value is "standalone", which is the default. - * For "localhost" mode, use "localhost" as authorizationKey. - * - * @property {'standalone'} mode - * @default 'standalone' - */ - mode?: 'standalone' - sync?: INodeBasicSettings['sync'] & { - /** - * Custom options object for HTTP(S) requests in NodeJS. - * If provided, this object is merged with the options object passed by the SDK for EventSource and Node-Fetch calls. - * @see {@link https://www.npmjs.com/package/node-fetch#options} - */ - requestOptions?: { - /** - * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. - * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. - * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` - * - * @property getHeaderOverrides - * @default undefined - * - * @param context - The context for the request. - * @param context.headers - The current headers in the request. - * @returns A set of headers to be merged with the current headers. - * - * @example - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; - */ - getHeaderOverrides?: (context: { headers: Record }) => Record - /** - * Custom NodeJS HTTP(S) Agent used by the SDK for HTTP(S) requests. - * - * You can use it, for example, for certificate pinning or setting a network proxy: - * - * ```javascript - * const { HttpsProxyAgent } = require('https-proxy-agent'); - * - * const proxyAgent = new HttpsProxyAgent(process.env.HTTPS_PROXY || 'http://10.10.1.10:1080'); - * - * const factory = SplitFactory({ - * ... 
- * sync: { - * requestOptions: { - * agent: proxyAgent - * } - * } - * }) - * ``` - * - * @see {@link https://nodejs.org/api/https.html#class-httpsagent} - * - * @property {http.Agent | https.Agent} agent - * @default undefined - */ - agent?: RequestOptions["agent"] - }, - } - } - /** - * Settings interface with async storage for SDK instances created on NodeJS. - * If your storage is synchronous (by defaut we use memory, which is sync) use SplitIO.INodeSettings instead. - * @interface INodeAsyncSettings - * @extends INodeBasicSettings - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#configuration} - */ - interface INodeAsyncSettings extends INodeBasicSettings { - /** - * Defines which kind of async storage we can instantiate on NodeJS for 'consumer' mode. - * The only possible storage type is 'REDIS'. - * @property {Object} storage - */ - storage: { - /** - * 'REDIS' storage type to be instantiated by the SDK. - * @property {NodeAsyncStorage} type - */ - type: NodeAsyncStorage, - /** - * Options to be passed to the Redis storage. Use it with storage type: 'REDIS'. - * @property {Object} options - */ - options?: { - /** - * Redis URL. If set, `host`, `port`, `db` and `pass` params will be ignored. - * - * Examples: - * ``` - * url: 'localhost' - * url: '127.0.0.1:6379' - * url: 'redis://:authpassword@127.0.0.1:6379/0' - * ``` - * @property {string=} url - */ - url?: string, - /** - * Redis host. - * @property {string=} host - * @default 'localhost' - */ - host?: string, - /** - * Redis port. - * @property {number=} port - * @default 6379 - */ - port?: number, - /** - * Redis database to be used. - * @property {number=} db - * @default 0 - */ - db?: number, - /** - * Redis password. Don't define if no password is used. - * @property {string=} pass - * @default undefined - */ - pass?: string, - /** - * The milliseconds before a timeout occurs during the initial connection to the Redis server. - * @property {number=} connectionTimeout - * @default 10000 - */ - connectionTimeout?: number, - /** - * The milliseconds before Redis commands are timeout by the SDK. - * Method calls that involve Redis commands, like `client.getTreatment` or `client.track` calls, are resolved when the commands success or timeout. - * @property {number=} operationTimeout - * @default 5000 - */ - operationTimeout?: number, - /** - * TLS configuration for Redis connection. - * @see {@link https://www.npmjs.com/package/ioredis#tls-options } - * - * @property {Object=} tls - * @default undefined - */ - tls?: RedisOptions['tls'], - }, - /** - * Optional prefix to prevent any kind of data collision between SDK versions. - * @property {string} prefix - * @default 'SPLITIO' - */ - prefix?: string - }, - /** - * The SDK mode. When using 'REDIS' storage type, the only possible value is "consumer", which is required. - * - * @see {@link https://help.split.io/hc/en-us/articles/360020564931-Node-js-SDK#state-sharing-redis-integration} - * - * @property {'consumer'} mode - */ - mode: 'consumer' - } - /** - * This represents the interface for the SDK instance with synchronous storage. - * @interface ISDK - * @extends IBasicSDK - */ - interface ISDK extends IBasicSDK { - /** - * Returns the default client instance of the SDK. - * @function client - * @returns {IClient} The client instance. - */ - client(): IClient, - /** - * Returns a shared client of the SDK. For usage on the browser. - * @function client - * @param {SplitKey} key The key for the new client instance. 
- * @param {string=} trafficType The traffic type of the provided key. - * @returns {IClient} The client instance. - */ - client(key: SplitKey, trafficType?: string): IClient, - /** - * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. - */ - manager(): IManager - } - /** - * This represents the interface for the SDK instance with synchronous storage. - * @interface IBrowserSDK - * @extends ISDK - */ - interface IBrowserSDK extends ISDK { - /** - * Returns the default client instance of the SDK. - * @function client - * @returns {IBrowserClient} The client instance. - */ - client(): IBrowserClient, - /** - * Returns a shared client of the SDK. For usage on the browser. - * @function client - * @param {SplitKey} key The key for the new client instance. - * @param {string=} trafficType The traffic type of the provided key. - * @returns {IBrowserClient} The client instance. - */ - client(key: SplitKey, trafficType?: string): IBrowserClient - /** - * User consent API. - * @property UserConsent - */ - UserConsent: IUserConsentAPI - } - /** - * This represents the interface for the SDK instance with asynchronous storage. - * @interface IAsyncSDK - * @extends IBasicSDK - */ - interface IAsyncSDK extends IBasicSDK { - /** - * Returns the default client instance of the SDK. - * @function client - * @returns {IAsyncClient} The asynchronous client instance. - */ - client(): IAsyncClient, - /** - * Returns a manager instance of the SDK to explore available information. - * @function manager - * @returns {IManager} The manager instance. - */ - manager(): IAsyncManager - } - /** - * This represents the interface for the Client instance with synchronous storage. - * @interface IClient - * @extends IBasicClient - */ - interface IClient extends IBasicClient { - /** - * Returns a Treatment value, which is the treatment string for the given feature. - * For usage on NodeJS as we don't have only one key. - * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. - */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment, - /** - * Returns a Treatment value, which is the treatment string for the given feature. - * For usage on the Browser as we defined the key on the settings. - * @function getTreatment - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. - */ - getTreatment(featureFlagName: string, attributes?: Attributes): Treatment, - /** - * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
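The INodeAsyncSettings and IAsyncSDK definitions above are easier to read next to a usage sketch. This one assumes a Redis instance reachable at the example URL (taken from the `url` examples documented above) and a placeholder SDK key; in 'consumer' mode every evaluation resolves asynchronously.
```typescript
import { SplitFactory } from '@splitsoftware/splitio';

// Illustrative 'consumer' mode setup backed by Redis, using the storage
// options documented above (url, prefix, connectionTimeout, operationTimeout).
const factory = SplitFactory({
  mode: 'consumer',
  core: { authorizationKey: 'YOUR_SERVER_SIDE_SDK_KEY' },
  storage: {
    type: 'REDIS',
    prefix: 'SPLITIO',
    options: {
      url: 'redis://:authpassword@127.0.0.1:6379/0',
      connectionTimeout: 10000,
      operationTimeout: 5000
    }
  }
});

const client = factory.client();
// Evaluations return promises (AsyncTreatment) in consumer mode.
client.getTreatment('user-123', 'my_feature_flag').then((treatment) => {
  console.log(treatment); // e.g. 'on', 'off' or 'control'
});
```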
- * @returns {TreatmentWithConfig} The TreatmentWithConfig, the object containing the treatment string and the - * configuration stringified JSON (or null if there was no config for that treatment). - */ - getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, - /** - * Returns a TreatmentWithConfig value, which an object with both treatment and config string for the given feature. - * For usage on the Browser as we defined the key on the settings. - * @function getTreatmentWithConfig - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentWithConfig} The TreatmentWithConfig, the object containing the treatment string and the - * configuration stringified JSON (or null if there was no config for that treatment). - */ - getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the given features. - * For usage on NodeJS as we don't have only one key. - * NOTE: Treatment will be a promise only in async storages, like REDIS. - * @function getTreatments - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. - */ - getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments, - /** - * Returns a Treatments value, which is an object map with the treatments for the given features. - * For usage on the Browser as we defined the key on the settings. - * NOTE: Treatment will be a promise only in async storages, like REDIS. - * @function getTreatments - * @param {Array} featureFlagNames - An array of the feature flags names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. - */ - getTreatments(featureFlagNames: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsWithConfig - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * For usage on the Browser as we defined the key on the settings. - * @function getTreatmentsWithConfig - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. 
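A short sketch of the synchronous evaluation calls documented above, using the NodeJS signatures (key passed per call); the SDK key, flag names and attributes are placeholders.
```typescript
import { SplitFactory } from '@splitsoftware/splitio';

// Placeholder SDK key; flag names and attributes are illustrative only.
const factory = SplitFactory({ core: { authorizationKey: 'YOUR_SERVER_SIDE_SDK_KEY' } });
const client = factory.client();

client.on(client.Event.SDK_READY, () => {
  // getTreatment: single flag, key passed per call (NodeJS signature).
  const treatment = client.getTreatment('user-123', 'new_checkout', { plan: 'premium' });

  // getTreatmentWithConfig: treatment plus its config (stringified JSON or null).
  const withConfig = client.getTreatmentWithConfig('user-123', 'new_checkout');
  console.log(withConfig.treatment, withConfig.config);

  // getTreatments: object map keyed by feature flag name.
  const treatments = client.getTreatments('user-123', ['new_checkout', 'dark_mode']);
  console.log(treatments);

  if (treatment === 'on') {
    // feature code path
  }
});
```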
* @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. - * @function getTreatmentsByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the Treatments objects - */ - getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): Treatments, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. - * @function getTreatmentsByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the Treatments objects - */ - getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. - * @function getTreatmentsWithConfigByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. - * @function getTreatmentsWithConfigByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets. - * @function getTreatmentsByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the Treatments objects - */ - getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): Treatments, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets. 
- * @function getTreatmentsByFlagSets - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the Treatments objects - */ - getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. - * @function getTreatmentsWithConfigByFlagSets - * @param {string} key - The string key representing the consumer. - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets. - * @function getTreatmentsWithConfigByFlagSets - * @param {Array} flagSets - An array of the flag set names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Tracks an event to be fed to the results product on Split user interface. - * For usage on NodeJS as we don't have only one key. - * @function track - * @param {SplitKey} key - The key that identifies the entity related to this event. - * @param {string} trafficType - The traffic type of the entity related to this event. - * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {boolean} Whether the event was added to the queue successfully or not. - */ - track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean, - /** - * Tracks an event to be fed to the results product on Split user interface. - * For usage on the Browser as we defined the key on the settings. - * @function track - * @param {string} trafficType - The traffic type of the entity related to this event. - * @param {string} eventType - The event type corresponding to this event. - * @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {boolean} Whether the event was added to the queue successfully or not. - */ - track(trafficType: string, eventType: string, value?: number, properties?: Properties): boolean, - /** - * Tracks an event to be fed to the results product on Split user interface. - * For usage on the Browser if we defined the key and also the trafficType on the settings. - * @function track - * @param {string} eventType - The event type corresponding to this event. 
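The track() overloads documented above differ only in how the key and traffic type are bound; a brief sketch with placeholder key, traffic type, event type and properties:
```typescript
import { SplitFactory } from '@splitsoftware/splitio';

// Placeholder SDK key; the event data below is illustrative only.
const factory = SplitFactory({ core: { authorizationKey: 'YOUR_SERVER_SIDE_SDK_KEY' } });
const client = factory.client();

// NodeJS overload: key and traffic type passed explicitly.
const queued = client.track('user-123', 'user', 'checkout_completed', 79.99, {
  currency: 'USD',
  items: 3
});
console.log(queued); // true if the event was added to the queue

// Browser overloads (key bound at the factory, and optionally the traffic type too):
// client.track('user', 'checkout_completed', 79.99);
// client.track('checkout_completed', 79.99);
```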
* @param {number=} value - The value of this event. - * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null. - * @returns {boolean} Whether the event was added to the queue successfully or not. - */ - track(eventType: string, value?: number, properties?: Properties): boolean - } - /** - * This represents the interface for the Client instance with attributes binding. - * @interface IBrowserClient - * @extends IClient - */ - interface IBrowserClient extends IClient { - /** - * Add an attribute to client's in memory attributes storage. - * - * @param {string} attributeName Attribute name - * @param {AttributeType} attributeValue Attribute value - * @returns {boolean} true if the attribute was stored and false otherwise - */ - setAttribute(attributeName: string, attributeValue: AttributeType): boolean, - /** - * Returns the attribute with the given name. - * - * @param {string} attributeName Attribute name - * @returns {AttributeType} Attribute with the given name - */ - getAttribute(attributeName: string): AttributeType, - /** - * Removes from client's in memory attributes storage the attribute with the given name. - * - * @param {string} attributeName - * @returns {boolean} true if attribute was removed and false otherwise - */ - removeAttribute(attributeName: string): boolean, - /** - * Add to client's in memory attributes storage the attributes in 'attributes'. - * - * @param {Attributes} attributes Object with attributes to store - * @returns {boolean} true if attributes were stored and false otherwise - */ - setAttributes(attributes: Attributes): boolean, - /** - * Return all the attributes stored in client's in memory attributes storage. - * - * @returns {Attributes} returns all the stored attributes - */ - getAttributes(): Attributes, - /** - * Remove all the stored attributes in the client's in memory attribute storage. - * - * @returns {boolean} true if all attributes were removed and false otherwise - */ - clearAttributes(): boolean - } - /** - * This represents the interface for the Client instance with asynchronous storage. - * @interface IAsyncClient - * @extends IBasicClient - */ - interface IAsyncClient extends IBasicClient { - /** - * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature. - * For usage on NodeJS as we don't have only one key. - * NOTE: Treatment will be a promise only in async storages, like REDIS. - * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string. - */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatment, - /** - * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. - * For usage on NodeJS as we don't have only one key. - * NOTE: Treatment will be a promise only in async storages, like REDIS. - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. 
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object.
- */
- getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig,
- /**
- * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features.
- * For usage on NodeJS as we don't have only one key.
- * @function getTreatments
- * @param {string} key - The string key representing the consumer.
- * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
- */
- getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatments,
- /**
- * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features.
- * For usage on NodeJS as we don't have only one key.
- * @function getTreatmentsWithConfig
- * @param {string} key - The string key representing the consumer.
- * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects.
- */
- getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig,
- /**
- * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set.
- * For usage on NodeJS as we don't have only one key.
- * @function getTreatmentsByFlagSet
- * @param {string} key - The string key representing the consumer.
- * @param {string} flagSet - The flag set name we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
- */
- getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatments,
- /**
- * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set.
- * For usage on NodeJS as we don't have only one key.
- * @function getTreatmentsWithConfigByFlagSet
- * @param {string} key - The string key representing the consumer.
- * @param {string} flagSet - The flag set name we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object.
- */
- getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig,
- /**
- * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets.
- * For usage on NodeJS as we don't have only one key.
- * @function getTreatmentsByFlagSets
- * @param {string} key - The string key representing the consumer.
- * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
- */
- getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatments,
- /**
- * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets.
- * For usage on NodeJS as we don't have only one key.
- * @function getTreatmentsWithConfigByFlagSets
- * @param {string} key - The string key representing the consumer.
- * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object.
- */
- getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig,
- /**
- * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not).
- * @function track
- * @param {SplitKey} key - The key that identifies the entity related to this event.
- * @param {string} trafficType - The traffic type of the entity related to this event.
- * @param {string} eventType - The event type corresponding to this event.
- * @param {number=} value - The value of this event.
- * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null.
- * @returns {Promise} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not.
- */
- track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise
- }
- /**
- * Representation of a manager instance with synchronous storage of the SDK.
- * @interface IManager
- * @extends IStatusInterface
- */
- interface IManager extends IStatusInterface {
- /**
- * Get the array of feature flag names.
- * @function names
- * @returns {SplitNames} The list of feature flag names.
- */
- names(): SplitNames;
- /**
- * Get the array of feature flags data in SplitView format.
- * @function splits
- * @returns {SplitViews} The list of SplitIO.SplitView.
- */
- splits(): SplitViews;
- /**
- * Get the data of a feature flag in SplitView format.
- * @function split
- * @param {string} featureFlagName The name of the feature flag we want to get info of.
- * @returns {SplitView | null} The SplitIO.SplitView of the given feature flag name or null if the feature flag is not found.
- */
- split(featureFlagName: string): SplitView | null;
- }
- /**
- * Representation of a manager instance with asynchronous storage of the SDK.
- * @interface IAsyncManager
- * @extends IStatusInterface
- */
- interface IAsyncManager extends IStatusInterface {
- /**
- * Get the array of feature flag names.
- * @function names
- * @returns {SplitNamesAsync} A promise that resolves to the list of feature flag names.
- */
- names(): SplitNamesAsync;
- /**
- * Get the array of feature flags data in SplitView format.
- * @function splits
- * @returns {SplitViewsAsync} A promise that resolves to the SplitIO.SplitView list.
- */
- splits(): SplitViewsAsync;
- /**
- * Get the data of a feature flag in SplitView format.
- * @function split
- * @param {string} featureFlagName The name of the feature flag we want to get info of.
- * @returns {SplitViewAsync} A promise that resolves to the SplitIO.SplitView value.
- */
- split(featureFlagName: string): SplitViewAsync;
- }
-}
diff --git a/webpack.common.js b/webpack.common.js
index e7a389788..8fb8dc34b 100644
--- a/webpack.common.js
+++ b/webpack.common.js
@@ -1,6 +1,6 @@
 module.exports = {
   entry: {
-    split: ['./es/umd.js']
+    split: ['./esm/umd.js']
   },
   output: {
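
Usage note for reviewers: the declarations removed above still describe the SDK's runtime API, so a short sketch of the server-side (NodeJS) client may help when reviewing this change. The example below is based only on the `getTreatmentsByFlagSets`, `getTreatmentsWithConfigByFlagSets` and `track(key, trafficType, eventType, ...)` signatures shown in the removed typings; the factory configuration shape, the SDK key, the flag set names, the attributes and the traffic type are illustrative placeholders, not part of this diff.

// Minimal sketch, assuming the usual factory configuration shape (not shown in this diff).
import { SplitFactory } from '@splitsoftware/splitio';

const factory = SplitFactory({
  core: { authorizationKey: 'YOUR_SDK_KEY' } // placeholder SDK key
});
const client = factory.client();

async function evaluate(userId: string) {
  await client.ready();

  // getTreatmentsByFlagSets(key, flagSets, attributes?) returns a map of
  // feature flag names to treatment strings.
  const treatments = client.getTreatmentsByFlagSets(userId, ['checkout', 'backend'], { plan: 'premium' });

  // The *WithConfig variant returns { treatment, config } objects instead.
  const withConfig = client.getTreatmentsWithConfigByFlagSets(userId, ['checkout']);

  // In NodeJS, track always takes the key and traffic type explicitly:
  // track(key, trafficType, eventType, value?, properties?) -> boolean
  const queued = client.track(userId, 'user', 'page_load_time', 83.33, { page: 'checkout' });

  return { treatments, withConfig, queued };
}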
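
The `IBrowserClient` interface removed above binds attributes to a client instance so they are merged into every evaluation. A minimal browser-side sketch of those methods follows; the SDK key, user key and attribute names are placeholders, and the configuration shape is assumed rather than taken from this diff.

// Minimal sketch of the attribute binding methods (IBrowserClient), with placeholder values.
import { SplitFactory } from '@splitsoftware/splitio';

const browserFactory = SplitFactory({
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user_123' } // placeholders
});
const browserClient = browserFactory.client();

// Stored attributes are merged into every evaluation made by this client.
browserClient.setAttribute('plan', 'premium');          // -> true if the attribute was stored
browserClient.setAttributes({ age: 42, beta: true });   // -> true if the attributes were stored

browserClient.getAttribute('plan');                     // -> 'premium'
browserClient.getAttributes();                          // -> { plan: 'premium', age: 42, beta: true }

browserClient.removeAttribute('beta');                  // -> true if the attribute was removed
browserClient.clearAttributes();                        // -> true if all attributes were removed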
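
Finally, the `IAsyncClient` and `IAsyncManager` interfaces removed above return promises because the underlying storage is asynchronous (the NOTE lines mention Redis). The sketch below shows that shape in consumer mode; the `mode` and `storage` settings, the Redis URL, the feature flag and flag set names and the traffic type are assumptions for illustration and do not appear in this diff.

// Minimal sketch of consumer (Redis) mode, where evaluations resolve asynchronously.
// Connection settings and names below are placeholders.
import { SplitFactory } from '@splitsoftware/splitio';

const consumerFactory = SplitFactory({
  core: { authorizationKey: 'YOUR_SDK_KEY' },
  mode: 'consumer',
  storage: { type: 'REDIS', options: { url: 'redis://localhost:6379/0' } }
});
const asyncClient = consumerFactory.client();
const manager = consumerFactory.manager();

async function inspectAndEvaluate(userId: string) {
  await asyncClient.ready();

  // Manager methods mirror the sync API but resolve asynchronously.
  const names = await manager.names();                      // feature flag names
  const view = await manager.split('new_checkout_flow');    // SplitView | null

  // Evaluations return promises when the storage is asynchronous (e.g. Redis).
  const treatment = await asyncClient.getTreatment(userId, 'new_checkout_flow');
  const byFlagSet = await asyncClient.getTreatmentsByFlagSet(userId, 'checkout');

  // track resolves to a boolean indicating whether the event was queued.
  const queued = await asyncClient.track(userId, 'user', 'conversion', 9.99);

  return { names, view, treatment, byFlagSet, queued };
}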