diff --git a/.eslintrc b/.eslintrc index 383d3b6..6313c7b 100644 --- a/.eslintrc +++ b/.eslintrc @@ -5,6 +5,7 @@ "parser": "@typescript-eslint/parser", "plugins": [ "@typescript-eslint", + "eslint-plugin-tsdoc", "import" ], @@ -44,7 +45,7 @@ ], "rules": { "no-restricted-syntax": ["error", "ForOfStatement", "ForInStatement", "ArrayPattern"], - "compat/compat": ["error", "defaults, ie 10"], + "compat/compat": ["error", "defaults"], "no-throw-literal": "error", "import/no-default-export": "error", "import/no-self-import": "error", @@ -60,7 +61,14 @@ "files": ["types/**"], "rules": { "no-use-before-define": "off", - "no-undef": "off" // turned off to avoid issue with triple-slash path directive + "no-undef": "off" // turned off to avoid issue with SplitIO namespace not defined + } + }, + { + // Enable TSDoc rules for TypeScript files, allowing the use of JSDoc in JS files. + "files": ["**/*.ts"], + "rules": { + "tsdoc/syntax": "warn" } } ], diff --git a/CHANGES.txt b/CHANGES.txt index e45493b..916ccad 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,3 +1,13 @@ +1.0.0 (November 1, 2024) + - Added support for targeting rules based on large segments. + - Added `factory.destroy()` method, which invokes the `destroy` method of all clients created by the factory. + - Updated @splitsoftware/splitio-commons package to version 2.0.0 that includes major updates and updated some transitive dependencies for vulnerability fixes. + - BREAKING CHANGES: + - Dropped support for Split Proxy below version 5.9.0. The SDK now requires Split Proxy 5.9.0 or above. + - Removed internal ponyfills for the `Map` and `Set` global objects, dropping support for IE and other outdated browsers. The SDK now requires the runtime environment to support these features natively or provide a polyfill. + - Removed the deprecated `GoogleAnalyticsToSplit` and `SplitToGoogleAnalytics` pluggable integration modules, along with the related interfaces in the TypeScript definitions. + - Removed the `LocalhostFromObject` export from the default import (`import { LocalhostFromObject } from '@splitsoftware/splitio-browserjs'`). It is no longer necessary to manually import and configure it in the `sync.localhostMode` option to enable localhost mode. + 0.15.0 (September 13, 2024) - Updated @splitsoftware/splitio-commons package to version 1.17.0 that includes minor updates: - Added `sync.requestOptions.getHeaderOverrides` configuration option to enhance SDK HTTP request Headers for Authorization Frameworks. @@ -52,7 +62,7 @@ - Updated some transitive dependencies for vulnerability fixes. - Updated @splitsoftware/splitio-commons package to version 1.8.3 that includes: - Updated SDK_READY_TIMED_OUT event to be emitted immediately when a connection error occurs using pluggable storage (i.e., when the wrapper `connect` promise is rejected) in consumer and partial consumer modes. - - Bugfixing - The `destroy` method has been updated to immediately flag the SDK client as destroyed, to prevent unexpected behaviours when `getTreatment` and `track` methods are called synchronously after `destroy` method is called. + - Bugfixing - The `destroy` method has been updated to immediately flag the SDK client as destroyed, to prevent unexpected behaviors when `getTreatment` and `track` methods are called synchronously after `destroy` method is called. 0.9.5 (May 15, 2023) - Updated @splitsoftware/splitio-commons package to version 1.8.2 that includes minor improvements. 
@@ -62,7 +72,7 @@ 0.9.4 (May 4, 2023) - Updated some transitive dependencies for vulnerability fixes. - - Bugfixing - Updated `unfetch` package as a runtime dependency, required when using the "full" entrypoint (`import { SplitFactory } from '@splitsoftware/splitio-browserjs/full'`). + - Bugfixing - Updated `unfetch` package as a runtime dependency, required when using the "full" import (`import { SplitFactory } from '@splitsoftware/splitio-browserjs/full'`). 0.9.3 (March 20, 2023) - Updated @splitsoftware/splitio-commons package to version 1.8.1 that includes minor improvements. diff --git a/README.md b/README.md index 945a21d..cc2d9c1 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ This SDK is designed to work with Split, the platform for controlled rollouts, w ## Compatibility The JS Browser SDK is a library for Web browser environments. -The library was build to support ES5 syntax and all major browsers. *However, there are a few polyfills that would be required when targeting old browsers that don't have native support for Promises and Fetch Web API. You should include a Promise polyfill like [es6-promise](https://github.com/stefanpenner/es6-promise), and a Fetch polyfill such as [whatwg-fetch](https://cdn.jsdelivr.net/npm/whatwg-fetch@3.5.0/dist/fetch.umd.min.js) or the lightweight [unfetch](https://unpkg.com/unfetch/polyfill).* +The library was built to support ES5 syntax and all major browsers. *However, a few polyfills are required when targeting old browsers that don't have native support for Promises, Maps, Sets, and the Fetch Web API. You should include a polyfill such as [es6-promise](https://github.com/stefanpenner/es6-promise) for Promises, and [whatwg-fetch](https://cdn.jsdelivr.net/npm/whatwg-fetch@3.5.0/dist/fetch.umd.min.js) or the lightweight [unfetch](https://unpkg.com/unfetch/polyfill) for the Fetch API.* ## Getting started Below is a simple index.js example that describes the instantiation and most basic usage of our SDK: diff --git a/karma/e2e.gaIntegration.karma.conf.js b/karma/e2e.gaIntegration.karma.conf.js deleted file mode 100644 index 4688904..0000000 --- a/karma/e2e.gaIntegration.karma.conf.js +++ /dev/null @@ -1,20 +0,0 @@ -const assign = require('lodash/assign'); - -module.exports = function(config) { - 'use strict'; - - config.set(assign({}, require('./config'), { - // list of files / patterns to load in the browser - files: [ - '__tests__/gaIntegration/browser.spec.js' - ], - // prepare code for the browser using rollup - preprocessors: { - '__tests__/gaIntegration/browser.spec.js': ['rollup'] - }, - - // level of logging - // possible values: LOG_DISABLE || LOG_ERROR || LOG_WARN || LOG_INFO || LOG_DEBUG - logLevel: config.LOG_WARN - })); -}; diff --git a/package-lock.json b/package-lock.json index 414ee3e..c395c58 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,16 +1,15 @@ { "name": "@splitsoftware/splitio-browserjs", - "version": "0.15.0", + "version": "1.0.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@splitsoftware/splitio-browserjs", - "version": "0.15.0", + "version": "1.0.0", "license": "Apache-2.0", "dependencies": { - "@splitsoftware/splitio-commons": "1.17.0", - "@types/google.analytics": "0.0.40", + "@splitsoftware/splitio-commons": "2.0.0", "tslib": "^2.3.1", "unfetch": "^4.2.0" }, @@ -26,6 +25,7 @@ "eslint": "^8.48.0", "eslint-plugin-compat": "^4.2.0", "eslint-plugin-import": "^2.25.4", + "eslint-plugin-tsdoc": "^0.3.0", "fetch-mock": "^11.1.3", "jest": "^27.2.3", "karma": 
"^6.4.1", @@ -1253,6 +1253,46 @@ "integrity": "sha512-fjXsjZJvdnC+j4AipnxHbwPCohV0cC1qLh3XHy2+zeXLDb1TVepLK7qDS9UcvNxuEkl/qYPsBf0lad2paHSuIQ==", "dev": true }, + "node_modules/@microsoft/tsdoc": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.0.tgz", + "integrity": "sha512-HZpPoABogPvjeJOdzCOSJsXeL/SMCBgBZMVC3X3d7YYp2gf31MfxhUoYUNwf1ERPJOnQc0wkFn9trqI6ZEdZuA==", + "dev": true + }, + "node_modules/@microsoft/tsdoc-config": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc-config/-/tsdoc-config-0.17.0.tgz", + "integrity": "sha512-v/EYRXnCAIHxOHW+Plb6OWuUoMotxTN0GLatnpOb1xq0KuTNw/WI3pamJx/UbsoJP5k9MCw1QxvvhPcF9pH3Zg==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "0.15.0", + "ajv": "~8.12.0", + "jju": "~1.4.0", + "resolve": "~1.22.2" + } + }, + "node_modules/@microsoft/tsdoc-config/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@microsoft/tsdoc-config/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -1502,10 +1542,11 @@ "dev": true }, "node_modules/@splitsoftware/splitio-commons": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/@splitsoftware/splitio-commons/-/splitio-commons-1.17.0.tgz", - "integrity": "sha512-rvP+0LGUN92bcTytiqyVxq9UzBG5kTkIYjU7b7AU2awBUYgM0bqT3xhQ9/MJ/2fsBbqC6QIsxoKDOz9pMgbAQw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@splitsoftware/splitio-commons/-/splitio-commons-2.0.0.tgz", + "integrity": "sha512-Sz4+vFacl29xw3451z9IUgB4zBFKUWZdCnmOB0DDXA803YKPqjXphdAwN6nV+1vsX9pXV/OS6UaNC4oUICa6PA==", "dependencies": { + "@types/ioredis": "^4.28.0", "tslib": "^2.3.1" }, "peerDependencies": { @@ -1594,11 +1635,6 @@ "integrity": "sha512-nDKoaKJYbnn1MZxUY0cA1bPmmgZbg0cTq7Rh13d0KWYNOiKbqoR+2d89SnRPszGh7ROzSwZ/GOjZ4jPbmmZ6Eg==", "dev": true }, - "node_modules/@types/google.analytics": { - "version": "0.0.40", - "resolved": "https://registry.npmjs.org/@types/google.analytics/-/google.analytics-0.0.40.tgz", - "integrity": "sha512-R3HpnLkqmKxhUAf8kIVvDVGJqPtaaZlW4yowNwjOZUTmYUQEgHh8Nh5wkSXKMroNAuQM8gbXJHmNbbgA8tdb7Q==" - }, "node_modules/@types/graceful-fs": { "version": "4.1.8", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.8.tgz", @@ -1608,6 +1644,14 @@ "@types/node": "*" } }, + "node_modules/@types/ioredis": { + "version": "4.28.10", + "resolved": "https://registry.npmjs.org/@types/ioredis/-/ioredis-4.28.10.tgz", + "integrity": "sha512-69LyhUgrXdgcNDv7ogs1qXZomnfOEnSmrmMFqKgt1XMJxmoOSG/u3wYy13yACIfKuMJ8IhKgHafDO3sx19zVQQ==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.5", "resolved": 
"https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz", @@ -1658,7 +1702,6 @@ "version": "20.8.8", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.8.tgz", "integrity": "sha512-YRsdVxq6OaLfmR9Hy816IMp33xOBjfyOgUd77ehqg96CFywxAPbDbXvAsuN2KVg2HOT8Eh6uAfU+l4WffwPVrQ==", - "dev": true, "dependencies": { "undici-types": "~5.25.1" } @@ -3838,6 +3881,16 @@ "semver": "bin/semver.js" } }, + "node_modules/eslint-plugin-tsdoc": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-tsdoc/-/eslint-plugin-tsdoc-0.3.0.tgz", + "integrity": "sha512-0MuFdBrrJVBjT/gyhkP2BqpD0np1NxNLfQ38xXDlSs/KVVpKI2A6vN7jx2Rve/CyUsvOsMGwp9KKrinv7q9g3A==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "0.15.0", + "@microsoft/tsdoc-config": "0.17.0" + } + }, "node_modules/eslint-scope": { "version": "7.2.2", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", @@ -6011,6 +6064,12 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, + "node_modules/jju": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jju/-/jju-1.4.0.tgz", + "integrity": "sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==", + "dev": true + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -7622,6 +7681,15 @@ "node": ">=0.10.0" } }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/require-main-filename": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", @@ -7751,9 +7819,9 @@ } }, "node_modules/rollup": { - "version": "2.79.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.1.tgz", - "integrity": "sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw==", + "version": "2.79.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.2.tgz", + "integrity": "sha512-fS6iqSPZDs3dr/y7Od6y5nha8dW1YnbgtsyotCVvoFGKbERG++CVRFv1meyGDE1SNItQA8BrnCw7ScdAhRJ3XQ==", "dev": true, "bin": { "rollup": "dist/bin/rollup" @@ -8224,9 +8292,9 @@ "dev": true }, "node_modules/socket.io": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.0.tgz", - "integrity": "sha512-8U6BEgGjQOfGz3HHTYaC/L1GaxDCJ/KM0XTkJly0EhZ5U/du9uNEZy4ZgYzEzIqlx2CMm25CrCqr1ck899eLNA==", + "version": "4.8.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.1.tgz", + "integrity": "sha512-oZ7iUCxph8WYRHHcjBEc9unw3adt5CmSNlppj/5Q4k2RIrhl8Z5yY2Xr4j9zj0+wzVZ0bxmYoGSzKJnRl6A4yg==", "dev": true, "dependencies": { "accepts": "~1.3.4", @@ -9096,8 +9164,7 @@ "node_modules/undici-types": { "version": "5.25.3", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.25.3.tgz", - "integrity": "sha512-Ga1jfYwRn7+cP9v8auvEXN1rX3sWqlayd4HP7OKk4mZWylEmu3KzXDUGrQUN6Ol7qo1gPvB2e5gX6udnyEPgdA==", - "dev": true + "integrity": "sha512-Ga1jfYwRn7+cP9v8auvEXN1rX3sWqlayd4HP7OKk4mZWylEmu3KzXDUGrQUN6Ol7qo1gPvB2e5gX6udnyEPgdA==" }, "node_modules/unfetch": { "version": "4.2.0", @@ -10409,6 +10476,44 @@ "integrity": 
"sha512-fjXsjZJvdnC+j4AipnxHbwPCohV0cC1qLh3XHy2+zeXLDb1TVepLK7qDS9UcvNxuEkl/qYPsBf0lad2paHSuIQ==", "dev": true }, + "@microsoft/tsdoc": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.0.tgz", + "integrity": "sha512-HZpPoABogPvjeJOdzCOSJsXeL/SMCBgBZMVC3X3d7YYp2gf31MfxhUoYUNwf1ERPJOnQc0wkFn9trqI6ZEdZuA==", + "dev": true + }, + "@microsoft/tsdoc-config": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc-config/-/tsdoc-config-0.17.0.tgz", + "integrity": "sha512-v/EYRXnCAIHxOHW+Plb6OWuUoMotxTN0GLatnpOb1xq0KuTNw/WI3pamJx/UbsoJP5k9MCw1QxvvhPcF9pH3Zg==", + "dev": true, + "requires": { + "@microsoft/tsdoc": "0.15.0", + "ajv": "~8.12.0", + "jju": "~1.4.0", + "resolve": "~1.22.2" + }, + "dependencies": { + "ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + }, + "json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + } + } + }, "@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -10583,10 +10688,11 @@ "dev": true }, "@splitsoftware/splitio-commons": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/@splitsoftware/splitio-commons/-/splitio-commons-1.17.0.tgz", - "integrity": "sha512-rvP+0LGUN92bcTytiqyVxq9UzBG5kTkIYjU7b7AU2awBUYgM0bqT3xhQ9/MJ/2fsBbqC6QIsxoKDOz9pMgbAQw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@splitsoftware/splitio-commons/-/splitio-commons-2.0.0.tgz", + "integrity": "sha512-Sz4+vFacl29xw3451z9IUgB4zBFKUWZdCnmOB0DDXA803YKPqjXphdAwN6nV+1vsX9pXV/OS6UaNC4oUICa6PA==", "requires": { + "@types/ioredis": "^4.28.0", "tslib": "^2.3.1" } }, @@ -10664,11 +10770,6 @@ "integrity": "sha512-nDKoaKJYbnn1MZxUY0cA1bPmmgZbg0cTq7Rh13d0KWYNOiKbqoR+2d89SnRPszGh7ROzSwZ/GOjZ4jPbmmZ6Eg==", "dev": true }, - "@types/google.analytics": { - "version": "0.0.40", - "resolved": "https://registry.npmjs.org/@types/google.analytics/-/google.analytics-0.0.40.tgz", - "integrity": "sha512-R3HpnLkqmKxhUAf8kIVvDVGJqPtaaZlW4yowNwjOZUTmYUQEgHh8Nh5wkSXKMroNAuQM8gbXJHmNbbgA8tdb7Q==" - }, "@types/graceful-fs": { "version": "4.1.8", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.8.tgz", @@ -10678,6 +10779,14 @@ "@types/node": "*" } }, + "@types/ioredis": { + "version": "4.28.10", + "resolved": "https://registry.npmjs.org/@types/ioredis/-/ioredis-4.28.10.tgz", + "integrity": "sha512-69LyhUgrXdgcNDv7ogs1qXZomnfOEnSmrmMFqKgt1XMJxmoOSG/u3wYy13yACIfKuMJ8IhKgHafDO3sx19zVQQ==", + "requires": { + "@types/node": "*" + } + }, "@types/istanbul-lib-coverage": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz", @@ -10728,7 +10837,6 @@ "version": "20.8.8", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.8.tgz", "integrity": "sha512-YRsdVxq6OaLfmR9Hy816IMp33xOBjfyOgUd77ehqg96CFywxAPbDbXvAsuN2KVg2HOT8Eh6uAfU+l4WffwPVrQ==", - "dev": true, "requires": { "undici-types": "~5.25.1" } @@ -12342,6 
+12450,16 @@ } } }, + "eslint-plugin-tsdoc": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-tsdoc/-/eslint-plugin-tsdoc-0.3.0.tgz", + "integrity": "sha512-0MuFdBrrJVBjT/gyhkP2BqpD0np1NxNLfQ38xXDlSs/KVVpKI2A6vN7jx2Rve/CyUsvOsMGwp9KKrinv7q9g3A==", + "dev": true, + "requires": { + "@microsoft/tsdoc": "0.15.0", + "@microsoft/tsdoc-config": "0.17.0" + } + }, "eslint-scope": { "version": "7.2.2", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", @@ -13943,6 +14061,12 @@ } } }, + "jju": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jju/-/jju-1.4.0.tgz", + "integrity": "sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==", + "dev": true + }, "js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -15184,6 +15308,12 @@ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true + }, "require-main-filename": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", @@ -15283,9 +15413,9 @@ } }, "rollup": { - "version": "2.79.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.1.tgz", - "integrity": "sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw==", + "version": "2.79.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.2.tgz", + "integrity": "sha512-fS6iqSPZDs3dr/y7Od6y5nha8dW1YnbgtsyotCVvoFGKbERG++CVRFv1meyGDE1SNItQA8BrnCw7ScdAhRJ3XQ==", "dev": true, "requires": { "fsevents": "~2.3.2" @@ -15609,9 +15739,9 @@ "dev": true }, "socket.io": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.0.tgz", - "integrity": "sha512-8U6BEgGjQOfGz3HHTYaC/L1GaxDCJ/KM0XTkJly0EhZ5U/du9uNEZy4ZgYzEzIqlx2CMm25CrCqr1ck899eLNA==", + "version": "4.8.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.1.tgz", + "integrity": "sha512-oZ7iUCxph8WYRHHcjBEc9unw3adt5CmSNlppj/5Q4k2RIrhl8Z5yY2Xr4j9zj0+wzVZ0bxmYoGSzKJnRl6A4yg==", "dev": true, "requires": { "accepts": "~1.3.4", @@ -16239,8 +16369,7 @@ "undici-types": { "version": "5.25.3", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.25.3.tgz", - "integrity": "sha512-Ga1jfYwRn7+cP9v8auvEXN1rX3sWqlayd4HP7OKk4mZWylEmu3KzXDUGrQUN6Ol7qo1gPvB2e5gX6udnyEPgdA==", - "dev": true + "integrity": "sha512-Ga1jfYwRn7+cP9v8auvEXN1rX3sWqlayd4HP7OKk4mZWylEmu3KzXDUGrQUN6Ol7qo1gPvB2e5gX6udnyEPgdA==" }, "unfetch": { "version": "4.2.0", diff --git a/package.json b/package.json index 5a4a07a..d685b0b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@splitsoftware/splitio-browserjs", - "version": "0.15.0", + "version": "1.0.0", "description": "Split SDK for JavaScript on Browser", "main": "cjs/index.js", "module": "esm/index.js", @@ -14,8 +14,7 @@ "esm", "src", "types", - "full", - "scripts/ga-to-split-autorequire.js" + "full" ], "scripts": { "check": "npm run check:lint && npm run check:types && npm run check:version", @@ -27,7 +26,6 @@ "build:cjs": "rimraf cjs && tsc -outDir cjs -m CommonJS && ./scripts/build_cjs_replace_imports.sh", 
"build:umd-visualizer": "rimraf umd && rollup --config rollup.visualizer.config.js", "build:umd": "rimraf umd && rollup --config rollup.ci.config.js --branch=$BUILD_BRANCH", - "build:ga-to-split-autorequire": "terser ./node_modules/@splitsoftware/splitio-commons/src/integrations/ga/autoRequire.js --mangle --output ./scripts/ga-to-split-autorequire.js", "test": "npm run test:unit && npm run test:e2e", "test:unit": "jest", "test:e2e": "npm run test:e2e-logger && npm run test:e2e-offline && npm run test:e2e-online && npm run test:e2e-destroy && npm run test:e2e-errorCatching && npm run test:e2e-push && npm run test:e2e-consumer", @@ -37,11 +35,8 @@ "test:e2e-destroy": "karma start karma/e2e.destroy.karma.conf.js", "test:e2e-errorCatching": "karma start karma/e2e.errorCatching.karma.conf.js", "test:e2e-push": "karma start karma/e2e.push.karma.conf.js", - "test:e2e-gaIntegration": "karma start karma/e2e.gaIntegration.karma.conf.js", "test:e2e-consumer": "karma start karma/e2e.consumer.karma.conf.js", - "pretest-ts-decls": "npm run build:esm && npm run build:cjs && npm link", - "test-ts-decls": "./scripts/ts-tests.sh", - "posttest-ts-decls": "npm rm --location=global @splitsoftware/splitio-browserjs && npm install", + "test-ts-decls": "tsc --build ts-tests", "all": "npm run check && npm run build && npm run test-ts-decls && npm run test", "publish:rc": "npm run check && npm run build && npm publish --tag rc", "publish:stable": "npm run check && npm run build && npm publish" @@ -64,8 +59,7 @@ "bugs": "https://github.com/splitio/javascript-browser-client/issues", "homepage": "https://github.com/splitio/javascript-browser-client#readme", "dependencies": { - "@splitsoftware/splitio-commons": "1.17.0", - "@types/google.analytics": "0.0.40", + "@splitsoftware/splitio-commons": "2.0.0", "tslib": "^2.3.1", "unfetch": "^4.2.0" }, @@ -81,6 +75,7 @@ "eslint": "^8.48.0", "eslint-plugin-compat": "^4.2.0", "eslint-plugin-import": "^2.25.4", + "eslint-plugin-tsdoc": "^0.3.0", "fetch-mock": "^11.1.3", "jest": "^27.2.3", "karma": "^6.4.1", diff --git a/scripts/ga-to-split-autorequire.js b/scripts/ga-to-split-autorequire.js deleted file mode 100644 index 7f8b885..0000000 --- a/scripts/ga-to-split-autorequire.js +++ /dev/null @@ -1 +0,0 @@ -(function(n,t,e){n[e]=n[e]||t;n[t]=n[t]||function(){n[t].q.push(arguments)};n[t].q=n[t].q||[];var r={};function i(n){return typeof n==="object"&&typeof n.name==="string"&&n.name}function o(e){if(e&&e[0]==="create"){var o=i(e[1])||i(e[2])||i(e[3])||(typeof e[3]==="string"?e[3]:undefined);if(!r[o]){r[o]=true;n[t]((o?o+".":"")+"require","splitTracker")}}}n[t].q.forEach(o);var u=n[t].q.push;n[t].q.push=function(n){var t=u.apply(this,arguments);o(n);return t}})(window,"ga","GoogleAnalyticsObject"); \ No newline at end of file diff --git a/scripts/ts-tests.sh b/scripts/ts-tests.sh deleted file mode 100755 index f3a9491..0000000 --- a/scripts/ts-tests.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash - -cd ts-tests ## Go to typescript tests folder -echo "Installing dependencies for TypeScript declarations testing..." -npm install ## Install dependencies -echo "Dependencies installed, linking the package." -npm link @splitsoftware/splitio-browserjs ## Link to the cloned code -echo "Running tsc compiler." -../node_modules/.bin/tsc ## Run typescript compiler. No need for flags as we have a tsconfig.json file - -if [ $? -eq 0 ] -then - echo "✅ Successfully compiled TS tests." - npm unlink @splitsoftware/splitio-browserjs - exit 0 -else - echo "☠️ Error compiling TS tests." 
- npm unlink @splitsoftware/splitio-browserjs - exit 1 -fi diff --git a/src/__tests__/browserSuites/evaluations.spec.js b/src/__tests__/browserSuites/evaluations.spec.js index cc8bee2..b40b0b1 100644 --- a/src/__tests__/browserSuites/evaluations.spec.js +++ b/src/__tests__/browserSuites/evaluations.spec.js @@ -365,7 +365,7 @@ export default function (config, fetchMock, assert) { for (i; i < SDK_INSTANCES_TO_TEST; i++) { let splitio = SplitFactory(config); - fetchMock.getOnce('https://sdk.split.io/api/mySegments/aaaaaaklmnbv', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce('https://sdk.split.io/api/memberships/aaaaaaklmnbv', { status: 200, body: { ms: {} } }); // on TA tests, this is going to return one against the mocked seed. let clientTABucket1 = splitio.client('aaaaaaklmnbv'); diff --git a/src/__tests__/browserSuites/fetch-specific-splits.spec.js b/src/__tests__/browserSuites/fetch-specific-splits.spec.js index 10e48f9..6f7a590 100644 --- a/src/__tests__/browserSuites/fetch-specific-splits.spec.js +++ b/src/__tests__/browserSuites/fetch-specific-splits.spec.js @@ -24,15 +24,15 @@ export default function fetchSpecificSplits(fetchMock, assert) { const queryString = queryStrings[i] || ''; let factory; - fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.1&since=-1' + queryString, { status: 200, body: { splits: [], since: -1, till: 1457552620999 } }); - fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.1&since=1457552620999' + queryString, { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.1&since=1457552620999' + queryString, function () { + fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.2&since=-1' + queryString, { status: 200, body: { splits: [], since: -1, till: 1457552620999 } }); + fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.2&since=1457552620999' + queryString, { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(urls.sdk + '/splitChanges?s=1.2&since=1457552620999' + queryString, function () { factory.client().destroy().then(() => { assert.pass(`splitFilters #${i}`); }); return { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }; }); - fetchMock.get(urls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { 'mySegments': [] } }); + fetchMock.get(urls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { 'ms': {} } }); factory = SplitFactory(config); diff --git a/src/__tests__/browserSuites/ignore-ip-addresses-setting.spec.js b/src/__tests__/browserSuites/ignore-ip-addresses-setting.spec.js index c2c391d..6010806 100644 --- a/src/__tests__/browserSuites/ignore-ip-addresses-setting.spec.js +++ b/src/__tests__/browserSuites/ignore-ip-addresses-setting.spec.js @@ -101,9 +101,9 @@ export default function (fetchMock, assert) { // Mock GET endpoints before creating the client const settings = settingsFactory(config); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(url(settings, `/mySegments/${encodeURIComponent(config.core.key)}`), { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), 
{ status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(url(settings, `/memberships/${encodeURIComponent(config.core.key)}`), { status: 200, body: { ms: {} } }); // Init Split client const splitio = SplitFactory(config); diff --git a/src/__tests__/browserSuites/impressions.debug.spec.js b/src/__tests__/browserSuites/impressions.debug.spec.js index 3a2eeb4..18d875b 100644 --- a/src/__tests__/browserSuites/impressions.debug.spec.js +++ b/src/__tests__/browserSuites/impressions.debug.spec.js @@ -2,7 +2,7 @@ import { SplitFactory } from '../../'; import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; import { DEBUG } from '@splitsoftware/splitio-commons/src/utils/constants'; import { url } from '../testUtils'; @@ -21,9 +21,9 @@ const settings = settingsFactory({ export default function (fetchMock, assert) { // Mocking this specific route to make sure we only get the items we want to test from the handlers. - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); const splitio = SplitFactory({ core: { diff --git a/src/__tests__/browserSuites/impressions.spec.js b/src/__tests__/browserSuites/impressions.spec.js index 42c8580..aa4cff4 100644 --- a/src/__tests__/browserSuites/impressions.spec.js +++ b/src/__tests__/browserSuites/impressions.spec.js @@ -2,7 +2,7 @@ import { SplitFactory } from '../../'; import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; import { OPTIMIZED } from '@splitsoftware/splitio-commons/src/utils/constants'; import { truncateTimeFrame } from '@splitsoftware/splitio-commons/src/utils/time'; import { url } from '../testUtils'; @@ -24,9 +24,9 @@ let truncatedTimeFrame; export default function (fetchMock, assert) { // Mocking this specific route to make sure we only get the items we want to test from the handlers. 
- fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); const splitio = SplitFactory({ core: { diff --git a/src/__tests__/browserSuites/manager.spec.js b/src/__tests__/browserSuites/manager.spec.js index d02b6ae..81143c9 100644 --- a/src/__tests__/browserSuites/manager.spec.js +++ b/src/__tests__/browserSuites/manager.spec.js @@ -4,7 +4,7 @@ import map from 'lodash/map'; import { url } from '../testUtils'; export default async function (settings, fetchMock, assert) { - fetchMock.getOnce({ url: url(settings, '/splitChanges?s=1.1&since=-1'), overwriteRoutes: true }, { status: 200, body: splitChangesMockReal }); + fetchMock.getOnce({ url: url(settings, '/splitChanges?s=1.2&since=-1'), overwriteRoutes: true }, { status: 200, body: splitChangesMockReal }); const mockSplits = splitChangesMockReal; diff --git a/src/__tests__/browserSuites/push-corner-cases.spec.js b/src/__tests__/browserSuites/push-corner-cases.spec.js index 7814d00..fe3e778 100644 --- a/src/__tests__/browserSuites/push-corner-cases.spec.js +++ b/src/__tests__/browserSuites/push-corner-cases.spec.js @@ -36,8 +36,8 @@ const MILLIS_SPLIT_CHANGES_RESPONSE = 400; /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*), auth, SSE connection, SDK_READY_FROM_CACHE - * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*), auth, SSE connection, SDK_READY_FROM_CACHE + * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /memberships/*) * 0.2 secs: SPLIT_KILL event -> /splitChanges * 0.4 secs: /splitChanges response --> SDK_READY */ @@ -70,13 +70,13 @@ export function testSplitKillOnReadyFromCache(fetchMock, assert) { }); // 1 auth request - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushEnabledNicolas }); - // 2 mySegments requests: initial sync and after SSE opened - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 2 }, { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushEnabledNicolas }); + // 2 memberships requests: initial sync and after SSE opened + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 2 }, { status: 200, body: { ms: {} } }); // 2 splitChanges request: initial sync and after SSE opened. 
Sync after SPLIT_KILL is not performed because SplitsSyncTask is "executing" - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=25'), { status: 200, body: splitChangesMock1 }, { delay: MILLIS_SPLIT_CHANGES_RESPONSE, /* delay response */ }); - fetchMock.getOnce(url(settings, `/splitChanges?s=1.1&since=${splitChangesMock1.till}`), { status: 200, body: { splits: [], since: splitChangesMock1.till, till: splitChangesMock1.till } }, { delay: MILLIS_SPLIT_CHANGES_RESPONSE - 100, /* delay response */ }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=25'), { status: 200, body: splitChangesMock1 }, { delay: MILLIS_SPLIT_CHANGES_RESPONSE, /* delay response */ }); + fetchMock.getOnce(url(settings, `/splitChanges?s=1.2&since=${splitChangesMock1.till}`), { status: 200, body: { splits: [], since: splitChangesMock1.till, till: splitChangesMock1.till } }, { delay: MILLIS_SPLIT_CHANGES_RESPONSE - 100, /* delay response */ }); fetchMock.get(new RegExp('.*'), function (url) { assert.fail('unexpected GET request with url: ' + url); diff --git a/src/__tests__/browserSuites/push-fallbacking.spec.js b/src/__tests__/browserSuites/push-fallback.spec.js similarity index 67% rename from src/__tests__/browserSuites/push-fallbacking.spec.js rename to src/__tests__/browserSuites/push-fallback.spec.js index 83832c0..dc8df88 100644 --- a/src/__tests__/browserSuites/push-fallbacking.spec.js +++ b/src/__tests__/browserSuites/push-fallback.spec.js @@ -5,9 +5,9 @@ import splitChangesMock1 from '../mocks/splitchanges.real.withSegments.json'; // since: -1, till: 1457552620999 (for initial fetch) import splitChangesMock2 from '../mocks/splitchanges.real.updateWithSegments.json'; // since: 1457552620999, till: 1457552649999 (for SPLIT_UPDATE event) import splitChangesMock3 from '../mocks/splitchanges.real.updateWithoutSegments.json'; // since: 1457552649999, till: 1457552669999 (for second polling fetch) -import mySegmentsNicolasMock1 from '../mocks/mysegments.nicolas@split.io.json'; -import mySegmentsNicolasMock2 from '../mocks/mysegments.nicolas@split.io.mock2.json'; -import mySegmentsMarcio from '../mocks/mysegments.marcio@split.io.json'; +import membershipsNicolasMock1 from '../mocks/memberships.nicolas@split.io.json'; +import membershipsNicolasMock2 from '../mocks/memberships.nicolas@split.io.mock2.json'; +import membershipsMarcio from '../mocks/memberships.marcio@split.io.json'; import occupancy0ControlPriMessage from '../mocks/message.OCCUPANCY.0.control_pri.1586987434550.json'; import occupancy1ControlPriMessage from '../mocks/message.OCCUPANCY.1.control_pri.1586987434450.json'; @@ -20,7 +20,7 @@ import streamingPausedControlPriMessage2 from '../mocks/message.CONTROL.STREAMIN import streamingDisabledControlPriMessage from '../mocks/message.CONTROL.STREAMING_DISABLED.control_pri.1586987434950.json'; import splitUpdateMessage from '../mocks/message.SPLIT_UPDATE.1457552649999.json'; -import mySegmentsUpdateMessage from '../mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json'; +import mySegmentsUpdateMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552640000.json'; import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json'; import authPushEnabledNicolasAndMarcio from '../mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json'; @@ -39,9 +39,9 @@ const userKey = 'nicolas@split.io'; const secondUserKey = 'marcio@split.io'; const baseUrls = { - sdk: 'https://sdk.push-fallbacking/api', - events: 'https://events.push-fallbacking/api', - 
auth: 'https://auth.push-fallbacking/api' + sdk: 'https://sdk.push-fallback/api', + events: 'https://events.push-fallback/api', + auth: 'https://auth.push-fallback/api' }; const config = { core: { @@ -55,7 +55,6 @@ const config = { }, urls: baseUrls, streamingEnabled: true, - // debug: true, }; const settings = settingsFactory(config); @@ -67,11 +66,11 @@ const MILLIS_CREATE_CLIENT_DURING_PUSH = MILLIS_STREAMING_UP_OCCUPANCY + 50; const MILLIS_SPLIT_UPDATE_EVENT_DURING_PUSH = MILLIS_STREAMING_UP_OCCUPANCY + 100; const MILLIS_STREAMING_PAUSED_CONTROL = MILLIS_SPLIT_UPDATE_EVENT_DURING_PUSH + 100; -const MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_POLLING = MILLIS_STREAMING_PAUSED_CONTROL + 100; +const MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_POLLING = MILLIS_STREAMING_PAUSED_CONTROL + 100; const MILLIS_STREAMING_RESUMED_CONTROL = MILLIS_STREAMING_PAUSED_CONTROL + settings.scheduler.featuresRefreshRate + 100; -const MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_PUSH = MILLIS_STREAMING_RESUMED_CONTROL + 100; +const MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_PUSH = MILLIS_STREAMING_RESUMED_CONTROL + 100; -const MILLIS_STREAMING_PAUSED_CONTROL_2 = MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_PUSH + 100; +const MILLIS_STREAMING_PAUSED_CONTROL_2 = MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_PUSH + 100; const MILLIS_STREAMING_RESET_WHILE_PUSH_DOWN = MILLIS_STREAMING_PAUSED_CONTROL_2 + 100; const MILLIS_STREAMING_RESET_WHILE_PUSH_UP = MILLIS_STREAMING_RESET_WHILE_PUSH_DOWN + settings.scheduler.featuresRefreshRate; const MILLIS_STREAMING_DISABLED_CONTROL = MILLIS_STREAMING_RESET_WHILE_PUSH_UP + 100; @@ -79,30 +78,30 @@ const MILLIS_DESTROY = MILLIS_STREAMING_DISABLED_CONTROL + settings.scheduler.fe /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*), auth, SSE connection - * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/nicolas) - * 0.2 secs: Streaming down (OCCUPANCY event) -> fetch due to fallback to polling (/splitChanges, /mySegments/nicolas) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/nicolas), auth, SSE connection + * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /memberships/nicolas) + * 0.2 secs: Streaming down (OCCUPANCY event) -> fetch due to fallback to polling (/splitChanges, /memberships/nicolas) * 0.3 secs: SPLIT_UPDATE event ignored * 0.4 secs: periodic fetch due to polling (/splitChanges) - * 0.45 secs: periodic fetch due to polling (/mySegments/*) - * 0.5 secs: Streaming up (OCCUPANCY event) -> syncAll (/splitChanges, /mySegments/nicolas) - * 0.55 secs: create a new client while streaming -> initial fetch (/mySegments/marcio), auth, SSE connection and syncAll (/splitChanges, /mySegments/nicolas, /mySegments/marcio) + * 0.45 secs: periodic fetch due to polling (/memberships/nicolas) + * 0.5 secs: Streaming up (OCCUPANCY event) -> syncAll (/splitChanges, /memberships/nicolas) + * 0.55 secs: create a new client while streaming -> initial fetch (/memberships/marcio), auth, SSE connection and syncAll (/splitChanges, /memberships/nicolas, /memberships/marcio) * 0.6 secs: SPLIT_UPDATE event -> /splitChanges - * 0.7 secs: Streaming down (CONTROL event) -> fetch due to fallback to polling (/splitChanges, /mySegments/nicolas, /mySegments/marcio) - * 0.8 secs: MY_SEGMENTS_UPDATE event ignored + * 0.7 secs: Streaming down (CONTROL event) -> fetch due to fallback to polling (/splitChanges, /memberships/nicolas, /memberships/marcio) + * 0.8 secs: MEMBERSHIPS_MS_UPDATE event ignored * 0.9 secs: periodic fetch due to polling 
(/splitChanges) - * 0.95 secs: periodic fetch due to polling (/mySegments/nicolas, /mySegments/marcio, /mySegments/facundo) - * 1.0 secs: Streaming up (CONTROL event) -> syncAll (/splitChanges, /mySegments/nicolas, /mySegments/marcio, /mySegments/facundo) - * 1.1 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas - * 1.2 secs: Streaming down (CONTROL event) -> fetch due to fallback to polling (/splitChanges, /mySegments/nicolas, /mySegments/marcio, /mySegments/facundo) + * 0.95 secs: periodic fetch due to polling (/memberships/nicolas, /memberships/marcio) + * 1.0 secs: Streaming up (CONTROL event) -> syncAll (/splitChanges, /memberships/nicolas, /memberships/marcio) + * 1.1 secs: Unbounded MEMBERSHIPS_MS_UPDATE event -> /memberships/nicolas, /memberships/marcio + * 1.2 secs: Streaming down (CONTROL event) -> fetch due to fallback to polling (/splitChanges, /memberships/nicolas, /memberships/marcio) * 1.3 secs: STREAMING_RESET control event -> auth, SSE connection, syncAll and stop polling * 1.5 secs: STREAMING_RESET control event -> auth, SSE connection, syncAll - * 1.6 secs: Streaming closed (CONTROL STREAMING_DISABLED event) -> fetch due to fallback to polling (/splitChanges, /mySegments/nicolas, /mySegments/marcio, /mySegments/facundo) - * 1.8 secs: periodic fetch due to polling (/splitChanges): due to update without segments, mySegments are not fetched + * 1.6 secs: Streaming closed (CONTROL STREAMING_DISABLED event) -> fetch due to fallback to polling (/splitChanges, /memberships/nicolas, /memberships/marcio) + * 1.8 secs: periodic fetch due to polling (/splitChanges): due to update without segments, memberships are not fetched * 2.0 secs: periodic fetch due to polling (/splitChanges) * 2.1 secs: destroy client */ -export function testFallbacking(fetchMock, assert) { +export function testFallback(fetchMock, assert) { assert.plan(20); fetchMock.reset(); @@ -111,7 +110,7 @@ export function testFallbacking(fetchMock, assert) { // mock SSE open and message events setMockListener((eventSourceInstance) => { - const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; assert.equals(eventSourceInstance.url, expectedSSEurl, 'EventSource URL is the expected'); setTimeout(() => { @@ -137,7 +136,7 @@ export function testFallbacking(fetchMock, assert) { secondClient = splitio.client(secondUserKey); setMockListener((eventSourceInstance) => { - const expectedSSEurl = `${url(settings, 
'/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_MjE0MTkxOTU2Mg%3D%3D_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolasAndMarcio.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolasAndMarcio.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; assert.equals(eventSourceInstance.url, expectedSSEurl, 'new EventSource URL is the expected'); eventSourceInstance.emitOpen(); @@ -156,7 +155,7 @@ export function testFallbacking(fetchMock, assert) { setTimeout(() => { assert.equal(eventSourceInstance.readyState, EventSourceMock.OPEN, 'EventSource connection keeps opened after PUSH_SUBSYSTEM_DOWN (STREAMING_PAUSED event)'); eventSourceInstance.emitMessage(mySegmentsUpdateMessage); - }, MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_POLLING - MILLIS_CREATE_CLIENT_DURING_PUSH); // send a MY_SEGMENTS_UPDATE event while polling, to check that we are ignoring it + }, MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_POLLING - MILLIS_CREATE_CLIENT_DURING_PUSH); // send a MEMBERSHIPS_MS_UPDATE event while polling, to check that we are ignoring it setTimeout(() => { eventSourceInstance.emitMessage(streamingResumedControlPriMessage); @@ -168,7 +167,7 @@ export function testFallbacking(fetchMock, assert) { assert.equal(client.getTreatment('real_split'), 'on', 'evaluation with updated segment'); }); eventSourceInstance.emitMessage(mySegmentsUpdateMessage); - }, MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_PUSH - MILLIS_CREATE_CLIENT_DURING_PUSH); // send a MY_SEGMENTS_UPDATE event + }, MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_PUSH - MILLIS_CREATE_CLIENT_DURING_PUSH); // send a MEMBERSHIPS_MS_UPDATE event setTimeout(() => { eventSourceInstance.emitMessage(streamingPausedControlPriMessage2); @@ -207,90 +206,91 @@ export function testFallbacking(fetchMock, assert) { }); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth success'); return { status: 200, body: authPushEnabledNicolas }; }); - // initial split and mySegment sync - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + // initial split and memberships sync + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // split and segment sync after SSE opened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(url(settings, 
'/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // fetches due to first fallback to polling - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_STREAMING_DOWN_OCCUPANCY + settings.scheduler.featuresRefreshRate), 'fetch due to first fallback to polling'); return { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // split and segment sync due to streaming up (OCCUPANCY event) - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); - // creating of second client during streaming: initial mysegment sync, reauth and syncAll due to new client - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); - fetchMock.get({ url: url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}&users=${encodeURIComponent(secondUserKey)}`), repeat: 3 /* initial + 2 STREAMING_RESET */ }, (url, opts) => { + // creating of second client during streaming: initial memberships sync, reauth and syncAll due to new client + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); + fetchMock.get({ url: url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}&users=${encodeURIComponent(secondUserKey)}`), repeat: 3 /* initial + 2 STREAMING_RESET */ }, (url, opts) => { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('second auth success'); return { status: 200, body: authPushEnabledNicolasAndMarcio }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(url(settings, 
'/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); // fetch due to SPLIT_UPDATE event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_SPLIT_UPDATE_EVENT_DURING_PUSH), 'sync due to SPLIT_UPDATE event'); return { status: 200, body: splitChangesMock2 }; }); // fetches due to second fallback to polling - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); // continue fetches due to second fallback to polling - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_STREAMING_PAUSED_CONTROL + settings.scheduler.featuresRefreshRate), 'fetch due to second fallback to polling'); return { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); // split and segment sync due to streaming up (CONTROL event) - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + 
fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); - // fetch due to MY_SEGMENTS_UPDATE event - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function () { + // fetch due to MEMBERSHIPS_MS_UPDATE event + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function () { const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_MY_SEGMENTS_UPDATE_EVENT_DURING_PUSH), 'sync due to MY_SEGMENTS_UPDATE event'); - return { status: 200, body: mySegmentsNicolasMock2 }; + assert.true(nearlyEqual(lapse, MILLIS_MEMBERSHIPS_MS_UPDATE_EVENT_DURING_PUSH), 'sync due to MEMBERSHIPS_MS_UPDATE event'); + return { status: 200, body: membershipsNicolasMock2 }; }); + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); // fetches due to third fallback to polling (STREAMING_PAUSED), two sync all (two STREAMING_RESET events) and fourth fallback (STREAMING_DISABLED) - fetchMock.get({ url: url(settings, '/splitChanges?s=1.1&since=1457552649999'), repeat: 4 }, { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 4 }, { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.get({ url: url(settings, '/mySegments/marcio%40split.io'), repeat: 4 }, { status: 200, body: mySegmentsMarcio }); + fetchMock.get({ url: url(settings, '/splitChanges?s=1.2&since=1457552649999'), repeat: 4 }, { status: 200, body: { splits: [], since: 1457552649999, till: 1457552649999 } }); + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 4 }, { status: 200, body: membershipsNicolasMock1 }); + fetchMock.get({ url: url(settings, '/memberships/marcio%40split.io'), repeat: 4 }, { status: 200, body: membershipsMarcio }); - // Periodic fetch due to polling (mySegments is not fetched due to smart pausing) - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), function () { + // Periodic fetch due to polling (memberships is not fetched due to smart pausing) + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_STREAMING_DISABLED_CONTROL + settings.scheduler.featuresRefreshRate), 'fetch due to fourth fallback to polling'); return { status: 200, body: splitChangesMock3 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552669999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552669999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_STREAMING_DISABLED_CONTROL + settings.scheduler.featuresRefreshRate * 2), 'fetch due to fourth fallback to polling'); return { status: 200, body: { splits: [], since: 1457552669999, till: 1457552669999 } }; diff --git a/src/__tests__/browserSuites/push-initialization-nopush.spec.js b/src/__tests__/browserSuites/push-initialization-nopush.spec.js index 5687399..1df52e5 100644 --- a/src/__tests__/browserSuites/push-initialization-nopush.spec.js +++ b/src/__tests__/browserSuites/push-initialization-nopush.spec.js @@ -2,7 +2,7 @@ import { SplitFactory } from '../../'; import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsNicolas from 
'../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolas from '../mocks/memberships.nicolas@split.io.json'; import authPushDisabled from '../mocks/auth.pushDisabled.json'; import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json'; import authInvalidCredentials from '../mocks/auth.invalidCredentials.txt'; @@ -38,22 +38,22 @@ const settings = settingsFactory(config); /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*) and auth (success but push disabled) - * 0.0 secs: syncAll if falling back to polling (/splitChanges, /mySegments/*) - * 0.1 secs: polling (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*) and auth (success but push disabled) + * 0.0 secs: syncAll if falling back to polling (/splitChanges, /memberships/*) + * 0.1 secs: polling (/splitChanges, /memberships/*) */ function testInitializationFail(fetchMock, assert, fallbackToPolling) { let start, splitio, client, ready = false; - fetchMock.get(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolas }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function () { + fetchMock.get(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolas }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'initial sync'); return { status: 200, body: splitChangesMock1 }; }); if (fallbackToPolling) { - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { assert.true(ready, 'client ready'); const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'polling (first fetch)'); @@ -61,7 +61,7 @@ function testInitializationFail(fetchMock, assert, fallbackToPolling) { }); } - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { assert.true(ready, 'client ready'); const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, settings.scheduler.featuresRefreshRate), 'polling (second fetch)'); @@ -83,7 +83,7 @@ function testInitializationFail(fetchMock, assert, fallbackToPolling) { export function testAuthWithPushDisabled(fetchMock, assert) { assert.plan(6); - fetchMock.getOnce(`https://auth.push-initialization-nopush/api/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`, function (url, opts) { + fetchMock.getOnce(`https://auth.push-initialization-nopush/api/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`, function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth'); return { status: 200, body: authPushDisabled }; @@ -96,7 +96,7 @@ export function testAuthWithPushDisabled(fetchMock, assert) { export function testAuthWith401(fetchMock, assert) { assert.plan(6); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth'); return { status: 401, body: authInvalidCredentials }; @@ -122,7 +122,7 @@ export function 
testSSEWithNonRetryableError(fetchMock, assert) { assert.plan(7); // Auth successes - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth successes'); return { status: 200, body: authPushEnabledNicolas }; diff --git a/src/__tests__/browserSuites/push-initialization-retries.spec.js b/src/__tests__/browserSuites/push-initialization-retries.spec.js index d907d21..027cece 100644 --- a/src/__tests__/browserSuites/push-initialization-retries.spec.js +++ b/src/__tests__/browserSuites/push-initialization-retries.spec.js @@ -3,7 +3,7 @@ import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; import authPushDisabled from '../mocks/auth.pushDisabled.json'; import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json'; import authPushBadToken from '../mocks/auth.pushBadToken.json'; -import mySegmentsNicolasMock from '../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolasMock from '../mocks/memberships.nicolas@split.io.json'; import { nearlyEqual, url } from '../testUtils'; @@ -40,49 +40,49 @@ const settings = settingsFactory(config); /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*) and first auth attempt (fail due to bad token) - * 0.0 secs: polling (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*) and first auth attempt (fail due to bad token) + * 0.0 secs: polling (/splitChanges, /memberships/*) * 0.1 secs: second push connect attempt (auth fail due to network error) - * 0.2 secs: polling (/splitChanges, /mySegments/*) + * 0.2 secs: polling (/splitChanges, /memberships/*) * 0.3 secs: third push connect attempt (auth success but push disabled) - * 0.4 secs: polling (/splitChanges, /mySegments/*) + * 0.4 secs: polling (/splitChanges, /memberships/*) */ export function testPushRetriesDueToAuthErrors(fetchMock, assert) { let start, splitio, client, ready = false; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('first auth attempt'); return { status: 200, body: authPushBadToken }; }); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { throws: new TypeError('Network error') }); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { throws: new TypeError('Network error') }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); const lapse = Date.now() - start; const expected = (settings.scheduler.pushRetryBackoffBase * Math.pow(2, 0) + settings.scheduler.pushRetryBackoffBase * Math.pow(2, 1)); assert.true(nearlyEqual(lapse, expected), 'third auth attempt (approximately in 0.3 seconds from first attempt)'); return { status: 200, 
body: authPushDisabled }; }); - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 4 }, { status: 200, body: mySegmentsNicolasMock }); + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 4 }, { status: 200, body: membershipsNicolasMock }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'initial sync'); return { status: 200, body: splitChangesMock1 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { assert.true(ready, 'client ready before first polling fetch'); const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'fallback to polling'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, settings.scheduler.featuresRefreshRate), 'polling'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, settings.scheduler.featuresRefreshRate * 2), 'keep polling since auth success buth with push disabled'); client.destroy().then(() => { @@ -102,11 +102,11 @@ export function testPushRetriesDueToAuthErrors(fetchMock, assert) { /** * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*), auth successes and sse fails - * 0.0 secs: polling (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*), auth successes and sse fails + * 0.0 secs: polling (/splitChanges, /memberships/*) * 0.1 secs: second push connect attempt (auth successes and sse fails again) - * 0.2 secs: polling (/splitChanges, /mySegments/*) - * 0.3 secs: third push connect attempt (auth and sse success), syncAll (/splitChanges, /mySegments/*) + * 0.2 secs: polling (/splitChanges, /memberships/*) + * 0.3 secs: third push connect attempt (auth and sse success), syncAll (/splitChanges, /memberships/*) */ export function testPushRetriesDueToSseErrors(fetchMock, assert) { window.EventSource = EventSourceMock; @@ -114,7 +114,7 @@ export function testPushRetriesDueToSseErrors(fetchMock, assert) { let start, splitio, client, ready = false; const expectedTimeToSSEsuccess = (settings.scheduler.pushRetryBackoffBase * Math.pow(2, 0) + settings.scheduler.pushRetryBackoffBase * Math.pow(2, 1)); - const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, 
'/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; let sseattempts = 0; setMockListener(function (eventSourceInstance) { assert.equal(eventSourceInstance.url, expectedSSEurl, 'SSE url is correct'); @@ -130,30 +130,30 @@ export function testPushRetriesDueToSseErrors(fetchMock, assert) { sseattempts++; }); - fetchMock.get({ url: url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), repeat: 3 /* 3 push attempts */ }, function (url, opts) { + fetchMock.get({ url: url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), repeat: 3 /* 3 push attempts */ }, function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth success'); return { status: 200, body: authPushEnabledNicolas }; }); - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 4 }, { status: 200, body: mySegmentsNicolasMock }); + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 4 }, { status: 200, body: membershipsNicolasMock }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'initial sync'); return { status: 200, body: splitChangesMock1 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { assert.true(ready, 'client ready before first polling fetch'); const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'fallback to polling'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, settings.scheduler.featuresRefreshRate), 'polling'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, expectedTimeToSSEsuccess), 'sync due to success SSE connection'); client.destroy().then(() => { @@ -176,7 +176,7 @@ export function testPushRetriesDueToSseErrors(fetchMock, assert) { * Assert that if the main client is destroyed while authentication request is in progress and successes, the SDK doesn't open the SSE connection * * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*) and first auth attempt + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*) and first auth attempt * 0.05 secs: client destroyed * 0.1 secs: auth success but not SSE connection opened since push was closed * 0.2 secs: test finished @@ -189,10 +189,10 @@ export function testSdkDestroyWhileAuthSuccess(fetchMock, assert) { let splitio, client, ready = false; - fetchMock.getOnce(url(settings, 
`/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushEnabledNicolas }, { delay: 100 }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushEnabledNicolas }, { delay: 100 }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); setTimeout(() => { client.destroy().then(() => { @@ -224,9 +224,9 @@ export function testSdkDestroyWhileConnDelay(fetchMock, assert) { assert.fail('unexpected EventSource request with url: ' + eventSourceInstance.url); }); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { status: 200, body: { ...authPushEnabledNicolas, connDelay: 0.1 } }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { status: 200, body: { ...authPushEnabledNicolas, connDelay: 0.1 } }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); const client = SplitFactory(config).client(); setTimeout(() => { @@ -243,8 +243,8 @@ export function testSdkDestroyWhileConnDelay(fetchMock, assert) { * Asserts that if the client is destroyed while authentication request is in progress and fails, the SDK doesn't schedule an auth retry * * Sequence of calls: - * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*) and first auth attempt (fail due to bad token) - * 0.0 secs: polling (/splitChanges, /mySegments/*) + * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*) and first auth attempt (fail due to bad token) + * 0.0 secs: polling (/splitChanges, /memberships/*) * 0.1 secs: second auth attempt request * 0.15 secs: client destroyed * 0.2 secs: second auth attempt response (fail due to network error) @@ -255,12 +255,12 @@ export function testSdkDestroyWhileAuthRetries(fetchMock, assert) { let splitio, client, ready = false; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushBadToken }); - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), { throws: new TypeError('Network error') }, { delay: 100 }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { status: 200, body: authPushBadToken }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), { throws: new TypeError('Network error') }, { delay: 100 }); - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 2 }, { status: 200, body: mySegmentsNicolasMock }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); 
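For context on the mock bodies used throughout these suites: the tests stub the new `/memberships/<key>` endpoint with fetch-mock (a dev dependency of this repo). The sketch below is illustrative only and not part of the patch — the import style, base URL and segment names are placeholders (the suites actually receive `fetchMock` as a function argument and build URLs with `url(settings, ...)`) — but the response shape (`ms` for standard segments, `ls` for large segments, each with a key list `k` and an optional changeNumber `cn`) mirrors the memberships mocks used in these tests.

  // Illustrative sketch only (not part of the patch); URL and segment names are placeholders.
  import fetchMock from 'fetch-mock';

  // One-off mock, as used for the initial sync of a single client:
  fetchMock.getOnce('https://sdk.split.io/api/memberships/nicolas%40split.io', {
    status: 200,
    body: { ms: { k: [{ n: 'developers' }] }, ls: { k: [], cn: 1457552640000 } }
  });

  // Reusable mock with a capped number of matches, as used for polling fallbacks:
  fetchMock.get({ url: 'https://sdk.split.io/api/memberships/marcio%40split.io', repeat: 2 }, {
    status: 200,
    body: { ms: { k: [] } }
  });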
+ fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 2 }, { status: 200, body: membershipsNicolasMock }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); fetchMock.get(new RegExp('.*'), function (url) { assert.fail('unexpected GET request with url: ' + url); diff --git a/src/__tests__/browserSuites/push-refresh-token.spec.js b/src/__tests__/browserSuites/push-refresh-token.spec.js index cecfcb1..8e22592 100644 --- a/src/__tests__/browserSuites/push-refresh-token.spec.js +++ b/src/__tests__/browserSuites/push-refresh-token.spec.js @@ -1,6 +1,6 @@ import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsNicolasMock1 from '../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolasMock1 from '../mocks/memberships.nicolas@split.io.json'; import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.601secs.json'; import authPushDisabled from '../mocks/auth.pushDisabled.json'; @@ -57,7 +57,7 @@ export function testRefreshToken(fetchMock, assert) { sseCount++; switch (sseCount) { case 1: - assert.true(nearlyEqual(Date.now() - start, 0), 'first connection is created inmediatelly'); + assert.true(nearlyEqual(Date.now() - start, 0), 'first connection is created immediately'); break; case 2: assert.true(nearlyEqual(Date.now() - start, MILLIS_REFRESH_TOKEN + MILLIS_CONNDELAY), 'second connection is created with a delay'); @@ -77,22 +77,22 @@ export function testRefreshToken(fetchMock, assert) { }); // initial sync - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // first auth - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth success'); return { status: 200, body: authPushEnabledNicolas }; }); // sync after SSE opened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // re-auth due to refresh token, with connDelay of 0.5 seconds - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_REFRESH_TOKEN), 
'reauthentication for token refresh'); if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); @@ -100,15 +100,15 @@ export function testRefreshToken(fetchMock, assert) { }); // sync after SSE reopened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_REFRESH_TOKEN + MILLIS_CONNDELAY), 'sync after SSE connection is reopened'); return { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // second re-auth due to refresh token, this time responding with pushEnabled false - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}`), function (url, opts) { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_REFRESH_TOKEN * 2), 'second reauthentication for token refresh'); if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); @@ -116,7 +116,7 @@ export function testRefreshToken(fetchMock, assert) { }); // split sync after SSE closed due to push disabled - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_REFRESH_TOKEN * 2), 'sync after SSE connection is reopened a second time'); setTimeout(() => { @@ -126,7 +126,7 @@ export function testRefreshToken(fetchMock, assert) { }, 200); // destroy the client a little bit latter, to assert that there weren't new requests return { status: 500, body: 'server error' }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); fetchMock.get(new RegExp('.*'), function (url) { assert.fail('unexpected GET request with url: ' + url); diff --git a/src/__tests__/browserSuites/push-synchronization-retries.spec.js b/src/__tests__/browserSuites/push-synchronization-retries.spec.js index c7031b7..db0374a 100644 --- a/src/__tests__/browserSuites/push-synchronization-retries.spec.js +++ b/src/__tests__/browserSuites/push-synchronization-retries.spec.js @@ -1,13 +1,13 @@ import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; import splitChangesMock3 from '../mocks/splitchanges.since.1457552620999.till.1457552649999.SPLIT_UPDATE.json'; -import mySegmentsNicolasMock1 from '../mocks/mysegments.nicolas@split.io.json'; -import mySegmentsNicolasMock2 from '../mocks/mysegments.nicolas@split.io.mock2.json'; -import mySegmentsMarcio from '../mocks/mysegments.marcio@split.io.json'; +import membershipsNicolasMock1 from '../mocks/memberships.nicolas@split.io.json'; +import membershipsNicolasMock2 from '../mocks/memberships.nicolas@split.io.mock2.json'; +import membershipsMarcio from 
'../mocks/memberships.marcio@split.io.json';
 import splitUpdateMessage from '../mocks/message.SPLIT_UPDATE.1457552649999.json';
 import oldSplitUpdateMessage from '../mocks/message.SPLIT_UPDATE.1457552620999.json';
-import mySegmentsUpdateMessage from '../mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json';
+import mySegmentsUpdateMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552640000.json';
 import splitKillMessage from '../mocks/message.SPLIT_KILL.1457552650000.json';
 import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json';
@@ -48,26 +48,26 @@ const MILLIS_RETRY_FOR_FIRST_SPLIT_UPDATE_EVENT = 300;
 const MILLIS_SECOND_SPLIT_UPDATE_EVENT = 400;
-const MILLIS_MYSEGMENT_UPDATE_EVENT = 500;
-const MILLIS_THIRD_RETRY_FOR_MYSEGMENT_UPDATE_EVENT = 1200;
+const MILLIS_MEMBERSHIPS_MS_UPDATE = 500;
+const MILLIS_THIRD_RETRY_FOR_MEMBERSHIPS_MS_UPDATE = 1200;
 const MILLIS_SPLIT_KILL_EVENT = 1300;
 const MILLIS_THIRD_RETRY_FOR_SPLIT_KILL_EVENT = 2000;
 /**
  * Sequence of calls:
- * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*), auth, SSE connection
- * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*)
+ * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*), auth, SSE connection
+ * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /memberships/*)
 *
 * 0.2 secs: SPLIT_UPDATE event -> /splitChanges: bad response -> SDK_UPDATE triggered
 * 0.3 secs: SPLIT_UPDATE event -> /splitChanges retry: success
 *
 * 0.4 secs: SPLIT_UPDATE event with old changeNumber -> SDK_UPDATE not triggered
 *
- * 0.5 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io: network error
- * 0.6 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io retry: invalid JSON response
- * 0.8 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io: server error
- * 1.2 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io retry: success -> SDK_UPDATE triggered
+ * 0.5 secs: Unbounded MEMBERSHIPS_MS_UPDATE event -> /memberships/marcio@split.io OK, /memberships/nicolas@split.io: network error
+ * 0.6 secs: Unbounded MEMBERSHIPS_MS_UPDATE event -> /memberships/nicolas@split.io retry: invalid JSON response
+ * 0.8 secs: Unbounded MEMBERSHIPS_MS_UPDATE event -> /memberships/nicolas@split.io: server error
+ * 1.2 secs: Unbounded MEMBERSHIPS_MS_UPDATE event -> /memberships/nicolas@split.io retry: success -> SDK_UPDATE triggered
 *
 * 1.3 secs: SPLIT_KILL event -> /splitChanges: outdated response -> SDK_UPDATE triggered although fetches fail
 * 1.4 secs: SPLIT_KILL event -> /splitChanges retry: network error
@@ -76,7 +76,7 @@ const MILLIS_THIRD_RETRY_FOR_SPLIT_KILL_EVENT = 2000;
 * (we destroy the client here, to assert that all scheduled tasks are clean)
 */
 export function testSynchronizationRetries(fetchMock, assert) {
-  // Force the backoff base of UpdateWorkers, from 10 secs to 100 ms, to reduce test time
+  // Force the backoff base of UpdateWorkers, from 1 sec to 100 ms, to reduce test time
   Backoff.__TEST__BASE_MILLIS = 100;
   assert.plan(17);
@@ -88,7 +88,7 @@ export function testSynchronizationRetries(fetchMock, assert) {
   setMockListener(function (eventSourceInstance) {
     start = Date.now();
-    const expectedSSEurl = `${url(settings,
'/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; assert.equals(eventSourceInstance.url, expectedSSEurl, 'EventSource URL is the expected'); /* events on first SSE connection */ @@ -114,11 +114,11 @@ export function testSynchronizationRetries(fetchMock, assert) { assert.equal(client.getTreatment('splitters'), 'off', 'evaluation with initial MySegments list'); client.once(client.Event.SDK_UPDATE, () => { const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_THIRD_RETRY_FOR_MYSEGMENT_UPDATE_EVENT), 'SDK_UPDATE due to MY_SEGMENTS_UPDATE event'); + assert.true(nearlyEqual(lapse, MILLIS_THIRD_RETRY_FOR_MEMBERSHIPS_MS_UPDATE), 'SDK_UPDATE due to MEMBERSHIPS_MS_UPDATE event'); assert.equal(client.getTreatment('splitters'), 'on', 'evaluation with updated MySegments list'); }); eventSourceInstance.emitMessage(mySegmentsUpdateMessage); - }, MILLIS_MYSEGMENT_UPDATE_EVENT); // send a MY_SEGMENTS_UPDATE event with a new changeNumber after 0.4 seconds + }, MILLIS_MEMBERSHIPS_MS_UPDATE); // send a MEMBERSHIPS_MS_UPDATE event with a new changeNumber after 0.4 seconds setTimeout(() => { client.once(client.Event.SDK_UPDATE, () => { @@ -135,60 +135,60 @@ export function testSynchronizationRetries(fetchMock, assert) { }); // initial auth - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&users=${encodeURIComponent(userKey)}&users=${encodeURIComponent(otherUserKeySync)}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&users=${encodeURIComponent(userKey)}&users=${encodeURIComponent(otherUserKeySync)}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth success'); return { status: 200, body: authPushEnabledNicolas }; }); - // initial split and mySegments sync - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); - fetchMock.get({ url: url(settings, '/mySegments/marcio%40split.io'), repeat: 2 }, { status: 200, body: mySegmentsMarcio }); + // initial split and memberships sync + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); + fetchMock.get({ url: url(settings, '/memberships/marcio%40split.io'), repeat: 3 }, { status: 200, body: membershipsMarcio }); // split and segment sync after SSE opened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; 
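As a reading aid for the SSE assertions in these suites: the tests replace the browser's EventSource with a mock so they can drive the push connection by hand. A minimal sketch of that pattern follows (illustrative only, not part of the patch; `EventSourceMock`, `setMockListener` and `splitUpdateMessage` are the suite's own imports, and the timeout value is arbitrary).

  // Illustrative sketch only (not part of the patch).
  window.EventSource = EventSourceMock; // EventSourceMock comes from '../testUtils/eventSourceMock'

  setMockListener((eventSourceInstance) => {
    // Simulate the SSE connection opening; the SDK reacts with a syncAll.
    eventSourceInstance.emitOpen();

    // Later, push a streaming notification to exercise the update workers,
    // e.g. one of the SPLIT_UPDATE mocks imported at the top of the spec.
    setTimeout(() => {
      eventSourceInstance.emitMessage(splitUpdateMessage);
    }, 200);
  });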
assert.true(nearlyEqual(lapse, MILLIS_SSE_OPEN), 'sync after SSE connection is opened'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolasMock1 }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolasMock1 }); // fetch due to SPLIT_UPDATE event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); // fetch retry for SPLIT_UPDATE event, due to previous unexpected response (response till minor than SPLIT_UPDATE changeNumber) - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_RETRY_FOR_FIRST_SPLIT_UPDATE_EVENT), 'fetch retry due to SPLIT_UPDATE event'); return { status: 200, body: splitChangesMock3 }; }); - // fetch due to first MY_SEGMENTS_UPDATE event - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { throws: new TypeError('Network error') }); - // fetch retry for MY_SEGMENTS_UPDATE event, due to previous fail - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: '{ "since": 1457552620999, "til' }); // invalid JSON response - // fetch retry for MY_SEGMENTS_UPDATE event, due to previous fail - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), { status: 500, body: 'server error' }); - // second fetch retry for MY_SEGMENTS_UPDATE event, due to previous fail - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function () { + // fetch due to first MEMBERSHIPS_MS_UPDATE event + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { throws: new TypeError('Network error') }); + // fetch retry for MEMBERSHIPS_MS_UPDATE event, due to previous fail + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: '{ "since": 1457552620999, "til' }); // invalid JSON response + // fetch retry for MEMBERSHIPS_MS_UPDATE event, due to previous fail + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 500, body: 'server error' }); + // second fetch retry for MEMBERSHIPS_MS_UPDATE event, due to previous fail + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function () { const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_THIRD_RETRY_FOR_MYSEGMENT_UPDATE_EVENT), 'sync second retry for MY_SEGMENTS_UPDATE event'); - return { status: 200, body: mySegmentsNicolasMock2 }; + assert.true(nearlyEqual(lapse, MILLIS_THIRD_RETRY_FOR_MEMBERSHIPS_MS_UPDATE), 'sync second retry for MEMBERSHIPS_MS_UPDATE event'); + return { status: 200, body: membershipsNicolasMock2 }; }); // fetch due to SPLIT_KILL event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), function () { assert.equal(client.getTreatment('whitelist'), 'not_allowed', 'evaluation with split killed immediately, before fetch is done'); const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_SPLIT_KILL_EVENT), 'sync due to SPLIT_KILL event'); return { status: 200, body: { since: 
1457552649999, till: 1457552649999, splits: [] } }; // returning old state }); // first fetch retry for SPLIT_KILL event, due to previous unexpected response (response till minor than SPLIT_KILL changeNumber) - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), { throws: new TypeError('Network error') }); + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), { throws: new TypeError('Network error') }); // second fetch retry for SPLIT_KILL event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), { status: 200, body: '{ "since": 1457552620999, "til' }); // invalid JSON response + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), { status: 200, body: '{ "since": 1457552620999, "til' }); // invalid JSON response // third fetch retry for SPLIT_KILL event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), function () { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_THIRD_RETRY_FOR_SPLIT_KILL_EVENT), 'third fetch retry due to SPLIT_KILL event'); diff --git a/src/__tests__/browserSuites/push-synchronization.spec.js b/src/__tests__/browserSuites/push-synchronization.spec.js index 3ac2406..4086050 100644 --- a/src/__tests__/browserSuites/push-synchronization.spec.js +++ b/src/__tests__/browserSuites/push-synchronization.spec.js @@ -2,27 +2,25 @@ import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; import splitChangesMock3 from '../mocks/splitchanges.since.1457552620999.till.1457552649999.SPLIT_UPDATE.json'; import splitChangesMock4 from '../mocks/splitchanges.since.1457552649999.till.1457552650000.SPLIT_KILL.json'; -import mySegmentsNicolasMock1 from '../mocks/mysegments.nicolas@split.io.json'; -import mySegmentsNicolasMock2 from '../mocks/mysegments.nicolas@split.io.mock2.json'; -import mySegmentsMarcio from '../mocks/mysegments.marcio@split.io.json'; +import membershipsNicolasMock2 from '../mocks/memberships.nicolas@split.io.mock2.json'; +import membershipsMarcio from '../mocks/memberships.marcio@split.io.json'; import splitUpdateMessage from '../mocks/message.SPLIT_UPDATE.1457552649999.json'; import oldSplitUpdateMessage from '../mocks/message.SPLIT_UPDATE.1457552620999.json'; -import mySegmentsUpdateMessageNoPayload from '../mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json'; -import mySegmentsUpdateMessageWithPayload from '../mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552645000.json'; -import mySegmentsUpdateMessageWithEmptyPayload from '../mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552646000.json'; import splitKillMessage from '../mocks/message.SPLIT_KILL.1457552650000.json'; -import unboundedMessage from '../mocks/message.V2.UNBOUNDED.1457552650000.json'; -import boundedZlibMessage from '../mocks/message.V2.BOUNDED.ZLIB.1457552651000.json'; -import keylistGzipMessage from '../mocks/message.V2.KEYLIST.GZIP.1457552652000.json'; -import segmentRemovalMessage from '../mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json'; +import unboundedMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json'; +import boundedZlibMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.ZLIB.1457552651000.json'; +import keylistGzipMessage from 
'../mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json';
+import segmentRemovalMessage from '../mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json';
+import unboundedLSMessage from '../mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.DELAY.1457552650000.json';
+import segmentRemovalLSMessage from '../mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json';
 import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json';
 import authPushEnabledNicolasAndMarcio from '../mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json';
+import { Backoff } from '@splitsoftware/splitio-commons/src/utils/Backoff';
 import { nearlyEqual, url, hasNoCacheHeader } from '../testUtils';
-import includes from 'lodash/includes';
 // Replace original EventSource with mock
 import EventSourceMock, { setMockListener } from '../testUtils/eventSourceMock';
@@ -55,49 +53,52 @@ const settings = settingsFactory(config);
 const MILLIS_SSE_OPEN = 100;
 const MILLIS_FIRST_SPLIT_UPDATE_EVENT = 200;
 const MILLIS_SECOND_SPLIT_UPDATE_EVENT = 300;
-const MILLIS_MY_SEGMENTS_UPDATE_EVENT_NO_PAYLOAD = 400;
-const MILLIS_SPLIT_KILL_EVENT = 500;
-const MILLIS_NEW_CLIENT = 600;
-const MILLIS_SECOND_SSE_OPEN = 700;
-const MILLIS_MY_SEGMENTS_UPDATE_WITH_PAYLOAD = 800;
-const MILLIS_MY_SEGMENTS_UPDATE_WITH_EMPTY_PAYLOAD = 900;
-const MILLIS_MORE_CLIENTS = 1000;
-const MILLIS_UNBOUNDED_FETCH = 1100;
-const MILLIS_BOUNDED_FALLBACK = 1200;
-const MILLIS_KEYLIST_FALLBACK = 1300;
-const MILLIS_BOUNDED = 1400;
-const MILLIS_KEYLIST = 1500;
-const MILLIS_SEGMENT_REMOVAL = 1600;
+const MILLIS_SPLIT_KILL_EVENT = 400;
+const MILLIS_NEW_CLIENT = 500;
+const MILLIS_SECOND_SSE_OPEN = 600;
+const MILLIS_MORE_CLIENTS = 700;
+const MILLIS_MEMBERSHIPS_MS_UPDATE_UNBOUNDED_FETCH = 800;
+const MILLIS_MEMBERSHIPS_MS_UPDATE_BOUNDED_FALLBACK = 900;
+const MILLIS_MEMBERSHIPS_MS_UPDATE_KEYLIST_FALLBACK = 1000;
+const MILLIS_MEMBERSHIPS_MS_UPDATE_BOUNDED = 1100;
+const MILLIS_MEMBERSHIPS_MS_UPDATE_KEYLIST = 1200;
+const MILLIS_MEMBERSHIPS_MS_UPDATE_SEGMENT_REMOVAL = 1300;
+const MILLIS_MEMBERSHIPS_LS_UPDATE_UNBOUNDED_FETCH = 1400;
+const MILLIS_MEMBERSHIPS_LS_UPDATE_SEGMENT_REMOVAL = 1900;
+const EXPECTED_DELAY_AND_BACKOFF = 241 + 100;
 /**
  * Sequence of calls:
- * 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*), auth, SSE connection
- * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*)
+ * 0.0 secs: initial SyncAll (/splitChanges, /memberships/*), auth, SSE connection
+ * 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /memberships/*)
 * 0.2 secs: SPLIT_UPDATE event -> /splitChanges
 * 0.3 secs: SPLIT_UPDATE event with old changeNumber
- * 0.4 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io
- * 0.5 secs: SPLIT_KILL event -> /splitChanges
- * 0.6 secs: creates a new client -> new auth and SSE connection
- * 0.7 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*)
- * 0.8 secs: MY_SEGMENTS_UPDATE event for new client (with payload).
- * 0.9 secs: MY_SEGMENTS_UPDATE event for new client (with empty payload).
- * 1.0 secs: creates more clients
- * 1.1 secs: MY_SEGMENTS_UPDATE_V2 UnboundedFetchRequest event.
- * 1.2 secs: MY_SEGMENTS_UPDATE_V2 BoundedFetchRequest event error --> UnboundedFetchRequest.
- * 1.3 secs: MY_SEGMENTS_UPDATE_V2 KeyList event error --> UnboundedFetchRequest.
- * 1.4 secs: MY_SEGMENTS_UPDATE_V2 BoundedFetchRequest event.
- * 1.5 secs: MY_SEGMENTS_UPDATE_V2 KeyList event.
- * 1.6 secs: MY_SEGMENTS_UPDATE_V2 SegmentRemoval event.
+ * 0.4 secs: SPLIT_KILL event -> /splitChanges
+ * 0.5 secs: creates a new client -> new auth and SSE connection
+ * 0.6 secs: SSE connection opened -> syncAll (/splitChanges, /memberships/*)
+ * 0.7 secs: creates more clients
+ * 0.8 secs: MEMBERSHIPS_MS_UPDATE UnboundedFetchRequest event.
+ * 0.9 secs: MEMBERSHIPS_MS_UPDATE BoundedFetchRequest event error --> UnboundedFetchRequest.
+ * 1.0 secs: MEMBERSHIPS_MS_UPDATE KeyList event error --> UnboundedFetchRequest.
+ * 1.1 secs: MEMBERSHIPS_MS_UPDATE BoundedFetchRequest event.
+ * 1.2 secs: MEMBERSHIPS_MS_UPDATE KeyList event.
+ * 1.3 secs: MEMBERSHIPS_MS_UPDATE SegmentRemoval event.
+ * 1.4 secs: MEMBERSHIPS_LS_UPDATE UnboundedFetchRequest event, with 241 ms delay for 'nicolas@split.io' (hash('nicolas@split.io') % 300)
+ * 1.641 secs: /memberships/* fetch due to unbounded MEMBERSHIPS_LS_UPDATE event, with an old changeNumber
+ * 1.741 secs: /memberships/* fetch due to unbounded MEMBERSHIPS_LS_UPDATE event, with the target changeNumber -> SDK_UPDATE event
+ * 1.9 secs: MEMBERSHIPS_LS_UPDATE SegmentRemoval event -> SPLIT_UPDATE event
 */
 export function testSynchronization(fetchMock, assert) {
-  assert.plan(38);
+  // Force the backoff base of UpdateWorkers to reduce test time
+  Backoff.__TEST__BASE_MILLIS = 100;
+  assert.plan(34);
   fetchMock.reset();
   let start, splitio, client, otherClient, keylistAddClient, keylistRemoveClient, bitmapTrueClient, sharedClients = [];
   // mock SSE open and message events
   setMockListener((eventSourceInstance) => {
-    const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`;
+    const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolas.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`;
     assert.equals(eventSourceInstance.url, expectedSSEurl, 'EventSource URL is the expected');
     /* events on first SSE connection */
@@ -119,16 +120,6 @@ export function testSynchronization(fetchMock, assert) {
       eventSourceInstance.emitMessage(oldSplitUpdateMessage);
     }, MILLIS_SECOND_SPLIT_UPDATE_EVENT); // send a SPLIT_UPDATE event with an old changeNumber after 0.3 seconds
-    setTimeout(() => {
-      assert.equal(client.getTreatment('splitters'), 'off', 'evaluation with initial MySegments list');
-      client.once(client.Event.SDK_UPDATE, () => {
-        const lapse = Date.now() - start;
-        assert.true(nearlyEqual(lapse, MILLIS_MY_SEGMENTS_UPDATE_EVENT_NO_PAYLOAD), 'SDK_UPDATE due to MY_SEGMENTS_UPDATE event');
-        assert.equal(client.getTreatment('splitters'), 'on', 'evaluation with updated MySegments list');
-      });
-      eventSourceInstance.emitMessage(mySegmentsUpdateMessageNoPayload);
-    }, MILLIS_MY_SEGMENTS_UPDATE_EVENT_NO_PAYLOAD); // send a MY_SEGMENTS_UPDATE event with a new changeNumber after 0.4 seconds
-
     setTimeout(() => {
       assert.equal(client.getTreatment('whitelist'), 'allowed', 'evaluation with not killed Split');
       const onUpdateCb = () => {
@@ -146,7 +137,7 @@ export
function testSynchronization(fetchMock, assert) { otherClient = splitio.client(otherUserKey); setMockListener((eventSourceInstance) => { - const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_MjE0MTkxOTU2Mg%3D%3D_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_NTcwOTc3MDQx_mySegments,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_splits,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolasAndMarcio.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; + const expectedSSEurl = `${url(settings, '/sse')}?channels=NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_control,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_flags,NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw%3D%3D_memberships,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_sec&accessToken=${authPushEnabledNicolasAndMarcio.token}&v=1.1&heartbeats=true&SplitSDKVersion=${settings.version}&SplitSDKClientKey=h-1>`; assert.equals(eventSourceInstance.url, expectedSSEurl, 'new EventSource URL is the expected'); /* events on second SSE connection */ @@ -154,41 +145,6 @@ export function testSynchronization(fetchMock, assert) { eventSourceInstance.emitOpen(); }, MILLIS_SECOND_SSE_OPEN - MILLIS_NEW_CLIENT); // open new SSE connection - setTimeout(() => { - assert.equal(otherClient.getTreatment('qc_team'), 'no', 'evaluation with initial MySegments list (shared client)'); - otherClient.once(otherClient.Event.SDK_UPDATE, () => { - const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_MY_SEGMENTS_UPDATE_WITH_PAYLOAD), 'SDK_UPDATE due to MY_SEGMENTS_UPDATE event (with payload)'); - assert.equal(otherClient.getTreatment('qc_team'), 'yes', 'evaluation with updated MySegments list (shared client)'); - }); - eventSourceInstance.emitMessage(mySegmentsUpdateMessageWithPayload); - }, MILLIS_MY_SEGMENTS_UPDATE_WITH_PAYLOAD - MILLIS_NEW_CLIENT); // send a MY_SEGMENTS_UPDATE event with payload after 0.1 seconds from new SSE connection opened - - setTimeout(() => { - assert.equal(otherClient.getTreatment('qc_team'), 'yes', 'evaluation with updated MySegments list (shared client)'); - otherClient.once(otherClient.Event.SDK_UPDATE, () => { - const lapse = Date.now() - start; - assert.true(nearlyEqual(lapse, MILLIS_MY_SEGMENTS_UPDATE_WITH_EMPTY_PAYLOAD), 'SDK_UPDATE due to MY_SEGMENTS_UPDATE event (with empty payload)'); - assert.equal(otherClient.getTreatment('qc_team'), 'no', 'evaluation with re-updated MySegments list (shared client)'); - }); - - // assert that user error on callback is an Uncaught Exception - otherClient.once(otherClient.Event.SDK_UPDATE, () => { - const previousErrorHandler = window.onerror; - const exceptionHandler = err => { - if (includes(err, 'willThrowFor')) { - assert.pass(`User error on SDK_UPDATE callback should throw as Uncaught Exception: ${err}`); - } else { - assert.fail(err); - } - window.onerror = previousErrorHandler; - }; - window.onerror = exceptionHandler; - null.willThrowForUpdate(); - }); - eventSourceInstance.emitMessage(mySegmentsUpdateMessageWithEmptyPayload); - }, MILLIS_MY_SEGMENTS_UPDATE_WITH_EMPTY_PAYLOAD - MILLIS_NEW_CLIENT); // send a MY_SEGMENTS_UPDATE event with payload after 0.1 seconds from new SSE connection opened - setTimeout(() => { keylistAddClient = splitio.client(keylistAddKey); keylistRemoveClient = splitio.client(keylistRemoveKey); @@ -200,17 +156,17 @@ export function testSynchronization(fetchMock, assert) { setTimeout(() => { 
eventSourceInstance.emitMessage(unboundedMessage); - }, MILLIS_UNBOUNDED_FETCH - MILLIS_MORE_CLIENTS); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_UNBOUNDED_FETCH - MILLIS_MORE_CLIENTS); setTimeout(() => { const malformedMessage = { ...boundedZlibMessage, data: boundedZlibMessage.data.replace('eJxiGAX4AMd', '').replace('1457552651000', '1457552650100') }; eventSourceInstance.emitMessage(malformedMessage); - }, MILLIS_BOUNDED_FALLBACK - MILLIS_MORE_CLIENTS); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_BOUNDED_FALLBACK - MILLIS_MORE_CLIENTS); setTimeout(() => { const malformedMessage = { ...keylistGzipMessage, data: keylistGzipMessage.data.replace('H4sIAAAAAAA', '').replace('1457552652000', '1457552650200') }; eventSourceInstance.emitMessage(malformedMessage); - }, MILLIS_KEYLIST_FALLBACK - MILLIS_MORE_CLIENTS); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_KEYLIST_FALLBACK - MILLIS_MORE_CLIENTS); setTimeout(() => { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'off', 'on', 'off'], 'evaluation before bounded fetch'); @@ -218,7 +174,7 @@ export function testSynchronization(fetchMock, assert) { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'off', 'on', 'on'], 'evaluation after bounded fetch'); }); eventSourceInstance.emitMessage(boundedZlibMessage); - }, MILLIS_BOUNDED - MILLIS_MORE_CLIENTS); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_BOUNDED - MILLIS_MORE_CLIENTS); setTimeout(() => { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'off', 'on', 'on'], 'evaluation before keylist message'); @@ -230,30 +186,56 @@ export function testSynchronization(fetchMock, assert) { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'on', 'off', 'on'], 'evaluation after keylist message (removed key)'); }); eventSourceInstance.emitMessage(keylistGzipMessage); - }, MILLIS_KEYLIST - MILLIS_MORE_CLIENTS); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_KEYLIST - MILLIS_MORE_CLIENTS); setTimeout(() => { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'on', 'off', 'on'], 'evaluation before segment removal'); bitmapTrueClient.once(bitmapTrueClient.Event.SDK_UPDATE, () => { assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'off', 'off', 'off'], 'evaluation after segment removal'); + }); + + eventSourceInstance.emitMessage(segmentRemovalMessage); + }, MILLIS_MEMBERSHIPS_MS_UPDATE_SEGMENT_REMOVAL - MILLIS_MORE_CLIENTS); + + setTimeout(() => { + assert.equal(client.getTreatment('in_large_segment'), 'no', 'evaluation before myLargeSegment fetch'); + + const timestampUnboundEvent = Date.now(); + + client.once(client.Event.SDK_UPDATE, () => { + assert.true(nearlyEqual(Date.now() - timestampUnboundEvent, EXPECTED_DELAY_AND_BACKOFF), 'SDK_UPDATE after fetching memberships with a delay'); + assert.equal(client.getTreatment('in_large_segment'), 'yes', 'evaluation after myLargeSegment fetch'); + }); + + eventSourceInstance.emitMessage(unboundedLSMessage); + }, MILLIS_MEMBERSHIPS_LS_UPDATE_UNBOUNDED_FETCH - MILLIS_MORE_CLIENTS); + + setTimeout(() => { + assert.equal(client.getTreatment('in_large_segment'), 'yes', 'evaluation before large segment removal'); + assert.deepEqual(sharedClients.map(c => c.getTreatment('in_large_segment')), ['no', 'no', 'no', 'no'], 'evaluation before large segment removal'); + + client.once(client.Event.SDK_UPDATE, () => { + assert.equal(client.getTreatment('in_large_segment'), 'no', 'evaluation after large segment removal'); // destroy shared clients and then main client 
Promise.all(sharedClients.map(c => c.destroy())) .then(() => { assert.equal(otherClient.getTreatment('whitelist'), 'control', 'evaluation returns control for shared client if it is destroyed'); - assert.equal(client.getTreatment('whitelist'), 'not_allowed', 'evaluation returns correct tratment for main client'); + assert.equal(client.getTreatment('whitelist'), 'not_allowed', 'evaluation returns correct treatment for main client'); assert.equal(eventSourceInstance.readyState, EventSourceMock.OPEN, 'streaming is still open'); client.destroy().then(() => { assert.equal(client.getTreatment('whitelist'), 'control', 'evaluation returns control for main client if it is destroyed'); - assert.equal(eventSourceInstance.readyState, EventSourceMock.CLOSED, 'streaming is closed after "unload" browser event'); + assert.equal(eventSourceInstance.readyState, EventSourceMock.CLOSED, 'streaming is closed after destroy'); + + Backoff.__TEST__BASE_MILLIS = undefined; assert.end(); }); }); }); - eventSourceInstance.emitMessage(segmentRemovalMessage); - }, MILLIS_SEGMENT_REMOVAL - MILLIS_MORE_CLIENTS); + eventSourceInstance.emitMessage(segmentRemovalLSMessage); + }, MILLIS_MEMBERSHIPS_LS_UPDATE_SEGMENT_REMOVAL - MILLIS_MORE_CLIENTS); }); }, MILLIS_MORE_CLIENTS - MILLIS_NEW_CLIENT); @@ -265,7 +247,7 @@ export function testSynchronization(fetchMock, assert) { // initial auth let authParams = `users=${encodeURIComponent(userKey)}`; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&${authParams}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&${authParams}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('auth success'); return { status: 200, body: authPushEnabledNicolas }; @@ -273,7 +255,7 @@ export function testSynchronization(fetchMock, assert) { // reauth due to new client authParams += `&users=${encodeURIComponent(otherUserKey)}`; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&${authParams}`), function (url, opts) { + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&${authParams}`), function (url, opts) { if (!opts.headers['Authorization']) assert.fail('`/v2/auth` request must include `Authorization` header'); assert.pass('second auth success'); return { status: 200, body: authPushEnabledNicolasAndMarcio }; @@ -281,90 +263,87 @@ export function testSynchronization(fetchMock, assert) { // reauth due to more clients authParams += `&users=${encodeURIComponent(keylistAddKey)}&users=${encodeURIComponent(keylistRemoveKey)}&users=${encodeURIComponent(bitmapTrueKey)}`; - fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&${authParams}`), { status: 200, body: authPushEnabledNicolasAndMarcio }); + fetchMock.getOnce(url(settings, `/v2/auth?s=1.2&${authParams}`), { status: 200, body: authPushEnabledNicolasAndMarcio }); - // initial split and mySegments sync - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function (url, opts) { + // initial sync + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), function (url, opts) { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, 0), 'initial sync'); if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); return { status: 200, body: splitChangesMock1 }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function (url, opts) { + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function (url, opts) { if (hasNoCacheHeader(opts)) 
assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsNicolasMock1 }; + return { status: 200, body: membershipsNicolasMock2 }; }); - // split and segment sync after SSE opened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function (url, opts) { + // sync all after SSE opened + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function (url, opts) { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_SSE_OPEN), 'sync after SSE connection is opened'); if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function (url, opts) { + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function (url, opts) { if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsNicolasMock1 }; + return { status: 200, body: membershipsNicolasMock2 }; }); // fetch due to SPLIT_UPDATE event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function (url, opts) { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552620999'), function (url, opts) { if (!hasNoCacheHeader(opts)) assert.fail('request must include `Cache-Control` header'); return { status: 200, body: splitChangesMock3 }; }); - // fetch due to first MY_SEGMENTS_UPDATE event - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function (url, opts) { - if (!hasNoCacheHeader(opts)) assert.fail('request must include `Cache-Control` header'); - return { status: 200, body: mySegmentsNicolasMock2 }; - }); - // fetch due to SPLIT_KILL event - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552649999'), function (url, opts) { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552649999'), function (url, opts) { if (!hasNoCacheHeader(opts)) assert.fail('request must include `Cache-Control` header'); assert.equal(client.getTreatment('whitelist'), 'not_allowed', 'evaluation with split killed immediately, before fetch is done'); return { status: 200, body: splitChangesMock4 }; }); - // initial fetch of mySegments for new client - fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), function (url, opts) { + // initial fetch of memberships for new client + fetchMock.getOnce(url(settings, '/memberships/marcio%40split.io'), function (url, opts) { if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsMarcio }; + return { status: 200, body: membershipsMarcio }; }); - // split and mySegment sync after second SSE opened - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552650000'), function (url, opts) { + // sync all after second SSE opened + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552650000'), function (url, opts) { const lapse = Date.now() - start; assert.true(nearlyEqual(lapse, MILLIS_SECOND_SSE_OPEN), 'sync after second SSE connection is opened'); if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); return { status: 200, body: { splits: [], since: 1457552650000, till: 1457552650000 } }; }); - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 2 }, function (url, opts) { + fetchMock.get({ url: url(settings, 
'/memberships/nicolas%40split.io'), repeat: 2 }, function (url, opts) { if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsNicolasMock2 }; + return { status: 200, body: membershipsNicolasMock2 }; }); - fetchMock.get({ url: url(settings, '/mySegments/marcio%40split.io'), repeat: 2 }, function (url, opts) { + fetchMock.get({ url: url(settings, '/memberships/marcio%40split.io'), repeat: 2 }, function (url, opts) { if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header'); - return { status: 200, body: mySegmentsMarcio }; + return { status: 200, body: membershipsMarcio }; }); - // 3 unbounded fetch requests - fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 3 }, function (url, opts) { + + // 3 unbounded fetches for MEMBERSHIPS_MS_UPDATE + 1 unbounded fetch for MEMBERSHIPS_LS_UPDATE + fetchMock.get({ url: url(settings, '/memberships/marcio%40split.io'), repeat: 4 }, function (url, opts) { if (!hasNoCacheHeader(opts)) assert.fail('request must include `Cache-Control` header'); - return { status: 200, body: mySegmentsNicolasMock2 }; + return { status: 200, body: membershipsMarcio }; }); - fetchMock.get({ url: url(settings, '/mySegments/marcio%40split.io'), repeat: 3 }, function (url, opts) { + fetchMock.get({ url: url(settings, '/memberships/nicolas%40split.io'), repeat: 3 }, function (url, opts) { if (!hasNoCacheHeader(opts)) assert.fail('request must include `Cache-Control` header'); - return { status: 200, body: mySegmentsMarcio }; + return { status: 200, body: membershipsNicolasMock2 }; }); + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: { ms: { k: [{ n: 'developers' }, { n: 'engineers' }] }, ls: { k: [], cn: 1457552640000 } } }); // not target changeNumber + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: { ms: { k: [{ n: 'developers' }, { n: 'engineers' }] }, ls: { k: [{ n: 'employees' }, { n: 'splitters' }], cn: 1457552650000 } } }); // target changeNumber - // initial fetch of mySegments for other clients + sync after third SSE opened + 3 unbounded fetch requests - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552650000'), { status: 200, body: { splits: [], since: 1457552650000, till: 1457552650000 } }); - fetchMock.get({ url: url(settings, '/mySegments/key1'), repeat: 5 }, { status: 200, body: { mySegments: [] } }); - fetchMock.get({ url: url(settings, '/mySegments/key3'), repeat: 5 }, { status: 200, body: { mySegments: [{ name: 'splitters' }] } }); - fetchMock.get({ url: url(settings, `/mySegments/${bitmapTrueKey}`), repeat: 5 }, { status: 200, body: { mySegments: [] } }); + // initial fetch of memberships for other clients + sync all after third SSE opened + 3 unbounded fetches for MEMBERSHIPS_MS_UPDATE + 1 unbounded fetch for MEMBERSHIPS_LS_UPDATE + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1457552650000'), { status: 200, body: { splits: [], since: 1457552650000, till: 1457552650000 } }); + fetchMock.get({ url: url(settings, '/memberships/key1'), repeat: 6 }, { status: 200, body: { ms: {} } }); + fetchMock.get({ url: url(settings, '/memberships/key3'), repeat: 6 }, { status: 200, body: { ms: { k: [{ n: 'splitters' }] } } }); + fetchMock.get({ url: url(settings, `/memberships/${bitmapTrueKey}`), repeat: 5 }, { status: 200, body: { ms: { k: [] } } }); // bounded fetch request - fetchMock.get(url(settings,
`/mySegments/${bitmapTrueKey}`), { status: 200, body: { mySegments: [{ name: 'splitters' }] } }); + fetchMock.get(url(settings, `/memberships/${bitmapTrueKey}`), { status: 200, body: { ms: { k: [{ n: 'splitters' }] } } }); fetchMock.get(new RegExp('.*'), function (url) { assert.fail('unexpected GET request with url: ' + url); diff --git a/src/__tests__/browserSuites/readiness.spec.js b/src/__tests__/browserSuites/readiness.spec.js index 14dacef..6829965 100644 --- a/src/__tests__/browserSuites/readiness.spec.js +++ b/src/__tests__/browserSuites/readiness.spec.js @@ -2,9 +2,9 @@ import { SplitFactory, InLocalStorage } from '../../'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsNicolas from '../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolas from '../mocks/memberships.nicolas@split.io.json'; -// mocks for mySegments readiness tests +// mocks for memberships readiness tests import splitChangesStartWithoutSegmentsMock from '../mocks/splitchanges.real.json'; import splitChangesUpdateWithSegmentsMock from '../mocks/splitchanges.real.updateWithSegments.json'; import splitChangesUpdateWithoutSegmentsMock from '../mocks/splitchanges.real.updateWithoutSegments.json'; @@ -38,13 +38,13 @@ export default function (fetchMock, assert) { sdk: 'https://sdk.baseurl/readinessSuite1', events: 'https://events.baseurl/readinessSuite1' }; - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return new Promise((res) => { setTimeout(() => { res({ status: 200, body: splitChangesMock1, headers: {} }); }, requestTimeoutBeforeReady * 1000 + 50); }); }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise((res) => { setTimeout(() => { res({ status: 200, body: mySegmentsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise((res) => { setTimeout(() => { res({ status: 200, body: membershipsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); const splitio = SplitFactory({ ...baseConfig, urls: testUrls @@ -62,18 +62,18 @@ export default function (fetchMock, assert) { }); }); - assert.test(t => { // Timeout test, we have retries but mySegmnets takes too long + assert.test(t => { // Timeout test, we have retries but memberships takes too long const testUrls = { sdk: 'https://sdk.baseurl/readinessSuite2', events: 'https://events.baseurl/readinessSuite2' }; - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return new Promise((res) => { setTimeout(() => { res({ status: 200, body: splitChangesMock1, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise((res) => { setTimeout(() => { res({ status: 200, body: mySegmentsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 + 50); }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function 
() { + return new Promise((res) => { setTimeout(() => { res({ status: 200, body: membershipsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 + 50); }); }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); const splitio = SplitFactory({ ...baseConfig, urls: testUrls }); const client = splitio.client(); @@ -95,16 +95,16 @@ export default function (fetchMock, assert) { events: 'https://events.baseurl/readinessSuite3' }; - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return new Promise((res) => { setTimeout(() => { res({ status: 200, body: splitChangesMock1, headers: {} }); }, requestTimeoutBeforeReady * 1000 + 50); }); }); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { return new Promise((res) => { setTimeout(() => { res({ status: 200, body: splitChangesMock1, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); // Faster, it should get ready on the retry. }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise((res) => { setTimeout(() => { res({ status: 200, body: mySegmentsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise((res) => { setTimeout(() => { res({ status: 200, body: membershipsNicolas, headers: {} }); }, requestTimeoutBeforeReady * 1000 - 50); }); }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); const splitio = SplitFactory({ ...baseConfig, urls: testUrls }); const client = splitio.client(); @@ -119,40 +119,40 @@ export default function (fetchMock, assert) { }); }); - /************** Now we will validate the intelligent mySegments pausing, which requires lots of code. Related code below. **************/ + /************** Now we will validate the intelligent memberships pausing, which requires lots of code. Related code below. **************/ localStorage.clear(); - const mySegmentsEndpointDelay = 450; + const membershipsEndpointDelay = 450; function mockForSegmentsPauseTest(testUrls, startWithSegments = false) { - let mySegmentsHits = 0; + let membershipsHits = 0; - fetchMock.get(new RegExp(`${testUrls.sdk}/mySegments/nicolas\\d?%40split.io`), function () { // Mock any mySegments call, so we can test with multiple clients. - mySegmentsHits++; - return new Promise((res) => { setTimeout(() => { res({ status: 200, body: { mySegments: [] } }); }, mySegmentsEndpointDelay); }); + fetchMock.get(new RegExp(`${testUrls.sdk}/memberships/nicolas\\d?%40split.io`), function () { // Mock any memberships call, so we can test with multiple clients. 
+ membershipsHits++; + return new Promise((res) => { setTimeout(() => { res({ status: 200, body: { ms: {} } }); }, membershipsEndpointDelay); }); }); // Now mock the no more updates state - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552669999', { status: 200, body: { splits: [], since: 1457552669999, till: 1457552669999 } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552669999', { status: 200, body: { splits: [], since: 1457552669999, till: 1457552669999 } }); if (startWithSegments) { // Adjust since and till so the order is inverted. - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: splitChangesStartWithSegmentsMock }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: { ...splitChangesUpdateWithoutSegmentsMock, since: 1457552620999, till: 1457552649999 } }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552649999', { status: 200, body: { ...splitChangesUpdateWithSegmentsMock, since: 1457552649999, till: 1457552669999 } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: splitChangesStartWithSegmentsMock }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: { ...splitChangesUpdateWithoutSegmentsMock, since: 1457552620999, till: 1457552649999 } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552649999', { status: 200, body: { ...splitChangesUpdateWithSegmentsMock, since: 1457552649999, till: 1457552669999 } }); } else { - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: splitChangesStartWithoutSegmentsMock }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesUpdateWithSegmentsMock }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552649999', { status: 200, body: splitChangesUpdateWithoutSegmentsMock }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: splitChangesStartWithoutSegmentsMock }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesUpdateWithSegmentsMock }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552649999', { status: 200, body: splitChangesUpdateWithoutSegmentsMock }); } - return () => mySegmentsHits; + return () => membershipsHits; } assert.test(t => { // Testing how the SDK pauses/resumes segments synchronization. 
const testUrls = { - sdk: 'https://sdk.baseurl/readinessMySegmentsSuite', - events: 'https://events.baseurl/readinessMySegmentsSuite' + sdk: 'https://sdk.baseurl/readinessMembershipsSuite', + events: 'https://events.baseurl/readinessMembershipsSuite' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); const start = Date.now(); const splitio = SplitFactory({ @@ -174,47 +174,42 @@ export default function (fetchMock, assert) { let readyCount = 0; client2.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; }); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'It should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'It should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; // create a client on a different event-loop tick than client and client2. client3 = splitio.client('nicolas3@split.io'); client3.once(client3.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; }); setTimeout(() => { - t.equal(getMySegmentsHits(), 1 * CLIENTS_COUNT - 1, 'mySegments should had been hit once per client on the first attempt (excluding client3), but it stopped syncing afterwards.'); + t.equal(getMembershipsHits(), 1 * CLIENTS_COUNT - 1, 'memberships should had been hit once per client on the first attempt (excluding client3), but it stopped syncing afterwards.'); }, 2500); // Now we will wait until it picks up Splits, using the SDK_UPDATE event. Features are refreshed every 3s, but segments every 1s. client.once(client.Event.SDK_UPDATE, () => { - // This update came with segments, it should have tried to fetch mySegments for all used keys. + // This update came with segments, it should have tried to fetch memberships for all used keys. setTimeout(() => { - t.equal(getMySegmentsHits(), 2 * CLIENTS_COUNT - 1, 'It should have tried to synchronize mySegments as soon as it received a new Split with segments.'); + t.equal(getMembershipsHits(), 2 * CLIENTS_COUNT - 1, 'It should have tried to synchronize memberships as soon as it received a new Split with segments.'); }, 0); - setTimeout(() => { // Nasty ugly crap to avoid listening to the update coming from mySegment calls. + setTimeout(() => { // Nasty ugly code to avoid listening to the update coming from membership calls. client.once(client.Event.SDK_UPDATE, () => { setTimeout(() => { // This update left us in an state with no segments (removed the matcher we fetched on the previous one), it should stop the producer and not trigger more requests. 
- t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize mySegments periodically.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize memberships periodically.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT - 1, 'It should have not tried to synchronize segments again after the last update that left us in a no segment state.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT - 1, 'It should have not tried to synchronize segments again after the last update that left us in a no segment state.'); t.equal(readyCount, CLIENTS_COUNT, 'all clients must be ready'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); - + splitio.destroy().then(() => { t.end(); }); }, 10000); }, 0); }); @@ -229,10 +224,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing how the SDK pauses/resumes segments synchronization in localstorage from scratch (no SDK_READY_FROM_CACHE). const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite', - events: 'https://events.baseurl/readinessLSMySegmentsSuite' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite', + events: 'https://events.baseurl/readinessLSMembershipsSuite' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); const start = Date.now(); const splitio = SplitFactory({ @@ -257,47 +252,42 @@ export default function (fetchMock, assert) { let readyCount = 0; client2.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; }); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'It should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'It should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; // create a client on a different event-loop tick than client and client2. client3 = splitio.client('nicolas3@split.io'); client3.once(client3.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for mySegments, as there were no segments in the first splits payload.'); + t.ok(Date.now() - start < 50, 'Shared client should be ready really quickly, without waiting for memberships, as there were no segments in the first splits payload.'); readyCount++; }); setTimeout(() => { - t.equal(getMySegmentsHits(), 1 * CLIENTS_COUNT -1, 'mySegments should had been hit once per client on the first attempt (excluding client3), but it stopped syncing afterwards.'); + t.equal(getMembershipsHits(), 1 * CLIENTS_COUNT -1, 'memberships should had been hit once per client on the first attempt (excluding client3), but it stopped syncing afterwards.'); }, 2500); // Now we will wait until it picks up Splits, using the SDK_UPDATE event. Features are refreshed every 3s, but segments every 1s. 
client.once(client.Event.SDK_UPDATE, () => { - // This update came with segments, it should have tried to fetch mySegments for all used keys. + // This update came with segments, it should have tried to fetch memberships for all used keys. setTimeout(() => { - t.equal(getMySegmentsHits(), 2 * CLIENTS_COUNT - 1, 'It should have tried to synchronize mySegments as soon as it received a new Split with segments.'); + t.equal(getMembershipsHits(), 2 * CLIENTS_COUNT - 1, 'It should have tried to synchronize memberships as soon as it received a new Split with segments.'); }, 0); - setTimeout(() => { // Nasty ugly crap to avoid listening to the update coming from mySegment calls. + setTimeout(() => { // Nasty ugly code to avoid listening to the update coming from membership calls. client.once(client.Event.SDK_UPDATE, () => { setTimeout(() => { // This update left us in an state with no segments (removed the matcher we fetched on the previous one), it should stop the producer and not trigger more requests. - t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize mySegments periodically.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize memberships periodically.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT - 1, 'It should have not tried to synchronize segments again after the last update that left us in a no segment state.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT - 1, 'It should have not tried to synchronize segments again after the last update that left us in a no segment state.'); t.equal(readyCount, CLIENTS_COUNT, 'all clients must be ready'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); - + splitio.destroy().then(() => { t.end(); }); }, 10000); }, 0); }); @@ -312,10 +302,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing how the SDK pauses/resumes segments synchronization. const testUrls = { - sdk: 'https://sdk.baseurl/readinessMySegmentsSuite2', - events: 'https://events.baseurl/readinessMySegmentsSuite2' + sdk: 'https://sdk.baseurl/readinessMembershipsSuite2', + events: 'https://events.baseurl/readinessMembershipsSuite2' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, true); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, true); const start = Date.now(); const splitio = SplitFactory({ @@ -337,47 +327,42 @@ export default function (fetchMock, assert) { let readyCount = 0; client2.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'Shared client should not be ready without waiting for mySegments, as there are segments in the first splits payload.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'Shared client should not be ready without waiting for memberships, as there are segments in the first splits payload.'); readyCount++; }); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'It should not be ready without waiting for mySegments, as there are segments in the first splits payload.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'It should not be ready without waiting for memberships, as there are segments in the first splits payload.'); readyCount++; // create a client on a different event-loop tick than client and client2. 
client3 = splitio.client('nicolas3@split.io'); client3.once(client3.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'Shared client should not be ready without waiting for mySegments, as there are segments in the first splits payload.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'Shared client should not be ready without waiting for memberships, as there are segments in the first splits payload.'); readyCount++; }); setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT - 1, 'mySegments should had been hit once per client on the first attempt (excluding one for client3) and keep syncing afterwards.'); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT - 1, 'memberships should had been hit once per client on the first attempt (excluding one for client3) and keep syncing afterwards.'); }, 2500); // Now we will wait until it picks up splits, using the SDK_UPDATE event. Features are refreshed every 3s, but segments every 1s (plus sync time). client.once(client.Event.SDK_UPDATE, () => { // This update came without segments, it should not trigger an extra fetch. setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT - 1, 'It should have stopped synchronizing mySegments since it transitioned to no segments state.'); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT - 1, 'It should have stopped synchronizing memberships since it transitioned to no segments state.'); }, 0); setTimeout(() => { client.once(client.Event.SDK_UPDATE, () => { setTimeout(() => { // This update left us in an state with segments again, it should trigger a request ASAP and restart the producer. - t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize mySegments periodically.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT - 1, 'It should have tried to synchronize memberships periodically.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 6 * CLIENTS_COUNT - 1, 'It should keep the producer synchronizing periodically..'); + t.equal(getMembershipsHits(), 6 * CLIENTS_COUNT - 1, 'It should keep the producer synchronizing periodically..'); t.equal(readyCount, CLIENTS_COUNT, 'all clients must be ready'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); - + splitio.destroy().then(() => { t.end(); }); }, 3000); }, 0); }); @@ -392,10 +377,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from scratch const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite2', - events: 'https://events.baseurl/readinessLSMySegmentsSuite2' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite2', + events: 'https://events.baseurl/readinessLSMembershipsSuite2' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, true); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, true); const start = Date.now(); const splitio = SplitFactory({ @@ -415,37 +400,32 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. 
const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'It should not be ready without waiting for mySegments, as there are segments in the first splits payload.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'It should not be ready without waiting for memberships, as there are segments in the first splits payload.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt and keep syncing afterwards.'); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt and keep syncing afterwards.'); }, 2500); // Now we will wait until it picks up splits, using the SDK_UPDATE event. Features are refreshed every 3s, but segments every 1s (plus sync time). client.once(client.Event.SDK_UPDATE, () => { // This update came without segments, it should not trigger an extra fetch. setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT, 'It should have stopped synchronizing mySegments since it transitioned to no segments state.'); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT, 'It should have stopped synchronizing memberships since it transitioned to no segments state.'); }, 0); setTimeout(() => { client.once(client.Event.SDK_UPDATE, () => { setTimeout(() => { // This update left us in an state with segments again, it should trigger a request ASAP and restart the producer. - t.equal(getMySegmentsHits(), 4 * CLIENTS_COUNT, 'It should have tried to synchronize mySegments periodically.'); + t.equal(getMembershipsHits(), 4 * CLIENTS_COUNT, 'It should have tried to synchronize memberships periodically.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 6 * CLIENTS_COUNT, 'It should keep the producer synchronizing periodically..'); - - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 6 * CLIENTS_COUNT, 'It should keep the producer synchronizing periodically..'); + splitio.destroy().then(() => { t.end(); }); }, 3000); }, 0); }); @@ -460,10 +440,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from scratch with segments being previously used const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite3', - events: 'https://events.baseurl/readinessLSMySegmentsSuite3' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite3', + events: 'https://events.baseurl/readinessLSMembershipsSuite3' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, true); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, true); const start = Date.now(); const splitio = SplitFactory({ @@ -483,19 +463,15 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. 
const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'It should not be ready without waiting for mySegments, when we start from cache it might be stale.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'It should not be ready without waiting for memberships, when we start from cache it might be stale.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt and keep syncing afterwards.'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt and keep syncing afterwards.'); + splitio.destroy().then(() => { t.end(); }); }, 2500); }); client.once(client.Event.SDK_READY_TIMED_OUT, () => { @@ -506,10 +482,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from cache without segments being previously used, and first update has no segments. const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite4', - events: 'https://events.baseurl/readinessLSMySegmentsSuite4' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite4', + events: 'https://events.baseurl/readinessLSMembershipsSuite4' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); const start = Date.now(); const splitio = SplitFactory({ @@ -529,19 +505,15 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { t.ok(Date.now() - start < 50, 'It should be ready quickly, since it had no segments and update has no segments either.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 1 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt but stopped syncing afterwards'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 1 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt but stopped syncing afterwards'); + splitio.destroy().then(() => { t.end(); }); }, 4500); }); client.once(client.Event.SDK_READY_TIMED_OUT, () => { @@ -552,14 +524,14 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from cache without segments being previously used, and first update HAS segments. const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite5', - events: 'https://events.baseurl/readinessLSMySegmentsSuite5' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite5', + events: 'https://events.baseurl/readinessLSMembershipsSuite5' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); - // I'm having the first update of Splits come with segments. 
In this scenario it'll wait for mySegments to download before being ready. - fetchMock.get({ url: testUrls.sdk + '/splitChanges?s=1.1&since=1457552669999', overwriteRoutes: true }, { status: 200, body: { ...splitChangesUpdateWithSegmentsMock, since: 1457552669999, till: 1457552679999 } }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552679999', { status: 200, body: { splits: [], since: 1457552679999, till: 1457552679999 } }); + // I'm having the first update of Splits come with segments. In this scenario it'll wait for memberships to download before being ready. + fetchMock.get({ url: testUrls.sdk + '/splitChanges?s=1.2&since=1457552669999', overwriteRoutes: true }, { status: 200, body: { ...splitChangesUpdateWithSegmentsMock, since: 1457552669999, till: 1457552679999 } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552679999', { status: 200, body: { splits: [], since: 1457552679999, till: 1457552679999 } }); const start = Date.now(); const splitio = SplitFactory({ @@ -579,19 +551,15 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { const delay = Date.now() - start; - t.ok(delay >= mySegmentsEndpointDelay, 'It should not be ready without waiting for mySegments, when we start from cache it might be stale.'); + t.ok(delay >= membershipsEndpointDelay, 'It should not be ready without waiting for memberships, when we start from cache it might be stale.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt but stopped syncing afterwards'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt but stopped syncing afterwards'); + splitio.destroy().then(() => { t.end(); }); }, 3000); }); client.once(client.Event.SDK_READY_TIMED_OUT, () => { @@ -602,10 +570,10 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from cache with segments being previously used, and update is empty. const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite6', - events: 'https://events.baseurl/readinessLSMySegmentsSuite6' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite6', + events: 'https://events.baseurl/readinessLSMembershipsSuite6' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); const start = Date.now(); const splitio = SplitFactory({ @@ -625,19 +593,15 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. 
const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start >= mySegmentsEndpointDelay, 'It should not be ready without waiting for mySegments, when we start from cache it might be stale and we had segments even though the update has nothing.'); + t.ok(Date.now() - start >= membershipsEndpointDelay, 'It should not be ready without waiting for memberships, when we start from cache it might be stale and we had segments even though the update has nothing.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 3 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt and kept syncing afterwards'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 3 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt and kept syncing afterwards'); + splitio.destroy().then(() => { t.end(); }); }, 3000); }); client.once(client.Event.SDK_READY_TIMED_OUT, () => { @@ -648,13 +612,13 @@ export default function (fetchMock, assert) { assert.test(t => { // Testing when we start from cache with segments being previously used and first update removes segments const testUrls = { - sdk: 'https://sdk.baseurl/readinessLSMySegmentsSuite7', - events: 'https://events.baseurl/readinessLSMySegmentsSuite7' + sdk: 'https://sdk.baseurl/readinessLSMembershipsSuite7', + events: 'https://events.baseurl/readinessLSMembershipsSuite7' }; - const getMySegmentsHits = mockForSegmentsPauseTest(testUrls, false); - // I'm having the first update of Splits come without segments. In this scenario it'll NOT wait for mySegments to download before being ready. - fetchMock.get({ url: testUrls.sdk + '/splitChanges?s=1.1&since=1457552669999', overwriteRoutes: true }, { status: 200, body: { ...splitChangesUpdateWithoutSegmentsMock, since: 1457552669999, till: 1457552679999 } }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552679999', { status: 200, body: { splits: [], since: 1457552679999, till: 1457552679999 } }); + const getMembershipsHits = mockForSegmentsPauseTest(testUrls, false); + // I'm having the first update of Splits come without segments. In this scenario it'll NOT wait for memberships to download before being ready. + fetchMock.get({ url: testUrls.sdk + '/splitChanges?s=1.2&since=1457552669999', overwriteRoutes: true }, { status: 200, body: { ...splitChangesUpdateWithoutSegmentsMock, since: 1457552669999, till: 1457552679999 } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552679999', { status: 200, body: { splits: [], since: 1457552679999, till: 1457552679999 } }); const start = Date.now(); const splitio = SplitFactory({ @@ -674,19 +638,15 @@ export default function (fetchMock, assert) { }); const CLIENTS_COUNT = 3; // Just so it's easier to read the assertions. 
const client = splitio.client(); - const client2 = splitio.client('nicolas2@split.io'); - const client3 = splitio.client('nicolas3@split.io'); + splitio.client('nicolas2@split.io'); + splitio.client('nicolas3@split.io'); client.once(client.Event.SDK_READY, () => { - t.ok(Date.now() - start < 50, 'It should be ready without waiting for mySegments, since when it downloads changes it will have no more use for them.'); + t.ok(Date.now() - start < 50, 'It should be ready without waiting for memberships, since when it downloads changes it will have no more use for them.'); setTimeout(() => { - t.equal(getMySegmentsHits(), 1 * CLIENTS_COUNT, 'mySegments should had been hit once per client on the first attempt and stopped syncing afterwards'); - Promise.all([ - client2.destroy(), - client3.destroy(), - client.destroy() - ]).then(() => { t.end(); }); + t.equal(getMembershipsHits(), 1 * CLIENTS_COUNT, 'memberships should had been hit once per client on the first attempt and stopped syncing afterwards'); + splitio.destroy().then(() => { t.end(); }); }, 3000); }); client.once(client.Event.SDK_READY_TIMED_OUT, () => { diff --git a/src/__tests__/browserSuites/ready-from-cache.spec.js b/src/__tests__/browserSuites/ready-from-cache.spec.js index 8326136..b41220f 100644 --- a/src/__tests__/browserSuites/ready-from-cache.spec.js +++ b/src/__tests__/browserSuites/ready-from-cache.spec.js @@ -3,7 +3,7 @@ import { SplitFactory, InLocalStorage } from '../../'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsNicolas from '../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolas from '../mocks/memberships.nicolas@split.io.json'; import { nearlyEqual } from '../testUtils'; @@ -83,8 +83,8 @@ const baseConfig = { streamingEnabled: false }; -const expectedHashNullFilter = '2a2c20bb'; // for SDK key '', filter query null, and flags spec version '1.1' -const expectedHashWithFilter = 'fdf7bd89'; // for SDK key '', filter query '&names=p1__split,p2__split', and flags spec version '1.1' +const expectedHashNullFilter = 'db8943b4'; // for SDK key '', filter query null, and flags spec version '1.2' +const expectedHashWithFilter = '7ccd6b31'; // for SDK key '', filter query '&names=p1__split,p2__split', and flags spec version '1.2' export default function (fetchMock, assert) { @@ -96,11 +96,11 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(3); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: splitChangesMock1 }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: mySegmentsNicolas }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas2%40split.io', { status: 200, body: { 'mySegments': [] } }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas3%40split.io', { status: 200, body: { 'mySegments': [] } }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: splitChangesMock1 }); + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: membershipsNicolas }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas2%40split.io', { status: 200, body: { 'ms': {} } }); + fetchMock.get(testUrls.sdk + 
'/memberships/nicolas3%40split.io', { status: 200, body: { 'ms': {} } }); const splitio = SplitFactory({ ...baseConfig, @@ -146,18 +146,18 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(12 * 2 + 3); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=25', function () { + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=25', function () { return new Promise(res => { setTimeout(() => res({ status: 200, body: { ...splitChangesMock1, since: 25 }, headers: {} }), 200); }); // 400ms is how long it'll take to reply with Splits, no SDK_READY should be emitted before that. }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: mySegmentsNicolas, headers: {} }), 400); }); // First client gets segments before splits. No segment cache loading (yet) + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: membershipsNicolas, headers: {} }), 400); }); // First client gets segments before splits. No segment cache loading (yet) }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas2%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 700); }); // Second client gets segments after 700ms + fetchMock.get(testUrls.sdk + '/memberships/nicolas2%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 700); }); // Second client gets segments after 700ms }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas3%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 1000); }); // Third client mySegments will come after 1s + fetchMock.get(testUrls.sdk + '/memberships/nicolas3%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 1000); }); // Third client memberships will come after 1s }); fetchMock.postOnce(testUrls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(testUrls.events + '/testImpressions/count', 200); @@ -229,7 +229,7 @@ export default function (fetchMock, assert) { Promise.all([client3.destroy(), client2.destroy(), client.destroy()]).then(() => { t.equal(localStorage.getItem('some_user_item'), 'user_item', 'user items at localStorage must not be changed'); t.equal(localStorage.getItem('readyFromCache_2.SPLITIO.splits.till'), '1457552620999', 'splits.till must correspond to the till of the last successfully fetched Splits'); - t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_2.SPLITIO.splits.lastUpdated')), Date.now() - 800 /* 800 ms between last Split and MySegments fetch */), 'lastUpdated is added and must correspond to the timestamp of the last successfully fetched Splits'); + t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_2.SPLITIO.splits.lastUpdated')), Date.now() - 800 /* 800 ms between last splitChanges and memberships fetch */), 'lastUpdated is added and must correspond to the timestamp of the last successfully fetched Splits'); }); }); t.true(Date.now() - startTime >= 1000, 'It 
should emit SDK_READY too but after syncing with the cloud.'); @@ -237,11 +237,11 @@ export default function (fetchMock, assert) { }); client3.on(client3.Event.SDK_READY_TIMED_OUT, () => { client3.ready().catch(() => { - t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with memberships data from cache.'); }); - t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with memberships data from cache.'); }); }); @@ -253,21 +253,21 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(12 * 2 + 5); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=25', function () { + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=25', function () { t.equal(localStorage.getItem('readyFromCache_3.SPLITIO.split.always_on'), alwaysOnSplitInverted, 'feature flags must not be cleaned from cache'); return new Promise(res => { setTimeout(() => res({ status: 200, body: { ...splitChangesMock1, since: 25 }, headers: {} }), 200); }); // 400ms is how long it'll take to reply with Splits, no SDK_READY should be emitted before that. }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: mySegmentsNicolas, headers: {} }), 400); }); // First client gets segments before splits. No segment cache loading (yet) + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: membershipsNicolas, headers: {} }), 400); }); // First client gets segments before splits. 
No segment cache loading (yet) }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas2%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 700); }); // Second client gets segments after 700ms + fetchMock.get(testUrls.sdk + '/memberships/nicolas2%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 700); }); // Second client gets segments after 700ms }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas3%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 1000); }); // Third client mySegments will come after 1s + fetchMock.get(testUrls.sdk + '/memberships/nicolas3%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 1000); }); // Third client memberships will come after 1s }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas4%40split.io', { 'mySegments': [] }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas4%40split.io', { 'ms': {} }); fetchMock.postOnce(testUrls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(testUrls.events + '/testImpressions/count', 200); @@ -346,7 +346,7 @@ export default function (fetchMock, assert) { Promise.all([client3.destroy(), client2.destroy(), client.destroy()]).then(() => { t.equal(localStorage.getItem('some_user_item'), 'user_item', 'user items at localStorage must not be changed'); t.equal(localStorage.getItem('readyFromCache_3.SPLITIO.splits.till'), '1457552620999', 'splits.till must correspond to the till of the last successfully fetched Splits'); - t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_3.SPLITIO.splits.lastUpdated')), Date.now() - 800 /* 800 ms between last Split and MySegments fetch */), 'lastUpdated must correspond to the timestamp of the last successfully fetched Splits'); + t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_3.SPLITIO.splits.lastUpdated')), Date.now() - 800 /* 800 ms between last Split and memberships fetch */), 'lastUpdated must correspond to the timestamp of the last successfully fetched Splits'); }); }); t.true(Date.now() - startTime >= 1000, 'It should emit SDK_READY too but after syncing with the cloud.'); @@ -354,11 +354,11 @@ export default function (fetchMock, assert) { }); client3.on(client3.Event.SDK_READY_TIMED_OUT, () => { client3.ready().catch(() => { - t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with memberships data from cache.'); }); - t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'on', 'It should evaluate treatments with memberships data from cache.'); }); }); @@ -369,21 +369,21 @@ 
export default function (fetchMock, assert) { }; localStorage.clear(); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=-1', function () { + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=-1', function () { t.equal(localStorage.getItem('some_user_item'), 'user_item', 'user items at localStorage must not be changed'); t.equal(localStorage.getItem('readyFromCache_4.SPLITIO.hash'), expectedHashNullFilter, 'storage hash must not be changed'); t.equal(localStorage.length, 2, 'feature flags cache data must be cleaned from localStorage'); return { status: 200, body: splitChangesMock1 }; }); - fetchMock.get(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: splitChangesMock2 }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: mySegmentsNicolas, headers: {} }), 400); }); // First client gets segments before splits. No segment cache loading (yet) + fetchMock.get(testUrls.sdk + '/splitChanges?s=1.2&since=1457552620999', { status: 200, body: splitChangesMock2 }); + fetchMock.get(testUrls.sdk + '/memberships/nicolas%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: membershipsNicolas, headers: {} }), 400); }); // First client gets segments before splits. No segment cache loading (yet) }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas2%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 700); }); // Second client gets segments after 700ms + fetchMock.get(testUrls.sdk + '/memberships/nicolas2%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 700); }); // Second client gets segments after 700ms }); - fetchMock.get(testUrls.sdk + '/mySegments/nicolas3%40split.io', function () { - return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'mySegments': [] }, headers: {} }), 1000); }); // Third client mySegments will come after 1s + fetchMock.get(testUrls.sdk + '/memberships/nicolas3%40split.io', function () { + return new Promise(res => { setTimeout(() => res({ status: 200, body: { 'ms': {} }, headers: {} }), 1000); }); // Third client memberships will come after 1s }); fetchMock.postOnce(testUrls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(testUrls.events + '/testImpressions/count', 200); @@ -456,7 +456,7 @@ export default function (fetchMock, assert) { Promise.all([client3.destroy(), client2.destroy(), client.destroy()]).then(() => { t.equal(localStorage.getItem('some_user_item'), 'user_item', 'user items at localStorage must not be changed'); t.equal(localStorage.getItem('readyFromCache_4.SPLITIO.splits.till'), '1457552620999', 'splits.till must correspond to the till of the last successfully fetched Splits'); - t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_4.SPLITIO.splits.lastUpdated')), Date.now() - 1000 /* 1000 ms between last Split and MySegments fetch */), 'lastUpdated must correspond to the timestamp of the last successfully fetched Splits'); + t.true(nearlyEqual(parseInt(localStorage.getItem('readyFromCache_4.SPLITIO.splits.lastUpdated')), Date.now() - 1000 /* 1000 ms between last Split and memberships fetch */), 'lastUpdated must correspond to the timestamp of the last successfully fetched Splits'); t.end(); }); @@ -466,11 +466,11 @@ export default function (fetchMock, assert) { }); 
client3.on(client3.Event.SDK_READY_TIMED_OUT, () => { client3.ready().catch(() => { - t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'control', 'It should not evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should reject ready promise before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'control', 'It should not evaluate treatments with memberships data from cache.'); }); - t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing mySegments data with the cloud.'); - t.equal(client3.getTreatment('always_on'), 'control', 'It should evaluate treatments with mySegments data from cache.'); + t.true(Date.now() - startTime >= 850, 'It should emit SDK_READY_TIMED_OUT before syncing memberships data with the cloud.'); + t.equal(client3.getTreatment('always_on'), 'control', 'It should not evaluate treatments with memberships data from cache.'); }); }); @@ -484,9 +484,8 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(7); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1&names=p1__split,p2__split', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE - // fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=1457552620999&names=p1__split', { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1&names=p1__split,p2__split', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); localStorage.setItem('some_user_item', 'user_item'); localStorage.setItem('readyFromCache_5.SPLITIO.splits.till', 25); @@ -509,7 +508,7 @@ export default function (fetchMock, assert) { const manager = splitio.manager(); client.once(client.Event.SDK_READY_FROM_CACHE, () => { - t.fail('It should not emit SDK_READY_FROM_CACHE if cache is empty.'); + t.fail('It should not emit SDK_READY_FROM_CACHE because localStorage is cleared and there are no cached feature flags'); t.end(); }); @@ -536,8 +535,8 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(5); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1&names=p1__split,p2__split', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1&names=p1__split,p2__split', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); const splitio =
SplitFactory({ ...baseConfig, @@ -579,15 +578,14 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(7); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=25&names=p2__split&prefixes=p1', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: 25, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=25&names=p2__split&prefixes=p1', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: 25, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); - const expectedHash = getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=p2__split&prefixes=p1' }, flagSpecVersion: '1.1' } }); + const expectedHash = getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=p2__split&prefixes=p1' }, flagSpecVersion: '1.2' } }); localStorage.setItem('some_user_item', 'user_item'); localStorage.setItem('readyFromCache_6.SPLITIO.splits.till', 25); localStorage.setItem('readyFromCache_6.SPLITIO.split.p1__split', JSON.stringify(splitDeclarations.p1__split)); localStorage.setItem('readyFromCache_6.SPLITIO.split.p2__split', JSON.stringify(splitDeclarations.p2__split)); - localStorage.setItem('readyFromCache_6.SPLITIO.splits.filterQuery', '&names=p2__split&prefixes=p1'); localStorage.setItem('readyFromCache_6.SPLITIO.hash', expectedHash); const splitio = SplitFactory({ @@ -630,10 +628,10 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(6); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1&prefixes=p1,p2', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1&prefixes=p1,p2', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); - const expectedHash = getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&prefixes=p1,p2' }, flagSpecVersion: '1.1' } }); + const expectedHash = getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&prefixes=p1,p2' }, flagSpecVersion: '1.2' } }); localStorage.setItem('some_user_item', 'user_item'); localStorage.setItem('readyFromCache_7.SPLITIO.splits.till', 25); localStorage.setItem('readyFromCache_7.SPLITIO.split.p1__split', JSON.stringify(splitDeclarations.p1__split)); @@ -695,8 +693,8 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(7); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split, splitDeclarations.p3__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short 
delay to let emit SDK_READY_FROM_CACHE - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: { splits: [splitDeclarations.p1__split, splitDeclarations.p2__split, splitDeclarations.p3__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); localStorage.setItem('some_user_item', 'user_item'); localStorage.setItem('readyFromCache_8.SPLITIO.splits.till', 25); @@ -748,14 +746,14 @@ export default function (fetchMock, assert) { localStorage.clear(); t.plan(6); - fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.1&since=-1&names=no%20exist%20trim,no_exist,p3__split&prefixes=no%20exist%20trim,p2', { status: 200, body: { splits: [splitDeclarations.p2__split, splitDeclarations.p3__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE - fetchMock.getOnce(testUrls.sdk + '/mySegments/nicolas%40split.io', { status: 200, body: { mySegments: [] } }); + fetchMock.getOnce(testUrls.sdk + '/splitChanges?s=1.2&since=-1&names=no%20exist%20trim,no_exist,p3__split&prefixes=no%20exist%20trim,p2', { status: 200, body: { splits: [splitDeclarations.p2__split, splitDeclarations.p3__split], since: -1, till: 1457552620999 } }, { delay: 10 }); // short delay to let emit SDK_READY_FROM_CACHE + fetchMock.getOnce(testUrls.sdk + '/memberships/nicolas%40split.io', { status: 200, body: { ms: {} } }); localStorage.setItem('some_user_item', 'user_item'); localStorage.setItem('readyFromCache_9.SPLITIO.splits.till', 25); localStorage.setItem('readyFromCache_9.SPLITIO.split.p1__split', JSON.stringify(splitDeclarations.p1__split)); localStorage.setItem('readyFromCache_9.SPLITIO.split.p2__split', JSON.stringify(splitDeclarations.p2__split)); - localStorage.setItem('readyFromCache_9.SPLITIO.hash', getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=p2__split&prefixes=p1' }, flagSpecVersion: '1.1' } })); + localStorage.setItem('readyFromCache_9.SPLITIO.hash', getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=p2__split&prefixes=p1' }, flagSpecVersion: '1.2' } })); const splitio = SplitFactory({ ...baseConfig, @@ -784,7 +782,7 @@ export default function (fetchMock, assert) { t.equal(localStorage.getItem('readyFromCache_9.SPLITIO.splits.till'), '1457552620999', 'splits.till must correspond to the till of the last successfully fetched Splits'); t.equal(localStorage.getItem('readyFromCache_9.SPLITIO.split.p2__split'), JSON.stringify(splitDeclarations.p2__split), 'feature flag declarations must be cached'); t.equal(localStorage.getItem('readyFromCache_9.SPLITIO.split.p3__split'), JSON.stringify(splitDeclarations.p3__split), 'feature flag declarations must be cached'); - t.equal(localStorage.getItem('readyFromCache_9.SPLITIO.hash'), getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=no%20exist%20trim,no_exist,p3__split&prefixes=no%20exist%20trim,p2' }, flagSpecVersion: '1.1' } }), 'Storage hash must correspond to the split filter query and SDK key'); + t.equal(localStorage.getItem('readyFromCache_9.SPLITIO.hash'), getStorageHash({ ...baseConfig, sync: { __splitFiltersValidation: { queryString: '&names=no%20exist%20trim,no_exist,p3__split&prefixes=no%20exist%20trim,p2' }, 
flagSpecVersion: '1.2' } }), 'Storage hash must correspond to the split filter query and SDK key'); t.end(); }); }); diff --git a/src/__tests__/browserSuites/ready-promise.spec.js b/src/__tests__/browserSuites/ready-promise.spec.js index ffae5a4..0f420d6 100644 --- a/src/__tests__/browserSuites/ready-promise.spec.js +++ b/src/__tests__/browserSuites/ready-promise.spec.js @@ -13,7 +13,7 @@ const consoleSpy = { import { SplitFactory, WarnLogger } from '../../'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; const baseConfig = { core: { @@ -59,9 +59,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' in both attempts - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -107,9 +107,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' only for the first attempt - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -157,9 +157,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' only for the first 
attempt - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -226,16 +226,16 @@ export default function readyPromiseAssertions(fetchMock, assert) { config.scheduler.featuresRefreshRate) - config.startup.readyTimeout) + refreshTimeMillis; // /splitChanges takes longer than 'requestTimeoutBeforeReady' in both initial attempts - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: refreshTimeMillis }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: refreshTimeMillis }); // main client endpoint configured to fetch segments before request timeout - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/splitChanges?s=1.1&since=1457552620999', { splits: [], since: 1457552620999, till: 1457552620999 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/splitChanges?s=1.2&since=1457552620999', { splits: [], since: 1457552620999, till: 1457552620999 }); // shared client endpoint configured to fetch segments immediately, in order to emit SDK_READY as soon as splits arrives - fetchMock.get(config.urls.sdk + '/mySegments/nicolas%40split.io', mySegmentsFacundo); + fetchMock.get(config.urls.sdk + '/memberships/nicolas%40split.io', membershipsFacundo); // shared client endpoint configured to emit SDK_READY_TIMED_OUT - fetchMock.get(config.urls.sdk + '/mySegments/emiliano%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.readyTimeout) + 20 }); + 
fetchMock.get(config.urls.sdk + '/memberships/emiliano%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.readyTimeout) + 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -264,7 +264,7 @@ export default function readyPromiseAssertions(fetchMock, assert) { const timeoutClient = splitio.client('emiliano@split.io'); timeoutClient.ready().then(undefined, () => { // setting onRejected handler via `then` method - t.pass('### Shared client TIMED OUT - promise rejected since mySegments fetch took more time than readyTimeout'); + t.pass('### Shared client TIMED OUT - promise rejected since memberships fetch took more time than readyTimeout'); timeoutClient.ready().catch(() => { // setting onRejected handler via `catch` method t.pass('### Shared client TIMED OUT - promise keeps being rejected'); timeoutClient.on(timeoutClient.Event.SDK_READY, () => { @@ -310,8 +310,8 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' - fetchMock.get(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -359,9 +359,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { } }; - // Both /splitChanges and /mySegments take less than 'requestTimeoutBeforeReady' - fetchMock.get(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + // Both /splitChanges and /memberships take less than 'requestTimeoutBeforeReady' + fetchMock.get(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -407,9 +407,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' only for the first attempt - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - 
fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -433,7 +433,7 @@ export default function readyPromiseAssertions(fetchMock, assert) { }); }, 0); - // `ready`` is called in 0.15 seconds, when the promise is just rejected. Thus, the 'reject' callback is expected to be called immediately (0 seconds aprox). + // `ready` is called in 0.15 seconds, when the promise is just rejected. Thus, the 'reject' callback is expected to be called immediately (0 seconds aprox). setTimeout(() => { const tStart = Date.now(); manager.ready() @@ -496,11 +496,11 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' only for the first attempt - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/nicolas%40split.io', mySegmentsFacundo); - fetchMock.get(config.urls.sdk + '/mySegments/emiliano%40split.io', mySegmentsFacundo); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/nicolas%40split.io', membershipsFacundo); + fetchMock.get(config.urls.sdk + '/memberships/emiliano%40split.io', membershipsFacundo); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); @@ -581,9 +581,9 @@ export default function readyPromiseAssertions(fetchMock, assert) { }; // /splitChanges takes longer than 'requestTimeoutBeforeReady' - fetchMock.get(config.urls.sdk + '/splitChanges?s=1.1&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/facundo%40split.io', mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); - fetchMock.get(config.urls.sdk + '/mySegments/nicolas%40split.io', 
mySegmentsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/splitChanges?s=1.2&since=-1', splitChangesMock1, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) + 20 }); + fetchMock.get(config.urls.sdk + '/memberships/facundo%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); + fetchMock.get(config.urls.sdk + '/memberships/nicolas%40split.io', membershipsFacundo, { delay: fromSecondsToMillis(config.startup.requestTimeoutBeforeReady) - 20 }); fetchMock.postOnce(config.urls.events + '/testImpressions/bulk', 200); fetchMock.postOnce(config.urls.events + '/testImpressions/count', 200); diff --git a/src/__tests__/browserSuites/shared-instantiation.spec.js b/src/__tests__/browserSuites/shared-instantiation.spec.js index dddbe9f..9f9423a 100644 --- a/src/__tests__/browserSuites/shared-instantiation.spec.js +++ b/src/__tests__/browserSuites/shared-instantiation.spec.js @@ -14,9 +14,9 @@ const settings = settingsFactory({ * @param {boolean} sdkIgnoredTT whether the SDK ignores TT (i.e, clients without bound TT) or not (client with optional bound TT) */ export default function sharedInstantiationSuite(startWithTT, sdkIgnoresTT, fetchMock, assert) { - // mocking mySegments endpoints with delays for new clients - fetchMock.get(url(settings, '/mySegments/emiliano%2Fsplit.io'), { status: 200, body: { mySegments: [] } }, { delay: 100 }); - fetchMock.get(url(settings, '/mySegments/matias%25split.io'), { status: 200, body: { mySegments: [] } }, { delay: 200 }); + // mocking memberships endpoints with delays for new clients + fetchMock.get(url(settings, '/memberships/emiliano%2Fsplit.io'), { status: 200, body: { ms: {} } }, { delay: 100 }); + fetchMock.get(url(settings, '/memberships/matias%25split.io'), { status: 200, body: { ms: {} } }, { delay: 200 }); const factory = SplitFactory({ core: { diff --git a/src/__tests__/browserSuites/single-sync.spec.js b/src/__tests__/browserSuites/single-sync.spec.js index 19e46db..a2d7c16 100644 --- a/src/__tests__/browserSuites/single-sync.spec.js +++ b/src/__tests__/browserSuites/single-sync.spec.js @@ -4,7 +4,7 @@ import { url } from '../testUtils'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsNicolasMock2 from '../mocks/mysegments.nicolas@split.io.json'; +import membershipsNicolasMock2 from '../mocks/memberships.nicolas@split.io.json'; const baseUrls = { sdk: 'https://sdk.single-sync/api', @@ -36,7 +36,7 @@ const settings = settingsFactory(config); export default function singleSync(fetchMock, assert) { - fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function () { + fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), function () { assert.pass('first splitChanges fetch'); return { status: 200, body: splitChangesMock1 }; }); @@ -45,13 +45,13 @@ export default function singleSync(fetchMock, assert) { return { status: 200, body: splitChangesMock2 }; }); - fetchMock.getOnce(url(settings, '/mySegments/nicolas%40split.io'), function () { - assert.pass('first mySegments fetch'); - return { status: 200, body: mySegmentsNicolasMock2 }; + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function () { + assert.pass('first memberships fetch'); + return { status: 200, body: membershipsNicolasMock2 }; }); - fetchMock.getOnce(url(settings, 
'/mySegments/nicolas%40split.io'), function () { - assert.fail('mySegments should not be called again'); - return { status: 200, body: mySegmentsNicolasMock2 }; + fetchMock.getOnce(url(settings, '/memberships/nicolas%40split.io'), function () { + assert.fail('memberships should not be called again'); + return { status: 200, body: membershipsNicolasMock2 }; }); let splitio, client = false; diff --git a/src/__tests__/browserSuites/telemetry.spec.js b/src/__tests__/browserSuites/telemetry.spec.js index a5fed4f..bef7e77 100644 --- a/src/__tests__/browserSuites/telemetry.spec.js +++ b/src/__tests__/browserSuites/telemetry.spec.js @@ -21,10 +21,10 @@ const config = { }; export default async function telemetryBrowserSuite(fetchMock, assert) { - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=-1', 500); - fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: splitChangesMock1 }); - fetchMock.getOnce(baseUrls.sdk + '/mySegments/user-key', 500); - fetchMock.getOnce(baseUrls.sdk + '/mySegments/user-key', { status: 200, body: { 'mySegments': [ 'one_segment'] } }); + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=-1', 500); + fetchMock.getOnce(baseUrls.sdk + '/splitChanges?s=1.2&since=-1', { status: 200, body: splitChangesMock1 }); + fetchMock.getOnce(baseUrls.sdk + '/memberships/user-key', 500); + fetchMock.getOnce(baseUrls.sdk + '/memberships/user-key', { status: 200, body: { 'ms': { k: [{ n: 'one_segment' }] } } }); // We need to handle all requests properly fetchMock.postOnce(baseUrls.events + '/testImpressions/bulk', 200); @@ -54,13 +54,13 @@ export default async function telemetryBrowserSuite(fetchMock, assert) { const data = JSON.parse(opts.body); // Validate last successful sync - assert.deepEqual(Object.keys(data.lS), ['ms', 'sp', 'te'], 'Successful splitChanges, mySegments and metrics/config requests'); + assert.deepEqual(Object.keys(data.lS), ['ms', 'sp', 'te'], 'Successful splitChanges, memberships and metrics/config requests'); lastSync = data.lS; delete data.lS; // Validate http and method latencies const getLatencyCount = buckets => buckets ? 
buckets.reduce((accum, entry) => accum + entry, 0) : 0; assert.equal(getLatencyCount(data.hL.sp), 2, 'Two latency metrics for splitChanges GET request'); - assert.equal(getLatencyCount(data.hL.ms), 2, 'Two latency metrics for mySegments GET request'); + assert.equal(getLatencyCount(data.hL.ms), 2, 'Two latency metrics for memberships GET request'); assert.equal(getLatencyCount(data.hL.te), 1, 'One latency metric for telemetry config POST request'); assert.equal(getLatencyCount(data.mL.t), 2, 'Two latency metrics for getTreatment (one not ready usage'); assert.equal(getLatencyCount(data.mL.ts), 1, 'One latency metric for getTreatments'); @@ -71,7 +71,7 @@ export default async function telemetryBrowserSuite(fetchMock, assert) { // @TODO check if iDe value is correct assert.deepEqual(data, { - mE: {}, hE: { sp: { 500: 1 }, ms: { 500: 1 } }, tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 31, seC: 1, skC: 1, eQ: 1, eD: 0, sE: [], t: [], ufs: { sp: 0, ms: 0 } + mE: {}, hE: { sp: { 500: 1 }, ms: { 500: 1 } }, tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 32, seC: 1, skC: 1, eQ: 1, eD: 0, sE: [], t: [], ufs: {} }, 'metrics/usage JSON payload should be the expected'); finish.next(); @@ -91,7 +91,7 @@ export default async function telemetryBrowserSuite(fetchMock, assert) { // @TODO check if iDe value is correct assert.deepEqual(data, { mL: {}, mE: {}, hE: {}, hL: {}, // errors and latencies were popped - tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 31, seC: 1, skC: 1, eQ: 1, eD: 0, sE: [], t: [], ufs: { sp: 0, ms: 0 } + tR: 0, aR: 0, iQ: 4, iDe: 1, iDr: 0, spC: 32, seC: 1, skC: 1, eQ: 1, eD: 0, sE: [], t: [], ufs: {} }, '2nd metrics/usage JSON payload should be the expected'); return 200; }); diff --git a/src/__tests__/browserSuites/use-beacon-api.debug.spec.js b/src/__tests__/browserSuites/use-beacon-api.debug.spec.js index 2dede6e..7de576e 100644 --- a/src/__tests__/browserSuites/use-beacon-api.debug.spec.js +++ b/src/__tests__/browserSuites/use-beacon-api.debug.spec.js @@ -2,7 +2,7 @@ import sinon from 'sinon'; import { SplitFactory } from '../../'; import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; import { DEBUG } from '@splitsoftware/splitio-commons/src/utils/constants'; import { url } from '../testUtils'; import { triggerPagehideEvent, triggerVisibilitychange } from '../testUtils/browser'; @@ -66,9 +66,9 @@ function beaconApiNotSendTestDebug(fetchMock, assert) { sendBeaconSpyDebug = sinon.spy(window.navigator, 'sendBeacon'); // Mocking this specific route to make sure we only get the items we want to test from the handlers. 
- fetchMock.get(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); // Init and run Split client const splitio = SplitFactory(config); diff --git a/src/__tests__/browserSuites/use-beacon-api.spec.js b/src/__tests__/browserSuites/use-beacon-api.spec.js index 9065cfd..bde4260 100644 --- a/src/__tests__/browserSuites/use-beacon-api.spec.js +++ b/src/__tests__/browserSuites/use-beacon-api.spec.js @@ -2,7 +2,7 @@ import sinon from 'sinon'; import { SplitFactory } from '../../'; import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; import { url } from '../testUtils'; import { OPTIMIZED } from '@splitsoftware/splitio-commons/src/utils/constants'; import { triggerPagehideEvent, triggerVisibilitychange } from '../testUtils/browser'; @@ -78,9 +78,9 @@ function beaconApiNotSendTest(fetchMock, assert) { sendBeaconSpy = sinon.spy(window.navigator, 'sendBeacon'); // Mocking this specific route to make sure we only get the items we want to test from the handlers. 
- fetchMock.get(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: { splits: [], since: 1457552620999, till: 1457552620999 } }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); // Init and run Split client const splitio = SplitFactory(config); diff --git a/src/__tests__/browserSuites/user-consent.spec.js b/src/__tests__/browserSuites/user-consent.spec.js index 024fcb4..ed1caba 100644 --- a/src/__tests__/browserSuites/user-consent.spec.js +++ b/src/__tests__/browserSuites/user-consent.spec.js @@ -72,7 +72,7 @@ function mockSubmittersRequests(fetchMock, assert, impressionFeature, eventTypeI export default function userConsent(fetchMock, t) { - // Validate trackers, submitters and browser listener behaviour on different consent status transitions + // Validate trackers, submitters and browser listener behavior on different consent status transitions t.test(async (assert) => { const sendBeaconSpy = sinon.spy(window.navigator, 'sendBeacon'); let expectedTrackedImpressions = 0; @@ -97,7 +97,7 @@ export default function userConsent(fetchMock, t) { ], ['on', 'on', 'on', 'on', 'on', 'on', 'on', 'on'], 'evaluating on SDK ready'); if (isTracking) expectedTrackedImpressions += 8; - // Trigger pagehide event to validate browser listener behaviour + // Trigger pagehide event to validate browser listener behavior // Beacon API is used only if user consent is GRANTED triggerPagehideEvent(); if (factory.UserConsent.getStatus() === factory.UserConsent.Status.GRANTED) { @@ -137,9 +137,9 @@ export default function userConsent(fetchMock, t) { assert.equal(trackedImpressions.length, expectedTrackedImpressions, 'Tracked impressions are the expected'); sendBeaconSpy.restore(); assert.end(); - }, 'Validate trackers, submitters and browser listener behaviour on different consent status transitions'); + }, 'Validate trackers, submitters and browser listener behavior on different consent status transitions'); - // Validate submitter's behaviour with full queues and with events first push window + // Validate submitter's behavior with full queues and with events first push window t.test(async (assert) => { const config = { ...baseConfig, @@ -182,6 +182,6 @@ export default function userConsent(fetchMock, t) { await client.destroy(); assert.end(); - }, 'Validate submitter\'s behaviour with full queues and with events first push window'); + }, 'Validate submitter\'s behavior with full queues and with events first push window'); } diff --git a/src/__tests__/consumer/browser_consumer.spec.js b/src/__tests__/consumer/browser_consumer.spec.js index 8c05edb..a24f53c 100644 --- a/src/__tests__/consumer/browser_consumer.spec.js +++ b/src/__tests__/consumer/browser_consumer.spec.js @@ -31,7 +31,7 @@ const config = { wrapper: wrapperInstance }), sync: { - impressionsMode: 'DEBUG' + impressionsMode: 'DEBUG', }, }; @@ -62,8 +62,8 @@ tape('Browser Consumer mode with pluggable storage', function (t) { /** Evaluation, track and manager methods before SDK_READY */ - 
assert.equal(client.__getStatus().isReadyFromCache, false, 'SDK in consumer mode is not operational inmediatelly'); - assert.equal(client.__getStatus().isReady, false, 'SDK in consumer mode is not operational inmediatelly'); + assert.equal(client.__getStatus().isReadyFromCache, false, 'SDK in consumer mode is not operational immediately'); + assert.equal(client.__getStatus().isReady, false, 'SDK in consumer mode is not operational immediately'); client.getTreatment('UT_IN_SEGMENT').then(treatment => assert.equal(treatment, 'control', 'Evaluations using pluggable storage returns a promise that resolves to control if initiated before SDK_READY')); otherClient.track('user', 'test.event', 18).then(result => assert.true(result, 'Track calls returns a promise on consumer mode, that resolves to true if the wrapper push operation success to queue the event')); @@ -206,8 +206,8 @@ tape('Browser Consumer mode with pluggable storage', function (t) { assert.equal(typeof getTreatmentResult.then, 'function', 'GetTreatment calls should always return a promise on Consumer mode.'); assert.equal(await getTreatmentResult, 'control', 'Evaluations using pluggable storage should be control if initiated before SDK_READY.'); - assert.equal(client.__getStatus().isReadyFromCache, false, 'SDK in consumer mode is not operational inmediatelly'); - assert.equal(client.__getStatus().isReady, false, 'SDK in consumer mode is not operational inmediatelly'); + assert.equal(client.__getStatus().isReadyFromCache, false, 'SDK in consumer mode is not operational immediately'); + assert.equal(client.__getStatus().isReady, false, 'SDK in consumer mode is not operational immediately'); const trackResult = otherClient.track('user', 'test.event', 18); assert.equal(typeof trackResult.then, 'function', 'Track calls should always return a promise on Consumer mode.'); @@ -338,8 +338,8 @@ tape('Browser Consumer mode with pluggable storage', function (t) { assert.equal(typeof getTreatmentResult.then, 'function', 'GetTreatment calls should always return a promise on Consumer mode.'); assert.equal(await getTreatmentResult, 'control', 'Evaluations using pluggable storage should be control if initiated before SDK_READY.'); - assert.equal(client.__getStatus().isReadyFromCache, false, 'SDK in consumer mode is not operational inmediatelly'); - assert.equal(client.__getStatus().isReady, false, 'SDK in consumer mode is not operational inmediatelly'); + assert.equal(client.__getStatus().isReadyFromCache, false, 'SDK in consumer mode is not operational immediately'); + assert.equal(client.__getStatus().isReady, false, 'SDK in consumer mode is not operational immediately'); const trackResult = otherClient.track('user', 'test.event', 18); assert.equal(typeof trackResult.then, 'function', 'Track calls should always return a promise on Consumer mode.'); diff --git a/src/__tests__/consumer/browser_consumer_partial.spec.js b/src/__tests__/consumer/browser_consumer_partial.spec.js index f2ce638..bca4eff 100644 --- a/src/__tests__/consumer/browser_consumer_partial.spec.js +++ b/src/__tests__/consumer/browser_consumer_partial.spec.js @@ -28,14 +28,11 @@ const config = { prefix: wrapperPrefix, wrapper: wrapperInstance }), - // sync: { - // impressionsMode: 'OPTIMIZED' - // }, urls: { sdk: 'https://sdk.baseurl/impressionsSuite', events: 'https://events.baseurl/impressionsSuite', telemetry: 'https://telemetry.baseurl/impressionsSuite' - } + }, }; tape('Browser Consumer Partial mode with pluggable storage', function (t) { @@ -88,8 +85,8 @@ tape('Browser Consumer 
Partial mode with pluggable storage', function (t) { /** Evaluation, track and manager methods before SDK_READY */ - assert.equal(client.__getStatus().isReadyFromCache, false, 'SDK in consumer mode is not operational inmediatelly'); - assert.equal(client.__getStatus().isReady, false, 'SDK in consumer mode is not operational inmediatelly'); + assert.equal(client.__getStatus().isReadyFromCache, false, 'SDK in consumer mode is not operational immediately'); + assert.equal(client.__getStatus().isReady, false, 'SDK in consumer mode is not operational immediately'); client.getTreatment('UT_IN_SEGMENT').then(treatment => assert.equal(treatment, 'control', 'Evaluations using pluggable storage returns a promise that resolves to control if initiated before SDK_READY')); otherClient.track('user', 'test.event', 18).then(result => assert.true(result, 'Track calls returns a promise on consumer mode, that resolves to true if the wrapper push operation success to queue the event')); @@ -250,8 +247,8 @@ tape('Browser Consumer Partial mode with pluggable storage', function (t) { assert.equal(typeof getTreatmentResult.then, 'function', 'GetTreatment calls should always return a promise on Consumer mode.'); assert.equal(await getTreatmentResult, 'control', 'Evaluations using pluggable storage should be control if initiated before SDK_READY.'); - assert.equal(client.__getStatus().isReadyFromCache, false, 'SDK in consumer mode is not operational inmediatelly'); - assert.equal(client.__getStatus().isReady, false, 'SDK in consumer mode is not operational inmediatelly'); + assert.equal(client.__getStatus().isReadyFromCache, false, 'SDK in consumer mode is not operational immediately'); + assert.equal(client.__getStatus().isReady, false, 'SDK in consumer mode is not operational immediately'); const trackResult = otherClient.track('user', 'test.event', 18); assert.equal(typeof trackResult.then, 'function', 'Track calls should always return a promise on Consumer mode.'); diff --git a/src/__tests__/destroy/browser.spec.js b/src/__tests__/destroy/browser.spec.js index dec5b1e..04f17cb 100644 --- a/src/__tests__/destroy/browser.spec.js +++ b/src/__tests__/destroy/browser.spec.js @@ -8,7 +8,7 @@ import { settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitChanges.since.-1.till.1500492097547.json'; import splitChangesMock2 from '../mocks/splitChanges.since.1500492097547.json'; -import mySegmentsMock from '../mocks/mySegmentsEmpty.json'; +import membershipsMock from '../mocks/membershipsEmpty.json'; import impressionsMock from '../mocks/impressions.json'; const settings = settingsFactory({ @@ -18,11 +18,11 @@ const settings = settingsFactory({ streamingEnabled: false }); -fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); -fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1500492097547'), { status: 200, body: splitChangesMock2 }); -fetchMock.getOnce(url(settings, '/mySegments/ut1'), { status: 200, body: mySegmentsMock }); -fetchMock.getOnce(url(settings, '/mySegments/ut2'), { status: 200, body: mySegmentsMock }); -fetchMock.getOnce(url(settings, '/mySegments/ut3'), { status: 200, body: mySegmentsMock }); +fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); +fetchMock.getOnce(url(settings, '/splitChanges?s=1.2&since=1500492097547'), { status: 200, body: splitChangesMock2 }); +fetchMock.getOnce(url(settings, '/memberships/ut1'), { status: 200, body: membershipsMock }); 
+fetchMock.getOnce(url(settings, '/memberships/ut2'), { status: 200, body: membershipsMock }); +fetchMock.getOnce(url(settings, '/memberships/ut3'), { status: 200, body: membershipsMock }); fetchMock.postOnce(url(settings, '/v1/metrics/config'), 200); // 0.1% sample rate tape('SDK destroy for BrowserJS', async function (assert) { diff --git a/src/__tests__/errorCatching/browser.spec.js b/src/__tests__/errorCatching/browser.spec.js index 6016001..768f6c8 100644 --- a/src/__tests__/errorCatching/browser.spec.js +++ b/src/__tests__/errorCatching/browser.spec.js @@ -5,7 +5,7 @@ import includes from 'lodash/includes'; import fetchMock from '../testUtils/fetchMock'; import { url } from '../testUtils'; import splitChangesMock1 from '../mocks/splitChanges.since.-1.till.1500492097547.json'; -import mySegmentsMock from '../mocks/mySegmentsEmpty.json'; +import membershipsMock from '../mocks/membershipsEmpty.json'; import splitChangesMock2 from '../mocks/splitChanges.since.1500492097547.till.1500492297547.json'; import splitChangesMock3 from '../mocks/splitChanges.since.1500492297547.json'; import { SplitFactory, InLocalStorage } from '../../'; @@ -21,14 +21,14 @@ const settings = settingsFactory({ // prepare localstorage to emit SDK_READY_FROM_CACHE localStorage.clear(); localStorage.setItem('SPLITIO.splits.till', 25); -localStorage.setItem('SPLITIO.hash', getStorageHash({ core: { authorizationKey: '' }, sync: { __splitFiltersValidation: { queryString: null }, flagSpecVersion: '1.1' } })); +localStorage.setItem('SPLITIO.hash', getStorageHash({ core: { authorizationKey: '' }, sync: { __splitFiltersValidation: { queryString: null }, flagSpecVersion: '1.2' } })); -fetchMock.get(url(settings, '/splitChanges?s=1.1&since=25'), function () { +fetchMock.get(url(settings, '/splitChanges?s=1.2&since=25'), function () { return new Promise((res) => { setTimeout(() => res({ status: 200, body: splitChangesMock1 }), 1000); }); }); -fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1500492097547'), { status: 200, body: splitChangesMock2 }); -fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1500492297547'), { status: 200, body: splitChangesMock3 }); -fetchMock.get(url(settings, '/mySegments/nico%40split.io'), { status: 200, body: mySegmentsMock }); +fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1500492097547'), { status: 200, body: splitChangesMock2 }); +fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1500492297547'), { status: 200, body: splitChangesMock3 }); +fetchMock.get(url(settings, '/memberships/nico%40split.io'), { status: 200, body: membershipsMock }); fetchMock.post('*', 200); const assertionsPlanned = 4; diff --git a/src/__tests__/gaIntegration/both-integrations.spec.js b/src/__tests__/gaIntegration/both-integrations.spec.js deleted file mode 100644 index 884a411..0000000 --- a/src/__tests__/gaIntegration/both-integrations.spec.js +++ /dev/null @@ -1,134 +0,0 @@ -import { SplitFactory, GoogleAnalyticsToSplit, SplitToGoogleAnalytics } from '../../'; -import { settingsFactory } from '../../settings'; -import { gaSpy, gaTag } from './gaTestUtils'; -import includes from 'lodash/includes'; -import { DEBUG } from '@splitsoftware/splitio-commons/src/utils/constants'; -import { url } from '../testUtils'; - -function countImpressions(parsedImpressionsBulkPayload) { - return parsedImpressionsBulkPayload - .reduce((accumulator, currentValue) => { return accumulator + currentValue.i.length; }, 0); -} - -const config = { - core: { - key: 'facundo@split.io', - trafficType: 'user', // Traffic 
type is not bound to default client in JS Browser SDK, but it is used as identity in GaToSplit integration - }, - integrations: [GoogleAnalyticsToSplit(), SplitToGoogleAnalytics()], - streamingEnabled: false, - sync: { - impressionsMode: DEBUG, - } -}; -const settings = settingsFactory(config); - -export default function (fetchMock, assert) { - - let client; - - // test default behavior of both integrations - assert.test(t => { - const customHits = [{ hitType: 'pageview' }, { hitType: 'event' }]; - - /* [trafficType, eventType] */ - const splitTrackParams = [ ['user', 'some_event'], ['user', 'other_event'], ['user', 'another_event']]; - /* [splitName] */ - const splitGetTreatmentParams = [['hierarchical_splits_test']]; - - // Generator to synchronize the call of t.end() when both impressions and events endpoints were invoked. - const finish = (function* () { - yield; - const totalHits = customHits.length + splitTrackParams.length + splitGetTreatmentParams.length; - - t.equal(window.gaSpy.getHits().length, totalHits, 'Total hits'); - setTimeout(() => { - client.destroy(); - t.end(); - }, 0); - })(); - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - // we can assert payload and ga hits, once ga is ready and after `SplitToGa.queue`, that is timeout wrapped, make to the queue stack. - window.ga(() => { - setTimeout(() => { - try { - const resp = JSON.parse(opts.body); - const numberOfSentImpressions = countImpressions(resp); - const sentImpressionHits = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-impression'); - - t.equal(numberOfSentImpressions, splitGetTreatmentParams.length, 'Number of impressions'); - t.equal(sentImpressionHits.length, splitGetTreatmentParams.length, `Number of sent impression hits must be equal to the number of impressions (${splitGetTreatmentParams.length})`); - - finish.next(); - } catch (err) { - console.error(err); - } - }); - }); - return 200; - }); - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - window.ga(() => { - setTimeout(() => { - try { - const sentEvents = JSON.parse(opts.body); - const sentEventsFromSplitToGa = sentEvents.filter(event => { - return event.properties && event.properties.eventCategory && includes(event.properties.eventCategory, 'split'); - }); - - t.equal(sentEvents.length, splitTrackParams.length + customHits.length, 'Number of sent events is equal to custom events plus hits tracked as events'); - t.equal(sentEventsFromSplitToGa.length, 0, 'GA hits comming from Split-to-GA integration must not be tracked again as Split events'); - - const sentHitsNoSplitData = window.gaSpy.getHits().filter(hit => !hit.eventCategory || !includes(hit.eventCategory, 'split')); - const sentHitsSplitEvents = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-event'); - - t.equal(sentHitsNoSplitData.length, customHits.length, 'Number of custom hits'); - t.equal(sentHitsSplitEvents.length, splitTrackParams.length, 'Number of Split event hits'); - finish.next(); - } catch (err) { - console.error(err); - } - }); - }); - return 200; - }); - - gaTag(); - - // siteSpeedSampleRate set to 0 to never send a site speed timing hit - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - window.ga('require', 'splitTracker'); - customHits.forEach(hit => { - window.ga('send', hit); - }); - - const factory = SplitFactory({ - ...config, - startup: { - eventsFirstPushWindow: 0, - }, - scheduler: { - impressionsRefreshRate: 1, - // @TODO eventsPushRate is 
too high, but using eventsQueueSize don't let us assert `filterSplitToGaHits` - eventsPushRate: 10, - // eventsQueueSize: splitTrackParams.length + customHits.length, - }, - }); - client = factory.client(); - - client.ready().then(() => { - splitTrackParams.forEach(trackParams => { - client.track.apply(client, trackParams); - }); - splitGetTreatmentParams.forEach(getTreatmentParams => { - client.getTreatment.apply(client, getTreatmentParams); - }); - }); - }); - -} diff --git a/src/__tests__/gaIntegration/browser.spec.js b/src/__tests__/gaIntegration/browser.spec.js deleted file mode 100644 index c97a292..0000000 --- a/src/__tests__/gaIntegration/browser.spec.js +++ /dev/null @@ -1,31 +0,0 @@ -import tape from 'tape-catch'; -import fetchMock from '../testUtils/fetchMock'; -import { url } from '../testUtils'; -import gaToSplitSuite from './ga-to-split.spec'; -import splitToGaSuite from './split-to-ga.spec'; -import bothIntegrationsSuite from './both-integrations.spec'; - -import { settingsFactory } from '../../settings'; - -import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; - -const settings = settingsFactory({ - core: { - key: 'facundo@split.io' - } -}); - -tape('## E2E CI Tests ##', function (assert) { - - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); - fetchMock.post(/\/v1\/metrics/, 200); // 0.1% sample rate - - /* Validate GA integration */ - assert.test('E2E / GA-to-Split', gaToSplitSuite.bind(null, fetchMock)); - assert.test('E2E / Split-to-GA', splitToGaSuite.bind(null, fetchMock)); - assert.test('E2E / Both GA integrations', bothIntegrationsSuite.bind(null, fetchMock)); - - assert.end(); -}); diff --git a/src/__tests__/gaIntegration/ga-to-split.spec.js b/src/__tests__/gaIntegration/ga-to-split.spec.js deleted file mode 100644 index e26c441..0000000 --- a/src/__tests__/gaIntegration/ga-to-split.spec.js +++ /dev/null @@ -1,455 +0,0 @@ -import sinon from 'sinon'; -import { SplitFactory, GoogleAnalyticsToSplit, DebugLogger } from '../../'; -import { settingsFactory } from '../../settings'; -import { gaSpy, gaTag, addGaTag, removeGaTag } from './gaTestUtils'; -import { url } from '../testUtils'; - -const config = { - core: { - key: 'facundo@split.io', - trafficType: 'user', // Traffic type is not bound to default client in JS Browser SDK, but it is used as identity in GaToSplit integration - }, - integrations: [GoogleAnalyticsToSplit()], - startup: { - eventsFirstPushWindow: 0.2, - }, - streamingEnabled: false, - debug: DebugLogger() -}; - -const settings = settingsFactory(config); - -export default function (fetchMock, assert) { - - let client; - - // test default behavior on default tracker - assert.test(t => { - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHits = window.gaSpy.getHits(); - - t.equal(resp.length, sentHits.length, `Number of sent hits must be equal to sent events (${resp.length})`); - t.equal(resp[0].key, settings.core.key, 'Event key is same that SDK config key'); - t.equal(resp[0].trafficTypeName, settings.core.trafficType, 'Event trafficTypeName is same that SDK config key'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - // siteSpeedSampleRate set to 0 to never send a site speed timing hit - 
window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - window.ga('require', 'splitTracker'); - window.ga('send', 'pageview'); - - const factory = SplitFactory(config); - client = factory.client(); - - }); - - // test default behavior on named tracker, tracking N events, and GA in a different global variable - assert.test(t => { - const numberOfCustomEvents = 5; - let client; - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHits = window.gaSpy.getHits('myTracker'); - - t.equal(resp.length, sentHits.length, `Number of sent hits must be equal to sent events (${resp.length})`); - t.equal(resp[0].key, settings.core.key, 'Event key is same that SDK config key'); - t.equal(resp[0].trafficTypeName, settings.core.trafficType, 'Event trafficTypeName is same that SDK config key'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag('other_location_for_ga'); - - window.other_location_for_ga('create', 'UA-00000001-1', 'example1.com', 'myTracker', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker']); - - const factory = SplitFactory(config); - client = factory.client(); - - window.other_location_for_ga('myTracker.require', 'splitTracker'); - // this second 'require' is not applied (does not overwrite previous command) - window.other_location_for_ga('myTracker.require', 'splitTracker', { mapper: function () { throw 'error'; } }); - - for (let i = 0; i < numberOfCustomEvents; i++) - window.other_location_for_ga('myTracker.send', 'pageview'); - - }); - - // test error: no TT in SDK config - assert.test(t => { - const numberOfCustomEvents = 5; - - gaTag(); - - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - const logSpy = sinon.spy(console, 'log'); - - const factory = SplitFactory({ - ...config, - core: { key: config.core.key } - }); - - window.ga('require', 'splitTracker'); - for (let i = 0; i < numberOfCustomEvents; i++) - window.ga('send', 'pageview'); - - // We must wait until ga is ready to get SplitTracker required and invoked, and to assert the test - window.ga(() => { - t.ok(logSpy.calledWith('[WARN] splitio => ga-to-split: No valid identities were provided. 
Please check that you are passing a valid list of identities or providing a traffic type at the SDK configuration.')); - t.equal(window.gaSpy.getHits().length, numberOfCustomEvents, `Number of sent hits must be equal to ${numberOfCustomEvents}`); - - logSpy.restore(); - t.end(); - }); - - factory.client().destroy(); - - }); - - // test default behavior, providing a list of identities as SDK options - assert.test(t => { - const numberOfCustomEvents = 3; - const identities = [{ key: 'user1', trafficType: 'user' }, { key: 'user2', trafficType: 'user' }]; - let client; - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHits = window.gaSpy.getHits('myTracker3'); - - t.equal(sentHits.length, numberOfCustomEvents, `Number of sent hits must be equal to sent custom events (${numberOfCustomEvents})`); - t.equal(resp.length, numberOfCustomEvents * identities.length, 'The number of sent events must be equal to the number of sent hits multiply by the number of identities'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - window.ga('create', 'UA-00000003-1', 'example3.com', 'myTracker3', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker3']); - - const factory = SplitFactory({ - ...config, - core: { key: config.core.key }, - integrations: [GoogleAnalyticsToSplit({ - identities: identities, - })], - }); - client = factory.client(); - - window.ga('myTracker3.require', 'splitTracker'); - for (let i = 0; i < numberOfCustomEvents; i++) - window.ga('myTracker3.send', 'pageview'); - - }); - - - // test default behavior in multiple trackers, providing a list of identities in plugin options for one tracker and in sdk options for another - assert.test(t => { - const identitiesPluginOpts = [{ key: 'user1', trafficType: 'user' }, { key: 'user2', trafficType: 'user' }]; - const identitiesSdkOpts = [{ key: 'user3', trafficType: 'user' }]; - const gaSendIterations = 3; - const expectedNumberOfSplitEvents = gaSendIterations * (identitiesPluginOpts.length + identitiesSdkOpts.length); - - let client; - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - t.equal(resp.length, expectedNumberOfSplitEvents, 'The number of sent Split events must be equal to the number of sent hits multiply by the number of identities'); - - const sentHitsTracker4 = window.gaSpy.getHits('myTracker4'); - const sentHitsTracker5 = window.gaSpy.getHits('myTracker5'); - - t.equal(sentHitsTracker4.length, gaSendIterations, `Number of sent hits must be equal to the times 'send' command was invoked (${gaSendIterations})`); - t.equal(sentHitsTracker5.length, gaSendIterations, `Number of sent hits must be equal to the times 'send' command was invoked (${gaSendIterations})`); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - window.ga('create', 'UA-00000004-1', 'example4.com', 'myTracker4', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000005-1', 'example5.com', 'myTracker5', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker4', 'myTracker5']); - - const factory = SplitFactory({ - ...config, - core: { key: config.core.key }, - integrations: [GoogleAnalyticsToSplit({ - identities: identitiesSdkOpts, - })], - }); - client = factory.client(); - - window.ga('myTracker4.require', 'splitTracker', { identities: identitiesPluginOpts }); - window.ga('myTracker5.require', 'splitTracker'); - - for (let i = 0; i < gaSendIterations; i++) 
{ - window.ga('myTracker4.send', 'pageview'); - window.ga('myTracker5.send', 'event', 'mycategory', 'myaction'); - } - - }); - - // test custom filter and mapper in multiple trackers, passed as plugin options for one tracker and as sdk options for another - assert.test(t => { - const gaSendIterations = 3; - const prefixPluginOpts = 'plugin'; - const prefixSdkOpts = 'sdk'; - - let client; - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - t.equal(resp.length, gaSendIterations * 2, 'The number of sent Split events must be equal to the number of no filtered sent hits'); - t.equal(resp.filter(event => event.eventTypeId === prefixSdkOpts + '.mapperSdkOpts').length, gaSendIterations, 'Custom Split events'); - t.equal(resp.filter(event => event.eventTypeId === prefixPluginOpts + '.mapperPluginOpts').length, gaSendIterations, 'Custom Split events'); - - const sentHitsTracker4 = window.gaSpy.getHits('myTracker4'); - const sentHitsTracker5 = window.gaSpy.getHits('myTracker5'); - - t.equal(sentHitsTracker4.length, gaSendIterations * 2, 'Number of sent hits must be equal to the times `send` command was invoked'); - t.equal(sentHitsTracker5.length, gaSendIterations * 2, 'Number of sent hits must be equal to the times `send` command was invoked'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - window.ga('create', 'UA-00000004-1', 'example4.com', 'myTracker4', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000005-1', 'example5.com', 'myTracker5', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker4', 'myTracker5']); - - const factory = SplitFactory({ - ...config, - integrations: [GoogleAnalyticsToSplit({ - filter: model => model.get('hitType') === 'pageview', // accepts only pageviews - mapper: () => ({ eventTypeId: 'mapperSdkOpts' }), // return a fixed event instance - prefix: prefixSdkOpts, - })], - }); - client = factory.client(); - - window.ga('myTracker4.require', 'splitTracker', { - filter: model => model.get('hitType') === 'event', // accepts only events - mapper: (model, defaultEvent) => ({ ...defaultEvent, eventTypeId: 'mapperPluginOpts' }), // updates the eventTypeId of default event - prefix: prefixPluginOpts, - }); - window.ga('myTracker5.require', 'splitTracker'); - - for (let i = 0; i < gaSendIterations; i++) { - window.ga('myTracker4.send', 'pageview'); - window.ga('myTracker5.send', 'pageview'); - window.ga('myTracker4.send', 'event', 'mycategory', 'myaction'); - window.ga('myTracker5.send', 'event', 'mycategory', 'myaction'); - } - - }); - - // exception in custom mapper or invalid mapper result must not block sending hits - assert.test(t => { - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - t.equal(resp.length, 1, 'only a custom event is sent. 
no events associated to ga hit'); - return 200; - }); - - gaTag(); - - // siteSpeedSampleRate set to 0 to never send a site speed timing hit - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000001-1', 'example1.com', 'myTracker', { siteSpeedSampleRate: 0 }); - - gaSpy(['t0', 'myTracker']); - - window.ga('require', 'splitTracker', { mapper: function () { throw 'error'; } }); - // this second 'require' is not applied (it does not overwrite previous command) - window.ga('require', 'splitTracker'); - - window.ga('myTracker.require', 'splitTracker', { mapper: function () { return { value: 'invalid value' }; } }); - - const logSpy = sinon.spy(console, 'log'); - - window.ga('send', 'pageview'); - window.ga('myTracker.send', 'pageview'); - - const factory = SplitFactory(config); - client = factory.client(); - client.track('user', 'some_event'); - - setTimeout(() => { - const sentHitsT0 = window.gaSpy.getHits('t0'); - const sentHitsMyTracker = window.gaSpy.getHits('myTracker'); - t.equal(sentHitsT0.length, 1, 'Hits must be sent even if a custom mapper throw an exception'); - t.equal(sentHitsMyTracker.length, 1, 'Hits must be sent even if a custom mapper return an invalid event instance'); - t.ok(logSpy.calledWith('[ERROR] splitio => ga-to-split:mapper: value must be a finite number.')); - client.destroy(); - logSpy.restore(); - t.end(); - }); - - }); - - // test default behavior on default tracker: Split ready before GA init, and keep sending hits after Split destroyed - assert.test(t => { - const hits = [{ hitType: 'pageview' }, { hitType: 'event' }]; - const hitsAfterDestroyed = [{ hitType: 'screenview' }]; - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHits = window.gaSpy.getHits(); - - t.equal(resp.length, sentHits.length, `Number of sent hits must be equal to sent events (${hits.length})`); - t.equal(resp.length, hits.length, `Number of sent hits must be equal to sent events (${hits.length})`); - - setTimeout(() => { - client.destroy().then(() => { - hitsAfterDestroyed.forEach(hit => window.ga('send', hit)); - setTimeout(() => { - t.equal(sentHits.length, hits.length + hitsAfterDestroyed.length, 'sending hits must not be bloqued if Split SDK is destroyed'); - t.end(); - }); - }); - }); - return 200; - }); - - removeGaTag(); - - const factory = SplitFactory({ - ...config, - startup: { - eventsFirstPushWindow: 1000, - }, - scheduler: { - eventsQueueSize: hits.length, - } - }); - client = factory.client(); - - client.ready().then(() => { - addGaTag(); - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - gaSpy(); - - window.ga('require', 'splitTracker'); - hits.forEach(hit => window.ga('send', hit)); - - }); - }); - - // test `hits` flag - assert.test(t => { - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHits = window.gaSpy.getHits(); - - t.equal(resp.filter(event => event.eventTypeId === 'ga.pageview').length, 0, 'No events associated to GA hits must be sent'); - t.equal(resp.filter(event => event.eventTypeId === 'some_event').length, 1, 'Tracked events must be sent to Split'); - t.equal(sentHits.length, 1, 'Hits must be sent to GA'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - // siteSpeedSampleRate set to 0 to never send a site speed timing hit - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - 
- gaSpy(); - - window.ga('require', 'splitTracker', { hits: false }); - window.ga('send', 'pageview'); - - const factory = SplitFactory(config); - client = factory.client(); - client.track('user', 'some_event'); - - }); - - // test 'autoRequire' script placed right after GA script tag. - // We get same result if it is placed right before, and also applies for Universal Analytics configured with GTM and gtag.js tags. - // If it is executed asynchronously, trackers creation might be missed. - assert.test(t => { - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - const resp = JSON.parse(opts.body); - const sentHitsTracker1 = window.gaSpy.getHits('tracker1'); - const sentHitsTracker2 = window.gaSpy.getHits('tracker2'); - - t.equal(resp.length, sentHitsTracker1.length + sentHitsTracker2.length, 'All hits of all trackers are captured as Split events'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - return 200; - }); - - gaTag(); - - // Run autoRequire iife. `require` cannot be used because it is not polyfilled by "rollup-plugin-node-polyfills" - // eslint-disable-next-line - (function(n,t,e){n[e]=n[e]||t;n[t]=n[t]||function(){n[t].q.push(arguments)};n[t].q=n[t].q||[];var r={};function i(n){return typeof n==="object"&&typeof n.name==="string"&&n.name}function o(e){if(e&&e[0]==="create"){var o=i(e[1])||i(e[2])||i(e[3])||(typeof e[3]==="string"?e[3]:undefined);if(!r[o]){r[o]=true;n[t]((o?o+".":"")+"require","splitTracker")}}}n[t].q.forEach(o);var u=n[t].q.push;n[t].q.push=function(n){var t=u.apply(this,arguments);o(n);return t}})(window,"ga","GoogleAnalyticsObject"); - - window.ga('create', 'UA-00000000-1', { name: 'tracker1', cookieDomain: 'auto', siteSpeedSampleRate: 0 }); - - gaSpy(['tracker1']); - - window.ga('tracker1.send', 'event', 'mycategory', 'myaction1'); // Captured - - const factory = SplitFactory({ - ...config, - integrations: [GoogleAnalyticsToSplit({ - autoRequire: true - })], - }); - - window.ga('tracker1.send', 'event', 'mycategory', 'myaction2'); // Captured - window.ga('create', 'UA-00000001-1', 'auto', 'tracker2', { siteSpeedSampleRate: 0 }); // New tracker - gaSpy(['tracker2'], false); - window.ga('tracker2.send', 'event', 'mycategory', 'myaction3'); // Captured - - client = factory.client(); - - }); - -} diff --git a/src/__tests__/gaIntegration/gaTestUtils.js b/src/__tests__/gaIntegration/gaTestUtils.js deleted file mode 100644 index e0aac43..0000000 --- a/src/__tests__/gaIntegration/gaTestUtils.js +++ /dev/null @@ -1,94 +0,0 @@ -export const DEFAULT_TRACKER = 't0'; - -const HIT_FIELDS = ['hitType', 'nonInteraction']; -const EVENT_FIELDS = ['eventCategory', 'eventAction', 'eventLabel', 'eventValue']; -const FIELDS = [...HIT_FIELDS, ...EVENT_FIELDS]; // List of hit fields to spy, which are the ones set by the default SplitToGa mapper. - -let hits = {}; - -/** - * Spy ga hits per tracker. - * - * @param {string[]} trackerNames names of the trackers to spy. If not provided, it spies the default tracker. i.e., `gaSpy()` is equivalent to `gaSpy(['t0'])`. - * @param {boolean} resetSpy true to reset the list of captured hits. 
- * - * @see {@link https://developers.google.com/analytics/devguides/collection/analyticsjs/field-reference} - */ -export function gaSpy(trackerNames = [DEFAULT_TRACKER], resetSpy = true) { - - if (resetSpy) hits = {}; - - // access ga via its gaAlias, accounting for the possibility that the global command queue - // has been renamed or not yet defined (analytics.js mutates window[gaAlias] reference) - const gaAlias = window['GoogleAnalyticsObject'] || 'ga'; - - if (typeof window[gaAlias] === 'function') { - window[gaAlias](function () { - // We try-catch the following code, since errors are catched by `ga` and thus cannot be traced for debugging. - try { - trackerNames.forEach(trackerName => { - const trackerToSniff = window[gaAlias].getByName(trackerName); - hits[trackerName] = []; - const originalSendHitTask = trackerToSniff.get('sendHitTask'); - trackerToSniff.set('sendHitTask', function (model) { - originalSendHitTask(model); - const hit = {}; - FIELDS.forEach(fieldName => { - hit[fieldName] = model.get(fieldName); - }); - hits[trackerName].push(hit); - }); - }); - } catch (err) { - console.log(err); - } - }); - } else { - console.error('GA command queue was not found'); - } - - window.gaSpy = { - // getHits may return `undefined` if `ga` is not ready or `trackerName` is not in the list of `trackerNames` - getHits: function (trackerName = DEFAULT_TRACKER) { - const trackerHits = hits[trackerName]; - return trackerHits; - } - }; - - return window.gaSpy; -} - -/** - * Add Google Analytics tag, removing previous one if exists. - * - * @see {@link https://developers.google.com/analytics/devguides/collection/analyticsjs#the_google_analytics_tag} - */ -export function gaTag(gaAlias = 'ga') { - removeGaTag(gaAlias); - addGaTag(gaAlias); -} - -/** - * Add Google Analytics tag. - */ -export function addGaTag(gaAlias = 'ga') { - (function (i, s, o, g, r, a, m) { - i['GoogleAnalyticsObject'] = r; - i[r] = i[r] || function () { - (i[r].q = i[r].q || []).push(arguments); - }, - i[r].l = 1 * new Date(); - a = s.createElement(o), - m = s.getElementsByTagName(o)[0]; - a.async = 1; - a.src = g; - m.parentNode.insertBefore(a, m); - })(window, document, 'script', 'https://www.google-analytics.com/analytics.js', gaAlias); -} - -/** - * Remove Google Analytics command queue. 
- */ -export function removeGaTag(gaAlias = 'ga') { - window[window['GoogleAnalyticsObject'] || gaAlias] = undefined; -} diff --git a/src/__tests__/gaIntegration/split-to-ga.spec.js b/src/__tests__/gaIntegration/split-to-ga.spec.js deleted file mode 100644 index e6b43d5..0000000 --- a/src/__tests__/gaIntegration/split-to-ga.spec.js +++ /dev/null @@ -1,427 +0,0 @@ -import sinon from 'sinon'; -import { SplitFactory, SplitToGoogleAnalytics, DebugLogger } from '../../'; -import { settingsFactory } from '../../settings'; -import { gaSpy, gaTag, removeGaTag, addGaTag } from './gaTestUtils'; -import { SPLIT_IMPRESSION, SPLIT_EVENT, DEBUG } from '@splitsoftware/splitio-commons/src/utils/constants'; -import { url } from '../testUtils'; - -function countImpressions(parsedImpressionsBulkPayload) { - return parsedImpressionsBulkPayload - .reduce((accumulator, currentValue) => { return accumulator + currentValue.i.length; }, 0); -} - -const config = { - core: { - authorizationKey: '', - key: 'facundo@split.io', - }, - integrations: [SplitToGoogleAnalytics()], - scheduler: { - impressionsRefreshRate: 0.2, - eventsQueueSize: 1, - }, - streamingEnabled: false, - sync: { - impressionsMode: DEBUG, - } -}; - -const settings = settingsFactory(config); - -export default function (fetchMock, assert) { - - // test default behavior - assert.test(t => { - - let client; - - // Generator to synchronize the call of t.end() when both impressions and events endpoints were invoked. - const finish = (function* () { - yield; - t.equal(window.gaSpy.getHits().length, 3, 'Total hits are 3: pageview, split event and impression'); - setTimeout(() => { - client.destroy(); - t.end(); - }); - })(); - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - // we can assert payload and ga hits, once ga is ready and after `SplitToGa.queue`, that is timeout wrapped, make to the queue stack. 
- setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentImpressionHits = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-impression'); - - t.equal(sentImpressions, 1, 'Number of impressions'); - t.equal(sentImpressions, sentImpressionHits.length, `Number of sent impression hits must be equal to the number of impressions (${sentImpressions})`); - - finish.next(); - }); - }); - return 200; - }); - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentEvents = resp.length; - const sentEventHits = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-event'); - - t.equal(sentEvents, 1, 'Number of events'); - t.equal(sentEvents, sentEventHits.length, `Number of sent event hits must be equal to sent events: (${sentEvents})`); - - finish.next(); - }); - }, 10); - return 200; - }); - - gaTag(); - - // siteSpeedSampleRate set to 0 to never send a site speed timing hit - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - window.ga('send', 'pageview'); - - const factory = SplitFactory(config); - client = factory.client(); - client.ready().then(() => { - client.track('user', 'some_event'); - client.getTreatment('hierarchical_splits_test'); - }); - - }); - - // test default behavior in multiple trackers, with multiple impressions, and GA in a different global variable - assert.test(t => { - - let client; - const numOfEvaluations = 4; - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - setTimeout(() => { - window.other_location_for_ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentHitsTracker1 = window.gaSpy.getHits('myTracker1'); - const sentHitsTracker2 = window.gaSpy.getHits('myTracker2'); - - t.equal(sentImpressions, numOfEvaluations, 'Number of impressions equals the number of evaluations'); - t.equal(sentImpressions, sentHitsTracker1.length, 'Number of sent hits must be equal to the number of impressions'); - t.equal(sentImpressions, sentHitsTracker2.length, 'Number of sent hits must be equal to the number of impressions'); - - setTimeout(() => { - client.destroy(); - t.end(); - }); - }); - }); - return 200; - }); - - gaTag('other_location_for_ga'); - - window.other_location_for_ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - window.other_location_for_ga('create', 'UA-00000001-1', 'example1.com', 'myTracker1', { siteSpeedSampleRate: 0 }); - window.other_location_for_ga('create', 'UA-00000002-1', 'example2.com', 'myTracker2', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker1', 'myTracker2']); - - const factory = SplitFactory({ - ...config, - core: { - ...config.core, - authorizationKey: '', - }, - integrations: [SplitToGoogleAnalytics({ - trackerNames: ['myTracker1'], - }), SplitToGoogleAnalytics({ - trackerNames: ['myTracker2'], - })], - }); - client = factory.client(); - client.ready().then(() => { - for (let i = 0; i < numOfEvaluations; i++) - client.getTreatment('split_with_config'); - }); - - }); - - // test several SplitToGa integration items, with custom filter and mapper - assert.test(t => { - - let client; - const numOfEvaluations = 4; - const numOfEvents = 3; - - // Generator to synchronize the call of t.end() when both impressions and events endpoints were invoked. 
- const finish = (function* () { - yield; - setTimeout(() => { - client.destroy(); - t.end(); - }); - })(); - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentImpressionHitsTracker3 = window.gaSpy.getHits('myTracker3').filter(hit => hit.eventCategory === 'split-impression'); - const sentImpressionHitsTracker4 = window.gaSpy.getHits('myTracker4').filter(hit => hit.eventCategory === 'split-impression'); - - t.equal(sentImpressionHitsTracker3.length, sentImpressions, 'For tracker3, no impressions are filtered'); - t.equal(sentImpressionHitsTracker4.length, 0, 'For tracker4, all impressions are filtered'); - - finish.next(); - }); - }); - return 200; - }); - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentEvents = resp.length; - const sentEventHitsTracker3 = window.gaSpy.getHits('myTracker3').filter(hit => hit.eventCategory === 'mycategory'); - const sentEventHitsTracker4 = window.gaSpy.getHits('myTracker4').filter(hit => hit.eventCategory === 'mycategory'); - - t.equal(sentEventHitsTracker3.length, 0, 'For tracker3, all events are filtered'); - t.equal(sentEventHitsTracker4.length, sentEvents, 'For tracker4, no events are filtered'); - - finish.next(); - }); - }, 10); - return 200; - }); - - gaTag(); - - window.ga('create', 'UA-00000003-1', 'example3.com', 'myTracker3', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000004-1', 'example4.com', 'myTracker4', { siteSpeedSampleRate: 0 }); - - gaSpy(['myTracker3', 'myTracker4']); - - const onlyImpressionsFilter = ({ type }) => type === SPLIT_IMPRESSION; - const onlyEventsMapper = function ({ payload, type }) { - return type === SPLIT_EVENT ? 
- { hitType: 'event', eventCategory: 'mycategory', eventAction: payload.eventTypeId } : - undefined; - }; - const factory = SplitFactory({ - ...config, - core: { - ...config.core, - authorizationKey: '', - }, - scheduler: { - impressionsRefreshRate: 0.2, - eventsQueueSize: numOfEvents, - }, - integrations: [SplitToGoogleAnalytics({ - trackerNames: ['myTracker3'], - filter: onlyImpressionsFilter, - }), SplitToGoogleAnalytics({ - trackerNames: ['myTracker4'], - mapper: onlyEventsMapper, - })], - }); - client = factory.client(); - client.ready().then(() => { - for (let i = 0; i < numOfEvaluations; i++) { - client.getTreatment('split_with_config'); - } - for (let i = 0; i < numOfEvents; i++) { - client.track('user', 'eventType'); - } - }); - - }); - - // exception in custom mapper or invalid mapper result must not send a hit - assert.test(t => { - - const logSpy = sinon.spy(console, 'log'); - const error = 'some error'; - let client; - const numOfEvaluations = 1; - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentHitsDefault = window.gaSpy.getHits(); - const sentHitsTracker1 = window.gaSpy.getHits('myTracker1'); - const sentHitsTracker2 = window.gaSpy.getHits('myTracker2'); - - t.equal(sentImpressions, numOfEvaluations, 'Number of impressions equals the number of evaluations'); - t.equal(sentHitsDefault.length, 0, 'No hits sent if custom mapper throws error'); - t.equal(sentHitsTracker1.length, 0, 'No hits sent if custom mapper returns invalid result'); - t.equal(sentHitsTracker2.length, numOfEvaluations, 'Number of sent hits must be equal to the number of impressions'); - - setTimeout(() => { - t.ok(logSpy.calledWith(`[WARN] splitio => split-to-ga: queue method threw: ${error}. No hit was sent.`)); - t.ok(logSpy.calledWith('[WARN] splitio => split-to-ga: your custom mapper returned an invalid FieldsObject instance. 
It must be an object with at least a `hitType` field.')); - client.destroy(); - logSpy.restore(); - t.end(); - }); - }); - }); - return 200; - }); - - gaTag(); - - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000001-1', 'example1.com', 'myTracker1', { siteSpeedSampleRate: 0 }); - window.ga('create', 'UA-00000002-1', 'example2.com', 'myTracker2', { siteSpeedSampleRate: 0 }); - - gaSpy(['t0', 'myTracker1', 'myTracker2']); - - const factory = SplitFactory({ - ...config, - debug: DebugLogger(), - integrations: [SplitToGoogleAnalytics({ - mapper: function () { throw error; }, - }), SplitToGoogleAnalytics({ - trackerNames: ['myTracker1'], - mapper: function () { return {}; }, - }), SplitToGoogleAnalytics({ - trackerNames: ['myTracker2'], - mapper: function () { return { hitType: 'event', eventCategory: 'my-split-impression', eventAction: 'some-action' }; }, - })], - }); - client = factory.client(); - client.ready().then(() => { - for (let i = 0; i < numOfEvaluations; i++) - client.getTreatment('split_with_config'); - }); - - }); - - // Split created before GA initialized - assert.test(t => { - - const logSpy = sinon.spy(console, 'log'); - let client; - const numOfEvaluations = 1; - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentHitsDefault = window.gaSpy.getHits(); - - t.equal(sentImpressions, numOfEvaluations, 'Number of impressions equals the number of evaluations'); - t.equal(sentHitsDefault.length, numOfEvaluations, 'Hits sent if ga initialized before Split evaluation (client.getTreatment***)'); - - setTimeout(() => { - client.destroy().then(() => { - logSpy.restore(); - t.end(); - }); - }); - }); - }); - return 200; - }); - - removeGaTag(); - - const factory = SplitFactory({ - ...config, - debug: DebugLogger(), - }); - t.ok(logSpy.calledWith('[WARN] splitio => split-to-ga: `ga` command queue not found. No hits will be sent until it is available.'), 'warning GA not found'); - - client = factory.client(); - client.ready().then(() => { - for (let i = 0; i < numOfEvaluations; i++) - client.getTreatment('split_with_config'); - }); - - addGaTag(); - - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - - gaSpy(); - - }); - - // test `events` and `impressions` flags - assert.test(t => { - - let client; - - // Generator to synchronize the call of t.end() when both impressions and events endpoints were invoked. - const finish = (function* () { - yield; - t.equal(window.gaSpy.getHits().length, 1, 'Total hits are 1: pageview'); - setTimeout(() => { - client.destroy(); - t.end(); - }); - })(); - - fetchMock.postOnce(url(settings, '/testImpressions/bulk'), (url, opts) => { - // we can assert payload and ga hits, once ga is ready and after `SplitToGa.queue`, that is timeout wrapped, make to the queue stack. 
- setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentImpressions = countImpressions(resp); - const sentImpressionHits = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-impression'); - - t.equal(sentImpressions, 1, 'Number of impressions'); - t.equal(sentImpressionHits.length, 0, 'No hits associated to Split impressions must be sent'); - - finish.next(); - }); - }); - return 200; - }); - - fetchMock.postOnce(url(settings, '/events/bulk'), (url, opts) => { - setTimeout(() => { - window.ga(() => { - const resp = JSON.parse(opts.body); - const sentEvents = resp.length; - const sentEventHits = window.gaSpy.getHits().filter(hit => hit.eventCategory === 'split-event'); - - t.equal(sentEvents, 1, 'Number of events'); - t.equal(sentEventHits.length, 0, 'No hits associated to Split events must be sent'); - - finish.next(); - }); - }); - return 200; - }); - - gaTag(); - window.ga('create', 'UA-00000000-1', 'auto', { siteSpeedSampleRate: 0 }); - gaSpy(); - window.ga('send', 'pageview'); - - const factory = SplitFactory({ - ...config, - integrations: [SplitToGoogleAnalytics({ - events: false, - impressions: false, - })] - }); - client = factory.client(); - client.ready().then(() => { - client.track('user', 'some_event'); - client.getTreatment('hierarchical_splits_test'); - }); - - }); -} diff --git a/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.json b/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.json index 3930e1d..d7b68e9 100644 --- a/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.json +++ b/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.json @@ -1,5 +1,5 @@ { "pushEnabled": true, - "token": "eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X05UY3dPVGMzTURReF9teVNlZ21lbnRzXCI6W1wic3Vic2NyaWJlXCJdLFwiTnpNMk1ESTVNemMwX05ERXpNalExTXpBME53PT1fc3BsaXRzXCI6W1wic3Vic2NyaWJlXCJdLFwiY29udHJvbF9wcmlcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXSxcImNvbnRyb2xfc2VjXCI6W1wic3Vic2NyaWJlXCIsXCJjaGFubmVsLW1ldGFkYXRhOnB1Ymxpc2hlcnNcIl19IiwieC1hYmx5LWNsaWVudElkIjoiY2xpZW50SWQiLCJleHAiOjE1ODY5MTU3NjksImlhdCI6MTU4NjkxMjE2OX0.iq6k65WcCx8s-yqDj4FpIOUEP6-G3VdB-NLhR0fXQUw", + "token": "eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US5MZzMtZWciLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X2NvbnRyb2xcIjpbXCJzdWJzY3JpYmVcIl0sXCJOek0yTURJNU16YzBfTkRFek1qUTFNekEwTnc9PV9mbGFnc1wiOltcInN1YnNjcmliZVwiXSxcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X21lbWJlcnNoaXBzXCI6W1wic3Vic2NyaWJlXCJdLFwiY29udHJvbF9wcmlcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXSxcImNvbnRyb2xfc2VjXCI6W1wic3Vic2NyaWJlXCIsXCJjaGFubmVsLW1ldGFkYXRhOnB1Ymxpc2hlcnNcIl19IiwieC1hYmx5LWNsaWVudElkIjoiY2xpZW50SWQiLCJleHAiOjE3MjUzODM2NDEsImlhdCI6MTcyNTM4MDA0MX0.Qqyixo2ZG-2tAkxjad7O-iphK3DVK5_xICypbIDh3IM", "connDelay": 0 } \ No newline at end of file diff --git a/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json b/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json index 8ed677b..e266b59 100644 --- a/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json +++ b/src/__tests__/mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json @@ -1,5 +1,5 @@ { "pushEnabled": true, - "token": 
"eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X01qRTBNVGt4T1RVMk1nPT1fbXlTZWdtZW50c1wiOltcInN1YnNjcmliZVwiXSxcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X05UY3dPVGMzTURReF9teVNlZ21lbnRzXCI6W1wic3Vic2NyaWJlXCJdLFwiTnpNMk1ESTVNemMwX05ERXpNalExTXpBME53PT1fc3BsaXRzXCI6W1wic3Vic2NyaWJlXCJdLFwiY29udHJvbF9wcmlcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXSxcImNvbnRyb2xfc2VjXCI6W1wic3Vic2NyaWJlXCIsXCJjaGFubmVsLW1ldGFkYXRhOnB1Ymxpc2hlcnNcIl19IiwieC1hYmx5LWNsaWVudElkIjoiY2xpZW50SWQiLCJleHAiOjE1ODY5MTYyMDAsImlhdCI6MTU4NjkxMjYwMH0.iq6k65WcCx8s-yqDj4FpIOUEP6-G3VdB-NLhR0fXQUw", + "token": "eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US5MZzMtZWciLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X2NvbnRyb2xcIjpbXCJzdWJzY3JpYmVcIl0sXCJOek0yTURJNU16YzBfTkRFek1qUTFNekEwTnc9PV9mbGFnc1wiOltcInN1YnNjcmliZVwiXSxcIk56TTJNREk1TXpjMF9OREV6TWpRMU16QTBOdz09X21lbWJlcnNoaXBzXCI6W1wic3Vic2NyaWJlXCJdLFwiY29udHJvbF9wcmlcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXSxcImNvbnRyb2xfc2VjXCI6W1wic3Vic2NyaWJlXCIsXCJjaGFubmVsLW1ldGFkYXRhOnB1Ymxpc2hlcnNcIl19IiwieC1hYmx5LWNsaWVudElkIjoiY2xpZW50SWQiLCJleHAiOjE3MjUzODk4MjgsImlhdCI6MTcyNTM4NjIyOH0.KaEa6CjNM489dLgHxDbL8RP1DUFCMtkGLI6W3JZcTTs", "connDelay": 0 } \ No newline at end of file diff --git a/src/__tests__/mocks/memberships.emmanuel@split.io.json b/src/__tests__/mocks/memberships.emmanuel@split.io.json new file mode 100644 index 0000000..aec8b87 --- /dev/null +++ b/src/__tests__/mocks/memberships.emmanuel@split.io.json @@ -0,0 +1,15 @@ +{ + "ms": { + "k": [ + { + "n": "developers" + }, + { + "n": "engineers" + }, + { + "n": "employees" + } + ] + } +} \ No newline at end of file diff --git a/src/__tests__/mocks/memberships.facundo@split.io.json b/src/__tests__/mocks/memberships.facundo@split.io.json new file mode 100644 index 0000000..d64b10e --- /dev/null +++ b/src/__tests__/mocks/memberships.facundo@split.io.json @@ -0,0 +1,9 @@ +{ + "ms": { + "k": [ + { + "n": "splitters" + } + ] + } +} \ No newline at end of file diff --git a/src/__tests__/mocks/memberships.marcio@split.io.json b/src/__tests__/mocks/memberships.marcio@split.io.json new file mode 100644 index 0000000..476ddc3 --- /dev/null +++ b/src/__tests__/mocks/memberships.marcio@split.io.json @@ -0,0 +1,3 @@ +{ + "ms": {} +} diff --git a/src/__tests__/mocks/memberships.nicolas@split.io.json b/src/__tests__/mocks/memberships.nicolas@split.io.json new file mode 100644 index 0000000..aec8b87 --- /dev/null +++ b/src/__tests__/mocks/memberships.nicolas@split.io.json @@ -0,0 +1,15 @@ +{ + "ms": { + "k": [ + { + "n": "developers" + }, + { + "n": "engineers" + }, + { + "n": "employees" + } + ] + } +} \ No newline at end of file diff --git a/src/__tests__/mocks/memberships.nicolas@split.io.mock2.json b/src/__tests__/mocks/memberships.nicolas@split.io.mock2.json new file mode 100644 index 0000000..9de7489 --- /dev/null +++ b/src/__tests__/mocks/memberships.nicolas@split.io.mock2.json @@ -0,0 +1,15 @@ +{ + "ms": { + "k": [ + { + "n": "developers" + }, + { + "n": "engineers" + }, + { + "n": "splitters" + } + ] + } +} \ No newline at end of file diff --git a/src/__tests__/mocks/membershipsEmpty.json b/src/__tests__/mocks/membershipsEmpty.json new file mode 100644 index 0000000..7473a4b --- /dev/null +++ b/src/__tests__/mocks/membershipsEmpty.json @@ -0,0 +1,5 @@ +{ + "ms": { + "k": [] + } +} \ No newline at end of file diff --git 
a/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 0000000..baa24fe --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_LS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"n\\\":[\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.DELAY.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.DELAY.1457552650000.json new file mode 100644 index 0000000..99c839c --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_LS_UPDATE.UNBOUNDED.DELAY.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_LS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"n\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":1,\\\"s\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json new file mode 100644 index 0000000..59da6ea --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.GZIP.1457552651000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552651000,\\\"n\\\":[],\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\",\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.ZLIB.1457552651000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.ZLIB.1457552651000.json new file mode 100644 index 0000000..fe4ff5a --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.BOUNDED.ZLIB.1457552651000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552651000,\\\"n\\\":[],\\\"c\\\": 2,\\\"u\\\": 1,\\\"d\\\":\\\"eJxiGAX4AMdAO2AU4AeMA+2AAQACA+0AuoORGMvDBDANtAPoDBQG2gGDGQz16pRloB0wCkbBKBgFo4As0EBYyZCqoojwDwEACAAA//+W/QFR\\\",\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json new file mode 100644 index 0000000..3205225 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.KEYLIST.GZIP.1457552652000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552652000,\\\"n\\\":[\\\"splitters\\\"],\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\",\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json new file mode 100644 index 0000000..23e5827 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.SEGMENT_REMOVAL.1457552653000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + 
"data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552653000,\\\"n\\\":[\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\",\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552640000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552640000.json new file mode 100644 index 0000000..dcce251 --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552640000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552640000,\\\"u\\\": 0,\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json new file mode 100644 index 0000000..a69df9d --- /dev/null +++ b/src/__tests__/mocks/message.MEMBERSHIPS_MS_UPDATE.UNBOUNDED.1457552650000.json @@ -0,0 +1,4 @@ +{ + "type": "message", + "data": "{\"data\":\"{\\\"type\\\":\\\"MEMBERSHIPS_MS_UPDATE\\\",\\\"cn\\\":1457552650000,\\\"n\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"h\\\":0}\"}" +} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552645000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552645000.json deleted file mode 100644 index 415bc2b..0000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552645000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"id\":\"mc4i3NENoA:0:0\",\"clientId\":\"NDEzMTY5Mzg0MA==:MTM2ODE2NDMxNA==\",\"timestamp\":1457552645900,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_MjE0MTkxOTU2Mg==_mySegments\",\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552645000,\\\"segmentList\\\":[\\\"employees\\\"],\\\"includesPayload\\\":true}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552646000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552646000.json deleted file mode 100644 index 18daa83..0000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.marcio@split.io.1457552646000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"id\":\"mc4i3NENoA:0:0\",\"clientId\":\"NDEzMTY5Mzg0MA==:MTM2ODE2NDMxNA==\",\"timestamp\":1457552646900,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_MjE0MTkxOTU2Mg==_mySegments\",\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552646000,\\\"includesPayload\\\":true}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json deleted file mode 100644 index 951d8a3..0000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552640000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"id\":\"mc4i3NENoA:0:0\",\"clientId\":\"NDEzMTY5Mzg0MA==:MTM2ODE2NDMxNA==\",\"timestamp\":1457552640900,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_NTcwOTc3MDQx_mySegments\",\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552640000,\\\"includesPayload\\\":false}\"}" -} \ No newline at end of file diff --git 
a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552641000.json b/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552641000.json deleted file mode 100644 index 1c65996..0000000 --- a/src/__tests__/mocks/message.MY_SEGMENTS_UPDATE.nicolas@split.io.1457552641000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"id\":\"mc4i3NENoA:0:0\",\"clientId\":\"NDEzMTY5Mzg0MA==:MTM2ODE2NDMxNA==\",\"timestamp\":1457552641900,\"encoding\":\"json\",\"channel\":\"NzM2MDI5Mzc0_NDEzMjQ1MzA0Nw==_NTcwOTc3MDQx_mySegments\",\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552641000,\\\"includesPayload\\\":false}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json b/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json deleted file mode 100644 index 97c2a73..0000000 --- a/src/__tests__/mocks/message.V2.BOUNDED.GZIP.1457552651000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552651000,\\\"segmentName\\\":\\\"\\\",\\\"c\\\": 1,\\\"u\\\": 1,\\\"d\\\":\\\"H4sIAAAAAAAA/2JABxzYeIxQLguYFIBLN8Bl4EABjc+EzOnAsA4QAAD//8YBvWeAAAAA\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.BOUNDED.ZLIB.1457552651000.json b/src/__tests__/mocks/message.V2.BOUNDED.ZLIB.1457552651000.json deleted file mode 100644 index 44c0089..0000000 --- a/src/__tests__/mocks/message.V2.BOUNDED.ZLIB.1457552651000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552651000,\\\"segmentName\\\":\\\"\\\",\\\"c\\\": 2,\\\"u\\\": 1,\\\"d\\\":\\\"eJxiGAX4AMdAO2AU4AeMA+2AAQACA+0AuoORGMvDBDANtAPoDBQG2gGDGQz16pRloB0wCkbBKBgFo4As0EBYyZCqoojwDwEACAAA//+W/QFR\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json b/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json deleted file mode 100644 index c44ee3a..0000000 --- a/src/__tests__/mocks/message.V2.KEYLIST.GZIP.1457552652000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552652000,\\\"segmentName\\\":\\\"splitters\\\",\\\"c\\\": 1,\\\"u\\\": 2,\\\"d\\\":\\\"H4sIAAAAAAAA/wTAsRHDUAgD0F2ofwEIkPAqPhdZIW0uu/v97GPXHU004ULuMGrYR6XUbIjlXULPPse+dt1yhJibBODjrTmj3GJ4emduuDDP/w0AAP//18WLsl0AAAA=\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json b/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json deleted file mode 100644 index aaf1a3f..0000000 --- a/src/__tests__/mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": "{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552653000,\\\"segmentName\\\":\\\"splitters\\\",\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json b/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json deleted file mode 100644 index a7a2e79..0000000 --- a/src/__tests__/mocks/message.V2.UNBOUNDED.1457552650000.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "message", - "data": 
"{\"data\":\"{\\\"type\\\":\\\"MY_SEGMENTS_UPDATE_V2\\\",\\\"changeNumber\\\":1457552650000,\\\"segmentName\\\":\\\"\\\",\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\"}\"}" -} \ No newline at end of file diff --git a/src/__tests__/mocks/mySegmentsEmpty.json b/src/__tests__/mocks/mySegmentsEmpty.json deleted file mode 100644 index 619bab2..0000000 --- a/src/__tests__/mocks/mySegmentsEmpty.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "mySegments": [] -} diff --git a/src/__tests__/mocks/mysegments.emmanuel@split.io.json b/src/__tests__/mocks/mysegments.emmanuel@split.io.json deleted file mode 100644 index 6d53eb4..0000000 --- a/src/__tests__/mocks/mysegments.emmanuel@split.io.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "mySegments": [ - { - "id": "482df151-e63f-11e9-9275-924a43dg782b", - "name": "developers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc783e", - "name": "engineers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc7840", - "name": "employees" - } - ] -} diff --git a/src/__tests__/mocks/mysegments.facundo@split.io.json b/src/__tests__/mocks/mysegments.facundo@split.io.json deleted file mode 100644 index de85cc2..0000000 --- a/src/__tests__/mocks/mysegments.facundo@split.io.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "mySegments": [ - { - "id": "482df150-e62f-11e5-9265-924a43db712b", - "name": "splitters" - } - ] -} diff --git a/src/__tests__/mocks/mysegments.marcio@split.io.json b/src/__tests__/mocks/mysegments.marcio@split.io.json deleted file mode 100644 index 619bab2..0000000 --- a/src/__tests__/mocks/mysegments.marcio@split.io.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "mySegments": [] -} diff --git a/src/__tests__/mocks/mysegments.nicolas@split.io.json b/src/__tests__/mocks/mysegments.nicolas@split.io.json deleted file mode 100644 index 6d53eb4..0000000 --- a/src/__tests__/mocks/mysegments.nicolas@split.io.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "mySegments": [ - { - "id": "482df151-e63f-11e9-9275-924a43dg782b", - "name": "developers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc783e", - "name": "engineers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc7840", - "name": "employees" - } - ] -} diff --git a/src/__tests__/mocks/mysegments.nicolas@split.io.mock2.json b/src/__tests__/mocks/mysegments.nicolas@split.io.mock2.json deleted file mode 100644 index 98a1538..0000000 --- a/src/__tests__/mocks/mysegments.nicolas@split.io.mock2.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "mySegments": [ - { - "id": "482df151-e63f-11e9-9275-924a43dg782b", - "name": "developers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc783e", - "name": "engineers" - }, - { - "id": "725df151-d43f-11e9-7561-498b43dc783f", - "name": "splitters" - } - ] -} \ No newline at end of file diff --git a/src/__tests__/mocks/splitchanges.since.-1.json b/src/__tests__/mocks/splitchanges.since.-1.json index 6198d41..ee21cf9 100644 --- a/src/__tests__/mocks/splitchanges.since.-1.json +++ b/src/__tests__/mocks/splitchanges.since.-1.json @@ -1,5 +1,75 @@ { "splits": [ + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "in_large_segment", + "seed": -1984784937, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "no", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "IN_LARGE_SEGMENT", + "negate": false, + "userDefinedLargeSegmentMatcherData": { + "largeSegmentName": "harnessians" + }, + "whitelistMatcherData": null, + 
"unaryNumericMatcherData": null, + "betweenMatcherData": null, + "unaryStringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "yes", + "size": 100 + } + ] + }, + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "IN_LARGE_SEGMENT", + "negate": false, + "userDefinedLargeSegmentMatcherData": { + "largeSegmentName": "splitters" + }, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "unaryStringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "yes", + "size": 100 + } + ] + } + ], + "configurations": {} + }, { "orgId": null, "environment": null, diff --git a/src/__tests__/offline/browser.spec.js b/src/__tests__/offline/browser.spec.js index a7f7f08..6b2d981 100644 --- a/src/__tests__/offline/browser.spec.js +++ b/src/__tests__/offline/browser.spec.js @@ -3,14 +3,14 @@ import sinon from 'sinon'; import fetchMock from '../testUtils/fetchMock'; import { url } from '../testUtils'; import { SplitFactory, InLocalStorage } from '../../full'; -import { SplitFactory as SplitFactorySlim, LocalhostFromObject } from '../../'; +import { SplitFactory as SplitFactorySlim } from '../../'; import { settingsFactory } from '../../settings'; const settings = settingsFactory({ core: { key: 'facundo@split.io' } }); const spySplitChanges = sinon.spy(); const spySegmentChanges = sinon.spy(); -const spyMySegments = sinon.spy(); +const spyMemberships = sinon.spy(); const spyEventsBulk = sinon.spy(); const spyTestImpressionsBulk = sinon.spy(); const spyTestImpressionsCount = sinon.spy(); @@ -28,7 +28,7 @@ const replySpy = spy => { const configMocks = () => { fetchMock.mock(new RegExp(`${url(settings, '/splitChanges/')}.*`), () => replySpy(spySplitChanges)); fetchMock.mock(new RegExp(`${url(settings, '/segmentChanges/')}.*`), () => replySpy(spySegmentChanges)); - fetchMock.mock(new RegExp(`${url(settings, '/mySegments/')}.*`), () => replySpy(spyMySegments)); + fetchMock.mock(new RegExp(`${url(settings, '/memberships/')}.*`), () => replySpy(spyMemberships)); fetchMock.mock(url(settings, '/events/bulk'), () => replySpy(spyEventsBulk)); fetchMock.mock(url(settings, '/testImpressions/bulk'), () => replySpy(spyTestImpressionsBulk)); fetchMock.mock(url(settings, '/testImpressions/count'), () => replySpy(spyTestImpressionsCount)); @@ -60,7 +60,7 @@ tape('Browser offline mode', function (assert) { eventsFirstPushWindow: 0, readyTimeout: 0.001 }, - features: originalFeaturesMap + features: originalFeaturesMap, }; const factory = SplitFactory(config); const manager = factory.manager(); @@ -90,24 +90,19 @@ tape('Browser offline mode', function (assert) { const factoriesReadyFromCache = [ SplitFactory({ ...config, storage: InLocalStorage() }), - SplitFactorySlim({ ...config, storage: InLocalStorage(), sync: { localhostMode: LocalhostFromObject() } }) // slim factory requires localhostFromObject module + SplitFactorySlim({ ...config, storage: InLocalStorage() }) ]; const configs = [ - { ...config, features: { ...config.features }, storage: InLocalStorage /* invalid */, sync: { localhostMode: LocalhostFromObject /* invalid */ } }, + { ...config, features: { ...config.features }, storage: InLocalStorage /* invalid */ }, { ...config }, config, ]; - const factoriesReady = [ + const factories = [ ...configs.map(config => SplitFactory(config)), ...factoriesReadyFromCache ]; - const factoriesTimeout = [ // slim factory without a valid localhostFromObject 
module will timeout - SplitFactorySlim(config), - SplitFactorySlim({ ...config, sync: { localhostMode: LocalhostFromObject /* invalid */ } }), - ]; - const factories = [...factoriesReady, ...factoriesTimeout]; - let readyCount = 0, updateCount = 0, readyFromCacheCount = 0, timeoutCount = 0; + let readyCount = 0, updateCount = 0, readyFromCacheCount = 0; for (let i = 0; i < factories.length; i++) { const factory = factories[i], client = factory.client(), manager = factory.manager(), client2 = factory.client('other'); @@ -123,8 +118,7 @@ tape('Browser offline mode', function (assert) { updateCount++; }); client.on(client.Event.SDK_READY_TIMED_OUT, () => { - assert.equal(factory.settings.sync.localhostMode, undefined); - timeoutCount++; + assert.fail('Should not emit SDK_READY_TIMED_OUT event'); }); const sdkReadyFromCache = (client) => () => { @@ -249,8 +243,8 @@ tape('Browser offline mode', function (assert) { }; // Update the features in all remaining factories except the last one - for (let i = 1; i < factoriesReady.length - 1; i++) { - factoriesReady[i].settings.features = factory.settings.features; + for (let i = 1; i < factories.length - 1; i++) { + factories[i].settings.features = factory.settings.features; } // Assigning a new object to the features property in the config object doesn't trigger an update @@ -352,7 +346,7 @@ tape('Browser offline mode', function (assert) { // We test the breakdown instead of just the misc because it's faster to spot where the issue is assert.notOk(spySplitChanges.called, 'On offline mode we should not call the splitChanges endpoint.'); assert.notOk(spySegmentChanges.called, 'On offline mode we should not call the segmentChanges endpoint.'); - assert.notOk(spyMySegments.called, 'On offline mode we should not call the mySegments endpoint.'); + assert.notOk(spyMemberships.called, 'On offline mode we should not call the memberships endpoint.'); assert.notOk(spyEventsBulk.called, 'On offline mode we should not call the events endpoint.'); assert.notOk(spyTestImpressionsBulk.called, 'On offline mode we should not call the impressions endpoint.'); assert.notOk(spyTestImpressionsCount.called, 'On offline mode we should not call the impressions count endpoint.'); @@ -365,10 +359,9 @@ tape('Browser offline mode', function (assert) { assert.equal(sharedUpdateCount, 1, 'Shared client should have emitted SDK_UPDATE event once'); // SDK events on other factory clients - assert.equal(readyCount, factoriesReady.length, 'Each factory client should have emitted SDK_READY event once'); - assert.equal(updateCount, factoriesReady.length - 1, 'Each factory client except one should have emitted SDK_UPDATE event once'); + assert.equal(readyCount, factories.length, 'Each factory client should have emitted SDK_READY event once'); + assert.equal(updateCount, factories.length - 1, 'Each factory client except one should have emitted SDK_UPDATE event once'); assert.equal(readyFromCacheCount, factoriesReadyFromCache.length * 2, 'The main and shared client of the factories with LOCALSTORAGE should have emitted SDK_READY_FROM_CACHE event'); - assert.equal(timeoutCount, factoriesTimeout.length, 'The wrongly configured slim factories should have emitted SDK_READY_TIMED_OUT event'); assert.end(); }); diff --git a/src/__tests__/online/browser.spec.js b/src/__tests__/online/browser.spec.js index 0c667ae..7974446 100644 --- a/src/__tests__/online/browser.spec.js +++ b/src/__tests__/online/browser.spec.js @@ -22,10 +22,10 @@ import singleSync from '../browserSuites/single-sync.spec'; import 
{ settingsFactory } from '../../settings'; import splitChangesMock1 from '../mocks/splitchanges.since.-1.json'; import splitChangesMock2 from '../mocks/splitchanges.since.1457552620999.json'; -import mySegmentsFacundo from '../mocks/mysegments.facundo@split.io.json'; -import mySegmentsNicolas from '../mocks/mysegments.nicolas@split.io.json'; -import mySegmentsMarcio from '../mocks/mysegments.marcio@split.io.json'; -import mySegmentsEmmanuel from '../mocks/mysegments.emmanuel@split.io.json'; +import membershipsFacundo from '../mocks/memberships.facundo@split.io.json'; +import membershipsNicolas from '../mocks/memberships.nicolas@split.io.json'; +import membershipsMarcio from '../mocks/memberships.marcio@split.io.json'; +import membershipsEmmanuel from '../mocks/memberships.emmanuel@split.io.json'; import { InLocalStorage } from '../../index'; const settings = settingsFactory({ @@ -84,12 +84,12 @@ tape('## E2E CI Tests ##', function (assert) { //If we change the mocks, we need to clear localstorage. Cleaning up after testing ensures "fresh data". localStorage.clear(); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=-1'), { status: 200, body: splitChangesMock1 }); - fetchMock.get(url(settings, '/splitChanges?s=1.1&since=1457552620999'), { status: 200, body: splitChangesMock2 }); - fetchMock.get(url(settings, '/mySegments/facundo%40split.io'), { status: 200, body: mySegmentsFacundo }); - fetchMock.get(url(settings, '/mySegments/nicolas%40split.io'), { status: 200, body: mySegmentsNicolas }); - fetchMock.get(url(settings, '/mySegments/marcio%40split.io'), { status: 200, body: mySegmentsMarcio }); - fetchMock.get(url(settings, '/mySegments/emmanuel%40split.io'), { status: 200, body: mySegmentsEmmanuel }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=-1'), { status: 200, body: splitChangesMock1 }); + fetchMock.get(url(settings, '/splitChanges?s=1.2&since=1457552620999'), { status: 200, body: splitChangesMock2 }); + fetchMock.get(url(settings, '/memberships/facundo%40split.io'), { status: 200, body: membershipsFacundo }); + fetchMock.get(url(settings, '/memberships/nicolas%40split.io'), { status: 200, body: membershipsNicolas }); + fetchMock.get(url(settings, '/memberships/marcio%40split.io'), { status: 200, body: membershipsMarcio }); + fetchMock.get(url(settings, '/memberships/emmanuel%40split.io'), { status: 200, body: membershipsEmmanuel }); fetchMock.post(url(settings, '/testImpressions/bulk'), 200); fetchMock.post(url(settings, '/testImpressions/count'), 200); Math.random = () => 0.5; // SDKs without telemetry @@ -124,7 +124,7 @@ tape('## E2E CI Tests ##', function (assert) { /* Check that impressions and events are sent to the backend via Beacon API or Fetch when pagehide/visibilitychange events are triggered.
*/ assert.test('E2E / Use Beacon API (or Fetch if not available) to send remaining impressions and events when the browser page is unloaded or hidden', useBeaconApiSuite.bind(null, fetchMock)); assert.test('E2E / Use Beacon API DEBUG (or Fetch if not available) to send remaining impressions and events when the browser page is unloaded or hidden', useBeaconDebugApiSuite.bind(null, fetchMock)); - /* Validate ready from cache behaviour (might be merged into another suite if we end up having simple behavior around it as expected) */ + /* Validate ready from cache behavior (might be merged into another suite if we end up having simple behavior around it as expected) */ assert.test('E2E / Readiness from cache', readyFromCache.bind(null, fetchMock)); /* Validate readiness with ready promises */ assert.test('E2E / Ready promise', readyPromiseSuite.bind(null, fetchMock)); diff --git a/src/__tests__/push/browser.spec.js index f4d5103..ed071ad 100644 --- a/src/__tests__/push/browser.spec.js +++ b/src/__tests__/push/browser.spec.js @@ -4,7 +4,7 @@ import { testAuthWithPushDisabled, testAuthWith401, testNoEventSource, testSSEWi import { testPushRetriesDueToAuthErrors, testPushRetriesDueToSseErrors, testSdkDestroyWhileAuthRetries, testSdkDestroyWhileAuthSuccess, testSdkDestroyWhileConnDelay } from '../browserSuites/push-initialization-retries.spec'; import { testSynchronization } from '../browserSuites/push-synchronization.spec'; import { testSynchronizationRetries } from '../browserSuites/push-synchronization-retries.spec'; -import { testFallbacking } from '../browserSuites/push-fallbacking.spec'; +import { testFallback } from '../browserSuites/push-fallback.spec'; import { testRefreshToken } from '../browserSuites/push-refresh-token.spec'; import { testSplitKillOnReadyFromCache } from '../browserSuites/push-corner-cases.spec'; @@ -31,7 +31,7 @@ tape('## Browser JS - E2E CI Tests for PUSH ##', function (assert) { assert.test('E2E / PUSH synchronization: happy paths', testSynchronization.bind(null, fetchMock)); assert.test('E2E / PUSH synchronization: retries', testSynchronizationRetries.bind(null, fetchMock)); - assert.test('E2E / PUSH fallbacking, CONTROL, OCCUPANCY and STREAMING_RESET messages', testFallbacking.bind(null, fetchMock)); + assert.test('E2E / PUSH fallback, CONTROL, OCCUPANCY and STREAMING_RESET messages', testFallback.bind(null, fetchMock)); assert.test('E2E / PUSH refresh token and connection delay', testRefreshToken.bind(null, fetchMock)); diff --git a/src/__tests__/testUtils/index.js index 7f01fc5..5994a3c 100644 --- a/src/__tests__/testUtils/index.js +++ b/src/__tests__/testUtils/index.js @@ -14,7 +14,7 @@ export function nearlyEqual(actual, expected, epsilon = DEFAULT_ERROR_MARGIN) { } /** - * mock the basic behaviour for `/segmentChanges` endpoint: + * mock the basic behavior for `/segmentChanges` endpoint: * - when `?since=-1`, it returns the given segment `keys` in `added` list. * - otherwise, it returns empty `added` and `removed` lists, and the same since and till values.
* diff --git a/src/full/index.ts b/src/full/index.ts index 42991c3..be0ed7a 100644 --- a/src/full/index.ts +++ b/src/full/index.ts @@ -1,7 +1,5 @@ export { SplitFactory } from './splitFactory'; export { InLocalStorage } from '@splitsoftware/splitio-commons/src/storages/inLocalStorage/index'; -export { GoogleAnalyticsToSplit } from '@splitsoftware/splitio-commons/src/integrations/ga/GoogleAnalyticsToSplit'; -export { SplitToGoogleAnalytics } from '@splitsoftware/splitio-commons/src/integrations/ga/SplitToGoogleAnalytics'; export { ErrorLogger } from '@splitsoftware/splitio-commons/src/logger/browser/ErrorLogger'; export { WarnLogger } from '@splitsoftware/splitio-commons/src/logger/browser/WarnLogger'; export { InfoLogger } from '@splitsoftware/splitio-commons/src/logger/browser/InfoLogger'; diff --git a/src/full/splitFactory.ts b/src/full/splitFactory.ts index 83ba000..6b36516 100644 --- a/src/full/splitFactory.ts +++ b/src/full/splitFactory.ts @@ -1,3 +1,4 @@ +import type SplitIO from '@splitsoftware/splitio-commons/types/splitio'; import { settingsFactory } from '../settings/full'; import { getModules } from '../platform/getModules'; import { sdkFactory } from '@splitsoftware/splitio-commons/src/sdkFactory/index'; @@ -6,20 +7,19 @@ import { getFetch } from '../platform/getFetchFull'; import { getEventSource } from '../platform/getEventSource'; import { EventEmitter } from '@splitsoftware/splitio-commons/src/utils/MinEvents'; import { now } from '@splitsoftware/splitio-commons/src/utils/timeTracker/now/browser'; -import { IBrowserSettings } from '../../types/splitio'; const platform = { getFetch, getEventSource, EventEmitter, now }; /** * SplitFactory with pluggable modules for Browser. - * Includes localhost mode and fetch polyfill out-of-the-box. + * It includes a `fetch` polyfill out-of-the-box. * - * @param config configuration object used to instantiate the SDK - * @param __updateModules optional function that lets redefine internal SDK modules. Use with + * @param config - configuration object used to instantiate the SDK + * @param __updateModules - optional function that lets redefine internal SDK modules. Use with * caution since, unlike `config`, this param is not validated neither considered part of the public API. * @throws Will throw an error if the provided config is invalid. 
*/ -export function SplitFactory(config: IBrowserSettings, __updateModules?: (modules: ISdkFactoryParams) => void) { +export function SplitFactory(config: SplitIO.IClientSideSettings, __updateModules?: (modules: ISdkFactoryParams) => void) { const settings = settingsFactory(config); const modules = getModules(settings, platform); if (__updateModules) __updateModules(modules); diff --git a/src/full/umd.ts b/src/full/umd.ts index f6061a6..c20676c 100644 --- a/src/full/umd.ts +++ b/src/full/umd.ts @@ -1,8 +1,6 @@ // @ts-nocheck import { SplitFactory } from './splitFactory'; import { InLocalStorage } from '@splitsoftware/splitio-commons/src/storages/inLocalStorage/index'; -import { GoogleAnalyticsToSplit } from '@splitsoftware/splitio-commons/src/integrations/ga/GoogleAnalyticsToSplit'; -import { SplitToGoogleAnalytics } from '@splitsoftware/splitio-commons/src/integrations/ga/SplitToGoogleAnalytics'; import { ErrorLogger } from '@splitsoftware/splitio-commons/src/logger/browser/ErrorLogger'; import { WarnLogger } from '@splitsoftware/splitio-commons/src/logger/browser/WarnLogger'; import { InfoLogger } from '@splitsoftware/splitio-commons/src/logger/browser/InfoLogger'; @@ -11,8 +9,6 @@ import { DebugLogger } from '@splitsoftware/splitio-commons/src/logger/browser/D // Include all pluggable modules as properties to expose at the global `splitio` object SplitFactory.SplitFactory = SplitFactory; SplitFactory.InLocalStorage = InLocalStorage; -SplitFactory.GoogleAnalyticsToSplit = GoogleAnalyticsToSplit; -SplitFactory.SplitToGoogleAnalytics = SplitToGoogleAnalytics; SplitFactory.ErrorLogger = ErrorLogger; SplitFactory.WarnLogger = WarnLogger; SplitFactory.InfoLogger = InfoLogger; diff --git a/src/index.ts b/src/index.ts index 3eb4354..be0ed7a 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,10 +1,7 @@ export { SplitFactory } from './splitFactory'; export { InLocalStorage } from '@splitsoftware/splitio-commons/src/storages/inLocalStorage/index'; -export { GoogleAnalyticsToSplit } from '@splitsoftware/splitio-commons/src/integrations/ga/GoogleAnalyticsToSplit'; -export { SplitToGoogleAnalytics } from '@splitsoftware/splitio-commons/src/integrations/ga/SplitToGoogleAnalytics'; export { ErrorLogger } from '@splitsoftware/splitio-commons/src/logger/browser/ErrorLogger'; export { WarnLogger } from '@splitsoftware/splitio-commons/src/logger/browser/WarnLogger'; export { InfoLogger } from '@splitsoftware/splitio-commons/src/logger/browser/InfoLogger'; export { DebugLogger } from '@splitsoftware/splitio-commons/src/logger/browser/DebugLogger'; -export { LocalhostFromObject } from '@splitsoftware/splitio-commons/src/sync/offline/LocalhostFromObject'; export { PluggableStorage } from '@splitsoftware/splitio-commons/src/storages/pluggable'; diff --git a/src/platform/getModules.ts b/src/platform/getModules.ts index 90c45e9..a6e168f 100644 --- a/src/platform/getModules.ts +++ b/src/platform/getModules.ts @@ -7,11 +7,11 @@ import { sdkClientMethodCSFactory } from '@splitsoftware/splitio-commons/src/sdk import { BrowserSignalListener } from '@splitsoftware/splitio-commons/src/listeners/browser'; import { impressionObserverCSFactory } from '@splitsoftware/splitio-commons/src/trackers/impressionObserver/impressionObserverCS'; import { pluggableIntegrationsManagerFactory } from '@splitsoftware/splitio-commons/src/integrations/pluggable'; - import { IPlatform, ISdkFactoryParams } from '@splitsoftware/splitio-commons/src/sdkFactory/types'; import { ISettings } from '@splitsoftware/splitio-commons/src/types'; import 
{ CONSUMER_MODE, CONSUMER_PARTIAL_MODE, LOCALHOST_MODE } from '@splitsoftware/splitio-commons/src/utils/constants'; import { createUserConsentAPI } from '@splitsoftware/splitio-commons/src/consent/sdkUserConsent'; +import { localhostFromObjectFactory } from '@splitsoftware/splitio-commons/src/sync/offline/LocalhostFromObject'; let syncManagerStandaloneFactory: ISdkFactoryParams['syncManagerFactory']; let syncManagerSubmittersFactory: ISdkFactoryParams['syncManagerFactory']; @@ -25,7 +25,7 @@ export function getModules(settings: ISettings, platform: IPlatform): ISdkFactor platform, - storageFactory: settings.storage, + storageFactory: settings.storage as ISdkFactoryParams['storageFactory'], splitApiFactory, @@ -51,7 +51,7 @@ export function getModules(settings: ISettings, platform: IPlatform): ISdkFactor switch (settings.mode) { case LOCALHOST_MODE: modules.splitApiFactory = undefined; - modules.syncManagerFactory = settings.sync.localhostMode; + modules.syncManagerFactory = localhostFromObjectFactory; modules.SignalListener = undefined; break; case CONSUMER_MODE: diff --git a/src/settings/defaults.ts b/src/settings/defaults.ts index 677a409..fd82a4e 100644 --- a/src/settings/defaults.ts +++ b/src/settings/defaults.ts @@ -1,8 +1,8 @@ +import type SplitIO from '@splitsoftware/splitio-commons/types/splitio'; import { LogLevels, isLogLevelString } from '@splitsoftware/splitio-commons/src/logger/index'; -import { ConsentStatus, LogLevel } from '@splitsoftware/splitio-commons/src/types'; import { CONSENT_GRANTED } from '@splitsoftware/splitio-commons/src/utils/constants'; -const packageVersion = '0.15.0'; +const packageVersion = '1.0.0'; /** * In browser, the default debug level, can be set via the `localStorage.splitio_debug` item. @@ -10,7 +10,7 @@ const packageVersion = '0.15.0'; * Other acceptable values are 'on', 'enable' and 'enabled', which are equivalent to 'DEBUG'. * Any other string value is equivalent to disable ('NONE'). */ -let initialLogLevel: LogLevel | undefined; +let initialLogLevel: SplitIO.LogLevel | undefined; const LS_KEY = 'splitio_debug'; @@ -30,11 +30,11 @@ export const defaults = { // Maximum amount of time used before notifies me a timeout. readyTimeout: 10, // Amount of time we will wait before the first push of events. - eventsFirstPushWindow: 10 + eventsFirstPushWindow: 10, }, // Consent is considered granted by default - userConsent: CONSENT_GRANTED as ConsentStatus, + userConsent: CONSENT_GRANTED as SplitIO.ConsentStatus, // Instance version. 
version: `browserjs-${packageVersion}`, diff --git a/src/settings/full.ts b/src/settings/full.ts index 8afa840..8c6f6fb 100644 --- a/src/settings/full.ts +++ b/src/settings/full.ts @@ -4,7 +4,6 @@ import { validateRuntime } from '@splitsoftware/splitio-commons/src/utils/settin import { validateStorageCS } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/storage/storageCS'; import { validatePluggableIntegrations } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/integrations/pluggable'; import { validateLogger } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/logger/pluggableLogger'; -import { validateLocalhostWithDefault } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/localhost/builtin'; import { validateConsent } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/consent'; const params = { @@ -14,10 +13,11 @@ const params = { storage: validateStorageCS, integrations: validatePluggableIntegrations, logger: validateLogger, - localhost: validateLocalhostWithDefault, // Full SplitFactory provides a default localhost module, except a valid one is provided consent: validateConsent, }; export function settingsFactory(config: any) { - return settingsValidation(config, params); + const settings = settingsValidation(config, params); + + return settings; } diff --git a/src/settings/index.ts b/src/settings/index.ts index bfe6541..8c6f6fb 100644 --- a/src/settings/index.ts +++ b/src/settings/index.ts @@ -4,7 +4,6 @@ import { validateRuntime } from '@splitsoftware/splitio-commons/src/utils/settin import { validateStorageCS } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/storage/storageCS'; import { validatePluggableIntegrations } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/integrations/pluggable'; import { validateLogger } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/logger/pluggableLogger'; -import { validateLocalhost } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/localhost/pluggable'; import { validateConsent } from '@splitsoftware/splitio-commons/src/utils/settingsValidation/consent'; const params = { @@ -14,10 +13,11 @@ const params = { storage: validateStorageCS, integrations: validatePluggableIntegrations, logger: validateLogger, - localhost: validateLocalhost, // Slim SplitFactory validates that the localhost module is passed in localhost mode consent: validateConsent, }; export function settingsFactory(config: any) { - return settingsValidation(config, params); + const settings = settingsValidation(config, params); + + return settings; } diff --git a/src/splitFactory.ts b/src/splitFactory.ts index db14cd3..cdf93fb 100644 --- a/src/splitFactory.ts +++ b/src/splitFactory.ts @@ -1,3 +1,4 @@ +import type SplitIO from '@splitsoftware/splitio-commons/types/splitio'; import { settingsFactory } from './settings'; import { getModules } from './platform/getModules'; import { sdkFactory } from '@splitsoftware/splitio-commons/src/sdkFactory/index'; @@ -6,20 +7,18 @@ import { getFetch } from './platform/getFetchSlim'; import { getEventSource } from './platform/getEventSource'; import { EventEmitter } from '@splitsoftware/splitio-commons/src/utils/MinEvents'; import { now } from '@splitsoftware/splitio-commons/src/utils/timeTracker/now/browser'; -import { IBrowserSettings } from '../types/splitio'; const platform = { getFetch, getEventSource, EventEmitter, now }; /** - * Slim SplitFactory with pluggable modules for Browser. 
- * Doesn't include localhost mode and fetch ponyfill out-of-the-box. + * SplitFactory with pluggable modules for Browser. * - * @param config configuration object used to instantiate the SDK - * @param __updateModules optional function that lets redefine internal SDK modules. Use with + * @param config - configuration object used to instantiate the SDK + * @param __updateModules - optional function that lets redefine internal SDK modules. Use with * caution since, unlike `config`, this param is not validated neither considered part of the public API. * @throws Will throw an error if the provided config is invalid. */ -export function SplitFactory(config: IBrowserSettings, __updateModules?: (modules: ISdkFactoryParams) => void) { +export function SplitFactory(config: SplitIO.IClientSideSettings, __updateModules?: (modules: ISdkFactoryParams) => void) { const settings = settingsFactory(config); const modules = getModules(settings, platform); if (__updateModules) __updateModules(modules); diff --git a/ts-tests/.gitignore b/ts-tests/.gitignore deleted file mode 100644 index e00a366..0000000 --- a/ts-tests/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -index.js -package-lock.json -node_modules/ \ No newline at end of file diff --git a/ts-tests/index.ts b/ts-tests/index.ts index 66d210f..6195c90 100644 --- a/ts-tests/index.ts +++ b/ts-tests/index.ts @@ -11,14 +11,12 @@ * @author Nico Zelaya */ -import { SplitFactory as SplitFactoryFull, InLocalStorage as InLocalStorageFull, GoogleAnalyticsToSplit as GoogleAnalyticsToSplitFull, SplitToGoogleAnalytics as SplitToGoogleAnalyticsFull, DebugLogger as DebugLoggerFull, InfoLogger as InfoLoggerFull, WarnLogger as WarnLoggerFull, ErrorLogger as ErrorLoggerFull, PluggableStorage as PluggableStorageFull } from '@splitsoftware/splitio-browserjs/full'; -import { SplitFactory, InLocalStorage, GoogleAnalyticsToSplit, SplitToGoogleAnalytics, DebugLogger, InfoLogger, WarnLogger, ErrorLogger, LocalhostFromObject, PluggableStorage } from '@splitsoftware/splitio-browserjs'; +import { SplitFactory as SplitFactoryFull, InLocalStorage as InLocalStorageFull, DebugLogger as DebugLoggerFull, InfoLogger as InfoLoggerFull, WarnLogger as WarnLoggerFull, ErrorLogger as ErrorLoggerFull, PluggableStorage as PluggableStorageFull } from '../types/full'; +import { SplitFactory, InLocalStorage, DebugLogger, InfoLogger, WarnLogger, ErrorLogger, PluggableStorage } from '../types/index'; // Entry points must export the same objects let splitFactory = SplitFactory; splitFactory = SplitFactoryFull; let inLocalStorage = InLocalStorage; inLocalStorage = InLocalStorageFull; -let gaToSplit = GoogleAnalyticsToSplit; gaToSplit = GoogleAnalyticsToSplitFull; -let splitToGa = SplitToGoogleAnalytics; splitToGa = SplitToGoogleAnalyticsFull; let pluggableStorage = PluggableStorage; pluggableStorage = PluggableStorageFull; let stringPromise: Promise; @@ -34,18 +32,18 @@ let trackPromise: Promise; // Facade return interface // let SDK: SplitIO.ISDK; -let AsyncSDK: SplitIO.IAsyncSDK; -let SDK: SplitIO.ISDK; +let AsyncSDK: SplitIO.IBrowserAsyncSDK; +let SDK: SplitIO.IBrowserSDK; // Settings interfaces // let nodeSettings: SplitIO.INodeSettings; // let asyncSettings: SplitIO.INodeAsyncSettings; -let browserSettings: SplitIO.IBrowserSettings; -let browserAsyncSettings: SplitIO.IBrowserAsyncSettings; +let browserSettings: SplitIO.IClientSideSettings; +let browserAsyncSettings: SplitIO.IClientSideAsyncSettings; // Client & Manager APIs // let client: SplitIO.IClient; -let client: SplitIO.IClient; +let client: 
SplitIO.IBrowserClient; let manager: SplitIO.IManager; -let asyncClient: SplitIO.IAsyncClient; +let asyncClient: SplitIO.IBrowserAsyncClient; let asyncManager: SplitIO.IAsyncManager; // Utility interfaces let impressionListener: SplitIO.IImpressionListener; @@ -184,7 +182,7 @@ browserAsyncSettings = { wrapper: {} }) }; -// With sync settings should return ISDK, if settings have async storage it should return IAsyncSDK +// With sync settings should return IBrowserSDK, if settings have async storage it should return IBrowserAsyncSDK SDK = SplitFactory(browserSettings); AsyncSDK = SplitFactory(browserAsyncSettings); // SDK = SplitFactory(nodeSettings); @@ -204,8 +202,8 @@ const instantiatedSettingsStartup: { [key: string]: number } = SDK.settings.star const instantiatedStorage: SplitIO.StorageSync = SDK.settings.storage; const instantiatedSettingsUrls: { [key: string]: string } = SDK.settings.urls; const instantiatedSettingsVersion: string = SDK.settings.version; -let instantiatedSettingsFeatures = SDK.settings.features; -// // We should be able to write on features prop. The rest are readonly props. +let instantiatedSettingsFeatures = SDK.settings.features as SplitIO.MockedFeaturesMap; +// We should be able to write on features prop. The rest are readonly props. instantiatedSettingsFeatures.something = 'something'; SDK.settings.features = { 'split_x': 'on' }; @@ -252,8 +250,11 @@ client = client.removeAllListeners(); // const b: number = client.listenerCount(splitEvent); // Not part of IEventEmitter // Ready and destroy -const readyPromise: Promise = client.ready(); -const destroyPromise: Promise = client.destroy(); +let promise: Promise = client.ready(); +promise = client.destroy(); +promise = SDK.destroy(); +// @TODO not public yet +// promise = client.flush(); // We can call getTreatment without a key. // treatment = client.getTreatment(splitKey, 'mySplit'); @@ -341,8 +342,9 @@ asyncClient = asyncClient.removeAllListeners(); // const b1: number = asyncClient.listenerCount(splitEvent); // Not part of IEventEmitter // Ready and destroy (same as for sync client, just for interface checking) -const readyPromise1: Promise = asyncClient.ready(); -asyncClient.destroy(); +promise = asyncClient.ready(); +promise = asyncClient.destroy(); +promise = AsyncSDK.destroy(); // We can call getTreatment asyncTreatment = asyncClient.getTreatment('mySplit'); @@ -417,7 +419,7 @@ splitView = manager.split('mySplit'); splitViews = manager.splits(); // Manager implements ready promise. -const managerReadyPromise: Promise = manager.ready(); +promise = manager.ready(); // Manager implements methods from NodeJS.Events. Testing a few. manager = manager.on(splitEvent, () => { }); @@ -440,7 +442,7 @@ splitViewAsync = asyncManager.split('mySplit'); splitViewsAsync = asyncManager.splits(); // asyncManager implements ready promise. -const asyncManagerReadyPromise: Promise = asyncManager.ready(); +promise = asyncManager.ready(); // asyncManager implements methods from NodeJS.Events. Testing a few. 
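Since the ts-tests above now exercise the renamed `IBrowserSDK` typing and a factory-level `destroy()` (checked as `promise = SDK.destroy();`), a minimal usage sketch may help map the type changes to application code. The SDK key, user key and flag name below are placeholders, not values from this repository.

```typescript
import { SplitFactory } from '@splitsoftware/splitio-browserjs';

// Standalone (sync storage) settings resolve to the IBrowserSDK overload.
const factory = SplitFactory({
  core: {
    authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', // placeholder SDK key
    key: 'user-123'                               // illustrative user key
  }
});

const client = factory.client();

client.on(client.Event.SDK_READY, () => {
  // Browser clients are bound to the key provided in the settings.
  console.log(client.getTreatment('my_feature_flag'));
});

// Factory-level destroy (type-checked above as `promise = SDK.destroy();`)
// shuts down every client created by this factory.
window.addEventListener('pagehide', () => { factory.destroy(); });
```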
asyncManager = asyncManager.on(splitEvent, () => { }); @@ -521,30 +523,7 @@ userConsent = AsyncSDK.UserConsent.Status.UNKNOWN; // Split filters let splitFilters: SplitIO.SplitFilter[] = [{ type: 'bySet', values: ['set_a', 'set_b'] }, { type: 'byName', values: ['my_split_1', 'my_split_1'] }, { type: 'byPrefix', values: ['my_split', 'test_split_'] }] -// Browser integrations -let fieldsObjectSample: UniversalAnalytics.FieldsObject = { hitType: 'event', eventAction: 'action' }; -let eventDataSample: SplitIO.EventData = { eventTypeId: 'someEventTypeId', value: 10, properties: {} }; - -let minimalGoogleAnalyticsToSplitConfig: SplitIO.GoogleAnalyticsToSplitOptions = { identities: [{ key: 'user', trafficType: 'tt' }] }; -let emptySplitToGoogleAnalyticsConfig: SplitIO.SplitToGoogleAnalyticsOptions = {}; - -let customGoogleAnalyticsToSplitConfig: SplitIO.GoogleAnalyticsToSplitOptions = { - hits: false, - filter: function (model: UniversalAnalytics.Model): boolean { return true; }, - mapper: function (model: UniversalAnalytics.Model, defaultMapping: SplitIO.EventData): SplitIO.EventData { return eventDataSample; }, - prefix: 'PREFIX', - identities: [{ key: 'key1', trafficType: 'tt1' }, { key: 'key2', trafficType: 'tt2' }], - autoRequire: true -}; -let customSplitToGoogleAnalyticsConfig: SplitIO.SplitToGoogleAnalyticsOptions = { - events: false, - impressions: true, - filter: function (model: SplitIO.IntegrationData): boolean { return true; }, - mapper: function (model: SplitIO.IntegrationData, defaultMapping: UniversalAnalytics.FieldsObject): UniversalAnalytics.FieldsObject { return fieldsObjectSample; }, - trackerNames: ['t0', 'myTracker'], -} - -let fullBrowserSettings: SplitIO.IBrowserSettings = { +let fullBrowserSettings: SplitIO.IClientSideSettings = { core: { authorizationKey: 'asd', key: 'asd', @@ -579,16 +558,11 @@ let fullBrowserSettings: SplitIO.IBrowserSettings = { storage: syncStorageFactory, impressionListener: impressionListener, debug: true, - integrations: [ - GoogleAnalyticsToSplit(), SplitToGoogleAnalytics(), - GoogleAnalyticsToSplit(minimalGoogleAnalyticsToSplitConfig), SplitToGoogleAnalytics(emptySplitToGoogleAnalyticsConfig), - GoogleAnalyticsToSplit(customGoogleAnalyticsToSplitConfig), SplitToGoogleAnalytics(customSplitToGoogleAnalyticsConfig) - ], + integrations: [], streamingEnabled: true, sync: { splitFilters: splitFilters, impressionsMode: 'DEBUG', - localhostMode: LocalhostFromObject(), enabled: true, requestOptions: { getHeaderOverrides(context) { return { ...context.headers, 'header': 'value' }; }, @@ -599,7 +573,7 @@ let fullBrowserSettings: SplitIO.IBrowserSettings = { fullBrowserSettings.userConsent = 'DECLINED'; fullBrowserSettings.userConsent = 'UNKNOWN'; -let fullBrowserAsyncSettings: SplitIO.IBrowserAsyncSettings = { +let fullBrowserAsyncSettings: SplitIO.IClientSideAsyncSettings = { mode: 'consumer', core: { authorizationKey: 'asd', @@ -631,15 +605,9 @@ let fullBrowserAsyncSettings: SplitIO.IBrowserAsyncSettings = { }), impressionListener: impressionListener, debug: true, - integrations: [ - GoogleAnalyticsToSplit(), SplitToGoogleAnalytics(), - GoogleAnalyticsToSplit(minimalGoogleAnalyticsToSplitConfig), SplitToGoogleAnalytics(emptySplitToGoogleAnalyticsConfig), - GoogleAnalyticsToSplit(customGoogleAnalyticsToSplitConfig), SplitToGoogleAnalytics(customSplitToGoogleAnalyticsConfig) - ], - streamingEnabled: true, + integrations: [], sync: { impressionsMode: 'DEBUG', - enabled: true, requestOptions: { getHeaderOverrides(context) { return { ...context.headers, 
'header': 'value' }; }, } @@ -661,8 +629,6 @@ fullBrowserSettings.debug = InfoLoggerFull(); fullBrowserSettings.debug = WarnLoggerFull(); fullBrowserSettings.debug = ErrorLoggerFull(); -// fullBrowserSettings.integrations[0].type = 'GOOGLE_ANALYTICS_TO_SPLIT'; - // let fullNodeSettings: SplitIO.INodeSettings = { // core: { // authorizationKey: 'asd', diff --git a/ts-tests/package.json b/ts-tests/package.json deleted file mode 100644 index 32358c4..0000000 --- a/ts-tests/package.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "name": "ts-tests", - "version": "1.0.0", - "description": "SDK tests for TypeScript declaration files", - "author": "Nico Zelaya", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "git+https://github.com/splitio/javascript-browser-client.git" - } -} diff --git a/ts-tests/tsconfig.json b/ts-tests/tsconfig.json index 4adc239..a0ceeb0 100644 --- a/ts-tests/tsconfig.json +++ b/ts-tests/tsconfig.json @@ -2,7 +2,8 @@ "compilerOptions": { "noImplicitAny": true, "target": "es5", - "module": "commonjs" + "module": "commonjs", + "noEmit": true }, "files": [ "index" diff --git a/types/full/index.d.ts b/types/full/index.d.ts index cc8f573..521c48a 100644 --- a/types/full/index.d.ts +++ b/types/full/index.d.ts @@ -2,21 +2,21 @@ // Project: http://www.split.io/ // Definitions by: Nico Zelaya -/// +import '@splitsoftware/splitio-commons'; + export = JsSdk; declare module JsSdk { /** - * Full version of the Split.io sdk factory function. + * Full version of the Split.io SDK factory function. * - * Unlike the slim version, it doesn't require a 'fetch' polyfill to support old browsers @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#language-support}. - * and includes localhost mode out-of-the-box @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode}. + * Unlike the default version, it includes a `fetch` polyfill to support old browsers @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#language-support}. * - * The settings parameter should be an object that complies with the SplitIO.IBrowserSettings. + * The settings parameter should be an object that complies with the SplitIO.IClientSideSettings or SplitIO.IClientSideAsyncSettings interfaces. * For more information read the corresponding article: @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} */ - export function SplitFactory(settings: SplitIO.IBrowserSettings): SplitIO.ISDK; - export function SplitFactory(settings: SplitIO.IBrowserAsyncSettings): SplitIO.IAsyncSDK; + export function SplitFactory(settings: SplitIO.IClientSideSettings): SplitIO.IBrowserSDK; + export function SplitFactory(settings: SplitIO.IClientSideAsyncSettings): SplitIO.IBrowserAsyncSDK; /** * Persistent storage based on the LocalStorage Web API for browsers. @@ -30,21 +30,7 @@ declare module JsSdk { * * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#sharing-state-with-a-pluggable-storage} */ - export function PluggableStorage(options: SplitIO.PluggableStorageOptions): SplitIO.StorageAsyncFactory; - - /** - * Enable 'Google Analytics to Split' integration, to track Google Analytics hits as Split events. 
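The overload pair declared above resolves `IClientSideSettings` to `IBrowserSDK` and `IClientSideAsyncSettings` to `IBrowserAsyncSDK`, which is easiest to see side by side. A hedged sketch against the full entry point; the SDK key is a placeholder and `myStorageWrapper` is a hypothetical object implementing the pluggable storage contract.

```typescript
import { SplitFactory, InLocalStorage, PluggableStorage } from '@splitsoftware/splitio-browserjs/full';

// Sync storage: the overload resolves to SplitIO.IBrowserSDK.
const syncSdk = SplitFactory({
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user-123' },
  storage: InLocalStorage({ prefix: 'MY_PREFIX' }) // persistent cache in localStorage
});

// Async (consumer) storage: the overload resolves to SplitIO.IBrowserAsyncSDK.
declare const myStorageWrapper: object; // hypothetical pluggable storage wrapper
const asyncSdk = SplitFactory({
  mode: 'consumer',
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user-123' },
  storage: PluggableStorage({ wrapper: myStorageWrapper })
});

// Sync evaluations return strings; async evaluations return promises.
console.log(syncSdk.client().getTreatment('my_feature_flag'));
asyncSdk.client().getTreatment('my_feature_flag').then(console.log);
```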
- * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#google-analytics-to-split} - */ - export function GoogleAnalyticsToSplit(options?: SplitIO.GoogleAnalyticsToSplitOptions): SplitIO.IntegrationFactory; - - /** - * Enable 'Split to Google Analytics' integration, to track Split impressions and events as Google Analytics hits. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#split-to-google-analytics} - */ - export function SplitToGoogleAnalytics(options?: SplitIO.SplitToGoogleAnalyticsOptions): SplitIO.IntegrationFactory; + export function PluggableStorage(options: SplitIO.PluggableStorageOptions): SplitIO.StorageAsyncFactory; /** * Creates a logger instance that enables descriptive log messages with DEBUG log level when passed in the factory settings. diff --git a/types/index.d.ts b/types/index.d.ts index 98928e0..796120d 100644 --- a/types/index.d.ts +++ b/types/index.d.ts @@ -2,21 +2,19 @@ // Project: http://www.split.io/ // Definitions by: Nico Zelaya -/// +import '@splitsoftware/splitio-commons'; + export = JsSdk; declare module JsSdk { /** - * Slim version of the Split.io sdk factory function. - * - * Recommended to use for bundle size reduction in production, since it doesn't include a 'fetch' polyfill and localhost mode out-of-the-box - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode}. + * Split.io SDK factory function. * - * The settings parameter should be an object that complies with the SplitIO.IBrowserSettings. + * The settings parameter should be an object that complies with the SplitIO.IClientSideSettings or SplitIO.IClientSideAsyncSettings interfaces. * For more information read the corresponding article: @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration} */ - export function SplitFactory(settings: SplitIO.IBrowserSettings): SplitIO.ISDK; - export function SplitFactory(settings: SplitIO.IBrowserAsyncSettings): SplitIO.IAsyncSDK; + export function SplitFactory(settings: SplitIO.IClientSideSettings): SplitIO.IBrowserSDK; + export function SplitFactory(settings: SplitIO.IClientSideAsyncSettings): SplitIO.IBrowserAsyncSDK; /** * Persistent storage based on the LocalStorage Web API for browsers. @@ -32,20 +30,6 @@ declare module JsSdk { */ export function PluggableStorage(options: SplitIO.PluggableStorageOptions): SplitIO.StorageAsyncFactory; - /** - * Enable 'Google Analytics to Split' integration, to track Google Analytics hits as Split events. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#google-analytics-to-split} - */ - export function GoogleAnalyticsToSplit(options?: SplitIO.GoogleAnalyticsToSplitOptions): SplitIO.IntegrationFactory; - - /** - * Enable 'Split to Google Analytics' integration, to track Split impressions and events as Google Analytics hits. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#split-to-google-analytics} - */ - export function SplitToGoogleAnalytics(options?: SplitIO.SplitToGoogleAnalyticsOptions): SplitIO.IntegrationFactory; - /** * Creates a logger instance that enables descriptive log messages with DEBUG log level when passed in the factory settings. * @@ -73,12 +57,4 @@ declare module JsSdk { * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging} */ export function ErrorLogger(): SplitIO.ILogger; - - /** - * Required to enable localhost mode when importing the SDK from the slim entry point of the library. 
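With the `LocalhostFromObject` declaration removed here, localhost mode is driven only by the `'localhost'` authorization key and the mocked `features` map (assuming that map is still configured the same way under `IClientSideSettings`). A minimal sketch with illustrative flag names:

```typescript
import { SplitFactory } from '@splitsoftware/splitio-browserjs';

// 'localhost' as authorizationKey puts the SDK in localhost (offline) mode,
// evaluating flags from the mocked `features` map instead of Split servers.
const client = SplitFactory({
  core: { authorizationKey: 'localhost', key: 'user-123' },
  features: {
    my_feature_flag: 'on',                                       // plain treatment
    other_flag: { treatment: 'off', config: '{"color":"blue"}' } // treatment with config
  }
}).client();

client.on(client.Event.SDK_READY, () => {
  console.log(client.getTreatment('my_feature_flag')); // 'on'
});
```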
- * It uses the mocked features map defined in the 'features' config object. - * - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode} - */ - export function LocalhostFromObject(): SplitIO.LocalhostFactory; } diff --git a/types/splitio.d.ts b/types/splitio.d.ts deleted file mode 100644 index 56ffff0..0000000 --- a/types/splitio.d.ts +++ /dev/null @@ -1,1704 +0,0 @@ -// Type definitions for JavaScript Browser Split Software SDK -// Project: http://www.split.io/ -// Definitions by: Nico Zelaya - -/// - -export as namespace SplitIO; -export = SplitIO; - -/** - * EventEmitter interface based on a subset of the NodeJS.EventEmitter methods. - */ -interface IEventEmitter { - addListener(event: string, listener: (...args: any[]) => void): this - on(event: string, listener: (...args: any[]) => void): this - once(event: string, listener: (...args: any[]) => void): this - removeListener(event: string, listener: (...args: any[]) => void): this - off(event: string, listener: (...args: any[]) => void): this - removeAllListeners(event?: string): this - emit(event: string, ...args: any[]): boolean -} -/** - * @typedef {Object} EventConsts - * @property {string} SDK_READY The ready event. - * @property {string} SDK_READY_FROM_CACHE The ready event when fired with cached data. - * @property {string} SDK_READY_TIMED_OUT The timeout event. - * @property {string} SDK_UPDATE The update event. - */ -type EventConsts = { - SDK_READY: 'init::ready', - SDK_READY_FROM_CACHE: 'init::cache-ready', - SDK_READY_TIMED_OUT: 'init::timeout', - SDK_UPDATE: 'state::update' -}; -/** - * SDK Modes. - * @typedef {string} SDKMode - */ -type SDKMode = 'standalone' | 'localhost' | 'consumer' | 'consumer_partial'; -/** - * Storage types. - * @typedef {string} StorageType - */ -type StorageType = 'MEMORY' | 'LOCALSTORAGE'; -/** - * Settings interface. This is a representation of the settings the SDK expose, that's why - * most of it's props are readonly. Only features should be rewritten when localhost mode is active. - * @interface ISettings - */ -interface ISettings { - readonly core: { - authorizationKey: string, - key: SplitIO.SplitKey, - labelsEnabled: boolean, - IPAddressesEnabled: boolean - }, - readonly mode: SDKMode, - readonly scheduler: { - featuresRefreshRate: number, - impressionsRefreshRate: number, - impressionsQueueSize: number, - telemetryRefreshRate: number, - segmentsRefreshRate: number, - offlineRefreshRate: number, - eventsPushRate: number, - eventsQueueSize: number, - pushRetryBackoffBase: number - }, - readonly startup: { - readyTimeout: number, - requestTimeoutBeforeReady: number, - retriesOnFailureBeforeReady: number, - eventsFirstPushWindow: number - }, - readonly storage?: SplitIO.StorageSyncFactory | SplitIO.StorageAsyncFactory, - readonly urls: { - events: string, - sdk: string, - auth: string, - streaming: string, - telemetry: string - }, - readonly integrations?: SplitIO.IntegrationFactory[], - readonly debug: boolean | LogLevel | SplitIO.ILogger, - readonly version: string, - /** - * Mocked features map. - */ - features?: SplitIO.MockedFeaturesMap, - readonly streamingEnabled: boolean, - readonly sync: { - splitFilters: SplitIO.SplitFilter[], - impressionsMode: SplitIO.ImpressionsMode, - enabled: boolean, - flagSpecVersion: string, - localhostMode?: SplitIO.LocalhostFactory, - requestOptions?: { - getHeaderOverrides?: (context: { headers: Record }) => Record - }, - }, - readonly userConsent: SplitIO.ConsentStatus -} -/** - * Log levels. 
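The event constants deleted above (`SDK_READY`, `SDK_READY_FROM_CACHE`, `SDK_READY_TIMED_OUT`, `SDK_UPDATE`) remain the readiness surface of the client. A brief sketch of how they are typically consumed, assuming an `InLocalStorage` cache so the from-cache event can fire; key, prefix and handlers are illustrative.

```typescript
import { SplitFactory, InLocalStorage } from '@splitsoftware/splitio-browserjs';

const client = SplitFactory({
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user-123' },
  storage: InLocalStorage({ prefix: 'MY_APP' })
}).client();

// The same constants are exposed on the client as `Event`.
client.once(client.Event.SDK_READY_FROM_CACHE, () => {
  // Evaluations can be served from data cached by a previous session.
});
client.once(client.Event.SDK_READY, () => {
  // Fresh rollout data has been fetched from Split servers.
});
client.once(client.Event.SDK_READY_TIMED_OUT, () => {
  // Data could not be loaded within the configured readyTimeout.
});
client.on(client.Event.SDK_UPDATE, () => {
  // The rollout plan changed after the SDK was ready.
});
```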
- * @typedef {string} LogLevel - */ -type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'NONE'; -/** - * Logger API - * @interface ILoggerAPI - */ -interface ILoggerAPI { - /** - * Enables SDK logging to the console. - * @function enable - * @returns {void} - */ - enable(): void, - /** - * Disables SDK logging. - * @function disable - * @returns {void} - */ - disable(): void, - /** - * Sets a log level for the SDK logs. - * @function setLogLevel - * @returns {void} - */ - setLogLevel(logLevel: LogLevel): void, - /** - * Log level constants. Use this to pass them to setLogLevel function. - */ - LogLevel: { - [level in LogLevel]: LogLevel - } -} -/** - * User consent API - * @interface IUserConsentAPI - */ -interface IUserConsentAPI { - /** - * Sets or updates the user consent status. Possible values are `true` and `false`, which represent user consent `'GRANTED'` and `'DECLINED'` respectively. - * - `true ('GRANTED')`: the user has granted consent for tracking events and impressions. The SDK will send them to Split cloud. - * - `false ('DECLINED')`: the user has declined consent for tracking events and impressions. The SDK will not send them to Split cloud. - * - * NOTE: calling this method updates the user consent at a factory level, affecting all clients of the same factory. - * - * @function setStatus - * @param {boolean} userConsent The user consent status, true for 'GRANTED' and false for 'DECLINED'. - * @returns {boolean} Whether the provided param is a valid value (i.e., a boolean value) or not. - */ - setStatus(userConsent: boolean): boolean; - /** - * Gets the user consent status. - * - * @function getStatus - * @returns {ConsentStatus} The user consent status. - */ - getStatus(): SplitIO.ConsentStatus; - /** - * Consent status constants. Use this to compare with the getStatus function result. - */ - Status: { - [status in SplitIO.ConsentStatus]: SplitIO.ConsentStatus - } -} -/** - * Common settings between Browser and NodeJS settings interface. - * @interface ISharedSettings - */ -interface ISharedSettings { - /** - * Boolean value to indicate whether the logger should be enabled or disabled by default, or a log level string or a Logger object. - * Passing a logger object is required to get descriptive log messages. Otherwise most logs will print with message codes. - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#logging} - * - * Examples: - * ```typescript - * config.debug = true - * config.debug = 'WARN' - * config.debug = ErrorLogger() - * ``` - * @property {boolean | LogLevel | ILogger} debug - * @default false - */ - debug?: boolean | LogLevel | SplitIO.ILogger, - /** - * The impression listener, which is optional. Whatever you provide here needs to comply with the SplitIO.IImpressionListener interface, - * which will check for the logImpression method. - * @property {IImpressionListener} impressionListener - * @default undefined - */ - impressionListener?: SplitIO.IImpressionListener, - /** - * Boolean flag to enable the streaming service as default synchronization mechanism. In the event of any issue with streaming, - * the SDK would fallback to the polling mechanism. If false, the SDK would poll for changes as usual without attempting to use streaming. - * @property {boolean} streamingEnabled - * @default true - */ - streamingEnabled?: boolean, - /** - * SDK synchronization settings. - * @property {Object} sync - */ - sync?: { - /** - * List of feature flag filters. 
These filters are used to fetch a subset of the feature flag definitions in your environment, in order to reduce the delay of the SDK to be ready. - * This configuration is only meaningful when the SDK is working in "standalone" mode. - * - * Example: - * `splitFilter: [ - * { type: 'byName', values: ['my_feature_flag_1', 'my_feature_flag_2'] }, // will fetch feature flags named 'my_feature_flag_1' and 'my_feature_flag_2' - * ]` - * @property {SplitIO.SplitFilter[]} splitFilters - */ - splitFilters?: SplitIO.SplitFilter[] - /** - * Impressions Collection Mode. Option to determine how impressions are going to be sent to Split servers. - * Possible values are 'DEBUG', 'OPTIMIZED', and 'NONE'. - * - DEBUG: will send all the impressions generated (recommended only for debugging purposes). - * - OPTIMIZED: will send unique impressions to Split servers, avoiding a considerable amount of traffic that duplicated impressions could generate. - * - NONE: will send unique keys evaluated per feature to Split servers instead of full blown impressions, avoiding a considerable amount of traffic that impressions could generate. - * - * @property {String} impressionsMode - * @default 'OPTIMIZED' - */ - impressionsMode?: SplitIO.ImpressionsMode, - /** - * Defines the factory function to instantiate the SDK in localhost mode. - * - * NOTE: this is only required if using the slim entry point of the library to init the SDK in localhost mode. - * - * For more information @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode} - * - * Example: - * ```typescript - * SplitFactory({ - * ... - * sync: { - * localhostMode: LocalhostFromObject() - * } - * }) - * ``` - * @property {Object} localhostMode - */ - localhostMode?: SplitIO.LocalhostFactory - /** - * Controls the SDK continuous synchronization flags. - * - * When `true` a running SDK will process rollout plan updates performed on the UI (default). - * When false it'll just fetch all data upon init. - * - * @property {boolean} enabled - * @default true - */ - enabled?: boolean - /** - * Custom options object for HTTP(S) requests in the Browser. - * If provided, this object is merged with the options object passed by the SDK for EventSource and Fetch calls. - * This configuration has no effect in "consumer" mode, as no HTTP(S) requests are made by the SDK. - */ - requestOptions?: { - /** - * Custom function called before each request, allowing you to add or update headers in SDK HTTP requests. - * Some headers, such as `SplitSDKVersion`, are required by the SDK and cannot be overridden. - * To pass multiple headers with the same name, combine their values into a single line, separated by commas. Example: `{ 'Authorization': 'value1, value2' }` - * Or provide keys with different case since headers are case-insensitive. Example: `{ 'authorization': 'value1', 'Authorization': 'value2' }` - * - * NOTE: to pass custom headers to the streaming connection in Browser, you should polyfill the `window.EventSource` object with a library that supports headers, - * like https://www.npmjs.com/package/event-source-polyfill, since native EventSource does not support them and will be ignored. - * - * @property getHeaderOverrides - * @default undefined - * - * @param context - The context for the request. - * @param context.headers - The current headers in the request. - * @returns A set of headers to be merged with the current headers. 
- * - * @example - * const getHeaderOverrides = (context) => { - * return { - * 'Authorization': context.headers['Authorization'] + ', other-value', - * 'custom-header': 'custom-value' - * }; - * }; - */ - getHeaderOverrides?: (context: { headers: Record }) => Record - }, - } -} -/** - * Common API for entities that expose status handlers. - * @interface IStatusInterface - * @extends IEventEmitter - */ -interface IStatusInterface extends IEventEmitter { - /** - * Constant object containing the SDK events for you to use. - * @property {EventConsts} Event - */ - Event: EventConsts, - /** - * Returns a promise that resolves once the SDK has finished loading (SDK_READY event emitted) or rejected if the SDK has timedout (SDK_READY_TIMED_OUT event emitted). - * As it's meant to provide similar flexibility to the event approach, given that the SDK might be eventually ready after a timeout event, calling the `ready` method after the - * SDK had timed out will return a new promise that should eventually resolve if the SDK gets ready. - * - * Caveats: the method was designed to avoid an unhandled Promise rejection if the rejection case is not handled, so that `onRejected` handler is optional when using promises. - * However, when using async/await syntax, the rejection should be explicitly propagated like in the following example: - * ``` - * try { - * await client.ready().catch((e) => { throw e; }); - * // SDK is ready - * } catch(e) { - * // SDK has timedout - * } - * ``` - * - * @function ready - * @returns {Promise} - */ - ready(): Promise -} -/** - * Common definitions between clients for different environments interface. - * @interface IBasicClient - * @extends IStatusInterface - */ -interface IBasicClient extends IStatusInterface { - /** - * Destroys the client instance. - * - * In 'standalone' and 'partial consumer' modes, this method will flush any pending impressions and events. - * In 'standalone' mode, it also stops the synchronization of feature flag definitions with the backend. - * In 'consumer' and 'partial consumer' modes, this method also disconnects the SDK from the Pluggable storage. - * - * @function destroy - * @returns {Promise} A promise that resolves once the client is destroyed. - */ - destroy(): Promise -} -/** - * Common definitions between SDK instances for different environments interface. - * @interface IBasicSDK - */ -interface IBasicSDK { - /** - * Current settings of the SDK instance. - * @property settings - */ - settings: ISettings, - /** - * Logger API. - * @property Logger - */ - Logger: ILoggerAPI, - /** - * User consent API. - * @property UserConsent - */ - UserConsent: IUserConsentAPI -} -/****** Exposed namespace ******/ -/** - * Types and interfaces for @splitsoftware/splitio-browserjs package for usage when integrating javascript browser sdk on typescript apps. - * For the SDK package information - * @see {@link https://www.npmjs.com/package/@splitsoftware/splitio-browserjs} - */ -declare namespace SplitIO { - /** - * Feature flag treatment value, returned by getTreatment. - * @typedef {string} Treatment - */ - type Treatment = string; - /** - * Feature flag treatment promise that resolves to actual treatment value. - * @typedef {Promise} AsyncTreatment - */ - type AsyncTreatment = Promise; - /** - * An object with the treatments for a bulk of feature flags, returned by getTreatments. 
For example: - * { - * feature1: 'on', - * feature2: 'off - * } - * @typedef {Object.} Treatments - */ - type Treatments = { - [featureName: string]: Treatment - }; - /** - * Feature flags treatments promise that resolves to the actual SplitIO.Treatments object. - * @typedef {Promise} AsyncTreatments - */ - type AsyncTreatments = Promise; - /** - * Feature flag evaluation result with treatment and configuration, returned by getTreatmentWithConfig. - * @typedef {Object} TreatmentWithConfig - * @property {string} treatment The treatment string - * @property {string | null} config The stringified version of the JSON config defined for that treatment, null if there is no config for the resulting treatment. - */ - type TreatmentWithConfig = { - treatment: string, - config: string | null - }; - /** - * Feature flag treatment promise that resolves to actual treatment with config value. - * @typedef {Promise} AsyncTreatmentWithConfig - */ - type AsyncTreatmentWithConfig = Promise; - /** - * An object with the treatments with configs for a bulk of feature flags, returned by getTreatmentsWithConfig. - * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. For example: - * { - * feature1: { treatment: 'on', config: null } - * feature2: { treatment: 'off', config: '{"bannerText":"Click here."}' } - * } - * @typedef {Object.} Treatments - */ - type TreatmentsWithConfig = { - [featureName: string]: TreatmentWithConfig - }; - /** - * Feature flags treatments promise that resolves to the actual SplitIO.TreatmentsWithConfig object. - * @typedef {Promise} AsyncTreatmentsWithConfig - */ - type AsyncTreatmentsWithConfig = Promise; - /** - * Possible Split SDK events. - * @typedef {string} Event - */ - type Event = 'init::timeout' | 'init::ready' | 'init::cache-ready' | 'state::update'; - /** - * Attributes should be on object with values of type string or number (dates should be sent as millis since epoch). - * @typedef {Object.} Attributes - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#attribute-syntax} - */ - type Attributes = { - [attributeName: string]: AttributeType - }; - /** - * Type of an attribute value - * @typedef {string | number | boolean | Array} AttributeType - */ - type AttributeType = string | number | boolean | Array; - /** - * Properties should be an object with values of type string, number, boolean or null. Size limit of ~31kb. - * @typedef {Object.} Attributes - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#track - */ - type Properties = { - [propertyName: string]: string | number | boolean | null - }; - /** - * The customer identifier represented by a string. - * @typedef {string} SplitKey - */ - type SplitKey = string; - /** - * Path to file with mocked features (for node). - * @typedef {string} MockedFeaturesFilePath - */ - type MockedFeaturesFilePath = string; - /** - * Object with mocked features mapping (for browser). We need to specify the featureName as key, and the mocked treatment as value. - * @typedef {Object} MockedFeaturesMap - */ - type MockedFeaturesMap = { - [featureName: string]: string | TreatmentWithConfig - }; - /** - * Localhost types. - * @typedef {string} LocalhostType - */ - type LocalhostType = 'LocalhostFromObject' - /** - * Object with information about an impression. It contains the generated impression DTO as well as - * complementary information around where and how it was generated in that way. 
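The treatment aliases removed above (`Treatments`, `TreatmentWithConfig`, `TreatmentsWithConfig`) now come from the commons type definitions, and they surface through the client evaluation methods. A short sketch, assuming their shape carries over unchanged; flag names and attributes are illustrative.

```typescript
import { SplitFactory } from '@splitsoftware/splitio-browserjs';

const client = SplitFactory({
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user-123' }
}).client();

async function evaluate() {
  // See the ready() caveat above: with async/await the rejection must be
  // handled explicitly, otherwise a timeout surfaces as an unhandled error.
  try {
    await client.ready().catch((e) => { throw e; });
  } catch (e) {
    return; // SDK timed out; handle accordingly
  }

  // A single treatment string, optionally evaluated with attributes.
  const treatment = client.getTreatment('my_feature_flag', { plan: 'premium', registered: true });

  // TreatmentsWithConfig: { [flagName]: { treatment, config } } for several flags at once.
  const withConfig = client.getTreatmentsWithConfig(['flag_a', 'flag_b']);
  console.log(treatment, withConfig.flag_a.treatment, withConfig.flag_a.config);
}

evaluate();
```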
- * @typedef {Object} ImpressionData - */ - type ImpressionData = { - impression: { - feature: string, - keyName: string, - treatment: string, - time: number, - bucketingKey?: string, - label: string, - changeNumber: number, - pt?: number, - }, - attributes?: SplitIO.Attributes, - ip: string, - hostname: string, - sdkLanguageVersion: string - }; - /** - * Data corresponding to one feature flag view. - * @typedef {Object} SplitView - */ - type SplitView = { - /** - * The name of the feature flag. - * @property {string} name - */ - name: string, - /** - * The traffic type of the feature flag. - * @property {string} trafficType - */ - trafficType: string, - /** - * Whether the feature flag is killed or not. - * @property {boolean} killed - */ - killed: boolean, - /** - * The list of treatments available for the feature flag. - * @property {Array} treatments - */ - treatments: Array, - /** - * Current change number of the feature flag. - * @property {number} changeNumber - */ - changeNumber: number, - /** - * Map of configurations per treatment. - * Each existing configuration is a stringified version of the JSON you defined on the Split user interface. - * @property {Object.} configs - */ - configs: { - [treatmentName: string]: string - }, - /** - * List of sets of the feature flag. - * @property {string[]} sets - */ - sets: string[], - /** - * The default treatment of the feature flag. - * @property {string} defaultTreatment - */ - defaultTreatment: string, - }; - /** - * A promise that resolves to a feature flag view. - * @typedef {Promise} SplitView - */ - type SplitViewAsync = Promise; - /** - * An array containing the SplitIO.SplitView elements. - */ - type SplitViews = Array; - /** - * A promise that resolves to an SplitIO.SplitViews array. - * @typedef {Promise} SplitViewsAsync - */ - type SplitViewsAsync = Promise; - /** - * An array of feature flag names. - * @typedef {Array} SplitNames - */ - type SplitNames = Array; - /** - * A promise that resolves to an array of feature flag names. - * @typedef {Promise} SplitNamesAsync - */ - type SplitNamesAsync = Promise; - /** - * Storage for synchronous (standalone) SDK. - * Its interface details are not part of the public API. - */ - type StorageSync = {}; - /** - * Storage builder for synchronous (standalone) SDK. - * By returning undefined, the SDK will use the default IN MEMORY storage. - * Input parameter details are not part of the public API. - */ - type StorageSyncFactory = { - readonly type: StorageType - (params: {}): (StorageSync | undefined) - } - /** - * Configuration params for `InLocalStorage` - */ - type InLocalStorageOptions = { - /** - * Optional prefix to prevent any kind of data collision when having multiple factories using the same storage type. - * @property {string} prefix - * @default 'SPLITIO' - */ - prefix?: string - } - /** - * Storage for asynchronous (consumer) SDK. - * Its interface details are not part of the public API. - */ - type StorageAsync = {} - /** - * Storage builder for asynchronous (consumer) SDK. - * Input parameter details are not part of the public API. - */ - type StorageAsyncFactory = { - readonly type: 'PLUGGABLE' - (params: {}): StorageAsync - } - /** - * Configuration params for `PluggableStorage` - */ - type PluggableStorageOptions = { - /** - * Optional prefix to prevent any kind of data collision when having multiple factories using the same storage wrapper. - * @property {string} prefix - * @default 'SPLITIO' - */ - prefix?: string, - /** - * Storage wrapper. 
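`SplitView` is the shape returned by the manager API. A brief sketch of how those fields are read, assuming a factory configured as in the earlier examples:

```typescript
import { SplitFactory } from '@splitsoftware/splitio-browserjs';

const manager = SplitFactory({
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user-123' }
}).manager();

manager.ready().then(() => {
  // Every feature flag name known to this environment.
  const names = manager.names();

  // One SplitView: name, trafficType, killed, treatments, changeNumber,
  // configs, sets and defaultTreatment.
  const view = manager.split(names[0]);
  if (view) console.log(view.name, view.treatments, view.defaultTreatment);

  // All views at once.
  console.log(manager.splits().length);
});
```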
- * @property {Object} wrapper - */ - wrapper: Object - } - /** - * Localhost mode factory. - * Its interface details are not part of the public API. - */ - type LocalhostFactory = { - readonly type: LocalhostType - (params: {}): {} - } - /** - * Impression listener interface. This is the interface that needs to be implemented - * by the element you provide to the SDK as impression listener. - * @interface IImpressionListener - * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#listener} - */ - interface IImpressionListener { - logImpression(data: SplitIO.ImpressionData): void - } - /** - * SDK integration instance. - * Its interface details are not part of the public API. - */ - type Integration = {}; - /** - * SDK integration factory. - * By returning an integration, the SDK will queue events and impressions into it. - * Input parameter details are not part of the public API. - */ - type IntegrationFactory = { - readonly type: string - (params: {}): (Integration | void) - } - /** - * A pair of user key and it's trafficType, required for tracking valid Split events. - * @typedef {Object} Identity - * @property {string} key The user key. - * @property {string} trafficType The key traffic type. - */ - type Identity = { - key: string; - trafficType: string; - }; - /** - * Object with information about a Split event. - * @typedef {Object} EventData - */ - type EventData = { - eventTypeId: string; - value?: number; - properties?: Properties; - trafficTypeName?: string; - key?: string; - timestamp?: number; - }; - /** - * Configuration params for 'Google Analytics to Split' integration plugin, to track Google Analytics hits as Split events. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#google-analytics-to-split} - */ - interface GoogleAnalyticsToSplitOptions { - /** - * Optional flag to filter GA hits from being tracked as Split events. - * @property {boolean} hits - * @default true - */ - hits?: boolean, - /** - * Optional predicate used to define a custom filter for tracking GA hits as Split events. - * For example, the following filter allows to track only 'event' hits: - * `(model) => model.get('hitType') === 'event'` - * By default, all hits are tracked as Split events. - */ - filter?: (model: UniversalAnalytics.Model) => boolean, - /** - * Optional function useful when you need to modify the Split event before tracking it. - * This function is invoked with two arguments: - * 1. the GA model object representing the hit. - * 2. the default format of the mapped Split event instance. - * The return value must be a Split event, that can be the second argument or a new object. - * - * For example, the following mapper adds a custom property to events: - * `(model, defaultMapping) => { - * defaultMapping.properties.someProperty = SOME_VALUE; - * return defaultMapping; - * }` - */ - mapper?: (model: UniversalAnalytics.Model, defaultMapping: SplitIO.EventData) => SplitIO.EventData, - /** - * Optional prefix for EventTypeId, to prevent any kind of data collision between events. - * @property {string} prefix - * @default 'ga' - */ - prefix?: string, - /** - * List of Split identities (key & traffic type pairs) used to track events. - * If not provided, events are sent using the key and traffic type provided at SDK config. - */ - identities: Identity[], - /** - * Optional flag to log an error if the `auto-require` script is not detected. 
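`IImpressionListener` and `EventData` cover the two data flows out of the SDK: impressions observed locally and events tracked to Split. A combined sketch; the traffic type, event name and properties are illustrative, and the listener's `data` parameter relies on contextual typing from the settings object.

```typescript
import { SplitFactory } from '@splitsoftware/splitio-browserjs';

const client = SplitFactory({
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user-123' },
  impressionListener: {
    logImpression(data) {
      // `data` follows the ImpressionData shape: the impression DTO plus
      // attributes, ip, hostname and SDK language/version.
      console.log(`${data.impression.feature} -> ${data.impression.treatment} (${data.impression.label})`);
    }
  }
}).client();

client.on(client.Event.SDK_READY, () => {
  client.getTreatment('my_feature_flag'); // generates one impression

  // track(trafficType, eventTypeId, value?, properties?) returns whether the event was queued.
  const queued = client.track('user', 'checkout.completed', 54.99, { currency: 'usd' });
  console.log(queued);
});
```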
- * The auto-require script automatically requires the `splitTracker` plugin for created trackers, - * and should be placed right after your Google Analytics, Google Tag Manager or gtag.js script tag. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#set-up-with-gtm-and-gtag.js} - * - * @property {boolean} autoRequire - * @default false - */ - autoRequire?: boolean, - } - /** - * Object representing the data sent by Split (events and impressions). - * @typedef {Object} IntegrationData - * @property {string} type The type of Split data, either 'IMPRESSION' or 'EVENT'. - * @property {ImpressionData | EventData} payload The data instance itself. - */ - type IntegrationData = { type: 'IMPRESSION', payload: SplitIO.ImpressionData } | { type: 'EVENT', payload: SplitIO.EventData }; - /** - * Configuration params for 'Split to Google Analytics' integration plugin, to track Split impressions and events as Google Analytics hits. - * - * @see {@link https://help.split.io/hc/en-us/articles/360040838752#split-to-google-analytics} - */ - interface SplitToGoogleAnalyticsOptions { - /** - * Optional flag to filter Split impressions from being tracked as GA hits. - * @property {boolean} impressions - * @default true - */ - impressions?: boolean, - /** - * Optional flag to filter Split events from being tracked as GA hits. - * @property {boolean} events - * @default true - */ - events?: boolean, - /** - * Optional predicate used to define a custom filter for tracking Split data (events and impressions) as GA hits. - * For example, the following filter allows to track only impressions, equivalent to setting events to false: - * `(data) => data.type === 'IMPRESSION'` - */ - filter?: (data: SplitIO.IntegrationData) => boolean, - /** - * Optional function useful when you need to modify the GA hit before sending it. - * This function is invoked with two arguments: - * 1. the input data (Split event or impression). - * 2. the default format of the mapped FieldsObject instance (GA hit). - * The return value must be a FieldsObject, that can be the second argument or a new object. - * - * For example, the following mapper adds a custom dimension to hits: - * `(data, defaultMapping) => { - * defaultMapping.dimension1 = SOME_VALUE; - * return defaultMapping; - * }` - * - * Default FieldsObject instance for data.type === 'IMPRESSION': - * `{ - * hitType: 'event', - * eventCategory: 'split-impression', - * eventAction: 'Evaluate ' + data.payload.impression.feature, - * eventLabel: 'Treatment: ' + data.payload.impression.treatment + '. Targeting rule: ' + data.payload.impression.label + '.', - * nonInteraction: true, - * }` - * Default FieldsObject instance for data.type === 'EVENT': - * `{ - * hitType: 'event', - * eventCategory: 'split-event', - * eventAction: data.payload.eventTypeId, - * eventValue: data.payload.value, - * nonInteraction: true, - * }` - */ - mapper?: (data: SplitIO.IntegrationData, defaultMapping: UniversalAnalytics.FieldsObject) => UniversalAnalytics.FieldsObject, - /** - * List of tracker names to send the hit. An empty string represents the default tracker. - * If not provided, hits are only sent to default tracker. - */ - trackerNames?: string[], - } - /** - * Available URL settings for the SDKs. - */ - type UrlSettings = { - /** - * String property to override the base URL where the SDK will get rollout plan related data, like feature flags and segments definitions. 
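`UrlSettings` matters mainly when routing the SDK through a Split Proxy or similar infrastructure, since each base URL can be overridden independently. A hedged sketch with a hypothetical proxy host; any URL left out keeps its default.

```typescript
import { SplitFactory } from '@splitsoftware/splitio-browserjs';

// All endpoints pointed at a hypothetical Split Proxy deployment.
const factory = SplitFactory({
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user-123' },
  urls: {
    sdk: 'https://split-proxy.mycompany.example/api',
    events: 'https://split-proxy.mycompany.example/api',
    auth: 'https://split-proxy.mycompany.example/api',
    streaming: 'https://split-proxy.mycompany.example',
    telemetry: 'https://split-proxy.mycompany.example/api'
  }
});
```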
- * @property {string} sdk - * @default 'https://sdk.split.io/api' - */ - sdk?: string, - /** - * String property to override the base URL where the SDK will post event-related information like impressions. - * @property {string} events - * @default 'https://events.split.io/api' - */ - events?: string, - /** - * String property to override the base URL where the SDK will get authorization tokens to be used with functionality that requires it, like streaming. - * @property {string} auth - * @default 'https://auth.split.io/api' - */ - auth?: string, - /** - * String property to override the base URL where the SDK will connect to receive streaming updates. - * @property {string} streaming - * @default 'https://streaming.split.io' - */ - streaming?: string, - /** - * String property to override the base URL where the SDK will post telemetry data. - * @property {string} telemetry - * @default 'https://telemetry.split.io/api' - */ - telemetry?: string - }; - - /** - * SplitFilter type. - * - * @typedef {string} SplitFilterType - */ - type SplitFilterType = 'bySet' | 'byName' | 'byPrefix'; - /** - * Defines a feature flag filter, described by a type and list of values. - */ - interface SplitFilter { - /** - * Type of the filter. - * - * @property {SplitFilterType} type - */ - type: SplitFilterType, - /** - * List of values: feature flag names for 'byName' filter type, and feature flag name prefixes for 'byPrefix' type. - * - * @property {string[]} values - */ - values: string[], - } - /** - * ImpressionsMode type - * @typedef {string} ImpressionsMode - */ - type ImpressionsMode = 'OPTIMIZED' | 'DEBUG' | 'NONE'; - /** - * User consent status. - * @typedef {string} ConsentStatus - */ - type ConsentStatus = 'GRANTED' | 'DECLINED' | 'UNKNOWN'; - /** - * Logger - * Its interface details are not part of the public API. It shouldn't be used directly. - * @interface ILogger - */ - interface ILogger { - setLogLevel(logLevel: LogLevel): void - } - /** - * Common settings interface for SDK instances created on the browser. - * @interface IBrowserBasicSettings - * @extends ISharedSettings - */ - interface IBrowserBasicSettings extends ISharedSettings { - /** - * SDK Core settings for the browser. - * @property {Object} core - */ - core: { - /** - * Your SDK key. More information: @see {@link https://help.split.io/hc/en-us/articles/360019916211-API-keys} - * @property {string} authorizationKey - */ - authorizationKey: string, - /** - * Customer identifier. Whatever this means to you. @see {@link https://help.split.io/hc/en-us/articles/360019916311-Traffic-type} - * @property {SplitKey} key - */ - key: SplitKey, - /** - * Disable labels from being sent to Split backend. Labels may contain sensitive information. - * @property {boolean} labelsEnabled - * @default true - */ - labelsEnabled?: boolean - }, - /** - * List of URLs that the SDK will use as base for it's synchronization functionalities, applicable only when running as standalone. - * Do not change these settings unless you're working an advanced use case, like connecting to the Split proxy. - * @property {Object} urls - */ - urls?: UrlSettings, - /** - * Defines an optional list of factory functions used to instantiate SDK integrations. - * - * Example: - * ```typescript - * SplitFactory({ - * ... - * integrations: [SplitToGoogleAnalytics(), GoogleAnalyticsToSplit()] - * }) - * ``` - * @property {Object} integrations - */ - integrations?: IntegrationFactory[], - /** - * User consent status. 
Possible values are `'GRANTED'`, which is the default, `'DECLINED'` or `'UNKNOWN'`.
- * - `'GRANTED'`: the user grants consent for tracking events and impressions. The SDK sends them to Split cloud.
- * - `'DECLINED'`: the user declines consent for tracking events and impressions. The SDK does not send them to Split cloud.
- * - `'UNKNOWN'`: the user neither grants nor declines consent for tracking events and impressions. The SDK tracks them in its internal storage, and eventually either sends
- * them or not if the consent status is updated to 'GRANTED' or 'DECLINED' respectively. The status can be updated at any time with the `UserConsent.setStatus` factory method.
- *
- * @typedef {string} userConsent
- * @default 'GRANTED'
- */
- userConsent?: ConsentStatus
- }
- /**
- * Settings interface for SDK instances created on the browser.
- * @interface IBrowserSettings
- * @extends ISharedSettings
- * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration}
- */
- interface IBrowserSettings extends IBrowserBasicSettings {
- /**
- * The SDK mode. When using the default in memory storage or `InLocalStorage` as storage, the only possible value is "standalone", which is the default.
- * For "localhost" mode, use "localhost" as authorizationKey.
- *
- * @property {'standalone'} mode
- * @default 'standalone'
- */
- mode?: 'standalone',
- /**
- * Mocked features map. For testing purposes only. To use this, specify "localhost" as authorizationKey on core settings.
- * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode}
- */
- features?: MockedFeaturesMap,
- /**
- * Defines the factory function to instantiate the storage. If not provided, the default IN MEMORY storage is used.
- *
- * Example:
- * ```typescript
- * SplitFactory({
- * ...
- * storage: InLocalStorage()
- * })
- * ```
- * @property {Object} storage
- */
- storage?: StorageSyncFactory,
- /**
- * SDK Startup settings for the Browser.
- * @property {Object} startup
- */
- startup?: {
- /**
- * Maximum amount of time to wait before notifying a timeout.
- * @property {number} readyTimeout
- * @default 1.5
- */
- readyTimeout?: number,
- /**
- * Time to wait for a request before the SDK is ready. If this time expires, the SDK will retry 'retriesOnFailureBeforeReady' times before notifying its failure to be 'ready'.
- * @property {number} requestTimeoutBeforeReady
- * @default 1.5
- */
- requestTimeoutBeforeReady?: number,
- /**
- * How many quick retries we will do while starting up the SDK.
- * @property {number} retriesOnFailureBeforeReady
- * @default 1
- */
- retriesOnFailureBeforeReady?: number,
- /**
- * The SDK posts the queued events data in bulks at a given rate, but the first push window is defined separately,
- * to allow better control on browsers. This number defines that window before the first events push.
- *
- * @property {number} eventsFirstPushWindow
- * @default 10
- */
- eventsFirstPushWindow?: number,
- },
- /**
- * SDK scheduler settings.
- * @property {Object} scheduler
- */
- scheduler?: {
- /**
- * The SDK polls Split servers for changes to feature flag definitions. This parameter controls this polling period in seconds.
- * @property {number} featuresRefreshRate
- * @default 60
- */
- featuresRefreshRate?: number,
- /**
- * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds.
- * @property {number} impressionsRefreshRate
- * @default 60
- */
- impressionsRefreshRate?: number,
- /**
- * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer.
- * If you use a 0 here, the queue will have no maximum size.
- * @property {number} impressionsQueueSize
- * @default 30000
- */
- impressionsQueueSize?: number,
- /**
- * The SDK sends diagnostic metrics to Split servers. This parameter controls this metric flush period in seconds.
- * @property {number} telemetryRefreshRate
- * @default 3600
- */
- telemetryRefreshRate?: number,
- /**
- * The SDK polls Split servers for changes to segment definitions. This parameter controls this polling period in seconds.
- * @property {number} segmentsRefreshRate
- * @default 60
- */
- segmentsRefreshRate?: number,
- /**
- * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds.
- * @property {number} eventsPushRate
- * @default 60
- */
- eventsPushRate?: number,
- /**
- * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer.
- * If you use a 0 here, the queue will have no maximum size.
- * @property {number} eventsQueueSize
- * @default 500
- */
- eventsQueueSize?: number,
- /**
- * For mocking/testing only. The SDK will refresh the mocked features data at this rate, in seconds, when the mode is set to "localhost".
- * For more information @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#localhost-mode}
- * @property {number} offlineRefreshRate
- * @default 15
- */
- offlineRefreshRate?: number,
- /**
- * When using streaming mode, seconds to wait before re-attempting to connect for push notifications.
- * Next attempts follow intervals in powers of two: base seconds, base x 2 seconds, base x 4 seconds, ...
- * @property {number} pushRetryBackoffBase
- * @default 1
- */
- pushRetryBackoffBase?: number,
- }
- }
- /**
- * Settings interface with async storage for SDK instances created on the browser.
- * If your storage is synchronous (by default we use memory, which is sync) use SplitIO.IBrowserSettings instead.
- * @interface IBrowserAsyncSettings
- * @extends IBrowserBasicSettings
- * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#configuration}
- */
- interface IBrowserAsyncSettings extends IBrowserBasicSettings {
- /**
- * The SDK mode. When using `PluggableStorage` as storage, the possible values are "consumer" and "consumer_partial".
- *
- * @see {@link https://help.split.io/hc/en-us/articles/360058730852-Browser-SDK#sharing-state-with-a-pluggable-storage}
- *
- * @property {'consumer' | 'consumer_partial'} mode
- */
- mode: 'consumer' | 'consumer_partial',
- /**
- * Defines the factory function to instantiate the storage.
- *
- * Example:
- * ```typescript
- * SplitFactory({
- * ...
- * storage: PluggableStorage({ wrapper: SomeWrapper })
- * })
- * ```
- * @property {Object} storage
- */
- storage: StorageAsyncFactory,
- /**
- * SDK Startup settings for the Browser.
- * @property {Object} startup
- */
- startup?: {
- /**
- * Maximum amount of time to wait before notifying a timeout.
- * @property {number} readyTimeout
- * @default 1.5
- */
- readyTimeout?: number,
- /**
- * The SDK posts the queued events data in bulks at a given rate, but the first push window is defined separately,
- * to allow better control on browsers. This number defines that window before the first events push.
- *
- * NOTE: this param is ignored in 'consumer' mode.
- * @property {number} eventsFirstPushWindow
- * @default 10
- */
- eventsFirstPushWindow?: number,
- },
- /**
- * SDK scheduler settings.
- * @property {Object} scheduler
- */
- scheduler?: {
- /**
- * The SDK sends information on who got what treatment at what time back to Split servers to power analytics. This parameter controls how often this data is sent to Split servers. The parameter should be in seconds.
- *
- * NOTE: this param is ignored in 'consumer' mode.
- * @property {number} impressionsRefreshRate
- * @default 60
- */
- impressionsRefreshRate?: number,
- /**
- * The maximum number of impression items we want to queue. If we queue more values, it will trigger a flush and reset the timer.
- * If you use a 0 here, the queue will have no maximum size.
- * @property {number} impressionsQueueSize
- * @default 30000
- */
- impressionsQueueSize?: number,
- /**
- * The SDK sends diagnostic metrics to Split servers. This parameter controls this metric flush period in seconds.
- * @property {number} telemetryRefreshRate
- * @default 3600
- */
- telemetryRefreshRate?: number,
- /**
- * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds.
- *
- * NOTE: this param is ignored in 'consumer' mode.
- * @property {number} eventsPushRate
- * @default 60
- */
- eventsPushRate?: number,
- /**
- * The maximum number of event items we want to queue. If we queue more values, it will trigger a flush and reset the timer.
- * If you use a 0 here, the queue will have no maximum size.
- *
- * NOTE: this param is ignored in 'consumer' mode.
- * @property {number} eventsQueueSize
- * @default 500
- */
- eventsQueueSize?: number,
- }
- }
- /**
- * This represents the interface for the SDK instance with synchronous storage and client-side API,
- * i.e., where client instances have a bound user key.
- * @interface ISDK
- * @extends IBasicSDK
- */
- interface ISDK extends IBasicSDK {
- /**
- * Returns the default client instance of the SDK, associated with the key provided on settings.
- * @function client
- * @returns {IClient} The client instance.
- */
- client(): IClient,
- /**
- * Returns a shared client of the SDK, associated with the given key.
- * @function client
- * @param {SplitKey} key The key for the new client instance.
- * @returns {IClient} The client instance.
- */
- client(key: SplitKey): IClient,
- /**
- * Returns a manager instance of the SDK to explore available information.
- * @function manager
- * @returns {IManager} The manager instance.
- */
- manager(): IManager
- }
- /**
- * This represents the interface for the SDK instance with asynchronous storage and client-side API,
- * i.e., where client instances have a bound user key.
- * @interface IAsyncSDK
- * @extends IBasicSDK
- */
- interface IAsyncSDK extends IBasicSDK {
- /**
- * Returns the default client instance of the SDK, associated with the key provided on settings.
- * @function client
- * @returns {IAsyncClient} The asynchronous client instance.
- */
- client(): IAsyncClient,
- /**
- * Returns a shared client of the SDK, associated with the given key.
- * @function client
- * @param {SplitKey} key The key for the new client instance.
- * @returns {IAsyncClient} The asynchronous client instance.
- */
- client(key: SplitKey): IAsyncClient,
- /**
- * Returns a manager instance of the SDK to explore available information.
- * @function manager
- * @returns {IAsyncManager} The manager instance.
- */ - manager(): IAsyncManager - } - /** - * This represents the interface for the Client instance with synchronous storage for server-side SDK, where we don't have only one key. - * @interface IClient - * @extends IBasicClient - */ - interface IClientSS extends IBasicClient { - /** - * Returns a Treatment value, which is the treatment string for the given feature. - * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. - */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): Treatment, - /** - * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentWithConfig} The TreatmentWithConfig, the object containing the treatment string and the - * configuration stringified JSON (or null if there was no config for that treatment). - */ - getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the given features. - * @function getTreatments - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. - */ - getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * @function getTreatmentsWithConfig - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. - * @function getTreatmentsByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
- * @returns {Treatments} The map with all the Treatment objects
- */
- getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): Treatments,
- /**
- * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set.
- * @function getTreatmentsWithConfigByFlagSet
- * @param {string} key - The string key representing the consumer.
- * @param {string} flagSet - The flag set name we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects
- */
- getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): TreatmentsWithConfig,
- /**
- * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets.
- * @function getTreatmentsByFlagSets
- * @param {string} key - The string key representing the consumer.
- * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {Treatments} The map with all the Treatment objects
- */
- getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): Treatments,
- /**
- * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets.
- * @function getTreatmentsWithConfigByFlagSets
- * @param {string} key - The string key representing the consumer.
- * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects
- */
- getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): TreatmentsWithConfig,
- /**
- * Tracks an event to be fed to the results product on Split user interface.
- * @function track
- * @param {SplitKey} key - The key that identifies the entity related to this event.
- * @param {string} trafficType - The traffic type of the entity related to this event.
- * @param {string} eventType - The event type corresponding to this event.
- * @param {number=} value - The value of this event.
- * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null.
- * @returns {boolean} Whether the event was added to the queue successfully or not.
- */
- track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): boolean,
- }
- /**
- * This represents the interface for the Client instance with asynchronous storage for server-side SDK, where we don't have only one key.
- * @interface IAsyncClient
- * @extends IBasicClient
- */
- interface IAsyncClientSS extends IBasicClient {
- /**
- * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature.
- * For usage on NodeJS as we don't have only one key.
- * NOTE: Treatment will be a promise only in async storages, like REDIS.
- * @function getTreatment - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string. - */ - getTreatment(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatment, - /** - * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature. - * For usage on NodeJS as we don't have only one key. - * NOTE: Treatment will be a promise only in async storages, like REDIS. - * @function getTreatmentWithConfig - * @param {string} key - The string key representing the consumer. - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object. - */ - getTreatmentWithConfig(key: SplitKey, featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig, - /** - * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features. - * For usage on NodeJS as we don't have only one key. - * @function getTreatments - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. - */ - getTreatments(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatments, - /** - * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsWithConfig - * @param {string} key - The string key representing the consumer. - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects. - */ - getTreatmentsWithConfig(key: SplitKey, featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. - * For usage on NodeJS as we don't have only one key. - * @function getTreatmentsByFlagSet - * @param {string} key - The string key representing the consumer. - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map. 
- */
- getTreatmentsByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatments,
- /**
- * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set.
- * For usage on NodeJS as we don't have only one key.
- * @function getTreatmentsWithConfigByFlagSet
- * @param {string} key - The string key representing the consumer.
- * @param {string} flagSet - The flag set name we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects.
- */
- getTreatmentsWithConfigByFlagSet(key: SplitKey, flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig,
- /**
- * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets.
- * For usage on NodeJS as we don't have only one key.
- * @function getTreatmentsByFlagSets
- * @param {string} key - The string key representing the consumer.
- * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
- */
- getTreatmentsByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatments,
- /**
- * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets.
- * For usage on NodeJS as we don't have only one key.
- * @function getTreatmentsWithConfigByFlagSets
- * @param {string} key - The string key representing the consumer.
- * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the map of TreatmentsWithConfig objects.
- */
- getTreatmentsWithConfigByFlagSets(key: SplitKey, flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig,
- /**
- * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not).
- * @function track
- * @param {SplitKey} key - The key that identifies the entity related to this event.
- * @param {string} trafficType - The traffic type of the entity related to this event.
- * @param {string} eventType - The event type corresponding to this event.
- * @param {number=} value - The value of this event.
- * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null.
- * @returns {Promise<boolean>} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not.
- */
- track(key: SplitIO.SplitKey, trafficType: string, eventType: string, value?: number, properties?: Properties): Promise<boolean>
- }
- /**
- * This represents the interface for the Client instance with synchronous storage for client-side SDK, where each client has an associated key.
- * @interface IClient - * @extends IBasicClient - */ - interface IClient extends IBasicClient { - /** - * Returns a Treatment value, which is the treatment string for the given feature. - * @function getTreatment - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatment} The treatment string. - */ - getTreatment(featureFlagName: string, attributes?: Attributes): Treatment, - /** - * Returns a TreatmentWithConfig value, which is an object with both treatment and config string for the given feature. - * @function getTreatmentWithConfig - * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentWithConfig} The map containing the treatment and the configuration stringified JSON (or null if there was no config for that treatment). - */ - getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): TreatmentWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the given features. - * @function getTreatments - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The treatments object map. - */ - getTreatments(featureFlagNames: string[], attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features. - * @function getTreatmentsWithConfig - * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects - */ - getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): TreatmentsWithConfig, - /** - * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set. - * @function getTreatmentsByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. - * @returns {Treatments} The map with all the Treatments objects - */ - getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): Treatments, - /** - * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set. - * @function getTreatmentsWithConfigByFlagSet - * @param {string} flagSet - The flag set name we want to get the treatments. - * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key. 
- * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects
- */
- getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): TreatmentsWithConfig,
- /**
- * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets.
- * @function getTreatmentsByFlagSets
- * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {Treatments} The map with all the Treatments objects
- */
- getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): Treatments,
- /**
- * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets.
- * @function getTreatmentsWithConfigByFlagSets
- * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {TreatmentsWithConfig} The map with all the TreatmentWithConfig objects
- */
- getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): TreatmentsWithConfig,
- /**
- * Tracks an event to be fed to the results product on Split user interface.
- * @function track
- * @param {string} trafficType - The traffic type of the entity related to this event.
- * @param {string} eventType - The event type corresponding to this event.
- * @param {number=} value - The value of this event.
- * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null.
- * @returns {boolean} Whether the event was added to the queue successfully or not.
- */
- track(trafficType: string, eventType: string, value?: number, properties?: Properties): boolean,
- /**
- * Add an attribute to client's in memory attributes storage.
- *
- * @param {string} attributeName Attribute name
- * @param {AttributeType} attributeValue Attribute value
- * @returns {boolean} true if the attribute was stored and false otherwise
- */
- setAttribute(attributeName: string, attributeValue: AttributeType): boolean,
- /**
- * Returns the attribute with the given name.
- *
- * @param {string} attributeName Attribute name
- * @returns {AttributeType} Attribute with the given name
- */
- getAttribute(attributeName: string): AttributeType,
- /**
- * Removes from client's in memory attributes storage the attribute with the given name.
- *
- * @param {string} attributeName
- * @returns {boolean} true if attribute was removed and false otherwise
- */
- removeAttribute(attributeName: string): boolean,
- /**
- * Add to client's in memory attributes storage the attributes in 'attributes'.
- *
- * @param {Attributes} attributes Object with attributes to store
- * @returns {boolean} true if attributes were stored and false otherwise
- */
- setAttributes(attributes: Attributes): boolean,
- /**
- * Return all the attributes stored in client's in memory attributes storage.
- *
- * @returns {Attributes} returns all the stored attributes
- */
- getAttributes(): Attributes,
- /**
- * Remove all the stored attributes in the client's in memory attribute storage.
- *
- * @returns {boolean} true if all attributes were removed and false otherwise
- */
- clearAttributes(): boolean
- }
- /**
- * This represents the interface for the Client instance with asynchronous storage for client-side SDK, where each client has an associated key.
- * @interface IAsyncClient
- * @extends IBasicClient
- */
- interface IAsyncClient extends IBasicClient {
- /**
- * Returns a Treatment value, which will be (or eventually be) the treatment string for the given feature.
- * @function getTreatment
- * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatment} Treatment promise that resolves to the treatment string.
- */
- getTreatment(featureFlagName: string, attributes?: Attributes): AsyncTreatment,
- /**
- * Returns a TreatmentWithConfig value, which will be (or eventually be) an object with both treatment and config string for the given feature.
- * @function getTreatmentWithConfig
- * @param {string} featureFlagName - The string that represents the feature flag we want to get the treatment.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatmentWithConfig} TreatmentWithConfig promise that resolves to the TreatmentWithConfig object.
- */
- getTreatmentWithConfig(featureFlagName: string, attributes?: Attributes): AsyncTreatmentWithConfig,
- /**
- * Returns a Treatments value, which will be (or eventually be) an object map with the treatments for the given features.
- * @function getTreatments
- * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
- */
- getTreatments(featureFlagNames: string[], attributes?: Attributes): AsyncTreatments,
- /**
- * Returns a TreatmentsWithConfig value, which will be (or eventually be) an object map with the TreatmentWithConfig (an object with both treatment and config string) for the given features.
- * @function getTreatmentsWithConfig
- * @param {Array} featureFlagNames - An array of the feature flag names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object.
- */
- getTreatmentsWithConfig(featureFlagNames: string[], attributes?: Attributes): AsyncTreatmentsWithConfig,
- /**
- * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag set.
- * @function getTreatmentsByFlagSet
- * @param {string} flagSet - The flag set name we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
- */
- getTreatmentsByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatments,
- /**
- * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag set.
- * @function getTreatmentsWithConfigByFlagSet
- * @param {string} flagSet - The flag set name we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object.
- */
- getTreatmentsWithConfigByFlagSet(flagSet: string, attributes?: Attributes): AsyncTreatmentsWithConfig,
- /**
- * Returns a Treatments value, which is an object map with the treatments for the feature flags related to the given flag sets.
- * @function getTreatmentsByFlagSets
- * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatments} Treatments promise that resolves to the treatments object map.
- */
- getTreatmentsByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatments,
- /**
- * Returns a TreatmentsWithConfig value, which is an object map with the TreatmentWithConfig (an object with both treatment and config string) for the feature flags related to the given flag sets.
- * @function getTreatmentsWithConfigByFlagSets
- * @param {Array} flagSets - An array of the flag set names we want to get the treatments.
- * @param {Attributes=} attributes - An object of type Attributes defining the attributes for the given key.
- * @returns {AsyncTreatmentsWithConfig} TreatmentsWithConfig promise that resolves to the TreatmentsWithConfig object.
- */
- getTreatmentsWithConfigByFlagSets(flagSets: string[], attributes?: Attributes): AsyncTreatmentsWithConfig,
- /**
- * Tracks an event to be fed to the results product on Split user interface, and returns a promise to signal when the event was successfully queued (or not).
- * @function track
- * @param {string} trafficType - The traffic type of the entity related to this event.
- * @param {string} eventType - The event type corresponding to this event.
- * @param {number=} value - The value of this event.
- * @param {Properties=} properties - The properties of this event. Values can be string, number, boolean or null.
- * @returns {Promise<boolean>} A promise that resolves to a boolean indicating if the event was added to the queue successfully or not.
- */
- track(trafficType: string, eventType: string, value?: number, properties?: Properties): Promise<boolean>,
- /**
- * Add an attribute to client's in memory attributes storage.
- *
- * @param {string} attributeName Attribute name
- * @param {AttributeType} attributeValue Attribute value
- * @returns {boolean} true if the attribute was stored and false otherwise
- */
- setAttribute(attributeName: string, attributeValue: AttributeType): boolean,
- /**
- * Returns the attribute with the given name.
- *
- * @param {string} attributeName Attribute name
- * @returns {AttributeType} Attribute with the given name
- */
- getAttribute(attributeName: string): AttributeType,
- /**
- * Removes from client's in memory attributes storage the attribute with the given name.
- *
- * @param {string} attributeName
- * @returns {boolean} true if attribute was removed and false otherwise
- */
- removeAttribute(attributeName: string): boolean,
- /**
- * Add to client's in memory attributes storage the attributes in 'attributes'.
- *
- * @param {Attributes} attributes Object with attributes to store
- * @returns {boolean} true if attributes were stored and false otherwise
- */
- setAttributes(attributes: Attributes): boolean,
- /**
- * Return all the attributes stored in client's in memory attributes storage.
- *
- * @returns {Attributes} returns all the stored attributes
- */
- getAttributes(): Attributes,
- /**
- * Remove all the stored attributes in the client's in memory attribute storage.
- *
- * @returns {boolean} true if all attributes were removed and false otherwise
- */
- clearAttributes(): boolean
- }
- /**
- * Representation of a manager instance with synchronous storage of the SDK.
- * @interface IManager
- * @extends IStatusInterface
- */
- interface IManager extends IStatusInterface {
- /**
- * Get the array of feature flag names.
- * @function names
- * @returns {SplitNames} The list of feature flag names.
- */
- names(): SplitNames,
- /**
- * Get the array of feature flags data in SplitView format.
- * @function splits
- * @returns {SplitViews} The list of SplitIO.SplitView.
- */
- splits(): SplitViews,
- /**
- * Get the data of a feature flag in SplitView format.
- * @function split
- * @param {string} featureFlagName The name of the feature flag we want to get info of.
- * @returns {SplitView} The SplitIO.SplitView of the given feature flag.
- */
- split(featureFlagName: string): SplitView,
- }
- /**
- * Representation of a manager instance with asynchronous storage of the SDK.
- * @interface IAsyncManager
- * @extends IStatusInterface
- */
- interface IAsyncManager extends IStatusInterface {
- /**
- * Get the array of feature flag names.
- * @function names
- * @returns {SplitNamesAsync} A promise that resolves to the list of feature flag names.
- */
- names(): SplitNamesAsync,
- /**
- * Get the array of feature flags data in SplitView format.
- * @function splits
- * @returns {SplitViewsAsync} A promise that resolves to the SplitIO.SplitView list.
- */
- splits(): SplitViewsAsync,
- /**
- * Get the data of a feature flag in SplitView format.
- * @function split
- * @param {string} featureFlagName The name of the feature flag we want to get info of.
- * @returns {SplitViewAsync} A promise that resolves to the SplitIO.SplitView value.
- */
- split(featureFlagName: string): SplitViewAsync,
- }
-}
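Editor's note: since this hunk removes the whole typings block, a minimal usage sketch of the standalone, synchronous API it describes may help reviewers. It assumes the equivalent `IBrowserSettings`, `IClient` and status-event definitions remain available after this change and that `InLocalStorage` is still exported from the package entry point; the SDK key, user key, feature flag names, traffic type, event type and attribute values are placeholders.

```typescript
import { SplitFactory, InLocalStorage } from '@splitsoftware/splitio-browserjs';

// Standalone mode (IBrowserSettings): in-memory storage by default, or InLocalStorage() to cache rollout data.
const factory = SplitFactory({
  core: {
    authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', // placeholder
    key: 'user-123'                               // placeholder user key
  },
  storage: InLocalStorage(),
  startup: { readyTimeout: 1.5 },
  scheduler: { featuresRefreshRate: 60, eventsPushRate: 60 }
});

const client = factory.client();

// SDK_READY comes from the status interface, which is not part of this hunk.
client.on(client.Event.SDK_READY, () => {
  // IClient: evaluations are synchronous once the SDK is ready.
  const treatment = client.getTreatment('new_checkout', { plan: 'premium' });
  const byFlagSet = client.getTreatmentsByFlagSet('frontend');

  // Attributes storage: setAttribute returns a boolean, getAttributes returns the stored map.
  client.setAttribute('plan', 'premium');

  // Event tracking: returns true if the event was queued.
  const queued = client.track('user', 'checkout.completed', 42, { currency: 'USD' });

  console.log(treatment, byFlagSet, queued, client.getAttributes());
});
```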
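A second sketch covers the asynchronous path described by `IBrowserAsyncSettings` and `IAsyncClient`: consumer mode backed by `PluggableStorage`, where every evaluation resolves to a promise. The storage wrapper below is a stand-in; a real wrapper must implement the pluggable storage contract, which is not part of this diff.

```typescript
import { SplitFactory, PluggableStorage } from '@splitsoftware/splitio-browserjs';

// Hypothetical wrapper object; a real implementation must satisfy the pluggable storage contract.
declare const myStorageWrapper: object;

const asyncFactory = SplitFactory({
  core: {
    authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', // placeholder
    key: 'user-123'                               // placeholder user key
  },
  mode: 'consumer',
  storage: PluggableStorage({ wrapper: myStorageWrapper })
});

const asyncClient = asyncFactory.client();

async function evaluate() {
  // IAsyncClient: every evaluation resolves asynchronously against the wrapped storage.
  const treatment = await asyncClient.getTreatment('new_checkout');
  const withConfig = await asyncClient.getTreatmentWithConfig('new_checkout');
  const queued = await asyncClient.track('user', 'page.viewed'); // resolves to a boolean
  console.log(treatment, withConfig.config, queued);
}

evaluate();
```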
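Finally, a short sketch of the manager interface (`IManager`), which exposes the available feature flags in SplitView format. The flag name is a placeholder, and the ready-event handling again assumes the status interface not shown in this hunk.

```typescript
import { SplitFactory } from '@splitsoftware/splitio-browserjs';

const factory = SplitFactory({
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user-123' } // placeholders
});
const manager = factory.manager();

manager.on(manager.Event.SDK_READY, () => {
  const names = manager.names();              // list of feature flag names
  const views = manager.splits();             // SplitView data for all feature flags
  const view = manager.split('new_checkout'); // SplitView for a single feature flag
  console.log(names, views.length, view);
});
```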