Large Segments tests: readiness and streaming mode #813

Merged · 4 commits · Jul 25, 2024
Changes from 2 commits
70 changes: 61 additions & 9 deletions src/__tests__/browserSuites/push-synchronization.spec.js
@@ -17,6 +17,8 @@ import unboundedMessage from '../mocks/message.V2.UNBOUNDED.1457552650000.json';
import boundedZlibMessage from '../mocks/message.V2.BOUNDED.ZLIB.1457552651000.json';
import keylistGzipMessage from '../mocks/message.V2.KEYLIST.GZIP.1457552652000.json';
import segmentRemovalMessage from '../mocks/message.V2.SEGMENT_REMOVAL.1457552653000.json';
import unboundedMyLargeSegmentsMessage from '../mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json';
import myLargeSegmentRemovalMessage from '../mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json';

import authPushEnabledNicolas from '../mocks/auth.pushEnabled.nicolas@split.io.json';
import authPushEnabledNicolasAndMarcio from '../mocks/auth.pushEnabled.nicolas@split.io.marcio@split.io.json';
@@ -49,6 +51,9 @@ const config = {
},
urls: baseUrls,
streamingEnabled: true,
sync: {
largeSegmentsEnabled: true
}
};
const settings = settingsFactory(config);

@@ -68,17 +73,19 @@ const MILLIS_KEYLIST_FALLBACK = 1300;
const MILLIS_BOUNDED = 1400;
const MILLIS_KEYLIST = 1500;
const MILLIS_SEGMENT_REMOVAL = 1600;
const MILLIS_UNBOUNDED_FETCH_LS = 1700;
const MILLIS_SEGMENT_REMOVAL_LS = 2000;

/**
* Sequence of calls:
* 0.0 secs: initial SyncAll (/splitChanges, /mySegments/*, /myLargeSegments/*), auth, SSE connection
* 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*)
* 0.1 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*, /myLargeSegments/*)
* 0.2 secs: SPLIT_UPDATE event -> /splitChanges
* 0.3 secs: SPLIT_UPDATE event with old changeNumber
* 0.4 secs: MY_SEGMENTS_UPDATE event -> /mySegments/nicolas@split.io
* 0.5 secs: SPLIT_KILL event -> /splitChanges
* 0.6 secs: creates a new client -> new auth and SSE connection
* 0.7 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*)
* 0.7 secs: SSE connection opened -> syncAll (/splitChanges, /mySegments/*, /myLargeSegments/*)
* 0.8 secs: MY_SEGMENTS_UPDATE event for new client (with payload).
* 0.9 secs: MY_SEGMENTS_UPDATE event for new client (with empty payload).
* 1.0 secs: creates more clients
@@ -88,9 +95,12 @@ const MILLIS_SEGMENT_REMOVAL = 1600;
* 1.4 secs: MY_SEGMENTS_UPDATE_V2 BoundedFetchRequest event.
* 1.5 secs: MY_SEGMENTS_UPDATE_V2 KeyList event.
* 1.6 secs: MY_SEGMENTS_UPDATE_V2 SegmentRemoval event.
* 1.7 secs: MY_LARGE_SEGMENTS_UPDATE UnboundedFetchRequest event, with 241 ms delay for 'nicolas@split.io' (hash('nicolas@split.io') % 300)
* 1.941 secs: /myLargeSegments/* fetch due to unbounded MY_LARGE_SEGMENTS_UPDATE event -> SPLIT_UPDATE event
* 2.0 secs: MY_LARGE_SEGMENTS_UPDATE SegmentRemoval event -> SPLIT_UPDATE event
*/
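// A sketch of where the 241 ms figure above comes from (the exact hash
// function used by the SDK is an assumption here; the suite only relies on
// the resulting value): the unbounded notification carries a 300 ms
// parameter (`i` in the mock, presumably the interval), and each client
// staggers its /myLargeSegments/* fetch by `hash(userKey) % i` milliseconds
// so clients don't all hit the endpoint at once:
//
//   const delayMs = hash('nicolas@split.io') % 300; // 241 in this suite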
export function testSynchronization(fetchMock, assert) {
assert.plan(38);
assert.plan(44);
fetchMock.reset();

let start, splitio, client, otherClient, keylistAddClient, keylistRemoveClient, bitmapTrueClient, sharedClients = [];
@@ -236,6 +246,31 @@ export function testSynchronization(fetchMock, assert) {
assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'on', 'off', 'on'], 'evaluation before segment removal');
bitmapTrueClient.once(bitmapTrueClient.Event.SDK_UPDATE, () => {
assert.deepEqual(sharedClients.map(c => c.getTreatment('splitters')), ['off', 'off', 'off', 'off'], 'evaluation after segment removal');
});

eventSourceInstance.emitMessage(segmentRemovalMessage);
}, MILLIS_SEGMENT_REMOVAL - MILLIS_MORE_CLIENTS);

setTimeout(() => {
assert.equal(client.getTreatment('in_large_segment'), 'no', 'evaluation before myLargeSegment fetch');

const timestampUnboundEvent = Date.now();
const EXPECTED_DELAY = 241;

client.once(client.Event.SDK_UPDATE, () => {
assert.true(nearlyEqual(Date.now() - timestampUnboundEvent, EXPECTED_DELAY), 'SDK_UPDATE after fetching myLargeSegments with a delay');
assert.equal(client.getTreatment('in_large_segment'), 'yes', 'evaluation after myLargeSegment fetch');
});

eventSourceInstance.emitMessage(unboundedMyLargeSegmentsMessage);
}, MILLIS_UNBOUNDED_FETCH_LS - MILLIS_MORE_CLIENTS);

setTimeout(() => {
assert.equal(client.getTreatment('in_large_segment'), 'yes', 'evaluation before large segment removal');
assert.deepEqual(sharedClients.map(c => c.getTreatment('in_large_segment')), ['no', 'no', 'no', 'no'], 'evaluation before large segment removal');

client.once(client.Event.SDK_UPDATE, () => {
assert.equal(client.getTreatment('in_large_segment'), 'no', 'evaluation after large segment removal');

// destroy shared clients and then main client
Promise.all(sharedClients.map(c => c.destroy()))
@@ -252,8 +287,8 @@ export function testSynchronization(fetchMock, assert) {
});
});

eventSourceInstance.emitMessage(segmentRemovalMessage);
}, MILLIS_SEGMENT_REMOVAL - MILLIS_MORE_CLIENTS);
eventSourceInstance.emitMessage(myLargeSegmentRemovalMessage);
}, MILLIS_SEGMENT_REMOVAL_LS - MILLIS_MORE_CLIENTS);
});
}, MILLIS_MORE_CLIENTS - MILLIS_NEW_CLIENT);

@@ -283,7 +318,7 @@ export function testSynchronization(fetchMock, assert) {
authParams += `&users=${encodeURIComponent(keylistAddKey)}&users=${encodeURIComponent(keylistRemoveKey)}&users=${encodeURIComponent(bitmapTrueKey)}`;
fetchMock.getOnce(url(settings, `/v2/auth?s=1.1&${authParams}`), { status: 200, body: authPushEnabledNicolasAndMarcio });

// initial split and mySegments sync
// initial sync
fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=-1'), function (url, opts) {
const lapse = Date.now() - start;
assert.true(nearlyEqual(lapse, 0), 'initial sync');
@@ -294,6 +329,7 @@ export function testSynchronization(fetchMock, assert) {
if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header');
return { status: 200, body: mySegmentsNicolasMock1 };
});
fetchMock.getOnce(url(settings, '/myLargeSegments/nicolas%40split.io'), { status: 200, body: { myLargeSegments: [] } });

// split and segment sync after SSE opened
fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function (url, opts) {
@@ -306,6 +342,7 @@ export function testSynchronization(fetchMock, assert) {
if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header');
return { status: 200, body: mySegmentsNicolasMock1 };
});
fetchMock.getOnce(url(settings, '/myLargeSegments/nicolas%40split.io'), { status: 200, body: { myLargeSegments: [] } });

// fetch due to SPLIT_UPDATE event
fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552620999'), function (url, opts) {
@@ -326,13 +363,15 @@ export function testSynchronization(fetchMock, assert) {
return { status: 200, body: splitChangesMock4 };
});

// initial fetch of mySegments for new client
// initial fetch of mySegments and myLargeSegments for new client
fetchMock.getOnce(url(settings, '/mySegments/marcio%40split.io'), function (url, opts) {
if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header');
return { status: 200, body: mySegmentsMarcio };
});
fetchMock.getOnce(url(settings, '/myLargeSegments/marcio%40split.io'), { status: 200, body: { myLargeSegments: [] } });


// split and mySegment sync after second SSE opened
// sync after second SSE opened
fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552650000'), function (url, opts) {
const lapse = Date.now() - start;
assert.true(nearlyEqual(lapse, MILLIS_SECOND_SSE_OPEN), 'sync after second SSE connection is opened');
@@ -347,6 +386,9 @@ export function testSynchronization(fetchMock, assert) {
if (hasNoCacheHeader(opts)) assert.fail('request must not include `Cache-Control` header');
return { status: 200, body: mySegmentsMarcio };
});
fetchMock.get({ url: url(settings, '/myLargeSegments/nicolas%40split.io'), repeat: 2 }, { status: 200, body: { myLargeSegments: [] } });
fetchMock.get({ url: url(settings, '/myLargeSegments/marcio%40split.io'), repeat: 2 }, { status: 200, body: { myLargeSegments: [] } });

// 3 unbounded fetch requests
fetchMock.get({ url: url(settings, '/mySegments/nicolas%40split.io'), repeat: 3 }, function (url, opts) {
if (!hasNoCacheHeader(opts)) assert.fail('request must include `Cache-Control` header');
@@ -357,15 +399,25 @@ export function testSynchronization(fetchMock, assert) {
return { status: 200, body: mySegmentsMarcio };
});

// initial fetch of mySegments for other clients + sync after third SSE opened + 3 unbounded fetch requests
// initial fetch of mySegments and myLargeSegments for other clients + sync after third SSE opened + 3 unbounded fetch requests for mySegments
fetchMock.getOnce(url(settings, '/splitChanges?s=1.1&since=1457552650000'), { status: 200, body: { splits: [], since: 1457552650000, till: 1457552650000 } });
fetchMock.get({ url: url(settings, '/mySegments/key1'), repeat: 5 }, { status: 200, body: { mySegments: [] } });
fetchMock.get({ url: url(settings, '/mySegments/key3'), repeat: 5 }, { status: 200, body: { mySegments: [{ name: 'splitters' }] } });
fetchMock.get({ url: url(settings, `/mySegments/${bitmapTrueKey}`), repeat: 5 }, { status: 200, body: { mySegments: [] } });
fetchMock.get({ url: url(settings, '/myLargeSegments/key1'), repeat: 2 }, { status: 200, body: { myLargeSegments: [] } });
fetchMock.get({ url: url(settings, '/myLargeSegments/key3'), repeat: 2 }, { status: 200, body: { myLargeSegments: [] } });
fetchMock.get({ url: url(settings, `/myLargeSegments/${bitmapTrueKey}`), repeat: 2 }, { status: 200, body: { myLargeSegments: [] } });

// bounded fetch request
fetchMock.get(url(settings, `/mySegments/${bitmapTrueKey}`), { status: 200, body: { mySegments: [{ name: 'splitters' }] } });

// unbounded myLargeSegments fetch requests
fetchMock.getOnce(url(settings, '/myLargeSegments/nicolas%40split.io'), { status: 200, body: { myLargeSegments: ['employees', 'splitters'] } });
fetchMock.getOnce(url(settings, '/myLargeSegments/marcio%40split.io'), { status: 200, body: { myLargeSegments: [] } });
fetchMock.getOnce(url(settings, '/myLargeSegments/key1'), { status: 200, body: { myLargeSegments: [] } });
fetchMock.getOnce(url(settings, '/myLargeSegments/key3'), { status: 200, body: { myLargeSegments: [] } });
fetchMock.getOnce(url(settings, `/myLargeSegments/${bitmapTrueKey}`), { status: 200, body: { myLargeSegments: [] } });

fetchMock.get(new RegExp('.*'), function (url) {
assert.fail('unexpected GET request with url: ' + url);
});
149 changes: 149 additions & 0 deletions src/__tests__/browserSuites/readiness-large-segments.spec.js
@@ -0,0 +1,149 @@
import { SplitFactory } from '../../';

// Mocks
import mySegments from '../mocks/mysegments.nicolas@split.io.json';
import myLargeSegments from '../mocks/mylargesegments.employees.json';
import { nearlyEqual } from '../testUtils';

const FF = {
name: 'FF',
status: 'ACTIVE',
conditions: [{
matcherGroup: {
combiner: 'AND',
matchers: []
}
}]
};

const FF_WITH_SEGMENTS = {
name: 'FF_WITH_SEGMENTS',
status: 'ACTIVE',
conditions: [{
matcherGroup: {
combiner: 'AND',
matchers: [{
matcherType: 'IN_SEGMENT',
userDefinedSegmentMatcherData: {
segmentName: 'A'
}
}]
}
}]
};

const FF_WITH_LARGE_SEGMENTS = {
name: 'FF_WITH_LARGE_SEGMENTS',
status: 'ACTIVE',
conditions: [{
matcherGroup: {
combiner: 'AND',
matchers: [{
matcherType: 'IN_LARGE_SEGMENT',
userDefinedSegmentMatcherData: {
segmentName: 'A'
}
}]
}
}]
};

const waitConfig = {
core: {
authorizationKey: '<fake-token>',
key: 'emi@split.io'
},
urls: {
sdk: 'https://sdk.baseurl/largeSegments',
},
sync: {
largeSegmentsEnabled: true
},
streamingEnabled: false
};

const noWaitConfig = {
...waitConfig,
startup: {
waitForLargeSegments: false
}
};

const SEGMENTS_DELAY = 50;
const LARGE_SEGMENTS_DELAY = 100;
const TEST_END_DELAY = 150;

export default function (fetchMock, assert) {

const testCases = [
{ waitForLargeSegments: true, featureFlagsWithSegments: true, featureFlagsWithLS: true },
{ waitForLargeSegments: true, featureFlagsWithSegments: true, featureFlagsWithLS: false },
{ waitForLargeSegments: true, featureFlagsWithSegments: false, featureFlagsWithLS: true },
{ waitForLargeSegments: true, featureFlagsWithSegments: false, featureFlagsWithLS: false },
{ waitForLargeSegments: false, featureFlagsWithSegments: true, featureFlagsWithLS: true },
{ waitForLargeSegments: false, featureFlagsWithSegments: true, featureFlagsWithLS: false },
{ waitForLargeSegments: false, featureFlagsWithSegments: false, featureFlagsWithLS: true },
{ waitForLargeSegments: false, featureFlagsWithSegments: false, featureFlagsWithLS: false },

// Special cases where large segments are not supported for the given SDK key: `/myLargeSegments/*` responds with 403 and no FFs can reference large segments
{ waitForLargeSegments: true, featureFlagsWithSegments: true, featureFlagsWithLS: false, myLargeSegmentsForbidden: true },
{ waitForLargeSegments: false, featureFlagsWithSegments: true, featureFlagsWithLS: false, myLargeSegmentsForbidden: true },
];

testCases.forEach(({ waitForLargeSegments, featureFlagsWithSegments, featureFlagsWithLS, myLargeSegmentsForbidden }) => {

const config = waitForLargeSegments ? waitConfig : noWaitConfig;

const splitChangesMock = {
since: -1,
till: 1457552620999,
splits: [FF, featureFlagsWithSegments && FF_WITH_SEGMENTS, featureFlagsWithLS && FF_WITH_LARGE_SEGMENTS].filter(ff => ff)
};

// smart ready: if FFs are not using segments (or LS) we don't need to wait for them
const SDK_READY_DELAY = Math.max(
featureFlagsWithSegments ? SEGMENTS_DELAY : 0,
featureFlagsWithLS && waitForLargeSegments ? LARGE_SEGMENTS_DELAY : 0
);

// emit SDK_UPDATE if large segments arrive after SDK_READY event is emitted and FFs are using them
const shouldEmitSdkUpdate = waitForLargeSegments === false && featureFlagsWithLS === true && (LARGE_SEGMENTS_DELAY > SEGMENTS_DELAY || featureFlagsWithSegments === false);
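// Worked example, taking one row of the matrix above: with
// { waitForLargeSegments: false, featureFlagsWithSegments: true, featureFlagsWithLS: true },
// SDK_READY only waits for regular segments, so SDK_READY_DELAY =
// max(SEGMENTS_DELAY, 0) = 50 ms; large segments land later
// (LARGE_SEGMENTS_DELAY = 100 > 50) and the FFs use them, so a single
// SDK_UPDATE is expected at ~LARGE_SEGMENTS_DELAY = 100 ms.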

assert.test(t => {
fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=-1', { status: 200, body: splitChangesMock });
fetchMock.getOnce(config.urls.sdk + '/splitChanges?s=1.1&since=1457552620999', { status: 200, body: { since: 1457552620999, till: 1457552620999, splits: [] } });
fetchMock.getOnce(config.urls.sdk + '/mySegments/emi%40split.io', { status: 200, body: mySegments }, { delay: SEGMENTS_DELAY });
fetchMock.getOnce(config.urls.sdk + '/myLargeSegments/emi%40split.io', { status: myLargeSegmentsForbidden ? 403 : 200, body: myLargeSegments }, { delay: LARGE_SEGMENTS_DELAY });

// smart pausing: if FFs are not using segments (or LS) we don't need to fetch them
if (featureFlagsWithSegments) fetchMock.getOnce(config.urls.sdk + '/mySegments/shared', { status: 200, body: mySegments }, { delay: SEGMENTS_DELAY });
if (featureFlagsWithLS) fetchMock.getOnce(config.urls.sdk + '/myLargeSegments/shared', { status: myLargeSegmentsForbidden ? 403 : 200, body: myLargeSegments }, { delay: LARGE_SEGMENTS_DELAY });

const splitio = SplitFactory(config);
const client = splitio.client();

const start = Date.now();
client.once(client.Event.SDK_READY, () => {
assert.true(nearlyEqual(Date.now() - start, SDK_READY_DELAY));

splitio.client('shared').ready().then(() => {
assert.true(nearlyEqual(Date.now() - start, 2 * SDK_READY_DELAY));
});
});

let updateEmitted = false;

client.once(client.Event.SDK_UPDATE, () => {
assert.true(nearlyEqual(Date.now() - start, LARGE_SEGMENTS_DELAY));
updateEmitted = true;
});

setTimeout(() => {
assert.true(updateEmitted === shouldEmitSdkUpdate);
client.destroy().then(() => { t.end(); });
}, TEST_END_DELAY);
});

});

}
4 changes: 4 additions & 0 deletions src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.SEGMENT_REMOVAL.1457552653000.json
@@ -0,0 +1,4 @@
{
"type": "message",
"data": "{\"data\":\"{\\\"type\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552653000,\\\"largeSegments\\\":[\\\"harnessians\\\",\\\"splitters\\\"],\\\"c\\\": 0,\\\"u\\\": 3,\\\"d\\\":\\\"\\\"}\"}"
}
4 changes: 4 additions & 0 deletions src/__tests__/mocks/message.MY_LARGE_SEGMENTS_UPDATE.UNBOUNDED.1457552650000.json
@@ -0,0 +1,4 @@
{
"type": "message",
"data": "{\"data\":\"{\\\"type\\\":\\\"MY_LARGE_SEGMENTS_UPDATE\\\",\\\"changeNumber\\\":1457552650000,\\\"largeSegments\\\":[],\\\"c\\\": 0,\\\"u\\\": 0,\\\"d\\\":\\\"\\\",\\\"i\\\":300,\\\"h\\\":0,\\\"s\\\":0}\"}"
}
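For reference, the `data` field of these two SSE mocks is JSON nested twice. A minimal sketch of how a handler recovers the notification object (field meanings are inferred, not confirmed by this diff: `u` appears to select the update strategy, 0 = unbounded fetch and 3 = segment removal, matching the mock names, and `i` is the 300 ms interval behind the `hash(key) % i` fetch delay):

const envelope = JSON.parse(event.data); // outer envelope: { data: '…' }
const notification = JSON.parse(envelope.data);
// { type: 'MY_LARGE_SEGMENTS_UPDATE', changeNumber: 1457552650000,
//   largeSegments: [], c: 0, u: 0, d: '', i: 300, h: 0, s: 0 }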
6 changes: 6 additions & 0 deletions src/__tests__/mocks/mylargesegments.employees.json
@@ -0,0 +1,6 @@
{
"myLargeSegments": [
"employees"
],
"changeNumber": 1234567890
}
42 changes: 42 additions & 0 deletions src/__tests__/mocks/splitchanges.real.withSegments.json
@@ -1,5 +1,47 @@
{
"splits": [
{
"orgId": null,
"environment": null,
"trafficTypeId": null,
"trafficTypeName": null,
"name": "in_large_segment",
"seed": -1984784937,
"status": "ACTIVE",
"killed": false,
"defaultTreatment": "no",
"conditions": [
{
"matcherGroup": {
"combiner": "AND",
"matchers": [
{
"keySelector": {
"trafficType": "user",
"attribute": null
},
"matcherType": "IN_LARGE_SEGMENT",
"negate": false,
"userDefinedSegmentMatcherData": {
"segmentName": "harnessians"
},
"whitelistMatcherData": null,
"unaryNumericMatcherData": null,
"betweenMatcherData": null,
"unaryStringMatcherData": null
}
]
},
"partitions": [
{
"treatment": "yes",
"size": 100
}
]
}
],
"configurations": {}
},
{
"trafficTypeName": "user",
"name": "real_split",
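A minimal sketch of how the added `in_large_segment` split evaluates (assuming a client whose key belongs to the 'harnessians' large segment once /myLargeSegments/* has been fetched):

client.getTreatment('in_large_segment');
// 'yes' if the key is in the 'harnessians' large segment,
// otherwise the defaultTreatment 'no'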