From 964024e43086144effcd6b1c7332a42797d194e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Thu, 17 Dec 2020 11:20:15 +0100 Subject: [PATCH 01/43] [RUMF-804] implement the deflate worker --- .../src/domain/deflateWorker.d.ts | 32 + .../rum-recorder/src/domain/deflateWorker.js | 4563 +++++++++++++++++ .../src/domain/deflateWorker.spec.ts | 64 + packages/rum-recorder/tsconfig.cjs.json | 3 +- packages/rum-recorder/tsconfig.esm.json | 3 +- tsconfig.base.json | 1 + 6 files changed, 4664 insertions(+), 2 deletions(-) create mode 100644 packages/rum-recorder/src/domain/deflateWorker.d.ts create mode 100644 packages/rum-recorder/src/domain/deflateWorker.js create mode 100644 packages/rum-recorder/src/domain/deflateWorker.spec.ts diff --git a/packages/rum-recorder/src/domain/deflateWorker.d.ts b/packages/rum-recorder/src/domain/deflateWorker.d.ts new file mode 100644 index 0000000000..ef4ffea322 --- /dev/null +++ b/packages/rum-recorder/src/domain/deflateWorker.d.ts @@ -0,0 +1,32 @@ +export function createDeflateWorker(): DeflateWorker + +export interface DeflateWorker { + addEventListener(name: 'message', listener: DeflateWorkerListener): void + removeEventListener(name: 'message', listener: DeflateWorkerListener): void + postMessage(message: DeflateWorkerAction): void + terminate(): void +} + +export type DeflateWorkerListener = (event: { data: DeflateWorkerResponse }) => void + +export type DeflateWorkerAction = + | { + id: number + action: 'write' + data: string + } + | { + id: number + action: 'complete' + data?: string + } + +export type DeflateWorkerResponse = + | { + id: number + size: number + } + | { + id: number + result: Uint8Array + } diff --git a/packages/rum-recorder/src/domain/deflateWorker.js b/packages/rum-recorder/src/domain/deflateWorker.js new file mode 100644 index 0000000000..466da2a6b5 --- /dev/null +++ b/packages/rum-recorder/src/domain/deflateWorker.js @@ -0,0 +1,4563 @@ +// Warning: this file is *not* transpiled. It should use a syntax supported by our supported +// browsers. + +let workerURL + +export function createDeflateWorker() { + // Lazily compute the worker URL to allow importing the SDK in NodeJS + if (!workerURL) { + workerURL = URL.createObjectURL(new Blob(['(' + workerCodeFn + ')(self)'])) + } + return new Worker(workerURL) +} + +function workerCodeFn() { + const pako = makePakoDeflate() + + let deflate = new pako.Deflate() + self.addEventListener('message', function (event) { + const data = event.data + switch (data.action) { + case 'write': + deflate.push(data.data, pako.constants.Z_SYNC_FLUSH) + self.postMessage({ + id: data.id, + size: deflate.chunks.reduce(function (total, chunk) { + return total + chunk.length + }, 0), + }) + break + case 'complete': + if (data.data) { + deflate.push(data.data, pako.constants.Z_SYNC_FLUSH) + } + deflate.push('', pako.constants.Z_FINISH) + self.postMessage({ + id: data.id, + result: deflate.result, + }) + deflate = new pako.Deflate() + break + } + }) + + // https://github.com/nodeca/pako/blob/034669ba0f1a4c0590e45f7c2820128200f972b3/dist/pako_deflate.es5.js + function makePakoDeflate() { + // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin + // + // This software is provided 'as-is', without any express or implied + // warranty. In no event will the authors be held liable for any damages + // arising from the use of this software. 
+ // + // Permission is granted to anyone to use this software for any purpose, + // including commercial applications, and to alter it and redistribute it + // freely, subject to the following restrictions: + // + // 1. The origin of this software must not be misrepresented; you must not + // claim that you wrote the original software. If you use this software + // in a product, an acknowledgment in the product documentation would be + // appreciated but is not required. + // 2. Altered source versions must be plainly marked as such, and must not be + // misrepresented as being the original software. + // 3. This notice may not be removed or altered from any source distribution. + + /* eslint-disable space-unary-ops */ + + /* Public constants ==========================================================*/ + + /* ===========================================================================*/ + //const Z_FILTERED = 1; + //const Z_HUFFMAN_ONLY = 2; + //const Z_RLE = 3; + + var Z_FIXED = 4 //const Z_DEFAULT_STRATEGY = 0; + + /* Possible values of the data_type field (though see inflate()) */ + + var Z_BINARY = 0 + var Z_TEXT = 1 //const Z_ASCII = 1; // = Z_TEXT + + var Z_UNKNOWN = 2 + /*============================================================================*/ + + function zero(buf) { + var len = buf.length + + while (--len >= 0) { + buf[len] = 0 + } + } // From zutil.h + + var STORED_BLOCK = 0 + var STATIC_TREES = 1 + var DYN_TREES = 2 + /* The three kinds of block type */ + + var MIN_MATCH = 3 + var MAX_MATCH = 258 + /* The minimum and maximum match lengths */ + // From deflate.h + + /* =========================================================================== + * Internal compression state. + */ + + var LENGTH_CODES = 29 + /* number of length codes, not counting the special END_BLOCK code */ + + var LITERALS = 256 + /* number of literal bytes 0..255 */ + + var L_CODES = LITERALS + 1 + LENGTH_CODES + /* number of Literal or Length codes, including the END_BLOCK code */ + + var D_CODES = 30 + /* number of distance codes */ + + var BL_CODES = 19 + /* number of codes used to transfer the bit lengths */ + + var HEAP_SIZE = 2 * L_CODES + 1 + /* maximum heap size */ + + var MAX_BITS = 15 + /* All codes must not exceed MAX_BITS bits */ + + var Buf_size = 16 + /* size of bit buffer in bi_buf */ + + /* =========================================================================== + * Constants + */ + + var MAX_BL_BITS = 7 + /* Bit length codes must not exceed MAX_BL_BITS bits */ + + var END_BLOCK = 256 + /* end of block literal code */ + + var REP_3_6 = 16 + /* repeat previous bit length 3-6 times (2 bits of repeat count) */ + + var REPZ_3_10 = 17 + /* repeat a zero length 3-10 times (3 bits of repeat count) */ + + var REPZ_11_138 = 18 + /* repeat a zero length 11-138 times (7 bits of repeat count) */ + + /* eslint-disable comma-spacing,array-bracket-spacing */ + + var extra_lbits = + /* extra bits for each length code */ + new Uint8Array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0]) + var extra_dbits = + /* extra bits for each distance code */ + new Uint8Array([0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13]) + var extra_blbits = + /* extra bits for each bit length code */ + new Uint8Array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7]) + var bl_order = new Uint8Array([16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15]) + /* eslint-enable comma-spacing,array-bracket-spacing */ + + /* 
The lengths of the bit length codes are sent in order of decreasing + * probability, to avoid transmitting the lengths for unused bit length codes. + */ + + /* =========================================================================== + * Local data. These are initialized only once. + */ + // We pre-fill arrays with 0 to avoid uninitialized gaps + + var DIST_CODE_LEN = 512 + /* see definition of array dist_code below */ + // !!!! Use flat array instead of structure, Freq = i*2, Len = i*2+1 + + var static_ltree = new Array((L_CODES + 2) * 2) + zero(static_ltree) + /* The static literal tree. Since the bit lengths are imposed, there is no + * need for the L_CODES extra codes used during heap construction. However + * The codes 286 and 287 are needed to build a canonical tree (see _tr_init + * below). + */ + + var static_dtree = new Array(D_CODES * 2) + zero(static_dtree) + /* The static distance tree. (Actually a trivial tree since all codes use + * 5 bits.) + */ + + var _dist_code = new Array(DIST_CODE_LEN) + + zero(_dist_code) + /* Distance codes. The first 256 values correspond to the distances + * 3 .. 258, the last 256 values correspond to the top 8 bits of + * the 15 bit distances. + */ + + var _length_code = new Array(MAX_MATCH - MIN_MATCH + 1) + + zero(_length_code) + /* length code for each normalized match length (0 == MIN_MATCH) */ + + var base_length = new Array(LENGTH_CODES) + zero(base_length) + /* First normalized length for each code (0 = MIN_MATCH) */ + + var base_dist = new Array(D_CODES) + zero(base_dist) + /* First normalized distance for each code (0 = distance of 1) */ + + function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) { + this.static_tree = static_tree + /* static tree or NULL */ + + this.extra_bits = extra_bits + /* extra bits for each code or NULL */ + + this.extra_base = extra_base + /* base index for extra_bits */ + + this.elems = elems + /* max number of elements in the tree */ + + this.max_length = max_length + /* max bit length for the codes */ + // show if `static_tree` has data or dummy - needed for monomorphic objects + + this.has_stree = static_tree && static_tree.length + } + + var static_l_desc + var static_d_desc + var static_bl_desc + + function TreeDesc(dyn_tree, stat_desc) { + this.dyn_tree = dyn_tree + /* the dynamic tree */ + + this.max_code = 0 + /* largest code with non zero frequency */ + + this.stat_desc = stat_desc + /* the corresponding static tree */ + } + + var d_code = function d_code(dist) { + return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)] + } + /* =========================================================================== + * Output a short LSB first on the stream. + * IN assertion: there is enough room in pendingBuf. + */ + + var put_short = function put_short(s, w) { + // put_byte(s, (uch)((w) & 0xff)); + // put_byte(s, (uch)((ush)(w) >> 8)); + s.pending_buf[s.pending++] = w & 0xff + s.pending_buf[s.pending++] = (w >>> 8) & 0xff + } + /* =========================================================================== + * Send a value on a given number of bits. + * IN assertion: length <= 16 and value fits in length bits. 
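+   * Informative worked example (not from the original zlib sources): with
+   * bi_valid = 14 and length = 3, the two low bits of value complete the
+   * 16-bit bi_buf, which is flushed with put_short(); the remaining high
+   * bit is carried into the fresh bi_buf and bi_valid ends up at 1.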
+ */ + + var send_bits = function send_bits(s, value, length) { + if (s.bi_valid > Buf_size - length) { + s.bi_buf |= (value << s.bi_valid) & 0xffff + put_short(s, s.bi_buf) + s.bi_buf = value >> (Buf_size - s.bi_valid) + s.bi_valid += length - Buf_size + } else { + s.bi_buf |= (value << s.bi_valid) & 0xffff + s.bi_valid += length + } + } + + var send_code = function send_code(s, c, tree) { + send_bits( + s, + tree[c * 2], + /*.Code*/ + tree[c * 2 + 1] + /*.Len*/ + ) + } + /* =========================================================================== + * Reverse the first len bits of a code, using straightforward code (a faster + * method would use a table) + * IN assertion: 1 <= len <= 15 + */ + + var bi_reverse = function bi_reverse(code, len) { + var res = 0 + + do { + res |= code & 1 + code >>>= 1 + res <<= 1 + } while (--len > 0) + + return res >>> 1 + } + /* =========================================================================== + * Flush the bit buffer, keeping at most 7 bits in it. + */ + + var bi_flush = function bi_flush(s) { + if (s.bi_valid === 16) { + put_short(s, s.bi_buf) + s.bi_buf = 0 + s.bi_valid = 0 + } else if (s.bi_valid >= 8) { + s.pending_buf[s.pending++] = s.bi_buf & 0xff + s.bi_buf >>= 8 + s.bi_valid -= 8 + } + } + /* =========================================================================== + * Compute the optimal bit lengths for a tree and update the total bit length + * for the current block. + * IN assertion: the fields freq and dad are set, heap[heap_max] and + * above are the tree nodes sorted by increasing frequency. + * OUT assertions: the field len is set to the optimal bit length, the + * array bl_count contains the frequencies for each bit length. + * The length opt_len is updated; static_len is also updated if stree is + * not null. + */ + + var gen_bitlen = function gen_bitlen( + s, + desc // deflate_state *s; // tree_desc *desc; /* the tree descriptor */ + ) { + var tree = desc.dyn_tree + var max_code = desc.max_code + var stree = desc.stat_desc.static_tree + var has_stree = desc.stat_desc.has_stree + var extra = desc.stat_desc.extra_bits + var base = desc.stat_desc.extra_base + var max_length = desc.stat_desc.max_length + var h + /* heap index */ + + var n, m + /* iterate over the tree elements */ + + var bits + /* bit length */ + + var xbits + /* extra bits */ + + var f + /* frequency */ + + var overflow = 0 + /* number of elements with bit length too large */ + + for (bits = 0; bits <= MAX_BITS; bits++) { + s.bl_count[bits] = 0 + } + /* In a first pass, compute the optimal bit lengths (which may + * overflow in the case of the bit length tree). 
+ */ + + tree[s.heap[s.heap_max] * 2 + 1] = + /*.Len*/ + 0 + /* root of the heap */ + + for (h = s.heap_max + 1; h < HEAP_SIZE; h++) { + n = s.heap[h] + bits = + tree[ + tree[n * 2 + 1] * + /*.Dad*/ + 2 + + 1 + ] + + /*.Len*/ + 1 + + if (bits > max_length) { + bits = max_length + overflow++ + } + + tree[n * 2 + 1] = + /*.Len*/ + bits + /* We overwrite tree[n].Dad which is no longer needed */ + + if (n > max_code) { + continue + } + /* not a leaf node */ + + s.bl_count[bits]++ + xbits = 0 + + if (n >= base) { + xbits = extra[n - base] + } + + f = tree[n * 2] + /*.Freq*/ + s.opt_len += f * (bits + xbits) + + if (has_stree) { + s.static_len += + f * + (stree[n * 2 + 1] + + /*.Len*/ + xbits) + } + } + + if (overflow === 0) { + return + } // Trace((stderr,"\nbit length overflow\n")); + + /* This happens for example on obj2 and pic of the Calgary corpus */ + + /* Find the first bit length which could increase: */ + + do { + bits = max_length - 1 + + while (s.bl_count[bits] === 0) { + bits-- + } + + s.bl_count[bits]-- + /* move one leaf down the tree */ + + s.bl_count[bits + 1] += 2 + /* move one overflow item as its brother */ + + s.bl_count[max_length]-- + /* The brother of the overflow item also moves one step up, + * but this does not affect bl_count[max_length] + */ + + overflow -= 2 + } while (overflow > 0) + /* Now recompute all bit lengths, scanning in increasing frequency. + * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all + * lengths instead of fixing only the wrong ones. This idea is taken + * from 'ar' written by Haruhiko Okumura.) + */ + + for (bits = max_length; bits !== 0; bits--) { + n = s.bl_count[bits] + + while (n !== 0) { + m = s.heap[--h] + + if (m > max_code) { + continue + } + + if ( + tree[m * 2 + 1] !== + /*.Len*/ + bits + ) { + // Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); + s.opt_len += + (bits - tree[m * 2 + 1]) * + /*.Len*/ + tree[m * 2] + /*.Freq*/ + tree[m * 2 + 1] = + /*.Len*/ + bits + } + + n-- + } + } + } + /* =========================================================================== + * Generate the codes for a given tree and bit counts (which need not be + * optimal). + * IN assertion: the array bl_count contains the bit length statistics for + * the given tree and the field len is set for all tree elements. + * OUT assertion: the field code is set for all tree elements of non + * zero code length. + */ + + var gen_codes = function gen_codes( + tree, + max_code, + bl_count // ct_data *tree; /* the tree to decorate */ // int max_code; /* largest code with non zero frequency */ // ushf *bl_count; /* number of codes at each bit length */ + ) { + var next_code = new Array(MAX_BITS + 1) + /* next code value for each bit length */ + + var code = 0 + /* running code value */ + + var bits + /* bit index */ + + var n + /* code index */ + + /* The distribution counts are first used to generate the code values + * without bit reversal. + */ + + for (bits = 1; bits <= MAX_BITS; bits++) { + next_code[bits] = code = (code + bl_count[bits - 1]) << 1 + } + /* Check that the bit counts in bl_count are consistent. The last code + * must be all ones. 
+   */
+    //Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
+    //        "inconsistent bit counts");
+
+    for (n = 0; n <= max_code; n++) {
+      var len = tree[n * 2 + 1]
+      /*.Len*/
+
+      if (len === 0) {
+        continue
+      }
+      /* Now reverse the bits */
+
+      tree[n * 2] =
+        /*.Code*/
+        bi_reverse(next_code[len]++, len) //Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
+      //     n, (isgraph(n) ? n : ' '), len, tree[n*2]/*.Code*/, next_code[len]-1));
+    }
+  }
+  /* ===========================================================================
+   * Initialize the various 'constant' tables.
+   */
+
+  var tr_static_init = function tr_static_init() {
+    var n
+    /* iterates over tree elements */
+
+    var bits
+    /* bit counter */
+
+    var length
+    /* length value */
+
+    var code
+    /* code value */
+
+    var dist
+    /* distance index */
+
+    var bl_count = new Array(MAX_BITS + 1)
+    /* number of codes at each bit length for an optimal tree */
+    // do check in _tr_init()
+    //if (static_init_done) return;
+
+    /* Initialize the mapping length (0..255) -> length code (0..28) */
+
+    length = 0
+
+    for (code = 0; code < LENGTH_CODES - 1; code++) {
+      base_length[code] = length
+
+      for (n = 0; n < 1 << extra_lbits[code]; n++) {
+        _length_code[length++] = code
+      }
+    } //Assert (length == 256, "tr_static_init: length != 256");
+
+    /* Note that the length 255 (match length 258) can be represented
+     * in two different ways: code 284 + 5 bits or code 285, so we
+     * overwrite length_code[255] to use the best encoding:
+     */
+
+    _length_code[length - 1] = code
+    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
+
+    dist = 0
+
+    for (code = 0; code < 16; code++) {
+      base_dist[code] = dist
+
+      for (n = 0; n < 1 << extra_dbits[code]; n++) {
+        _dist_code[dist++] = code
+      }
+    } //Assert (dist == 256, "tr_static_init: dist != 256");
+
+    dist >>= 7
+    /* from now on, all distances are divided by 128 */
+
+    for (; code < D_CODES; code++) {
+      base_dist[code] = dist << 7
+
+      for (n = 0; n < 1 << (extra_dbits[code] - 7); n++) {
+        _dist_code[256 + dist++] = code
+      }
+    } //Assert (dist == 256, "tr_static_init: 256+dist != 512");
+
+    /* Construct the codes of the static literal tree */
+
+    for (bits = 0; bits <= MAX_BITS; bits++) {
+      bl_count[bits] = 0
+    }
+
+    n = 0
+
+    while (n <= 143) {
+      static_ltree[n * 2 + 1] =
+        /*.Len*/
+        8
+      n++
+      bl_count[8]++
+    }
+
+    while (n <= 255) {
+      static_ltree[n * 2 + 1] =
+        /*.Len*/
+        9
+      n++
+      bl_count[9]++
+    }
+
+    while (n <= 279) {
+      static_ltree[n * 2 + 1] =
+        /*.Len*/
+        7
+      n++
+      bl_count[7]++
+    }
+
+    while (n <= 287) {
+      static_ltree[n * 2 + 1] =
+        /*.Len*/
+        8
+      n++
+      bl_count[8]++
+    }
+    /* Codes 286 and 287 do not exist, but we must include them in the
+     * tree construction to get a canonical Huffman tree (longest code
+     * all ones)
+     */
+
+    gen_codes(static_ltree, L_CODES + 1, bl_count)
+    /* The static distance tree is trivial: */
+
+    for (n = 0; n < D_CODES; n++) {
+      static_dtree[n * 2 + 1] =
+        /*.Len*/
+        5
+      static_dtree[n * 2] =
+        /*.Code*/
+        bi_reverse(n, 5)
+    } // Now data ready and we can init static trees
+
+    static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS)
+    static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS)
+    static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS) //static_init_done = true;
+  }
+  /* ===========================================================================
+   * Initialize a new block.
+   */
+
+  var init_block = function init_block(s) {
+    var n
+    /* iterates over tree elements */
+
+    /* Initialize the trees. */
+
+    for (n = 0; n < L_CODES; n++) {
+      s.dyn_ltree[n * 2] =
+        /*.Freq*/
+        0
+    }
+
+    for (n = 0; n < D_CODES; n++) {
+      s.dyn_dtree[n * 2] =
+        /*.Freq*/
+        0
+    }
+
+    for (n = 0; n < BL_CODES; n++) {
+      s.bl_tree[n * 2] =
+        /*.Freq*/
+        0
+    }
+
+    s.dyn_ltree[END_BLOCK * 2] =
+      /*.Freq*/
+      1
+    s.opt_len = s.static_len = 0
+    s.last_lit = s.matches = 0
+  }
+  /* ===========================================================================
+   * Flush the bit buffer and align the output on a byte boundary
+   */
+
+  var bi_windup = function bi_windup(s) {
+    if (s.bi_valid > 8) {
+      put_short(s, s.bi_buf)
+    } else if (s.bi_valid > 0) {
+      //put_byte(s, (Byte)s->bi_buf);
+      s.pending_buf[s.pending++] = s.bi_buf
+    }
+
+    s.bi_buf = 0
+    s.bi_valid = 0
+  }
+  /* ===========================================================================
+   * Copy a stored block, storing first the length and its
+   * one's complement if requested.
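+   * Informative: this is the stored (uncompressed) block format of
+   * RFC 1951, a 16-bit LEN field followed by NLEN, its one's complement,
+   * both written LSB first by put_short().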
+ */ + + var copy_block = function copy_block( + s, + buf, + len, + header //DeflateState *s; //charf *buf; /* the input data */ //unsigned len; /* its length */ //int header; /* true if block header must be written */ + ) { + bi_windup(s) + /* align on byte boundary */ + + if (header) { + put_short(s, len) + put_short(s, ~len) + } // while (len--) { + // put_byte(s, *buf++); + // } + + s.pending_buf.set(s.window.subarray(buf, buf + len), s.pending) + s.pending += len + } + /* =========================================================================== + * Compares to subtrees, using the tree depth as tie breaker when + * the subtrees have equal frequency. This minimizes the worst case length. + */ + + var smaller = function smaller(tree, n, m, depth) { + var _n2 = n * 2 + + var _m2 = m * 2 + + return ( + tree[_n2] < + /*.Freq*/ + tree[_m2] || + /*.Freq*/ + (tree[_n2] === + /*.Freq*/ + tree[_m2] && + /*.Freq*/ + depth[n] <= depth[m]) + ) + } + /* =========================================================================== + * Restore the heap property by moving down the tree starting at node k, + * exchanging a node with the smallest of its two sons if necessary, stopping + * when the heap property is re-established (each father smaller than its + * two sons). + */ + + var pqdownheap = function pqdownheap( + s, + tree, + k // deflate_state *s; // ct_data *tree; /* the tree to restore */ // int k; /* node to move down */ + ) { + var v = s.heap[k] + var j = k << 1 + /* left son of k */ + + while (j <= s.heap_len) { + /* Set j to the smallest of the two sons: */ + if (j < s.heap_len && smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) { + j++ + } + /* Exit if v is smaller than both sons */ + + if (smaller(tree, v, s.heap[j], s.depth)) { + break + } + /* Exchange v with the smallest son */ + + s.heap[k] = s.heap[j] + k = j + /* And continue down the tree, setting j to the left son of k */ + + j <<= 1 + } + + s.heap[k] = v + } // inlined manually + // const SMALLEST = 1; + + /* =========================================================================== + * Send the block data compressed using the given Huffman trees + */ + + var compress_block = function compress_block( + s, + ltree, + dtree // deflate_state *s; // const ct_data *ltree; /* literal tree */ // const ct_data *dtree; /* distance tree */ + ) { + var dist + /* distance of matched string */ + + var lc + /* match length or unmatched char (if dist == 0) */ + + var lx = 0 + /* running index in l_buf */ + + var code + /* the code to send */ + + var extra + /* number of extra bits to send */ + + if (s.last_lit !== 0) { + do { + dist = (s.pending_buf[s.d_buf + lx * 2] << 8) | s.pending_buf[s.d_buf + lx * 2 + 1] + lc = s.pending_buf[s.l_buf + lx] + lx++ + + if (dist === 0) { + send_code(s, lc, ltree) + /* send a literal byte */ + //Tracecv(isgraph(lc), (stderr," '%c' ", lc)); + } else { + /* Here, lc is the match length - MIN_MATCH */ + code = _length_code[lc] + send_code(s, code + LITERALS + 1, ltree) + /* send the length code */ + + extra = extra_lbits[code] + + if (extra !== 0) { + lc -= base_length[code] + send_bits(s, lc, extra) + /* send the extra length bits */ + } + + dist-- + /* dist is now the match distance - 1 */ + + code = d_code(dist) //Assert (code < D_CODES, "bad d_code"); + + send_code(s, code, dtree) + /* send the distance code */ + + extra = extra_dbits[code] + + if (extra !== 0) { + dist -= base_dist[code] + send_bits(s, dist, extra) + /* send the extra distance bits */ + } + } + /* literal or match pair ? 
*/ + + /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ + //Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, + // "pendingBuf overflow"); + } while (lx < s.last_lit) + } + + send_code(s, END_BLOCK, ltree) + } + /* =========================================================================== + * Construct one Huffman tree and assigns the code bit strings and lengths. + * Update the total bit length for the current block. + * IN assertion: the field freq is set for all tree elements. + * OUT assertions: the fields len and code are set to the optimal bit length + * and corresponding code. The length opt_len is updated; static_len is + * also updated if stree is not null. The field max_code is set. + */ + + var build_tree = function build_tree( + s, + desc // deflate_state *s; // tree_desc *desc; /* the tree descriptor */ + ) { + var tree = desc.dyn_tree + var stree = desc.stat_desc.static_tree + var has_stree = desc.stat_desc.has_stree + var elems = desc.stat_desc.elems + var n, m + /* iterate over heap elements */ + + var max_code = -1 + /* largest code with non zero frequency */ + + var node + /* new node being created */ + + /* Construct the initial heap, with least frequent element in + * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. + * heap[0] is not used. + */ + + s.heap_len = 0 + s.heap_max = HEAP_SIZE + + for (n = 0; n < elems; n++) { + if ( + tree[n * 2] !== + /*.Freq*/ + 0 + ) { + s.heap[++s.heap_len] = max_code = n + s.depth[n] = 0 + } else { + tree[n * 2 + 1] = + /*.Len*/ + 0 + } + } + /* The pkzip format requires that at least one distance code exists, + * and that at least one bit should be sent even if there is only one + * possible code. So to avoid special checks later on we force at least + * two codes of non zero frequency. + */ + + while (s.heap_len < 2) { + node = s.heap[++s.heap_len] = max_code < 2 ? ++max_code : 0 + tree[node * 2] = + /*.Freq*/ + 1 + s.depth[node] = 0 + s.opt_len-- + + if (has_stree) { + s.static_len -= stree[node * 2 + 1] + /*.Len*/ + } + /* node is 0 or 1 so it does not have extra bits */ + } + + desc.max_code = max_code + /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, + * establish sub-heaps of increasing lengths: + */ + + for ( + n = s.heap_len >> 1; + /*int /2*/ + n >= 1; + n-- + ) { + pqdownheap(s, tree, n) + } + /* Construct the Huffman tree by repeatedly combining the least two + * frequent nodes. + */ + + node = elems + /* next internal node of the tree */ + + do { + //pqremove(s, tree, n); /* n = node of least frequency */ + + /*** pqremove ***/ + n = s.heap[1] + /*SMALLEST*/ + s.heap[1] = s.heap[s.heap_len--] + /*SMALLEST*/ + pqdownheap( + s, + tree, + 1 + /*SMALLEST*/ + ) + /***/ + + m = s.heap[1] + /*SMALLEST*/ + /* m = node of next least frequency */ + + s.heap[--s.heap_max] = n + /* keep the nodes sorted by frequency */ + + s.heap[--s.heap_max] = m + /* Create a new node father of n and m */ + + tree[node * 2] = + /*.Freq*/ + tree[n * 2] + + /*.Freq*/ + tree[m * 2] + /*.Freq*/ + s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1 + tree[n * 2 + 1] = + /*.Dad*/ + tree[m * 2 + 1] = + /*.Dad*/ + node + /* and insert the new node in the heap */ + + s.heap[1] = node++ + /*SMALLEST*/ + pqdownheap( + s, + tree, + 1 + /*SMALLEST*/ + ) + } while (s.heap_len >= 2) + + s.heap[--s.heap_max] = s.heap[1] + /*SMALLEST*/ + /* At this point, the fields freq and dad are set. We can now + * generate the bit lengths. 
+ */ + + gen_bitlen(s, desc) + /* The field len is now set, we can generate the bit codes */ + + gen_codes(tree, max_code, s.bl_count) + } + /* =========================================================================== + * Scan a literal or distance tree to determine the frequencies of the codes + * in the bit length tree. + */ + + var scan_tree = function scan_tree( + s, + tree, + max_code // deflate_state *s; // ct_data *tree; /* the tree to be scanned */ // int max_code; /* and its largest code of non zero frequency */ + ) { + var n + /* iterates over all tree elements */ + + var prevlen = -1 + /* last emitted length */ + + var curlen + /* length of current code */ + + var nextlen = tree[0 * 2 + 1] + /*.Len*/ + /* length of next code */ + + var count = 0 + /* repeat count of the current code */ + + var max_count = 7 + /* max repeat count */ + + var min_count = 4 + /* min repeat count */ + + if (nextlen === 0) { + max_count = 138 + min_count = 3 + } + + tree[(max_code + 1) * 2 + 1] = + /*.Len*/ + 0xffff + /* guard */ + + for (n = 0; n <= max_code; n++) { + curlen = nextlen + nextlen = tree[(n + 1) * 2 + 1] + /*.Len*/ + + if (++count < max_count && curlen === nextlen) { + continue + } else if (count < min_count) { + s.bl_tree[curlen * 2] += + /*.Freq*/ + count + } else if (curlen !== 0) { + if (curlen !== prevlen) { + s.bl_tree[curlen * 2] /*.Freq*/++ + } + + s.bl_tree[REP_3_6 * 2] /*.Freq*/++ + } else if (count <= 10) { + s.bl_tree[REPZ_3_10 * 2] /*.Freq*/++ + } else { + s.bl_tree[REPZ_11_138 * 2] /*.Freq*/++ + } + + count = 0 + prevlen = curlen + + if (nextlen === 0) { + max_count = 138 + min_count = 3 + } else if (curlen === nextlen) { + max_count = 6 + min_count = 3 + } else { + max_count = 7 + min_count = 4 + } + } + } + /* =========================================================================== + * Send a literal or distance tree in compressed form, using the codes in + * bl_tree. 
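+   * Informative: runs of equal code lengths are collapsed into the
+   * REP_3_6, REPZ_3_10 and REPZ_11_138 symbols declared above, i.e. the
+   * code length codes 16, 17 and 18 of RFC 1951.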
+ */ + + var send_tree = function send_tree( + s, + tree, + max_code // deflate_state *s; // ct_data *tree; /* the tree to be scanned */ // int max_code; /* and its largest code of non zero frequency */ + ) { + var n + /* iterates over all tree elements */ + + var prevlen = -1 + /* last emitted length */ + + var curlen + /* length of current code */ + + var nextlen = tree[0 * 2 + 1] + /*.Len*/ + /* length of next code */ + + var count = 0 + /* repeat count of the current code */ + + var max_count = 7 + /* max repeat count */ + + var min_count = 4 + /* min repeat count */ + + /* tree[max_code+1].Len = -1; */ + + /* guard already set */ + + if (nextlen === 0) { + max_count = 138 + min_count = 3 + } + + for (n = 0; n <= max_code; n++) { + curlen = nextlen + nextlen = tree[(n + 1) * 2 + 1] + /*.Len*/ + + if (++count < max_count && curlen === nextlen) { + continue + } else if (count < min_count) { + do { + send_code(s, curlen, s.bl_tree) + } while (--count !== 0) + } else if (curlen !== 0) { + if (curlen !== prevlen) { + send_code(s, curlen, s.bl_tree) + count-- + } //Assert(count >= 3 && count <= 6, " 3_6?"); + + send_code(s, REP_3_6, s.bl_tree) + send_bits(s, count - 3, 2) + } else if (count <= 10) { + send_code(s, REPZ_3_10, s.bl_tree) + send_bits(s, count - 3, 3) + } else { + send_code(s, REPZ_11_138, s.bl_tree) + send_bits(s, count - 11, 7) + } + + count = 0 + prevlen = curlen + + if (nextlen === 0) { + max_count = 138 + min_count = 3 + } else if (curlen === nextlen) { + max_count = 6 + min_count = 3 + } else { + max_count = 7 + min_count = 4 + } + } + } + /* =========================================================================== + * Construct the Huffman tree for the bit lengths and return the index in + * bl_order of the last bit length code to send. + */ + + var build_bl_tree = function build_bl_tree(s) { + var max_blindex + /* index of last bit length code of non zero freq */ + + /* Determine the bit length frequencies for literal and distance trees */ + + scan_tree(s, s.dyn_ltree, s.l_desc.max_code) + scan_tree(s, s.dyn_dtree, s.d_desc.max_code) + /* Build the bit length tree: */ + + build_tree(s, s.bl_desc) + /* opt_len now includes the length of the tree representations, except + * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. + */ + + /* Determine the number of bit length codes to send. The pkzip format + * requires that at least 4 bit length codes be sent. (appnote.txt says + * 3 but the actual value used is 4.) + */ + + for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { + if ( + s.bl_tree[bl_order[max_blindex] * 2 + 1] !== + /*.Len*/ + 0 + ) { + break + } + } + /* Update opt_len to include the bit length tree and counts */ + + s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4 //Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", + // s->opt_len, s->static_len)); + + return max_blindex + } + /* =========================================================================== + * Send the header for a block using dynamic Huffman trees: the counts, the + * lengths of the bit length codes, the literal tree and the distance tree. + * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. 
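+   * Informative: these values become the HLIT (= lcodes - 257, 5 bits),
+   * HDIST (= dcodes - 1, 5 bits) and HCLEN (= blcodes - 4, 4 bits)
+   * fields of the RFC 1951 dynamic block header.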
+ */ + + var send_all_trees = function send_all_trees( + s, + lcodes, + dcodes, + blcodes // deflate_state *s; // int lcodes, dcodes, blcodes; /* number of codes for each tree */ + ) { + var rank + /* index in bl_order */ + //Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); + //Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, + // "too many codes"); + //Tracev((stderr, "\nbl counts: ")); + + send_bits(s, lcodes - 257, 5) + /* not +255 as stated in appnote.txt */ + + send_bits(s, dcodes - 1, 5) + send_bits(s, blcodes - 4, 4) + /* not -3 as stated in appnote.txt */ + + for (rank = 0; rank < blcodes; rank++) { + //Tracev((stderr, "\nbl code %2d ", bl_order[rank])); + send_bits( + s, + s.bl_tree[bl_order[rank] * 2 + 1], + /*.Len*/ + 3 + ) + } //Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_ltree, lcodes - 1) + /* literal tree */ + //Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_dtree, dcodes - 1) + /* distance tree */ + //Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); + } + /* =========================================================================== + * Check if the data type is TEXT or BINARY, using the following algorithm: + * - TEXT if the two conditions below are satisfied: + * a) There are no non-portable control characters belonging to the + * "black list" (0..6, 14..25, 28..31). + * b) There is at least one printable character belonging to the + * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). + * - BINARY otherwise. + * - The following partially-portable control characters form a + * "gray list" that is ignored in this detection algorithm: + * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). + * IN assertion: the fields Freq of dyn_ltree are set. + */ + + var detect_data_type = function detect_data_type(s) { + /* black_mask is the bit mask of black-listed bytes + * set bits 0..6, 14..25, and 28..31 + * 0xf3ffc07f = binary 11110011111111111100000001111111 + */ + var black_mask = 0xf3ffc07f + var n + /* Check for non-textual ("black-listed") bytes. */ + + for (n = 0; n <= 31; n++, black_mask >>>= 1) { + if ( + black_mask & 1 && + s.dyn_ltree[n * 2] !== + /*.Freq*/ + 0 + ) { + return Z_BINARY + } + } + /* Check for textual ("white-listed") bytes. */ + + if ( + s.dyn_ltree[9 * 2] !== + /*.Freq*/ + 0 || + s.dyn_ltree[10 * 2] !== + /*.Freq*/ + 0 || + s.dyn_ltree[13 * 2] !== + /*.Freq*/ + 0 + ) { + return Z_TEXT + } + + for (n = 32; n < LITERALS; n++) { + if ( + s.dyn_ltree[n * 2] !== + /*.Freq*/ + 0 + ) { + return Z_TEXT + } + } + /* There are no "black-listed" or "white-listed" bytes: + * this stream either is empty or has tolerated ("gray-listed") bytes only. + */ + + return Z_BINARY + } + + var static_init_done = false + /* =========================================================================== + * Initialize the tree data structures for a new zlib stream. 
+ */ + + var _tr_init = function _tr_init(s) { + if (!static_init_done) { + tr_static_init() + static_init_done = true + } + + s.l_desc = new TreeDesc(s.dyn_ltree, static_l_desc) + s.d_desc = new TreeDesc(s.dyn_dtree, static_d_desc) + s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc) + s.bi_buf = 0 + s.bi_valid = 0 + /* Initialize the first block of the first file: */ + + init_block(s) + } + /* =========================================================================== + * Send a stored block + */ + + var _tr_stored_block = function _tr_stored_block( + s, + buf, + stored_len, + last //DeflateState *s; //charf *buf; /* input block */ //ulg stored_len; /* length of input block */ //int last; /* one if this is the last block for a file */ + ) { + send_bits(s, (STORED_BLOCK << 1) + (last ? 1 : 0), 3) + /* send block type */ + + copy_block(s, buf, stored_len, true) + /* with header */ + } + /* =========================================================================== + * Send one empty static block to give enough lookahead for inflate. + * This takes 10 bits, of which 7 may remain in the bit buffer. + */ + + var _tr_align = function _tr_align(s) { + send_bits(s, STATIC_TREES << 1, 3) + send_code(s, END_BLOCK, static_ltree) + bi_flush(s) + } + /* =========================================================================== + * Determine the best encoding for the current block: dynamic trees, static + * trees or store, and output the encoded block to the zip file. + */ + + var _tr_flush_block = function _tr_flush_block( + s, + buf, + stored_len, + last //DeflateState *s; //charf *buf; /* input block, or NULL if too old */ //ulg stored_len; /* length of input block */ //int last; /* one if this is the last block for a file */ + ) { + var opt_lenb, static_lenb + /* opt_len and static_len in bytes */ + + var max_blindex = 0 + /* index of last bit length code of non zero freq */ + + /* Build the Huffman trees unless a stored block is forced */ + + if (s.level > 0) { + /* Check if the file is binary or text */ + if (s.strm.data_type === Z_UNKNOWN) { + s.strm.data_type = detect_data_type(s) + } + /* Construct the literal and distance trees */ + + build_tree(s, s.l_desc) // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + + build_tree(s, s.d_desc) // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + + /* At this point, opt_len and static_len are the total bit lengths of + * the compressed block data, excluding the tree representations. + */ + + /* Build the bit length tree for the above two trees, and get the index + * in bl_order of the last bit length code to send. + */ + + max_blindex = build_bl_tree(s) + /* Determine the best encoding. Compute the block lengths in bytes. */ + + opt_lenb = (s.opt_len + 3 + 7) >>> 3 + static_lenb = (s.static_len + 3 + 7) >>> 3 // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", + // opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, + // s->last_lit)); + + if (static_lenb <= opt_lenb) { + opt_lenb = static_lenb + } + } else { + // Assert(buf != (char*)0, "lost buf"); + opt_lenb = static_lenb = stored_len + 5 + /* force a stored block */ + } + + if (stored_len + 4 <= opt_lenb && buf !== -1) { + /* 4: two words for the lengths */ + + /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. + * Otherwise we can't have processed more than WSIZE input bytes since + * the last block flush, because compression would have been + * successful. 
If LIT_BUFSIZE <= WSIZE, it is never too late to + * transform a block into a stored block. + */ + _tr_stored_block(s, buf, stored_len, last) + } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) { + send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3) + compress_block(s, static_ltree, static_dtree) + } else { + send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3) + send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1) + compress_block(s, s.dyn_ltree, s.dyn_dtree) + } // Assert (s->compressed_len == s->bits_sent, "bad compressed size"); + + /* The above check is made mod 2^32, for files larger than 512 MB + * and uLong implemented on 32 bits. + */ + + init_block(s) + + if (last) { + bi_windup(s) + } // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, + // s->compressed_len-7*last)); + } + /* =========================================================================== + * Save the match info and tally the frequency counts. Return true if + * the current block must be flushed. + */ + + var _tr_tally = function _tr_tally( + s, + dist, + lc // deflate_state *s; // unsigned dist; /* distance of matched string */ // unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ + ) { + //let out_length, in_length, dcode; + s.pending_buf[s.d_buf + s.last_lit * 2] = (dist >>> 8) & 0xff + s.pending_buf[s.d_buf + s.last_lit * 2 + 1] = dist & 0xff + s.pending_buf[s.l_buf + s.last_lit] = lc & 0xff + s.last_lit++ + + if (dist === 0) { + /* lc is the unmatched char */ + s.dyn_ltree[lc * 2] /*.Freq*/++ + } else { + s.matches++ + /* Here, lc is the match length - MIN_MATCH */ + + dist-- + /* dist = match distance - 1 */ + //Assert((ush)dist < (ush)MAX_DIST(s) && + // (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && + // (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); + + s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2] /*.Freq*/++ + s.dyn_dtree[d_code(dist) * 2] /*.Freq*/++ + } // (!) This block is disabled in zlib defaults, + // don't enable it for binary compatibility + //#ifdef TRUNCATE_BLOCK + // /* Try to guess if it is profitable to stop the current block here */ + // if ((s.last_lit & 0x1fff) === 0 && s.level > 2) { + // /* Compute an upper bound for the compressed length */ + // out_length = s.last_lit*8; + // in_length = s.strstart - s.block_start; + // + // for (dcode = 0; dcode < D_CODES; dcode++) { + // out_length += s.dyn_dtree[dcode*2]/*.Freq*/ * (5 + extra_dbits[dcode]); + // } + // out_length >>>= 3; + // //Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", + // // s->last_lit, in_length, out_length, + // // 100L - out_length*100L/in_length)); + // if (s.matches < (s.last_lit>>1)/*int /2*/ && out_length < (in_length>>1)/*int /2*/) { + // return true; + // } + // } + //#endif + + return s.last_lit === s.lit_bufsize - 1 + /* We avoid equality with lit_bufsize because of wraparound at 64K + * on 16 bit machines and because stored blocks are restricted to + * 64K-1 bytes. + */ + } + + var _tr_init_1 = _tr_init + var _tr_stored_block_1 = _tr_stored_block + var _tr_flush_block_1 = _tr_flush_block + var _tr_tally_1 = _tr_tally + var _tr_align_1 = _tr_align + var trees = { + _tr_init: _tr_init_1, + _tr_stored_block: _tr_stored_block_1, + _tr_flush_block: _tr_flush_block_1, + _tr_tally: _tr_tally_1, + _tr_align: _tr_align_1, + } + + // It isn't worth it to make additional optimizations as in original. + // Small size is preferable. 
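+  // Informative note on the checksum below: Adler-32 keeps two running
+  // sums modulo 65521, s1 = 1 + d1 + ... + dn and s2 = the sum of the
+  // successive s1 values, and returns (s2 << 16) | s1. The 2000-byte
+  // batches in the loop keep s2 within 31 bits between the '%='
+  // reductions, as the comment inside the loop explains.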
+ // (C) 1995-2013 Jean-loup Gailly and Mark Adler + // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin + // + // This software is provided 'as-is', without any express or implied + // warranty. In no event will the authors be held liable for any damages + // arising from the use of this software. + // + // Permission is granted to anyone to use this software for any purpose, + // including commercial applications, and to alter it and redistribute it + // freely, subject to the following restrictions: + // + // 1. The origin of this software must not be misrepresented; you must not + // claim that you wrote the original software. If you use this software + // in a product, an acknowledgment in the product documentation would be + // appreciated but is not required. + // 2. Altered source versions must be plainly marked as such, and must not be + // misrepresented as being the original software. + // 3. This notice may not be removed or altered from any source distribution. + + var adler32 = function adler32(adler, buf, len, pos) { + var s1 = (adler & 0xffff) | 0, + s2 = ((adler >>> 16) & 0xffff) | 0, + n = 0 + + while (len !== 0) { + // Set limit ~ twice less than 5552, to keep + // s2 in 31-bits, because we force signed ints. + // in other case %= will fail. + n = len > 2000 ? 2000 : len + len -= n + + do { + s1 = (s1 + buf[pos++]) | 0 + s2 = (s2 + s1) | 0 + } while (--n) + + s1 %= 65521 + s2 %= 65521 + } + + return s1 | (s2 << 16) | 0 + } + + var adler32_1 = adler32 + + // So write code to minimize size - no pregenerated tables + // and array tools dependencies. + // (C) 1995-2013 Jean-loup Gailly and Mark Adler + // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin + // + // This software is provided 'as-is', without any express or implied + // warranty. In no event will the authors be held liable for any damages + // arising from the use of this software. + // + // Permission is granted to anyone to use this software for any purpose, + // including commercial applications, and to alter it and redistribute it + // freely, subject to the following restrictions: + // + // 1. The origin of this software must not be misrepresented; you must not + // claim that you wrote the original software. If you use this software + // in a product, an acknowledgment in the product documentation would be + // appreciated but is not required. + // 2. Altered source versions must be plainly marked as such, and must not be + // misrepresented as being the original software. + // 3. This notice may not be removed or altered from any source distribution. + // Use ordinary array, since untyped makes no boost here + + var makeTable = function makeTable() { + var c, + table = [] + + for (var n = 0; n < 256; n++) { + c = n + + for (var k = 0; k < 8; k++) { + c = c & 1 ? 0xedb88320 ^ (c >>> 1) : c >>> 1 + } + + table[n] = c + } + + return table + } // Create table on load. Just 255 signed longs. Not a problem. + + var crcTable = new Uint32Array(makeTable()) + + var crc32 = function crc32(crc, buf, len, pos) { + var t = crcTable + var end = pos + len + crc ^= -1 + + for (var i = pos; i < end; i++) { + crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xff] + } + + return crc ^ -1 // >>> 0; + } + + var crc32_1 = crc32 + + // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin + // + // This software is provided 'as-is', without any express or implied + // warranty. In no event will the authors be held liable for any damages + // arising from the use of this software. 
+ // + // Permission is granted to anyone to use this software for any purpose, + // including commercial applications, and to alter it and redistribute it + // freely, subject to the following restrictions: + // + // 1. The origin of this software must not be misrepresented; you must not + // claim that you wrote the original software. If you use this software + // in a product, an acknowledgment in the product documentation would be + // appreciated but is not required. + // 2. Altered source versions must be plainly marked as such, and must not be + // misrepresented as being the original software. + // 3. This notice may not be removed or altered from any source distribution. + + var messages = { + 2: 'need dictionary', + + /* Z_NEED_DICT 2 */ + 1: 'stream end', + + /* Z_STREAM_END 1 */ + 0: '', + + /* Z_OK 0 */ + '-1': 'file error', + + /* Z_ERRNO (-1) */ + '-2': 'stream error', + + /* Z_STREAM_ERROR (-2) */ + '-3': 'data error', + + /* Z_DATA_ERROR (-3) */ + '-4': 'insufficient memory', + + /* Z_MEM_ERROR (-4) */ + '-5': 'buffer error', + + /* Z_BUF_ERROR (-5) */ + '-6': 'incompatible version', + /* Z_VERSION_ERROR (-6) */ + } + + // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin + // + // This software is provided 'as-is', without any express or implied + // warranty. In no event will the authors be held liable for any damages + // arising from the use of this software. + // + // Permission is granted to anyone to use this software for any purpose, + // including commercial applications, and to alter it and redistribute it + // freely, subject to the following restrictions: + // + // 1. The origin of this software must not be misrepresented; you must not + // claim that you wrote the original software. If you use this software + // in a product, an acknowledgment in the product documentation would be + // appreciated but is not required. + // 2. Altered source versions must be plainly marked as such, and must not be + // misrepresented as being the original software. + // 3. This notice may not be removed or altered from any source distribution. + + var constants = { + /* Allowed flush values; see deflate() and inflate() below for details */ + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_TREES: 6, + + /* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + //Z_VERSION_ERROR: -6, + + /* compression levels */ + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + + /* Possible values of the data_type field (though see inflate()) */ + Z_BINARY: 0, + Z_TEXT: 1, + //Z_ASCII: 1, // = Z_TEXT (deprecated) + Z_UNKNOWN: 2, + + /* The deflate compression method */ + Z_DEFLATED: 8, //Z_NULL: null // Use -1 or null inline, depending on var type + } + + // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin + // + // This software is provided 'as-is', without any express or implied + // warranty. In no event will the authors be held liable for any damages + // arising from the use of this software. 
+ // + // Permission is granted to anyone to use this software for any purpose, + // including commercial applications, and to alter it and redistribute it + // freely, subject to the following restrictions: + // + // 1. The origin of this software must not be misrepresented; you must not + // claim that you wrote the original software. If you use this software + // in a product, an acknowledgment in the product documentation would be + // appreciated but is not required. + // 2. Altered source versions must be plainly marked as such, and must not be + // misrepresented as being the original software. + // 3. This notice may not be removed or altered from any source distribution. + + var _tr_init$1 = trees._tr_init, + _tr_stored_block$1 = trees._tr_stored_block, + _tr_flush_block$1 = trees._tr_flush_block, + _tr_tally$1 = trees._tr_tally, + _tr_align$1 = trees._tr_align + /* Public constants ==========================================================*/ + + /* ===========================================================================*/ + + var Z_NO_FLUSH = constants.Z_NO_FLUSH, + Z_PARTIAL_FLUSH = constants.Z_PARTIAL_FLUSH, + Z_FULL_FLUSH = constants.Z_FULL_FLUSH, + Z_FINISH = constants.Z_FINISH, + Z_BLOCK = constants.Z_BLOCK, + Z_OK = constants.Z_OK, + Z_STREAM_END = constants.Z_STREAM_END, + Z_STREAM_ERROR = constants.Z_STREAM_ERROR, + Z_DATA_ERROR = constants.Z_DATA_ERROR, + Z_BUF_ERROR = constants.Z_BUF_ERROR, + Z_DEFAULT_COMPRESSION = constants.Z_DEFAULT_COMPRESSION, + Z_FILTERED = constants.Z_FILTERED, + Z_HUFFMAN_ONLY = constants.Z_HUFFMAN_ONLY, + Z_RLE = constants.Z_RLE, + Z_FIXED$1 = constants.Z_FIXED, + Z_DEFAULT_STRATEGY = constants.Z_DEFAULT_STRATEGY, + Z_UNKNOWN$1 = constants.Z_UNKNOWN, + Z_DEFLATED = constants.Z_DEFLATED + /*============================================================================*/ + + var MAX_MEM_LEVEL = 9 + /* Maximum value for memLevel in deflateInit2 */ + + var MAX_WBITS = 15 + /* 32K LZ77 window */ + + var DEF_MEM_LEVEL = 8 + var LENGTH_CODES$1 = 29 + /* number of length codes, not counting the special END_BLOCK code */ + + var LITERALS$1 = 256 + /* number of literal bytes 0..255 */ + + var L_CODES$1 = LITERALS$1 + 1 + LENGTH_CODES$1 + /* number of Literal or Length codes, including the END_BLOCK code */ + + var D_CODES$1 = 30 + /* number of distance codes */ + + var BL_CODES$1 = 19 + /* number of codes used to transfer the bit lengths */ + + var HEAP_SIZE$1 = 2 * L_CODES$1 + 1 + /* maximum heap size */ + + var MAX_BITS$1 = 15 + /* All codes must not exceed MAX_BITS bits */ + + var MIN_MATCH$1 = 3 + var MAX_MATCH$1 = 258 + var MIN_LOOKAHEAD = MAX_MATCH$1 + MIN_MATCH$1 + 1 + var PRESET_DICT = 0x20 + var INIT_STATE = 42 + var EXTRA_STATE = 69 + var NAME_STATE = 73 + var COMMENT_STATE = 91 + var HCRC_STATE = 103 + var BUSY_STATE = 113 + var FINISH_STATE = 666 + var BS_NEED_MORE = 1 + /* block not completed, need more input or more output */ + + var BS_BLOCK_DONE = 2 + /* block flush performed */ + + var BS_FINISH_STARTED = 3 + /* finish started, need only more output at next deflate */ + + var BS_FINISH_DONE = 4 + /* finish done, accept no more input or output */ + + var OS_CODE = 0x03 // Unix :) . Don't detect, use this default. + + var err = function err(strm, errorCode) { + strm.msg = messages[errorCode] + return errorCode + } + + var rank = function rank(f) { + return (f << 1) - (f > 4 ? 
9 : 0) + } + + var zero$1 = function zero(buf) { + var len = buf.length + + while (--len >= 0) { + buf[len] = 0 + } + } + /* eslint-disable new-cap */ + + var HASH_ZLIB = function HASH_ZLIB(s, prev, data) { + return ((prev << s.hash_shift) ^ data) & s.hash_mask + } // This hash causes less collisions, https://github.com/nodeca/pako/issues/135 + // But breaks binary compatibility + //let HASH_FAST = (s, prev, data) => ((prev << 8) + (prev >> 8) + (data << 4)) & s.hash_mask; + + var HASH = HASH_ZLIB + /* ========================================================================= + * Flush as much pending output as possible. All deflate() output goes + * through this function so some applications may wish to modify it + * to avoid allocating a large strm->output buffer and copying into it. + * (See also read_buf()). + */ + + var flush_pending = function flush_pending(strm) { + var s = strm.state //_tr_flush_bits(s); + + var len = s.pending + + if (len > strm.avail_out) { + len = strm.avail_out + } + + if (len === 0) { + return + } + + strm.output.set(s.pending_buf.subarray(s.pending_out, s.pending_out + len), strm.next_out) + strm.next_out += len + s.pending_out += len + strm.total_out += len + strm.avail_out -= len + s.pending -= len + + if (s.pending === 0) { + s.pending_out = 0 + } + } + + var flush_block_only = function flush_block_only(s, last) { + _tr_flush_block$1(s, s.block_start >= 0 ? s.block_start : -1, s.strstart - s.block_start, last) + + s.block_start = s.strstart + flush_pending(s.strm) + } + + var put_byte = function put_byte(s, b) { + s.pending_buf[s.pending++] = b + } + /* ========================================================================= + * Put a short in the pending buffer. The 16-bit value is put in MSB order. + * IN assertion: the stream state is correct and there is enough room in + * pending_buf. + */ + + var putShortMSB = function putShortMSB(s, b) { + // put_byte(s, (Byte)(b >> 8)); + // put_byte(s, (Byte)(b & 0xff)); + s.pending_buf[s.pending++] = (b >>> 8) & 0xff + s.pending_buf[s.pending++] = b & 0xff + } + /* =========================================================================== + * Read a new buffer from the current input stream, update the adler32 + * and total number of bytes read. All deflate() input goes through + * this function so some applications may wish to modify it to avoid + * allocating a large strm->input buffer and copying from it. + * (See also flush_pending()). + */ + + var read_buf = function read_buf(strm, buf, start, size) { + var len = strm.avail_in + + if (len > size) { + len = size + } + + if (len === 0) { + return 0 + } + + strm.avail_in -= len // zmemcpy(buf, strm->next_in, len); + + buf.set(strm.input.subarray(strm.next_in, strm.next_in + len), start) + + if (strm.state.wrap === 1) { + strm.adler = adler32_1(strm.adler, buf, len, start) + } else if (strm.state.wrap === 2) { + strm.adler = crc32_1(strm.adler, buf, len, start) + } + + strm.next_in += len + strm.total_in += len + return len + } + /* =========================================================================== + * Set match_start to the longest match starting at the given string and + * return its length. Matches shorter or equal to prev_length are discarded, + * in which case the result is equal to prev_length and match_start is + * garbage. 
+ * IN assertions: cur_match is the head of the hash chain for the current + * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 + * OUT assertion: the match length is not greater than s->lookahead. + */ + + var longest_match = function longest_match(s, cur_match) { + var chain_length = s.max_chain_length + /* max hash chain length */ + + var scan = s.strstart + /* current string */ + + var match + /* matched string */ + + var len + /* length of current match */ + + var best_len = s.prev_length + /* best match length so far */ + + var nice_match = s.nice_match + /* stop if match long enough */ + + var limit = s.strstart > s.w_size - MIN_LOOKAHEAD ? s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0 + /*NIL*/ + var _win = s.window // shortcut + + var wmask = s.w_mask + var prev = s.prev + /* Stop when cur_match becomes <= limit. To simplify the code, + * we prevent matches with the string of window index 0. + */ + + var strend = s.strstart + MAX_MATCH$1 + var scan_end1 = _win[scan + best_len - 1] + var scan_end = _win[scan + best_len] + /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16. + * It is easy to get rid of this optimization if necessary. + */ + // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); + + /* Do not waste too much time if we already have a good match: */ + + if (s.prev_length >= s.good_match) { + chain_length >>= 2 + } + /* Do not look for matches beyond the end of the input. This is necessary + * to make deflate deterministic. + */ + + if (nice_match > s.lookahead) { + nice_match = s.lookahead + } // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); + + do { + // Assert(cur_match < s->strstart, "no future"); + match = cur_match + /* Skip to next match if the match length cannot increase + * or if the match length is less than 2. Note that the checks below + * for insufficient lookahead only occur occasionally for performance + * reasons. Therefore uninitialized memory will be accessed, and + * conditional jumps will be made that depend on those values. + * However the length of the match is limited to the lookahead, so + * the output of deflate is not affected by the uninitialized values. + */ + + if ( + _win[match + best_len] !== scan_end || + _win[match + best_len - 1] !== scan_end1 || + _win[match] !== _win[scan] || + _win[++match] !== _win[scan + 1] + ) { + continue + } + /* The check at best_len-1 can be removed because it will be made + * again later. (This heuristic is not always a win.) + * It is not necessary to compare scan[2] and match[2] since they + * are always equal when the other bytes match, given that + * the hash keys are equal and that HASH_BITS >= 8. + */ + + scan += 2 + match++ // Assert(*scan == *match, "match[2]?"); + + /* We check for insufficient lookahead only every 8th comparison; + * the 256th check will be made at strstart+258. 
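+   * Informative: the do/while below intentionally has an empty body; all
+   * of the work happens in its condition, which advances scan and match
+   * and compares up to eight bytes of the candidate match per iteration.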
+ */ + + do { + /*jshint noempty:false*/ + } while ( + _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && + scan < strend + ) // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); + + len = MAX_MATCH$1 - (strend - scan) + scan = strend - MAX_MATCH$1 + + if (len > best_len) { + s.match_start = cur_match + best_len = len + + if (len >= nice_match) { + break + } + + scan_end1 = _win[scan + best_len - 1] + scan_end = _win[scan + best_len] + } + } while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0) + + if (best_len <= s.lookahead) { + return best_len + } + + return s.lookahead + } + /* =========================================================================== + * Fill the window when the lookahead becomes insufficient. + * Updates strstart and lookahead. + * + * IN assertion: lookahead < MIN_LOOKAHEAD + * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD + * At least one byte has been read, or avail_in == 0; reads are + * performed for at least two bytes (required for the zip translate_eol + * option -- not supported here). + */ + + var fill_window = function fill_window(s) { + var _w_size = s.w_size + var p, n, m, more, str //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead"); + + do { + more = s.window_size - s.lookahead - s.strstart // JS ints have 32 bit, block below not needed + + /* Deal with !@#$% 64K limit: */ + //if (sizeof(int) <= 2) { + // if (more == 0 && s->strstart == 0 && s->lookahead == 0) { + // more = wsize; + // + // } else if (more == (unsigned)(-1)) { + // /* Very unlikely, but possible on 16 bit machine if + // * strstart == 0 && lookahead == 1 (input done a byte at time) + // */ + // more--; + // } + //} + + /* If the window is almost full and there is insufficient lookahead, + * move the upper half to the lower one to make room in the upper half. + */ + + if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) { + s.window.set(s.window.subarray(_w_size, _w_size + _w_size), 0) + s.match_start -= _w_size + s.strstart -= _w_size + /* we now have strstart >= MAX_DIST */ + + s.block_start -= _w_size + /* Slide the hash table (could be avoided with 32 bit values + at the expense of memory usage). We slide even when level == 0 + to keep the hash table consistent if we switch back to level > 0 + later. (Using level 0 permanently is not an optimal usage of + zlib, so we don't care about this pathological case.) + */ + + n = s.hash_size + p = n + + do { + m = s.head[--p] + s.head[p] = m >= _w_size ? m - _w_size : 0 + } while (--n) + + n = _w_size + p = n + + do { + m = s.prev[--p] + s.prev[p] = m >= _w_size ? m - _w_size : 0 + /* If n is not on any hash chain, prev[n] is garbage but + * its value will never be used. + */ + } while (--n) + + more += _w_size + } + + if (s.strm.avail_in === 0) { + break + } + /* If there was no sliding: + * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && + * more == window_size - lookahead - strstart + * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) + * => more >= window_size - 2*WSIZE + 2 + * In the BIG_MEM or MMAP case (not yet supported), + * window_size == input_size + MIN_LOOKAHEAD && + * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. + * Otherwise, window_size == 2*WSIZE so more >= 2. 
+ * If there was sliding, more >= WSIZE. So in all cases, more >= 2. + */ + //Assert(more >= 2, "more < 2"); + + n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more) + s.lookahead += n + /* Initialize the hash value now that we have some input: */ + + if (s.lookahead + s.insert >= MIN_MATCH$1) { + str = s.strstart - s.insert + s.ins_h = s.window[str] + /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */ + + s.ins_h = HASH(s, s.ins_h, s.window[str + 1]) //#if MIN_MATCH != 3 + // Call update_hash() MIN_MATCH-3 more times + //#endif + + while (s.insert) { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = HASH(s, s.ins_h, s.window[str + MIN_MATCH$1 - 1]) + s.prev[str & s.w_mask] = s.head[s.ins_h] + s.head[s.ins_h] = str + str++ + s.insert-- + + if (s.lookahead + s.insert < MIN_MATCH$1) { + break + } + } + } + /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, + * but this is not important since only literal bytes will be emitted. + */ + } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0) + /* If the WIN_INIT bytes after the end of the current data have never been + * written, then zero those bytes in order to avoid memory check reports of + * the use of uninitialized (or uninitialised as Julian writes) bytes by + * the longest match routines. Update the high water mark for the next + * time through here. WIN_INIT is set to MAX_MATCH since the longest match + * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead. + */ + // if (s.high_water < s.window_size) { + // const curr = s.strstart + s.lookahead; + // let init = 0; + // + // if (s.high_water < curr) { + // /* Previous high water mark below current data -- zero WIN_INIT + // * bytes or up to end of window, whichever is less. + // */ + // init = s.window_size - curr; + // if (init > WIN_INIT) + // init = WIN_INIT; + // zmemzero(s->window + curr, (unsigned)init); + // s->high_water = curr + init; + // } + // else if (s->high_water < (ulg)curr + WIN_INIT) { + // /* High water mark at or above current data, but below current data + // * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up + // * to end of window, whichever is less. + // */ + // init = (ulg)curr + WIN_INIT - s->high_water; + // if (init > s->window_size - s->high_water) + // init = s->window_size - s->high_water; + // zmemzero(s->window + s->high_water, (unsigned)init); + // s->high_water += init; + // } + // } + // + // Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, + // "not enough room for search"); + } + /* =========================================================================== + * Copy without compression as much as possible from the input stream, return + * the current block state. + * This function does not insert new strings in the dictionary since + * uncompressible data is probably not useful. This function is used + * only for the level=0 compression option. + * NOTE: this function should be optimized to avoid extra copying from + * window to pending_buf. 
+ */ + + var deflate_stored = function deflate_stored(s, flush) { + /* Stored blocks are limited to 0xffff bytes, pending_buf is limited + * to pending_buf_size, and each stored block has a 5 byte header: + */ + var max_block_size = 0xffff + + if (max_block_size > s.pending_buf_size - 5) { + max_block_size = s.pending_buf_size - 5 + } + /* Copy as much as possible from input to output: */ + + for (;;) { + /* Fill the window as much as possible: */ + if (s.lookahead <= 1) { + //Assert(s->strstart < s->w_size+MAX_DIST(s) || + // s->block_start >= (long)s->w_size, "slide too late"); + // if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) || + // s.block_start >= s.w_size)) { + // throw new Error("slide too late"); + // } + fill_window(s) + + if (s.lookahead === 0 && flush === Z_NO_FLUSH) { + return BS_NEED_MORE + } + + if (s.lookahead === 0) { + break + } + /* flush the current block */ + } //Assert(s->block_start >= 0L, "block gone"); + // if (s.block_start < 0) throw new Error("block gone"); + + s.strstart += s.lookahead + s.lookahead = 0 + /* Emit a stored block if pending_buf will be full: */ + + var max_start = s.block_start + max_block_size + + if (s.strstart === 0 || s.strstart >= max_start) { + /* strstart == 0 is possible when wraparound on 16-bit machine */ + s.lookahead = s.strstart - max_start + s.strstart = max_start + /*** FLUSH_BLOCK(s, 0); ***/ + + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + /* Flush if we may have to slide, otherwise block_start may become + * negative and the data will be gone: + */ + + if (s.strstart - s.block_start >= s.w_size - MIN_LOOKAHEAD) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + } + + s.insert = 0 + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true) + + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED + } + /***/ + + return BS_FINISH_DONE + } + + if (s.strstart > s.block_start) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + + return BS_NEED_MORE + } + /* =========================================================================== + * Compress as much as possible from the input stream, return the current + * block state. + * This function does not perform lazy evaluation of matches and inserts + * new strings in the dictionary only for unmatched strings or for short + * matches. It is used only for the fast compression options. + */ + + var deflate_fast = function deflate_fast(s, flush) { + var hash_head + /* head of the hash chain */ + + var bflush + /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s) + + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE + } + + if (s.lookahead === 0) { + break + /* flush the current block */ + } + } + /* Insert the string window[strstart .. 
strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + + hash_head = 0 + /*NIL*/ + + if (s.lookahead >= MIN_MATCH$1) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = HASH(s, s.ins_h, s.window[s.strstart + MIN_MATCH$1 - 1]) + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h] + s.head[s.ins_h] = s.strstart + /***/ + } + /* Find the longest match, discarding those <= prev_length. + * At this point we have always match_length < MIN_MATCH + */ + + if ( + hash_head !== 0 && + /*NIL*/ + s.strstart - hash_head <= s.w_size - MIN_LOOKAHEAD + ) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head) + /* longest_match() sets match_start */ + } + + if (s.match_length >= MIN_MATCH$1) { + // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only + + /*** _tr_tally_dist(s, s.strstart - s.match_start, + s.match_length - MIN_MATCH, bflush); ***/ + bflush = _tr_tally$1(s, s.strstart - s.match_start, s.match_length - MIN_MATCH$1) + s.lookahead -= s.match_length + /* Insert new strings in the hash table only if the match length + * is not too large. This saves time but degrades compression. + */ + + if ( + s.match_length <= s.max_lazy_match && + /*max_insert_length*/ + s.lookahead >= MIN_MATCH$1 + ) { + s.match_length-- + /* string at strstart already in table */ + + do { + s.strstart++ + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + + s.ins_h = HASH(s, s.ins_h, s.window[s.strstart + MIN_MATCH$1 - 1]) + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h] + s.head[s.ins_h] = s.strstart + /***/ + + /* strstart never exceeds WSIZE-MAX_MATCH, so there are + * always MIN_MATCH bytes ahead. + */ + } while (--s.match_length !== 0) + + s.strstart++ + } else { + s.strstart += s.match_length + s.match_length = 0 + s.ins_h = s.window[s.strstart] + /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */ + + s.ins_h = HASH(s, s.ins_h, s.window[s.strstart + 1]) //#if MIN_MATCH != 3 + // Call UPDATE_HASH() MIN_MATCH-3 more times + //#endif + + /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not + * matter since it will be recomputed at next deflate call. + */ + } + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s.window[s.strstart])); + + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = _tr_tally$1(s, 0, s.window[s.strstart]) + s.lookahead-- + s.strstart++ + } + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + } + + s.insert = s.strstart < MIN_MATCH$1 - 1 ? s.strstart : MIN_MATCH$1 - 1 + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true) + + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED + } + /***/ + + return BS_FINISH_DONE + } + + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + + return BS_BLOCK_DONE + } + /* =========================================================================== + * Same as above, but achieves better compression. We use a lazy + * evaluation for matches: a match is finally adopted only if there is + * no better match at the next window position. 
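+ * For instance, if the match starting at strstart is 3 bytes long but the
+ * match starting at strstart+1 runs for 6 bytes, the byte at strstart is
+ * emitted as a literal and the longer match is used instead.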
+ */ + + var deflate_slow = function deflate_slow(s, flush) { + var hash_head + /* head of hash chain */ + + var bflush + /* set if current block must be flushed */ + + var max_insert + /* Process the input block. */ + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s) + + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE + } + + if (s.lookahead === 0) { + break + } + /* flush the current block */ + } + /* Insert the string window[strstart .. strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + + hash_head = 0 + /*NIL*/ + + if (s.lookahead >= MIN_MATCH$1) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = HASH(s, s.ins_h, s.window[s.strstart + MIN_MATCH$1 - 1]) + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h] + s.head[s.ins_h] = s.strstart + /***/ + } + /* Find the longest match, discarding those <= prev_length. + */ + + s.prev_length = s.match_length + s.prev_match = s.match_start + s.match_length = MIN_MATCH$1 - 1 + + if ( + hash_head !== 0 && + /*NIL*/ + s.prev_length < s.max_lazy_match && + s.strstart - hash_head <= s.w_size - MIN_LOOKAHEAD + /*MAX_DIST(s)*/ + ) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head) + /* longest_match() sets match_start */ + + if ( + s.match_length <= 5 && + (s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH$1 && s.strstart - s.match_start > 4096)) + /*TOO_FAR*/ + ) { + /* If prev_match is also MIN_MATCH, match_start is garbage + * but we will ignore the current match anyway. + */ + s.match_length = MIN_MATCH$1 - 1 + } + } + /* If there was a match at the previous step and the current + * match is not better, output the previous match: + */ + + if (s.prev_length >= MIN_MATCH$1 && s.match_length <= s.prev_length) { + max_insert = s.strstart + s.lookahead - MIN_MATCH$1 + /* Do not insert strings in hash table beyond this. */ + //check_match(s, s.strstart-1, s.prev_match, s.prev_length); + + /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match, + s.prev_length - MIN_MATCH, bflush);***/ + + bflush = _tr_tally$1(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH$1) + /* Insert in hash table all strings up to the end of the match. + * strstart-1 and strstart are already inserted. If there is not + * enough lookahead, the last two strings are not inserted in + * the hash table. + */ + + s.lookahead -= s.prev_length - 1 + s.prev_length -= 2 + + do { + if (++s.strstart <= max_insert) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = HASH(s, s.ins_h, s.window[s.strstart + MIN_MATCH$1 - 1]) + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h] + s.head[s.ins_h] = s.strstart + /***/ + } + } while (--s.prev_length !== 0) + + s.match_available = 0 + s.match_length = MIN_MATCH$1 - 1 + s.strstart++ + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + } else if (s.match_available) { + /* If there was no match at the previous position, output a + * single literal. 
If there was a match but the current match + * is longer, truncate the previous match to a single literal. + */ + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = _tr_tally$1(s, 0, s.window[s.strstart - 1]) + + if (bflush) { + /*** FLUSH_BLOCK_ONLY(s, 0) ***/ + flush_block_only(s, false) + /***/ + } + + s.strstart++ + s.lookahead-- + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + } else { + /* There is no previous match to compare with, wait for + * the next step to decide. + */ + s.match_available = 1 + s.strstart++ + s.lookahead-- + } + } //Assert (flush != Z_NO_FLUSH, "no flush?"); + + if (s.match_available) { + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = _tr_tally$1(s, 0, s.window[s.strstart - 1]) + s.match_available = 0 + } + + s.insert = s.strstart < MIN_MATCH$1 - 1 ? s.strstart : MIN_MATCH$1 - 1 + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true) + + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED + } + /***/ + + return BS_FINISH_DONE + } + + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + + return BS_BLOCK_DONE + } + /* =========================================================================== + * For Z_RLE, simply look for runs of bytes, generate matches only of distance + * one. Do not maintain a hash table. (It will be regenerated if this run of + * deflate switches away from Z_RLE.) + */ + + var deflate_rle = function deflate_rle(s, flush) { + var bflush + /* set if current block must be flushed */ + + var prev + /* byte at distance one to match */ + + var scan, strend + /* scan goes up to strend for length of run */ + + var _win = s.window + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the longest run, plus one for the unrolled loop. 
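+ * For example, a run of 100 identical bytes is coded as one literal
+ * followed by distance-one matches covering the remaining 99 bytes.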
+ */ + if (s.lookahead <= MAX_MATCH$1) { + fill_window(s) + + if (s.lookahead <= MAX_MATCH$1 && flush === Z_NO_FLUSH) { + return BS_NEED_MORE + } + + if (s.lookahead === 0) { + break + } + /* flush the current block */ + } + /* See how many times the previous byte repeats */ + + s.match_length = 0 + + if (s.lookahead >= MIN_MATCH$1 && s.strstart > 0) { + scan = s.strstart - 1 + prev = _win[scan] + + if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) { + strend = s.strstart + MAX_MATCH$1 + + do { + /*jshint noempty:false*/ + } while ( + prev === _win[++scan] && + prev === _win[++scan] && + prev === _win[++scan] && + prev === _win[++scan] && + prev === _win[++scan] && + prev === _win[++scan] && + prev === _win[++scan] && + prev === _win[++scan] && + scan < strend + ) + + s.match_length = MAX_MATCH$1 - (strend - scan) + + if (s.match_length > s.lookahead) { + s.match_length = s.lookahead + } + } //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan"); + } + /* Emit match if have run of MIN_MATCH or longer, else emit literal */ + + if (s.match_length >= MIN_MATCH$1) { + //check_match(s, s.strstart, s.strstart - 1, s.match_length); + + /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/ + bflush = _tr_tally$1(s, 1, s.match_length - MIN_MATCH$1) + s.lookahead -= s.match_length + s.strstart += s.match_length + s.match_length = 0 + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s->window[s->strstart])); + + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = _tr_tally$1(s, 0, s.window[s.strstart]) + s.lookahead-- + s.strstart++ + } + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + } + + s.insert = 0 + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true) + + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED + } + /***/ + + return BS_FINISH_DONE + } + + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + + return BS_BLOCK_DONE + } + /* =========================================================================== + * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table. + * (It will be regenerated if this run of deflate switches away from Huffman.) + */ + + var deflate_huff = function deflate_huff(s, flush) { + var bflush + /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we have a literal to write. 
*/ + if (s.lookahead === 0) { + fill_window(s) + + if (s.lookahead === 0) { + if (flush === Z_NO_FLUSH) { + return BS_NEED_MORE + } + + break + /* flush the current block */ + } + } + /* Output a literal byte */ + + s.match_length = 0 //Tracevv((stderr,"%c", s->window[s->strstart])); + + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + + bflush = _tr_tally$1(s, 0, s.window[s.strstart]) + s.lookahead-- + s.strstart++ + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + } + + s.insert = 0 + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true) + + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED + } + /***/ + + return BS_FINISH_DONE + } + + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false) + + if (s.strm.avail_out === 0) { + return BS_NEED_MORE + } + /***/ + } + + return BS_BLOCK_DONE + } + /* Values for max_lazy_match, good_match and max_chain_length, depending on + * the desired pack level (0..9). The values given below have been tuned to + * exclude worst case performance for pathological files. Better values may be + * found for specific files. + */ + + function Config(good_length, max_lazy, nice_length, max_chain, func) { + this.good_length = good_length + this.max_lazy = max_lazy + this.nice_length = nice_length + this.max_chain = max_chain + this.func = func + } + + var configuration_table = [ + /* good lazy nice chain */ + new Config(0, 0, 0, 0, deflate_stored), + /* 0 store only */ + new Config(4, 4, 8, 4, deflate_fast), + /* 1 max speed, no lazy matches */ + new Config(4, 5, 16, 8, deflate_fast), + /* 2 */ + new Config(4, 6, 32, 32, deflate_fast), + /* 3 */ + new Config(4, 4, 16, 16, deflate_slow), + /* 4 lazy matches */ + new Config(8, 16, 32, 32, deflate_slow), + /* 5 */ + new Config(8, 16, 128, 128, deflate_slow), + /* 6 */ + new Config(8, 32, 128, 256, deflate_slow), + /* 7 */ + new Config(32, 128, 258, 1024, deflate_slow), + /* 8 */ + new Config(32, 258, 258, 4096, deflate_slow), + /* 9 max compression */ + ] + /* =========================================================================== + * Initialize the "longest match" routines for a new zlib stream + */ + + var lm_init = function lm_init(s) { + s.window_size = 2 * s.w_size + /*** CLEAR_HASH(s); ***/ + + zero$1(s.head) // Fill with NIL (= 0); + + /* Set the default configuration parameters: + */ + + s.max_lazy_match = configuration_table[s.level].max_lazy + s.good_match = configuration_table[s.level].good_length + s.nice_match = configuration_table[s.level].nice_length + s.max_chain_length = configuration_table[s.level].max_chain + s.strstart = 0 + s.block_start = 0 + s.lookahead = 0 + s.insert = 0 + s.match_length = s.prev_length = MIN_MATCH$1 - 1 + s.match_available = 0 + s.ins_h = 0 + } + + function DeflateState() { + this.strm = null + /* pointer back to this zlib stream */ + + this.status = 0 + /* as the name implies */ + + this.pending_buf = null + /* output still pending */ + + this.pending_buf_size = 0 + /* size of pending_buf */ + + this.pending_out = 0 + /* next pending byte to output to the stream */ + + this.pending = 0 + /* nb of bytes in the pending buffer */ + + this.wrap = 0 + /* bit 0 true for zlib, bit 1 true for gzip */ + + this.gzhead = null + /* gzip header information to write */ + + this.gzindex = 0 + /* where in extra, name, or comment */ + + this.method = Z_DEFLATED + /* can only be DEFLATED */ + + this.last_flush = -1 + 
/* value of flush param for previous deflate call */ + + this.w_size = 0 + /* LZ77 window size (32K by default) */ + + this.w_bits = 0 + /* log2(w_size) (8..16) */ + + this.w_mask = 0 + /* w_size - 1 */ + + this.window = null + /* Sliding window. Input bytes are read into the second half of the window, + * and move to the first half later to keep a dictionary of at least wSize + * bytes. With this organization, matches are limited to a distance of + * wSize-MAX_MATCH bytes, but this ensures that IO is always + * performed with a length multiple of the block size. + */ + + this.window_size = 0 + /* Actual size of window: 2*wSize, except when the user input buffer + * is directly used as sliding window. + */ + + this.prev = null + /* Link to older string with same hash index. To limit the size of this + * array to 64K, this link is maintained only for the last 32K strings. + * An index in this array is thus a window index modulo 32K. + */ + + this.head = null + /* Heads of the hash chains or NIL. */ + + this.ins_h = 0 + /* hash index of string to be inserted */ + + this.hash_size = 0 + /* number of elements in hash table */ + + this.hash_bits = 0 + /* log2(hash_size) */ + + this.hash_mask = 0 + /* hash_size-1 */ + + this.hash_shift = 0 + /* Number of bits by which ins_h must be shifted at each input + * step. It must be such that after MIN_MATCH steps, the oldest + * byte no longer takes part in the hash key, that is: + * hash_shift * MIN_MATCH >= hash_bits + */ + + this.block_start = 0 + /* Window position at the beginning of the current output block. Gets + * negative when the window is moved backwards. + */ + + this.match_length = 0 + /* length of best match */ + + this.prev_match = 0 + /* previous match */ + + this.match_available = 0 + /* set if previous match exists */ + + this.strstart = 0 + /* start of string to insert */ + + this.match_start = 0 + /* start of matching string */ + + this.lookahead = 0 + /* number of valid bytes ahead in window */ + + this.prev_length = 0 + /* Length of the best match at previous step. Matches not greater than this + * are discarded. This is used in the lazy match evaluation. + */ + + this.max_chain_length = 0 + /* To speed up deflation, hash chains are never searched beyond this + * length. A higher limit improves compression ratio but degrades the + * speed. + */ + + this.max_lazy_match = 0 + /* Attempt to find a better match only when the current match is strictly + * smaller than this value. This mechanism is used only for compression + * levels >= 4. + */ + // That's alias to max_lazy_match, don't use directly + //this.max_insert_length = 0; + + /* Insert new strings in the hash table only if the match length is not + * greater than this length. This saves time but degrades compression. + * max_insert_length is used only for compression levels <= 3. 
+ */ + + this.level = 0 + /* compression level (1..9) */ + + this.strategy = 0 + /* favor or force Huffman coding*/ + + this.good_match = 0 + /* Use a faster search when the previous match is longer than this */ + + this.nice_match = 0 + /* Stop searching when current match exceeds this */ + + /* used by trees.c: */ + + /* Didn't use ct_data typedef below to suppress compiler warning */ + // struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */ + // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */ + // struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */ + // Use flat array of DOUBLE size, with interleaved fata, + // because JS does not support effective + + this.dyn_ltree = new Uint16Array(HEAP_SIZE$1 * 2) + this.dyn_dtree = new Uint16Array((2 * D_CODES$1 + 1) * 2) + this.bl_tree = new Uint16Array((2 * BL_CODES$1 + 1) * 2) + zero$1(this.dyn_ltree) + zero$1(this.dyn_dtree) + zero$1(this.bl_tree) + this.l_desc = null + /* desc. for literal tree */ + + this.d_desc = null + /* desc. for distance tree */ + + this.bl_desc = null + /* desc. for bit length tree */ + //ush bl_count[MAX_BITS+1]; + + this.bl_count = new Uint16Array(MAX_BITS$1 + 1) + /* number of codes at each bit length for an optimal tree */ + //int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */ + + this.heap = new Uint16Array(2 * L_CODES$1 + 1) + /* heap used to build the Huffman trees */ + + zero$1(this.heap) + this.heap_len = 0 + /* number of elements in the heap */ + + this.heap_max = 0 + /* element of largest frequency */ + + /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used. + * The same heap array is used to build all trees. + */ + + this.depth = new Uint16Array(2 * L_CODES$1 + 1) //uch depth[2*L_CODES+1]; + + zero$1(this.depth) + /* Depth of each subtree used as tie breaker for trees of equal frequency + */ + + this.l_buf = 0 + /* buffer index for literals or lengths */ + + this.lit_bufsize = 0 + /* Size of match buffer for literals/lengths. There are 4 reasons for + * limiting lit_bufsize to 64K: + * - frequencies can be kept in 16 bit counters + * - if compression is not successful for the first block, all input + * data is still in the window so we can still emit a stored block even + * when input comes from standard input. (This can also be done for + * all blocks if lit_bufsize is not greater than 32K.) + * - if compression is not successful for a file smaller than 64K, we can + * even emit a stored file instead of a stored block (saving 5 bytes). + * This is applicable only for zip (not gzip or zlib). + * - creating new Huffman trees less frequently may not provide fast + * adaptation to changes in the input data statistics. (Take for + * example a binary file with poorly compressible code followed by + * a highly compressible string table.) Smaller buffer sizes give + * fast adaptation but have of course the overhead of transmitting + * trees more frequently. + * - I can't count above 4 + */ + + this.last_lit = 0 + /* running index in l_buf */ + + this.d_buf = 0 + /* Buffer index for distances. To simplify the code, d_buf and l_buf have + * the same number of elements. To use different lengths, an extra flag + * array would be necessary. 
+ */ + + this.opt_len = 0 + /* bit length of current block with optimal trees */ + + this.static_len = 0 + /* bit length of current block with static trees */ + + this.matches = 0 + /* number of string matches in current block */ + + this.insert = 0 + /* bytes at end of window left to insert */ + + this.bi_buf = 0 + /* Output buffer. bits are inserted starting at the bottom (least + * significant bits). + */ + + this.bi_valid = 0 + /* Number of valid bits in bi_buf. All bits above the last valid bit + * are always zero. + */ + // Used for window memory init. We safely ignore it for JS. That makes + // sense only for pointers and memory check tools. + //this.high_water = 0; + + /* High water mark offset in window for initialized bytes -- bytes above + * this are set to zero in order to avoid memory check warnings when + * longest match routines access bytes past the input. This is then + * updated to the new high water mark. + */ + } + + var deflateResetKeep = function deflateResetKeep(strm) { + if (!strm || !strm.state) { + return err(strm, Z_STREAM_ERROR) + } + + strm.total_in = strm.total_out = 0 + strm.data_type = Z_UNKNOWN$1 + var s = strm.state + s.pending = 0 + s.pending_out = 0 + + if (s.wrap < 0) { + s.wrap = -s.wrap + /* was made negative by deflate(..., Z_FINISH); */ + } + + s.status = s.wrap ? INIT_STATE : BUSY_STATE + strm.adler = + s.wrap === 2 + ? 0 // crc32(0, Z_NULL, 0) + : 1 // adler32(0, Z_NULL, 0) + + s.last_flush = Z_NO_FLUSH + + _tr_init$1(s) + + return Z_OK + } + + var deflateReset = function deflateReset(strm) { + var ret = deflateResetKeep(strm) + + if (ret === Z_OK) { + lm_init(strm.state) + } + + return ret + } + + var deflateSetHeader = function deflateSetHeader(strm, head) { + if (!strm || !strm.state) { + return Z_STREAM_ERROR + } + + if (strm.state.wrap !== 2) { + return Z_STREAM_ERROR + } + + strm.state.gzhead = head + return Z_OK + } + + var deflateInit2 = function deflateInit2(strm, level, method, windowBits, memLevel, strategy) { + if (!strm) { + // === Z_NULL + return Z_STREAM_ERROR + } + + var wrap = 1 + + if (level === Z_DEFAULT_COMPRESSION) { + level = 6 + } + + if (windowBits < 0) { + /* suppress zlib wrapper */ + wrap = 0 + windowBits = -windowBits + } else if (windowBits > 15) { + wrap = 2 + /* write gzip wrapper instead */ + + windowBits -= 16 + } + + if ( + memLevel < 1 || + memLevel > MAX_MEM_LEVEL || + method !== Z_DEFLATED || + windowBits < 8 || + windowBits > 15 || + level < 0 || + level > 9 || + strategy < 0 || + strategy > Z_FIXED$1 + ) { + return err(strm, Z_STREAM_ERROR) + } + + if (windowBits === 8) { + windowBits = 9 + } + /* until 256-byte window bug fixed */ + + var s = new DeflateState() + strm.state = s + s.strm = strm + s.wrap = wrap + s.gzhead = null + s.w_bits = windowBits + s.w_size = 1 << s.w_bits + s.w_mask = s.w_size - 1 + s.hash_bits = memLevel + 7 + s.hash_size = 1 << s.hash_bits + s.hash_mask = s.hash_size - 1 + s.hash_shift = ~~((s.hash_bits + MIN_MATCH$1 - 1) / MIN_MATCH$1) + s.window = new Uint8Array(s.w_size * 2) + s.head = new Uint16Array(s.hash_size) + s.prev = new Uint16Array(s.w_size) // Don't need mem init magic for JS. 
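+ // For instance, with the defaults used by the Deflate wrapper below
+ // (windowBits = 15, memLevel = 8): w_size = 1 << 15 = 32768, so the
+ // sliding window above spans 64 KiB, and hash_bits = 8 + 7 = 15 gives
+ // 32768 hash chain heads.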
+ //s.high_water = 0; /* nothing written to s->window yet */ + + s.lit_bufsize = 1 << (memLevel + 6) + /* 16K elements by default */ + + s.pending_buf_size = s.lit_bufsize * 4 //overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2); + //s->pending_buf = (uchf *) overlay; + + s.pending_buf = new Uint8Array(s.pending_buf_size) // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`) + //s->d_buf = overlay + s->lit_bufsize/sizeof(ush); + + s.d_buf = 1 * s.lit_bufsize //s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize; + + s.l_buf = (1 + 2) * s.lit_bufsize + s.level = level + s.strategy = strategy + s.method = method + return deflateReset(strm) + } + + var deflateInit = function deflateInit(strm, level) { + return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY) + } + + var deflate = function deflate(strm, flush) { + var beg, val // for gzip header write only + + if (!strm || !strm.state || flush > Z_BLOCK || flush < 0) { + return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR + } + + var s = strm.state + + if (!strm.output || (!strm.input && strm.avail_in !== 0) || (s.status === FINISH_STATE && flush !== Z_FINISH)) { + return err(strm, strm.avail_out === 0 ? Z_BUF_ERROR : Z_STREAM_ERROR) + } + + s.strm = strm + /* just in case */ + + var old_flush = s.last_flush + s.last_flush = flush + /* Write the header */ + + if (s.status === INIT_STATE) { + if (s.wrap === 2) { + // GZIP header + strm.adler = 0 //crc32(0L, Z_NULL, 0); + + put_byte(s, 31) + put_byte(s, 139) + put_byte(s, 8) + + if (!s.gzhead) { + // s->gzhead == Z_NULL + put_byte(s, 0) + put_byte(s, 0) + put_byte(s, 0) + put_byte(s, 0) + put_byte(s, 0) + put_byte(s, s.level === 9 ? 2 : s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 4 : 0) + put_byte(s, OS_CODE) + s.status = BUSY_STATE + } else { + put_byte( + s, + (s.gzhead.text ? 1 : 0) + + (s.gzhead.hcrc ? 2 : 0) + + (!s.gzhead.extra ? 0 : 4) + + (!s.gzhead.name ? 0 : 8) + + (!s.gzhead.comment ? 0 : 16) + ) + put_byte(s, s.gzhead.time & 0xff) + put_byte(s, (s.gzhead.time >> 8) & 0xff) + put_byte(s, (s.gzhead.time >> 16) & 0xff) + put_byte(s, (s.gzhead.time >> 24) & 0xff) + put_byte(s, s.level === 9 ? 2 : s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 
4 : 0) + put_byte(s, s.gzhead.os & 0xff) + + if (s.gzhead.extra && s.gzhead.extra.length) { + put_byte(s, s.gzhead.extra.length & 0xff) + put_byte(s, (s.gzhead.extra.length >> 8) & 0xff) + } + + if (s.gzhead.hcrc) { + strm.adler = crc32_1(strm.adler, s.pending_buf, s.pending, 0) + } + + s.gzindex = 0 + s.status = EXTRA_STATE + } + } // DEFLATE header + else { + var header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8 + var level_flags = -1 + + if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) { + level_flags = 0 + } else if (s.level < 6) { + level_flags = 1 + } else if (s.level === 6) { + level_flags = 2 + } else { + level_flags = 3 + } + + header |= level_flags << 6 + + if (s.strstart !== 0) { + header |= PRESET_DICT + } + + header += 31 - (header % 31) + s.status = BUSY_STATE + putShortMSB(s, header) + /* Save the adler32 of the preset dictionary: */ + + if (s.strstart !== 0) { + putShortMSB(s, strm.adler >>> 16) + putShortMSB(s, strm.adler & 0xffff) + } + + strm.adler = 1 // adler32(0L, Z_NULL, 0); + } + } //#ifdef GZIP + + if (s.status === EXTRA_STATE) { + if ( + s.gzhead.extra + /* != Z_NULL*/ + ) { + beg = s.pending + /* start of bytes to update crc */ + + while (s.gzindex < (s.gzhead.extra.length & 0xffff)) { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32_1(strm.adler, s.pending_buf, s.pending - beg, beg) + } + + flush_pending(strm) + beg = s.pending + + if (s.pending === s.pending_buf_size) { + break + } + } + + put_byte(s, s.gzhead.extra[s.gzindex] & 0xff) + s.gzindex++ + } + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32_1(strm.adler, s.pending_buf, s.pending - beg, beg) + } + + if (s.gzindex === s.gzhead.extra.length) { + s.gzindex = 0 + s.status = NAME_STATE + } + } else { + s.status = NAME_STATE + } + } + + if (s.status === NAME_STATE) { + if ( + s.gzhead.name + /* != Z_NULL*/ + ) { + beg = s.pending + /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32_1(strm.adler, s.pending_buf, s.pending - beg, beg) + } + + flush_pending(strm) + beg = s.pending + + if (s.pending === s.pending_buf_size) { + val = 1 + break + } + } // JS specific: little magic to add zero terminator to end of string + + if (s.gzindex < s.gzhead.name.length) { + val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff + } else { + val = 0 + } + + put_byte(s, val) + } while (val !== 0) + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32_1(strm.adler, s.pending_buf, s.pending - beg, beg) + } + + if (val === 0) { + s.gzindex = 0 + s.status = COMMENT_STATE + } + } else { + s.status = COMMENT_STATE + } + } + + if (s.status === COMMENT_STATE) { + if ( + s.gzhead.comment + /* != Z_NULL*/ + ) { + beg = s.pending + /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32_1(strm.adler, s.pending_buf, s.pending - beg, beg) + } + + flush_pending(strm) + beg = s.pending + + if (s.pending === s.pending_buf_size) { + val = 1 + break + } + } // JS specific: little magic to add zero terminator to end of string + + if (s.gzindex < s.gzhead.comment.length) { + val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff + } else { + val = 0 + } + + put_byte(s, val) + } while (val !== 0) + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32_1(strm.adler, s.pending_buf, s.pending - beg, beg) + } + + if (val === 0) { + s.status = 
HCRC_STATE + } + } else { + s.status = HCRC_STATE + } + } + + if (s.status === HCRC_STATE) { + if (s.gzhead.hcrc) { + if (s.pending + 2 > s.pending_buf_size) { + flush_pending(strm) + } + + if (s.pending + 2 <= s.pending_buf_size) { + put_byte(s, strm.adler & 0xff) + put_byte(s, (strm.adler >> 8) & 0xff) + strm.adler = 0 //crc32(0L, Z_NULL, 0); + + s.status = BUSY_STATE + } + } else { + s.status = BUSY_STATE + } + } //#endif + + /* Flush as much pending output as possible */ + + if (s.pending !== 0) { + flush_pending(strm) + + if (strm.avail_out === 0) { + /* Since avail_out is 0, deflate will be called again with + * more output space, but possibly with both pending and + * avail_in equal to zero. There won't be anything to do, + * but this is not an error situation so make sure we + * return OK instead of BUF_ERROR at next call of deflate: + */ + s.last_flush = -1 + return Z_OK + } + /* Make sure there is something to do and avoid duplicate consecutive + * flushes. For repeated and useless calls with Z_FINISH, we keep + * returning Z_STREAM_END instead of Z_BUF_ERROR. + */ + } else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) && flush !== Z_FINISH) { + return err(strm, Z_BUF_ERROR) + } + /* User must not provide more input after the first FINISH: */ + + if (s.status === FINISH_STATE && strm.avail_in !== 0) { + return err(strm, Z_BUF_ERROR) + } + /* Start a new block or continue the current one. + */ + + if (strm.avail_in !== 0 || s.lookahead !== 0 || (flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) { + var bstate = + s.strategy === Z_HUFFMAN_ONLY + ? deflate_huff(s, flush) + : s.strategy === Z_RLE + ? deflate_rle(s, flush) + : configuration_table[s.level].func(s, flush) + + if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) { + s.status = FINISH_STATE + } + + if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) { + if (strm.avail_out === 0) { + s.last_flush = -1 + /* avoid BUF_ERROR next call, see above */ + } + + return Z_OK + /* If flush != Z_NO_FLUSH && avail_out == 0, the next call + * of deflate should use the same flush parameter to make sure + * that the flush is complete. So we don't have to output an + * empty block here, this will be done at next call. This also + * ensures that for a very small output buffer, we emit at most + * one empty block. + */ + } + + if (bstate === BS_BLOCK_DONE) { + if (flush === Z_PARTIAL_FLUSH) { + _tr_align$1(s) + } else if (flush !== Z_BLOCK) { + /* FULL_FLUSH or SYNC_FLUSH */ + _tr_stored_block$1(s, 0, 0, false) + /* For a full flush, this empty block will be recognized + * as a special marker by inflate_sync(). 
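+ * (An empty stored block serializes to the bytes 00 00 ff ff, which is
+ * the marker that resynchronization code scans for.)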
+ */ + + if (flush === Z_FULL_FLUSH) { + /*** CLEAR_HASH(s); ***/ + + /* forget history */ + zero$1(s.head) // Fill with NIL (= 0); + + if (s.lookahead === 0) { + s.strstart = 0 + s.block_start = 0 + s.insert = 0 + } + } + } + + flush_pending(strm) + + if (strm.avail_out === 0) { + s.last_flush = -1 + /* avoid BUF_ERROR at next call, see above */ + + return Z_OK + } + } + } //Assert(strm->avail_out > 0, "bug2"); + //if (strm.avail_out <= 0) { throw new Error("bug2");} + + if (flush !== Z_FINISH) { + return Z_OK + } + + if (s.wrap <= 0) { + return Z_STREAM_END + } + /* Write the trailer */ + + if (s.wrap === 2) { + put_byte(s, strm.adler & 0xff) + put_byte(s, (strm.adler >> 8) & 0xff) + put_byte(s, (strm.adler >> 16) & 0xff) + put_byte(s, (strm.adler >> 24) & 0xff) + put_byte(s, strm.total_in & 0xff) + put_byte(s, (strm.total_in >> 8) & 0xff) + put_byte(s, (strm.total_in >> 16) & 0xff) + put_byte(s, (strm.total_in >> 24) & 0xff) + } else { + putShortMSB(s, strm.adler >>> 16) + putShortMSB(s, strm.adler & 0xffff) + } + + flush_pending(strm) + /* If avail_out is zero, the application will call deflate again + * to flush the rest. + */ + + if (s.wrap > 0) { + s.wrap = -s.wrap + } + /* write the trailer only once! */ + + return s.pending !== 0 ? Z_OK : Z_STREAM_END + } + + var deflateEnd = function deflateEnd(strm) { + if ( + !strm || + /*== Z_NULL*/ + !strm.state + /*== Z_NULL*/ + ) { + return Z_STREAM_ERROR + } + + var status = strm.state.status + + if ( + status !== INIT_STATE && + status !== EXTRA_STATE && + status !== NAME_STATE && + status !== COMMENT_STATE && + status !== HCRC_STATE && + status !== BUSY_STATE && + status !== FINISH_STATE + ) { + return err(strm, Z_STREAM_ERROR) + } + + strm.state = null + return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK + } + /* ========================================================================= + * Initializes the compression dictionary from the given byte + * sequence without producing any compressed output. 
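+ * For example, priming the window with a dictionary of strings expected
+ * early in the input lets the first deflate calls emit matches against it
+ * instead of plain literals.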
+ */ + + var deflateSetDictionary = function deflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length + + if ( + !strm || + /*== Z_NULL*/ + !strm.state + /*== Z_NULL*/ + ) { + return Z_STREAM_ERROR + } + + var s = strm.state + var wrap = s.wrap + + if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) { + return Z_STREAM_ERROR + } + /* when using zlib wrappers, compute Adler-32 for provided dictionary */ + + if (wrap === 1) { + /* adler32(strm->adler, dictionary, dictLength); */ + strm.adler = adler32_1(strm.adler, dictionary, dictLength, 0) + } + + s.wrap = 0 + /* avoid computing Adler-32 in read_buf */ + + /* if dictionary would fill window, just replace the history */ + + if (dictLength >= s.w_size) { + if (wrap === 0) { + /* already empty otherwise */ + + /*** CLEAR_HASH(s); ***/ + zero$1(s.head) // Fill with NIL (= 0); + + s.strstart = 0 + s.block_start = 0 + s.insert = 0 + } + /* use the tail */ + // dictionary = dictionary.slice(dictLength - s.w_size); + + var tmpDict = new Uint8Array(s.w_size) + tmpDict.set(dictionary.subarray(dictLength - s.w_size, dictLength), 0) + dictionary = tmpDict + dictLength = s.w_size + } + /* insert dictionary into window and hash */ + + var avail = strm.avail_in + var next = strm.next_in + var input = strm.input + strm.avail_in = dictLength + strm.next_in = 0 + strm.input = dictionary + fill_window(s) + + while (s.lookahead >= MIN_MATCH$1) { + var str = s.strstart + var n = s.lookahead - (MIN_MATCH$1 - 1) + + do { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = HASH(s, s.ins_h, s.window[str + MIN_MATCH$1 - 1]) + s.prev[str & s.w_mask] = s.head[s.ins_h] + s.head[s.ins_h] = str + str++ + } while (--n) + + s.strstart = str + s.lookahead = MIN_MATCH$1 - 1 + fill_window(s) + } + + s.strstart += s.lookahead + s.block_start = s.strstart + s.insert = s.lookahead + s.lookahead = 0 + s.match_length = s.prev_length = MIN_MATCH$1 - 1 + s.match_available = 0 + strm.next_in = next + strm.input = input + strm.avail_in = avail + s.wrap = wrap + return Z_OK + } + + var deflateInit_1 = deflateInit + var deflateInit2_1 = deflateInit2 + var deflateReset_1 = deflateReset + var deflateResetKeep_1 = deflateResetKeep + var deflateSetHeader_1 = deflateSetHeader + var deflate_2 = deflate + var deflateEnd_1 = deflateEnd + var deflateSetDictionary_1 = deflateSetDictionary + var deflateInfo = 'pako deflate (from Nodeca project)' + /* Not implemented + module.exports.deflateBound = deflateBound; + module.exports.deflateCopy = deflateCopy; + module.exports.deflateParams = deflateParams; + module.exports.deflatePending = deflatePending; + module.exports.deflatePrime = deflatePrime; + module.exports.deflateTune = deflateTune; + */ + + var deflate_1 = { + deflateInit: deflateInit_1, + deflateInit2: deflateInit2_1, + deflateReset: deflateReset_1, + deflateResetKeep: deflateResetKeep_1, + deflateSetHeader: deflateSetHeader_1, + deflate: deflate_2, + deflateEnd: deflateEnd_1, + deflateSetDictionary: deflateSetDictionary_1, + deflateInfo: deflateInfo, + } + + function _typeof(obj) { + '@babel/helpers - typeof' + + if (typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol') { + _typeof = function (obj) { + return typeof obj + } + } else { + _typeof = function (obj) { + return obj && typeof Symbol === 'function' && obj.constructor === Symbol && obj !== Symbol.prototype + ? 
'symbol' + : typeof obj + } + } + + return _typeof(obj) + } + + var _has = function _has(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key) + } + + var assign = function assign( + obj + /*from1, from2, from3, ...*/ + ) { + var sources = Array.prototype.slice.call(arguments, 1) + + while (sources.length) { + var source = sources.shift() + + if (!source) { + continue + } + + if (_typeof(source) !== 'object') { + throw new TypeError(source + 'must be non-object') + } + + for (var p in source) { + if (_has(source, p)) { + obj[p] = source[p] + } + } + } + + return obj + } // Join array of chunks to single array. + + var flattenChunks = function flattenChunks(chunks) { + // calculate data length + var len = 0 + + for (var i = 0, l = chunks.length; i < l; i++) { + len += chunks[i].length + } // join chunks + + var result = new Uint8Array(len) + + for (var _i = 0, pos = 0, _l = chunks.length; _i < _l; _i++) { + var chunk = chunks[_i] + result.set(chunk, pos) + pos += chunk.length + } + + return result + } + + var common = { + assign: assign, + flattenChunks: flattenChunks, + } + + // String encode/decode helpers + // + // - apply(Array) can fail on Android 2.2 + // - apply(Uint8Array) can fail on iOS 5.1 Safari + // + + var STR_APPLY_UIA_OK = true + + try { + String.fromCharCode.apply(null, new Uint8Array(1)) + } catch (__) { + STR_APPLY_UIA_OK = false + } // Table with utf8 lengths (calculated by first byte of sequence) + // Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS, + // because max possible codepoint is 0x10ffff + + var _utf8len = new Uint8Array(256) + + for (var q = 0; q < 256; q++) { + _utf8len[q] = q >= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 2 : 1 + } + + _utf8len[254] = _utf8len[254] = 1 // Invalid sequence start + // convert string to array (typed, when possible) + + var string2buf = function string2buf(str) { + var buf, + c, + c2, + m_pos, + i, + str_len = str.length, + buf_len = 0 // count binary size + + for (m_pos = 0; m_pos < str_len; m_pos++) { + c = str.charCodeAt(m_pos) + + if ((c & 0xfc00) === 0xd800 && m_pos + 1 < str_len) { + c2 = str.charCodeAt(m_pos + 1) + + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00) + m_pos++ + } + } + + buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4 + } // allocate buffer + + buf = new Uint8Array(buf_len) // convert + + for (i = 0, m_pos = 0; i < buf_len; m_pos++) { + c = str.charCodeAt(m_pos) + + if ((c & 0xfc00) === 0xd800 && m_pos + 1 < str_len) { + c2 = str.charCodeAt(m_pos + 1) + + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00) + m_pos++ + } + } + + if (c < 0x80) { + /* one byte */ + buf[i++] = c + } else if (c < 0x800) { + /* two bytes */ + buf[i++] = 0xc0 | (c >>> 6) + buf[i++] = 0x80 | (c & 0x3f) + } else if (c < 0x10000) { + /* three bytes */ + buf[i++] = 0xe0 | (c >>> 12) + buf[i++] = 0x80 | ((c >>> 6) & 0x3f) + buf[i++] = 0x80 | (c & 0x3f) + } else { + /* four bytes */ + buf[i++] = 0xf0 | (c >>> 18) + buf[i++] = 0x80 | ((c >>> 12) & 0x3f) + buf[i++] = 0x80 | ((c >>> 6) & 0x3f) + buf[i++] = 0x80 | (c & 0x3f) + } + } + + return buf + } // Helper + + var buf2binstring = function buf2binstring(buf, len) { + // On Chrome, the arguments in a function call that are allowed is `65534`. + // If the length of the buffer is smaller than that, we can use this optimization, + // otherwise we will take a slower path. 
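+ // For example, String.fromCharCode.apply(null, buf) over a buffer of
+ // ~70000 bytes can throw a RangeError on some engines, hence the guard
+ // below.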
+ if (len < 65534) { + if (buf.subarray && STR_APPLY_UIA_OK) { + return String.fromCharCode.apply(null, buf.length === len ? buf : buf.subarray(0, len)) + } + } + + var result = '' + + for (var i = 0; i < len; i++) { + result += String.fromCharCode(buf[i]) + } + + return result + } // convert array to string + + var buf2string = function buf2string(buf, max) { + var i, out + var len = max || buf.length // Reserve max possible length (2 words per char) + // NB: by unknown reasons, Array is significantly faster for + // String.fromCharCode.apply than Uint16Array. + + var utf16buf = new Array(len * 2) + + for (out = 0, i = 0; i < len; ) { + var c = buf[i++] // quick process ascii + + if (c < 0x80) { + utf16buf[out++] = c + continue + } + + var c_len = _utf8len[c] // skip 5 & 6 byte codes + + if (c_len > 4) { + utf16buf[out++] = 0xfffd + i += c_len - 1 + continue + } // apply mask on first byte + + c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07 // join the rest + + while (c_len > 1 && i < len) { + c = (c << 6) | (buf[i++] & 0x3f) + c_len-- + } // terminated by end of string? + + if (c_len > 1) { + utf16buf[out++] = 0xfffd + continue + } + + if (c < 0x10000) { + utf16buf[out++] = c + } else { + c -= 0x10000 + utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff) + utf16buf[out++] = 0xdc00 | (c & 0x3ff) + } + } + + return buf2binstring(utf16buf, out) + } // Calculate max possible position in utf8 buffer, + // that will not break sequence. If that's not possible + // - (very small limits) return max size as is. + // + // buf[] - utf8 bytes array + // max - length limit (mandatory); + + var utf8border = function utf8border(buf, max) { + max = max || buf.length + + if (max > buf.length) { + max = buf.length + } // go back from last position, until start of sequence found + + var pos = max - 1 + + while (pos >= 0 && (buf[pos] & 0xc0) === 0x80) { + pos-- + } // Very small and broken sequence, + // return max, because we should return something anyway. + + if (pos < 0) { + return max + } // If we came to start of buffer - that means buffer is too small, + // return max too. + + if (pos === 0) { + return max + } + + return pos + _utf8len[buf[pos]] > max ? pos : max + } + + var strings = { + string2buf: string2buf, + buf2string: buf2string, + utf8border: utf8border, + } + + // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin + // + // This software is provided 'as-is', without any express or implied + // warranty. In no event will the authors be held liable for any damages + // arising from the use of this software. + // + // Permission is granted to anyone to use this software for any purpose, + // including commercial applications, and to alter it and redistribute it + // freely, subject to the following restrictions: + // + // 1. The origin of this software must not be misrepresented; you must not + // claim that you wrote the original software. If you use this software + // in a product, an acknowledgment in the product documentation would be + // appreciated but is not required. + // 2. Altered source versions must be plainly marked as such, and must not be + // misrepresented as being the original software. + // 3. This notice may not be removed or altered from any source distribution. 
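+ /**
+ * ZStream is the JS counterpart of zlib's z_stream struct: since JS has no
+ * pointers, each buffer is paired with an index (input/next_in,
+ * output/next_out). A minimal sketch of driving it directly with the
+ * functions defined above (the Deflate wrapper below does this for you;
+ * the buffer size is illustrative only):
+ *
+ * ```javascript
+ * const strm = new ZStream()
+ * deflate_1.deflateInit(strm, 6) // level 6, zlib wrapper
+ * strm.input = new Uint8Array([1, 2, 3])
+ * strm.next_in = 0
+ * strm.avail_in = strm.input.length
+ * strm.output = new Uint8Array(16384)
+ * strm.next_out = 0
+ * strm.avail_out = strm.output.length
+ * const status = deflate_1.deflate(strm, Z_FINISH) // Z_STREAM_END when done
+ * const compressed = strm.output.subarray(0, strm.next_out)
+ * deflate_1.deflateEnd(strm)
+ * ```
+ **/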
+ + function ZStream() { + /* next input byte */ + this.input = null // JS specific, because we have no pointers + + this.next_in = 0 + /* number of bytes available at input */ + + this.avail_in = 0 + /* total number of input bytes read so far */ + + this.total_in = 0 + /* next output byte should be put there */ + + this.output = null // JS specific, because we have no pointers + + this.next_out = 0 + /* remaining free space at output */ + + this.avail_out = 0 + /* total number of bytes output so far */ + + this.total_out = 0 + /* last error message, NULL if no error */ + + this.msg = '' + /*Z_NULL*/ + /* not visible by applications */ + + this.state = null + /* best guess about the data type: binary or text */ + + this.data_type = 2 + /*Z_UNKNOWN*/ + /* adler32 value of the uncompressed data */ + + this.adler = 0 + } + + var zstream = ZStream + + var toString = Object.prototype.toString + /* Public constants ==========================================================*/ + + /* ===========================================================================*/ + + var Z_NO_FLUSH$1 = constants.Z_NO_FLUSH, + Z_SYNC_FLUSH = constants.Z_SYNC_FLUSH, + Z_FULL_FLUSH$1 = constants.Z_FULL_FLUSH, + Z_FINISH$1 = constants.Z_FINISH, + Z_OK$1 = constants.Z_OK, + Z_STREAM_END$1 = constants.Z_STREAM_END, + Z_DEFAULT_COMPRESSION$1 = constants.Z_DEFAULT_COMPRESSION, + Z_DEFAULT_STRATEGY$1 = constants.Z_DEFAULT_STRATEGY, + Z_DEFLATED$1 = constants.Z_DEFLATED + /* ===========================================================================*/ + + /** + * class Deflate + * + * Generic JS-style wrapper for zlib calls. If you don't need + * streaming behaviour - use more simple functions: [[deflate]], + * [[deflateRaw]] and [[gzip]]. + **/ + + /* internal + * Deflate.chunks -> Array + * + * Chunks of output data, if [[Deflate#onData]] not overridden. + **/ + + /** + * Deflate.result -> Uint8Array + * + * Compressed result, generated by default [[Deflate#onData]] + * and [[Deflate#onEnd]] handlers. Filled after you push last chunk + * (call [[Deflate#push]] with `Z_FINISH` / `true` param). + **/ + + /** + * Deflate.err -> Number + * + * Error code after deflate finished. 0 (Z_OK) on success. + * You will not need it in real life, because deflate errors + * are possible only on wrong options or bad `onData` / `onEnd` + * custom handlers. + **/ + + /** + * Deflate.msg -> String + * + * Error message, if [[Deflate.err]] != 0 + **/ + + /** + * new Deflate(options) + * - options (Object): zlib deflate options. + * + * Creates new deflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `level` + * - `windowBits` + * - `memLevel` + * - `strategy` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. 
+ * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw deflate + * - `gzip` (Boolean) - create gzip wrapper + * - `header` (Object) - custom header for gzip + * - `text` (Boolean) - true if compressed data believed to be text + * - `time` (Number) - modification time, unix timestamp + * - `os` (Number) - operation system code + * - `extra` (Array) - array of bytes with extra data (max 65536) + * - `name` (String) - file name (binary string) + * - `comment` (String) - comment (binary string) + * - `hcrc` (Boolean) - true if header crc should be added + * + * ##### Example: + * + * ```javascript + * const pako = require('pako') + * , chunk1 = new Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = new Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * const deflate = new pako.Deflate({ level: 3}); + * + * deflate.push(chunk1, false); + * deflate.push(chunk2, true); // true -> last chunk + * + * if (deflate.err) { throw new Error(deflate.err); } + * + * console.log(deflate.result); + * ``` + **/ + + function Deflate(options) { + this.options = common.assign( + { + level: Z_DEFAULT_COMPRESSION$1, + method: Z_DEFLATED$1, + chunkSize: 16384, + windowBits: 15, + memLevel: 8, + strategy: Z_DEFAULT_STRATEGY$1, + }, + options || {} + ) + var opt = this.options + + if (opt.raw && opt.windowBits > 0) { + opt.windowBits = -opt.windowBits + } else if (opt.gzip && opt.windowBits > 0 && opt.windowBits < 16) { + opt.windowBits += 16 + } + + this.err = 0 // error code, if happens (0 = Z_OK) + + this.msg = '' // error message + + this.ended = false // used to avoid multiple onEnd() calls + + this.chunks = [] // chunks of compressed data + + this.strm = new zstream() + this.strm.avail_out = 0 + var status = deflate_1.deflateInit2(this.strm, opt.level, opt.method, opt.windowBits, opt.memLevel, opt.strategy) + + if (status !== Z_OK$1) { + throw new Error(messages[status]) + } + + if (opt.header) { + deflate_1.deflateSetHeader(this.strm, opt.header) + } + + if (opt.dictionary) { + var dict // Convert data if needed + + if (typeof opt.dictionary === 'string') { + // If we need to compress text, change encoding to utf8. + dict = strings.string2buf(opt.dictionary) + } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') { + dict = new Uint8Array(opt.dictionary) + } else { + dict = opt.dictionary + } + + status = deflate_1.deflateSetDictionary(this.strm, dict) + + if (status !== Z_OK$1) { + throw new Error(messages[status]) + } + + this._dict_set = true + } + } + /** + * Deflate#push(data[, flush_mode]) -> Boolean + * - data (Uint8Array|ArrayBuffer|String): input data. Strings will be + * converted to utf8 byte sequence. + * - flush_mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH. + * + * Sends input data to deflate pipe, generating [[Deflate#onData]] calls with + * new compressed chunks. Returns `true` on success. The last data block must + * have `flush_mode` Z_FINISH (or `true`). That will flush internal pending + * buffers and call [[Deflate#onEnd]]. + * + * On fail call [[Deflate#onEnd]] with error code and return false. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... 
+ * push(chunk, true); // push last chunk + * ``` + **/ + + Deflate.prototype.push = function (data, flush_mode) { + var strm = this.strm + var chunkSize = this.options.chunkSize + + var status, _flush_mode + + if (this.ended) { + return false + } + + if (flush_mode === ~~flush_mode) _flush_mode = flush_mode + else _flush_mode = flush_mode === true ? Z_FINISH$1 : Z_NO_FLUSH$1 // Convert data if needed + + if (typeof data === 'string') { + // If we need to compress text, change encoding to utf8. + strm.input = strings.string2buf(data) + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data) + } else { + strm.input = data + } + + strm.next_in = 0 + strm.avail_in = strm.input.length + + for (;;) { + if (strm.avail_out === 0) { + strm.output = new Uint8Array(chunkSize) + strm.next_out = 0 + strm.avail_out = chunkSize + } // Make sure avail_out > 6 to avoid repeating markers + + if ((_flush_mode === Z_SYNC_FLUSH || _flush_mode === Z_FULL_FLUSH$1) && strm.avail_out <= 6) { + this.onData(strm.output.subarray(0, strm.next_out)) + strm.avail_out = 0 + continue + } + + status = deflate_1.deflate(strm, _flush_mode) // Ended => flush and finish + + if (status === Z_STREAM_END$1) { + if (strm.next_out > 0) { + this.onData(strm.output.subarray(0, strm.next_out)) + } + + status = deflate_1.deflateEnd(this.strm) + this.onEnd(status) + this.ended = true + return status === Z_OK$1 + } // Flush if out buffer full + + if (strm.avail_out === 0) { + this.onData(strm.output) + continue + } // Flush if requested and has data + + if (_flush_mode > 0 && strm.next_out > 0) { + this.onData(strm.output.subarray(0, strm.next_out)) + strm.avail_out = 0 + continue + } + + if (strm.avail_in === 0) break + } + + return true + } + /** + * Deflate#onData(chunk) -> Void + * - chunk (Uint8Array): output data. + * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ + + Deflate.prototype.onData = function (chunk) { + this.chunks.push(chunk) + } + /** + * Deflate#onEnd(status) -> Void + * - status (Number): deflate status. 0 (Z_OK) on success, + * other if not. + * + * Called once after you tell deflate that the input stream is + * complete (Z_FINISH). By default - join collected chunks, + * free memory and fill `results` / `err` properties. + **/ + + Deflate.prototype.onEnd = function (status) { + // On success - join + if (status === Z_OK$1) { + this.result = common.flattenChunks(this.chunks) + } + + this.chunks = [] + this.err = status + this.msg = this.strm.msg + } + /** + * deflate(data[, options]) -> Uint8Array + * - data (Uint8Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * Compress `data` with deflate algorithm and `options`. + * + * Supported options are: + * + * - level + * - windowBits + * - memLevel + * - strategy + * - dictionary + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. 
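+ * (The [[deflateRaw]] and [[gzip]] helpers below are thin wrappers that set the
+ * `raw` and `gzip` options before delegating to [[deflate]].)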
+ * + * ##### Example: + * + * ```javascript + * const pako = require('pako') + * const data = new Uint8Array([1,2,3,4,5,6,7,8,9]); + * + * console.log(pako.deflate(data)); + * ``` + **/ + + function deflate$1(input, options) { + var deflator = new Deflate(options) + deflator.push(input, true) // That will never happens, if you don't cheat with options :) + + if (deflator.err) { + throw deflator.msg || messages[deflator.err] + } + + return deflator.result + } + /** + * deflateRaw(data[, options]) -> Uint8Array + * - data (Uint8Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but creates raw data, without wrapper + * (header and adler32 crc). + **/ + + function deflateRaw(input, options) { + options = options || {} + options.raw = true + return deflate$1(input, options) + } + /** + * gzip(data[, options]) -> Uint8Array + * - data (Uint8Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but create gzip wrapper instead of + * deflate one. + **/ + + function gzip(input, options) { + options = options || {} + options.gzip = true + return deflate$1(input, options) + } + + return { Deflate: Deflate, constants: constants } + } +} diff --git a/packages/rum-recorder/src/domain/deflateWorker.spec.ts b/packages/rum-recorder/src/domain/deflateWorker.spec.ts new file mode 100644 index 0000000000..9876675a87 --- /dev/null +++ b/packages/rum-recorder/src/domain/deflateWorker.spec.ts @@ -0,0 +1,64 @@ +import { createDeflateWorker, DeflateWorker, DeflateWorkerResponse } from './deflateWorker' + +describe('deflateWorker', () => { + it('responds with the deflated size when pushing some data', (done) => { + const deflateWorker = createDeflateWorker() + listen(deflateWorker, 3, (events) => { + expect(events).toEqual([ + { id: 0, size: 11 }, + { id: 1, size: 20 }, + { id: 2, size: 29 }, + ]) + done() + }) + deflateWorker.postMessage({ id: 0, action: 'write', data: 'foo' }) + deflateWorker.postMessage({ id: 1, action: 'write', data: 'bar' }) + deflateWorker.postMessage({ id: 2, action: 'write', data: 'baz' }) + }) + + it('responds with the resulting bytes when completing', (done) => { + const deflateWorker = createDeflateWorker() + listen(deflateWorker, 2, (events) => { + expect(events).toEqual([ + { id: 0, size: 11 }, + { + id: 1, + result: new Uint8Array([120, 156, 74, 203, 207, 7, 0, 0, 0, 255, 255, 3, 0, 2, 130, 1, 69]), + }, + ]) + done() + }) + deflateWorker.postMessage({ id: 0, action: 'write', data: 'foo' }) + deflateWorker.postMessage({ id: 1, action: 'complete' }) + }) + + it('pushes the remaining data specified by "complete"', (done) => { + const deflateWorker = createDeflateWorker() + listen(deflateWorker, 1, (events) => { + expect(events).toEqual([ + { + id: 0, + result: new Uint8Array([120, 156, 74, 203, 207, 7, 0, 0, 0, 255, 255, 3, 0, 2, 130, 1, 69]), + }, + ]) + done() + }) + deflateWorker.postMessage({ id: 0, action: 'complete', data: 'foo' }) + }) + + function listen( + deflateWorker: DeflateWorker, + expectedResponseCount: number, + onComplete: (responses: DeflateWorkerResponse[]) => void + ) { + const responses: DeflateWorkerResponse[] = [] + const listener = (event: { data: DeflateWorkerResponse }) => { + const responsesCount = responses.push(event.data) + if (responsesCount === expectedResponseCount) { + deflateWorker.removeEventListener('message', listener) + onComplete(responses) + } + } + deflateWorker.addEventListener('message', listener) + } +}) diff 
--git a/packages/rum-recorder/tsconfig.cjs.json b/packages/rum-recorder/tsconfig.cjs.json index 43709ece19..e5c966beca 100644 --- a/packages/rum-recorder/tsconfig.cjs.json +++ b/packages/rum-recorder/tsconfig.cjs.json @@ -3,9 +3,10 @@ "compilerOptions": { "baseUrl": ".", "declaration": true, + "allowJs": true, "module": "commonjs", "outDir": "./cjs/" }, - "include": ["./src/**/*.ts"], + "include": ["./src"], "exclude": ["./src/**/*.spec.ts"] } diff --git a/packages/rum-recorder/tsconfig.esm.json b/packages/rum-recorder/tsconfig.esm.json index 87be0a16c9..bda89a5c0e 100644 --- a/packages/rum-recorder/tsconfig.esm.json +++ b/packages/rum-recorder/tsconfig.esm.json @@ -4,8 +4,9 @@ "baseUrl": ".", "declaration": true, "module": "es6", + "allowJs": true, "outDir": "./esm/" }, - "include": ["./src/**/*.ts"], + "include": ["./src"], "exclude": ["./src/**/*.spec.ts"] } diff --git a/tsconfig.base.json b/tsconfig.base.json index 66f9e0713a..3c8a0c67ab 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -9,6 +9,7 @@ "strict": true, "target": "es5", "sourceMap": true, + "allowJs": true, "plugins": [ { From 00cd89ec01c96d2bf89513a552d4440cabf5d339 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Wed, 30 Dec 2020 11:30:27 +0100 Subject: [PATCH 02/43] [RUMF-804] add sessionReplay endpoint support --- .../core/src/domain/configuration.spec.ts | 8 ++++++++ packages/core/src/domain/configuration.ts | 19 +++++++++++++++---- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/packages/core/src/domain/configuration.spec.ts b/packages/core/src/domain/configuration.spec.ts index b9936680ba..bff5832ce9 100644 --- a/packages/core/src/domain/configuration.spec.ts +++ b/packages/core/src/domain/configuration.spec.ts @@ -115,6 +115,7 @@ describe('configuration', () => { expect(configuration.isIntakeUrl('https://rum-http-intake.logs.datadoghq.eu/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://browser-http-intake.logs.datadoghq.eu/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://public-trace-http-intake.logs.datadoghq.eu/v1/input/xxx')).toBe(true) + expect(configuration.isIntakeUrl('https://session-replay.browser-intake-datadoghq.eu/v1/input/xxx')).toBe(true) }) it('should detect intake request for US site', () => { @@ -123,6 +124,7 @@ describe('configuration', () => { expect(configuration.isIntakeUrl('https://rum-http-intake.logs.datadoghq.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://browser-http-intake.logs.datadoghq.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://public-trace-http-intake.logs.datadoghq.com/v1/input/xxx')).toBe(true) + expect(configuration.isIntakeUrl('https://session-replay.browser-intake-datadoghq.com/v1/input/xxx')).toBe(true) }) it('should detect alternate intake domains for US site', () => { @@ -130,6 +132,7 @@ describe('configuration', () => { expect(configuration.isIntakeUrl('https://rum.browser-intake-datadoghq.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://logs.browser-intake-datadoghq.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://trace.browser-intake-datadoghq.com/v1/input/xxx')).toBe(true) + expect(configuration.isIntakeUrl('https://session-replay.browser-intake-datadoghq.com/v1/input/xxx')).toBe(true) }) it('should handle sites with subdomains and classic intake', () => { @@ -139,6 +142,9 @@ describe('configuration', () => { 
expect(configuration.isIntakeUrl('https://public-trace-http-intake.logs.us3.datadoghq.com/v1/input/xxx')).toBe( true ) + expect(configuration.isIntakeUrl('https://session-replay.browser-intake-us3-datadoghq.com/v1/input/xxx')).toBe( + true + ) }) it('should handle sites with subdomains and alternate intake', () => { @@ -176,6 +182,7 @@ describe('configuration', () => { expect(configuration.isIntakeUrl('https://rum-http-intake.logs.foo.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://browser-http-intake.logs.foo.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://public-trace-http-intake.logs.foo.com/v1/input/xxx')).toBe(true) + expect(configuration.isIntakeUrl('https://session-replay.browser-intake-foo.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://rum-http-intake.logs.datadoghq.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://browser-http-intake.logs.datadoghq.com/v1/input/xxx')).toBe(true) @@ -189,6 +196,7 @@ describe('configuration', () => { expect(configuration.isIntakeUrl('https://rum.browser-intake-foo.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://logs.browser-intake-foo.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://trace.browser-intake-foo.com/v1/input/xxx')).toBe(true) + expect(configuration.isIntakeUrl('https://session-replay.browser-intake-foo.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://rum.browser-intake-datadoghq.com/v1/input/xxx')).toBe(true) expect(configuration.isIntakeUrl('https://logs.browser-intake-datadoghq.com/v1/input/xxx')).toBe(true) diff --git a/packages/core/src/domain/configuration.ts b/packages/core/src/domain/configuration.ts index 63f1660d4d..6bccc30060 100644 --- a/packages/core/src/domain/configuration.ts +++ b/packages/core/src/domain/configuration.ts @@ -74,6 +74,7 @@ export type Configuration = typeof DEFAULT_CONFIGURATION & { logsEndpoint: string rumEndpoint: string traceEndpoint: string + sessionReplayEndpoint: string internalMonitoringEndpoint?: string proxyHost?: string @@ -111,11 +112,14 @@ const ENDPOINTS = { alternate: { logs: 'logs', rum: 'rum', + sessionReplay: 'session-replay', trace: 'trace', }, classic: { logs: 'browser', rum: 'rum', + // session-replay has no classic endpoint + sessionReplay: undefined, trace: 'public-trace', }, } @@ -151,6 +155,7 @@ export function buildConfiguration(userConfiguration: UserConfiguration, buildEn proxyHost: userConfiguration.proxyHost, rumEndpoint: getEndpoint(intakeType, 'rum', transportConfiguration), service: userConfiguration.service, + sessionReplayEndpoint: getEndpoint(intakeType, 'sessionReplay', transportConfiguration), traceEndpoint: getEndpoint(intakeType, 'trace', transportConfiguration), isIntakeUrl: (url) => intakeUrls.some((intakeUrl) => url.indexOf(intakeUrl) === 0), @@ -246,10 +251,16 @@ function getEndpoint( } function getHost(intakeType: IntakeType, endpointType: EndpointType, site: string) { - const endpoint = ENDPOINTS[intakeType][endpointType] - if (intakeType === 'classic') { - return `${endpoint}-http-intake.logs.${site}` - } + return (intakeType === 'classic' && getClassicHost(endpointType, site)) || getAlternateHost(endpointType, site) +} + +function getClassicHost(endpointType: EndpointType, site: string): string | undefined { + const endpoint = ENDPOINTS.classic[endpointType] + return endpoint && `${endpoint}-http-intake.logs.${site}` +} + +function getAlternateHost(endpointType: EndpointType, site: string): 
string { + const endpoint = ENDPOINTS.alternate[endpointType] const domainParts = site.split('.') const extension = domainParts.pop() const suffix = `${domainParts.join('-')}.${extension}` From 67cfd0f65e7d29d9d00dcf74995f70d47d5e3422 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Thu, 17 Dec 2020 17:10:33 +0100 Subject: [PATCH 03/43] [RUMF-804] implement segmentCollection --- .../src/domain/recordUtils.spec.ts | 67 ++++++ .../rum-recorder/src/domain/recordUtils.ts | 32 +++ .../src/domain/segmentCollection.spec.ts | 211 ++++++++++++++++++ .../src/domain/segmentCollection.ts | 138 ++++++++++++ packages/rum-recorder/src/types.ts | 45 ++++ packages/rum-recorder/test/utils.ts | 12 + 6 files changed, 505 insertions(+) create mode 100644 packages/rum-recorder/src/domain/recordUtils.spec.ts create mode 100644 packages/rum-recorder/src/domain/recordUtils.ts create mode 100644 packages/rum-recorder/src/domain/segmentCollection.spec.ts create mode 100644 packages/rum-recorder/src/domain/segmentCollection.ts create mode 100644 packages/rum-recorder/src/types.ts create mode 100644 packages/rum-recorder/test/utils.ts diff --git a/packages/rum-recorder/src/domain/recordUtils.spec.ts b/packages/rum-recorder/src/domain/recordUtils.spec.ts new file mode 100644 index 0000000000..903d4d3055 --- /dev/null +++ b/packages/rum-recorder/src/domain/recordUtils.spec.ts @@ -0,0 +1,67 @@ +import { makeMouseMoveRecord } from '../../test/utils' +import { IncrementalSource, Record, RecordType } from '../types' +import { getRecordStartEnd, groupMouseMoves, isMouseMoveRecord } from './recordUtils' + +const domContentLoadedRecord: Record = { + data: {}, + timestamp: 100, + type: RecordType.DomContentLoaded, +} + +const inputRecord: Record = { + data: { + id: 123, + isChecked: true, + source: IncrementalSource.Input, + text: '123', + }, + timestamp: 123, + type: RecordType.IncrementalSnapshot, +} + +describe('isMouseMoveRecord', () => { + it('returns false for non-MouseMove records', () => { + expect(isMouseMoveRecord(domContentLoadedRecord)).toBe(false) + expect(isMouseMoveRecord(inputRecord)).toBe(false) + }) + + it('returns true for MouseMove records', () => { + expect(isMouseMoveRecord(makeMouseMoveRecord(100, []))).toBe(true) + }) +}) + +describe('groupMouseMoves', () => { + it('returns the same event if a single event is provided', () => { + const event = makeMouseMoveRecord(10, [{ id: 0 }]) + expect(groupMouseMoves([event])).toEqual(event) + }) + + it('groups mouse events in a single mouse event', () => { + expect( + groupMouseMoves([ + makeMouseMoveRecord(10, [{ id: 0 }]), + makeMouseMoveRecord(14, [{ id: 1 }]), + makeMouseMoveRecord(20, [{ id: 2 }]), + ]) + ).toEqual( + makeMouseMoveRecord(20, [ + { id: 0, timeOffset: -10 }, + { id: 1, timeOffset: -6 }, + { id: 2, timeOffset: 0 }, + ]) + ) + }) +}) + +describe('getRecordStartEnd', () => { + it("returns the timestamp as 'start' and 'end' for non-MouseMove records", () => { + expect(getRecordStartEnd(domContentLoadedRecord)).toEqual([100, 100]) + expect(getRecordStartEnd(inputRecord)).toEqual([123, 123]) + }) + + it("returns the time from the first mouse position as 'start' for MouseMove records", () => { + expect( + getRecordStartEnd(makeMouseMoveRecord(150, [{ timeOffset: -50 }, { timeOffset: -30 }, { timeOffset: 0 }])) + ).toEqual([100, 150]) + }) +}) diff --git a/packages/rum-recorder/src/domain/recordUtils.ts b/packages/rum-recorder/src/domain/recordUtils.ts new file mode 100644 index 0000000000..6ee403346c --- /dev/null +++ 
b/packages/rum-recorder/src/domain/recordUtils.ts
@@ -0,0 +1,32 @@
+import { IncrementalSource, MouseMoveRecord, Record, RecordType } from '../types'
+
+export function isMouseMoveRecord(record: Record): record is MouseMoveRecord {
+  return (
+    record.type === RecordType.IncrementalSnapshot &&
+    (record.data.source === IncrementalSource.MouseMove || record.data.source === IncrementalSource.TouchMove)
+  )
+}
+
+export function groupMouseMoves(records: MouseMoveRecord[]): MouseMoveRecord {
+  const mostRecentTimestamp = records[records.length - 1]!.timestamp
+  return {
+    data: {
+      // Because we disabled mouse move batching from RRWeb, there will be only one position in each
+      // record, and its timeOffset will be 0.
+      positions: records.map(({ timestamp, data: { positions: [position] } }) => ({
+        ...position,
+        timeOffset: timestamp - mostRecentTimestamp,
+      })),
+      source: records[0]!.data.source,
+    },
+    timestamp: mostRecentTimestamp,
+    type: RecordType.IncrementalSnapshot,
+  }
+}
+
+export function getRecordStartEnd(record: Record): [number, number] {
+  if (isMouseMoveRecord(record)) {
+    return [record.timestamp + record.data.positions[0]!.timeOffset, record.timestamp]
+  }
+  return [record.timestamp, record.timestamp]
+}
diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts
new file mode 100644
index 0000000000..fbdc5d84aa
--- /dev/null
+++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts
@@ -0,0 +1,211 @@
+import { makeMouseMoveRecord } from '../../test/utils'
+import { IncrementalSource, MouseMoveRecord, Record, RecordType, SegmentContext, SegmentMeta } from '../types'
+import {
+  MAX_MOUSE_MOVE_BATCH,
+  MAX_SEGMENT_DURATION,
+  RecordsIncrementalState,
+  Segment,
+  SegmentWriter,
+  startSegmentCollection,
+} from './segmentCollection'
+
+class StringWriter implements SegmentWriter {
+  output = ''
+  completed: Array<{ meta: SegmentMeta; segment: SegmentMeta & { records: Record[] } }> = []
+  write(data: string) {
+    this.output += data
+  }
+  complete(data: string, meta: SegmentMeta) {
+    this.completed.push({ meta, segment: JSON.parse(this.output + data) as any })
+    this.output = ''
+  }
+}
+
+const CONTEXT: SegmentContext = { application: { id: 'a' }, view: { id: 'b' }, session: { id: 'c' } }
+const RECORD: Record = { type: RecordType.Load, timestamp: 10, data: {} }
+
+describe('startSegmentCollection', () => {
+  let writer: StringWriter
+
+  beforeEach(() => {
+    writer = new StringWriter()
+  })
+
+  afterEach(() => {
+    jasmine.clock().uninstall()
+  })
+
+  it('immediately starts a new segment', () => {
+    const { addRecord } = startSegmentCollection(() => CONTEXT, writer)
+    expect(writer.output).toBe('')
+    addRecord(RECORD)
+    expect(writer.output).toBe('{"records":[{"type":1,"timestamp":10,"data":{}}')
+    expect(writer.completed.length).toBe(0)
+  })
+
+  it('writes a segment when renewing it', () => {
+    const { renewSegment, addRecord } = startSegmentCollection(() => CONTEXT, writer)
+    addRecord(RECORD)
+    renewSegment('before_unload')
+    expect(writer.completed.length).toBe(1)
+  })
+
+  it('writes a segment after MAX_SEGMENT_DURATION', () => {
+    jasmine.clock().install()
+    const { addRecord } = startSegmentCollection(() => CONTEXT, writer)
+    addRecord(RECORD)
+    jasmine.clock().tick(MAX_SEGMENT_DURATION)
+    expect(writer.completed.length).toBe(1)
+  })
+
+  it('does not write a segment after MAX_SEGMENT_DURATION if a segment has been created in the meantime', () => {
+    jasmine.clock().install()
+    const { renewSegment, addRecord } = startSegmentCollection(() => CONTEXT, writer)
+    addRecord(RECORD)
+    renewSegment('before_unload')
+    expect(writer.completed.length).toBe(1)
+    jasmine.clock().tick(MAX_SEGMENT_DURATION)
+    expect(writer.completed.length).toBe(1)
+  })
+
+  it("ignores calls to addRecord if the context can't be retrieved", () => {
+    const { renewSegment, addRecord } = startSegmentCollection(() => undefined, writer)
+    addRecord(RECORD)
+    renewSegment('before_unload')
+    expect(writer.output).toBe('')
+    expect(writer.completed.length).toBe(0)
+  })
+})
+
+describe('Segment', () => {
+  it('writes a segment', () => {
+    const writer = new StringWriter()
+    const segment = new Segment(writer, CONTEXT, 'init')
+    segment.addRecord({ type: RecordType.Load, timestamp: 10, data: {} })
+    expect(writer.output).toEqual('{"records":[{"type":1,"timestamp":10,"data":{}}')
+    expect(writer.completed).toEqual([])
+    segment.finish()
+
+    expect(writer.completed).toEqual([
+      {
+        meta: {
+          creation_reason: 'init' as const,
+          end: 10,
+          has_full_snapshot: false,
+          records_count: 1,
+          start: 10,
+          ...CONTEXT,
+        },
+        segment: {
+          creation_reason: 'init' as const,
+          end: 10,
+          has_full_snapshot: false,
+          records: [
+            {
+              data: {},
+              timestamp: 10,
+              type: RecordType.Load,
+            },
+          ],
+          records_count: 1,
+          start: 10,
+          ...CONTEXT,
+        },
+      },
+    ])
+  })
+
+  it('batches mousemove records', () => {
+    const writer = new StringWriter()
+    const segment = new Segment(writer, CONTEXT, 'init')
+    segment.addRecord(makeMouseMoveRecord(10, [{ id: 0 }]))
+    segment.addRecord(makeMouseMoveRecord(20, [{ id: 1 }]))
+    segment.addRecord(makeMouseMoveRecord(30, [{ id: 2 }]))
+    segment.finish()
+
+    expect(writer.completed[0].segment.records).toEqual([
+      makeMouseMoveRecord(30, [
+        { id: 0, timeOffset: -20 },
+        { id: 1, timeOffset: -10 },
+        { id: 2, timeOffset: 0 },
+      ]),
+    ])
+  })
+
+  it('flushes the mousemove records batch after a max number of records', () => {
+    const writer = new StringWriter()
+    const segment = new Segment(writer, CONTEXT, 'init')
+    for (let i = 0; i < MAX_MOUSE_MOVE_BATCH + 2; i += 1) {
+      segment.addRecord(makeMouseMoveRecord(10, [{ id: 0 }]))
+    }
+    segment.finish()
+
+    const records = writer.completed[0].segment.records as MouseMoveRecord[]
+    expect(records.length).toBe(2)
+    expect(records[0].data.positions.length).toBe(MAX_MOUSE_MOVE_BATCH)
+    expect(records[1].data.positions.length).toBe(2)
+  })
+
+  it('ignores the "finish" call if no records have been added', () => {
+    const writer = new StringWriter()
+    const segment = new Segment(writer, CONTEXT, 'init')
+    segment.finish()
+    expect(writer.completed).toEqual([])
+  })
+})
+
+describe('RecordsIncrementalState', () => {
+  it('initializes with the data of the first record', () => {
+    const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} })
+    expect(state.start).toBe(10)
+    expect(state.end).toBe(10)
+    expect(state.hasFullSnapshot).toBe(false)
+    expect(state.recordsCount).toBe(1)
+  })
+
+  it('adjusts the state when adding a record', () => {
+    const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} })
+    state.addRecord({ type: RecordType.DomContentLoaded, timestamp: 15, data: {} })
+    expect(state.start).toBe(10)
+    expect(state.end).toBe(15)
+    expect(state.hasFullSnapshot).toBe(false)
+    expect(state.recordsCount).toBe(2)
+  })
+
+  it("doesn't set hasFullSnapshot to true if a FullSnapshot is the first record", () => {
+    const state = new RecordsIncrementalState({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any })
+    expect(state.hasFullSnapshot).toBe(false)
+  })
+
+  it("doesn't set hasFullSnapshot to true if a FullSnapshot is not directly preceded by a Meta record", () => {
+    const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} })
+    state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any })
+    expect(state.hasFullSnapshot).toBe(false)
+  })
+
+  it('sets hasFullSnapshot to true if a FullSnapshot is preceded by a Meta record', () => {
+    const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} })
+    state.addRecord({ type: RecordType.Meta, timestamp: 10, data: {} as any })
+    state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any })
+    expect(state.hasFullSnapshot).toBe(true)
+  })
+
+  it("doesn't override hasFullSnapshot to false once it has been set to true", () => {
+    const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} })
+    state.addRecord({ type: RecordType.Meta, timestamp: 10, data: {} as any })
+    state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any })
+    state.addRecord({ type: RecordType.DomContentLoaded, timestamp: 10, data: {} as any })
+    expect(state.hasFullSnapshot).toBe(true)
+  })
+
+  it('uses records start/end for mouse moves', () => {
+    const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} })
+    state.addRecord({
+      data: { source: IncrementalSource.MouseMove, positions: [{ timeOffset: -2, x: 0, y: 0, id: 0 }] },
+      timestamp: 11,
+      type: RecordType.IncrementalSnapshot,
+    })
+    expect(state.start).toBe(9)
+    expect(state.end).toBe(11)
+  })
+})
diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts
new file mode 100644
index 0000000000..ab505eabc4
--- /dev/null
+++ b/packages/rum-recorder/src/domain/segmentCollection.ts
@@ -0,0 +1,138 @@
+import { CreationReason, MouseMoveRecord, Record, RecordType, SegmentContext, SegmentMeta } from '../types'
+import { getRecordStartEnd, groupMouseMoves, isMouseMoveRecord } from './recordUtils'
+
+export const MAX_SEGMENT_DURATION = 30_000
+export const MAX_MOUSE_MOVE_BATCH = 100
+
+export interface SegmentWriter {
+  write(data: string): void
+  complete(data: string, meta: SegmentMeta): void
+}
+
+export function startSegmentCollection(getSegmentContext: () => SegmentContext | undefined, writer: SegmentWriter) {
+  let currentSegment: Segment | undefined
+
+  renewSegment('init')
+
+  function renewSegment(creationReason: CreationReason) {
+    if (currentSegment) {
+      currentSegment.finish()
+      currentSegment = undefined
+    }
+
+    const context = getSegmentContext()
+    if (!context) {
+      return
+    }
+
+    const localSegment = (currentSegment = new Segment(writer, context, creationReason))
+
+    // Replace the newly created segment after MAX_SEGMENT_DURATION
+    setTimeout(() => {
+      if (currentSegment === localSegment) {
+        renewSegment('max_duration')
+      }
+    }, MAX_SEGMENT_DURATION)
+  }
+
+  return {
+    renewSegment,
+    addRecord(record: Record) {
+      if (!currentSegment) {
+        return
+      }
+
+      currentSegment.addRecord(record)
+    },
+  }
+}
+
+export class Segment {
+  private state?: RecordsIncrementalState
+  private batchedMouseMove: MouseMoveRecord[] = []
+
+  constructor(
+    private writer: SegmentWriter,
+    readonly context: SegmentContext,
+    private creationReason: CreationReason
+  ) {}
+
+  addRecord(record: Record): void {
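+    // Mousemove and touchmove records are buffered and merged into a single record
+    // (see groupMouseMoves), flushed when the batch is full or the segment finishes.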
+    if (this.batchedMouseMove.push(record) === MAX_MOUSE_MOVE_BATCH) {
+      this.writeMouseMoves()
+    } else {
+      this.writeRecord(record)
+    }
+  }
+
+  finish() {
+    this.writeMouseMoves()
+
+    if (!this.state) {
+      return
+    }
+
+    const meta: SegmentMeta = {
+      creation_reason: this.creationReason,
+      end: this.state.end,
+      has_full_snapshot: this.state.hasFullSnapshot,
+      records_count: this.state.recordsCount,
+      start: this.state.start,
+      ...this.context,
+    }
+    this.writer.complete(`],${JSON.stringify(meta).slice(1)}\n`, meta)
+  }
+
+  private writeMouseMoves() {
+    if (this.batchedMouseMove.length === 0) {
+      return
+    }
+
+    this.writeRecord(groupMouseMoves(this.batchedMouseMove))
+
+    this.batchedMouseMove.length = 0
+  }
+
+  private writeRecord(record: Record): void {
+    if (!this.state) {
+      this.writer.write(`{"records":[${JSON.stringify(record)}`)
+      this.state = new RecordsIncrementalState(record)
+    } else {
+      this.writer.write(`,${JSON.stringify(record)}`)
+      this.state.addRecord(record)
+    }
+  }
+}
+
+export class RecordsIncrementalState {
+  start: number
+  end: number
+  recordsCount: number
+  hasFullSnapshot: boolean
+  private lastRecordType: RecordType
+
+  constructor(initialRecord: Record) {
+    const [start, end] = getRecordStartEnd(initialRecord)
+    this.start = start
+    this.end = end
+    this.lastRecordType = initialRecord.type
+    this.hasFullSnapshot = false
+    this.recordsCount = 1
+  }
+
+  addRecord(record: Record) {
+    const [start, end] = getRecordStartEnd(record)
+    this.start = Math.min(this.start, start)
+    this.end = Math.max(this.end, end)
+    if (!this.hasFullSnapshot) {
+      // Note: to be usable by the replay, this field should be true only if the FullSnapshot
+      // is preceded by a Meta record. Because rrweb is emitting both records synchronously and
+      // contiguously, it should always be the case, but check it nonetheless.
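+      // (The `if (!this.hasFullSnapshot)` check above makes this a latch: once true,
+      // it stays true for the lifetime of the segment.)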
+      this.hasFullSnapshot = record.type === RecordType.FullSnapshot && this.lastRecordType === RecordType.Meta
+    }
+    this.lastRecordType = record.type
+    this.recordsCount += 1
+  }
+}
diff --git a/packages/rum-recorder/src/types.ts b/packages/rum-recorder/src/types.ts
new file mode 100644
index 0000000000..f06bb4c1b7
--- /dev/null
+++ b/packages/rum-recorder/src/types.ts
@@ -0,0 +1,45 @@
+// Alias EventWithTime to Record, to avoid naming clash between RRWeb events and RUM events
+import {
+  EventType as RecordType,
+  EventWithTime as Record,
+  IncrementalSource,
+  MousePosition,
+} from './domain/rrweb/types'
+
+export { Record, RecordType, IncrementalSource, MousePosition }
+
+export interface MouseMoveRecord {
+  type: RecordType.IncrementalSnapshot
+  timestamp: number
+  data: {
+    source: IncrementalSource.TouchMove | IncrementalSource.MouseMove
+    positions: MousePosition[]
+  }
+}
+
+export interface Segment extends SegmentMeta {
+  records: Record[]
+}
+
+export interface SegmentMeta extends SegmentContext {
+  start: number
+  end: number
+  has_full_snapshot: boolean
+  records_count: number
+  creation_reason: CreationReason
+}
+
+export interface SegmentContext {
+  application: { id: string }
+  session: { id: string }
+  view: { id: string }
+}
+
+export type CreationReason =
+  | 'init'
+  | 'max_duration'
+  | 'max_size'
+  | 'view_change'
+  | 'session_renewed'
+  | 'before_unload'
+  | 'visibility_change'
diff --git a/packages/rum-recorder/test/utils.ts b/packages/rum-recorder/test/utils.ts
new file mode 100644
index 0000000000..242fa3e1ab
--- /dev/null
+++ b/packages/rum-recorder/test/utils.ts
@@ -0,0 +1,12 @@
+import { IncrementalSource, MouseMoveRecord, MousePosition, RecordType } from '../src/types'
+
+export function makeMouseMoveRecord(timestamp: number, positions: Array<Partial<MousePosition>>): MouseMoveRecord {
+  return {
+    timestamp,
+    data: {
+      positions: positions.map((position) => ({ id: 0, timeOffset: 0, x: 0, y: 1, ...position })),
+      source: IncrementalSource.MouseMove,
+    },
+    type: RecordType.IncrementalSnapshot,
+  }
+}
From fbfa47d5b4fa4ec661cf31e2b416cc73bc4304bf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Wed, 30 Dec 2020 17:12:54 +0100
Subject: [PATCH 04/43] [RUMF-804] implement segment renewal tracking

---
 packages/rum-core/src/index.ts | 1 +
 .../src/domain/trackSegmentRenewal.spec.ts | 50 +++++++++++++++++
 .../src/domain/trackSegmentRenewal.ts | 45 +++++++++++++++
 3 files changed, 96 insertions(+)
 create mode 100644 packages/rum-recorder/src/domain/trackSegmentRenewal.spec.ts
 create mode 100644 packages/rum-recorder/src/domain/trackSegmentRenewal.ts

diff --git a/packages/rum-core/src/index.ts b/packages/rum-core/src/index.ts
index d8a2934150..7e68340242 100644
--- a/packages/rum-core/src/index.ts
+++ b/packages/rum-core/src/index.ts
@@ -10,3 +10,4 @@
   RumLongTaskEvent,
 } from './rumEvent.types'
 export { startRum } from './boot/rum'
+export { LifeCycle, LifeCycleEventType } from './domain/lifeCycle'
diff --git a/packages/rum-recorder/src/domain/trackSegmentRenewal.spec.ts b/packages/rum-recorder/src/domain/trackSegmentRenewal.spec.ts
new file mode 100644
index 0000000000..bd9d525887
--- /dev/null
+++ b/packages/rum-recorder/src/domain/trackSegmentRenewal.spec.ts
@@ -0,0 +1,50 @@
+import { createNewEvent, DOM_EVENT, restorePageVisibility, setPageVisibility } from '@datadog/browser-core'
+import { LifeCycle, LifeCycleEventType } from '@datadog/browser-rum-core'
+import { CreationReason } from '../types'
+import { trackSegmentRenewal } from './trackSegmentRenewal'
+
+describe('trackSegmentRenewal', () => {
+  let renewSegmentSpy: jasmine.Spy<(reason: CreationReason) => void>
+  let lifeCycle: LifeCycle
+  let eventEmitter: HTMLDivElement
+  let stopSegmentRenewal: () => void
+
+  beforeEach(() => {
+    renewSegmentSpy = jasmine.createSpy()
+    lifeCycle = new LifeCycle()
+    eventEmitter = document.createElement('div')
+    ;({ stop: stopSegmentRenewal } = trackSegmentRenewal(lifeCycle, renewSegmentSpy, eventEmitter))
+  })
+
+  afterEach(() => {
+    stopSegmentRenewal()
+    restorePageVisibility()
+  })
+
+  it('renews segment on unload', () => {
+    lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD)
+    expect(renewSegmentSpy).toHaveBeenCalledWith('before_unload')
+  })
+
+  it('renews segment on view change', () => {
+    lifeCycle.notify(LifeCycleEventType.VIEW_CREATED, {} as any)
+    expect(renewSegmentSpy).toHaveBeenCalledWith('view_change')
+  })
+
+  it('renews segment on session renew', () => {
+    lifeCycle.notify(LifeCycleEventType.SESSION_RENEWED)
+    expect(renewSegmentSpy).toHaveBeenCalledWith('session_renewed')
+  })
+
+  it('renews segment when the page becomes hidden', () => {
+    setPageVisibility('hidden')
+    eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE))
+    expect(renewSegmentSpy).toHaveBeenCalledWith('visibility_change')
+  })
+
+  it('does not renew segment when the page becomes visible', () => {
+    setPageVisibility('visible')
+    eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE))
+    expect(renewSegmentSpy).not.toHaveBeenCalled()
+  })
+})
diff --git a/packages/rum-recorder/src/domain/trackSegmentRenewal.ts b/packages/rum-recorder/src/domain/trackSegmentRenewal.ts
new file mode 100644
index 0000000000..c5bbe64364
--- /dev/null
+++ b/packages/rum-recorder/src/domain/trackSegmentRenewal.ts
@@ -0,0 +1,45 @@
+import { addEventListener, DOM_EVENT, EventEmitter } from '@datadog/browser-core'
+import { LifeCycle, LifeCycleEventType } from '@datadog/browser-rum-core'
+import { CreationReason } from '../types'
+
+export function trackSegmentRenewal(
+  lifeCycle: LifeCycle,
+  renewSegment: (creationReason: CreationReason) => void,
+  emitter: EventEmitter = window
+) {
+  // Flush when the RUM view changes
+  const { unsubscribe: unsubscribeViewCreated } = lifeCycle.subscribe(LifeCycleEventType.VIEW_CREATED, () => {
+    renewSegment('view_change')
+  })
+
+  // Flush when the session is renewed
+  const { unsubscribe: unsubscribeSessionRenewed } = lifeCycle.subscribe(LifeCycleEventType.SESSION_RENEWED, () => {
+    renewSegment('session_renewed')
+  })
+
+  // Flush when leaving the page
+  const { unsubscribe: unsubscribeBeforeUnload } = lifeCycle.subscribe(LifeCycleEventType.BEFORE_UNLOAD, () => {
+    renewSegment('before_unload')
+  })
+
+  // Flush when visibility changes
+  const { stop: unsubscribeVisibilityChange } = addEventListener(
+    emitter,
+    DOM_EVENT.VISIBILITY_CHANGE,
+    () => {
+      if (document.visibilityState === 'hidden') {
+        renewSegment('visibility_change')
+      }
+    },
+    { capture: true }
+  )
+
+  return {
+    stop() {
+      unsubscribeViewCreated()
+      unsubscribeBeforeUnload()
+      unsubscribeVisibilityChange()
+      unsubscribeSessionRenewed()
+    },
+  }
+}
From 79ef0b436821ddbfe6ee57e628cf2ddebd21afdc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Wed, 30 Dec 2020 11:31:58 +0100
Subject: [PATCH 05/43] [RUMF-804] implement DeflateSegmentWriter

---
 .../src/domain/deflateSegmentWriter.spec.ts | 89 +++++++++++++++++++
 .../src/domain/deflateSegmentWriter.ts | 37 ++++++++
 2 files changed, 126 insertions(+)
 create mode
100644 packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts create mode 100644 packages/rum-recorder/src/domain/deflateSegmentWriter.ts diff --git a/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts b/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts new file mode 100644 index 0000000000..180ad348b9 --- /dev/null +++ b/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts @@ -0,0 +1,89 @@ +import { noop } from '@datadog/browser-core' + +import { SegmentMeta } from '../types' +import { DeflateSegmentWriter } from './deflateSegmentWriter' +import { DeflateWorker, DeflateWorkerAction, DeflateWorkerListener } from './deflateWorker' + +describe('DeflateWriter', () => { + let worker: MockWorker + + beforeEach(() => { + worker = new MockWorker() + }) + + it('calls the onWrote callback when data is written', () => { + const onWroteSpy = jasmine.createSpy<(size: number) => void>() + const writer = new DeflateSegmentWriter(worker, onWroteSpy, noop) + writer.write('foo') + worker.process() + expect(onWroteSpy.calls.allArgs()).toEqual([[3]]) + }) + + it('calls the onCompleted callback when data is complete', () => { + const onCompletedSpy = jasmine.createSpy<(data: Uint8Array, meta: SegmentMeta) => void>() + const writer = new DeflateSegmentWriter(worker, noop, onCompletedSpy) + const meta: SegmentMeta = { start: 12 } as any + writer.complete(undefined, meta) + worker.process() + expect(onCompletedSpy.calls.allArgs()).toEqual([[jasmine.any(Uint8Array), meta]]) + }) + + it('calls the onCompleted callback with the correct meta even if a previous action failed somehow', () => { + const onCompletedSpy = jasmine.createSpy<(data: Uint8Array, meta: SegmentMeta) => void>() + const writer = new DeflateSegmentWriter(worker, noop, onCompletedSpy) + const meta1: SegmentMeta = { start: 12 } as any + const meta2: SegmentMeta = { start: 13 } as any + writer.complete(undefined, meta1) + writer.complete(undefined, meta2) + worker.process(0) + expect(onCompletedSpy.calls.allArgs()).toEqual([[jasmine.any(Uint8Array), meta2]]) + }) +}) + +class MockWorker implements DeflateWorker { + private listener: DeflateWorkerListener | undefined + private messages: DeflateWorkerAction[] = [] + private pendingDataSize = 0 + + addEventListener(_: 'message', listener: DeflateWorkerListener): void { + if (this.listener) { + throw new Error('MockWorker supports only one listener') + } + this.listener = listener + } + + removeEventListener(): void { + this.listener = undefined + } + + postMessage(message: DeflateWorkerAction): void { + this.messages.push(message) + } + + terminate(): void { + // do nothing + } + + process(ignoreMessageWithId?: number): void { + if (this.listener) { + for (const message of this.messages) { + if (ignoreMessageWithId === message.id) { + continue + } + switch (message.action) { + case 'write': + this.pendingDataSize += message.data.length + this.listener({ data: { id: message.id, size: this.pendingDataSize } }) + break + case 'complete': + if (message.data) { + this.pendingDataSize += message.data.length + } + this.listener({ data: { id: message.id, result: new Uint8Array(this.pendingDataSize) } }) + this.pendingDataSize = 0 + } + } + } + this.messages.length = 0 + } +} diff --git a/packages/rum-recorder/src/domain/deflateSegmentWriter.ts b/packages/rum-recorder/src/domain/deflateSegmentWriter.ts new file mode 100644 index 0000000000..edb077a425 --- /dev/null +++ b/packages/rum-recorder/src/domain/deflateSegmentWriter.ts @@ -0,0 +1,37 @@ +import { SegmentMeta } from 
'../types' +import { DeflateWorker } from './deflateWorker' +import { SegmentWriter } from './segmentCollection' + +export class DeflateSegmentWriter implements SegmentWriter { + private nextId = 0 + private pendingMeta: Array<{ id: number; meta: SegmentMeta }> = [] + + constructor( + private worker: DeflateWorker, + private onWrote: (size: number) => void, + private onCompleted: (data: Uint8Array, meta: SegmentMeta) => void + ) { + worker.addEventListener('message', ({ data }) => { + if ('result' in data) { + let pendingMeta + do { + pendingMeta = this.pendingMeta.shift()! + } while (pendingMeta.id < data.id) + this.onCompleted(data.result, pendingMeta.meta) + } else { + this.onWrote(data.size) + } + }) + } + + write(data: string): void { + this.worker.postMessage({ data, id: this.nextId, action: 'write' }) + this.nextId += 1 + } + + complete(data: string | undefined, meta: SegmentMeta): void { + this.worker.postMessage({ data, id: this.nextId, action: 'complete' }) + this.pendingMeta.push({ meta, id: this.nextId }) + this.nextId += 1 + } +} From c4fc8eeb9105f5eaaa9e6a0396f071a912f0bcc9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Thu, 7 Jan 2021 14:35:52 +0100 Subject: [PATCH 06/43] [RUMF-804] implement the recorder entry point --- packages/core/src/transport/transport.ts | 2 +- packages/rum-core/src/boot/rum.ts | 3 + .../rum-core/src/boot/rumPublicApi.spec.ts | 5 +- packages/rum-core/src/index.ts | 2 + packages/rum-core/src/rawRumEvent.types.ts | 1 + packages/rum-recorder/package.json | 2 +- .../rum-recorder/src/boot/recorder.entry.ts | 30 +++- .../rum-recorder/src/boot/recorder.spec.ts | 136 ++++++++++++++++++ packages/rum-recorder/src/boot/recorder.ts | 67 +++++++++ packages/rum-recorder/src/index.ts | 16 ++- packages/rum-recorder/src/transport/send.ts | 31 ++++ packages/rum/src/index.ts | 1 + tsconfig.base.json | 3 +- 13 files changed, 292 insertions(+), 7 deletions(-) create mode 100644 packages/rum-recorder/src/boot/recorder.spec.ts create mode 100644 packages/rum-recorder/src/boot/recorder.ts create mode 100644 packages/rum-recorder/src/transport/send.ts diff --git a/packages/core/src/transport/transport.ts b/packages/core/src/transport/transport.ts index 19fcadd0e8..5e76c6b979 100644 --- a/packages/core/src/transport/transport.ts +++ b/packages/core/src/transport/transport.ts @@ -16,7 +16,7 @@ const HAS_MULTI_BYTES_CHARACTERS = /[^\u0000-\u007F]/ export class HttpRequest { constructor(private endpointUrl: string, private bytesLimit: number, private withBatchTime: boolean = false) {} - send(data: string, size: number) { + send(data: string | FormData, size: number) { const url = this.withBatchTime ? 
addBatchTime(this.endpointUrl) : this.endpointUrl
     if (navigator.sendBeacon && size < this.bytesLimit) {
       const isQueued = navigator.sendBeacon(url, data)
diff --git a/packages/rum-core/src/boot/rum.ts b/packages/rum-core/src/boot/rum.ts
index 717436cca7..8a5ab4b86c 100644
--- a/packages/rum-core/src/boot/rum.ts
+++ b/packages/rum-core/src/boot/rum.ts
@@ -52,6 +52,9 @@ export function startRum(userConfiguration: RumUserConfiguration, getCommonConte
   return {
     addAction,
     addError,
+    configuration,
+    lifeCycle,
+    parentContexts,
     getInternalContext: internalContext.get,
   }
 }
diff --git a/packages/rum-core/src/boot/rumPublicApi.spec.ts b/packages/rum-core/src/boot/rumPublicApi.spec.ts
index 705a1fb6b7..ef1ab4b66c 100644
--- a/packages/rum-core/src/boot/rumPublicApi.spec.ts
+++ b/packages/rum-core/src/boot/rumPublicApi.spec.ts
@@ -3,10 +3,13 @@ import { setup, TestSetupBuilder } from '../../test/specHelper'
 import { ActionType } from '../domain/rumEventsCollection/action/trackActions'
 import { makeRumPublicApi, RumPublicApi, RumUserConfiguration, StartRum } from './rumPublicApi'
 
-const noopStartRum = () => ({
+const noopStartRum = (): ReturnType<StartRum> => ({
   addAction: () => undefined,
   addError: () => undefined,
+  configuration: {} as any,
   getInternalContext: () => undefined,
+  lifeCycle: {} as any,
+  parentContexts: {} as any,
 })
 const DEFAULT_INIT_CONFIGURATION = { applicationId: 'xxx', clientToken: 'xxx' }
diff --git a/packages/rum-core/src/index.ts b/packages/rum-core/src/index.ts
index 7e68340242..1c5977995c 100644
--- a/packages/rum-core/src/index.ts
+++ b/packages/rum-core/src/index.ts
@@ -9,5 +9,7 @@
   RumResourceEvent,
   RumLongTaskEvent,
 } from './rumEvent.types'
+export { CommonContext } from './rawRumEvent.types'
 export { startRum } from './boot/rum'
 export { LifeCycle, LifeCycleEventType } from './domain/lifeCycle'
+export { ParentContexts } from './domain/parentContexts'
diff --git a/packages/rum-core/src/rawRumEvent.types.ts b/packages/rum-core/src/rawRumEvent.types.ts
index 8f3013fb88..9349d7b9f7 100644
--- a/packages/rum-core/src/rawRumEvent.types.ts
+++ b/packages/rum-core/src/rawRumEvent.types.ts
@@ -173,4 +173,5 @@ export interface User {
 export interface CommonContext {
   user: User
   context: Context
+  hasReplay?: boolean
 }
diff --git a/packages/rum-recorder/package.json b/packages/rum-recorder/package.json
index 94e5eaaf99..3b7fc324cb 100644
--- a/packages/rum-recorder/package.json
+++ b/packages/rum-recorder/package.json
@@ -13,7 +13,7 @@
   },
   "dependencies": {
     "@datadog/browser-core": "2.1.2",
-    "@datadog/browser-rum": "2.1.2",
+    "@datadog/browser-rum-core": "2.1.2",
     "@types/css-font-loading-module": "0.0.4",
     "rrweb-snapshot": "1.0.1",
     "tslib": "^1.10.0"
diff --git a/packages/rum-recorder/src/boot/recorder.entry.ts b/packages/rum-recorder/src/boot/recorder.entry.ts
index efe246f3e7..0299f5728d 100644
--- a/packages/rum-recorder/src/boot/recorder.entry.ts
+++ b/packages/rum-recorder/src/boot/recorder.entry.ts
@@ -1 +1,29 @@
-export * from '@datadog/browser-rum'
+import { defineGlobal, getGlobalObject } from '@datadog/browser-core'
+import {
+  CommonContext,
+  makeRumPublicApi,
+  RumPublicApi,
+  RumUserConfiguration,
+  startRum,
+} from '@datadog/browser-rum-core'
+
+import { startRecording } from './recorder'
+
+function startRumAndRecording(userConfiguration: RumUserConfiguration, getCommonContext: () => CommonContext) {
+  const startRumResult = startRum(userConfiguration, () => ({
+    ...getCommonContext(),
+    hasReplay: true,
+  }))
+
+  const { lifeCycle, parentContexts, configuration
} = startRumResult + startRecording(lifeCycle, userConfiguration.applicationId, configuration, parentContexts) + + return startRumResult +} + +export const datadogRum = makeRumPublicApi(startRumAndRecording) + +interface BrowserWindow extends Window { + DD_RUM?: RumPublicApi +} +defineGlobal(getGlobalObject(), 'DD_RUM', datadogRum) diff --git a/packages/rum-recorder/src/boot/recorder.spec.ts b/packages/rum-recorder/src/boot/recorder.spec.ts new file mode 100644 index 0000000000..41d2f887c3 --- /dev/null +++ b/packages/rum-recorder/src/boot/recorder.spec.ts @@ -0,0 +1,136 @@ +import { createNewEvent, HttpRequest, isIE, throttle } from '@datadog/browser-core' +import { LifeCycle, LifeCycleEventType } from '@datadog/browser-rum-core' + +import { setup, TestSetupBuilder } from '../../../rum-core/test/specHelper' + +import { startRecording } from './recorder' + +describe('startRecording', () => { + let setupBuilder: TestSetupBuilder + let sessionId: string | undefined + + beforeEach(() => { + if (isIE()) { + pending('IE not supported') + } + sessionId = 'session-id' + setupBuilder = setup() + .withParentContexts({ + findView() { + return { + session: { + id: sessionId, + }, + view: { + id: 'view-id', + referrer: '', + url: 'http://example.org', + }, + } + }, + }) + .beforeBuild(({ lifeCycle, applicationId, configuration, parentContexts }) => { + return startRecording(lifeCycle, applicationId, configuration, parentContexts) + }) + }) + + afterEach(() => { + setupBuilder.cleanup() + }) + + it('starts recording', (done) => { + waitRequests((requests) => { + expect(requests).toEqual([{ data: jasmine.any(FormData), size: jasmine.any(Number) }]) + expect(formDataAsObject(requests[0].data)).toEqual({ + 'application.id': 'appId', + creation_reason: 'init', + end: jasmine.stringMatching(/^\d{13}$/), + has_full_snapshot: 'true', + records_count: '2', + segment: jasmine.any(File), + 'session.id': 'session-id', + start: jasmine.stringMatching(/^\d{13}$/), + 'view.id': 'view-id', + }) + done() + }) + + const { lifeCycle } = setupBuilder.build() + renewSegment(lifeCycle) + }) + + it('renews the segment when its compressed data is getting too large', (done) => { + const clickCount = 10_000 + waitRequests((requests) => { + expect(requests.length).toBe(1) + expect(requests[0].data.get('records_count')).toBe(String(clickCount + 2)) + done() + }) + + setupBuilder.build() + const click = createNewEvent('click') + for (let i = 0; i < clickCount; i += 1) { + document.body.dispatchEvent(click) + } + }) + + it('stops sending new segment when the session is expired', (done) => { + waitRequests((requests) => { + expect(requests.length).toBe(1) + expect(requests[0].data.get('records_count')).toBe('3') + done() + }) + + const { lifeCycle } = setupBuilder.build() + + document.body.dispatchEvent(createNewEvent('click')) + + sessionId = undefined + renewSegment(lifeCycle) + document.body.dispatchEvent(createNewEvent('click')) + + renewSegment(lifeCycle) + }) + + it('restarts sending segments when the session is renewed', (done) => { + waitRequests((requests) => { + expect(requests.length).toBe(1) + expect(requests[0].data.get('records_count')).toBe('1') + expect(requests[0].data.get('session.id')).toBe('new-session-id') + done() + }) + + sessionId = undefined + const { lifeCycle } = setupBuilder.build() + + document.body.dispatchEvent(createNewEvent('click')) + + sessionId = 'new-session-id' + renewSegment(lifeCycle) + document.body.dispatchEvent(createNewEvent('click')) + + renewSegment(lifeCycle) + }) +}) + +function 
renewSegment(lifeCycle: LifeCycle) { + lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) +} + +function formDataAsObject(data: FormData) { + const result: { [key: string]: unknown } = {} + data.forEach((value, key) => { + result[key] = value + }) + return result +} + +function waitRequests(callback: (requests: Array<{ data: FormData; size: number }>) => void) { + const requests: Array<{ data: FormData; size: number }> = [] + // Throttle the callback, so it is called only after the last request being sent + const { throttled: throttledCallback } = throttle(() => callback(requests), 300, { leading: false }) + spyOn(HttpRequest.prototype, 'send').and.callFake((data: FormData, size) => { + requests.push({ data, size }) + throttledCallback() + }) +} diff --git a/packages/rum-recorder/src/boot/recorder.ts b/packages/rum-recorder/src/boot/recorder.ts new file mode 100644 index 0000000000..136621f5ff --- /dev/null +++ b/packages/rum-recorder/src/boot/recorder.ts @@ -0,0 +1,67 @@ +import { Configuration } from '@datadog/browser-core' +import { LifeCycle, ParentContexts } from '@datadog/browser-rum-core' + +import { DeflateSegmentWriter } from '../domain/deflateSegmentWriter' +import { createDeflateWorker } from '../domain/deflateWorker' +import { record } from '../domain/rrweb' +import { startSegmentCollection } from '../domain/segmentCollection' +import { trackSegmentRenewal } from '../domain/trackSegmentRenewal' +import { send, SEND_BEACON_BYTE_LENGTH_LIMIT } from '../transport/send' + +export function startRecording( + lifeCycle: LifeCycle, + applicationId: string, + configuration: Configuration, + parentContexts: ParentContexts +) { + const worker = createDeflateWorker() + + const writer = new DeflateSegmentWriter( + worker, + (size) => { + if (size > SEND_BEACON_BYTE_LENGTH_LIMIT) { + renewSegment('max_size') + } + }, + (data, meta) => { + send(configuration.sessionReplayEndpoint, data, meta) + } + ) + + const { addRecord, renewSegment } = startSegmentCollection( + () => getSegmentContext(applicationId, parentContexts), + writer + ) + + const { stop: stopSegmentRenewal } = trackSegmentRenewal(lifeCycle, renewSegment) + + const stopRecording = record({ + emit: addRecord, + })! 
+ + return { + stop() { + stopSegmentRenewal() + stopRecording() + worker.terminate() + }, + } +} + +function getSegmentContext(applicationId: string, parentContexts: ParentContexts) { + const viewContext = parentContexts.findView() + if (!viewContext?.session.id) { + return undefined + } + return { + application: { + id: applicationId, + }, + session: { + id: viewContext.session.id, + }, + view: { + id: viewContext.view.id, + }, + } +} diff --git a/packages/rum-recorder/src/index.ts b/packages/rum-recorder/src/index.ts index 0c7941c4bf..76b3b7832a 100644 --- a/packages/rum-recorder/src/index.ts +++ b/packages/rum-recorder/src/index.ts @@ -1 +1,15 @@ -export * from './boot/recorder.entry' +// Keep the following in sync with packages/rum/src/index.ts +export { datadogRum } from './boot/recorder.entry' +export { + CommonProperties, + ProvidedSource, + RumPublicApi as RumGlobal, + RumUserConfiguration, + // Events + RumEvent, + RumActionEvent, + RumErrorEvent, + RumLongTaskEvent, + RumResourceEvent, + RumViewEvent, +} from '@datadog/browser-rum-core' diff --git a/packages/rum-recorder/src/transport/send.ts b/packages/rum-recorder/src/transport/send.ts new file mode 100644 index 0000000000..38a1fa9e9d --- /dev/null +++ b/packages/rum-recorder/src/transport/send.ts @@ -0,0 +1,31 @@ +import { HttpRequest, objectEntries } from '@datadog/browser-core' +import { SegmentMeta } from '../types' + +export const SEND_BEACON_BYTE_LENGTH_LIMIT = 60_000 + +export function send(endpointUrl: string, data: Uint8Array, meta: SegmentMeta): void { + const formData = new FormData() + + formData.set( + 'segment', + new Blob([data], { + type: 'application/octet-stream', + }), + `${meta.session.id}-${meta.start}` + ) + + toFormEntries(meta, (key, value) => formData.set(key, value)) + + const request = new HttpRequest(endpointUrl, SEND_BEACON_BYTE_LENGTH_LIMIT) + request.send(formData, data.byteLength) +} + +function toFormEntries(input: object, onEntry: (key: string, value: string) => void, prefix = '') { + objectEntries(input as { [key: string]: unknown }).forEach(([key, value]) => { + if (typeof value === 'object' && value !== null) { + toFormEntries(value, onEntry, `${prefix}${key}.`) + } else { + onEntry(`${prefix}${key}`, String(value)) + } + }) +} diff --git a/packages/rum/src/index.ts b/packages/rum/src/index.ts index 116108156a..543c091dc0 100644 --- a/packages/rum/src/index.ts +++ b/packages/rum/src/index.ts @@ -1,3 +1,4 @@ +// Keep the following in sync with packages/rum-recorder/src/index.ts export { datadogRum } from './boot/rum.entry' export { CommonProperties, diff --git a/tsconfig.base.json b/tsconfig.base.json index 3c8a0c67ab..422116cbb0 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -19,8 +19,7 @@ "paths": { "@datadog/browser-core": ["./packages/core/src"], - "@datadog/browser-rum-core": ["./packages/rum-core/src"], - "@datadog/browser-rum": ["./packages/rum/src"] + "@datadog/browser-rum-core": ["./packages/rum-core/src"] } } } From a52d702e4a5f4c82c672ab2755aa9fdbaf67af6c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Thu, 31 Dec 2020 16:55:11 +0100 Subject: [PATCH 07/43] =?UTF-8?q?=E2=9C=85=20[RUMF-804]=20add=20support=20?= =?UTF-8?q?for=20session=20replay=20in=20E2E=20tests?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit and add a recorder test --- LICENSE-3rdparty.csv | 2 + package.json | 2 + .../core/src/domain/configuration.spec.ts | 1 + packages/core/src/domain/configuration.ts | 1 + 
packages/rum-recorder/src/index.ts | 7 ++ test/app/yarn.lock | 4 +- test/e2e/lib/framework/createTest.ts | 8 +++ test/e2e/lib/framework/eventsRegistry.ts | 6 +- test/e2e/lib/framework/pageSetups.ts | 26 +++++--- test/e2e/lib/framework/sdkBuilds.ts | 7 ++ test/e2e/lib/framework/serverApps/intake.ts | 60 ++++++++++++++++- test/e2e/lib/framework/serverApps/mock.ts | 6 +- test/e2e/lib/types/serverEvents.ts | 13 ++++ test/e2e/scenario/recorder.scenario.ts | 65 +++++++++++++++++++ yarn.lock | 41 ++++++++++++ 15 files changed, 235 insertions(+), 14 deletions(-) create mode 100644 test/e2e/scenario/recorder.scenario.ts diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 397b05e40c..e7fe16a247 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -4,6 +4,7 @@ file,rrweb,MIT,Copyright (c) 2018 Contributors (https://github.com/rrweb-io/rrwe file,tracekit,MIT,Copyright 2013 Onur Can Cakmak and all TraceKit contributors prod,@types/css-font-loading-module,MIT,Copyright Microsoft Corporation prod,rrweb-snapshot,MIT,Copyright (c) 2018 Contributors (https://github.com/rrweb-io/rrweb-snapshot/graphs/contributors) and SmartX Inc. +dev,@types/connect-busboy,MIT,Copyright Microsoft Corporation dev,@types/cors,MIT,Copyright Microsoft Corporation dev,@types/express,MIT,Copyright Microsoft Corporation dev,@types/jasmine,MIT,Copyright Microsoft Corporation @@ -19,6 +20,7 @@ dev,@wdio/sync,MIT,Copyright JS Foundation and other contributors dev,ajv,MIT,Copyright 2015-2017 Evgeny Poberezkin dev,browserstack-local,MIT,Copyright 2016 BrowserStack dev,codecov,MIT,Copyright 2014 Gregg Caines +dev,connect-busboy,MIT,Copyright Brian White dev,cors,MIT,Copyright 2013 Troy Goode dev,emoji-name-map,MIT,Copyright 2016-19 Ionică Bizău (https://ionicabizau.net) dev,express,MIT,Copyright 2009-2014 TJ Holowaychuk 2013-2014 Roman Shtylman 2014-2015 Douglas Christopher Wilson diff --git a/package.json b/package.json index 63f62526d7..161d81d27a 100644 --- a/package.json +++ b/package.json @@ -27,6 +27,7 @@ "rum-events-format:sync": "scripts/cli update_submodule && scripts/cli build_json2type && node scripts/generate-schema-types.js" }, "devDependencies": { + "@types/connect-busboy": "0.0.2", "@types/cors": "2.8.7", "@types/express": "4.17.8", "@types/jasmine": "3.5.10", @@ -42,6 +43,7 @@ "ajv": "6.12.6", "browserstack-local": "1.4.5", "codecov": "3.7.1", + "connect-busboy": "0.0.2", "cors": "2.8.5", "emoji-name-map": "1.2.8", "express": "4.17.1", diff --git a/packages/core/src/domain/configuration.spec.ts b/packages/core/src/domain/configuration.spec.ts index bff5832ce9..375cb436d3 100644 --- a/packages/core/src/domain/configuration.spec.ts +++ b/packages/core/src/domain/configuration.spec.ts @@ -30,6 +30,7 @@ describe('configuration', () => { expect(configuration.rumEndpoint).toEqual('<<< E2E RUM ENDPOINT >>>') expect(configuration.logsEndpoint).toEqual('<<< E2E LOGS ENDPOINT >>>') expect(configuration.internalMonitoringEndpoint).toEqual('<<< E2E INTERNAL MONITORING ENDPOINT >>>') + expect(configuration.sessionReplayEndpoint).toEqual('<<< E2E SESSION REPLAY ENDPOINT >>>') }) }) diff --git a/packages/core/src/domain/configuration.ts b/packages/core/src/domain/configuration.ts index 6bccc30060..4d8ff3461b 100644 --- a/packages/core/src/domain/configuration.ts +++ b/packages/core/src/domain/configuration.ts @@ -190,6 +190,7 @@ export function buildConfiguration(userConfiguration: UserConfiguration, buildEn configuration.internalMonitoringEndpoint = '<<< E2E INTERNAL MONITORING ENDPOINT >>>' 
configuration.logsEndpoint = '<<< E2E LOGS ENDPOINT >>>'
     configuration.rumEndpoint = '<<< E2E RUM ENDPOINT >>>'
+    configuration.sessionReplayEndpoint = '<<< E2E SESSION REPLAY ENDPOINT >>>'
   }
 
   if (transportConfiguration.buildMode === BuildMode.STAGING) {
diff --git a/packages/rum-recorder/src/index.ts b/packages/rum-recorder/src/index.ts
index 76b3b7832a..d54d9d9fa7 100644
--- a/packages/rum-recorder/src/index.ts
+++ b/packages/rum-recorder/src/index.ts
@@ -13,3 +13,10 @@ export {
   RumResourceEvent,
   RumViewEvent,
 } from '@datadog/browser-rum-core'
+
+export {
+  Segment as internal_Segment,
+  CreationReason as internal_CreationReason,
+  IncrementalSource as internal_IncrementalSource,
+  RecordType as internal_RecordType,
+} from './types'
diff --git a/test/app/yarn.lock b/test/app/yarn.lock
index a57b1b4cd1..8080a1a584 100644
--- a/test/app/yarn.lock
+++ b/test/app/yarn.lock
@@ -23,12 +23,12 @@
   version "2.1.2"
   dependencies:
     "@datadog/browser-core" "2.1.2"
-    "@datadog/browser-rum" "2.1.2"
+    "@datadog/browser-rum-core" "2.1.2"
     "@types/css-font-loading-module" "0.0.4"
     rrweb-snapshot "1.0.1"
     tslib "^1.10.0"
 
-"@datadog/browser-rum@2.1.2", "@datadog/browser-rum@file:../../packages/rum":
+"@datadog/browser-rum@file:../../packages/rum":
   version "2.1.2"
   dependencies:
     "@datadog/browser-core" "2.1.2"
diff --git a/test/e2e/lib/framework/createTest.ts b/test/e2e/lib/framework/createTest.ts
index 59083eca16..ac5aa468fa 100644
--- a/test/e2e/lib/framework/createTest.ts
+++ b/test/e2e/lib/framework/createTest.ts
@@ -34,6 +34,7 @@ type TestRunner = (testContext: TestContext) => Promise<void>
 
 class TestBuilder {
   private rumOptions: RumSetupOptions | undefined = undefined
+  private rumRecorderOptions: RumSetupOptions | undefined = undefined
   private logsOptions: LogsSetupOptions | undefined = undefined
   private head: string = ''
   private body: string = ''
@@ -46,6 +47,11 @@
     return this
   }
 
+  withRumRecorder(rumRecorderOptions?: RumSetupOptions) {
+    this.rumRecorderOptions = { ...DEFAULT_RUM_OPTIONS, ...rumRecorderOptions }
+    return this
+  }
+
   withLogs(logsOptions?: LogsSetupOptions) {
     this.logsOptions = { ...DEFAULT_LOGS_OPTIONS, ...logsOptions }
     return this
@@ -77,6 +83,7 @@
       head: this.head,
       logs: this.logsOptions,
       rum: this.rumOptions,
+      rumRecorder: this.rumRecorderOptions,
     }
 
     if (setups.length > 1) {
@@ -131,6 +138,7 @@
       internalMonitoring: `${servers.intake.url}/v1/input/internalMonitoring`,
       logs: `${servers.intake.url}/v1/input/logs`,
       rum: `${servers.intake.url}/v1/input/rum`,
+      sessionReplay: `${servers.intake.url}/v1/input/sessionReplay`,
     },
     events: new EventRegistry(),
   }
diff --git a/test/e2e/lib/framework/eventsRegistry.ts b/test/e2e/lib/framework/eventsRegistry.ts
index a83f26fe1a..64f68b69a8 100644
--- a/test/e2e/lib/framework/eventsRegistry.ts
+++ b/test/e2e/lib/framework/eventsRegistry.ts
@@ -5,14 +5,16 @@ import {
   isRumResourceEvent,
   isRumUserActionEvent,
   isRumViewEvent,
+  SerssionReplayCall,
   ServerInternalMonitoringMessage,
 } from '../types/serverEvents'
 
-type IntakeType = 'logs' | 'rum' | 'internalMonitoring'
+type IntakeType = 'logs' | 'rum' | 'internalMonitoring' | 'sessionReplay'
 
 export class EventRegistry {
   readonly rum: RumEvent[] = []
   readonly logs: LogsEvent[] = []
+  readonly sessionReplay: SerssionReplayCall[] = []
   readonly internalMonitoring: ServerInternalMonitoringMessage[] = []
 
   push(type: IntakeType, event: any) {
@@ -21,7 +23,7 @@
   }
 
   get count() {
-    return
this.logs.length + this.rum.length + this.internalMonitoring.length + return this.logs.length + this.rum.length + this.internalMonitoring.length + this.sessionReplay.length } get rumActions() { diff --git a/test/e2e/lib/framework/pageSetups.ts b/test/e2e/lib/framework/pageSetups.ts index 8b30bb6531..1fbf8b2304 100644 --- a/test/e2e/lib/framework/pageSetups.ts +++ b/test/e2e/lib/framework/pageSetups.ts @@ -15,6 +15,7 @@ export interface LogsSetupOptions { export interface SetupOptions { rum?: RumSetupOptions + rumRecorder?: RumSetupOptions logs?: LogsSetupOptions head?: string body?: string @@ -56,12 +57,13 @@ n=o.getElementsByTagName(u)[0];n.parentNode.insertBefore(d,n) ` } - if (options.rum) { + const rumOptions = options.rumRecorder || options.rum + if (rumOptions) { body += html` ` @@ -84,11 +86,16 @@ export function bundleSetup(options: SetupOptions) { ` } - if (options.rum) { + + const rumOptions = options.rumRecorder || options.rum + if (rumOptions) { header += html` - + ` } @@ -109,10 +116,12 @@ export function npmSetup(options: SetupOptions) { ` } - if (options.rum) { + + const rumOptions = options.rumRecorder || options.rum + if (rumOptions) { header += html` ` } @@ -147,6 +156,7 @@ export function html(parts: ReadonlyArray, ...vars: string[]) { function formatLogsOptions(options: LogsSetupOptions) { return JSON.stringify(options) } + function formatRumOptions(options: RumSetupOptions) { return JSON.stringify(options).replace('"LOCATION_ORIGIN"', 'location.origin') } diff --git a/test/e2e/lib/framework/sdkBuilds.ts b/test/e2e/lib/framework/sdkBuilds.ts index 617a79bf62..632c787764 100644 --- a/test/e2e/lib/framework/sdkBuilds.ts +++ b/test/e2e/lib/framework/sdkBuilds.ts @@ -7,18 +7,24 @@ const readFile = promisify(fs.readFile) const ROOT = path.join(__dirname, '../../../..') const RUM_BUNDLE = path.join(ROOT, 'packages/rum/bundle/datadog-rum.js') const LOGS_BUNDLE = path.join(ROOT, 'packages/logs/bundle/datadog-logs.js') +const RUM_RECORDER_BUNDLE = path.join(ROOT, 'packages/rum-recorder/bundle/datadog-rum-recorder.js') const NPM_BUNDLE = path.join(ROOT, 'test/app/dist/app.js') export interface Endpoints { rum: string logs: string internalMonitoring: string + sessionReplay: string } export async function buildRum(endpoints: Endpoints) { return replaceEndpoints(await readFile(RUM_BUNDLE), endpoints) } +export async function buildRumRecorder(endpoints: Endpoints) { + return replaceEndpoints(await readFile(RUM_RECORDER_BUNDLE), endpoints) +} + export async function buildLogs(endpoints: Endpoints) { return replaceEndpoints(await readFile(LOGS_BUNDLE), endpoints) } @@ -32,6 +38,7 @@ function replaceEndpoints(content: Buffer, endpoints: Endpoints) { '<<< E2E INTERNAL MONITORING ENDPOINT >>>': endpoints.internalMonitoring, '<<< E2E LOGS ENDPOINT >>>': endpoints.logs, '<<< E2E RUM ENDPOINT >>>': endpoints.rum, + '<<< E2E SESSION REPLAY ENDPOINT >>>': endpoints.sessionReplay, }) } diff --git a/test/e2e/lib/framework/serverApps/intake.ts b/test/e2e/lib/framework/serverApps/intake.ts index 35a3505b70..4995532e45 100644 --- a/test/e2e/lib/framework/serverApps/intake.ts +++ b/test/e2e/lib/framework/serverApps/intake.ts @@ -1,18 +1,76 @@ +import connectBusboy from 'connect-busboy' import express from 'express' +import { createInflate } from 'zlib' + +import { SegmentFile, SerssionReplayCall } from '../../types/serverEvents' import { EventRegistry } from '../eventsRegistry' export function createIntakeServerApp(events: EventRegistry) { const app = express() app.use(express.text()) + 
app.use(connectBusboy({ immediate: true }))
 
-  app.post('/v1/input/:endpoint', (req, res) => {
+  app.post('/v1/input/:endpoint', async (req, res) => {
     const endpoint = req.params.endpoint
     if (endpoint === 'rum' || endpoint === 'logs' || endpoint === 'internalMonitoring') {
       ;(req.body as string).split('\n').map((rawEvent) => events.push(endpoint, JSON.parse(rawEvent) as any))
     }
+
+    if (endpoint === 'sessionReplay' && req.busboy) {
+      events.push('sessionReplay', await readSessionReplay(req))
+    }
+
     res.end()
   })
 
   return app
 }
+
+async function readSessionReplay(req: express.Request): Promise<SerssionReplayCall> {
+  return new Promise((resolve, reject) => {
+    const meta: {
+      [field: string]: string
+    } = {}
+    let segmentPromise: Promise<SegmentFile>
+
+    req.busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
+      if (fieldname === 'segment') {
+        segmentPromise = readStream(file.pipe(createInflate())).then((data) => ({
+          encoding,
+          filename,
+          mimetype,
+          data: JSON.parse(data.toString()),
+        }))
+      }
+    })
+
+    req.busboy.on('field', (key: string, value: string) => {
+      meta[key] = value
+    })
+
+    req.busboy.on('finish', async () => {
+      try {
+        const segment = await segmentPromise
+        resolve({ meta, segment })
+      } catch (e) {
+        reject(e)
+      }
+    })
+  })
+}
+
+async function readStream(stream: NodeJS.ReadableStream): Promise<Buffer> {
+  return new Promise((resolve, reject) => {
+    const buffers: Buffer[] = []
+    stream.on('data', (data: Buffer) => {
+      buffers.push(data)
+    })
+    stream.on('error', (error) => {
+      reject(error)
+    })
+    stream.on('end', () => {
+      resolve(Buffer.concat(buffers))
+    })
+  })
+}
diff --git a/test/e2e/lib/framework/serverApps/mock.ts b/test/e2e/lib/framework/serverApps/mock.ts
index 88da0e6cc9..8889f0b1ae 100644
--- a/test/e2e/lib/framework/serverApps/mock.ts
+++ b/test/e2e/lib/framework/serverApps/mock.ts
@@ -1,7 +1,7 @@
 import cors from 'cors'
 import express from 'express'
 import * as url from 'url'
-import { buildLogs, buildNpm, buildRum, Endpoints } from '../sdkBuilds'
+import { buildLogs, buildNpm, buildRum, buildRumRecorder, Endpoints } from '../sdkBuilds'
 
 export function createMockServerApp(endpoints: Endpoints, setup: string) {
   const app = express()
@@ -59,6 +59,10 @@
     res.header('content-type', 'application/javascript').send(await buildRum(endpoints))
   })
 
+  app.get('/datadog-rum-recorder.js', async (req, res) => {
+    res.header('content-type', 'application/javascript').send(await buildRumRecorder(endpoints))
+  })
+
   app.get('/app.js', async (req, res) => {
     res.header('content-type', 'application/javascript').send(await buildNpm(endpoints))
   })
diff --git a/test/e2e/lib/types/serverEvents.ts b/test/e2e/lib/types/serverEvents.ts
index 23f574db94..923072201b 100644
--- a/test/e2e/lib/types/serverEvents.ts
+++ b/test/e2e/lib/types/serverEvents.ts
@@ -1,4 +1,5 @@
 import { RumActionEvent, RumErrorEvent, RumEvent, RumResourceEvent, RumViewEvent } from '@datadog/browser-rum'
+import { internal_Segment as Segment } from '@datadog/browser-rum-recorder'
 
 export interface ServerInternalMonitoringMessage {
   message: string
@@ -23,3 +24,15 @@ export function isRumViewEvent(event: RumEvent): event is RumViewEvent {
 export function isRumErrorEvent(event: RumEvent): event is RumErrorEvent {
   return event.type === 'error'
 }
+
+export interface SegmentFile {
+  filename: string
+  encoding: string
+  mimetype: string
+  data: Segment
+}
+
+export interface SerssionReplayCall {
+  segment: SegmentFile
+  meta: { [key: string]: string }
+}
diff --git
a/test/e2e/scenario/recorder.scenario.ts b/test/e2e/scenario/recorder.scenario.ts new file mode 100644 index 0000000000..bb81a1f28e --- /dev/null +++ b/test/e2e/scenario/recorder.scenario.ts @@ -0,0 +1,65 @@ +import { + internal_CreationReason as CreationReason, + internal_IncrementalSource as IncrementalSource, + internal_RecordType as RecordType, +} from '@datadog/browser-rum-recorder' + +import { createTest } from '../lib/framework' +import { browserExecute } from '../lib/helpers/browser' +import { flushEvents } from '../lib/helpers/sdk' + +const INTEGER_RE = /^\d+$/ +const TIMESTAMP_RE = /^\d{13}$/ +const UUID_RE = /^[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}$/ + +describe('recorder', () => { + createTest('record mouse move') + .withRumRecorder() + .run(async ({ events }) => { + await browserExecute(() => { + return document.documentElement.outerHTML + }) + const html = await $('html') + await html.click() + await flushEvents() + + expect(events.sessionReplay.length).toBe(1) + const { segment, meta } = events.sessionReplay[0] + expect(meta).toEqual({ + 'application.id': jasmine.stringMatching(UUID_RE), + creation_reason: 'init', + end: jasmine.stringMatching(TIMESTAMP_RE), + has_full_snapshot: 'true', + records_count: jasmine.stringMatching(INTEGER_RE), + 'session.id': jasmine.stringMatching(UUID_RE), + start: jasmine.stringMatching(TIMESTAMP_RE), + 'view.id': jasmine.stringMatching(UUID_RE), + }) + expect(segment).toEqual({ + data: { + application: { id: meta['application.id'] }, + creation_reason: meta.creation_reason as CreationReason, + end: Number(meta.end), + has_full_snapshot: true, + records: jasmine.any(Array), + records_count: Number(meta.records_count), + session: { id: meta['session.id'] }, + start: Number(meta.start), + view: { id: meta['view.id'] }, + }, + encoding: jasmine.any(String), + filename: `${meta['session.id']}-${meta.start}`, + mimetype: 'application/octet-stream', + }) + expect(segment.data.records.find((record) => record.type === RecordType.Meta)).toBeTruthy('have a Meta record') + expect(segment.data.records.find((record) => record.type === RecordType.FullSnapshot)).toBeTruthy( + 'have a FullSnapshot record' + ) + expect( + segment.data.records.find( + (record) => + record.type === RecordType.IncrementalSnapshot && record.data.source === IncrementalSource.MouseInteraction + ) + ).toBeTruthy('have a IncrementalSnapshot/MouseInteraction record') + }) +}) diff --git a/yarn.lock b/yarn.lock index b484c5009c..574b361536 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1032,6 +1032,13 @@ "@types/connect" "*" "@types/node" "*" +"@types/busboy@*": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@types/busboy/-/busboy-0.2.3.tgz#6697ad29873246c530f09a3ff5a40861824230d5" + integrity sha1-ZpetKYcyRsUw8Jo/9aQIYYJCMNU= + dependencies: + "@types/node" "*" + "@types/cacheable-request@^6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.1.tgz#5d22f3dded1fd3a84c0bbeb5039a7419c2c91976" @@ -1047,6 +1054,14 @@ resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0" integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ== +"@types/connect-busboy@0.0.2": + version "0.0.2" + resolved "https://registry.yarnpkg.com/@types/connect-busboy/-/connect-busboy-0.0.2.tgz#2a17ab167984e8430d1def368c25f402ffa292aa" + integrity 
sha512-d0tPVq7z2knefTgdJ2IuWAavWSz3NUaToIq/DMWyUzG3wSK2lCpqfmHV/1J79BHjaz4xKvOi+JTK7JB+1LxQig==
+  dependencies:
+    "@types/busboy" "*"
+    "@types/express" "*"
+
 "@types/connect@*":
   version "3.4.33"
   resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.33.tgz#31610c901eca573b8713c3330abc6e6b9f588546"
@@ -2394,6 +2409,13 @@ builtins@^1.0.3:
   resolved "https://registry.yarnpkg.com/builtins/-/builtins-1.0.3.tgz#cb94faeb61c8696451db36534e1422f94f0aee88"
   integrity sha1-y5T662HIaWRR2zZTThQi+U8K7og=
 
+busboy@*:
+  version "0.3.1"
+  resolved "https://registry.yarnpkg.com/busboy/-/busboy-0.3.1.tgz#170899274c5bf38aae27d5c62b71268cd585fd1b"
+  integrity sha512-y7tTxhGKXcyBxRKAni+awqx8uqaJKrSFSNFSeRG5CsWNdmy2BIK+6VGWEW7TZnIO/533mtMEA4rOevQV815YJw==
+  dependencies:
+    dicer "0.3.0"
+
 byline@^5.0.0:
   version "5.0.0"
   resolved "https://registry.yarnpkg.com/byline/-/byline-5.0.0.tgz#741c5216468eadc457b03410118ad77de8c1ddb1"
   integrity sha1-dBxSFkaOrcRXsDQQEYrXfejB3bE=
@@ -2932,6 +2954,13 @@ config-chain@^1.1.11:
     ini "^1.3.4"
     proto-list "~1.2.1"
 
+connect-busboy@0.0.2:
+  version "0.0.2"
+  resolved "https://registry.yarnpkg.com/connect-busboy/-/connect-busboy-0.0.2.tgz#ac5c9c96672171885e576c66b2bfd95d3bb11097"
+  integrity sha1-rFyclmchcYheV2xmsr/ZXTuxEJc=
+  dependencies:
+    busboy "*"
+
 connect@^3.6.0:
   version "3.7.0"
   resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8"
@@ -3478,6 +3507,13 @@ di@^0.0.1:
   resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c"
   integrity sha1-gGZJMmzqp8qjMG112YXqJ0i6kTw=
 
+dicer@0.3.0:
+  version "0.3.0"
+  resolved "https://registry.yarnpkg.com/dicer/-/dicer-0.3.0.tgz#eacd98b3bfbf92e8ab5c2fdb71aaac44bb06b872"
+  integrity sha512-MdceRRWqltEG2dZqO769g27N/3PXfcKl04VhYnBlo2YhH7zPi88VebsjTKclaOyiuMaGU72hTfw3VkUitGcVCA==
+  dependencies:
+    streamsearch "0.1.2"
+
 diff-sequences@^25.2.6:
   version "25.2.6"
   resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-25.2.6.tgz#5f467c00edd35352b7bca46d7927d60e687a76dd"
@@ -8703,6 +8739,11 @@ streamroller@^1.0.6:
     fs-extra "^7.0.1"
     lodash "^4.17.14"
 
+streamsearch@0.1.2:
+  version "0.1.2"
+  resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-0.1.2.tgz#808b9d0e56fc273d809ba57338e929919a1a9f1a"
+  integrity sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=
+
 string-replace-loader@2.2.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/string-replace-loader/-/string-replace-loader-2.2.0.tgz#0a0e6543fcec783d85c353a3e96a23872d45a94f"
From 583a557ee03dcad3ad1a8be13e4f86fe91c1df3e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Thu, 7 Jan 2021 10:55:47 +0100
Subject: [PATCH 08/43] =?UTF-8?q?=F0=9F=8F=B7=EF=B8=8F=20remove=20the=20@t?=
 =?UTF-8?q?ypes/css-font-loading-module=20dependency?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This dependency (only used for typings) was using `Set`, making the
rum-recorder package require the es2015 TypeScript lib. This commit removes
the dependency and replaces it with lighter typings.
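For reference, the removed package declares the font set roughly like this
(abridged and illustrative, not the exact file):

    interface FontFaceSet extends Set<FontFace> {
      readonly ready: Promise<FontFaceSet>
      load(font: string, text?: string): Promise<FontFace[]>
    }

Extending `Set` means every consumer of these typings must compile with the
es2015 `lib` enabled; the local replacement below only declares the few
members actually used by the recorder.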
--- LICENSE-3rdparty.csv | 1 - packages/rum-recorder/package.json | 1 - .../rum-recorder/src/domain/rrweb/observer.ts | 54 ++++++++++++------- .../rum-recorder/src/domain/rrweb/types.ts | 11 +++- test/app/yarn.lock | 6 --- yarn.lock | 5 -- 6 files changed, 44 insertions(+), 34 deletions(-) diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index e7fe16a247..71b54c7bbd 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -2,7 +2,6 @@ Component,Origin,License,Copyright require,tslib,Apache-2.0,Copyright Microsoft Corporation file,rrweb,MIT,Copyright (c) 2018 Contributors (https://github.com/rrweb-io/rrweb/graphs/contributors) and SmartX Inc. file,tracekit,MIT,Copyright 2013 Onur Can Cakmak and all TraceKit contributors -prod,@types/css-font-loading-module,MIT,Copyright Microsoft Corporation prod,rrweb-snapshot,MIT,Copyright (c) 2018 Contributors (https://github.com/rrweb-io/rrweb-snapshot/graphs/contributors) and SmartX Inc. dev,@types/connect-busboy,MIT,Copyright Microsoft Corporation dev,@types/cors,MIT,Copyright Microsoft Corporation diff --git a/packages/rum-recorder/package.json b/packages/rum-recorder/package.json index 3b7fc324cb..9992502f14 100644 --- a/packages/rum-recorder/package.json +++ b/packages/rum-recorder/package.json @@ -14,7 +14,6 @@ "dependencies": { "@datadog/browser-core": "2.1.2", "@datadog/browser-rum-core": "2.1.2", - "@types/css-font-loading-module": "0.0.4", "rrweb-snapshot": "1.0.1", "tslib": "^1.10.0" }, diff --git a/packages/rum-recorder/src/domain/rrweb/observer.ts b/packages/rum-recorder/src/domain/rrweb/observer.ts index 1b0ea4c6c0..fe6555b79a 100644 --- a/packages/rum-recorder/src/domain/rrweb/observer.ts +++ b/packages/rum-recorder/src/domain/rrweb/observer.ts @@ -1,7 +1,5 @@ /* tslint:disable:no-null-keyword */ import { noop } from '@datadog/browser-core' -// tslint:disable-next-line: no-implicit-dependencies -import { FontFaceDescriptors, FontFaceSet } from 'css-font-loading-module' import { INode, MaskInputOptions, SlimDOMOptions } from 'rrweb-snapshot' import { MutationBuffer } from './mutation' import { @@ -9,6 +7,7 @@ import { BlockClass, CanvasMutationCallback, FontCallback, + FontFaceDescriptors, FontParam, HookResetter, HooksParam, @@ -380,18 +379,31 @@ function initCanvasMutationObserver(cb: CanvasMutationCallback, blockClass: Bloc } } +declare class FontFace { + constructor(family: string, source: string | ArrayBufferView, descriptors?: FontFaceDescriptors) +} + +type WindowWithFontFace = typeof window & { + FontFace: typeof FontFace +} + +type DocumentWithFonts = Document & { + fonts: { add(fontFace: FontFace): void } +} + function initFontObserver(cb: FontCallback): ListenerHandler { const handlers: ListenerHandler[] = [] - const fontMap = new WeakMap() + const fontMap = new WeakMap() + + const originalFontFace = (window as WindowWithFontFace).FontFace - const originalFontFace = FontFace - // tslint:disable-next-line: no-any - ;(window as any).FontFace = function FontFace( + // tslint:disable-next-line: no-shadowed-variable + ;(window as WindowWithFontFace).FontFace = (function FontFace( family: string, source: string | ArrayBufferView, descriptors?: FontFaceDescriptors - ) { + ): FontFace { const fontFace = new originalFontFace(family, source, descriptors) fontMap.set(fontFace, { descriptors, @@ -404,20 +416,24 @@ function initFontObserver(cb: FontCallback): ListenerHandler { JSON.stringify(Array.from(new Uint8Array(source as any))), }) return fontFace - } + } as unknown) as typeof FontFace - const 
restoreHandler = patch(document.fonts, 'add', (original: (fontFace: FontFace) => unknown) => {
-    return function (this: FontFaceSet, fontFace: FontFace) {
-      setTimeout(() => {
-        const p = fontMap.get(fontFace)
-        if (p) {
-          cb(p)
-          fontMap.delete(fontFace)
-        }
-      }, 0)
-      return original.apply(this, [fontFace])
+  const restoreHandler = patch(
+    (document as DocumentWithFonts).fonts,
+    'add',
+    (original: (fontFace: FontFace) => unknown) => {
+      return function (this: unknown, fontFace: FontFace) {
+        setTimeout(() => {
+          const p = fontMap.get(fontFace)
+          if (p) {
+            cb(p)
+            fontMap.delete(fontFace)
+          }
+        }, 0)
+        return original.apply(this, [fontFace])
+      }
     }
-  })
+  )
 
   handlers.push(() => {
     // tslint:disable-next-line: no-any
diff --git a/packages/rum-recorder/src/domain/rrweb/types.ts b/packages/rum-recorder/src/domain/rrweb/types.ts
index 5332ce1028..eeba910161 100644
--- a/packages/rum-recorder/src/domain/rrweb/types.ts
+++ b/packages/rum-recorder/src/domain/rrweb/types.ts
@@ -1,5 +1,3 @@
-// tslint:disable-next-line: no-implicit-dependencies
-import { FontFaceDescriptors } from 'css-font-loading-module'
 import { idNodeMap, INode, MaskInputOptions, serializedNodeWithId, SlimDOMOptions } from 'rrweb-snapshot'
 
 export enum EventType {
@@ -337,6 +335,15 @@ export interface CanvasMutationParam {
   setter?: true
 }
 
+export interface FontFaceDescriptors {
+  style?: string
+  weight?: string
+  stretch?: string
+  unicodeRange?: string
+  variant?: string
+  featureSettings?: string
+}
+
 export interface FontParam {
   family: string
   fontSource: string
diff --git a/test/app/yarn.lock b/test/app/yarn.lock
index 8080a1a584..a12a463411 100644
--- a/test/app/yarn.lock
+++ b/test/app/yarn.lock
@@ -24,7 +24,6 @@
   dependencies:
     "@datadog/browser-core" "2.1.2"
     "@datadog/browser-rum-core" "2.1.2"
-    "@types/css-font-loading-module" "0.0.4"
     rrweb-snapshot "1.0.1"
     tslib "^1.10.0"
 
@@ -35,11 +34,6 @@
     "@datadog/browser-rum-core" "2.1.2"
     tslib "^1.10.0"
 
-"@types/css-font-loading-module@0.0.4":
-  version "0.0.4"
-  resolved "https://registry.yarnpkg.com/@types/css-font-loading-module/-/css-font-loading-module-0.0.4.tgz#94a835e27d1af444c65cba88523533c174463d64"
-  integrity sha512-ENdXf7MW4m9HeDojB2Ukbi7lYMIuQNBHVf98dbzaiG4EEJREBd6oleVAjrLRCrp7dm6CK1mmdmU9tcgF61acbw==
-
 "@webassemblyjs/ast@1.8.5":
   version "1.8.5"
   resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359"
diff --git a/yarn.lock b/yarn.lock
index 574b361536..5c5522e3de 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1076,11 +1076,6 @@
   dependencies:
     "@types/express" "*"
 
-"@types/css-font-loading-module@0.0.4":
-  version "0.0.4"
-  resolved "https://registry.yarnpkg.com/@types/css-font-loading-module/-/css-font-loading-module-0.0.4.tgz#94a835e27d1af444c65cba88523533c174463d64"
-  integrity sha512-ENdXf7MW4m9HeDojB2Ukbi7lYMIuQNBHVf98dbzaiG4EEJREBd6oleVAjrLRCrp7dm6CK1mmdmU9tcgF61acbw==
-
 "@types/events@*":
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7"
From 81373d024be110dbc1c32246b77c4c647f141952 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Mon, 11 Jan 2021 11:43:23 +0100
Subject: [PATCH 09/43] fix merge typo that occurred while resolving merge
 conflicts

---
 packages/rum-recorder/package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/rum-recorder/package.json b/packages/rum-recorder/package.json
index 71d3f7a8fe..195a93a3d6 100644
--- a/packages/rum-recorder/package.json
+++
b/packages/rum-recorder/package.json @@ -13,7 +13,7 @@ }, "dependencies": { "@datadog/browser-core": "2.2.0", - "@datadog/browser-rum": "2.2.0", + "@datadog/browser-rum-core": "2.2.0", "rrweb-snapshot": "1.0.1", "tslib": "^1.10.0" }, From 8a76146c6a991a46d630886343b919aeda3339d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt?= Date: Mon, 11 Jan 2021 12:28:45 +0100 Subject: [PATCH 10/43] =?UTF-8?q?=F0=9F=91=8C=20fix=20comments=20typos=20a?= =?UTF-8?q?nd=20adjust=20test=20descriptions?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Bastien Caudan --- .../rum-recorder/src/domain/deflateWorker.spec.ts | 4 ++-- .../src/domain/segmentCollection.spec.ts | 12 ++++++------ .../rum-recorder/src/domain/segmentCollection.ts | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/rum-recorder/src/domain/deflateWorker.spec.ts b/packages/rum-recorder/src/domain/deflateWorker.spec.ts index 9876675a87..b934addca8 100644 --- a/packages/rum-recorder/src/domain/deflateWorker.spec.ts +++ b/packages/rum-recorder/src/domain/deflateWorker.spec.ts @@ -1,7 +1,7 @@ import { createDeflateWorker, DeflateWorker, DeflateWorkerResponse } from './deflateWorker' describe('deflateWorker', () => { - it('responds with the deflated size when pushing some data', (done) => { + it('buffers data and responds with the buffer deflated size when writing', (done) => { const deflateWorker = createDeflateWorker() listen(deflateWorker, 3, (events) => { expect(events).toEqual([ @@ -32,7 +32,7 @@ describe('deflateWorker', () => { deflateWorker.postMessage({ id: 1, action: 'complete' }) }) - it('pushes the remaining data specified by "complete"', (done) => { + it('writes the remaining data specified by "complete"', (done) => { const deflateWorker = createDeflateWorker() listen(deflateWorker, 1, (events) => { expect(events).toEqual([ diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index fbdc5d84aa..525614ac11 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -35,7 +35,7 @@ describe('startSegmentCollection', () => { jasmine.clock().uninstall() }) - it('immediatly starts a new segment', () => { + it('immediately starts a new segment', () => { const { addRecord } = startSegmentCollection(() => CONTEXT, writer) expect(writer.output).toBe('') addRecord(RECORD) @@ -43,14 +43,14 @@ describe('startSegmentCollection', () => { expect(writer.completed.length).toBe(0) }) - it('writes a segment when renewing it', () => { + it('completes a segment when renewing it', () => { const { renewSegment, addRecord } = startSegmentCollection(() => CONTEXT, writer) addRecord(RECORD) renewSegment('before_unload') expect(writer.completed.length).toBe(1) }) - it('writes a segment after MAX_SEGMENT_DURATION', () => { + it('completes a segment after MAX_SEGMENT_DURATION', () => { jasmine.clock().install() const { addRecord } = startSegmentCollection(() => CONTEXT, writer) addRecord(RECORD) @@ -58,7 +58,7 @@ describe('startSegmentCollection', () => { expect(writer.completed.length).toBe(1) }) - it('does not write a segment after MAX_SEGMENT_DURATION if a segment has been created in the meantime', () => { + it('does not complete a segment after MAX_SEGMENT_DURATION if a segment has been created in the meantime', () => { jasmine.clock().install() const { renewSegment, addRecord } = startSegmentCollection(() => 
CONTEXT, writer) addRecord(RECORD) @@ -177,13 +177,13 @@ describe('RecordsIncrementalState', () => { expect(state.hasFullSnapshot).toBe(false) }) - it("doesn't set hasFullSnapshot to true if a FullSnapshot is not directly preceeded by a Meta record", () => { + it("doesn't set hasFullSnapshot to true if a FullSnapshot is not directly preceded by a Meta record", () => { const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }) expect(state.hasFullSnapshot).toBe(false) }) - it('sets hasFullSnapshot to true if a FullSnapshot is preceeded by a Meta record', () => { + it('sets hasFullSnapshot to true if a FullSnapshot is preceded by a Meta record', () => { const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) state.addRecord({ type: RecordType.Meta, timestamp: 10, data: {} as any }) state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index ab505eabc4..c548a50c5f 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -128,7 +128,7 @@ export class RecordsIncrementalState { this.end = Math.max(this.end, end) if (!this.hasFullSnapshot) { // Note: to be exploitable by the replay, this field should be true only if the FullSnapshot - // is preceeded by a Meta record. Because rrweb is emiting both records synchronously and + // is preceded by a Meta record. Because rrweb is emitting both records synchronously and // contiguously, it should always be the case, but check it nonetheless. 
this.hasFullSnapshot = record.type === RecordType.FullSnapshot && this.lastRecordType === RecordType.Meta
     }
 
From e72bb53696e18e51091d6df60b3187554058308d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Mon, 11 Jan 2021 14:23:43 +0100
Subject: [PATCH 11/43] =?UTF-8?q?=E2=9C=85=20fix=20`isIntakeUrl`=20tests?=
 =?UTF-8?q?=20broken=20by=20merging=20master?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 packages/core/src/domain/configuration.spec.ts | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/packages/core/src/domain/configuration.spec.ts b/packages/core/src/domain/configuration.spec.ts
index 842db8ab48..be1fe9e864 100644
--- a/packages/core/src/domain/configuration.spec.ts
+++ b/packages/core/src/domain/configuration.spec.ts
@@ -143,7 +143,7 @@
     expect(configuration.isIntakeUrl('https://public-trace-http-intake.logs.foo.datadoghq.com/v1/input/xxx')).toBe(
       true
     )
-    expect(configuration.isIntakeUrl('https://session-replay.browser-intake-us3-datadoghq.com/v1/input/xxx')).toBe(
+    expect(configuration.isIntakeUrl('https://session-replay.browser-intake-foo-datadoghq.com/v1/input/xxx')).toBe(
       true
     )
   })
@@ -156,6 +156,9 @@
     expect(configuration.isIntakeUrl('https://rum.browser-intake-foo-datadoghq.com/v1/input/xxx')).toBe(true)
     expect(configuration.isIntakeUrl('https://logs.browser-intake-foo-datadoghq.com/v1/input/xxx')).toBe(true)
     expect(configuration.isIntakeUrl('https://trace.browser-intake-foo-datadoghq.com/v1/input/xxx')).toBe(true)
+    expect(configuration.isIntakeUrl('https://session-replay.browser-intake-foo-datadoghq.com/v1/input/xxx')).toBe(
+      true
+    )
   })
 
   it('should force alternate intake for us3', () => {
@@ -166,6 +169,9 @@
     expect(configuration.isIntakeUrl('https://rum.browser-intake-us3-datadoghq.com/v1/input/xxx')).toBe(true)
     expect(configuration.isIntakeUrl('https://logs.browser-intake-us3-datadoghq.com/v1/input/xxx')).toBe(true)
     expect(configuration.isIntakeUrl('https://trace.browser-intake-us3-datadoghq.com/v1/input/xxx')).toBe(true)
+    expect(configuration.isIntakeUrl('https://session-replay.browser-intake-us3-datadoghq.com/v1/input/xxx')).toBe(
+      true
+    )
   })
 
   it('should detect proxy intake request', () => {
From a296e8fb6ef5a19bb06362a8c35b10e896f307bf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Mon, 11 Jan 2021 14:24:56 +0100
Subject: [PATCH 12/43] =?UTF-8?q?=F0=9F=91=B7=20transpile=20'deflateWorker?=
 =?UTF-8?q?.js'?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Not sure why I missed this, but .js files were not transpiled in the webpack
bundle simply because ts-loader was not applied.
---
 .../rum-recorder/src/domain/deflateWorker.js  | 23 ++++++++-----------
 webpack.base.js                               |  2 +-
 2 files changed, 10 insertions(+), 15 deletions(-)

diff --git a/packages/rum-recorder/src/domain/deflateWorker.js b/packages/rum-recorder/src/domain/deflateWorker.js
index 466da2a6b5..68c1e6cf75 100644
--- a/packages/rum-recorder/src/domain/deflateWorker.js
+++ b/packages/rum-recorder/src/domain/deflateWorker.js
@@ -1,42 +1,37 @@
-// Warning: this file is *not* transpiled. It should use a syntax supported by our supported
-// browsers.
- let workerURL export function createDeflateWorker() { // Lazily compute the worker URL to allow importing the SDK in NodeJS if (!workerURL) { - workerURL = URL.createObjectURL(new Blob(['(' + workerCodeFn + ')(self)'])) + workerURL = URL.createObjectURL(new Blob([`(${workerCodeFn})(self)`])) } return new Worker(workerURL) } function workerCodeFn() { - const pako = makePakoDeflate() + const { Deflate, constants } = makePakoDeflate() - let deflate = new pako.Deflate() - self.addEventListener('message', function (event) { + let deflate = new Deflate() + self.addEventListener('message', (event) => { const data = event.data switch (data.action) { case 'write': - deflate.push(data.data, pako.constants.Z_SYNC_FLUSH) + deflate.push(data.data, constants.Z_SYNC_FLUSH) self.postMessage({ id: data.id, - size: deflate.chunks.reduce(function (total, chunk) { - return total + chunk.length - }, 0), + size: deflate.chunks.reduce((total, chunk) => total + chunk.length, 0), }) break case 'complete': if (data.data) { - deflate.push(data.data, pako.constants.Z_SYNC_FLUSH) + deflate.push(data.data, constants.Z_SYNC_FLUSH) } - deflate.push('', pako.constants.Z_FINISH) + deflate.push('', constants.Z_FINISH) self.postMessage({ id: data.id, result: deflate.result, }) - deflate = new pako.Deflate() + deflate = new Deflate() break } }) diff --git a/webpack.base.js b/webpack.base.js index f5ffbdfa3f..1c44faca4e 100644 --- a/webpack.base.js +++ b/webpack.base.js @@ -17,7 +17,7 @@ module.exports = ({ entry, mode, filename, datacenter }) => ({ module: { rules: [ { - test: /\.ts$/, + test: /\.(ts|js)$/, loader: 'ts-loader', exclude: /node_modules/, options: { From f3d0b4e571ef2d5f2a8cfe3ce6c567b4cdfba5d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Mon, 11 Jan 2021 14:31:41 +0100 Subject: [PATCH 13/43] =?UTF-8?q?=E2=9C=85=F0=9F=91=8C=20add=20a=20Deflate?= =?UTF-8?q?Worker=20test=20where=20two=20deflates=20are=20completed?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/domain/deflateWorker.spec.ts | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/packages/rum-recorder/src/domain/deflateWorker.spec.ts b/packages/rum-recorder/src/domain/deflateWorker.spec.ts index b934addca8..028b8e3bcb 100644 --- a/packages/rum-recorder/src/domain/deflateWorker.spec.ts +++ b/packages/rum-recorder/src/domain/deflateWorker.spec.ts @@ -46,6 +46,35 @@ describe('deflateWorker', () => { deflateWorker.postMessage({ id: 0, action: 'complete', data: 'foo' }) }) + it('completes several deflates one after the other', (done) => { + const deflateWorker = createDeflateWorker() + listen(deflateWorker, 4, (events) => { + expect(events).toEqual([ + { + id: 0, + size: 11, + }, + { + id: 1, + result: new Uint8Array([120, 156, 74, 203, 207, 7, 0, 0, 0, 255, 255, 3, 0, 2, 130, 1, 69]), + }, + { + id: 2, + size: 11, + }, + { + id: 3, + result: new Uint8Array([120, 156, 74, 74, 44, 2, 0, 0, 0, 255, 255, 3, 0, 2, 93, 1, 54]), + }, + ]) + done() + }) + deflateWorker.postMessage({ id: 0, action: 'write', data: 'foo' }) + deflateWorker.postMessage({ id: 1, action: 'complete' }) + deflateWorker.postMessage({ id: 2, action: 'write', data: 'bar' }) + deflateWorker.postMessage({ id: 3, action: 'complete' }) + }) + function listen( deflateWorker: DeflateWorker, expectedResponseCount: number, From f8516ef80935646b051e801a773747ef051ceed2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Mon, 11 Jan 2021 14:51:14 +0100 Subject: [PATCH 14/43] 
=?UTF-8?q?=F0=9F=91=8C=20monitor=20setTimeout=20cal?= =?UTF-8?q?lback?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../rum-recorder/src/domain/segmentCollection.ts | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index c548a50c5f..7a10ebf45b 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -1,3 +1,4 @@ +import { monitor } from '@datadog/browser-core' import { CreationReason, MouseMoveRecord, Record, RecordType, SegmentContext, SegmentMeta } from '../types' import { getRecordStartEnd, groupMouseMoves, isMouseMoveRecord } from './recordUtils' @@ -28,11 +29,14 @@ export function startSegmentCollection(getSegmentContext: () => SegmentContext | const localSegment = (currentSegment = new Segment(writer, context, creationReason)) // Replace the newly created segment after MAX_SEGMENT_DURATION - setTimeout(() => { - if (currentSegment === localSegment) { - renewSegment('max_duration') - } - }, MAX_SEGMENT_DURATION) + setTimeout( + monitor(() => { + if (currentSegment === localSegment) { + renewSegment('max_duration') + } + }), + MAX_SEGMENT_DURATION + ) } return { From a239c1679abac9250b933b2f4826d95931440948 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Mon, 11 Jan 2021 15:38:14 +0100 Subject: [PATCH 15/43] =?UTF-8?q?=E2=9C=85=F0=9F=91=8C=20replace=20'flushe?= =?UTF-8?q?s'=20by=20'writes'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/rum-recorder/src/domain/segmentCollection.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index 525614ac11..f0e18b73fd 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -132,7 +132,7 @@ describe('Segment', () => { ]) }) - it('flushes the mousemove records batch after a max number of records', () => { + it('writes the mousemove records batch after a max number of records', () => { const writer = new StringWriter() const segment = new Segment(writer, CONTEXT, 'init') for (let i = 0; i < MAX_MOUSE_MOVE_BATCH + 2; i += 1) { From 272fb57c4c4d77a9f31f060d11807cac2f3bcaef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Mon, 11 Jan 2021 17:03:48 +0100 Subject: [PATCH 16/43] =?UTF-8?q?=F0=9F=9A=9A=F0=9F=91=8C=20rename=20segme?= =?UTF-8?q?nt.finish()=20to=20segment.complete()?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../rum-recorder/src/domain/segmentCollection.spec.ts | 10 +++++----- packages/rum-recorder/src/domain/segmentCollection.ts | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index f0e18b73fd..b6ac4c40a1 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -84,7 +84,7 @@ describe('Segment', () => { segment.addRecord({ type: RecordType.Load, timestamp: 10, data: {} }) expect(writer.output).toEqual('{"records":[{"type":1,"timestamp":10,"data":{}}') 
expect(writer.completed).toEqual([]) - segment.finish() + segment.complete() expect(writer.completed).toEqual([ { @@ -121,7 +121,7 @@ describe('Segment', () => { segment.addRecord(makeMouseMoveRecord(10, [{ id: 0 }])) segment.addRecord(makeMouseMoveRecord(20, [{ id: 1 }])) segment.addRecord(makeMouseMoveRecord(30, [{ id: 2 }])) - segment.finish() + segment.complete() expect(writer.completed[0].segment.records).toEqual([ makeMouseMoveRecord(30, [ @@ -138,7 +138,7 @@ describe('Segment', () => { for (let i = 0; i < MAX_MOUSE_MOVE_BATCH + 2; i += 1) { segment.addRecord(makeMouseMoveRecord(10, [{ id: 0 }])) } - segment.finish() + segment.complete() const records = writer.completed[0].segment.records as MouseMoveRecord[] expect(records.length).toBe(2) @@ -146,10 +146,10 @@ describe('Segment', () => { expect(records[1].data.positions.length).toBe(2) }) - it('ignores the "finish" call if no record have been added', () => { + it('ignores the "complete" call if no record have been added', () => { const writer = new StringWriter() const segment = new Segment(writer, CONTEXT, 'init') - segment.finish() + segment.complete() expect(writer.completed).toEqual([]) }) }) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index 7a10ebf45b..7ab4b53a96 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -17,7 +17,7 @@ export function startSegmentCollection(getSegmentContext: () => SegmentContext | function renewSegment(creationReason: CreationReason) { if (currentSegment) { - currentSegment.finish() + currentSegment.complete() currentSegment = undefined } @@ -71,7 +71,7 @@ export class Segment { } } - finish() { + complete() { this.writeMouseMoves() if (!this.state) { From b7a7fdee0cf132f8663133ce20370641017fc08b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Mon, 11 Jan 2021 17:11:47 +0100 Subject: [PATCH 17/43] =?UTF-8?q?=F0=9F=93=9D=20add=20a=20bit=20of=20docum?= =?UTF-8?q?entation=20on=20segments=20collection=20and=20mouse=20move=20ba?= =?UTF-8?q?tching?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/domain/segmentCollection.ts | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index 7ab4b53a96..8c56cf3b77 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -10,6 +10,30 @@ export interface SegmentWriter { complete(data: string, meta: SegmentMeta): void } +// Segments are the main data structure for session replays. They contain context information used +// for indexing or UI needs, and a list of records (RRWeb 'events', renamed to avoid confusing +// namings). They are stored without any processing from the intake, and fetched one after the +// other while a session is being replayed. Their encoding (deflate) are carefully crafted to allow +// concatenating multiple segments together. Their approximative size limits how often they are +// created have an impact on the replay. +// +// When the recording starts, a segment is initially created. 
The segment is renewed (finalized,
+// sent and replaced by a new one) based on various events (non-exhaustive list):
+//
+// * the page visibility changes or the page is about to unload
+// * the segment duration reaches a limit
+// * the encoded segment size reaches a limit
+// * ...
+//
+// A segment cannot be created without its context. If the RUM session ends and no session id is
+// available when creating a new segment, records will be ignored until the session is renewed and
+// a new session id is available.
+//
+// Empty segments (segments with no record) aren't useful and should be ignored.
+//
+// To help investigate session replay issues, each segment is created with a "creation reason",
+// indicating why the segment has been created.
+
 export function startSegmentCollection(getSegmentContext: () => SegmentContext | undefined, writer: SegmentWriter) {
   let currentSegment: Segment | undefined
 
@@ -53,6 +77,15 @@
 
 export class Segment {
   private state?: RecordsIncrementalState
+
+  // Mouse positions are being generated quite quickly (up to 1 every 50ms by default). Using a
+  // separate record for each position can add a significant overhead to the segment encoded size.
+  // To avoid this, we batch Mouse Move records coming from RRWeb and regroup them in a single
+  // record.
+  //
+  // Note: the original RRWeb library does this internally, without exposing a way to control this.
+  // To make sure mouse positions are correctly stored inside the Segment active when they occured,
+  // we removed RRWeb batching strategy and recreated it at the Segment level.
   private batchedMouseMove: MouseMoveRecord[] = []
 
   constructor(
From 5c7c4fe47e0ac7eb6d11b2700a26004722c43c7b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Mon, 11 Jan 2021 17:23:42 +0100
Subject: [PATCH 18/43] =?UTF-8?q?=F0=9F=9A=9A=F0=9F=91=8C=20move=20record?=
 =?UTF-8?q?=20utility=20functions=20directly=20in=20segment=20collection?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../src/domain/recordUtils.spec.ts            | 67 -------
 .../rum-recorder/src/domain/recordUtils.ts    | 32 ---------
 .../src/domain/segmentCollection.spec.ts      | 61 +++++++
 .../src/domain/segmentCollection.ts           | 42 +++++-
 4 files changed, 101 insertions(+), 101 deletions(-)
 delete mode 100644 packages/rum-recorder/src/domain/recordUtils.spec.ts
 delete mode 100644 packages/rum-recorder/src/domain/recordUtils.ts

diff --git a/packages/rum-recorder/src/domain/recordUtils.spec.ts b/packages/rum-recorder/src/domain/recordUtils.spec.ts
deleted file mode 100644
index 903d4d3055..0000000000
--- a/packages/rum-recorder/src/domain/recordUtils.spec.ts
+++ /dev/null
@@ -1,67 +0,0 @@
-import { makeMouseMoveRecord } from '../../test/utils'
-import { IncrementalSource, Record, RecordType } from '../types'
-import { getRecordStartEnd, groupMouseMoves, isMouseMoveRecord } from './recordUtils'
-
-const domContentLoadedRecord: Record = {
-  data: {},
-  timestamp: 100,
-  type: RecordType.DomContentLoaded,
-}
-
-const inputRecord: Record = {
-  data: {
-    id: 123,
-    isChecked: true,
-    source: IncrementalSource.Input,
-    text: '123',
-  },
-  timestamp: 123,
-  type: RecordType.IncrementalSnapshot,
-}
-
-describe('isMouseMoveRecord', () => {
-  it('returns false for non-MouseMove records', () => {
-    expect(isMouseMoveRecord(domContentLoadedRecord)).toBe(false)
-    expect(isMouseMoveRecord(inputRecord)).toBe(false)
-  })
-
-
it('returns true for MouseMove records', () => { - expect(isMouseMoveRecord(makeMouseMoveRecord(100, []))).toBe(true) - }) -}) - -describe('groupMouseMoves', () => { - it('returns the same event if a single event is provided', () => { - const event = makeMouseMoveRecord(10, [{ id: 0 }]) - expect(groupMouseMoves([event])).toEqual(event) - }) - - it('groups mouse events in a single mouse event', () => { - expect( - groupMouseMoves([ - makeMouseMoveRecord(10, [{ id: 0 }]), - makeMouseMoveRecord(14, [{ id: 1 }]), - makeMouseMoveRecord(20, [{ id: 2 }]), - ]) - ).toEqual( - makeMouseMoveRecord(20, [ - { id: 0, timeOffset: -10 }, - { id: 1, timeOffset: -6 }, - { id: 2, timeOffset: 0 }, - ]) - ) - }) -}) - -describe('getRecordStartEnd', () => { - it("returns the timestamp as 'start' and 'end' for non-MouseMove records", () => { - expect(getRecordStartEnd(domContentLoadedRecord)).toEqual([100, 100]) - expect(getRecordStartEnd(inputRecord)).toEqual([123, 123]) - }) - - it("returns the time from the first mouse position as 'start' for MouseMove records", () => { - expect( - getRecordStartEnd(makeMouseMoveRecord(150, [{ timeOffset: -50 }, { timeOffset: -30 }, { timeOffset: 0 }])) - ).toEqual([100, 150]) - }) -}) diff --git a/packages/rum-recorder/src/domain/recordUtils.ts b/packages/rum-recorder/src/domain/recordUtils.ts deleted file mode 100644 index 6ee403346c..0000000000 --- a/packages/rum-recorder/src/domain/recordUtils.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { IncrementalSource, MouseMoveRecord, Record, RecordType } from '../types' - -export function isMouseMoveRecord(record: Record): record is MouseMoveRecord { - return ( - record.type === RecordType.IncrementalSnapshot && - (record.data.source === IncrementalSource.MouseMove || record.data.source === IncrementalSource.TouchMove) - ) -} - -export function groupMouseMoves(records: MouseMoveRecord[]): MouseMoveRecord { - const mostRecentTimestamp = records[records.length - 1]!.timestamp - return { - data: { - // Because we disabled mouse move batching from RRWeb, there will be only one position in each - // record, and its timeOffset will be 0. 
- positions: records.map(({ timestamp, data: { positions: [position] } }) => ({ - ...position, - timeOffset: timestamp - mostRecentTimestamp, - })), - source: records[0]!.data.source, - }, - timestamp: mostRecentTimestamp, - type: RecordType.IncrementalSnapshot, - } -} - -export function getRecordStartEnd(record: Record): [number, number] { - if (isMouseMoveRecord(record)) { - return [record.timestamp + record.data.positions[0]!.timeOffset, record.timestamp] - } - return [record.timestamp, record.timestamp] -} diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index b6ac4c40a1..9a8eb20cc0 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -1,6 +1,9 @@ import { makeMouseMoveRecord } from '../../test/utils' import { IncrementalSource, MouseMoveRecord, Record, RecordType, SegmentContext, SegmentMeta } from '../types' import { + getRecordStartEnd, + groupMouseMoves, + isMouseMoveRecord, MAX_MOUSE_MOVE_BATCH, MAX_SEGMENT_DURATION, RecordsIncrementalState, @@ -24,6 +27,17 @@ class StringWriter implements SegmentWriter { const CONTEXT: SegmentContext = { application: { id: 'a' }, view: { id: 'b' }, session: { id: 'c' } } const RECORD: Record = { type: RecordType.Load, timestamp: 10, data: {} } +const INPUT_RECORD: Record = { + data: { + id: 123, + isChecked: true, + source: IncrementalSource.Input, + text: '123', + }, + timestamp: 123, + type: RecordType.IncrementalSnapshot, +} + describe('startSegmentCollection', () => { let writer: StringWriter @@ -209,3 +223,50 @@ describe('RecordsIncrementalState', () => { expect(state.end).toBe(11) }) }) + +describe('isMouseMoveRecord', () => { + it('returns false for non-MouseMove records', () => { + expect(isMouseMoveRecord(RECORD)).toBe(false) + expect(isMouseMoveRecord(INPUT_RECORD)).toBe(false) + }) + + it('returns true for MouseMove records', () => { + expect(isMouseMoveRecord(makeMouseMoveRecord(100, []))).toBe(true) + }) +}) + +describe('groupMouseMoves', () => { + it('returns the same event if a single event is provided', () => { + const event = makeMouseMoveRecord(10, [{ id: 0 }]) + expect(groupMouseMoves([event])).toEqual(event) + }) + + it('groups mouse events in a single mouse event', () => { + expect( + groupMouseMoves([ + makeMouseMoveRecord(10, [{ id: 0 }]), + makeMouseMoveRecord(14, [{ id: 1 }]), + makeMouseMoveRecord(20, [{ id: 2 }]), + ]) + ).toEqual( + makeMouseMoveRecord(20, [ + { id: 0, timeOffset: -10 }, + { id: 1, timeOffset: -6 }, + { id: 2, timeOffset: 0 }, + ]) + ) + }) +}) + +describe('getRecordStartEnd', () => { + it("returns the timestamp as 'start' and 'end' for non-MouseMove records", () => { + expect(getRecordStartEnd(RECORD)).toEqual([10, 10]) + expect(getRecordStartEnd(INPUT_RECORD)).toEqual([123, 123]) + }) + + it("returns the time from the first mouse position as 'start' for MouseMove records", () => { + expect( + getRecordStartEnd(makeMouseMoveRecord(150, [{ timeOffset: -50 }, { timeOffset: -30 }, { timeOffset: 0 }])) + ).toEqual([100, 150]) + }) +}) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index 8c56cf3b77..4daf0a2531 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -1,6 +1,13 @@ import { monitor } from '@datadog/browser-core' -import { CreationReason, MouseMoveRecord, Record, RecordType, 
SegmentContext, SegmentMeta } from '../types' -import { getRecordStartEnd, groupMouseMoves, isMouseMoveRecord } from './recordUtils' +import { + CreationReason, + IncrementalSource, + MouseMoveRecord, + Record, + RecordType, + SegmentContext, + SegmentMeta, +} from '../types' export const MAX_SEGMENT_DURATION = 30_000 export const MAX_MOUSE_MOVE_BATCH = 100 @@ -173,3 +180,34 @@ export class RecordsIncrementalState { this.recordsCount += 1 } } + +export function isMouseMoveRecord(record: Record): record is MouseMoveRecord { + return ( + record.type === RecordType.IncrementalSnapshot && + (record.data.source === IncrementalSource.MouseMove || record.data.source === IncrementalSource.TouchMove) + ) +} + +export function groupMouseMoves(records: MouseMoveRecord[]): MouseMoveRecord { + const mostRecentTimestamp = records[records.length - 1]!.timestamp + return { + data: { + // Because we disabled mouse move batching from RRWeb, there will be only one position in each + // record, and its timeOffset will be 0. + positions: records.map(({ timestamp, data: { positions: [position] } }) => ({ + ...position, + timeOffset: timestamp - mostRecentTimestamp, + })), + source: records[0]!.data.source, + }, + timestamp: mostRecentTimestamp, + type: RecordType.IncrementalSnapshot, + } +} + +export function getRecordStartEnd(record: Record): [number, number] { + if (isMouseMoveRecord(record)) { + return [record.timestamp + record.data.positions[0]!.timeOffset, record.timestamp] + } + return [record.timestamp, record.timestamp] +} From 6750054df3b40aa4aab51e1da2805c699c77d105 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Tue, 12 Jan 2021 11:07:42 +0100 Subject: [PATCH 19/43] =?UTF-8?q?=F0=9F=91=8C=E2=9C=85=20add=20toFormEntri?= =?UTF-8?q?es=20unit=20tests?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../rum-recorder/src/transport/send.spec.ts | 34 +++++++++++++++++++ packages/rum-recorder/src/transport/send.ts | 2 +- 2 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 packages/rum-recorder/src/transport/send.spec.ts diff --git a/packages/rum-recorder/src/transport/send.spec.ts b/packages/rum-recorder/src/transport/send.spec.ts new file mode 100644 index 0000000000..d9d42f8041 --- /dev/null +++ b/packages/rum-recorder/src/transport/send.spec.ts @@ -0,0 +1,34 @@ +import { toFormEntries } from './send' + +describe('toFormEntries', () => { + let callbackSpy: jasmine.Spy<(key: string, value: string) => void> + + beforeEach(() => { + callbackSpy = jasmine.createSpy() + }) + + it('handles top level properties', () => { + toFormEntries({ foo: 'bar', zig: 'zag' }, callbackSpy) + expect(callbackSpy.calls.allArgs()).toEqual([ + ['foo', 'bar'], + ['zig', 'zag'], + ]) + }) + + it('handles nested properties', () => { + toFormEntries({ foo: { bar: 'baz', zig: { zag: 'zug' } } }, callbackSpy) + expect(callbackSpy.calls.allArgs()).toEqual([ + ['foo.bar', 'baz'], + ['foo.zig.zag', 'zug'], + ]) + }) + + it('converts values to string', () => { + // tslint:disable-next-line: no-null-keyword + toFormEntries({ foo: 42, bar: null }, callbackSpy) + expect(callbackSpy.calls.allArgs()).toEqual([ + ['foo', '42'], + ['bar', 'null'], + ]) + }) +}) diff --git a/packages/rum-recorder/src/transport/send.ts b/packages/rum-recorder/src/transport/send.ts index 38a1fa9e9d..2702b64a62 100644 --- a/packages/rum-recorder/src/transport/send.ts +++ b/packages/rum-recorder/src/transport/send.ts @@ -20,7 +20,7 @@ export function send(endpointUrl: string, data: 
Uint8Array, meta: SegmentMeta):
   request.send(formData, data.byteLength)
 }

-function toFormEntries(input: object, onEntry: (key: string, value: string) => void, prefix = '') {
+export function toFormEntries(input: object, onEntry: (key: string, value: string) => void, prefix = '') {
   objectEntries(input as { [key: string]: unknown }).forEach(([key, value]) => {
     if (typeof value === 'object' && value !== null) {
       toFormEntries(value, onEntry, `${prefix}${key}.`)

From ddd26fec179cf3a87b1b8d58472107aeb7eb0e7b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Wed, 13 Jan 2021 11:01:32 +0100
Subject: [PATCH 20/43] =?UTF-8?q?=F0=9F=91=B7=F0=9F=91=8C=20move=20'allowJ?=
 =?UTF-8?q?s'=20from=20tsconfig=20base=20to=20ts-loader=20config?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 tsconfig.base.json | 1 -
 webpack.base.js    | 1 +
 2 files changed, 1 insertion(+), 1 deletion(-)

diff --git a/tsconfig.base.json b/tsconfig.base.json
index 422116cbb0..937bdff875 100644
--- a/tsconfig.base.json
+++ b/tsconfig.base.json
@@ -9,7 +9,6 @@
     "strict": true,
     "target": "es5",
     "sourceMap": true,
-    "allowJs": true,

     "plugins": [
       {
diff --git a/webpack.base.js b/webpack.base.js
index 3a646e2fd0..8d333ce6d3 100644
--- a/webpack.base.js
+++ b/webpack.base.js
@@ -37,6 +37,7 @@ module.exports = ({ entry, mode, filename, datacenter }) => ({
             onlyCompileBundledFiles: true,
             compilerOptions: {
               module: 'es6',
+              allowJs: true,
             },
           },
         },

From 31ef6d2a7dd297376fb290da84d65093a93a8e1c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt?=
Date: Wed, 13 Jan 2021 12:23:09 +0100
Subject: [PATCH 21/43] Update packages/rum-recorder/src/domain/segmentCollection.ts

Co-authored-by: Bastien Caudan
---
 packages/rum-recorder/src/domain/segmentCollection.ts | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts
index 4daf0a2531..70d5d9bab7 100644
--- a/packages/rum-recorder/src/domain/segmentCollection.ts
+++ b/packages/rum-recorder/src/domain/segmentCollection.ts
@@ -17,11 +17,11 @@ export interface SegmentWriter {
   complete(data: string, meta: SegmentMeta): void
 }

-// Segments are the main data structure for session replays. They contain context information used
+// Segments are the main data structure for session replays. They contain context information used
 // for indexing or UI needs, and a list of records (RRWeb 'events', renamed to avoid confusing
-// namings). They are stored without any processing from the intake, and fetched one after the
-// other while a session is being replayed. Their encoding (deflate) are carefully crafted to allow
-// concatenating multiple segments together. Their approximative size limits how often they are
+// namings). They are stored by the intake without any processing, and fetched one after the
+// other while a session is being replayed. Their encoding (deflate) is carefully crafted to allow
+// concatenating multiple segments together. Their approximate size and how often they are
 // created have an impact on the replay.
 //
 // When the recording starts, a segment is initially created. The segment is renewed (finalized,
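The "carefully crafted" encoding mentioned in this comment builds on deflate flush semantics, visible in the worker code from an earlier patch: each 'write' is pushed with Z_SYNC_FLUSH so the compressed chunks emitted so far always form a decodable prefix, and Z_FINISH terminates the segment. A minimal sketch of those semantics with pako (illustrative payloads, not the real record format):

    import * as pako from 'pako'

    const deflate = new pako.Deflate()
    // each 'write' flushes a complete compressed block for the data pushed so far
    deflate.push('{"records":[', pako.constants.Z_SYNC_FLUSH)
    // 'complete' finishes the stream
    deflate.push('{"type":1,"timestamp":10,"data":{}}]}', pako.constants.Z_FINISH)

    // the finished stream inflates back to the concatenated input
    const json = pako.inflate(deflate.result, { to: 'string' })
    // json === '{"records":[{"type":1,"timestamp":10,"data":{}}]}'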
From 85a1bb127d5549a5d12e96c353fda6826cdf20eb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt?=
Date: Wed, 13 Jan 2021 12:23:38 +0100
Subject: [PATCH 22/43] Update packages/rum-recorder/src/domain/segmentCollection.ts

Co-authored-by: Bastien Caudan
---
 packages/rum-recorder/src/domain/segmentCollection.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts
index 70d5d9bab7..edf7f55453 100644
--- a/packages/rum-recorder/src/domain/segmentCollection.ts
+++ b/packages/rum-recorder/src/domain/segmentCollection.ts
@@ -91,7 +91,7 @@ export class Segment {
   // record.
   //
   // Note: the original RRWeb library does this internally, without exposing a way to control this.
-  // To make sure mouse positions are correctly stored inside the Segment active when they occured,
+  // To make sure mouse positions are correctly stored inside the Segment active when they occurred,
   // we removed RRWeb batching strategy and recreated it at the Segment level.
   private batchedMouseMove: MouseMoveRecord[] = []

From 5d169970c5b1f3d73190fb88f5b9893030e8aaf6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Wed, 13 Jan 2021 11:34:51 +0100
Subject: [PATCH 23/43] =?UTF-8?q?=E2=9C=85=F0=9F=91=8C=20improve=20startSe?=
 =?UTF-8?q?gmentCollection=20tests?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Instead of using `writer.completed`, spy on the `segment.complete` method. This allows testing
`startSegmentCollection` without worrying about empty segments, and improves the test ensuring
that segment completion is prevented after MAX_SEGMENT_DURATION when another segment has been
created in the meantime.
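The prototype spy boils down to a few lines; a minimal sketch of the approach, assuming Jasmine and the exported `Segment` class:

    // observe every Segment instance at once, while keeping the real behavior
    const segmentCompleteSpy = spyOn(Segment.prototype, 'complete').and.callThrough()

    // ... exercise startSegmentCollection ...

    expect(segmentCompleteSpy).toHaveBeenCalledTimes(1)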
--- .../src/domain/segmentCollection.spec.ts | 28 ++++++++++--------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index 9a8eb20cc0..c8403b001d 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -38,11 +38,15 @@ const INPUT_RECORD: Record = { type: RecordType.IncrementalSnapshot, } +const BEFORE_MAX_SEGMENT_DURATION = MAX_SEGMENT_DURATION * 0.9 + describe('startSegmentCollection', () => { let writer: StringWriter + let segmentCompleteSpy: jasmine.Spy<() => void> beforeEach(() => { writer = new StringWriter() + segmentCompleteSpy = spyOn(Segment.prototype, 'complete').and.callThrough() }) afterEach(() => { @@ -54,32 +58,30 @@ describe('startSegmentCollection', () => { expect(writer.output).toBe('') addRecord(RECORD) expect(writer.output).toBe('{"records":[{"type":1,"timestamp":10,"data":{}}') - expect(writer.completed.length).toBe(0) + expect(segmentCompleteSpy).not.toHaveBeenCalled() }) it('completes a segment when renewing it', () => { - const { renewSegment, addRecord } = startSegmentCollection(() => CONTEXT, writer) - addRecord(RECORD) + const { renewSegment } = startSegmentCollection(() => CONTEXT, writer) renewSegment('before_unload') - expect(writer.completed.length).toBe(1) + expect(segmentCompleteSpy).toHaveBeenCalledTimes(1) }) it('completes a segment after MAX_SEGMENT_DURATION', () => { jasmine.clock().install() - const { addRecord } = startSegmentCollection(() => CONTEXT, writer) - addRecord(RECORD) + startSegmentCollection(() => CONTEXT, writer) jasmine.clock().tick(MAX_SEGMENT_DURATION) - expect(writer.completed.length).toBe(1) + expect(segmentCompleteSpy).toHaveBeenCalledTimes(1) }) it('does not complete a segment after MAX_SEGMENT_DURATION if a segment has been created in the meantime', () => { jasmine.clock().install() - const { renewSegment, addRecord } = startSegmentCollection(() => CONTEXT, writer) - addRecord(RECORD) + const { renewSegment } = startSegmentCollection(() => CONTEXT, writer) + jasmine.clock().tick(BEFORE_MAX_SEGMENT_DURATION) renewSegment('before_unload') - expect(writer.completed.length).toBe(1) - jasmine.clock().tick(MAX_SEGMENT_DURATION) - expect(writer.completed.length).toBe(1) + expect(segmentCompleteSpy).toHaveBeenCalledTimes(1) + jasmine.clock().tick(BEFORE_MAX_SEGMENT_DURATION) + expect(segmentCompleteSpy).toHaveBeenCalledTimes(1) }) it("ignores calls to addRecord if context can't be get", () => { @@ -87,7 +89,7 @@ describe('startSegmentCollection', () => { addRecord(RECORD) renewSegment('before_unload') expect(writer.output).toBe('') - expect(writer.completed.length).toBe(0) + expect(segmentCompleteSpy).not.toHaveBeenCalled() }) }) From 4bfd755a0c8b9f69b6a3b4b0dff5d7a86993d8f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Wed, 13 Jan 2021 12:13:14 +0100 Subject: [PATCH 24/43] =?UTF-8?q?=E2=9C=85=F0=9F=91=8C=20clear=20the=20seg?= =?UTF-8?q?ment=20expiration=20timeout?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/rum-recorder/src/domain/segmentCollection.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index edf7f55453..a54de9c275 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ 
b/packages/rum-recorder/src/domain/segmentCollection.ts
@@ -43,6 +43,7 @@ export interface SegmentWriter {

 export function startSegmentCollection(getSegmentContext: () => SegmentContext | undefined, writer: SegmentWriter) {
   let currentSegment: Segment | undefined
+  let currentSegmentExpirationTimeoutId: ReturnType<typeof setTimeout>

   renewSegment('init')

@@ -50,6 +51,7 @@ export function startSegmentCollection(getSegmentContext: () => SegmentContext |
     if (currentSegment) {
       currentSegment.complete()
       currentSegment = undefined
+      clearTimeout(currentSegmentExpirationTimeoutId)
     }

     const context = getSegmentContext()
@@ -57,14 +59,12 @@ export function startSegmentCollection(getSegmentContext: () => SegmentContext |
       return
     }

-    const localSegment = (currentSegment = new Segment(writer, context, creationReason))
+    currentSegment = new Segment(writer, context, creationReason)

     // Replace the newly created segment after MAX_SEGMENT_DURATION
-    setTimeout(
+    currentSegmentExpirationTimeoutId = setTimeout(
       monitor(() => {
-        if (currentSegment === localSegment) {
-          renewSegment('max_duration')
-        }
+        renewSegment('max_duration')
       }),
       MAX_SEGMENT_DURATION
     )

From bf2420d6d2f17705c994ba096a5b12300052ffef Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Wed, 13 Jan 2021 12:34:01 +0100
Subject: [PATCH 25/43] =?UTF-8?q?=E2=9C=85=F0=9F=91=8C=20replace=20'thrott?=
 =?UTF-8?q?le'=20with=20a=20simpler=20'setTimeout'?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 packages/rum-recorder/src/boot/recorder.spec.ts | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/packages/rum-recorder/src/boot/recorder.spec.ts b/packages/rum-recorder/src/boot/recorder.spec.ts
index 41d2f887c3..21f7fd5e90 100644
--- a/packages/rum-recorder/src/boot/recorder.spec.ts
+++ b/packages/rum-recorder/src/boot/recorder.spec.ts
@@ -127,10 +127,13 @@ function formDataAsObject(data: FormData) {
 function waitRequests(callback: (requests: Array<{ data: FormData; size: number }>) => void) {
   const requests: Array<{ data: FormData; size: number }> = []
-  // Throttle the callback, so it is called only after the last request being sent
-  const { throttled: throttledCallback } = throttle(() => callback(requests), 300, { leading: false })
+  let isWaiting = false

   spyOn(HttpRequest.prototype, 'send').and.callFake((data: FormData, size) => {
     requests.push({ data, size })
-    throttledCallback()
+    if (!isWaiting) {
+      isWaiting = true
+      // Delay the callback, so it is called only after the last request has been sent
+      setTimeout(() => callback(requests), 300)
+    }
   })
 }

From e4d0272958e22097c94a8b7343871535d824e255 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Wed, 13 Jan 2021 12:36:58 +0100
Subject: [PATCH 26/43] =?UTF-8?q?=F0=9F=91=8C=E2=9C=85=20remove=20internal?=
 =?UTF-8?q?=5F=20exports=20and=20replace=20with=20direct=20import?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 packages/rum-recorder/src/index.ts     | 7 -------
 test/e2e/lib/types/serverEvents.ts     | 2 +-
 test/e2e/scenario/recorder.scenario.ts | 6 +-----
 3 files changed, 2 insertions(+), 13 deletions(-)

diff --git a/packages/rum-recorder/src/index.ts b/packages/rum-recorder/src/index.ts
index d54d9d9fa7..76b3b7832a 100644
--- a/packages/rum-recorder/src/index.ts
+++ b/packages/rum-recorder/src/index.ts
@@ -13,10 +13,3 @@ export {
   RumResourceEvent,
   RumViewEvent,
 } from '@datadog/browser-rum-core'
-
-export {
-  Segment as internal_Segment,
-  CreationReason as
internal_CreationReason, - IncrementalSource as internal_IncrementalSource, - RecordType as internal_RecordType, -} from './types' diff --git a/test/e2e/lib/types/serverEvents.ts b/test/e2e/lib/types/serverEvents.ts index 923072201b..e400871f41 100644 --- a/test/e2e/lib/types/serverEvents.ts +++ b/test/e2e/lib/types/serverEvents.ts @@ -1,5 +1,5 @@ import { RumActionEvent, RumErrorEvent, RumEvent, RumResourceEvent, RumViewEvent } from '@datadog/browser-rum' -import { internal_Segment as Segment } from '@datadog/browser-rum-recorder' +import { Segment } from '../../../../packages/rum-recorder/src/types' export interface ServerInternalMonitoringMessage { message: string diff --git a/test/e2e/scenario/recorder.scenario.ts b/test/e2e/scenario/recorder.scenario.ts index bb81a1f28e..8505a5a039 100644 --- a/test/e2e/scenario/recorder.scenario.ts +++ b/test/e2e/scenario/recorder.scenario.ts @@ -1,8 +1,4 @@ -import { - internal_CreationReason as CreationReason, - internal_IncrementalSource as IncrementalSource, - internal_RecordType as RecordType, -} from '@datadog/browser-rum-recorder' +import { CreationReason, IncrementalSource, RecordType } from '../../../packages/rum-recorder/src/types' import { createTest } from '../lib/framework' import { browserExecute } from '../lib/helpers/browser' From af31724045c2cba688a0d7bf772b0a2ba7ba1384 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Wed, 13 Jan 2021 14:11:11 +0100 Subject: [PATCH 27/43] =?UTF-8?q?=F0=9F=91=8C=F0=9F=8F=97=20improve=20reco?= =?UTF-8?q?rder=20architecture?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * inline 'trackSegmentRenewal' in 'segmentCollection' * move 'getSegmentContext' in 'segmentCollection' * extract 'Segment' in its own file * create workers and writers directly in 'segmentCollection' --- packages/rum-core/src/index.ts | 2 +- .../rum-recorder/src/boot/recorder.spec.ts | 2 +- packages/rum-recorder/src/boot/recorder.ts | 50 +-- .../src/domain/deflateSegmentWriter.spec.ts | 50 +-- .../src/domain/deflateSegmentWriter.ts | 2 +- .../rum-recorder/src/domain/segment.spec.ts | 217 +++++++++++ packages/rum-recorder/src/domain/segment.ts | 146 +++++++ .../src/domain/segmentCollection.spec.ts | 357 +++++++----------- .../src/domain/segmentCollection.ts | 224 ++++------- .../src/domain/trackSegmentRenewal.spec.ts | 50 --- .../src/domain/trackSegmentRenewal.ts | 45 --- packages/rum-recorder/test/utils.ts | 53 +++ 12 files changed, 639 insertions(+), 559 deletions(-) create mode 100644 packages/rum-recorder/src/domain/segment.spec.ts create mode 100644 packages/rum-recorder/src/domain/segment.ts delete mode 100644 packages/rum-recorder/src/domain/trackSegmentRenewal.spec.ts delete mode 100644 packages/rum-recorder/src/domain/trackSegmentRenewal.ts diff --git a/packages/rum-core/src/index.ts b/packages/rum-core/src/index.ts index 1c5977995c..d49d206d8c 100644 --- a/packages/rum-core/src/index.ts +++ b/packages/rum-core/src/index.ts @@ -9,7 +9,7 @@ export { RumResourceEvent, RumLongTaskEvent, } from './rumEvent.types' -export { CommonContext } from './rawRumEvent.types' +export { ViewContext, CommonContext } from './rawRumEvent.types' export { startRum } from './boot/rum' export { LifeCycle, LifeCycleEventType } from './domain/lifeCycle' export { ParentContexts } from './domain/parentContexts' diff --git a/packages/rum-recorder/src/boot/recorder.spec.ts b/packages/rum-recorder/src/boot/recorder.spec.ts index 21f7fd5e90..b2c3689fa8 100644 --- 
a/packages/rum-recorder/src/boot/recorder.spec.ts +++ b/packages/rum-recorder/src/boot/recorder.spec.ts @@ -1,4 +1,4 @@ -import { createNewEvent, HttpRequest, isIE, throttle } from '@datadog/browser-core' +import { createNewEvent, HttpRequest, isIE } from '@datadog/browser-core' import { LifeCycle, LifeCycleEventType } from '@datadog/browser-rum-core' import { setup, TestSetupBuilder } from '../../../rum-core/test/specHelper' diff --git a/packages/rum-recorder/src/boot/recorder.ts b/packages/rum-recorder/src/boot/recorder.ts index 136621f5ff..da6bed946f 100644 --- a/packages/rum-recorder/src/boot/recorder.ts +++ b/packages/rum-recorder/src/boot/recorder.ts @@ -1,12 +1,9 @@ import { Configuration } from '@datadog/browser-core' import { LifeCycle, ParentContexts } from '@datadog/browser-rum-core' -import { DeflateSegmentWriter } from '../domain/deflateSegmentWriter' -import { createDeflateWorker } from '../domain/deflateWorker' import { record } from '../domain/rrweb' import { startSegmentCollection } from '../domain/segmentCollection' -import { trackSegmentRenewal } from '../domain/trackSegmentRenewal' -import { send, SEND_BEACON_BYTE_LENGTH_LIMIT } from '../transport/send' +import { send } from '../transport/send' export function startRecording( lifeCycle: LifeCycle, @@ -14,54 +11,21 @@ export function startRecording( configuration: Configuration, parentContexts: ParentContexts ) { - const worker = createDeflateWorker() - - const writer = new DeflateSegmentWriter( - worker, - (size) => { - if (size > SEND_BEACON_BYTE_LENGTH_LIMIT) { - renewSegment('max_size') - } - }, - (data, meta) => { - send(configuration.sessionReplayEndpoint, data, meta) - } - ) - - const { addRecord, renewSegment } = startSegmentCollection( - () => getSegmentContext(applicationId, parentContexts), - writer + const { addRecord, stop: stopSegmentCollection } = startSegmentCollection( + lifeCycle, + applicationId, + parentContexts, + (data, meta) => send(configuration.sessionReplayEndpoint, data, meta) ) - const { stop: stopSegmentRenewal } = trackSegmentRenewal(lifeCycle, renewSegment) - const stopRecording = record({ emit: addRecord, })! 
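   // Note: record() is typed as possibly returning undefined, hence the non-null assertion above.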
return { stop() { - stopSegmentRenewal() stopRecording() - worker.terminate() - }, - } -} - -function getSegmentContext(applicationId: string, parentContexts: ParentContexts) { - const viewContext = parentContexts.findView() - if (!viewContext?.session.id) { - return undefined - } - return { - application: { - id: applicationId, - }, - session: { - id: viewContext.session.id, - }, - view: { - id: viewContext.view.id, + stopSegmentCollection() }, } } diff --git a/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts b/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts index 180ad348b9..7a9c930e1c 100644 --- a/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts +++ b/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts @@ -1,8 +1,8 @@ import { noop } from '@datadog/browser-core' +import { MockWorker } from '../../test/utils' import { SegmentMeta } from '../types' import { DeflateSegmentWriter } from './deflateSegmentWriter' -import { DeflateWorker, DeflateWorkerAction, DeflateWorkerListener } from './deflateWorker' describe('DeflateWriter', () => { let worker: MockWorker @@ -39,51 +39,3 @@ describe('DeflateWriter', () => { expect(onCompletedSpy.calls.allArgs()).toEqual([[jasmine.any(Uint8Array), meta2]]) }) }) - -class MockWorker implements DeflateWorker { - private listener: DeflateWorkerListener | undefined - private messages: DeflateWorkerAction[] = [] - private pendingDataSize = 0 - - addEventListener(_: 'message', listener: DeflateWorkerListener): void { - if (this.listener) { - throw new Error('MockWorker supports only one listener') - } - this.listener = listener - } - - removeEventListener(): void { - this.listener = undefined - } - - postMessage(message: DeflateWorkerAction): void { - this.messages.push(message) - } - - terminate(): void { - // do nothing - } - - process(ignoreMessageWithId?: number): void { - if (this.listener) { - for (const message of this.messages) { - if (ignoreMessageWithId === message.id) { - continue - } - switch (message.action) { - case 'write': - this.pendingDataSize += message.data.length - this.listener({ data: { id: message.id, size: this.pendingDataSize } }) - break - case 'complete': - if (message.data) { - this.pendingDataSize += message.data.length - } - this.listener({ data: { id: message.id, result: new Uint8Array(this.pendingDataSize) } }) - this.pendingDataSize = 0 - } - } - } - this.messages.length = 0 - } -} diff --git a/packages/rum-recorder/src/domain/deflateSegmentWriter.ts b/packages/rum-recorder/src/domain/deflateSegmentWriter.ts index edb077a425..51995dac59 100644 --- a/packages/rum-recorder/src/domain/deflateSegmentWriter.ts +++ b/packages/rum-recorder/src/domain/deflateSegmentWriter.ts @@ -1,6 +1,6 @@ import { SegmentMeta } from '../types' import { DeflateWorker } from './deflateWorker' -import { SegmentWriter } from './segmentCollection' +import { SegmentWriter } from './segment' export class DeflateSegmentWriter implements SegmentWriter { private nextId = 0 diff --git a/packages/rum-recorder/src/domain/segment.spec.ts b/packages/rum-recorder/src/domain/segment.spec.ts new file mode 100644 index 0000000000..06f5e97812 --- /dev/null +++ b/packages/rum-recorder/src/domain/segment.spec.ts @@ -0,0 +1,217 @@ +import { makeMouseMoveRecord } from '../../test/utils' +import { IncrementalSource, MouseMoveRecord, Record, RecordType, SegmentContext, SegmentMeta } from '../types' +import { + getRecordStartEnd, + groupMouseMoves, + isMouseMoveRecord, + MAX_MOUSE_MOVE_BATCH, + RecordsIncrementalState, + 
Segment, + SegmentWriter, +} from './segment' + +class StringWriter implements SegmentWriter { + output = '' + completed: Array<{ meta: SegmentMeta; segment: SegmentMeta & { records: Record[] } }> = [] + write(data: string) { + this.output += data + } + complete(data: string, meta: SegmentMeta) { + this.completed.push({ meta, segment: JSON.parse(this.output + data) as any }) + this.output = '' + } +} + +const CONTEXT: SegmentContext = { application: { id: 'a' }, view: { id: 'b' }, session: { id: 'c' } } +const RECORD: Record = { type: RecordType.Load, timestamp: 10, data: {} } + +const INPUT_RECORD: Record = { + data: { + id: 123, + isChecked: true, + source: IncrementalSource.Input, + text: '123', + }, + timestamp: 123, + type: RecordType.IncrementalSnapshot, +} + +describe('Segment', () => { + it('writes a segment', () => { + const writer = new StringWriter() + const segment = new Segment(writer, CONTEXT, 'init') + segment.addRecord({ type: RecordType.Load, timestamp: 10, data: {} }) + expect(writer.output).toEqual('{"records":[{"type":1,"timestamp":10,"data":{}}') + expect(writer.completed).toEqual([]) + segment.complete() + + expect(writer.completed).toEqual([ + { + meta: { + creation_reason: 'init' as const, + end: 10, + has_full_snapshot: false, + records_count: 1, + start: 10, + ...CONTEXT, + }, + segment: { + creation_reason: 'init' as const, + end: 10, + has_full_snapshot: false, + records: [ + { + data: {}, + timestamp: 10, + type: RecordType.Load, + }, + ], + records_count: 1, + start: 10, + ...CONTEXT, + }, + }, + ]) + }) + + it('batches mousemove records', () => { + const writer = new StringWriter() + const segment = new Segment(writer, CONTEXT, 'init') + segment.addRecord(makeMouseMoveRecord(10, [{ id: 0 }])) + segment.addRecord(makeMouseMoveRecord(20, [{ id: 1 }])) + segment.addRecord(makeMouseMoveRecord(30, [{ id: 2 }])) + segment.complete() + + expect(writer.completed[0].segment.records).toEqual([ + makeMouseMoveRecord(30, [ + { id: 0, timeOffset: -20 }, + { id: 1, timeOffset: -10 }, + { id: 2, timeOffset: 0 }, + ]), + ]) + }) + + it('writes the mousemove records batch after a max number of records', () => { + const writer = new StringWriter() + const segment = new Segment(writer, CONTEXT, 'init') + for (let i = 0; i < MAX_MOUSE_MOVE_BATCH + 2; i += 1) { + segment.addRecord(makeMouseMoveRecord(10, [{ id: 0 }])) + } + segment.complete() + + const records = writer.completed[0].segment.records as MouseMoveRecord[] + expect(records.length).toBe(2) + expect(records[0].data.positions.length).toBe(MAX_MOUSE_MOVE_BATCH) + expect(records[1].data.positions.length).toBe(2) + }) + + it('ignores the "complete" call if no record have been added', () => { + const writer = new StringWriter() + const segment = new Segment(writer, CONTEXT, 'init') + segment.complete() + expect(writer.completed).toEqual([]) + }) +}) + +describe('RecordsIncrementalState', () => { + it('initializes with the data of the first record', () => { + const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) + expect(state.start).toBe(10) + expect(state.end).toBe(10) + expect(state.hasFullSnapshot).toBe(false) + expect(state.recordsCount).toBe(1) + }) + + it('adjusts the state when adding a record', () => { + const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) + state.addRecord({ type: RecordType.DomContentLoaded, timestamp: 15, data: {} }) + expect(state.start).toBe(10) + expect(state.end).toBe(15) + 
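// hasFullSnapshot and recordsCount are updated incrementally as well: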
+    expect(state.hasFullSnapshot).toBe(false)
+    expect(state.recordsCount).toBe(2)
+  })
+
+  it("doesn't set hasFullSnapshot to true if a FullSnapshot is the first record", () => {
+    const state = new RecordsIncrementalState({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any })
+    expect(state.hasFullSnapshot).toBe(false)
+  })
+
+  it("doesn't set hasFullSnapshot to true if a FullSnapshot is not directly preceded by a Meta record", () => {
+    const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} })
+    state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any })
+    expect(state.hasFullSnapshot).toBe(false)
+  })
+
+  it('sets hasFullSnapshot to true if a FullSnapshot is preceded by a Meta record', () => {
+    const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} })
+    state.addRecord({ type: RecordType.Meta, timestamp: 10, data: {} as any })
+    state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any })
+    expect(state.hasFullSnapshot).toBe(true)
+  })
+
+  it("doesn't override hasFullSnapshot to false once it has been set to true", () => {
+    const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} })
+    state.addRecord({ type: RecordType.Meta, timestamp: 10, data: {} as any })
+    state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any })
+    state.addRecord({ type: RecordType.DomContentLoaded, timestamp: 10, data: {} as any })
+    expect(state.hasFullSnapshot).toBe(true)
+  })
+
+  it('uses record start/end for mouse moves', () => {
+    const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} })
+    state.addRecord({
+      data: { source: IncrementalSource.MouseMove, positions: [{ timeOffset: -2, x: 0, y: 0, id: 0 }] },
+      timestamp: 11,
+      type: RecordType.IncrementalSnapshot,
+    })
+    expect(state.start).toBe(9)
+    expect(state.end).toBe(11)
+  })
+})
+
+describe('isMouseMoveRecord', () => {
+  it('returns false for non-MouseMove records', () => {
+    expect(isMouseMoveRecord(RECORD)).toBe(false)
+    expect(isMouseMoveRecord(INPUT_RECORD)).toBe(false)
+  })
+
+  it('returns true for MouseMove records', () => {
+    expect(isMouseMoveRecord(makeMouseMoveRecord(100, []))).toBe(true)
+  })
+})
+
+describe('groupMouseMoves', () => {
+  it('returns the same event if a single event is provided', () => {
+    const event = makeMouseMoveRecord(10, [{ id: 0 }])
+    expect(groupMouseMoves([event])).toEqual(event)
+  })
+
+  it('groups mouse events into a single mouse event', () => {
+    expect(
+      groupMouseMoves([
+        makeMouseMoveRecord(10, [{ id: 0 }]),
+        makeMouseMoveRecord(14, [{ id: 1 }]),
+        makeMouseMoveRecord(20, [{ id: 2 }]),
+      ])
+    ).toEqual(
+      makeMouseMoveRecord(20, [
+        { id: 0, timeOffset: -10 },
+        { id: 1, timeOffset: -6 },
+        { id: 2, timeOffset: 0 },
+      ])
+    )
+  })
+})
+
+describe('getRecordStartEnd', () => {
+  it("returns the timestamp as 'start' and 'end' for non-MouseMove records", () => {
+    expect(getRecordStartEnd(RECORD)).toEqual([10, 10])
+    expect(getRecordStartEnd(INPUT_RECORD)).toEqual([123, 123])
+  })
+
+  it("returns the time from the first mouse position as 'start' for MouseMove records", () => {
+    expect(
+      getRecordStartEnd(makeMouseMoveRecord(150, [{ timeOffset: -50 }, { timeOffset: -30 }, { timeOffset: 0 }]))
+    ).toEqual([100, 150])
+  })
+})
diff --git a/packages/rum-recorder/src/domain/segment.ts b/packages/rum-recorder/src/domain/segment.ts
new file mode 100644
index 0000000000..efaa8095b3
--- /dev/null
+++
b/packages/rum-recorder/src/domain/segment.ts @@ -0,0 +1,146 @@ +import { + CreationReason, + IncrementalSource, + MouseMoveRecord, + Record, + RecordType, + SegmentContext, + SegmentMeta, +} from '../types' + +export interface SegmentWriter { + write(data: string): void + complete(data: string, meta: SegmentMeta): void +} + +export const MAX_MOUSE_MOVE_BATCH = 100 + +export class Segment { + private state?: RecordsIncrementalState + + // Mouse positions are being generated quite quickly (up to 1 every 50ms by default). Using a + // separate record for each position can add a consequent overhead to the segment encoded size. + // To avoid this, we batch Mouse Move records coming from RRWeb and regroup them in a single + // record. + // + // Note: the original RRWeb library does this internally, without exposing a way to control this. + // To make sure mouse positions are correctly stored inside the Segment active when they occurred, + // we removed RRWeb batching strategy and recreated it at the Segment level. + private batchedMouseMove: MouseMoveRecord[] = [] + + constructor( + private writer: SegmentWriter, + readonly context: SegmentContext, + private creationReason: CreationReason + ) {} + + addRecord(record: Record): void { + if (isMouseMoveRecord(record)) { + if (this.batchedMouseMove.push(record) === MAX_MOUSE_MOVE_BATCH) { + this.writeMouseMoves() + } + } else { + this.writeRecord(record) + } + } + + complete() { + this.writeMouseMoves() + + if (!this.state) { + return + } + + const meta: SegmentMeta = { + creation_reason: this.creationReason, + end: this.state.end, + has_full_snapshot: this.state.hasFullSnapshot, + records_count: this.state.recordsCount, + start: this.state.start, + ...this.context, + } + this.writer.complete(`],${JSON.stringify(meta).slice(1)}\n`, meta) + } + + private writeMouseMoves() { + if (this.batchedMouseMove.length === 0) { + return + } + + this.writeRecord(groupMouseMoves(this.batchedMouseMove)) + + this.batchedMouseMove.length = 0 + } + + private writeRecord(record: Record): void { + if (!this.state) { + this.writer.write(`{"records":[${JSON.stringify(record)}`) + this.state = new RecordsIncrementalState(record) + } else { + this.writer.write(`,${JSON.stringify(record)}`) + this.state.addRecord(record) + } + } +} + +export class RecordsIncrementalState { + start: number + end: number + recordsCount: number + hasFullSnapshot: boolean + private lastRecordType: RecordType + + constructor(initialRecord: Record) { + const [start, end] = getRecordStartEnd(initialRecord) + this.start = start + this.end = end + this.lastRecordType = initialRecord.type + this.hasFullSnapshot = false + this.recordsCount = 1 + } + + addRecord(record: Record) { + const [start, end] = getRecordStartEnd(record) + this.start = Math.min(this.start, start) + this.end = Math.max(this.end, end) + if (!this.hasFullSnapshot) { + // Note: to be exploitable by the replay, this field should be true only if the FullSnapshot + // is preceded by a Meta record. Because rrweb is emitting both records synchronously and + // contiguously, it should always be the case, but check it nonetheless. 
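+      // In other words, the flag latches: once true, it stays true for this state's lifetime.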
+ this.hasFullSnapshot = record.type === RecordType.FullSnapshot && this.lastRecordType === RecordType.Meta + } + this.lastRecordType = record.type + this.recordsCount += 1 + } +} + +export function isMouseMoveRecord(record: Record): record is MouseMoveRecord { + return ( + record.type === RecordType.IncrementalSnapshot && + (record.data.source === IncrementalSource.MouseMove || record.data.source === IncrementalSource.TouchMove) + ) +} + +export function groupMouseMoves(records: MouseMoveRecord[]): MouseMoveRecord { + const mostRecentTimestamp = records[records.length - 1]!.timestamp + return { + data: { + // Because we disabled mouse move batching from RRWeb, there will be only one position in each + // record, and its timeOffset will be 0. + positions: records.map(({ timestamp, data: { positions: [position] } }) => ({ + ...position, + timeOffset: timestamp - mostRecentTimestamp, + })), + source: records[0]!.data.source, + }, + timestamp: mostRecentTimestamp, + type: RecordType.IncrementalSnapshot, + } +} + +export function getRecordStartEnd(record: Record): [number, number] { + if (isMouseMoveRecord(record)) { + return [record.timestamp + record.data.positions[0]!.timeOffset, record.timestamp] + } + return [record.timestamp, record.timestamp] +} diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index c8403b001d..04d3d3af18 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -1,274 +1,177 @@ -import { makeMouseMoveRecord } from '../../test/utils' -import { IncrementalSource, MouseMoveRecord, Record, RecordType, SegmentContext, SegmentMeta } from '../types' -import { - getRecordStartEnd, - groupMouseMoves, - isMouseMoveRecord, - MAX_MOUSE_MOVE_BATCH, - MAX_SEGMENT_DURATION, - RecordsIncrementalState, - Segment, - SegmentWriter, - startSegmentCollection, -} from './segmentCollection' +import { createNewEvent, DOM_EVENT, restorePageVisibility, setPageVisibility } from '@datadog/browser-core' +import { LifeCycle, LifeCycleEventType, ParentContexts, ViewContext } from '@datadog/browser-rum-core' +import { Record, RecordType, SegmentContext, SegmentMeta } from '../types' +import { Segment } from './segment' +import { doGetSegmentContext, doStartSegmentCollection, MAX_SEGMENT_DURATION } from './segmentCollection' -class StringWriter implements SegmentWriter { - output = '' - completed: Array<{ meta: SegmentMeta; segment: SegmentMeta & { records: Record[] } }> = [] - write(data: string) { - this.output += data - } - complete(data: string, meta: SegmentMeta) { - this.completed.push({ meta, segment: JSON.parse(this.output + data) as any }) - this.output = '' - } -} +import { MockWorker } from '../../test/utils' +import { SEND_BEACON_BYTE_LENGTH_LIMIT } from '../transport/send' const CONTEXT: SegmentContext = { application: { id: 'a' }, view: { id: 'b' }, session: { id: 'c' } } const RECORD: Record = { type: RecordType.Load, timestamp: 10, data: {} } -const INPUT_RECORD: Record = { - data: { - id: 123, - isChecked: true, - source: IncrementalSource.Input, - text: '123', - }, - timestamp: 123, - type: RecordType.IncrementalSnapshot, -} - const BEFORE_MAX_SEGMENT_DURATION = MAX_SEGMENT_DURATION * 0.9 describe('startSegmentCollection', () => { - let writer: StringWriter - let segmentCompleteSpy: jasmine.Spy<() => void> - - beforeEach(() => { - writer = new StringWriter() - segmentCompleteSpy = spyOn(Segment.prototype, 
'complete').and.callThrough()
-  })
+  let stopSegmentCollection: () => void
+
+  function startSegmentCollection(context: SegmentContext | undefined) {
+    const lifeCycle = new LifeCycle()
+    const worker = new MockWorker()
+    const eventEmitter = document.createElement('div')
+    const sendSpy = jasmine.createSpy<(data: Uint8Array, meta: SegmentMeta) => void>()
+
+    const { stop, addRecord } = doStartSegmentCollection(lifeCycle, () => context, sendSpy, worker, eventEmitter)
+    stopSegmentCollection = stop
+    const segmentCompleteSpy = spyOn(Segment.prototype, 'complete').and.callThrough()
+    return {
+      addRecord,
+      eventEmitter,
+      lifeCycle,
+      segmentCompleteSpy,
+      worker,
+      sendCurrentSegment() {
+        // Make sure the segment is not empty
+        addRecord(RECORD)
+        // Renew segment
+        lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD)
+        worker.process()
+        return sendSpy.calls.mostRecent().args[1]
+      },
+    }
+  }

   afterEach(() => {
     jasmine.clock().uninstall()
+    stopSegmentCollection()
   })

   it('immediately starts a new segment', () => {
-    const { addRecord } = startSegmentCollection(() => CONTEXT, writer)
-    expect(writer.output).toBe('')
+    const { addRecord, worker, segmentCompleteSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT)
+    expect(worker.pendingData).toBe('')
     addRecord(RECORD)
-    expect(writer.output).toBe('{"records":[{"type":1,"timestamp":10,"data":{}}')
+    expect(worker.pendingData).toBe('{"records":[{"type":1,"timestamp":10,"data":{}}')
     expect(segmentCompleteSpy).not.toHaveBeenCalled()
+    expect(sendCurrentSegment().creation_reason).toBe('init')
   })

   it('completes a segment when renewing it', () => {
-    const { renewSegment } = startSegmentCollection(() => CONTEXT, writer)
-    renewSegment('before_unload')
-    expect(segmentCompleteSpy).toHaveBeenCalledTimes(1)
-  })
-
-  it('completes a segment after MAX_SEGMENT_DURATION', () => {
-    jasmine.clock().install()
-    startSegmentCollection(() => CONTEXT, writer)
-    jasmine.clock().tick(MAX_SEGMENT_DURATION)
-    expect(segmentCompleteSpy).toHaveBeenCalledTimes(1)
-  })
-
-  it('does not complete a segment after MAX_SEGMENT_DURATION if a segment has been created in the meantime', () => {
-    jasmine.clock().install()
-    const { renewSegment } = startSegmentCollection(() => CONTEXT, writer)
-    jasmine.clock().tick(BEFORE_MAX_SEGMENT_DURATION)
-    renewSegment('before_unload')
-    expect(segmentCompleteSpy).toHaveBeenCalledTimes(1)
-    jasmine.clock().tick(BEFORE_MAX_SEGMENT_DURATION)
+    const { lifeCycle, segmentCompleteSpy } = startSegmentCollection(CONTEXT)
+    lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD)
     expect(segmentCompleteSpy).toHaveBeenCalledTimes(1)
   })

   it("ignores calls to addRecord if context can't be get", () => {
-    const { renewSegment, addRecord } = startSegmentCollection(() => undefined, writer)
+    const { worker, lifeCycle, addRecord, segmentCompleteSpy } = startSegmentCollection(undefined)
     addRecord(RECORD)
-    renewSegment('before_unload')
-    expect(writer.output).toBe('')
+    lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD)
+    expect(worker.pendingData).toBe('')
     expect(segmentCompleteSpy).not.toHaveBeenCalled()
   })
-})
-
-describe('Segment', () => {
-  it('writes a segment', () => {
-    const writer = new StringWriter()
-    const segment = new Segment(writer, CONTEXT, 'init')
-    segment.addRecord({ type: RecordType.Load, timestamp: 10, data: {} })
-    expect(writer.output).toEqual('{"records":[{"type":1,"timestamp":10,"data":{}}')
-    expect(writer.completed).toEqual([])
-    segment.complete()
-
-    expect(writer.completed).toEqual([
-      {
-        meta: {
-          creation_reason: 'init' as const,
- end: 10, - has_full_snapshot: false, - records_count: 1, - start: 10, - ...CONTEXT, - }, - segment: { - creation_reason: 'init' as const, - end: 10, - has_full_snapshot: false, - records: [ - { - data: {}, - timestamp: 10, - type: RecordType.Load, - }, - ], - records_count: 1, - start: 10, - ...CONTEXT, - }, - }, - ]) - }) - - it('batches mousemove records', () => { - const writer = new StringWriter() - const segment = new Segment(writer, CONTEXT, 'init') - segment.addRecord(makeMouseMoveRecord(10, [{ id: 0 }])) - segment.addRecord(makeMouseMoveRecord(20, [{ id: 1 }])) - segment.addRecord(makeMouseMoveRecord(30, [{ id: 2 }])) - segment.complete() - - expect(writer.completed[0].segment.records).toEqual([ - makeMouseMoveRecord(30, [ - { id: 0, timeOffset: -20 }, - { id: 1, timeOffset: -10 }, - { id: 2, timeOffset: 0 }, - ]), - ]) - }) - - it('writes the mousemove records batch after a max number of records', () => { - const writer = new StringWriter() - const segment = new Segment(writer, CONTEXT, 'init') - for (let i = 0; i < MAX_MOUSE_MOVE_BATCH + 2; i += 1) { - segment.addRecord(makeMouseMoveRecord(10, [{ id: 0 }])) - } - segment.complete() - - const records = writer.completed[0].segment.records as MouseMoveRecord[] - expect(records.length).toBe(2) - expect(records[0].data.positions.length).toBe(MAX_MOUSE_MOVE_BATCH) - expect(records[1].data.positions.length).toBe(2) - }) - it('ignores the "complete" call if no record have been added', () => { - const writer = new StringWriter() - const segment = new Segment(writer, CONTEXT, 'init') - segment.complete() - expect(writer.completed).toEqual([]) - }) -}) + describe('segment renewal', () => { + afterEach(() => { + restorePageVisibility() + }) -describe('RecordsIncrementalState', () => { - it('initializes with the data of the first record', () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - expect(state.start).toBe(10) - expect(state.end).toBe(10) - expect(state.hasFullSnapshot).toBe(false) - expect(state.recordsCount).toBe(1) - }) + it('renews segment on unload', () => { + const { lifeCycle, sendCurrentSegment } = startSegmentCollection(CONTEXT) + lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) + expect(sendCurrentSegment().creation_reason).toBe('before_unload') + }) - it('adjusts the state when adding a record', () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - state.addRecord({ type: RecordType.DomContentLoaded, timestamp: 15, data: {} }) - expect(state.start).toBe(10) - expect(state.end).toBe(15) - expect(state.hasFullSnapshot).toBe(false) - expect(state.recordsCount).toBe(2) - }) + it('renews segment on view change', () => { + const { lifeCycle, sendCurrentSegment } = startSegmentCollection(CONTEXT) + lifeCycle.notify(LifeCycleEventType.VIEW_CREATED, {} as any) + expect(sendCurrentSegment().creation_reason).toBe('view_change') + }) - it("doesn't set hasFullSnapshot to true if a FullSnapshot is the first record", () => { - const state = new RecordsIncrementalState({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }) - expect(state.hasFullSnapshot).toBe(false) - }) + it('renews segment on session renew', () => { + const { lifeCycle, sendCurrentSegment } = startSegmentCollection(CONTEXT) + lifeCycle.notify(LifeCycleEventType.SESSION_RENEWED) + expect(sendCurrentSegment().creation_reason).toBe('session_renewed') + }) - it("doesn't set hasFullSnapshot to true if a FullSnapshot is not directly preceded by a Meta 
record", () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }) - expect(state.hasFullSnapshot).toBe(false) - }) + it('renews segment when the page become hidden', () => { + setPageVisibility('hidden') + const { eventEmitter, sendCurrentSegment } = startSegmentCollection(CONTEXT) + eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE)) + expect(sendCurrentSegment().creation_reason).toBe('visibility_change') + }) - it('sets hasFullSnapshot to true if a FullSnapshot is preceded by a Meta record', () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - state.addRecord({ type: RecordType.Meta, timestamp: 10, data: {} as any }) - state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }) - expect(state.hasFullSnapshot).toBe(true) - }) + it('does not renew segment when the page become visible', () => { + setPageVisibility('visible') + const { eventEmitter, segmentCompleteSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT) + eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE)) + expect(segmentCompleteSpy).not.toHaveBeenCalled() + expect(sendCurrentSegment().creation_reason).not.toBe('visibility_change') + }) - it("doesn't overrides hasFullSnapshot to false once it has been set to true", () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - state.addRecord({ type: RecordType.Meta, timestamp: 10, data: {} as any }) - state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }) - state.addRecord({ type: RecordType.DomContentLoaded, timestamp: 10, data: {} as any }) - expect(state.hasFullSnapshot).toBe(true) - }) + it('renews segment when the current segment deflate size reaches SEND_BEACON_BYTE_LENGTH_LIMIT', () => { + const { worker, addRecord, sendCurrentSegment } = startSegmentCollection(CONTEXT) + worker.deflatedSize = SEND_BEACON_BYTE_LENGTH_LIMIT + addRecord(RECORD) + worker.process() - it('use records start/end for mouse moves', () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - state.addRecord({ - data: { source: IncrementalSource.MouseMove, positions: [{ timeOffset: -2, x: 0, y: 0, id: 0 }] }, - timestamp: 11, - type: RecordType.IncrementalSnapshot, + expect(sendCurrentSegment().creation_reason).toBe('max_size') }) - expect(state.start).toBe(9) - expect(state.end).toBe(11) - }) -}) -describe('isMouseMoveRecord', () => { - it('returns false for non-MouseMove records', () => { - expect(isMouseMoveRecord(RECORD)).toBe(false) - expect(isMouseMoveRecord(INPUT_RECORD)).toBe(false) - }) + it('renews a segment after MAX_SEGMENT_DURATION', () => { + jasmine.clock().install() + const { segmentCompleteSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT) + jasmine.clock().tick(MAX_SEGMENT_DURATION) + expect(segmentCompleteSpy).toHaveBeenCalledTimes(1) + expect(sendCurrentSegment().creation_reason).toBe('max_duration') + }) - it('returns true for MouseMove records', () => { - expect(isMouseMoveRecord(makeMouseMoveRecord(100, []))).toBe(true) + it('does not renew a segment after MAX_SEGMENT_DURATION if a segment has been created in the meantime', () => { + jasmine.clock().install() + const { lifeCycle, segmentCompleteSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT) + 
jasmine.clock().tick(BEFORE_MAX_SEGMENT_DURATION) + lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) + expect(segmentCompleteSpy).toHaveBeenCalledTimes(1) + jasmine.clock().tick(BEFORE_MAX_SEGMENT_DURATION) + expect(segmentCompleteSpy).toHaveBeenCalledTimes(1) + expect(sendCurrentSegment().creation_reason).not.toBe('max_duration') + }) }) }) -describe('groupMouseMoves', () => { - it('returns the same event if a single event is provided', () => { - const event = makeMouseMoveRecord(10, [{ id: 0 }]) - expect(groupMouseMoves([event])).toEqual(event) - }) +describe('getSegmentContext', () => { + const DEFAULT_VIEW_CONTEXT: ViewContext = { + session: { id: '456' }, + view: { id: '123', url: 'http://foo.com', referrer: 'http://bar.com' }, + } - it('groups mouse events in a single mouse event', () => { - expect( - groupMouseMoves([ - makeMouseMoveRecord(10, [{ id: 0 }]), - makeMouseMoveRecord(14, [{ id: 1 }]), - makeMouseMoveRecord(20, [{ id: 2 }]), - ]) - ).toEqual( - makeMouseMoveRecord(20, [ - { id: 0, timeOffset: -10 }, - { id: 1, timeOffset: -6 }, - { id: 2, timeOffset: 0 }, - ]) - ) + it('returns a segment context', () => { + expect(doGetSegmentContext('appid', mockParentContexts(DEFAULT_VIEW_CONTEXT))).toEqual({ + application: { id: 'appid' }, + session: { id: '456' }, + view: { id: '123' }, + }) }) -}) -describe('getRecordStartEnd', () => { - it("returns the timestamp as 'start' and 'end' for non-MouseMove records", () => { - expect(getRecordStartEnd(RECORD)).toEqual([10, 10]) - expect(getRecordStartEnd(INPUT_RECORD)).toEqual([123, 123]) + it('returns undefined if there is no current view', () => { + expect(doGetSegmentContext('appid', mockParentContexts(undefined))).toBeUndefined() }) - it("returns the time from the first mouse position as 'start' for MouseMove records", () => { + it('returns undefined if there is no session id', () => { expect( - getRecordStartEnd(makeMouseMoveRecord(150, [{ timeOffset: -50 }, { timeOffset: -30 }, { timeOffset: 0 }])) - ).toEqual([100, 150]) - }) + doGetSegmentContext( + 'appid', + mockParentContexts({ + ...DEFAULT_VIEW_CONTEXT, + session: { id: undefined }, + }) + ) + ).toBeUndefined() + }) + + function mockParentContexts(view: ViewContext | undefined): ParentContexts { + return { + findView() { + return view + }, + } as any + } }) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index a54de9c275..19c0dfb185 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -1,21 +1,12 @@ -import { monitor } from '@datadog/browser-core' -import { - CreationReason, - IncrementalSource, - MouseMoveRecord, - Record, - RecordType, - SegmentContext, - SegmentMeta, -} from '../types' +import { addEventListener, DOM_EVENT, EventEmitter, monitor } from '@datadog/browser-core' +import { LifeCycle, LifeCycleEventType, ParentContexts } from '@datadog/browser-rum-core' +import { SEND_BEACON_BYTE_LENGTH_LIMIT } from '../transport/send' +import { CreationReason, Record, SegmentContext, SegmentMeta } from '../types' +import { DeflateSegmentWriter } from './deflateSegmentWriter' +import { createDeflateWorker, DeflateWorker } from './deflateWorker' +import { Segment } from './segment' export const MAX_SEGMENT_DURATION = 30_000 -export const MAX_MOUSE_MOVE_BATCH = 100 - -export interface SegmentWriter { - write(data: string): void - complete(data: string, meta: SegmentMeta): void -} // Segments are the main data structure for session 
replays. They contain context information used // for indexing or UI needs, and a list of records (RRWeb 'events', renamed to avoid confusing @@ -41,12 +32,67 @@ export interface SegmentWriter { // To help investigate session replays issues, each segment is created with a "creation reason", // indicating why the session has been created. -export function startSegmentCollection(getSegmentContext: () => SegmentContext | undefined, writer: SegmentWriter) { +export function startSegmentCollection( + lifeCycle: LifeCycle, + applicationId: string, + parentContexts: ParentContexts, + send: (data: Uint8Array, meta: SegmentMeta) => void +) { + const worker = createDeflateWorker() + return doStartSegmentCollection(lifeCycle, () => doGetSegmentContext(applicationId, parentContexts), send, worker) +} + +export function doStartSegmentCollection( + lifeCycle: LifeCycle, + getSegmentContext: () => SegmentContext | undefined, + send: (data: Uint8Array, meta: SegmentMeta) => void, + worker: DeflateWorker, + emitter: EventEmitter = window +) { let currentSegment: Segment | undefined let currentSegmentExpirationTimeoutId: ReturnType + const writer = new DeflateSegmentWriter( + worker, + (size) => { + if (size > SEND_BEACON_BYTE_LENGTH_LIMIT) { + renewSegment('max_size') + } + }, + (data, meta) => { + send(data, meta) + } + ) + renewSegment('init') + // Renew when the RUM view changes + const { unsubscribe: unsubscribeViewCreated } = lifeCycle.subscribe(LifeCycleEventType.VIEW_CREATED, () => { + renewSegment('view_change') + }) + + // Renew when the session is renewed + const { unsubscribe: unsubscribeSessionRenewed } = lifeCycle.subscribe(LifeCycleEventType.SESSION_RENEWED, () => { + renewSegment('session_renewed') + }) + + // Renew when leaving the page + const { unsubscribe: unsubscribeBeforeUnload } = lifeCycle.subscribe(LifeCycleEventType.BEFORE_UNLOAD, () => { + renewSegment('before_unload') + }) + + // Renew when visibility changes + const { stop: unsubscribeVisibilityChange } = addEventListener( + emitter, + DOM_EVENT.VISIBILITY_CHANGE, + () => { + if (document.visibilityState === 'hidden') { + renewSegment('visibility_change') + } + }, + { capture: true } + ) + function renewSegment(creationReason: CreationReason) { if (currentSegment) { currentSegment.complete() @@ -71,7 +117,6 @@ export function startSegmentCollection(getSegmentContext: () => SegmentContext | } return { - renewSegment, addRecord(record: Record) { if (!currentSegment) { return @@ -79,135 +124,30 @@ export function startSegmentCollection(getSegmentContext: () => SegmentContext | currentSegment.addRecord(record) }, + stop() { + unsubscribeViewCreated() + unsubscribeBeforeUnload() + unsubscribeVisibilityChange() + unsubscribeSessionRenewed() + worker.terminate() + }, } } -export class Segment { - private state?: RecordsIncrementalState - - // Mouse positions are being generated quite quickly (up to 1 every 50ms by default). Using a - // separate record for each position can add a consequent overhead to the segment encoded size. - // To avoid this, we batch Mouse Move records coming from RRWeb and regroup them in a single - // record. - // - // Note: the original RRWeb library does this internally, without exposing a way to control this. - // To make sure mouse positions are correctly stored inside the Segment active when they occurred, - // we removed RRWeb batching strategy and recreated it at the Segment level. 
- private batchedMouseMove: MouseMoveRecord[] = [] - - constructor( - private writer: SegmentWriter, - readonly context: SegmentContext, - private creationReason: CreationReason - ) {} - - addRecord(record: Record): void { - if (isMouseMoveRecord(record)) { - if (this.batchedMouseMove.push(record) === MAX_MOUSE_MOVE_BATCH) { - this.writeMouseMoves() - } - } else { - this.writeRecord(record) - } - } - - complete() { - this.writeMouseMoves() - - if (!this.state) { - return - } - - const meta: SegmentMeta = { - creation_reason: this.creationReason, - end: this.state.end, - has_full_snapshot: this.state.hasFullSnapshot, - records_count: this.state.recordsCount, - start: this.state.start, - ...this.context, - } - this.writer.complete(`],${JSON.stringify(meta).slice(1)}\n`, meta) - } - - private writeMouseMoves() { - if (this.batchedMouseMove.length === 0) { - return - } - - this.writeRecord(groupMouseMoves(this.batchedMouseMove)) - - this.batchedMouseMove.length = 0 - } - - private writeRecord(record: Record): void { - if (!this.state) { - this.writer.write(`{"records":[${JSON.stringify(record)}`) - this.state = new RecordsIncrementalState(record) - } else { - this.writer.write(`,${JSON.stringify(record)}`) - this.state.addRecord(record) - } - } -} - -export class RecordsIncrementalState { - start: number - end: number - recordsCount: number - hasFullSnapshot: boolean - private lastRecordType: RecordType - - constructor(initialRecord: Record) { - const [start, end] = getRecordStartEnd(initialRecord) - this.start = start - this.end = end - this.lastRecordType = initialRecord.type - this.hasFullSnapshot = false - this.recordsCount = 1 - } - - addRecord(record: Record) { - const [start, end] = getRecordStartEnd(record) - this.start = Math.min(this.start, start) - this.end = Math.max(this.end, end) - if (!this.hasFullSnapshot) { - // Note: to be exploitable by the replay, this field should be true only if the FullSnapshot - // is preceded by a Meta record. Because rrweb is emitting both records synchronously and - // contiguously, it should always be the case, but check it nonetheless. - this.hasFullSnapshot = record.type === RecordType.FullSnapshot && this.lastRecordType === RecordType.Meta - } - this.lastRecordType = record.type - this.recordsCount += 1 +export function doGetSegmentContext(applicationId: string, parentContexts: ParentContexts) { + const viewContext = parentContexts.findView() + if (!viewContext?.session.id) { + return undefined } -} - -export function isMouseMoveRecord(record: Record): record is MouseMoveRecord { - return ( - record.type === RecordType.IncrementalSnapshot && - (record.data.source === IncrementalSource.MouseMove || record.data.source === IncrementalSource.TouchMove) - ) -} - -export function groupMouseMoves(records: MouseMoveRecord[]): MouseMoveRecord { - const mostRecentTimestamp = records[records.length - 1]!.timestamp return { - data: { - // Because we disabled mouse move batching from RRWeb, there will be only one position in each - // record, and its timeOffset will be 0. 
- positions: records.map(({ timestamp, data: { positions: [position] } }) => ({ - ...position, - timeOffset: timestamp - mostRecentTimestamp, - })), - source: records[0]!.data.source, + application: { + id: applicationId, + }, + session: { + id: viewContext.session.id, + }, + view: { + id: viewContext.view.id, }, - timestamp: mostRecentTimestamp, - type: RecordType.IncrementalSnapshot, - } -} - -export function getRecordStartEnd(record: Record): [number, number] { - if (isMouseMoveRecord(record)) { - return [record.timestamp + record.data.positions[0]!.timeOffset, record.timestamp] } - return [record.timestamp, record.timestamp] } diff --git a/packages/rum-recorder/src/domain/trackSegmentRenewal.spec.ts b/packages/rum-recorder/src/domain/trackSegmentRenewal.spec.ts deleted file mode 100644 index bd9d525887..0000000000 --- a/packages/rum-recorder/src/domain/trackSegmentRenewal.spec.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { createNewEvent, DOM_EVENT, restorePageVisibility, setPageVisibility } from '@datadog/browser-core' -import { LifeCycle, LifeCycleEventType } from '@datadog/browser-rum-core' -import { CreationReason } from '../types' -import { trackSegmentRenewal } from './trackSegmentRenewal' - -describe('trackSegmentRenewal', () => { - let renewSegmentSpy: jasmine.Spy<(reason: CreationReason) => void> - let lifeCycle: LifeCycle - let eventEmitter: HTMLDivElement - let stopSegmentRenewal: () => void - - beforeEach(() => { - renewSegmentSpy = jasmine.createSpy() - lifeCycle = new LifeCycle() - eventEmitter = document.createElement('div') - ;({ stop: stopSegmentRenewal } = trackSegmentRenewal(lifeCycle, renewSegmentSpy, eventEmitter)) - }) - - afterEach(() => { - stopSegmentRenewal() - restorePageVisibility() - }) - - it('renews segment on unload', () => { - lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) - expect(renewSegmentSpy).toHaveBeenCalledWith('before_unload') - }) - - it('renews segment on view change', () => { - lifeCycle.notify(LifeCycleEventType.VIEW_CREATED, {} as any) - expect(renewSegmentSpy).toHaveBeenCalledWith('view_change') - }) - - it('renews segment on session renew', () => { - lifeCycle.notify(LifeCycleEventType.SESSION_RENEWED) - expect(renewSegmentSpy).toHaveBeenCalledWith('session_renewed') - }) - - it('renews segment when the page become hidden', () => { - setPageVisibility('hidden') - eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE)) - expect(renewSegmentSpy).toHaveBeenCalledWith('visibility_change') - }) - - it('does not renew segment when the page become visible', () => { - setPageVisibility('visible') - eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE)) - expect(renewSegmentSpy).not.toHaveBeenCalled() - }) -}) diff --git a/packages/rum-recorder/src/domain/trackSegmentRenewal.ts b/packages/rum-recorder/src/domain/trackSegmentRenewal.ts deleted file mode 100644 index c5bbe64364..0000000000 --- a/packages/rum-recorder/src/domain/trackSegmentRenewal.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { addEventListener, DOM_EVENT, EventEmitter } from '@datadog/browser-core' -import { LifeCycle, LifeCycleEventType } from '@datadog/browser-rum-core' -import { CreationReason } from '../types' - -export function trackSegmentRenewal( - lifeCycle: LifeCycle, - renewSegment: (creationReason: CreationReason) => void, - emitter: EventEmitter = window -) { - // Flush when the RUM view changes - const { unsubscribe: unsubscribeViewCreated } = lifeCycle.subscribe(LifeCycleEventType.VIEW_CREATED, () => { - renewSegment('view_change') - }) 
-
-  // Flush when the session is renewed
-  const { unsubscribe: unsubscribeSessionRenewed } = lifeCycle.subscribe(LifeCycleEventType.SESSION_RENEWED, () => {
-    renewSegment('session_renewed')
-  })
-
-  // Flush when leaving the page
-  const { unsubscribe: unsubscribeBeforeUnload } = lifeCycle.subscribe(LifeCycleEventType.BEFORE_UNLOAD, () => {
-    renewSegment('before_unload')
-  })
-
-  // Flush when visibility changes
-  const { stop: unsubscribeVisibilityChange } = addEventListener(
-    emitter,
-    DOM_EVENT.VISIBILITY_CHANGE,
-    () => {
-      if (document.visibilityState === 'hidden') {
-        renewSegment('visibility_change')
-      }
-    },
-    { capture: true }
-  )
-
-  return {
-    stop() {
-      unsubscribeViewCreated()
-      unsubscribeBeforeUnload()
-      unsubscribeVisibilityChange()
-      unsubscribeSessionRenewed()
-    },
-  }
-}
diff --git a/packages/rum-recorder/test/utils.ts b/packages/rum-recorder/test/utils.ts
index 242fa3e1ab..a9aa8f4c9e 100644
--- a/packages/rum-recorder/test/utils.ts
+++ b/packages/rum-recorder/test/utils.ts
@@ -1,3 +1,4 @@
+import { DeflateWorker, DeflateWorkerAction, DeflateWorkerListener } from '../src/domain/deflateWorker'
 import { IncrementalSource, MouseMoveRecord, MousePosition, RecordType } from '../src/types'
 
 export function makeMouseMoveRecord(timestamp: number, positions: Array<Partial<MousePosition>>): MouseMoveRecord {
@@ -10,3 +11,55 @@ export function makeMouseMoveRecord(timestamp: number, positions: Array<Partial
     type: RecordType.IncrementalSnapshot,
   }
 }
+
+export class MockWorker implements DeflateWorker {
+  readonly pendingMessages: DeflateWorkerAction[] = []
+  deflatedSize = 0
+  private listener: DeflateWorkerListener | undefined
+
+  get pendingData() {
+    return this.pendingMessages.map((message) => message.data || '').join('')
+  }
+
+  addEventListener(_: 'message', listener: DeflateWorkerListener): void {
+    if (this.listener) {
+      throw new Error('MockWorker supports only one listener')
+    }
+    this.listener = listener
+  }
+
+  removeEventListener(): void {
+    this.listener = undefined
+  }
+
+  postMessage(message: DeflateWorkerAction): void {
+    this.pendingMessages.push(message)
+  }
+
+  terminate(): void {
+    // do nothing
+  }
+
+  process(ignoreMessageWithId?: number): void {
+    if (this.listener) {
+      for (const message of this.pendingMessages) {
+        if (ignoreMessageWithId === message.id) {
+          continue
+        }
+        switch (message.action) {
+          case 'write':
+            this.deflatedSize += message.data.length
+            this.listener({ data: { id: message.id, size: this.deflatedSize } })
+            break
+          case 'complete':
+            if (message.data) {
+              this.deflatedSize += message.data.length
+            }
+            this.listener({ data: { id: message.id, result: new Uint8Array(this.deflatedSize) } })
+            this.deflatedSize = 0
+        }
+      }
+    }
+    this.pendingMessages.length = 0
+  }
+}
From 9aeb99044b31e6c8f35572de2dd1efe6e89362af Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Thu, 14 Jan 2021 12:03:39 +0100
Subject: =?UTF-8?q?=F0=9F=91=8C=20don't=20create=20a=20segme?=
 =?UTF-8?q?nt=20if=20the=20session=20is=20not=20tracked?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 packages/rum-core/src/boot/rum.ts             |  1 +
 .../rum-core/src/boot/rumPublicApi.spec.ts    |  1 +
 packages/rum-core/src/index.ts                |  1 +
 .../rum-recorder/src/boot/recorder.entry.ts   |  4 +--
 .../rum-recorder/src/boot/recorder.spec.ts    |  4 +--
 packages/rum-recorder/src/boot/recorder.ts    |  4 ++-
 .../src/domain/segmentCollection.spec.ts      | 26 ++++++++++++++++---
 .../src/domain/segmentCollection.ts           | 15 ++++++++---
 8 files changed, 45 insertions(+), 11 deletions(-)

diff --git a/packages/rum-core/src/boot/rum.ts b/packages/rum-core/src/boot/rum.ts
index 3d432a4ecb..76874225bd 100644
--- a/packages/rum-core/src/boot/rum.ts
+++ b/packages/rum-core/src/boot/rum.ts
@@ -56,6 +56,7 @@ export function startRum(userConfiguration: RumUserConfiguration, getCommonConte
     configuration,
     lifeCycle,
     parentContexts,
+    session,
     getInternalContext: internalContext.get,
   }
 }
diff --git a/packages/rum-core/src/boot/rumPublicApi.spec.ts b/packages/rum-core/src/boot/rumPublicApi.spec.ts
index eb72fcaae4..9870ab733f 100644
--- a/packages/rum-core/src/boot/rumPublicApi.spec.ts
+++ b/packages/rum-core/src/boot/rumPublicApi.spec.ts
@@ -14,6 +14,7 @@ const noopStartRum = (): ReturnType<typeof startRum> => ({
   getInternalContext: () => undefined,
   lifeCycle: {} as any,
   parentContexts: {} as any,
+  session: {} as any,
 })
 const DEFAULT_INIT_CONFIGURATION = { applicationId: 'xxx', clientToken: 'xxx' }
diff --git a/packages/rum-core/src/index.ts b/packages/rum-core/src/index.ts
index d49d206d8c..0e93fb94b0 100644
--- a/packages/rum-core/src/index.ts
+++ b/packages/rum-core/src/index.ts
@@ -13,3 +13,4 @@ export { ViewContext, CommonContext } from './rawRumEvent.types'
 export { startRum } from './boot/rum'
 export { LifeCycle, LifeCycleEventType } from './domain/lifeCycle'
 export { ParentContexts } from './domain/parentContexts'
+export { RumSession } from './domain/rumSession'
diff --git a/packages/rum-recorder/src/boot/recorder.entry.ts b/packages/rum-recorder/src/boot/recorder.entry.ts
index 0299f5728d..6ab06e16f5 100644
--- a/packages/rum-recorder/src/boot/recorder.entry.ts
+++ b/packages/rum-recorder/src/boot/recorder.entry.ts
@@ -15,8 +15,8 @@ function startRumAndRecording(userConfiguration: RumUserConfiguration, getCommon
     hasReplay: true,
   }))
 
-  const { lifeCycle, parentContexts, configuration } = startRumResult
-  startRecording(lifeCycle, userConfiguration.applicationId, configuration, parentContexts)
+  const { lifeCycle, parentContexts, configuration, session } = startRumResult
+  startRecording(lifeCycle, userConfiguration.applicationId, configuration, session, parentContexts)
 
   return startRumResult
 }
diff --git a/packages/rum-recorder/src/boot/recorder.spec.ts b/packages/rum-recorder/src/boot/recorder.spec.ts
index b2c3689fa8..47f87257c9 100644
--- a/packages/rum-recorder/src/boot/recorder.spec.ts
+++ b/packages/rum-recorder/src/boot/recorder.spec.ts
@@ -29,8 +29,8 @@ describe('startRecording', () => {
         }
       },
     })
-      .beforeBuild(({ lifeCycle, applicationId, configuration, parentContexts }) => {
-        return startRecording(lifeCycle, applicationId, configuration, parentContexts)
+      .beforeBuild(({ lifeCycle, applicationId, configuration, parentContexts, session }) => {
+        return startRecording(lifeCycle, applicationId, configuration, session, parentContexts)
       })
   })
diff --git a/packages/rum-recorder/src/boot/recorder.ts b/packages/rum-recorder/src/boot/recorder.ts
index da6bed946f..c700f27a55 100644
--- a/packages/rum-recorder/src/boot/recorder.ts
+++ b/packages/rum-recorder/src/boot/recorder.ts
@@ -1,5 +1,5 @@
 import { Configuration } from '@datadog/browser-core'
-import { LifeCycle, ParentContexts } from '@datadog/browser-rum-core'
+import { LifeCycle, ParentContexts, RumSession } from '@datadog/browser-rum-core'
 import { record } from '../domain/rrweb'
 import { startSegmentCollection } from '../domain/segmentCollection'
@@ -9,11 +9,13 @@ export function startRecording(
   lifeCycle: LifeCycle,
   applicationId: string,
   configuration: Configuration,
+  session: RumSession,
   parentContexts: ParentContexts
 ) {
   const { addRecord, stop: stopSegmentCollection } = startSegmentCollection(
     lifeCycle,
     applicationId,
+    session,
     parentContexts,
     (data, meta) => send(configuration.sessionReplayEndpoint, data, meta)
   )
diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts
b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index 04d3d3af18..733e760ed0 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -1,5 +1,5 @@ import { createNewEvent, DOM_EVENT, restorePageVisibility, setPageVisibility } from '@datadog/browser-core' -import { LifeCycle, LifeCycleEventType, ParentContexts, ViewContext } from '@datadog/browser-rum-core' +import { LifeCycle, LifeCycleEventType, ParentContexts, RumSession, ViewContext } from '@datadog/browser-rum-core' import { Record, RecordType, SegmentContext, SegmentMeta } from '../types' import { Segment } from './segment' import { doGetSegmentContext, doStartSegmentCollection, MAX_SEGMENT_DURATION } from './segmentCollection' @@ -143,8 +143,14 @@ describe('getSegmentContext', () => { view: { id: '123', url: 'http://foo.com', referrer: 'http://bar.com' }, } + const DEFAULT_SESSION: RumSession = { + getId: () => 'session-id', + isTracked: () => true, + isTrackedWithResource: () => true, + } + it('returns a segment context', () => { - expect(doGetSegmentContext('appid', mockParentContexts(DEFAULT_VIEW_CONTEXT))).toEqual({ + expect(doGetSegmentContext('appid', DEFAULT_SESSION, mockParentContexts(DEFAULT_VIEW_CONTEXT))).toEqual({ application: { id: 'appid' }, session: { id: '456' }, view: { id: '123' }, @@ -152,13 +158,14 @@ describe('getSegmentContext', () => { }) it('returns undefined if there is no current view', () => { - expect(doGetSegmentContext('appid', mockParentContexts(undefined))).toBeUndefined() + expect(doGetSegmentContext('appid', DEFAULT_SESSION, mockParentContexts(undefined))).toBeUndefined() }) it('returns undefined if there is no session id', () => { expect( doGetSegmentContext( 'appid', + DEFAULT_SESSION, mockParentContexts({ ...DEFAULT_VIEW_CONTEXT, session: { id: undefined }, @@ -167,6 +174,19 @@ describe('getSegmentContext', () => { ).toBeUndefined() }) + it('returns undefined if the session is not tracked', () => { + expect( + doGetSegmentContext( + 'appid', + { + ...DEFAULT_SESSION, + isTracked: () => false, + }, + mockParentContexts(DEFAULT_VIEW_CONTEXT) + ) + ).toBeUndefined() + }) + function mockParentContexts(view: ViewContext | undefined): ParentContexts { return { findView() { diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index 19c0dfb185..68d08fb7b8 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -1,5 +1,5 @@ import { addEventListener, DOM_EVENT, EventEmitter, monitor } from '@datadog/browser-core' -import { LifeCycle, LifeCycleEventType, ParentContexts } from '@datadog/browser-rum-core' +import { LifeCycle, LifeCycleEventType, ParentContexts, RumSession } from '@datadog/browser-rum-core' import { SEND_BEACON_BYTE_LENGTH_LIMIT } from '../transport/send' import { CreationReason, Record, SegmentContext, SegmentMeta } from '../types' import { DeflateSegmentWriter } from './deflateSegmentWriter' @@ -35,11 +35,17 @@ export const MAX_SEGMENT_DURATION = 30_000 export function startSegmentCollection( lifeCycle: LifeCycle, applicationId: string, + session: RumSession, parentContexts: ParentContexts, send: (data: Uint8Array, meta: SegmentMeta) => void ) { const worker = createDeflateWorker() - return doStartSegmentCollection(lifeCycle, () => doGetSegmentContext(applicationId, parentContexts), send, worker) + return doStartSegmentCollection( + lifeCycle, + () => 
doGetSegmentContext(applicationId, session, parentContexts), + send, + worker + ) } export function doStartSegmentCollection( @@ -134,7 +140,10 @@ export function doStartSegmentCollection( } } -export function doGetSegmentContext(applicationId: string, parentContexts: ParentContexts) { +export function doGetSegmentContext(applicationId: string, session: RumSession, parentContexts: ParentContexts) { + if (!session.isTracked()) { + return undefined + } const viewContext = parentContexts.findView() if (!viewContext?.session.id) { return undefined From 7ffabe901a6401dbbf21a089e3ec18f28e09e68c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Thu, 14 Jan 2021 16:03:07 +0100 Subject: [PATCH 29/43] =?UTF-8?q?=F0=9F=94=A5=F0=9F=91=8C=20remove=20mouse?= =?UTF-8?q?move=20batching=20logic?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../rum-recorder/src/domain/segment.spec.ts | 114 +----------------- packages/rum-recorder/src/domain/segment.ts | 94 ++------------- packages/rum-recorder/test/utils.ts | 12 -- 3 files changed, 11 insertions(+), 209 deletions(-) diff --git a/packages/rum-recorder/src/domain/segment.spec.ts b/packages/rum-recorder/src/domain/segment.spec.ts index 06f5e97812..9d935e3a67 100644 --- a/packages/rum-recorder/src/domain/segment.spec.ts +++ b/packages/rum-recorder/src/domain/segment.spec.ts @@ -1,14 +1,5 @@ -import { makeMouseMoveRecord } from '../../test/utils' -import { IncrementalSource, MouseMoveRecord, Record, RecordType, SegmentContext, SegmentMeta } from '../types' -import { - getRecordStartEnd, - groupMouseMoves, - isMouseMoveRecord, - MAX_MOUSE_MOVE_BATCH, - RecordsIncrementalState, - Segment, - SegmentWriter, -} from './segment' +import { Record, RecordType, SegmentContext, SegmentMeta } from '../types' +import { RecordsIncrementalState, Segment, SegmentWriter } from './segment' class StringWriter implements SegmentWriter { output = '' @@ -23,18 +14,6 @@ class StringWriter implements SegmentWriter { } const CONTEXT: SegmentContext = { application: { id: 'a' }, view: { id: 'b' }, session: { id: 'c' } } -const RECORD: Record = { type: RecordType.Load, timestamp: 10, data: {} } - -const INPUT_RECORD: Record = { - data: { - id: 123, - isChecked: true, - source: IncrementalSource.Input, - text: '123', - }, - timestamp: 123, - type: RecordType.IncrementalSnapshot, -} describe('Segment', () => { it('writes a segment', () => { @@ -74,37 +53,6 @@ describe('Segment', () => { ]) }) - it('batches mousemove records', () => { - const writer = new StringWriter() - const segment = new Segment(writer, CONTEXT, 'init') - segment.addRecord(makeMouseMoveRecord(10, [{ id: 0 }])) - segment.addRecord(makeMouseMoveRecord(20, [{ id: 1 }])) - segment.addRecord(makeMouseMoveRecord(30, [{ id: 2 }])) - segment.complete() - - expect(writer.completed[0].segment.records).toEqual([ - makeMouseMoveRecord(30, [ - { id: 0, timeOffset: -20 }, - { id: 1, timeOffset: -10 }, - { id: 2, timeOffset: 0 }, - ]), - ]) - }) - - it('writes the mousemove records batch after a max number of records', () => { - const writer = new StringWriter() - const segment = new Segment(writer, CONTEXT, 'init') - for (let i = 0; i < MAX_MOUSE_MOVE_BATCH + 2; i += 1) { - segment.addRecord(makeMouseMoveRecord(10, [{ id: 0 }])) - } - segment.complete() - - const records = writer.completed[0].segment.records as MouseMoveRecord[] - expect(records.length).toBe(2) - expect(records[0].data.positions.length).toBe(MAX_MOUSE_MOVE_BATCH) - 
expect(records[1].data.positions.length).toBe(2) - }) - it('ignores the "complete" call if no record have been added', () => { const writer = new StringWriter() const segment = new Segment(writer, CONTEXT, 'init') @@ -156,62 +104,4 @@ describe('RecordsIncrementalState', () => { state.addRecord({ type: RecordType.DomContentLoaded, timestamp: 10, data: {} as any }) expect(state.hasFullSnapshot).toBe(true) }) - - it('use records start/end for mouse moves', () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - state.addRecord({ - data: { source: IncrementalSource.MouseMove, positions: [{ timeOffset: -2, x: 0, y: 0, id: 0 }] }, - timestamp: 11, - type: RecordType.IncrementalSnapshot, - }) - expect(state.start).toBe(9) - expect(state.end).toBe(11) - }) -}) - -describe('isMouseMoveRecord', () => { - it('returns false for non-MouseMove records', () => { - expect(isMouseMoveRecord(RECORD)).toBe(false) - expect(isMouseMoveRecord(INPUT_RECORD)).toBe(false) - }) - - it('returns true for MouseMove records', () => { - expect(isMouseMoveRecord(makeMouseMoveRecord(100, []))).toBe(true) - }) -}) - -describe('groupMouseMoves', () => { - it('returns the same event if a single event is provided', () => { - const event = makeMouseMoveRecord(10, [{ id: 0 }]) - expect(groupMouseMoves([event])).toEqual(event) - }) - - it('groups mouse events in a single mouse event', () => { - expect( - groupMouseMoves([ - makeMouseMoveRecord(10, [{ id: 0 }]), - makeMouseMoveRecord(14, [{ id: 1 }]), - makeMouseMoveRecord(20, [{ id: 2 }]), - ]) - ).toEqual( - makeMouseMoveRecord(20, [ - { id: 0, timeOffset: -10 }, - { id: 1, timeOffset: -6 }, - { id: 2, timeOffset: 0 }, - ]) - ) - }) -}) - -describe('getRecordStartEnd', () => { - it("returns the timestamp as 'start' and 'end' for non-MouseMove records", () => { - expect(getRecordStartEnd(RECORD)).toEqual([10, 10]) - expect(getRecordStartEnd(INPUT_RECORD)).toEqual([123, 123]) - }) - - it("returns the time from the first mouse position as 'start' for MouseMove records", () => { - expect( - getRecordStartEnd(makeMouseMoveRecord(150, [{ timeOffset: -50 }, { timeOffset: -30 }, { timeOffset: 0 }])) - ).toEqual([100, 150]) - }) }) diff --git a/packages/rum-recorder/src/domain/segment.ts b/packages/rum-recorder/src/domain/segment.ts index efaa8095b3..6cdaa26e83 100644 --- a/packages/rum-recorder/src/domain/segment.ts +++ b/packages/rum-recorder/src/domain/segment.ts @@ -1,33 +1,13 @@ -import { - CreationReason, - IncrementalSource, - MouseMoveRecord, - Record, - RecordType, - SegmentContext, - SegmentMeta, -} from '../types' +import { CreationReason, Record, RecordType, SegmentContext, SegmentMeta } from '../types' export interface SegmentWriter { write(data: string): void complete(data: string, meta: SegmentMeta): void } -export const MAX_MOUSE_MOVE_BATCH = 100 - export class Segment { private state?: RecordsIncrementalState - // Mouse positions are being generated quite quickly (up to 1 every 50ms by default). Using a - // separate record for each position can add a consequent overhead to the segment encoded size. - // To avoid this, we batch Mouse Move records coming from RRWeb and regroup them in a single - // record. - // - // Note: the original RRWeb library does this internally, without exposing a way to control this. - // To make sure mouse positions are correctly stored inside the Segment active when they occurred, - // we removed RRWeb batching strategy and recreated it at the Segment level. 
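// A minimal sketch of the batching described above (hypothetical positions p0..p2): three
// single-position records captured at timestamps 10, 14 and 20 collapse into one record
// anchored on the most recent timestamp, following the groupMouseMoves() logic below:
//   { type: RecordType.IncrementalSnapshot, timestamp: 20,
//     data: { source: IncrementalSource.MouseMove,
//             positions: [{ ...p0, timeOffset: -10 }, { ...p1, timeOffset: -6 }, { ...p2, timeOffset: 0 }] } }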
- private batchedMouseMove: MouseMoveRecord[] = [] - constructor( private writer: SegmentWriter, readonly context: SegmentContext, @@ -35,18 +15,16 @@ export class Segment { ) {} addRecord(record: Record): void { - if (isMouseMoveRecord(record)) { - if (this.batchedMouseMove.push(record) === MAX_MOUSE_MOVE_BATCH) { - this.writeMouseMoves() - } + if (!this.state) { + this.writer.write(`{"records":[${JSON.stringify(record)}`) + this.state = new RecordsIncrementalState(record) } else { - this.writeRecord(record) + this.writer.write(`,${JSON.stringify(record)}`) + this.state.addRecord(record) } } complete() { - this.writeMouseMoves() - if (!this.state) { return } @@ -61,26 +39,6 @@ export class Segment { } this.writer.complete(`],${JSON.stringify(meta).slice(1)}\n`, meta) } - - private writeMouseMoves() { - if (this.batchedMouseMove.length === 0) { - return - } - - this.writeRecord(groupMouseMoves(this.batchedMouseMove)) - - this.batchedMouseMove.length = 0 - } - - private writeRecord(record: Record): void { - if (!this.state) { - this.writer.write(`{"records":[${JSON.stringify(record)}`) - this.state = new RecordsIncrementalState(record) - } else { - this.writer.write(`,${JSON.stringify(record)}`) - this.state.addRecord(record) - } - } } export class RecordsIncrementalState { @@ -91,18 +49,15 @@ export class RecordsIncrementalState { private lastRecordType: RecordType constructor(initialRecord: Record) { - const [start, end] = getRecordStartEnd(initialRecord) - this.start = start - this.end = end + this.start = initialRecord.timestamp + this.end = initialRecord.timestamp this.lastRecordType = initialRecord.type this.hasFullSnapshot = false this.recordsCount = 1 } addRecord(record: Record) { - const [start, end] = getRecordStartEnd(record) - this.start = Math.min(this.start, start) - this.end = Math.max(this.end, end) + this.end = record.timestamp if (!this.hasFullSnapshot) { // Note: to be exploitable by the replay, this field should be true only if the FullSnapshot // is preceded by a Meta record. Because rrweb is emitting both records synchronously and @@ -113,34 +68,3 @@ export class RecordsIncrementalState { this.recordsCount += 1 } } - -export function isMouseMoveRecord(record: Record): record is MouseMoveRecord { - return ( - record.type === RecordType.IncrementalSnapshot && - (record.data.source === IncrementalSource.MouseMove || record.data.source === IncrementalSource.TouchMove) - ) -} - -export function groupMouseMoves(records: MouseMoveRecord[]): MouseMoveRecord { - const mostRecentTimestamp = records[records.length - 1]!.timestamp - return { - data: { - // Because we disabled mouse move batching from RRWeb, there will be only one position in each - // record, and its timeOffset will be 0. 
-      positions: records.map(({ timestamp, data: { positions: [position] } }) => ({
-        ...position,
-        timeOffset: timestamp - mostRecentTimestamp,
-      })),
-      source: records[0]!.data.source,
-    },
-    timestamp: mostRecentTimestamp,
-    type: RecordType.IncrementalSnapshot,
-  }
-}
-
-export function getRecordStartEnd(record: Record): [number, number] {
-  if (isMouseMoveRecord(record)) {
-    return [record.timestamp + record.data.positions[0]!.timeOffset, record.timestamp]
-  }
-  return [record.timestamp, record.timestamp]
-}
diff --git a/packages/rum-recorder/test/utils.ts b/packages/rum-recorder/test/utils.ts
index a9aa8f4c9e..ffece77ffe 100644
--- a/packages/rum-recorder/test/utils.ts
+++ b/packages/rum-recorder/test/utils.ts
@@ -1,16 +1,4 @@
 import { DeflateWorker, DeflateWorkerAction, DeflateWorkerListener } from '../src/domain/deflateWorker'
-import { IncrementalSource, MouseMoveRecord, MousePosition, RecordType } from '../src/types'
-
-export function makeMouseMoveRecord(timestamp: number, positions: Array<Partial<MousePosition>>): MouseMoveRecord {
-  return {
-    timestamp,
-    data: {
-      positions: positions.map((position) => ({ id: 0, timeOffset: 0, x: 0, y: 1, ...position })),
-      source: IncrementalSource.MouseMove,
-    },
-    type: RecordType.IncrementalSnapshot,
-  }
-}
 
 export class MockWorker implements DeflateWorker {
   readonly pendingMessages: DeflateWorkerAction[] = []
From 4cdf7c5cea4be780aa9c921354e906fa8a56ef54 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?=
Date: Thu, 14 Jan 2021 16:36:00 +0100
Subject: =?UTF-8?q?=F0=9F=91=8C=20lazily=20create=20a=20segm?=
 =?UTF-8?q?ent=20on=20first=20record?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This makes it possible to simplify the Segment implementation.
---
 .../rum-recorder/src/domain/segment.spec.ts   | 111 +++++++++---------
 packages/rum-recorder/src/domain/segment.ts   |  63 ++++------
 .../src/domain/segmentCollection.spec.ts      |  14 ++-
 .../src/domain/segmentCollection.ts           |  36 +++---
 4 files changed, 106 insertions(+), 118 deletions(-)

diff --git a/packages/rum-recorder/src/domain/segment.spec.ts b/packages/rum-recorder/src/domain/segment.spec.ts
index 9d935e3a67..044372faa4 100644
--- a/packages/rum-recorder/src/domain/segment.spec.ts
+++ b/packages/rum-recorder/src/domain/segment.spec.ts
@@ -1,25 +1,17 @@
 import { Record, RecordType, SegmentContext, SegmentMeta } from '../types'
-import { RecordsIncrementalState, Segment, SegmentWriter } from './segment'
-
-class StringWriter implements SegmentWriter {
-  output = ''
-  completed: Array<{ meta: SegmentMeta; segment: SegmentMeta & { records: Record[] } }> = []
-  write(data: string) {
-    this.output += data
-  }
-  complete(data: string, meta: SegmentMeta) {
-    this.completed.push({ meta, segment: JSON.parse(this.output + data) as any })
-    this.output = ''
-  }
-}
+import { Segment, SegmentWriter } from './segment'
 
 const CONTEXT: SegmentContext = { application: { id: 'a' }, view: { id: 'b' }, session: { id: 'c' } }
+const LOAD_RECORD: Record = { type: RecordType.Load, timestamp: 10, data: {} }
+const FULLSNAPSHOT_RECORD: Record = { type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }
+const DOM_CONTENT_LOADED_RECORD: Record = { type: RecordType.DomContentLoaded, timestamp: 10, data: {} as any }
+const META_RECORD: Record = { type: RecordType.Meta, timestamp: 10, data: {} as any }
+
 describe('Segment', () => {
   it('writes a segment', () => {
     const writer = new StringWriter()
-    const segment = new Segment(writer, CONTEXT, 'init')
-    segment.addRecord({ type: 
RecordType.Load, timestamp: 10, data: {} }) + const segment = new Segment(writer, CONTEXT, 'init', LOAD_RECORD) expect(writer.output).toEqual('{"records":[{"type":1,"timestamp":10,"data":{}}') expect(writer.completed).toEqual([]) segment.complete() @@ -53,55 +45,64 @@ describe('Segment', () => { ]) }) - it('ignores the "complete" call if no record have been added', () => { + it('adjusts meta when adding a record', () => { const writer = new StringWriter() - const segment = new Segment(writer, CONTEXT, 'init') + const segment = new Segment(writer, CONTEXT, 'init', LOAD_RECORD) + segment.addRecord({ type: RecordType.DomContentLoaded, timestamp: 15, data: {} }) segment.complete() - expect(writer.completed).toEqual([]) - }) -}) - -describe('RecordsIncrementalState', () => { - it('initializes with the data of the first record', () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - expect(state.start).toBe(10) - expect(state.end).toBe(10) - expect(state.hasFullSnapshot).toBe(false) - expect(state.recordsCount).toBe(1) + expect(writer.completed[0].meta).toEqual({ + creation_reason: 'init', + end: 15, + has_full_snapshot: false, + records_count: 2, + start: 10, + ...CONTEXT, + }) }) - it('adjusts the state when adding a record', () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - state.addRecord({ type: RecordType.DomContentLoaded, timestamp: 15, data: {} }) - expect(state.start).toBe(10) - expect(state.end).toBe(15) - expect(state.hasFullSnapshot).toBe(false) - expect(state.recordsCount).toBe(2) - }) - - it("doesn't set hasFullSnapshot to true if a FullSnapshot is the first record", () => { - const state = new RecordsIncrementalState({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }) - expect(state.hasFullSnapshot).toBe(false) + it("doesn't set has_full_snapshot to true if a FullSnapshot is the initial record", () => { + const writer = new StringWriter() + const segment = new Segment(writer, CONTEXT, 'init', FULLSNAPSHOT_RECORD) + segment.complete() + expect(writer.completed[0].meta.has_full_snapshot).toEqual(false) }) - it("doesn't set hasFullSnapshot to true if a FullSnapshot is not directly preceded by a Meta record", () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }) - expect(state.hasFullSnapshot).toBe(false) + it("doesn't set has_full_snapshot to true if a FullSnapshot is not directly preceded by a Meta record", () => { + const writer = new StringWriter() + const segment = new Segment(writer, CONTEXT, 'init', LOAD_RECORD) + segment.addRecord(FULLSNAPSHOT_RECORD) + segment.complete() + expect(writer.completed[0].meta.has_full_snapshot).toEqual(false) }) - it('sets hasFullSnapshot to true if a FullSnapshot is preceded by a Meta record', () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - state.addRecord({ type: RecordType.Meta, timestamp: 10, data: {} as any }) - state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }) - expect(state.hasFullSnapshot).toBe(true) + it('sets has_full_snapshot to true if a FullSnapshot is preceded by a Meta record', () => { + const writer = new StringWriter() + const segment = new Segment(writer, CONTEXT, 'init', LOAD_RECORD) + segment.addRecord(META_RECORD) + segment.addRecord(FULLSNAPSHOT_RECORD) + segment.complete() + 
expect(writer.completed[0].meta.has_full_snapshot).toEqual(true) }) - it("doesn't overrides hasFullSnapshot to false once it has been set to true", () => { - const state = new RecordsIncrementalState({ type: RecordType.Load, timestamp: 10, data: {} }) - state.addRecord({ type: RecordType.Meta, timestamp: 10, data: {} as any }) - state.addRecord({ type: RecordType.FullSnapshot, timestamp: 10, data: {} as any }) - state.addRecord({ type: RecordType.DomContentLoaded, timestamp: 10, data: {} as any }) - expect(state.hasFullSnapshot).toBe(true) + it("doesn't overrides has_full_snapshot to false once it has been set to true", () => { + const writer = new StringWriter() + const segment = new Segment(writer, CONTEXT, 'init', LOAD_RECORD) + segment.addRecord(META_RECORD) + segment.addRecord(FULLSNAPSHOT_RECORD) + segment.addRecord(DOM_CONTENT_LOADED_RECORD) + segment.complete() + expect(writer.completed[0].meta.has_full_snapshot).toEqual(true) }) }) + +class StringWriter implements SegmentWriter { + output = '' + completed: Array<{ meta: SegmentMeta; segment: SegmentMeta & { records: Record[] } }> = [] + write(data: string) { + this.output += data + } + complete(data: string, meta: SegmentMeta) { + this.completed.push({ meta, segment: JSON.parse(this.output + data) as any }) + this.output = '' + } +} diff --git a/packages/rum-recorder/src/domain/segment.ts b/packages/rum-recorder/src/domain/segment.ts index 6cdaa26e83..0c775a416a 100644 --- a/packages/rum-recorder/src/domain/segment.ts +++ b/packages/rum-recorder/src/domain/segment.ts @@ -6,57 +6,27 @@ export interface SegmentWriter { } export class Segment { - private state?: RecordsIncrementalState + private start: number + private end: number + private recordsCount: number + private hasFullSnapshot: boolean + private lastRecordType: RecordType constructor( private writer: SegmentWriter, readonly context: SegmentContext, - private creationReason: CreationReason - ) {} - - addRecord(record: Record): void { - if (!this.state) { - this.writer.write(`{"records":[${JSON.stringify(record)}`) - this.state = new RecordsIncrementalState(record) - } else { - this.writer.write(`,${JSON.stringify(record)}`) - this.state.addRecord(record) - } - } - - complete() { - if (!this.state) { - return - } - - const meta: SegmentMeta = { - creation_reason: this.creationReason, - end: this.state.end, - has_full_snapshot: this.state.hasFullSnapshot, - records_count: this.state.recordsCount, - start: this.state.start, - ...this.context, - } - this.writer.complete(`],${JSON.stringify(meta).slice(1)}\n`, meta) - } -} - -export class RecordsIncrementalState { - start: number - end: number - recordsCount: number - hasFullSnapshot: boolean - private lastRecordType: RecordType - - constructor(initialRecord: Record) { + private creationReason: CreationReason, + initialRecord: Record + ) { this.start = initialRecord.timestamp this.end = initialRecord.timestamp this.lastRecordType = initialRecord.type this.hasFullSnapshot = false this.recordsCount = 1 + this.writer.write(`{"records":[${JSON.stringify(initialRecord)}`) } - addRecord(record: Record) { + addRecord(record: Record): void { this.end = record.timestamp if (!this.hasFullSnapshot) { // Note: to be exploitable by the replay, this field should be true only if the FullSnapshot @@ -66,5 +36,18 @@ export class RecordsIncrementalState { } this.lastRecordType = record.type this.recordsCount += 1 + this.writer.write(`,${JSON.stringify(record)}`) + } + + complete() { + const meta: SegmentMeta = { + creation_reason: 
this.creationReason,
+      end: this.end,
+      has_full_snapshot: this.hasFullSnapshot,
+      records_count: this.recordsCount,
+      start: this.start,
+      ...this.context,
+    }
+    this.writer.complete(`],${JSON.stringify(meta).slice(1)}\n`, meta)
+  }
 }
diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts
index 733e760ed0..f81b14195c 100644
--- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts
+++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts
@@ -56,8 +56,10 @@ describe('startSegmentCollection', () => {
   })
 
   it('completes a segment when renewing it', () => {
-    const { lifeCycle, segmentCompleteSpy } = startSegmentCollection(CONTEXT)
+    const { lifeCycle, segmentCompleteSpy, addRecord } = startSegmentCollection(CONTEXT)
+    addRecord(RECORD)
     lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD)
+
     expect(segmentCompleteSpy).toHaveBeenCalledTimes(1)
   })
@@ -118,19 +120,23 @@ describe('startSegmentCollection', () => {
 
   it('renews a segment after MAX_SEGMENT_DURATION', () => {
     jasmine.clock().install()
-    const { segmentCompleteSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT)
+    const { segmentCompleteSpy, sendCurrentSegment, addRecord } = startSegmentCollection(CONTEXT)
+    addRecord(RECORD)
     jasmine.clock().tick(MAX_SEGMENT_DURATION)
+
     expect(segmentCompleteSpy).toHaveBeenCalledTimes(1)
     expect(sendCurrentSegment().creation_reason).toBe('max_duration')
   })
 
   it('does not renew a segment after MAX_SEGMENT_DURATION if a segment has been created in the meantime', () => {
     jasmine.clock().install()
-    const { lifeCycle, segmentCompleteSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT)
+    const { lifeCycle, segmentCompleteSpy, sendCurrentSegment, addRecord } = startSegmentCollection(CONTEXT)
+    addRecord(RECORD)
     jasmine.clock().tick(BEFORE_MAX_SEGMENT_DURATION)
     lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD)
-    expect(segmentCompleteSpy).toHaveBeenCalledTimes(1)
+    addRecord(RECORD)
     jasmine.clock().tick(BEFORE_MAX_SEGMENT_DURATION)
+
+    expect(segmentCompleteSpy).toHaveBeenCalledTimes(1)
     expect(sendCurrentSegment().creation_reason).not.toBe('max_duration')
   })
diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts
index 68d08fb7b8..9d449ae292 100644
--- a/packages/rum-recorder/src/domain/segmentCollection.ts
+++ b/packages/rum-recorder/src/domain/segmentCollection.ts
@@ -57,6 +57,7 @@ export function doStartSegmentCollection(
 ) {
   let currentSegment: Segment | undefined
   let currentSegmentExpirationTimeoutId: ReturnType<typeof setTimeout>
+  let nextSegmentCreationReason: CreationReason = 'init'
 
   const writer = new DeflateSegmentWriter(
     worker,
@@ -70,8 +71,6 @@ export function doStartSegmentCollection(
     }
   )
 
-  renewSegment('init')
-
   // Renew when the RUM view changes
   const { unsubscribe: unsubscribeViewCreated } = lifeCycle.subscribe(LifeCycleEventType.VIEW_CREATED, () => {
     renewSegment('view_change')
   })
@@ -106,29 +105,28 @@ export function doStartSegmentCollection(
     clearTimeout(currentSegmentExpirationTimeoutId)
   }
 
-    const context = getSegmentContext()
-    if (!context) {
-      return
-    }
-
-    currentSegment = new Segment(writer, context, creationReason)
-
-    // Replace the newly created segment after MAX_SEGMENT_DURATION
-    currentSegmentExpirationTimeoutId = setTimeout(
-      monitor(() => {
-        renewSegment('max_duration')
-      }),
-      MAX_SEGMENT_DURATION
-    )
+    nextSegmentCreationReason = creationReason
   }
 
   return {
     addRecord(record: Record) {
       if (!currentSegment) {
-        return
+        const context = 
getSegmentContext() + if (!context) { + return + } + + currentSegment = new Segment(writer, context, nextSegmentCreationReason, record) + // Replace the newly created segment after MAX_SEGMENT_DURATION + currentSegmentExpirationTimeoutId = setTimeout( + monitor(() => { + renewSegment('max_duration') + }), + MAX_SEGMENT_DURATION + ) + } else { + currentSegment.addRecord(record) } - - currentSegment.addRecord(record) }, stop() { unsubscribeViewCreated() From 4f7b3009d9f87f85b48aebdf9320f64d6e0dbcf2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Thu, 14 Jan 2021 16:55:33 +0100 Subject: [PATCH 31/43] =?UTF-8?q?=F0=9F=91=8C=20reword=20segment=20'renew'?= =?UTF-8?q?=20to=20'flush'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../rum-recorder/src/boot/recorder.spec.ts | 14 +++++----- .../src/domain/segmentCollection.spec.ts | 22 ++++++++-------- .../src/domain/segmentCollection.ts | 26 +++++++++---------- 3 files changed, 31 insertions(+), 31 deletions(-) diff --git a/packages/rum-recorder/src/boot/recorder.spec.ts b/packages/rum-recorder/src/boot/recorder.spec.ts index 47f87257c9..fa42270987 100644 --- a/packages/rum-recorder/src/boot/recorder.spec.ts +++ b/packages/rum-recorder/src/boot/recorder.spec.ts @@ -56,10 +56,10 @@ describe('startRecording', () => { }) const { lifeCycle } = setupBuilder.build() - renewSegment(lifeCycle) + flushSegment(lifeCycle) }) - it('renews the segment when its compressed data is getting too large', (done) => { + it('flushes the segment when its compressed data is getting too large', (done) => { const clickCount = 10_000 waitRequests((requests) => { expect(requests.length).toBe(1) @@ -86,10 +86,10 @@ describe('startRecording', () => { document.body.dispatchEvent(createNewEvent('click')) sessionId = undefined - renewSegment(lifeCycle) + flushSegment(lifeCycle) document.body.dispatchEvent(createNewEvent('click')) - renewSegment(lifeCycle) + flushSegment(lifeCycle) }) it('restarts sending segments when the session is renewed', (done) => { @@ -106,14 +106,14 @@ describe('startRecording', () => { document.body.dispatchEvent(createNewEvent('click')) sessionId = 'new-session-id' - renewSegment(lifeCycle) + flushSegment(lifeCycle) document.body.dispatchEvent(createNewEvent('click')) - renewSegment(lifeCycle) + flushSegment(lifeCycle) }) }) -function renewSegment(lifeCycle: LifeCycle) { +function flushSegment(lifeCycle: LifeCycle) { lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) } diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index f81b14195c..572fa360b2 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -33,7 +33,7 @@ describe('startSegmentCollection', () => { sendCurrentSegment() { // Make sure the segment is not empty addRecord(RECORD) - // Renew segment + // Flush segment lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) worker.process() return sendSpy.calls.mostRecent().args[1] @@ -55,7 +55,7 @@ describe('startSegmentCollection', () => { expect(sendCurrentSegment().creation_reason).toBe('init') }) - it('completes a segment when renewing it', () => { + it('completes a segment when flushing it', () => { const { lifeCycle, segmentCompleteSpy, addRecord } = startSegmentCollection(CONTEXT) addRecord(RECORD) lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) @@ -71,37 +71,37 @@ describe('startSegmentCollection', () => { 
expect(segmentCompleteSpy).not.toHaveBeenCalled() }) - describe('segment renewal', () => { + describe('segment flush strategy', () => { afterEach(() => { restorePageVisibility() }) - it('renews segment on unload', () => { + it('flushes segment on unload', () => { const { lifeCycle, sendCurrentSegment } = startSegmentCollection(CONTEXT) lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) expect(sendCurrentSegment().creation_reason).toBe('before_unload') }) - it('renews segment on view change', () => { + it('flushes segment on view change', () => { const { lifeCycle, sendCurrentSegment } = startSegmentCollection(CONTEXT) lifeCycle.notify(LifeCycleEventType.VIEW_CREATED, {} as any) expect(sendCurrentSegment().creation_reason).toBe('view_change') }) - it('renews segment on session renew', () => { + it('flushes segment on session renew', () => { const { lifeCycle, sendCurrentSegment } = startSegmentCollection(CONTEXT) lifeCycle.notify(LifeCycleEventType.SESSION_RENEWED) expect(sendCurrentSegment().creation_reason).toBe('session_renewed') }) - it('renews segment when the page become hidden', () => { + it('flushes segment when the page become hidden', () => { setPageVisibility('hidden') const { eventEmitter, sendCurrentSegment } = startSegmentCollection(CONTEXT) eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE)) expect(sendCurrentSegment().creation_reason).toBe('visibility_change') }) - it('does not renew segment when the page become visible', () => { + it('does not flush segment when the page become visible', () => { setPageVisibility('visible') const { eventEmitter, segmentCompleteSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT) eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE)) @@ -109,7 +109,7 @@ describe('startSegmentCollection', () => { expect(sendCurrentSegment().creation_reason).not.toBe('visibility_change') }) - it('renews segment when the current segment deflate size reaches SEND_BEACON_BYTE_LENGTH_LIMIT', () => { + it('flushes segment when the current segment deflate size reaches SEND_BEACON_BYTE_LENGTH_LIMIT', () => { const { worker, addRecord, sendCurrentSegment } = startSegmentCollection(CONTEXT) worker.deflatedSize = SEND_BEACON_BYTE_LENGTH_LIMIT addRecord(RECORD) @@ -118,7 +118,7 @@ describe('startSegmentCollection', () => { expect(sendCurrentSegment().creation_reason).toBe('max_size') }) - it('renews a segment after MAX_SEGMENT_DURATION', () => { + it('flushes a segment after MAX_SEGMENT_DURATION', () => { jasmine.clock().install() const { segmentCompleteSpy, sendCurrentSegment, addRecord } = startSegmentCollection(CONTEXT) addRecord(RECORD) @@ -128,7 +128,7 @@ describe('startSegmentCollection', () => { expect(sendCurrentSegment().creation_reason).toBe('max_duration') }) - it('does not renew a segment after MAX_SEGMENT_DURATION if a segment has been created in the meantime', () => { + it('does not flush a segment after MAX_SEGMENT_DURATION if a segment has been created in the meantime', () => { jasmine.clock().install() const { lifeCycle, segmentCompleteSpy, sendCurrentSegment, addRecord } = startSegmentCollection(CONTEXT) addRecord(RECORD) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index 9d449ae292..7e91d5f2eb 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -15,8 +15,8 @@ export const MAX_SEGMENT_DURATION = 30_000 // concatenating multiple segments together. 
Their approximative size limits how often they are // created have an impact on the replay. // -// When the recording starts, a segment is initially created. The segment is renewed (finalized, -// sent and replaced by a new one) based on various events (non-exhaustive list): +// When the recording starts, a segment is initially created. The segment is flushed (finalized and +// sent) based on various events (non-exhaustive list): // // * the page visibility change or becomes to unload // * the segment duration reaches a limit @@ -63,7 +63,7 @@ export function doStartSegmentCollection( worker, (size) => { if (size > SEND_BEACON_BYTE_LENGTH_LIMIT) { - renewSegment('max_size') + flushSegment('max_size') } }, (data, meta) => { @@ -71,34 +71,34 @@ export function doStartSegmentCollection( } ) - // Renew when the RUM view changes + // Flush when the RUM view changes const { unsubscribe: unsubscribeViewCreated } = lifeCycle.subscribe(LifeCycleEventType.VIEW_CREATED, () => { - renewSegment('view_change') + flushSegment('view_change') }) - // Renew when the session is renewed + // Flush when the session is renewed const { unsubscribe: unsubscribeSessionRenewed } = lifeCycle.subscribe(LifeCycleEventType.SESSION_RENEWED, () => { - renewSegment('session_renewed') + flushSegment('session_renewed') }) - // Renew when leaving the page + // Flush when leaving the page const { unsubscribe: unsubscribeBeforeUnload } = lifeCycle.subscribe(LifeCycleEventType.BEFORE_UNLOAD, () => { - renewSegment('before_unload') + flushSegment('before_unload') }) - // Renew when visibility changes + // Flush when visibility changes const { stop: unsubscribeVisibilityChange } = addEventListener( emitter, DOM_EVENT.VISIBILITY_CHANGE, () => { if (document.visibilityState === 'hidden') { - renewSegment('visibility_change') + flushSegment('visibility_change') } }, { capture: true } ) - function renewSegment(creationReason: CreationReason) { + function flushSegment(creationReason: CreationReason) { if (currentSegment) { currentSegment.complete() currentSegment = undefined @@ -120,7 +120,7 @@ export function doStartSegmentCollection( // Replace the newly created segment after MAX_SEGMENT_DURATION currentSegmentExpirationTimeoutId = setTimeout( monitor(() => { - renewSegment('max_duration') + flushSegment('max_duration') }), MAX_SEGMENT_DURATION ) From a68342412e8f0de580a891c226f9dd658fb27a92 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Thu, 14 Jan 2021 17:04:14 +0100 Subject: [PATCH 32/43] =?UTF-8?q?=F0=9F=91=8C=20reword=20'complete'=20to?= =?UTF-8?q?=20'flush'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/domain/deflateSegmentWriter.spec.ts | 22 ++++++------- .../src/domain/deflateSegmentWriter.ts | 8 ++--- .../src/domain/deflateWorker.d.ts | 2 +- .../rum-recorder/src/domain/deflateWorker.js | 2 +- .../src/domain/deflateWorker.spec.ts | 16 +++++----- .../rum-recorder/src/domain/segment.spec.ts | 32 +++++++++---------- packages/rum-recorder/src/domain/segment.ts | 6 ++-- .../src/domain/segmentCollection.spec.ts | 30 ++++++++--------- .../src/domain/segmentCollection.ts | 2 +- packages/rum-recorder/test/utils.ts | 2 +- 10 files changed, 61 insertions(+), 61 deletions(-) diff --git a/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts b/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts index 7a9c930e1c..3fef166f74 100644 --- a/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts +++ 
b/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts @@ -19,23 +19,23 @@ describe('DeflateWriter', () => { expect(onWroteSpy.calls.allArgs()).toEqual([[3]]) }) - it('calls the onCompleted callback when data is complete', () => { - const onCompletedSpy = jasmine.createSpy<(data: Uint8Array, meta: SegmentMeta) => void>() - const writer = new DeflateSegmentWriter(worker, noop, onCompletedSpy) + it('calls the onFlushed callback when data is flush', () => { + const onFlushedSpy = jasmine.createSpy<(data: Uint8Array, meta: SegmentMeta) => void>() + const writer = new DeflateSegmentWriter(worker, noop, onFlushedSpy) const meta: SegmentMeta = { start: 12 } as any - writer.complete(undefined, meta) + writer.flush(undefined, meta) worker.process() - expect(onCompletedSpy.calls.allArgs()).toEqual([[jasmine.any(Uint8Array), meta]]) + expect(onFlushedSpy.calls.allArgs()).toEqual([[jasmine.any(Uint8Array), meta]]) }) - it('calls the onCompleted callback with the correct meta even if a previous action failed somehow', () => { - const onCompletedSpy = jasmine.createSpy<(data: Uint8Array, meta: SegmentMeta) => void>() - const writer = new DeflateSegmentWriter(worker, noop, onCompletedSpy) + it('calls the onFlushed callback with the correct meta even if a previous action failed somehow', () => { + const onFlushedSpy = jasmine.createSpy<(data: Uint8Array, meta: SegmentMeta) => void>() + const writer = new DeflateSegmentWriter(worker, noop, onFlushedSpy) const meta1: SegmentMeta = { start: 12 } as any const meta2: SegmentMeta = { start: 13 } as any - writer.complete(undefined, meta1) - writer.complete(undefined, meta2) + writer.flush(undefined, meta1) + writer.flush(undefined, meta2) worker.process(0) - expect(onCompletedSpy.calls.allArgs()).toEqual([[jasmine.any(Uint8Array), meta2]]) + expect(onFlushedSpy.calls.allArgs()).toEqual([[jasmine.any(Uint8Array), meta2]]) }) }) diff --git a/packages/rum-recorder/src/domain/deflateSegmentWriter.ts b/packages/rum-recorder/src/domain/deflateSegmentWriter.ts index 51995dac59..8d58703833 100644 --- a/packages/rum-recorder/src/domain/deflateSegmentWriter.ts +++ b/packages/rum-recorder/src/domain/deflateSegmentWriter.ts @@ -9,7 +9,7 @@ export class DeflateSegmentWriter implements SegmentWriter { constructor( private worker: DeflateWorker, private onWrote: (size: number) => void, - private onCompleted: (data: Uint8Array, meta: SegmentMeta) => void + private onFlushed: (data: Uint8Array, meta: SegmentMeta) => void ) { worker.addEventListener('message', ({ data }) => { if ('result' in data) { @@ -17,7 +17,7 @@ export class DeflateSegmentWriter implements SegmentWriter { do { pendingMeta = this.pendingMeta.shift()! 
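          // metas with an id lower than this response id belong to earlier 'flush' actions that
          // never produced a result; they are skipped so onFlushed receives the meta matching
          // this response (the "previous action failed" spec above exercises this case)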
} while (pendingMeta.id < data.id) - this.onCompleted(data.result, pendingMeta.meta) + this.onFlushed(data.result, pendingMeta.meta) } else { this.onWrote(data.size) } @@ -29,8 +29,8 @@ export class DeflateSegmentWriter implements SegmentWriter { this.nextId += 1 } - complete(data: string | undefined, meta: SegmentMeta): void { - this.worker.postMessage({ data, id: this.nextId, action: 'complete' }) + flush(data: string | undefined, meta: SegmentMeta): void { + this.worker.postMessage({ data, id: this.nextId, action: 'flush' }) this.pendingMeta.push({ meta, id: this.nextId }) this.nextId += 1 } diff --git a/packages/rum-recorder/src/domain/deflateWorker.d.ts b/packages/rum-recorder/src/domain/deflateWorker.d.ts index ef4ffea322..f08d989910 100644 --- a/packages/rum-recorder/src/domain/deflateWorker.d.ts +++ b/packages/rum-recorder/src/domain/deflateWorker.d.ts @@ -17,7 +17,7 @@ export type DeflateWorkerAction = } | { id: number - action: 'complete' + action: 'flush' data?: string } diff --git a/packages/rum-recorder/src/domain/deflateWorker.js b/packages/rum-recorder/src/domain/deflateWorker.js index 68c1e6cf75..df6b34b64c 100644 --- a/packages/rum-recorder/src/domain/deflateWorker.js +++ b/packages/rum-recorder/src/domain/deflateWorker.js @@ -22,7 +22,7 @@ function workerCodeFn() { size: deflate.chunks.reduce((total, chunk) => total + chunk.length, 0), }) break - case 'complete': + case 'flush': if (data.data) { deflate.push(data.data, constants.Z_SYNC_FLUSH) } diff --git a/packages/rum-recorder/src/domain/deflateWorker.spec.ts b/packages/rum-recorder/src/domain/deflateWorker.spec.ts index 028b8e3bcb..bb7094c506 100644 --- a/packages/rum-recorder/src/domain/deflateWorker.spec.ts +++ b/packages/rum-recorder/src/domain/deflateWorker.spec.ts @@ -29,10 +29,10 @@ describe('deflateWorker', () => { done() }) deflateWorker.postMessage({ id: 0, action: 'write', data: 'foo' }) - deflateWorker.postMessage({ id: 1, action: 'complete' }) + deflateWorker.postMessage({ id: 1, action: 'flush' }) }) - it('writes the remaining data specified by "complete"', (done) => { + it('writes the remaining data specified by "flush"', (done) => { const deflateWorker = createDeflateWorker() listen(deflateWorker, 1, (events) => { expect(events).toEqual([ @@ -43,10 +43,10 @@ describe('deflateWorker', () => { ]) done() }) - deflateWorker.postMessage({ id: 0, action: 'complete', data: 'foo' }) + deflateWorker.postMessage({ id: 0, action: 'flush', data: 'foo' }) }) - it('completes several deflates one after the other', (done) => { + it('flushes several deflates one after the other', (done) => { const deflateWorker = createDeflateWorker() listen(deflateWorker, 4, (events) => { expect(events).toEqual([ @@ -70,22 +70,22 @@ describe('deflateWorker', () => { done() }) deflateWorker.postMessage({ id: 0, action: 'write', data: 'foo' }) - deflateWorker.postMessage({ id: 1, action: 'complete' }) + deflateWorker.postMessage({ id: 1, action: 'flush' }) deflateWorker.postMessage({ id: 2, action: 'write', data: 'bar' }) - deflateWorker.postMessage({ id: 3, action: 'complete' }) + deflateWorker.postMessage({ id: 3, action: 'flush' }) }) function listen( deflateWorker: DeflateWorker, expectedResponseCount: number, - onComplete: (responses: DeflateWorkerResponse[]) => void + onDone: (responses: DeflateWorkerResponse[]) => void ) { const responses: DeflateWorkerResponse[] = [] const listener = (event: { data: DeflateWorkerResponse }) => { const responsesCount = responses.push(event.data) if (responsesCount === expectedResponseCount) { 
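        // all expected responses have arrived: detach the listener before reporting them, so
        // any further worker messages don't re-invoke the callback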
deflateWorker.removeEventListener('message', listener) - onComplete(responses) + onDone(responses) } } deflateWorker.addEventListener('message', listener) diff --git a/packages/rum-recorder/src/domain/segment.spec.ts b/packages/rum-recorder/src/domain/segment.spec.ts index 044372faa4..d062de55ad 100644 --- a/packages/rum-recorder/src/domain/segment.spec.ts +++ b/packages/rum-recorder/src/domain/segment.spec.ts @@ -13,10 +13,10 @@ describe('Segment', () => { const writer = new StringWriter() const segment = new Segment(writer, CONTEXT, 'init', LOAD_RECORD) expect(writer.output).toEqual('{"records":[{"type":1,"timestamp":10,"data":{}}') - expect(writer.completed).toEqual([]) - segment.complete() + expect(writer.flushed).toEqual([]) + segment.flush() - expect(writer.completed).toEqual([ + expect(writer.flushed).toEqual([ { meta: { creation_reason: 'init' as const, @@ -49,8 +49,8 @@ describe('Segment', () => { const writer = new StringWriter() const segment = new Segment(writer, CONTEXT, 'init', LOAD_RECORD) segment.addRecord({ type: RecordType.DomContentLoaded, timestamp: 15, data: {} }) - segment.complete() - expect(writer.completed[0].meta).toEqual({ + segment.flush() + expect(writer.flushed[0].meta).toEqual({ creation_reason: 'init', end: 15, has_full_snapshot: false, @@ -63,16 +63,16 @@ describe('Segment', () => { it("doesn't set has_full_snapshot to true if a FullSnapshot is the initial record", () => { const writer = new StringWriter() const segment = new Segment(writer, CONTEXT, 'init', FULLSNAPSHOT_RECORD) - segment.complete() - expect(writer.completed[0].meta.has_full_snapshot).toEqual(false) + segment.flush() + expect(writer.flushed[0].meta.has_full_snapshot).toEqual(false) }) it("doesn't set has_full_snapshot to true if a FullSnapshot is not directly preceded by a Meta record", () => { const writer = new StringWriter() const segment = new Segment(writer, CONTEXT, 'init', LOAD_RECORD) segment.addRecord(FULLSNAPSHOT_RECORD) - segment.complete() - expect(writer.completed[0].meta.has_full_snapshot).toEqual(false) + segment.flush() + expect(writer.flushed[0].meta.has_full_snapshot).toEqual(false) }) it('sets has_full_snapshot to true if a FullSnapshot is preceded by a Meta record', () => { @@ -80,8 +80,8 @@ describe('Segment', () => { const segment = new Segment(writer, CONTEXT, 'init', LOAD_RECORD) segment.addRecord(META_RECORD) segment.addRecord(FULLSNAPSHOT_RECORD) - segment.complete() - expect(writer.completed[0].meta.has_full_snapshot).toEqual(true) + segment.flush() + expect(writer.flushed[0].meta.has_full_snapshot).toEqual(true) }) it("doesn't overrides has_full_snapshot to false once it has been set to true", () => { @@ -90,19 +90,19 @@ describe('Segment', () => { segment.addRecord(META_RECORD) segment.addRecord(FULLSNAPSHOT_RECORD) segment.addRecord(DOM_CONTENT_LOADED_RECORD) - segment.complete() - expect(writer.completed[0].meta.has_full_snapshot).toEqual(true) + segment.flush() + expect(writer.flushed[0].meta.has_full_snapshot).toEqual(true) }) }) class StringWriter implements SegmentWriter { output = '' - completed: Array<{ meta: SegmentMeta; segment: SegmentMeta & { records: Record[] } }> = [] + flushed: Array<{ meta: SegmentMeta; segment: SegmentMeta & { records: Record[] } }> = [] write(data: string) { this.output += data } - complete(data: string, meta: SegmentMeta) { - this.completed.push({ meta, segment: JSON.parse(this.output + data) as any }) + flush(data: string, meta: SegmentMeta) { + this.flushed.push({ meta, segment: JSON.parse(this.output + data) as any }) 
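    // resetting the buffer lets the same StringWriter accumulate the next segment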
this.output = '' } } diff --git a/packages/rum-recorder/src/domain/segment.ts b/packages/rum-recorder/src/domain/segment.ts index 0c775a416a..d9a4cc808f 100644 --- a/packages/rum-recorder/src/domain/segment.ts +++ b/packages/rum-recorder/src/domain/segment.ts @@ -2,7 +2,7 @@ import { CreationReason, Record, RecordType, SegmentContext, SegmentMeta } from export interface SegmentWriter { write(data: string): void - complete(data: string, meta: SegmentMeta): void + flush(data: string, meta: SegmentMeta): void } export class Segment { @@ -39,7 +39,7 @@ export class Segment { this.writer.write(`,${JSON.stringify(record)}`) } - complete() { + flush() { const meta: SegmentMeta = { creation_reason: this.creationReason, end: this.end, @@ -48,6 +48,6 @@ export class Segment { start: this.start, ...this.context, } - this.writer.complete(`],${JSON.stringify(meta).slice(1)}\n`, meta) + this.writer.flush(`],${JSON.stringify(meta).slice(1)}\n`, meta) } } diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index 572fa360b2..f111041ba1 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -23,12 +23,12 @@ describe('startSegmentCollection', () => { const { stop, addRecord } = doStartSegmentCollection(lifeCycle, () => context, sendSpy, worker, eventEmitter) stopErrorCollection = stop - const segmentCompleteSpy = spyOn(Segment.prototype, 'complete').and.callThrough() + const segmentFlushSpy = spyOn(Segment.prototype, 'flush').and.callThrough() return { addRecord, eventEmitter, lifeCycle, - segmentCompleteSpy, + segmentFlushSpy, worker, sendCurrentSegment() { // Make sure the segment is not empty @@ -47,28 +47,28 @@ describe('startSegmentCollection', () => { }) it('immediately starts a new segment', () => { - const { addRecord, worker, segmentCompleteSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT) + const { addRecord, worker, segmentFlushSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT) expect(worker.pendingData).toBe('') addRecord(RECORD) expect(worker.pendingData).toBe('{"records":[{"type":1,"timestamp":10,"data":{}}') - expect(segmentCompleteSpy).not.toHaveBeenCalled() + expect(segmentFlushSpy).not.toHaveBeenCalled() expect(sendCurrentSegment().creation_reason).toBe('init') }) - it('completes a segment when flushing it', () => { - const { lifeCycle, segmentCompleteSpy, addRecord } = startSegmentCollection(CONTEXT) + it('flushes a segment', () => { + const { lifeCycle, segmentFlushSpy, addRecord } = startSegmentCollection(CONTEXT) addRecord(RECORD) lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) - expect(segmentCompleteSpy).toHaveBeenCalledTimes(1) + expect(segmentFlushSpy).toHaveBeenCalledTimes(1) }) it("ignores calls to addRecord if context can't be get", () => { - const { worker, lifeCycle, addRecord, segmentCompleteSpy } = startSegmentCollection(undefined) + const { worker, lifeCycle, addRecord, segmentFlushSpy } = startSegmentCollection(undefined) addRecord(RECORD) lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) expect(worker.pendingData).toBe('') - expect(segmentCompleteSpy).not.toHaveBeenCalled() + expect(segmentFlushSpy).not.toHaveBeenCalled() }) describe('segment flush strategy', () => { @@ -103,9 +103,9 @@ describe('startSegmentCollection', () => { it('does not flush segment when the page become visible', () => { setPageVisibility('visible') - const { eventEmitter, segmentCompleteSpy, sendCurrentSegment } = 
startSegmentCollection(CONTEXT) + const { eventEmitter, segmentFlushSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT) eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE)) - expect(segmentCompleteSpy).not.toHaveBeenCalled() + expect(segmentFlushSpy).not.toHaveBeenCalled() expect(sendCurrentSegment().creation_reason).not.toBe('visibility_change') }) @@ -120,24 +120,24 @@ describe('startSegmentCollection', () => { it('flushes a segment after MAX_SEGMENT_DURATION', () => { jasmine.clock().install() - const { segmentCompleteSpy, sendCurrentSegment, addRecord } = startSegmentCollection(CONTEXT) + const { segmentFlushSpy, sendCurrentSegment, addRecord } = startSegmentCollection(CONTEXT) addRecord(RECORD) jasmine.clock().tick(MAX_SEGMENT_DURATION) - expect(segmentCompleteSpy).toHaveBeenCalledTimes(1) + expect(segmentFlushSpy).toHaveBeenCalledTimes(1) expect(sendCurrentSegment().creation_reason).toBe('max_duration') }) it('does not flush a segment after MAX_SEGMENT_DURATION if a segment has been created in the meantime', () => { jasmine.clock().install() - const { lifeCycle, segmentCompleteSpy, sendCurrentSegment, addRecord } = startSegmentCollection(CONTEXT) + const { lifeCycle, segmentFlushSpy, sendCurrentSegment, addRecord } = startSegmentCollection(CONTEXT) addRecord(RECORD) jasmine.clock().tick(BEFORE_MAX_SEGMENT_DURATION) lifeCycle.notify(LifeCycleEventType.BEFORE_UNLOAD) addRecord(RECORD) jasmine.clock().tick(BEFORE_MAX_SEGMENT_DURATION) - expect(segmentCompleteSpy).toHaveBeenCalledTimes(1) + expect(segmentFlushSpy).toHaveBeenCalledTimes(1) expect(sendCurrentSegment().creation_reason).not.toBe('max_duration') }) }) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index 7e91d5f2eb..7f449199c9 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -100,7 +100,7 @@ export function doStartSegmentCollection( function flushSegment(creationReason: CreationReason) { if (currentSegment) { - currentSegment.complete() + currentSegment.flush() currentSegment = undefined clearTimeout(currentSegmentExpirationTimeoutId) } diff --git a/packages/rum-recorder/test/utils.ts b/packages/rum-recorder/test/utils.ts index ffece77ffe..4100c726a0 100644 --- a/packages/rum-recorder/test/utils.ts +++ b/packages/rum-recorder/test/utils.ts @@ -39,7 +39,7 @@ export class MockWorker implements DeflateWorker { this.deflatedSize += message.data.length this.listener({ data: { id: message.id, size: this.deflatedSize } }) break - case 'complete': + case 'flush': if (message.data) { this.deflatedSize += message.data.length } From ff0524daf8e110f93c2fc56974b767a842557dff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Thu, 14 Jan 2021 17:13:17 +0100 Subject: [PATCH 33/43] =?UTF-8?q?=F0=9F=93=9D=F0=9F=91=8C=20improve=20segm?= =?UTF-8?q?entCollection=20doc?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/rum-recorder/src/domain/segmentCollection.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index 7f449199c9..63d63b1bfd 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -12,8 +12,9 @@ export const MAX_SEGMENT_DURATION = 30_000 // for indexing or UI needs, and a list 
of records (RRWeb 'events', renamed to avoid confusing // namings). They are stored without any processing from the intake, and fetched one after the // other while a session is being replayed. Their encoding (deflate) are carefully crafted to allow -// concatenating multiple segments together. Their approximative size limits how often they are -// created have an impact on the replay. +// concatenating multiple segments together. Segments have a size overhead (meta), so our goal is to +// build segments containing as many records as possible while complying with the various flush +// strategies to guarantee good replay quality. // // When the recording starts, a segment is initially created. The segment is flushed (finalized and // sent) based on various events (non-exhaustive list): From 58af7185bb114de16a673f4610ae2d2f9102099f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Thu, 14 Jan 2021 17:37:36 +0100 Subject: [PATCH 34/43] =?UTF-8?q?=F0=9F=91=8C=20add=20doc=20and=20monitor?= =?UTF-8?q?=20log=20on=20lost=20worker=20response?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/core/src/index.ts | 1 + .../src/domain/deflateSegmentWriter.spec.ts | 9 ++++++++- .../src/domain/deflateSegmentWriter.ts | 20 +++++++++++++++---- 3 files changed, 25 insertions(+), 5 deletions(-) diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 41d51fe629..73431554ef 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -17,6 +17,7 @@ export { monitored, monitor, addMonitoringMessage, + setDebugMode, } from './domain/internalMonitoring' export { Observable } from './tools/observable' export { diff --git a/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts b/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts index 3fef166f74..42bfe342a1 100644 --- a/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts +++ b/packages/rum-recorder/src/domain/deflateSegmentWriter.spec.ts @@ -1,4 +1,4 @@ -import { noop } from '@datadog/browser-core' +import { noop, setDebugMode } from '@datadog/browser-core' import { MockWorker } from '../../test/utils' import { SegmentMeta } from '../types' @@ -9,6 +9,11 @@ describe('DeflateWriter', () => { beforeEach(() => { worker = new MockWorker() + setDebugMode(true) + }) + + afterEach(() => { + setDebugMode(false) }) it('calls the onWrote callback when data is written', () => { @@ -29,6 +34,7 @@ describe('DeflateWriter', () => { }) it('calls the onFlushed callback with the correct meta even if a previous action failed somehow', () => { + const consoleSpy = spyOn(console, 'log') const onFlushedSpy = jasmine.createSpy<(data: Uint8Array, meta: SegmentMeta) => void>() const writer = new DeflateSegmentWriter(worker, noop, onFlushedSpy) const meta1: SegmentMeta = { start: 12 } as any @@ -37,5 +43,6 @@ writer.flush(undefined, meta2) worker.process(0) expect(onFlushedSpy.calls.allArgs()).toEqual([[jasmine.any(Uint8Array), meta2]]) + expect(consoleSpy).toHaveBeenCalledWith('[MONITORING MESSAGE]', '1 deflate worker responses have been lost') }) }) diff --git a/packages/rum-recorder/src/domain/deflateSegmentWriter.ts b/packages/rum-recorder/src/domain/deflateSegmentWriter.ts index 8d58703833..bfd5b9f36b 100644 --- a/packages/rum-recorder/src/domain/deflateSegmentWriter.ts +++ b/packages/rum-recorder/src/domain/deflateSegmentWriter.ts @@ -1,3 +1,4 @@ +import { addMonitoringMessage } from '@datadog/browser-core'
import { SegmentMeta } from '../types' import { DeflateWorker } from './deflateWorker' import { SegmentWriter } from './segment' @@ -13,10 +14,21 @@ export class DeflateSegmentWriter implements SegmentWriter { ) { worker.addEventListener('message', ({ data }) => { if ('result' in data) { - let pendingMeta - do { - pendingMeta = this.pendingMeta.shift()! - } while (pendingMeta.id < data.id) + let pendingMeta = this.pendingMeta.shift()! + + // Messages should be received in the same order as they are sent, so the first + // 'pendingMeta' in the list should be the one corresponding to the handled message. + // But if something goes wrong in the worker and a response is lost, we need to avoid + // associating an incorrect meta with the flushed segment. Remove any pending meta with an id + // lower than the one being waited for. + if (pendingMeta.id !== data.id) { + let lostCount = 0 + while (pendingMeta.id !== data.id) { + pendingMeta = this.pendingMeta.shift()! + lostCount += 1 + } + addMonitoringMessage(`${lostCount} deflate worker responses have been lost`) + } this.onFlushed(data.result, pendingMeta.meta) } else { this.onWrote(data.size) From 6e6e3f5ed9cb22a40985a56f944c04559240dd54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Tue, 19 Jan 2021 11:41:06 +0100 Subject: [PATCH 35/43] =?UTF-8?q?=F0=9F=91=8C=20add=20window.setTimeout=20?= =?UTF-8?q?to=20simplify=20typing?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/rum-recorder/src/domain/segmentCollection.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index 63d63b1bfd..6a554b3e71 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -57,7 +57,7 @@ export function doStartSegmentCollection( emitter: EventEmitter = window ) { let currentSegment: Segment | undefined - let currentSegmentExpirationTimeoutId: ReturnType<typeof setTimeout> + let currentSegmentExpirationTimeoutId: number let nextSegmentCreationReason: CreationReason = 'init' const writer = new DeflateSegmentWriter( @@ -119,7 +119,7 @@ export function doStartSegmentCollection( currentSegment = new Segment(writer, context, nextSegmentCreationReason, record) // Replace the newly created segment after MAX_SEGMENT_DURATION - currentSegmentExpirationTimeoutId = setTimeout( + currentSegmentExpirationTimeoutId = window.setTimeout( monitor(() => { flushSegment('max_duration') }), From 0e0b266fcdd923301210ee2bf1333069039b3004 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Tue, 19 Jan 2021 11:49:30 +0100 Subject: [PATCH 36/43] =?UTF-8?q?=E2=9C=85=F0=9F=91=8C=20move=20waitReques?= =?UTF-8?q?ts=20to=20the=20end=20of=20test=20cases?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../rum-recorder/src/boot/recorder.spec.ts | 69 +++++++++++-------- 1 file changed, 39 insertions(+), 30 deletions(-) diff --git a/packages/rum-recorder/src/boot/recorder.spec.ts b/packages/rum-recorder/src/boot/recorder.spec.ts index fa42270987..e997054f14 100644 --- a/packages/rum-recorder/src/boot/recorder.spec.ts +++ b/packages/rum-recorder/src/boot/recorder.spec.ts @@ -8,6 +8,7 @@ import { startRecording } from './recorder' describe('startRecording', () => { let setupBuilder: TestSetupBuilder let sessionId: string | undefined + let waitRequests: (callback: (requests:
ReadonlyArray<{ data: FormData; size: number }>) => void) => void beforeEach(() => { if (isIE()) { @@ -32,6 +33,27 @@ describe('startRecording', () => { .beforeBuild(({ lifeCycle, applicationId, configuration, parentContexts, session }) => { return startRecording(lifeCycle, applicationId, configuration, session, parentContexts) }) + + const requestSendSpy = spyOn(HttpRequest.prototype, 'send') + + waitRequests = (callback) => { + if (requestSendSpy.calls.first()) { + waitForLastRequest() + } else { + requestSendSpy.and.callFake(waitForLastRequest) + } + + let isWaiting = false + function waitForLastRequest() { + if (isWaiting) { + return + } + isWaiting = true + setTimeout(() => { + callback(requestSendSpy.calls.allArgs().map(([data, size]) => ({ size, data: data as FormData }))) + }, 300) + } + } }) afterEach(() => { @@ -39,6 +61,9 @@ describe('startRecording', () => { }) it('starts recording', (done) => { + const { lifeCycle } = setupBuilder.build() + flushSegment(lifeCycle) + waitRequests((requests) => { expect(requests).toEqual([{ data: jasmine.any(FormData), size: jasmine.any(Number) }]) expect(formDataAsObject(requests[0].data)).toEqual({ @@ -54,33 +79,24 @@ describe('startRecording', () => { }) done() }) - - const { lifeCycle } = setupBuilder.build() - flushSegment(lifeCycle) }) it('flushes the segment when its compressed data is getting too large', (done) => { - const clickCount = 10_000 - waitRequests((requests) => { - expect(requests.length).toBe(1) - expect(requests[0].data.get('records_count')).toBe(String(clickCount + 2)) - done() - }) - setupBuilder.build() + const clickCount = 10_000 const click = createNewEvent('click') for (let i = 0; i < clickCount; i += 1) { document.body.dispatchEvent(click) } - }) - it('stops sending new segment when the session is expired', (done) => { waitRequests((requests) => { expect(requests.length).toBe(1) - expect(requests[0].data.get('records_count')).toBe('3') + expect(requests[0].data.get('records_count')).toBe(String(clickCount + 2)) done() }) + }) + it('stops sending new segment when the session is expired', (done) => { const { lifeCycle } = setupBuilder.build() document.body.dispatchEvent(createNewEvent('click')) @@ -90,16 +106,15 @@ describe('startRecording', () => { document.body.dispatchEvent(createNewEvent('click')) flushSegment(lifeCycle) - }) - it('restarts sending segments when the session is renewed', (done) => { waitRequests((requests) => { expect(requests.length).toBe(1) - expect(requests[0].data.get('records_count')).toBe('1') - expect(requests[0].data.get('session.id')).toBe('new-session-id') + expect(requests[0].data.get('records_count')).toBe('3') done() }) + }) + it('restarts sending segments when the session is renewed', (done) => { sessionId = undefined const { lifeCycle } = setupBuilder.build() @@ -110,6 +125,13 @@ describe('startRecording', () => { document.body.dispatchEvent(createNewEvent('click')) flushSegment(lifeCycle) + + waitRequests((requests) => { + expect(requests.length).toBe(1) + expect(requests[0].data.get('records_count')).toBe('1') + expect(requests[0].data.get('session.id')).toBe('new-session-id') + done() + }) }) }) @@ -124,16 +146,3 @@ function formDataAsObject(data: FormData) { }) return result } - -function waitRequests(callback: (requests: Array<{ data: FormData; size: number }>) => void) { - const requests: Array<{ data: FormData; size: number }> = [] - let isWaiting = false - spyOn(HttpRequest.prototype, 'send').and.callFake((data: FormData, size) => { - requests.push({ data, size }) - if 
(!isWaiting) { - isWaiting = true - // Delay the callback, so it is called only after the last request being sent - setTimeout(() => callback(requests), 300) - } - }) -} From 3425b84a9103fc63e9a654c05abd3b83d8af6c01 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Tue, 19 Jan 2021 11:54:09 +0100 Subject: [PATCH 37/43] =?UTF-8?q?=F0=9F=91=8C=20rename=20'doGetSegmentCont?= =?UTF-8?q?ext'=20to=20'computeSegmentContext'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/domain/segmentCollection.spec.ts | 12 ++++++------ .../rum-recorder/src/domain/segmentCollection.ts | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index f111041ba1..5aa072a47e 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -2,7 +2,7 @@ import { createNewEvent, DOM_EVENT, restorePageVisibility, setPageVisibility } f import { LifeCycle, LifeCycleEventType, ParentContexts, RumSession, ViewContext } from '@datadog/browser-rum-core' import { Record, RecordType, SegmentContext, SegmentMeta } from '../types' import { Segment } from './segment' -import { doGetSegmentContext, doStartSegmentCollection, MAX_SEGMENT_DURATION } from './segmentCollection' +import { computeSegmentContext, doStartSegmentCollection, MAX_SEGMENT_DURATION } from './segmentCollection' import { MockWorker } from '../../test/utils' import { SEND_BEACON_BYTE_LENGTH_LIMIT } from '../transport/send' @@ -143,7 +143,7 @@ describe('startSegmentCollection', () => { }) }) -describe('getSegmentContext', () => { +describe('computeSegmentContext', () => { const DEFAULT_VIEW_CONTEXT: ViewContext = { session: { id: '456' }, view: { id: '123', url: 'http://foo.com', referrer: 'http://bar.com' }, @@ -156,7 +156,7 @@ describe('getSegmentContext', () => { } it('returns a segment context', () => { - expect(doGetSegmentContext('appid', DEFAULT_SESSION, mockParentContexts(DEFAULT_VIEW_CONTEXT))).toEqual({ + expect(computeSegmentContext('appid', DEFAULT_SESSION, mockParentContexts(DEFAULT_VIEW_CONTEXT))).toEqual({ application: { id: 'appid' }, session: { id: '456' }, view: { id: '123' }, @@ -164,12 +164,12 @@ describe('getSegmentContext', () => { }) it('returns undefined if there is no current view', () => { - expect(doGetSegmentContext('appid', DEFAULT_SESSION, mockParentContexts(undefined))).toBeUndefined() + expect(computeSegmentContext('appid', DEFAULT_SESSION, mockParentContexts(undefined))).toBeUndefined() }) it('returns undefined if there is no session id', () => { expect( - doGetSegmentContext( + computeSegmentContext( 'appid', DEFAULT_SESSION, mockParentContexts({ @@ -182,7 +182,7 @@ describe('getSegmentContext', () => { it('returns undefined if the session is not tracked', () => { expect( - doGetSegmentContext( + computeSegmentContext( 'appid', { ...DEFAULT_SESSION, diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index 6a554b3e71..b33dad9427 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -139,7 +139,7 @@ export function doStartSegmentCollection( } } -export function doGetSegmentContext(applicationId: string, session: RumSession, parentContexts: ParentContexts) { +export function computeSegmentContext(applicationId: 
string, session: RumSession, parentContexts: ParentContexts) { if (!session.isTracked()) { return undefined } From ae218f763e0b66feed8969974a7c61fc673416df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Tue, 19 Jan 2021 11:56:49 +0100 Subject: [PATCH 38/43] =?UTF-8?q?=E2=9C=85=F0=9F=91=8C=20reword=20startRec?= =?UTF-8?q?ording=20first=20test=20case?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/rum-recorder/src/boot/recorder.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/rum-recorder/src/boot/recorder.spec.ts b/packages/rum-recorder/src/boot/recorder.spec.ts index e997054f14..6640d8091d 100644 --- a/packages/rum-recorder/src/boot/recorder.spec.ts +++ b/packages/rum-recorder/src/boot/recorder.spec.ts @@ -60,7 +60,7 @@ describe('startRecording', () => { setupBuilder.cleanup() }) - it('starts recording', (done) => { + it('sends recorded segments with valid context', (done) => { const { lifeCycle } = setupBuilder.build() flushSegment(lifeCycle) From a0b86c3d26934628d9239b61b17c0d3edac8e1c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Tue, 19 Jan 2021 11:59:52 +0100 Subject: [PATCH 39/43] =?UTF-8?q?=F0=9F=91=8C=20add=20pako=20to=20thirdpar?= =?UTF-8?q?ty=20licenses?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- LICENSE-3rdparty.csv | 1 + 1 file changed, 1 insertion(+) diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 71b54c7bbd..41626ecab9 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -1,5 +1,6 @@ Component,Origin,License,Copyright require,tslib,Apache-2.0,Copyright Microsoft Corporation +file,pako,MIT,(C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin file,rrweb,MIT,Copyright (c) 2018 Contributors (https://github.com/rrweb-io/rrweb/graphs/contributors) and SmartX Inc. file,tracekit,MIT,Copyright 2013 Onur Can Cakmak and all TraceKit contributors prod,rrweb-snapshot,MIT,Copyright (c) 2018 Contributors (https://github.com/rrweb-io/rrweb-snapshot/graphs/contributors) and SmartX Inc. 
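Patch 37 above shows only the signature and the initial session.isTracked() guard of computeSegmentContext, but the rest of its expected behaviour can be read off the assertions in segmentCollection.spec.ts. The snippet below is a minimal sketch of a compatible implementation, not necessarily the shipped code; it assumes ParentContexts exposes a findView() accessor (which is what mockParentContexts stubs in the spec) and relies only on the return shapes the tests assert:

export function computeSegmentContext(applicationId: string, session: RumSession, parentContexts: ParentContexts) {
  // Untracked sessions never produce segments
  if (!session.isTracked()) {
    return undefined
  }
  // Assumed accessor: the spec stubs ParentContexts to return the current ViewContext
  const viewContext = parentContexts.findView()
  if (!viewContext || !viewContext.session.id) {
    return undefined
  }
  // Only the ids are kept: the spec asserts that url/referrer are dropped from the view context
  return {
    application: { id: applicationId },
    session: { id: viewContext.session.id },
    view: { id: viewContext.view.id },
  }
}

This covers the nominal case and the three 'returns undefined' cases tested above (no current view, no session id, untracked session).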
From dd557de4b5fc522ab8d7e5d281833746ad043dc6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Tue, 19 Jan 2021 12:01:09 +0100 Subject: [PATCH 40/43] =?UTF-8?q?=F0=9F=91=8C=20remove=20comments?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/rum-recorder/src/domain/segmentCollection.ts | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index b33dad9427..c968894986 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -43,7 +43,7 @@ export function startSegmentCollection( const worker = createDeflateWorker() return doStartSegmentCollection( lifeCycle, - () => doGetSegmentContext(applicationId, session, parentContexts), + () => computeSegmentContext(applicationId, session, parentContexts), send, worker ) @@ -72,22 +72,18 @@ export function doStartSegmentCollection( } ) - // Flush when the RUM view changes const { unsubscribe: unsubscribeViewCreated } = lifeCycle.subscribe(LifeCycleEventType.VIEW_CREATED, () => { flushSegment('view_change') }) - // Flush when the session is renewed const { unsubscribe: unsubscribeSessionRenewed } = lifeCycle.subscribe(LifeCycleEventType.SESSION_RENEWED, () => { flushSegment('session_renewed') }) - // Flush when leaving the page const { unsubscribe: unsubscribeBeforeUnload } = lifeCycle.subscribe(LifeCycleEventType.BEFORE_UNLOAD, () => { flushSegment('before_unload') }) - // Flush when visibility changes const { stop: unsubscribeVisibilityChange } = addEventListener( emitter, DOM_EVENT.VISIBILITY_CHANGE, @@ -118,7 +114,6 @@ export function doStartSegmentCollection( } currentSegment = new Segment(writer, context, nextSegmentCreationReason, record) - // Replace the newly created segment after MAX_SEGMENT_DURATION currentSegmentExpirationTimeoutId = window.setTimeout( monitor(() => { flushSegment('max_duration') }), From daf1360f150e7c973e13ed34611acbc4a21e53d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Tue, 19 Jan 2021 12:08:11 +0100 Subject: [PATCH 41/43] =?UTF-8?q?=F0=9F=91=8C=20rename=20visibility=5Fchan?= =?UTF-8?q?ge=20to=20visibility=5Fhidden?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/rum-recorder/src/domain/segmentCollection.spec.ts | 4 ++-- packages/rum-recorder/src/domain/segmentCollection.ts | 2 +- packages/rum-recorder/src/types.ts | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/rum-recorder/src/domain/segmentCollection.spec.ts b/packages/rum-recorder/src/domain/segmentCollection.spec.ts index 5aa072a47e..b2e99992d0 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.spec.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.spec.ts @@ -98,7 +98,7 @@ describe('startSegmentCollection', () => { setPageVisibility('hidden') const { eventEmitter, sendCurrentSegment } = startSegmentCollection(CONTEXT) eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE)) - expect(sendCurrentSegment().creation_reason).toBe('visibility_change') + expect(sendCurrentSegment().creation_reason).toBe('visibility_hidden') }) it('does not flush segment when the page become visible', () => { setPageVisibility('visible') const { eventEmitter, segmentFlushSpy, sendCurrentSegment } = startSegmentCollection(CONTEXT)
eventEmitter.dispatchEvent(createNewEvent(DOM_EVENT.VISIBILITY_CHANGE)) expect(segmentFlushSpy).not.toHaveBeenCalled() - expect(sendCurrentSegment().creation_reason).not.toBe('visibility_change') + expect(sendCurrentSegment().creation_reason).not.toBe('visibility_hidden') }) it('flushes segment when the current segment deflate size reaches SEND_BEACON_BYTE_LENGTH_LIMIT', () => { diff --git a/packages/rum-recorder/src/domain/segmentCollection.ts b/packages/rum-recorder/src/domain/segmentCollection.ts index c968894986..5418f49e9b 100644 --- a/packages/rum-recorder/src/domain/segmentCollection.ts +++ b/packages/rum-recorder/src/domain/segmentCollection.ts @@ -89,7 +89,7 @@ export function doStartSegmentCollection( DOM_EVENT.VISIBILITY_CHANGE, () => { if (document.visibilityState === 'hidden') { - flushSegment('visibility_change') + flushSegment('visibility_hidden') } }, { capture: true } diff --git a/packages/rum-recorder/src/types.ts b/packages/rum-recorder/src/types.ts index f06bb4c1b7..d5cd3d368d 100644 --- a/packages/rum-recorder/src/types.ts +++ b/packages/rum-recorder/src/types.ts @@ -42,4 +42,4 @@ export type CreationReason = | 'view_change' | 'session_renewed' | 'before_unload' - | 'visibility_change' + | 'visibility_hidden' From d67bb184bafc70dde768ea5ef41dadae521d9e0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Tue, 19 Jan 2021 16:05:07 +0100 Subject: [PATCH 42/43] =?UTF-8?q?=E2=9C=85=20try=20to=20make=20recorder=20?= =?UTF-8?q?tests=20less=20flaky?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../rum-recorder/src/boot/recorder.spec.ts | 33 +++++++++++-------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/packages/rum-recorder/src/boot/recorder.spec.ts b/packages/rum-recorder/src/boot/recorder.spec.ts index 6640d8091d..ee5a5158cd 100644 --- a/packages/rum-recorder/src/boot/recorder.spec.ts +++ b/packages/rum-recorder/src/boot/recorder.spec.ts @@ -37,14 +37,9 @@ describe('startRecording', () => { const requestSendSpy = spyOn(HttpRequest.prototype, 'send') waitRequests = (callback) => { - if (requestSendSpy.calls.first()) { - waitForLastRequest() - } else { - requestSendSpy.and.callFake(waitForLastRequest) - } - let isWaiting = false - function waitForLastRequest() { + + requestSendSpy.and.callFake(() => { if (isWaiting) { return } @@ -52,7 +47,7 @@ isWaiting = true setTimeout(() => { callback(requestSendSpy.calls.allArgs().map(([data, size]) => ({ size, data: data as FormData }))) }, 300) - } + }) } }) @@ -83,15 +78,19 @@ it('flushes the segment when its compressed data is getting too large', (done) => { setupBuilder.build() - const clickCount = 10_000 - const click = createNewEvent('click') - for (let i = 0; i < clickCount; i += 1) { - document.body.dispatchEvent(click) + const inputCount = 150 + const textField = document.createElement('input') + const inputEvent = createNewEvent('input', { target: textField }) + for (let i = 0; i < inputCount; i += 1) { + // Create a random value harder to deflate, so we don't have to send too many events to reach + // the limit.
+ textField.value = createRandomString(1000) + document.body.dispatchEvent(inputEvent) } waitRequests((requests) => { expect(requests.length).toBe(1) - expect(requests[0].data.get('records_count')).toBe(String(clickCount + 2)) + expect(requests[0].data.get('records_count')).toBe(String(inputCount + 2)) done() }) }) @@ -146,3 +145,11 @@ function formDataAsObject(data: FormData) { }) return result } + +function createRandomString(minLength: number) { + let result = '' + while (result.length < minLength) { + result += Math.random().toString(36) + } + return result +} From 0dbceac30bdad343aced91277a9792280ad55877 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Zugmeyer?= Date: Wed, 20 Jan 2021 11:44:19 +0100 Subject: [PATCH 43/43] =?UTF-8?q?=F0=9F=91=8C=E2=9C=85=20split=20waitReque?= =?UTF-8?q?sts=20and=20expectNoExtraRequest=20to=20be=20more=20explicit?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../rum-recorder/src/boot/recorder.spec.ts | 45 ++++++++++--------- 1 file changed, 24 insertions(+), 21 deletions(-) diff --git a/packages/rum-recorder/src/boot/recorder.spec.ts b/packages/rum-recorder/src/boot/recorder.spec.ts index ee5a5158cd..22a68dbc24 100644 --- a/packages/rum-recorder/src/boot/recorder.spec.ts +++ b/packages/rum-recorder/src/boot/recorder.spec.ts @@ -8,7 +8,11 @@ import { startRecording } from './recorder' describe('startRecording', () => { let setupBuilder: TestSetupBuilder let sessionId: string | undefined - let waitRequests: (callback: (requests: ReadonlyArray<{ data: FormData; size: number }>) => void) => void + let waitRequests: ( + expectedRequestCount: number, + callback: (requests: ReadonlyArray<{ data: FormData; size: number }>) => void + ) => void + let expectNoExtraRequest: (callback: () => void) => void beforeEach(() => { if (isIE()) { @@ -36,18 +40,20 @@ describe('startRecording', () => { const requestSendSpy = spyOn(HttpRequest.prototype, 'send') - waitRequests = (callback) => { - let isWaiting = false + waitRequests = (expectedRequestCount, callback) => { + const requests: Array<{ data: FormData; size: number }> = [] + requestSendSpy.and.callFake((data, size) => { + if (requests.push({ size, data: data as FormData }) === expectedRequestCount) { + callback(requests) + } + }) + } + expectNoExtraRequest = (done) => { requestSendSpy.and.callFake(() => { - if (isWaiting) { - return - } - isWaiting = true - setTimeout(() => { - callback(requestSendSpy.calls.allArgs().map(([data, size]) => ({ size, data: data as FormData }))) - }, 300) + fail('Unexpected request received') }) + setTimeout(done, 300) } }) @@ -59,7 +65,7 @@ describe('startRecording', () => { const { lifeCycle } = setupBuilder.build() flushSegment(lifeCycle) - waitRequests((requests) => { + waitRequests(1, (requests) => { expect(requests).toEqual([{ data: jasmine.any(FormData), size: jasmine.any(Number) }]) expect(formDataAsObject(requests[0].data)).toEqual({ 'application.id': 'appId', @@ -72,7 +78,7 @@ describe('startRecording', () => { start: jasmine.stringMatching(/^\d{13}$/), 'view.id': 'view-id', }) - done() + expectNoExtraRequest(done) }) }) @@ -88,10 +94,9 @@ describe('startRecording', () => { document.body.dispatchEvent(inputEvent) } - waitRequests((requests) => { - expect(requests.length).toBe(1) + waitRequests(1, (requests) => { expect(requests[0].data.get('records_count')).toBe(String(inputCount + 2)) - done() + expectNoExtraRequest(done) }) }) @@ -106,10 +111,9 @@ describe('startRecording', () => { flushSegment(lifeCycle) - 
waitRequests((requests) => { - expect(requests.length).toBe(1) + waitRequests(1, (requests) => { expect(requests[0].data.get('records_count')).toBe('3') - done() + expectNoExtraRequest(done) }) }) @@ -125,11 +129,10 @@ describe('startRecording', () => { flushSegment(lifeCycle) - waitRequests((requests) => { - expect(requests.length).toBe(1) + waitRequests(1, (requests) => { expect(requests[0].data.get('records_count')).toBe('1') expect(requests[0].data.get('session.id')).toBe('new-session-id') - done() + expectNoExtraRequest(done) }) }) })
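One last note on patch 34 (the lost-worker-response handling): the reconciliation between worker responses and queued segment metadata is easier to reason about in isolation. The sketch below restates that logic as a standalone helper, under the patch's own assumptions (responses come back in send order with increasing ids, and every handled response has a matching queued entry); the name takePendingMeta is hypothetical and introduced here purely for illustration:

// Pop entries from the pending queue until the one matching the worker response
// id is found. Entries skipped along the way correspond to lost worker responses;
// their count is returned so the caller can report it via addMonitoringMessage.
function takePendingMeta<T extends { id: number }>(pendingMeta: T[], responseId: number) {
  let entry = pendingMeta.shift()!
  let lostCount = 0
  while (entry.id !== responseId) {
    entry = pendingMeta.shift()!
    lostCount += 1
  }
  return { entry, lostCount }
}

With a queue holding entries with ids 0 and 1 and a response for id 1 only (the scenario of the 'previous action failed somehow' test), this yields lostCount = 1 and the entry for id 1, which is exactly why the spec expects the '1 deflate worker responses have been lost' monitoring message.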