[ML] Add support for gzip compressed streams.
Showing 7 changed files with 258 additions and 51 deletions.
42 changes: 42 additions & 0 deletions — x-pack/plugins/aiops/server/lib/accept_compression.test.ts
@@ -0,0 +1,42 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { acceptCompression } from './accept_compression';

describe('acceptCompression', () => {
  it('should return false for empty headers', () => {
    expect(acceptCompression({})).toBe(false);
  });
  it('should return false for other header containing gzip as string', () => {
    expect(acceptCompression({ 'other-header': 'gzip, other' })).toBe(false);
  });
  it('should return false for other header containing gzip as array', () => {
    expect(acceptCompression({ 'other-header': ['gzip', 'other'] })).toBe(false);
  });
  it('should return true for upper-case header containing gzip as string', () => {
    expect(acceptCompression({ 'Accept-Encoding': 'gzip, other' })).toBe(true);
  });
  it('should return true for lower-case header containing gzip as string', () => {
    expect(acceptCompression({ 'accept-encoding': 'gzip, other' })).toBe(true);
  });
  it('should return true for upper-case header containing gzip as array', () => {
    expect(acceptCompression({ 'Accept-Encoding': ['gzip', 'other'] })).toBe(true);
  });
  it('should return true for lower-case header containing gzip as array', () => {
    expect(acceptCompression({ 'accept-encoding': ['gzip', 'other'] })).toBe(true);
  });
  it('should return true for mixed headers containing gzip as string', () => {
    expect(
      acceptCompression({ 'accept-encoding': 'gzip, other', 'other-header': 'other-value' })
    ).toBe(true);
  });
  it('should return true for mixed headers containing gzip as array', () => {
    expect(
      acceptCompression({ 'accept-encoding': ['gzip', 'other'], 'other-header': 'other-value' })
    ).toBe(true);
  });
});
38 changes: 38 additions & 0 deletions — x-pack/plugins/aiops/server/lib/accept_compression.ts
@@ -0,0 +1,38 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import type { Headers } from '@kbn/core/server';

/**
 * Returns whether the given request headers accept a gzip compressed response.
 * The header name is matched case-insensitively; the value may be a string
 * or an array of strings.
 */
export function acceptCompression(headers: Headers) {
  let compressed = false;

  Object.keys(headers).forEach((key) => {
    if (key.toLocaleLowerCase() === 'accept-encoding') {
      const acceptEncoding = headers[key];

      function containsGzip(s: string) {
        return s
          .split(',')
          .map((d) => d.trim())
          .includes('gzip');
      }

      if (typeof acceptEncoding === 'string') {
        compressed = containsGzip(acceptEncoding);
      } else if (Array.isArray(acceptEncoding)) {
        for (const ae of acceptEncoding) {
          if (containsGzip(ae)) {
            compressed = true;
            break;
          }
        }
      }
    }
  });

  return compressed;
}
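
For illustration only (not part of this diff), a quick sketch of the helper's matching behavior; the header values below are made up:

import { acceptCompression } from './accept_compression';

// The header name is matched case-insensitively; values may be a string or an
// array of strings, and comma-separated values are split and trimmed.
acceptCompression({ 'Accept-Encoding': 'deflate, gzip' }); // true
acceptCompression({ 'accept-encoding': ['br', 'gzip'] }); // true
// Matching is against the exact token 'gzip', so quality-value syntax
// such as 'gzip;q=1.0' is not parsed and does not match:
acceptCompression({ 'accept-encoding': 'gzip;q=1.0' }); // false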
106 changes: 106 additions & 0 deletions — x-pack/plugins/aiops/server/lib/stream_factory.test.ts
@@ -0,0 +1,106 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import zlib from 'zlib';

import { loggerMock, MockedLogger } from '@kbn/logging-mocks';

import { API_ENDPOINT } from '../../common/api';
import type { ApiEndpointActions } from '../../common/api';

import { streamFactory } from './stream_factory';

type Action = ApiEndpointActions['/internal/aiops/explain_log_rate_spikes'];

const mockItem1: Action = {
  type: 'add_fields',
  payload: ['clientip'],
};
const mockItem2: Action = {
  type: 'add_fields',
  payload: ['referer'],
};

describe('streamFactory', () => {
  let mockLogger: MockedLogger;

  beforeEach(() => {
    mockLogger = loggerMock.create();
  });

  it('should encode and receive an uncompressed stream', async () => {
    const { DELIMITER, end, push, responseWithHeaders, stream } = streamFactory<
      typeof API_ENDPOINT.EXPLAIN_LOG_RATE_SPIKES
    >(mockLogger, {});

    push(mockItem1);
    push(mockItem2);
    end();

    let streamResult = '';
    for await (const chunk of stream) {
      streamResult += chunk.toString('utf8');
    }

    const streamItems = streamResult.split(DELIMITER);
    const lastItem = streamItems.pop();

    const parsedItems = streamItems.map((d) => JSON.parse(d));

    expect(responseWithHeaders.headers).toBe(undefined);
    expect(parsedItems).toHaveLength(2);
    expect(parsedItems[0]).toStrictEqual(mockItem1);
    expect(parsedItems[1]).toStrictEqual(mockItem2);
    expect(lastItem).toBe('');
  });

  // Because zlib.gunzip's API expects a callback, we need to use `done` here
  // to indicate once all assertions have run. However, it's not allowed to use
  // both `async` and `done` for the test callback. That's why we're using an
  // "async IIFE" pattern inside the test's callback to still be able to use
  // async/await for the `for await()` part. Note that the unzipping here is
  // done just to be able to decode the stream for the test and assert it.
  // When used in actual code, the browser on the client side will
  // automatically take care of unzipping without the need for additional
  // custom code.
  it('should encode and receive a compressed stream', (done) => {
    (async () => {
      const { DELIMITER, end, push, responseWithHeaders, stream } = streamFactory<
        typeof API_ENDPOINT.EXPLAIN_LOG_RATE_SPIKES
      >(mockLogger, { 'accept-encoding': 'gzip' });

      push(mockItem1);
      push(mockItem2);
      end();

      const chunks: Buffer[] = [];
      for await (const chunk of stream) {
        chunks.push(chunk);
      }

      const buffer = Buffer.concat(chunks);

      zlib.gunzip(buffer, function (err, decoded) {
        expect(err).toBe(null);

        const streamResult = decoded.toString('utf8');

        const streamItems = streamResult.split(DELIMITER);
        const lastItem = streamItems.pop();

        const parsedItems = streamItems.map((d) => JSON.parse(d));

        expect(responseWithHeaders.headers).toStrictEqual({ 'content-encoding': 'gzip' });
        expect(parsedItems).toHaveLength(2);
        expect(parsedItems[0]).toStrictEqual(mockItem1);
        expect(parsedItems[1]).toStrictEqual(mockItem2);
        expect(lastItem).toBe('');

        done();
      });
    })();
  });
});
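
As the comment in the compressed-stream test notes, a browser client does not need to gunzip anything itself. A hedged sketch of client-side consumption (not part of this commit; the function name, endpoint URL, and action handling are assumptions for illustration):

// Hypothetical client-side sketch: fetch() transparently decompresses a
// 'content-encoding: gzip' response body before it reaches this code.
async function readActions(url: string, onAction: (action: unknown) => void) {
  const response = await fetch(url);
  const reader = response.body!.getReader();
  const decoder = new TextDecoder('utf-8');
  let buffered = '';
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffered += decoder.decode(value, { stream: true });
    const lines = buffered.split('\n');
    buffered = lines.pop() ?? ''; // keep any incomplete trailing line
    for (const line of lines) {
      if (line !== '') onAction(JSON.parse(line)); // one NDJSON action per line
    }
  }
}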
62 changes: 62 additions & 0 deletions — x-pack/plugins/aiops/server/lib/stream_factory.ts
@@ -0,0 +1,62 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { Stream } from 'stream';
import zlib from 'zlib';

import type { Headers, Logger } from '@kbn/core/server';

import { ApiEndpoint, ApiEndpointActions } from '../../common/api';

import { acceptCompression } from './accept_compression';

// We need this, otherwise the Kibana server will crash with an
// 'ERR_METHOD_NOT_IMPLEMENTED' error.
class ResponseStream extends Stream.PassThrough {
  flush() {}
  _read() {}
}

const DELIMITER = '\n';

export function streamFactory<T extends ApiEndpoint>(logger: Logger, headers: Headers) {
  const isCompressed = acceptCompression(headers);

  const stream = isCompressed ? zlib.createGzip() : new ResponseStream();

  function push(d: ApiEndpointActions[T]) {
    try {
      const line = JSON.stringify(d);
      stream.write(`${line}${DELIMITER}`);

      // Calling .flush() on a compression stream will
      // make zlib return as much output as currently possible.
      if (isCompressed) {
        stream.flush();
      }
    } catch (error) {
      logger.error('Could not serialize or stream a message.');
      logger.error(error);
    }
  }

  function end() {
    stream.end();
  }

  const responseWithHeaders = {
    body: stream,
    ...(isCompressed
      ? {
          headers: {
            'content-encoding': 'gzip',
          },
        }
      : {}),
  };

  return { DELIMITER, end, push, responseWithHeaders, stream };
}
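
A minimal sketch of how a route handler might wire this up (hedged: the route registration, `router`, `logger`, and timing below are assumptions for illustration and not part of this diff; only streamFactory and its return values come from the code above):

// Hypothetical Kibana route handler; assumes `router` and `logger` are in scope.
router.get(
  { path: '/internal/aiops/explain_log_rate_spikes', validate: false },
  async (context, request, response) => {
    const { end, push, responseWithHeaders } = streamFactory<
      typeof API_ENDPOINT.EXPLAIN_LOG_RATE_SPIKES
    >(logger, request.headers);

    // Push actions asynchronously; the response starts streaming immediately.
    setTimeout(() => {
      push({ type: 'add_fields', payload: ['clientip'] });
      end();
    }, 0);

    // `responseWithHeaders` is `{ body: stream }` plus, when the client
    // accepts it, `{ headers: { 'content-encoding': 'gzip' } }`.
    return response.ok(responseWithHeaders);
  }
);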
This file was deleted.