Merge branch 'master' of github.com:elastic/kibana into unskip-dataprovider
angorayc committed Jan 8, 2021
2 parents c240357 + 91d73cf commit 1ca8af9
Showing 352 changed files with 4,910 additions and 2,494 deletions.
8 changes: 6 additions & 2 deletions docs/developer/plugin-list.asciidoc
@@ -168,6 +168,10 @@ Content is fetched from the remote (https://feeds.elastic.co and https://feeds-s
|Create choropleth maps. Display the results of a term-aggregation as e.g. countries, zip-codes, states.
|{kib-repo}blob/{branch}/src/plugins/runtime_fields/README.mdx[runtimeFields]
|The runtime fields plugin provides types and constants for OSS and xpack runtime field related code.
|{kib-repo}blob/{branch}/src/plugins/saved_objects/README.md[savedObjects]
|The savedObjects plugin exposes utilities to manipulate saved objects on the client side.
@@ -483,8 +487,8 @@ Elastic.
|Welcome to the Kibana rollup plugin! This plugin provides Kibana support for Elasticsearch's rollup feature. Please refer to the Elasticsearch documentation to understand rollup indices and how to create rollup jobs.
|{kib-repo}blob/{branch}/x-pack/plugins/runtime_fields/README.md[runtimeFields]
|Welcome to the home of the runtime field editor and everything related to runtime fields!
|{kib-repo}blob/{branch}/x-pack/plugins/runtime_field_editor/README.md[runtimeFieldEditor]
|Welcome to the home of the runtime field editor!
|{kib-repo}blob/{branch}/x-pack/plugins/saved_objects_tagging/README.md[savedObjectsTagging]
14 changes: 9 additions & 5 deletions package.json
@@ -140,7 +140,16 @@
"@loaders.gl/json": "^2.3.1",
"@slack/webhook": "^5.0.0",
"@storybook/addons": "^6.0.16",
"@turf/along": "6.0.1",
"@turf/area": "6.0.1",
"@turf/bbox": "6.0.1",
"@turf/bbox-polygon": "6.0.1",
"@turf/boolean-contains": "6.0.1",
"@turf/center-of-mass": "6.0.1",
"@turf/circle": "6.0.1",
"@turf/distance": "6.0.1",
"@turf/helpers": "6.0.1",
"@turf/length": "^6.0.2",
"JSONStream": "1.3.5",
"abort-controller": "^3.0.0",
"abortcontroller-polyfill": "^1.4.0",
@@ -399,11 +408,6 @@
"@testing-library/react": "^11.0.4",
"@testing-library/react-hooks": "^3.4.1",
"@testing-library/user-event": "^12.1.6",
"@turf/bbox": "6.0.1",
"@turf/bbox-polygon": "6.0.1",
"@turf/boolean-contains": "6.0.1",
"@turf/distance": "6.0.1",
"@turf/helpers": "6.0.1",
"@types/accept": "3.1.1",
"@types/angular": "^1.6.56",
"@types/angular-mocks": "^1.7.0",
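The hunks above move the @turf packages out of devDependencies and into dependencies (adding @turf/along, @turf/area, @turf/center-of-mass, @turf/circle, and @turf/length along the way), which is necessary once the packages are imported from code that ships in the production bundle rather than only from tests. A minimal sketch of that kind of runtime usage, assuming the standard turf v6 API; the coordinates and file are illustrative, not from this commit:

// Illustrative only: runtime (non-test) imports like these are why the
// @turf packages belong in "dependencies" rather than "devDependencies".
import distance from '@turf/distance';
import { point } from '@turf/helpers';

// Distance between two [longitude, latitude] points, in kilometers (turf v6).
const sanFrancisco = point([-122.42, 37.77]);
const losAngeles = point([-118.24, 34.05]);
console.log(distance(sanFrancisco, losAngeles, { units: 'kilometers' }));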
@@ -20,10 +20,9 @@
import Stream, { Readable, Writable } from 'stream';
import { createGunzip } from 'zlib';

import expect from '@kbn/expect';
import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';

import { createFormatArchiveStreams } from '../format';
import { createFormatArchiveStreams } from './format';

const INPUTS = [1, 2, { foo: 'bar' }, [1, 2]];
const INPUT_JSON = INPUTS.map((i) => JSON.stringify(i, null, 2)).join('\n\n');
@@ -32,9 +31,9 @@ describe('esArchiver createFormatArchiveStreams', () => {
describe('{ gzip: false }', () => {
it('returns an array of streams', () => {
const streams = createFormatArchiveStreams({ gzip: false });
expect(streams).to.be.an('array');
expect(streams.length).to.be.greaterThan(0);
streams.forEach((s) => expect(s).to.be.a(Stream));
expect(streams).toBeInstanceOf(Array);
expect(streams.length).toBeGreaterThan(0);
streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
});

it('streams consume js values and produce buffers', async () => {
@@ -44,8 +43,8 @@
createConcatStream([]),
] as [Readable, ...Writable[]]);

expect(output.length).to.be.greaterThan(0);
output.forEach((b) => expect(b).to.be.a(Buffer));
expect(output.length).toBeGreaterThan(0);
output.forEach((b) => expect(b).toBeInstanceOf(Buffer));
});

it('product is pretty-printed JSON separated by two newlines', async () => {
@@ -55,16 +54,16 @@
createConcatStream(''),
] as [Readable, ...Writable[]]);

expect(json).to.be(INPUT_JSON);
expect(json).toBe(INPUT_JSON);
});
});

describe('{ gzip: true }', () => {
it('returns an array of streams', () => {
const streams = createFormatArchiveStreams({ gzip: true });
expect(streams).to.be.an('array');
expect(streams.length).to.be.greaterThan(0);
streams.forEach((s) => expect(s).to.be.a(Stream));
expect(streams).toBeInstanceOf(Array);
expect(streams.length).toBeGreaterThan(0);
streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
});

it('streams consume js values and produce buffers', async () => {
@@ -74,8 +73,8 @@
createConcatStream([]),
] as [Readable, ...Writable[]]);

expect(output.length).to.be.greaterThan(0);
output.forEach((b) => expect(b).to.be.a(Buffer));
expect(output.length).toBeGreaterThan(0);
output.forEach((b) => expect(b).toBeInstanceOf(Buffer));
});

it('output can be gunzipped', async () => {
@@ -85,7 +84,7 @@
createGunzip(),
createConcatStream(''),
] as [Readable, ...Writable[]]);
expect(output).to.be(INPUT_JSON);
expect(output).toBe(INPUT_JSON);
});
});

@@ -97,7 +96,7 @@
createConcatStream(''),
] as [Readable, ...Writable[]]);

expect(json).to.be(INPUT_JSON);
expect(json).toBe(INPUT_JSON);
});
});
});
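Aside from the relocated import ('../format' becomes './format', suggesting the test file moved next to its source), the changes in this file are a mechanical migration from @kbn/expect's expect.js-style chains to Jest's built-in matchers. The mapping used throughout this commit, summarized as a sketch (Jest globals assumed):

// @kbn/expect (expect.js style)          Jest equivalent
// expect(x).to.be.an('array')        ->  expect(x).toBeInstanceOf(Array)
// expect(x).to.be.a(Stream)          ->  expect(x).toBeInstanceOf(Stream)
// expect(n).to.be.greaterThan(0)     ->  expect(n).toBeGreaterThan(0)
// expect(a).to.be(b)                 ->  expect(a).toBe(b)      // strict (===) equality
// expect(a).to.eql(b)                ->  expect(a).toEqual(b)   // recursive deep equality
// expect(s).to.contain(sub)          ->  expect(s).toEqual(expect.stringContaining(sub))
// expect(o).to.have.property(k, v)   ->  expect(o).toHaveProperty(k, v)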
@@ -20,18 +20,17 @@
import Stream, { PassThrough, Readable, Writable, Transform } from 'stream';
import { createGzip } from 'zlib';

import expect from '@kbn/expect';
import { createConcatStream, createListStream, createPromiseFromStreams } from '@kbn/utils';

import { createParseArchiveStreams } from '../parse';
import { createParseArchiveStreams } from './parse';

describe('esArchiver createParseArchiveStreams', () => {
describe('{ gzip: false }', () => {
it('returns an array of streams', () => {
const streams = createParseArchiveStreams({ gzip: false });
expect(streams).to.be.an('array');
expect(streams.length).to.be.greaterThan(0);
streams.forEach((s) => expect(s).to.be.a(Stream));
expect(streams).toBeInstanceOf(Array);
expect(streams.length).toBeGreaterThan(0);
streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
});

describe('streams', () => {
@@ -46,7 +45,7 @@
...createParseArchiveStreams({ gzip: false }),
]);

expect(output).to.eql({ a: 1 });
expect(output).toEqual({ a: 1 });
});
it('consume buffers of valid JSON separated by two newlines', async () => {
const output = await createPromiseFromStreams([
@@ -63,7 +62,7 @@
createConcatStream([]),
] as [Readable, ...Writable[]]);

expect(output).to.eql([{ a: 1 }, 1]);
expect(output).toEqual([{ a: 1 }, 1]);
});

it('provides each JSON object as soon as it is parsed', async () => {
@@ -87,10 +86,10 @@
] as [Readable, ...Writable[]]);

input.write(Buffer.from('{"a": 1}\n\n{"a":'));
expect(await receivedPromise).to.eql({ a: 1 });
expect(await receivedPromise).toEqual({ a: 1 });
input.write(Buffer.from('2}'));
input.end();
expect(await finalPromise).to.eql([{ a: 1 }, { a: 2 }]);
expect(await finalPromise).toEqual([{ a: 1 }, { a: 2 }]);
});
});

@@ -108,7 +107,7 @@
] as [Readable, ...Writable[]]);
throw new Error('should have failed');
} catch (err) {
expect(err.message).to.contain('Unexpected number');
expect(err.message).toEqual(expect.stringContaining('Unexpected number'));
}
});
});
@@ -117,9 +116,9 @@
describe('{ gzip: true }', () => {
it('returns an array of streams', () => {
const streams = createParseArchiveStreams({ gzip: true });
expect(streams).to.be.an('array');
expect(streams.length).to.be.greaterThan(0);
streams.forEach((s) => expect(s).to.be.a(Stream));
expect(streams).toBeInstanceOf(Array);
expect(streams.length).toBeGreaterThan(0);
streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
});

describe('streams', () => {
Expand All @@ -135,7 +134,7 @@ describe('esArchiver createParseArchiveStreams', () => {
...createParseArchiveStreams({ gzip: true }),
]);

expect(output).to.eql({ a: 1 });
expect(output).toEqual({ a: 1 });
});

it('parses valid gzipped JSON strings separated by two newlines', async () => {
Expand All @@ -146,7 +145,7 @@ describe('esArchiver createParseArchiveStreams', () => {
createConcatStream([]),
] as [Readable, ...Writable[]]);

expect(output).to.eql([{ a: 1 }, { a: 2 }]);
expect(output).toEqual([{ a: 1 }, { a: 2 }]);
});
});

Expand All @@ -158,7 +157,7 @@ describe('esArchiver createParseArchiveStreams', () => {
createConcatStream([]),
] as [Readable, ...Writable[]]);

expect(output).to.eql([]);
expect(output).toEqual([]);
});

describe('stream errors', () => {
Expand All @@ -171,7 +170,7 @@ describe('esArchiver createParseArchiveStreams', () => {
] as [Readable, ...Writable[]]);
throw new Error('should have failed');
} catch (err) {
expect(err.message).to.contain('incorrect header check');
expect(err.message).toEqual(expect.stringContaining('incorrect header check'));
}
});
});
@@ -183,7 +182,7 @@
createListStream([Buffer.from('{"a": 1}')]),
...createParseArchiveStreams(),
]);
expect(output).to.eql({ a: 1 });
expect(output).toEqual({ a: 1 });
});
});
});
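Taken together, the format and parse tests pin down the esArchiver record format: pretty-printed JSON documents separated by a blank line (two newlines), optionally gzipped. A round-trip sketch built only from the stream helpers exactly as the tests above use them; treat it as illustration, not API documentation:

import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';
import { createFormatArchiveStreams } from './format';
import { createParseArchiveStreams } from './parse';

async function roundTrip() {
  const values = [{ a: 1 }, { a: 2 }];

  // values -> pretty-printed JSON records separated by two newlines
  const archived = await createPromiseFromStreams([
    createListStream(values),
    ...createFormatArchiveStreams({ gzip: false }),
    createConcatStream(''),
  ]);

  // ...and back into parsed values
  const parsed = await createPromiseFromStreams([
    createListStream([Buffer.from(String(archived))]),
    ...createParseArchiveStreams({ gzip: false }),
    createConcatStream([]),
  ]);

  return parsed; // deep-equals `values`
}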
@@ -18,22 +18,21 @@
*/

import sinon from 'sinon';
import expect from '@kbn/expect';
import { delay } from 'bluebird';
import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';

import { createGenerateDocRecordsStream } from '../generate_doc_records_stream';
import { Progress } from '../../progress';
import { createStubStats, createStubClient } from './stubs';
import { createGenerateDocRecordsStream } from './generate_doc_records_stream';
import { Progress } from '../progress';
import { createStubStats, createStubClient } from './__mocks__/stubs';

describe('esArchiver: createGenerateDocRecordsStream()', () => {
it('scrolls 1000 documents at a time', async () => {
const stats = createStubStats();
const client = createStubClient([
(name, params) => {
expect(name).to.be('search');
expect(params).to.have.property('index', 'logstash-*');
expect(params).to.have.property('size', 1000);
expect(name).toBe('search');
expect(params).toHaveProperty('index', 'logstash-*');
expect(params).toHaveProperty('size', 1000);
return {
hits: {
total: 0,
@@ -49,18 +48,18 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
createGenerateDocRecordsStream({ client, stats, progress }),
]);

expect(progress.getTotal()).to.be(0);
expect(progress.getComplete()).to.be(0);
expect(progress.getTotal()).toBe(0);
expect(progress.getComplete()).toBe(0);
});

it('uses a 1 minute scroll timeout', async () => {
const stats = createStubStats();
const client = createStubClient([
(name, params) => {
expect(name).to.be('search');
expect(params).to.have.property('index', 'logstash-*');
expect(params).to.have.property('scroll', '1m');
expect(params).to.have.property('rest_total_hits_as_int', true);
expect(name).toBe('search');
expect(params).toHaveProperty('index', 'logstash-*');
expect(params).toHaveProperty('scroll', '1m');
expect(params).toHaveProperty('rest_total_hits_as_int', true);
return {
hits: {
total: 0,
@@ -76,35 +75,35 @@
createGenerateDocRecordsStream({ client, stats, progress }),
]);

expect(progress.getTotal()).to.be(0);
expect(progress.getComplete()).to.be(0);
expect(progress.getTotal()).toBe(0);
expect(progress.getComplete()).toBe(0);
});

it('consumes index names and scrolls completely before continuing', async () => {
const stats = createStubStats();
let checkpoint = Date.now();
const client = createStubClient([
async (name, params) => {
expect(name).to.be('search');
expect(params).to.have.property('index', 'index1');
expect(name).toBe('search');
expect(params).toHaveProperty('index', 'index1');
await delay(200);
return {
_scroll_id: 'index1ScrollId',
hits: { total: 2, hits: [{ _id: 1, _index: '.kibana_foo' }] },
};
},
async (name, params) => {
expect(name).to.be('scroll');
expect(params).to.have.property('scrollId', 'index1ScrollId');
expect(Date.now() - checkpoint).to.not.be.lessThan(200);
expect(name).toBe('scroll');
expect(params).toHaveProperty('scrollId', 'index1ScrollId');
expect(Date.now() - checkpoint).not.toBeLessThan(200);
checkpoint = Date.now();
await delay(200);
return { hits: { total: 2, hits: [{ _id: 2, _index: 'foo' }] } };
},
async (name, params) => {
expect(name).to.be('search');
expect(params).to.have.property('index', 'index2');
expect(Date.now() - checkpoint).to.not.be.lessThan(200);
expect(name).toBe('search');
expect(params).toHaveProperty('index', 'index2');
expect(Date.now() - checkpoint).not.toBeLessThan(200);
checkpoint = Date.now();
await delay(200);
return { hits: { total: 0, hits: [] } };
@@ -118,7 +117,7 @@
createConcatStream([]),
]);

expect(docRecords).to.eql([
expect(docRecords).toEqual([
{
type: 'doc',
value: {
@@ -139,7 +138,7 @@
},
]);
sinon.assert.calledTwice(stats.archivedDoc as any);
expect(progress.getTotal()).to.be(2);
expect(progress.getComplete()).to.be(2);
expect(progress.getTotal()).toBe(2);
expect(progress.getComplete()).toBe(2);
});
});
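The assertions above spell out the scrolling contract: one initial search per index with size: 1000, scroll: '1m', and rest_total_hits_as_int: true, then repeated scroll calls (keyed by scrollId) until that index is exhausted, and only then does the stream move on to the next index. A hedged sketch of that loop; the client call shape is assumed from the stubs in this test, not taken from the real implementation:

// Illustrative scroll loop; `client` follows the call shape the stub
// client above simulates (search, then scroll until no hits remain).
async function* readIndexDocs(client: any, index: string) {
  let resp = await client.search({
    index,
    size: 1000,
    scroll: '1m',
    rest_total_hits_as_int: true,
  });
  while (true) {
    for (const hit of resp.hits.hits) {
      // Simplified record shape, mirroring the `docRecords` expected above.
      yield { type: 'doc', value: { index: hit._index, id: hit._id, source: hit._source } };
    }
    if (resp.hits.hits.length === 0 || !resp._scroll_id) break;
    resp = await client.scroll({ scrollId: resp._scroll_id, scroll: '1m' });
  }
}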