
Commit

Merge branch 'main' into tagcloud_convertotlens
kibanamachine authored Jun 12, 2023
2 parents da9d008 + 70472fd commit 954650e
Showing 23 changed files with 281 additions and 63 deletions.
3 changes: 3 additions & 0 deletions .buildkite/pipelines/flaky_tests/pipeline.ts
@@ -177,6 +177,9 @@ for (const testSuite of testSuites) {
// by setting chunks vars to value 1, which means all tests will run in one job
CLI_NUMBER: 1,
CLI_COUNT: 1,
// The security solution cypress tests don't recognize CLI_NUMBER and CLI_COUNT; they use `BUILDKITE_PARALLEL_JOB_COUNT` and `BUILDKITE_PARALLEL_JOB`, which cannot be overridden here.
// Use `RUN_ALL_TESTS` to make Security Solution Cypress tests run all tests instead of a subset.
RUN_ALL_TESTS: 'true',
},
});
break;
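The comment above distinguishes the generic `CLI_NUMBER`/`CLI_COUNT` chunking from the Buildkite variables the Security Solution Cypress tests read. As a rough illustration of the mechanism (not taken from the Security Solution code; the variable handling and slicing logic are assumptions), a runner honouring these flags might look like:

```ts
// Hypothetical sketch of env-flag handling in a parallel Cypress runner:
// RUN_ALL_TESTS short-circuits the Buildkite-driven chunking.
const runAll = process.env.RUN_ALL_TESTS === 'true';
const jobCount = Number(process.env.BUILDKITE_PARALLEL_JOB_COUNT ?? '1');
const jobIndex = Number(process.env.BUILDKITE_PARALLEL_JOB ?? '0');

function specsForThisJob(allSpecs: string[]): string[] {
  if (runAll || jobCount <= 1) {
    // RUN_ALL_TESTS=true (or a single job) means every spec runs in this job
    return allSpecs;
  }
  // otherwise keep only this job's slice of the spec files
  return allSpecs.filter((_, i) => i % jobCount === jobIndex);
}
```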
16 changes: 14 additions & 2 deletions docs/api/saved-objects/export.asciidoc
@@ -23,7 +23,7 @@ experimental[] Retrieve sets of saved objects that you want to import into {kib}
==== Request body

`type`::
(Optional, array|string) The saved object types to include in the export.
(Optional, array|string) The saved object types to include in the export. Use `*` to export all the types.

`objects`::
(Optional, array) A list of objects to export.
@@ -106,7 +106,7 @@ $ curl -X POST api/saved_objects/_export -H 'kbn-xsrf: true' -H 'Content-Type: a
--------------------------------------------------
// KIBANA

Export a specific saved object and it's related objects :
Export a specific saved object and its related objects:

[source,sh]
--------------------------------------------------
@@ -122,3 +122,15 @@ $ curl -X POST api/saved_objects/_export -H 'kbn-xsrf: true' -H 'Content-Type: a
}'
--------------------------------------------------
// KIBANA

Export all exportable saved objects and their related objects:

[source,sh]
--------------------------------------------------
$ curl -X POST api/saved_objects/_export -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d '
{
"type": "*",
"includeReferencesDeep": true
}'
--------------------------------------------------
// KIBANA
2 changes: 1 addition & 1 deletion docs/developer/plugin/plugin-tooling.asciidoc
@@ -31,7 +31,7 @@ WARNING: {kib} distributable is not shipped with `@kbn/optimizer` anymore. You n

You can leverage {kib-repo}blob/{branch}/packages/kbn-plugin-helpers[@kbn/plugin-helpers] to build a distributable archive for your plugin.
The package transpiles the plugin code, adds polyfills, and links the necessary JS modules at runtime.
You don't need to install the `plugin-helpers`: the `package.json` is already pre-configured if you created your plugin with `node scripts/generate_plugin` script.
You don't need to install the `plugin-helpers` dependency. If you created the plugin using the `node scripts/generate_plugin` script, `package.json` is already pre-configured.
To build your plugin, run the following within your plugin folder:
["source","shell"]
-----------
@@ -6,9 +6,9 @@
* Side Public License, v 1.
*/

export { SavedObjectsImporter, SavedObjectsImportError } from './src/import';
export {
SavedObjectsExporter,
SavedObjectsExportError,
SavedObjectsImporter,
SavedObjectsImportError,
} from './src';
EXPORT_ALL_TYPES_TOKEN,
} from './src/export';
@@ -6,5 +6,8 @@
* Side Public License, v 1.
*/

export { SavedObjectsImporter, SavedObjectsImportError } from './import';
export { SavedObjectsExporter, SavedObjectsExportError } from './export';
/**
* Token used for the `type` option when exporting by type
* to specify that all (importable and exportable) types should be exported.
*/
export const EXPORT_ALL_TYPES_TOKEN = '*';
@@ -8,3 +8,4 @@

export { SavedObjectsExporter } from './saved_objects_exporter';
export { SavedObjectsExportError } from './errors';
export { EXPORT_ALL_TYPES_TOKEN } from './constants';
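Taken together, the changes above expose `EXPORT_ALL_TYPES_TOKEN` from the export package. A minimal consumer-side sketch (not part of this commit; the type-only import paths are assumptions) of exporting everything through the token:

```ts
import { EXPORT_ALL_TYPES_TOKEN } from '@kbn/core-saved-objects-import-export-server-internal';
import type { ISavedObjectsExporter } from '@kbn/core-saved-objects-server'; // path assumed
import type { KibanaRequest } from '@kbn/core-http-server'; // path assumed

// Hypothetical helper: export every importable-and-exportable type in one call.
async function exportEverything(exporter: ISavedObjectsExporter, request: KibanaRequest) {
  // exportByTypes replaces the '*' token with the full list of exportable types
  return exporter.exportByTypes({ request, types: [EXPORT_ALL_TYPES_TOKEN] });
}
```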
@@ -7,18 +7,27 @@
*/

import { httpServerMock } from '@kbn/core-http-server-mocks';
import type { SavedObject } from '@kbn/core-saved-objects-server';
import type { SavedObject, SavedObjectsType } from '@kbn/core-saved-objects-server';
import { SavedObjectTypeRegistry } from '@kbn/core-saved-objects-base-server-internal';
import { SavedObjectsExporter } from './saved_objects_exporter';
import { savedObjectsClientMock } from '@kbn/core-saved-objects-api-server-mocks';
import { loggerMock, type MockedLogger } from '@kbn/logging-mocks';
import { Readable } from 'stream';
import { createPromiseFromStreams, createConcatStream } from '@kbn/utils';
import { EXPORT_ALL_TYPES_TOKEN } from './constants';

async function readStreamToCompletion(stream: Readable): Promise<Array<SavedObject<any>>> {
return createPromiseFromStreams([stream, createConcatStream([])]);
}

const createType = (parts: Partial<SavedObjectsType>): SavedObjectsType => ({
name: 'type',
namespaceType: 'single',
hidden: false,
mappings: { properties: {} },
...parts,
});

const exportSizeLimit = 10000;
const request = httpServerMock.createKibanaRequest();

@@ -32,13 +41,17 @@ describe('getSortedObjectsForExport()', () => {
logger = loggerMock.create();
typeRegistry = new SavedObjectTypeRegistry();
savedObjectsClient = savedObjectsClientMock.create();
exporter = new SavedObjectsExporter({
exporter = createExporter();
});

const createExporter = () => {
return new SavedObjectsExporter({
exportSizeLimit,
logger,
savedObjectsClient,
typeRegistry,
});
});
};

describe('#exportByTypes', () => {
test('exports selected types and sorts them', async () => {
@@ -986,6 +999,45 @@ describe('getSortedObjectsForExport()', () => {
]
`);
});

test(`supports the "all types" wildcard`, async () => {
typeRegistry.registerType(
createType({
name: 'exportable_1',
management: { importableAndExportable: true },
})
);
typeRegistry.registerType(
createType({
name: 'exportable_2',
management: { importableAndExportable: true },
})
);
typeRegistry.registerType(
createType({
name: 'not_exportable',
management: { importableAndExportable: false },
})
);

exporter = createExporter();

savedObjectsClient.find.mockResolvedValueOnce({
total: 0,
per_page: 1000,
page: 1,
saved_objects: [],
});

await exporter.exportByTypes({ request, types: [EXPORT_ALL_TYPES_TOKEN] });

expect(savedObjectsClient.createPointInTimeFinder).toHaveBeenCalledTimes(1);
expect(savedObjectsClient.createPointInTimeFinder).toHaveBeenCalledWith(
expect.objectContaining({
type: ['exportable_1', 'exportable_2'],
})
);
});
});

describe('#exportByObjects', () => {
@@ -30,6 +30,7 @@ import {
getPreservedOrderComparator,
type SavedObjectComparator,
} from './utils';
import { EXPORT_ALL_TYPES_TOKEN } from './constants';

/**
* @internal
@@ -39,6 +40,7 @@ export class SavedObjectsExporter implements ISavedObjectsExporter {
readonly #exportSizeLimit: number;
readonly #typeRegistry: ISavedObjectTypeRegistry;
readonly #log: Logger;
readonly #exportableTypes: string[];

constructor({
savedObjectsClient,
@@ -55,6 +57,9 @@ export class SavedObjectsExporter implements ISavedObjectsExporter {
this.#savedObjectsClient = savedObjectsClient;
this.#exportSizeLimit = exportSizeLimit;
this.#typeRegistry = typeRegistry;
this.#exportableTypes = this.#typeRegistry
.getImportableAndExportableTypes()
.map((type) => type.name);
}

public async exportByTypes(options: SavedObjectsExportByTypeOptions) {
@@ -146,6 +151,10 @@ export class SavedObjectsExporter implements ISavedObjectsExporter {
hasReference,
search,
}: SavedObjectsExportByTypeOptions) {
if (types.includes(EXPORT_ALL_TYPES_TOKEN)) {
types = this.#exportableTypes;
}

const finder = this.#savedObjectsClient.createPointInTimeFinder({
type: types,
hasReference,
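For clarity, the wildcard handling added to `exportByTypes` boils down to the following restatement (illustrative only; the real logic lives inline in the method and uses the exportable-type list computed in the constructor):

```ts
// Illustrative restatement of the wildcard expansion: the '*' token is
// swapped for the importable-and-exportable type names.
const EXPORT_ALL_TYPES_TOKEN = '*';

function resolveRequestedTypes(requested: string[], exportableTypes: string[]): string[] {
  return requested.includes(EXPORT_ALL_TYPES_TOKEN) ? exportableTypes : requested;
}

// e.g. resolveRequestedTypes(['*'], ['dashboard', 'visualization'])
//   -> ['dashboard', 'visualization']
```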
@@ -30,6 +30,7 @@ import { kibanaResponseFactory } from '@kbn/core-http-router-server-internal';
import { typeRegistryInstanceMock } from '../saved_objects_service.test.mocks';
import { httpServerMock } from '@kbn/core-http-server-mocks';
import { loggerMock, type MockedLogger } from '@kbn/logging-mocks';
import { EXPORT_ALL_TYPES_TOKEN } from '@kbn/core-saved-objects-import-export-server-internal';

async function readStreamToCompletion(stream: Readable) {
return createPromiseFromStreams([stream, createConcatStream([])]);
@@ -148,6 +149,20 @@ describe('validateTypes', () => {
expect(validateTypes(allowedTypes, allowedTypes)).toBeUndefined();
expect(validateTypes(['config'], allowedTypes)).toBeUndefined();
});
it('supports the all types token', () => {
expect(validateTypes([EXPORT_ALL_TYPES_TOKEN], allowedTypes)).toBeUndefined();
expect(validateTypes([EXPORT_ALL_TYPES_TOKEN, allowedTypes[0]], allowedTypes)).toBeUndefined();
});
it('returns an error message for non-allowed types even with the all types token', () => {
expect(
validateTypes(
[EXPORT_ALL_TYPES_TOKEN, 'not-allowed-type', 'not-allowed-type-2'],
allowedTypes
)
).toMatchInlineSnapshot(
`"Trying to export non-exportable type(s): not-allowed-type, not-allowed-type-2"`
);
});
});

describe('validateObjects', () => {
@@ -23,7 +23,8 @@ import {
SavedObjectsExportResultDetails,
SavedObjectsErrorHelpers,
} from '@kbn/core-saved-objects-server';
import { Logger } from '@kbn/logging';
import type { Logger } from '@kbn/logging';
import { EXPORT_ALL_TYPES_TOKEN } from '@kbn/core-saved-objects-import-export-server-internal';

export async function createSavedObjectsStreamFromNdJson(ndJsonStream: Readable) {
const savedObjects = await createPromiseFromStreams([
@@ -43,7 +44,9 @@ export function createSavedObjectsStreamFromNdJson(ndJsonStream: Readable)
}

export function validateTypes(types: string[], supportedTypes: string[]): string | undefined {
const invalidTypes = types.filter((t) => !supportedTypes.includes(t));
const invalidTypes = types.filter(
(t) => t !== EXPORT_ALL_TYPES_TOKEN && !supportedTypes.includes(t)
);
if (invalidTypes.length) {
return `Trying to export non-exportable type(s): ${invalidTypes.join(', ')}`;
}
@@ -109,6 +112,7 @@ export function throwOnGloballyHiddenTypes(
);
}
}

/**
* @param {string[]} unsupportedTypes saved object types registered with hidden=false and hiddenFromHttpApis=true
*/
@@ -120,6 +124,7 @@ export function throwOnHttpHiddenTypes(unsupportedTypes: string[]) {
);
}
}

/**
* @param {string[]} type saved object type
* @param {ISavedObjectTypeRegistry} registry the saved object type registry
@@ -147,6 +152,7 @@ export function throwIfAnyTypeNotVisibleByAPI(
throwOnHttpHiddenTypes(unsupportedTypes);
}
}

export interface BulkGetItem {
type: string;
id: string;
@@ -168,6 +174,7 @@ export interface LogWarnOnExternalRequest {
req: KibanaRequest;
logger: Logger;
}

/**
* Only log a warning when the request is internal
* Allows us to silence the logs for development
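Based on the unit tests earlier in this commit, the updated `validateTypes` behaves roughly as follows (illustrative sketch; the relative import path is an assumption):

```ts
// Expected behaviour of the updated validateTypes, mirroring the unit tests above.
import { validateTypes } from './utils'; // path assumed

const allowed = ['dashboard', 'visualization'];
validateTypes(['*'], allowed);              // undefined – the wildcard token always validates
validateTypes(['*', 'dashboard'], allowed); // undefined
validateTypes(['*', 'secret'], allowed);    // "Trying to export non-exportable type(s): secret"
```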
27 changes: 27 additions & 0 deletions test/api_integration/apis/saved_objects/export.ts
@@ -12,6 +12,7 @@ import type { FtrProviderContext } from '../../ftr_provider_context';
function ndjsonToObject(input: string) {
return input.split('\n').map((str) => JSON.parse(str));
}

export default function ({ getService }: FtrProviderContext) {
const supertest = getService('supertest');
const kibanaServer = getService('kibanaServer');
@@ -53,6 +54,32 @@ export default function ({ getService }: FtrProviderContext) {
});
});

it('should support exporting all types', async () => {
await supertest
.post(`/s/${SPACE_ID}/api/saved_objects/_export`)
.send({
type: ['*'],
excludeExportDetails: true,
})
.expect(200)
.then((resp) => {
const objects = ndjsonToObject(resp.text);
expect(objects).to.have.length(4);
expect(objects.map((obj) => ({ id: obj.id, type: obj.type }))).to.eql([
{ id: '7.0.0-alpha1', type: 'config' },
{
id: '91200a00-9efd-11e7-acb3-3dab96693fab',
type: 'index-pattern',
},
{
id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab',
type: 'visualization',
},
{ id: 'be3733a0-9efe-11e7-acb3-3dab96693fab', type: 'dashboard' },
]);
});
});

it('should exclude the export details if asked', async () => {
await supertest
.post(`/s/${SPACE_ID}/api/saved_objects/_export`)
10 changes: 10 additions & 0 deletions test/functional/services/common/find.ts
@@ -307,6 +307,16 @@ export class FindService extends FtrService {
}, timeout);
}

public async existsByXpath(
selector: string,
timeout: number = this.WAIT_FOR_EXISTS_TIME
): Promise<boolean> {
this.log.debug(`Find.existsByXpath('${selector}') with timeout=${timeout}`);
return await this.exists(async (drive) => {
return this.wrapAll(await drive.findElements(By.xpath(selector)));
}, timeout);
}

public async clickByCssSelectorWhenNotDisabled(selector: string, opts?: TimeoutOpt) {
const timeout = opts?.timeout ?? this.defaultFindTimeout;

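A usage sketch of the new `existsByXpath` helper inside an FTR functional test (the test name, XPath selector, and the `ftr_provider_context` import path are illustrative assumptions, not part of this commit):

```ts
import expect from '@kbn/expect';
import type { FtrProviderContext } from '../../ftr_provider_context'; // path assumed

// Hypothetical functional test exercising Find.existsByXpath.
export default function ({ getService }: FtrProviderContext) {
  const find = getService('find');

  describe('existsByXpath example', () => {
    it('finds an element by XPath within the default timeout', async () => {
      const present = await find.existsByXpath(`//h2[text()='Export saved objects']`);
      expect(present).to.be(true);
    });
  });
}
```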
6 changes: 6 additions & 0 deletions x-pack/plugins/apm/dev_docs/testing.md
@@ -97,6 +97,12 @@ node x-pack/plugins/apm/scripts/test/e2e --server
node x-pack/plugins/apm/scripts/test/e2e --runner --open
```

### Run tests multiple times to check for flakiness

```
node x-pack/plugins/apm/scripts/test/e2e --runner --times <NUMBER> [--spec <FILE_NAME>]
```

### A11y checks

Accessibility tests are added to the e2e suite with `checkA11y()`; they run together with the Cypress tests.
