diff --git a/src/core/server/saved_objects/import/check_conflict_for_data_source.test.ts b/src/core/server/saved_objects/import/check_conflict_for_data_source.test.ts new file mode 100644 index 000000000000..e5b32bbde114 --- /dev/null +++ b/src/core/server/saved_objects/import/check_conflict_for_data_source.test.ts @@ -0,0 +1,182 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Any modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { mockUuidv4 } from './__mocks__'; +import { SavedObjectReference, SavedObjectsImportRetry } from 'opensearch-dashboards/public'; +import { SavedObject } from '../types'; +import { SavedObjectsErrorHelpers } from '..'; +import { + checkConflictsForDataSource, + ConflictsForDataSourceParams, +} from './check_conflict_for_data_source'; + +type SavedObjectType = SavedObject<{ title?: string }>; + +/** + * Function to create a realistic-looking import object given a type and ID + */ +const createObject = (type: string, id: string): SavedObjectType => ({ + type, + id, + attributes: { title: 'some-title' }, + references: (Symbol() as unknown) as SavedObjectReference[], +}); + +const getResultMock = { + conflict: (type: string, id: string) => { + const error = SavedObjectsErrorHelpers.createConflictError(type, id).output.payload; + return { type, id, error }; + }, + unresolvableConflict: (type: string, id: string) => { + const conflictMock = getResultMock.conflict(type, id); + const metadata = { isNotOverwritable: true }; + return { ...conflictMock, error: { ...conflictMock.error, metadata } }; + }, + invalidType: (type: string, id: string) => { + const error = SavedObjectsErrorHelpers.createUnsupportedTypeError(type).output.payload; + return { type, id, error }; + }, +}; + +/** + * Create a variety of different objects to exercise different import / result scenarios + */ +const obj1 = createObject('type-1', 'id-1'); // -> success +const obj2 = createObject('type-2', 'id-2'); // -> conflict +const obj3 = createObject('type-3', 'id-3'); // -> unresolvable conflict +const obj4 = createObject('type-4', 'id-4'); // -> invalid type +const dsObj = createObject('data-source', 'data-source-id-1'); // -> data-source type, no need to add in the filteredObjects +const objects = [obj1, obj2, obj3, obj4]; +const dsObj1 = createObject('type-1', 'ds_id-1'); // -> object with data source id +const dsObj2 = createObject('type-2', 'ds_id-2'); // -> object with data source id +const objectsWithDataSource = [dsObj, dsObj1, dsObj2]; +const dsObj1Error = 
getResultMock.conflict(dsObj1.type, dsObj1.id);
+
+describe('#checkConflictsForDataSource', () => {
+  const setupParams = (partial: {
+    objects: SavedObjectType[];
+    ignoreRegularConflicts?: boolean;
+    retries?: SavedObjectsImportRetry[];
+    createNewCopies?: boolean;
+    dataSourceId?: string;
+  }): ConflictsForDataSourceParams => {
+    return { ...partial };
+  };
+
+  beforeEach(() => {
+    mockUuidv4.mockReset();
+    mockUuidv4.mockReturnValueOnce(`new-object-id`);
+  });
+
+  it('exits early if there are no objects to check', async () => {
+    const params = setupParams({ objects: [] });
+    const checkConflictsForDataSourceResult = await checkConflictsForDataSource(params);
+    expect(checkConflictsForDataSourceResult).toEqual({
+      filteredObjects: [],
+      errors: [],
+      importIdMap: new Map(),
+      pendingOverwrites: new Set(),
+    });
+  });
+
+  it('returns the original objects when there is no data source id', async () => {
+    const params = setupParams({ objects });
+    const checkConflictsForDataSourceResult = await checkConflictsForDataSource(params);
+    expect(checkConflictsForDataSourceResult).toEqual({
+      filteredObjects: [...objects],
+      errors: [],
+      importIdMap: new Map(),
+      pendingOverwrites: new Set(),
+    });
+  });
+
+  it('returns the object when it is not a data-source object and its data source id does not conflict', async () => {
+    const params = setupParams({ objects: objectsWithDataSource, dataSourceId: 'ds' });
+    const checkConflictsForDataSourceResult = await checkConflictsForDataSource(params);
+    expect(checkConflictsForDataSourceResult).toEqual({
+      filteredObjects: [dsObj1, dsObj2],
+      errors: [],
+      importIdMap: new Map(),
+      pendingOverwrites: new Set(),
+    });
+  });
+
+  it('resolves the data source id conflict when the data source ids do not match and ignoreRegularConflicts=true', async () => {
+    const params = setupParams({
+      objects: objectsWithDataSource,
+      ignoreRegularConflicts: true,
+      dataSourceId: 'currentDsId',
+    });
+    const checkConflictsForDataSourceResult = await checkConflictsForDataSource(params);
+
+    expect(checkConflictsForDataSourceResult).toEqual(
+      expect.objectContaining({
+        filteredObjects: [
+          {
+            ...dsObj1,
+            id: 'currentDsId_id-1',
+          },
+          {
+            ...dsObj2,
+            id: 'currentDsId_id-2',
+          },
+        ],
+        errors: [],
+        importIdMap: new Map([
+          [`${dsObj1.type}:${dsObj1.id}`, { id: 'currentDsId_id-1', omitOriginId: true }],
+          [`${dsObj2.type}:${dsObj2.id}`, { id: 'currentDsId_id-2', omitOriginId: true }],
+        ]),
+        pendingOverwrites: new Set([`${dsObj1.type}:${dsObj1.id}`, `${dsObj2.type}:${dsObj2.id}`]),
+      })
+    );
+  });
+
+  it('pushes an error on data source conflict when overwrite is not chosen', async () => {
+    const params = setupParams({
+      objects: [dsObj1],
+      ignoreRegularConflicts: false,
+      dataSourceId: 'currentDs',
+    });
+    const checkConflictsForDataSourceResult = await checkConflictsForDataSource(params);
+    expect(checkConflictsForDataSourceResult).toEqual({
+      filteredObjects: [],
+      errors: [
+        {
+          ...dsObj1Error,
+          title: dsObj1.attributes.title,
+          meta: { title: dsObj1.attributes.title },
+          error: { type: 'conflict' },
+        },
+      ],
+      importIdMap: new Map(),
+      pendingOverwrites: new Set(),
+    });
+  });
+});
diff --git a/src/core/server/saved_objects/import/check_conflict_for_data_source.ts b/src/core/server/saved_objects/import/check_conflict_for_data_source.ts
new file mode 100644
index 000000000000..e8f15dd66d16
--- /dev/null
+++ b/src/core/server/saved_objects/import/check_conflict_for_data_source.ts
@@ -0,0 +1,115 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Any modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { SavedObject, SavedObjectsImportError, SavedObjectsImportRetry } from '../types';
+
+export interface ConflictsForDataSourceParams {
+  objects: Array<SavedObject<{ title?: string }>>;
+  ignoreRegularConflicts?: boolean;
+  retries?: SavedObjectsImportRetry[];
+  createNewCopies?: boolean;
+  dataSourceId?: string;
+}
+
+/**
+ * Checks the imported saved objects for conflicts against the target data source
+ * when multiple data sources are enabled.
+ * @param objects the array of saved objects to check for data source conflicts
+ * @param ignoreRegularConflicts whether to overwrite on conflict
+ * @param dataSourceId the id that identifies the target data source
+ * @returns {filteredObjects, errors, importIdMap, pendingOverwrites }
+ */
+export async function checkConflictsForDataSource({
+  objects,
+  ignoreRegularConflicts,
+  retries = [],
+  dataSourceId,
+}: ConflictsForDataSourceParams) {
+  const filteredObjects: Array<SavedObject<{ title?: string }>> = [];
+  const errors: SavedObjectsImportError[] = [];
+  const importIdMap = new Map();
+  const pendingOverwrites = new Set();
+
+  // exit early if there are no objects to check
+  if (objects.length === 0) {
+    return { filteredObjects, errors, importIdMap, pendingOverwrites };
+  }
+  const retryMap = retries.reduce(
+    (acc, cur) => acc.set(`${cur.type}:${cur.id}`, cur),
+    new Map()
+  );
+  objects.forEach((object) => {
+    const {
+      type,
+      id,
+      attributes: { title },
+    } = object;
+    const { destinationId } = retryMap.get(`${type}:${id}`) || {};
+
+    const currentDataSourceId = dataSourceId;
+
+    if (!object.type.includes('data-source')) {
+      // Check whether a previous data source id exists
+      // by extracting it from the object id.
+      // e.g. e0c9e490-bdd7-11ee-b216-d78a57002330_ff959d40-b880-11e8-a6d9-e546fe2bba5f
+      // e0c9e490-bdd7-11ee-b216-d78a57002330 is the data source id.
+      // For the data-source saved object itself (e0c9e490-bdd7-11ee-b216-d78a57002330), this yields undefined.
+      const parts = id.split('_'); // the array holding the split parts of the id
+      const previousDataSourceId = parts.length > 1 ? parts[0] : undefined;
+      // Case: saved objects exported from OSD may carry a dataSourceId
+      // that differs from the current dataSourceId.
+      // If no previous data source id exists, or it matches the current one,
+      // there is no conflict; push the object to the filtered objects.
+      if (!previousDataSourceId || previousDataSourceId === currentDataSourceId) {
+        filteredObjects.push(object);
+      } else if (previousDataSourceId && previousDataSourceId !== currentDataSourceId) {
+        if (ignoreRegularConflicts) {
+          // overwrite
+          // use the old key and the new value in the importIdMap
+          // the old key is used for look-ups, the new value becomes the id of the new object
+          const omitOriginId = ignoreRegularConflicts;
+          // e.g. e0c9e490-bdd7-11ee-b216-d78a57002330_ff959d40-b880-11e8-a6d9-e546fe2bba5f
+          // rawId is ff959d40-b880-11e8-a6d9-e546fe2bba5f
+          const rawId = parts[1];
+          importIdMap.set(`${type}:${id}`, { id: `${currentDataSourceId}_${rawId}`, omitOriginId });
+          pendingOverwrites.add(`${type}:${id}`);
+          filteredObjects.push({ ...object, id: `${currentDataSourceId}_${rawId}` });
+        } else {
+          // do not overwrite
+          // push an error
+          const error = { type: 'conflict' as 'conflict', ...(destinationId && { destinationId }) };
+          errors.push({ type, id, title, meta: { title }, error });
+        }
+      }
+    }
+  });
+
+  return { filteredObjects, errors, importIdMap, pendingOverwrites };
+}
diff --git a/src/core/server/saved_objects/import/check_conflicts.ts b/src/core/server/saved_objects/import/check_conflicts.ts
index 830f7f55d7c5..c54f7b6ad141 100644
--- a/src/core/server/saved_objects/import/check_conflicts.ts
+++ b/src/core/server/saved_objects/import/check_conflicts.ts
@@ -44,6 +44,7 @@ interface CheckConflictsParams {
   ignoreRegularConflicts?: boolean;
   retries?: SavedObjectsImportRetry[];
   createNewCopies?: boolean;
+  dataSourceId?: string;
 }
 
 const isUnresolvableConflict = (error: SavedObjectError) =>
@@ -66,7 +67,6 @@ export async function checkConflicts({
   if (objects.length === 0) {
     return { filteredObjects, errors, importIdMap, pendingOverwrites };
   }
-
   const retryMap = retries.reduce(
     (acc, cur) => acc.set(`${cur.type}:${cur.id}`, cur),
     new Map()
   );
diff --git a/src/core/server/saved_objects/import/check_origin_conflicts.ts b/src/core/server/saved_objects/import/check_origin_conflicts.ts
index 7ef98982e149..740cf83db1a1 100644
--- a/src/core/server/saved_objects/import/check_origin_conflicts.ts
+++ b/src/core/server/saved_objects/import/check_origin_conflicts.ts
@@ -45,6 +45,7 @@ interface CheckOriginConflictsParams {
   namespace?: string;
   ignoreRegularConflicts?: boolean;
   importIdMap: Map;
+  dataSourceId?: string;
 }
 
 type CheckOriginConflictParams = Omit<CheckOriginConflictsParams, 'objects'> & {
@@ -186,6 +187,7 @@ export async function checkOriginConflicts({ objects, ...params }: CheckOriginCo
    if (sources.length === 1 && destinations.length === 1) {
      // This is a simple "inexact match" result -- a single import object has a single destination conflict.
      if (params.ignoreRegularConflicts) {
+       // importIdMap.set(`${type}:${id}`, { id: dataSourceId ?
`${dataSourceId}_${destinations[0].id}` : `${destinations[0].id}` }); importIdMap.set(`${type}:${id}`, { id: destinations[0].id }); pendingOverwrites.add(`${type}:${id}`); } else { diff --git a/src/core/server/saved_objects/import/collect_saved_objects.ts b/src/core/server/saved_objects/import/collect_saved_objects.ts index 8fd015c44991..18f746c2d92e 100644 --- a/src/core/server/saved_objects/import/collect_saved_objects.ts +++ b/src/core/server/saved_objects/import/collect_saved_objects.ts @@ -46,6 +46,7 @@ interface CollectSavedObjectsOptions { objectLimit: number; filter?: (obj: SavedObject) => boolean; supportedTypes: string[]; + dataSourceId?: string; } export async function collectSavedObjects({ @@ -53,6 +54,7 @@ export async function collectSavedObjects({ objectLimit, filter, supportedTypes, + dataSourceId, }: CollectSavedObjectsOptions) { const errors: SavedObjectsImportError[] = []; const entries: Array<{ type: string; id: string }> = []; @@ -79,7 +81,12 @@ export async function collectSavedObjects({ }), createFilterStream((obj) => (filter ? filter(obj) : true)), createMapStream((obj: SavedObject) => { - importIdMap.set(`${obj.type}:${obj.id}`, {}); + if (dataSourceId) { + importIdMap.set(`${dataSourceId}_${obj.type}:${obj.id}`, {}); + } else { + importIdMap.set(`${obj.type}:${obj.id}`, {}); + } + // Ensure migrations execute on every saved object return Object.assign({ migrationVersion: {} }, obj); }), diff --git a/src/core/server/saved_objects/import/create_saved_objects.ts b/src/core/server/saved_objects/import/create_saved_objects.ts index a3a1eebbd2ab..0a006c11b258 100644 --- a/src/core/server/saved_objects/import/create_saved_objects.ts +++ b/src/core/server/saved_objects/import/create_saved_objects.ts @@ -39,6 +39,8 @@ interface CreateSavedObjectsParams { importIdMap: Map; namespace?: string; overwrite?: boolean; + dataSourceId?: string; + dataSourceTitle?: string; } interface CreateSavedObjectsResult { createdObjects: Array>; @@ -56,8 +58,11 @@ export const createSavedObjects = async ({ importIdMap, namespace, overwrite, + dataSourceId, + dataSourceTitle, }: CreateSavedObjectsParams): Promise> => { // filter out any objects that resulted in errors + const errorSet = accumulatedErrors.reduce( (acc, { type, id }) => acc.add(`${type}:${id}`), new Set() @@ -76,7 +81,70 @@ export const createSavedObjects = async ({ ); // filter out the 'version' field of each object, if it exists + const objectsToCreate = filteredObjects.map(({ version, ...object }) => { + if (dataSourceId) { + // @ts-expect-error + if (dataSourceTitle && object.attributes.title) { + if ( + object.type === 'dashboard' || + object.type === 'visualization' || + object.type === 'search' + ) { + // @ts-expect-error + object.attributes.title = object.attributes.title + `_${dataSourceTitle}`; + } + } + + if (object.type === 'index-pattern') { + object.references = [ + { + id: `${dataSourceId}`, + type: 'data-source', + name: 'dataSource', + }, + ]; + } + + if (object.type === 'visualization' || object.type === 'search') { + // @ts-expect-error + const searchSourceString = object.attributes?.kibanaSavedObjectMeta?.searchSourceJSON; + // @ts-expect-error + const visStateString = object.attributes?.visState; + + if (searchSourceString) { + const searchSource = JSON.parse(searchSourceString); + if (searchSource.index) { + const searchSourceIndex = searchSource.index.includes('_') + ? 
searchSource.index.split('_')[searchSource.index.split('_').length - 1] + : searchSource.index; + searchSource.index = `${dataSourceId}_` + searchSourceIndex; + + // @ts-expect-error + object.attributes.kibanaSavedObjectMeta.searchSourceJSON = JSON.stringify(searchSource); + } + } + + if (visStateString) { + const visState = JSON.parse(visStateString); + const controlList = visState.params?.controls; + if (controlList) { + // @ts-expect-error + controlList.map((control) => { + if (control.indexPattern) { + const controlIndexPattern = control.indexPattern.includes('_') + ? control.indexPattern.split('_')[control.indexPattern.split('_').length - 1] + : control.indexPattern; + control.indexPattern = `${dataSourceId}_` + controlIndexPattern; + } + }); + } + // @ts-expect-error + object.attributes.visState = JSON.stringify(visState); + } + } + } + // use the import ID map to ensure that each reference is being created with the correct ID const references = object.references?.map((reference) => { const { type, id } = reference; diff --git a/src/core/server/saved_objects/import/import_saved_objects.test.ts b/src/core/server/saved_objects/import/import_saved_objects.test.ts index de8fb34dfbed..1742af8b7b21 100644 --- a/src/core/server/saved_objects/import/import_saved_objects.test.ts +++ b/src/core/server/saved_objects/import/import_saved_objects.test.ts @@ -47,6 +47,7 @@ import { validateReferences } from './validate_references'; import { checkConflicts } from './check_conflicts'; import { checkOriginConflicts } from './check_origin_conflicts'; import { createSavedObjects } from './create_saved_objects'; +import { checkConflictsForDataSource } from './check_conflict_for_data_source'; jest.mock('./collect_saved_objects'); jest.mock('./regenerate_ids'); @@ -54,6 +55,7 @@ jest.mock('./validate_references'); jest.mock('./check_conflicts'); jest.mock('./check_origin_conflicts'); jest.mock('./create_saved_objects'); +jest.mock('./check_conflict_for_data_source'); const getMockFn = any, U>(fn: (...args: Parameters) => U) => fn as jest.MockedFunction<(...args: Parameters) => U>; @@ -80,6 +82,12 @@ describe('#importSavedObjectsFromStream', () => { importIdMap: new Map(), pendingOverwrites: new Set(), }); + getMockFn(checkConflictsForDataSource).mockResolvedValue({ + errors: [], + filteredObjects: [], + importIdMap: new Map(), + pendingOverwrites: new Set(), + }); getMockFn(createSavedObjects).mockResolvedValue({ errors: [], createdObjects: [] }); }); @@ -89,8 +97,12 @@ describe('#importSavedObjectsFromStream', () => { let savedObjectsClient: jest.Mocked; let typeRegistry: jest.Mocked; const namespace = 'some-namespace'; + const testDataSourceId = 'some-datasource'; - const setupOptions = (createNewCopies: boolean = false): SavedObjectsImportOptions => { + const setupOptions = ( + createNewCopies: boolean = false, + dataSourceId: string | undefined + ): SavedObjectsImportOptions => { readStream = new Readable(); savedObjectsClient = savedObjectsClientMock.create(); typeRegistry = typeRegistryMock.create(); @@ -109,14 +121,17 @@ describe('#importSavedObjectsFromStream', () => { typeRegistry, namespace, createNewCopies, + dataSourceId, }; }; - const createObject = (): SavedObject<{ + const createObject = ( + dataSourceId: string | undefined + ): SavedObject<{ title: string; }> => { return { type: 'foo-type', - id: uuidv4(), + id: dataSourceId ? 
`${dataSourceId}_${uuidv4()}` : uuidv4(), references: [], attributes: { title: 'some-title' }, }; @@ -140,7 +155,7 @@ describe('#importSavedObjectsFromStream', () => { */ describe('module calls', () => { test('collects saved objects from stream', async () => { - const options = setupOptions(); + const options = setupOptions(false, undefined); const supportedTypes = ['foo-type']; typeRegistry.getImportableAndExportableTypes.mockReturnValue( supportedTypes.map((name) => ({ name })) as SavedObjectsType[] @@ -153,8 +168,8 @@ describe('#importSavedObjectsFromStream', () => { }); test('validates references', async () => { - const options = setupOptions(); - const collectedObjects = [createObject()]; + const options = setupOptions(false, undefined); + const collectedObjects = [createObject(undefined)]; getMockFn(collectSavedObjects).mockResolvedValue({ errors: [], collectedObjects, @@ -171,8 +186,8 @@ describe('#importSavedObjectsFromStream', () => { describe('with createNewCopies disabled', () => { test('does not regenerate object IDs', async () => { - const options = setupOptions(); - const collectedObjects = [createObject()]; + const options = setupOptions(false, undefined); + const collectedObjects = [createObject(undefined)]; getMockFn(collectSavedObjects).mockResolvedValue({ errors: [], collectedObjects, @@ -184,8 +199,8 @@ describe('#importSavedObjectsFromStream', () => { }); test('checks conflicts', async () => { - const options = setupOptions(); - const collectedObjects = [createObject()]; + const options = setupOptions(false, undefined); + const collectedObjects = [createObject(undefined)]; getMockFn(collectSavedObjects).mockResolvedValue({ errors: [], collectedObjects, @@ -203,8 +218,8 @@ describe('#importSavedObjectsFromStream', () => { }); test('checks origin conflicts', async () => { - const options = setupOptions(); - const filteredObjects = [createObject()]; + const options = setupOptions(false, undefined); + const filteredObjects = [createObject(undefined)]; const importIdMap = new Map(); getMockFn(checkConflicts).mockResolvedValue({ errors: [], @@ -225,10 +240,28 @@ describe('#importSavedObjectsFromStream', () => { expect(checkOriginConflicts).toHaveBeenCalledWith(checkOriginConflictsParams); }); + test('checks data source conflicts', async () => { + const options = setupOptions(false, testDataSourceId); + const collectedObjects = [createObject(undefined)]; + getMockFn(collectSavedObjects).mockResolvedValue({ + errors: [], + collectedObjects, + importIdMap: new Map(), + }); + + await importSavedObjectsFromStream(options); + const checkConflictsForDataSourceParams = { + objects: collectedObjects, + ignoreRegularConflicts: overwrite, + dataSourceId: testDataSourceId, + }; + expect(checkConflictsForDataSource).toHaveBeenCalledWith(checkConflictsForDataSourceParams); + }); + test('creates saved objects', async () => { - const options = setupOptions(); - const collectedObjects = [createObject()]; - const filteredObjects = [createObject()]; + const options = setupOptions(false, undefined); + const collectedObjects = [createObject(undefined)]; + const filteredObjects = [createObject(undefined)]; const errors = [createError(), createError(), createError(), createError()]; getMockFn(collectSavedObjects).mockResolvedValue({ errors: [errors[0]], @@ -272,8 +305,8 @@ describe('#importSavedObjectsFromStream', () => { describe('with createNewCopies enabled', () => { test('regenerates object IDs', async () => { - const options = setupOptions(true); - const collectedObjects = [createObject()]; + 
const options = setupOptions(true, undefined); + const collectedObjects = [createObject(undefined)]; getMockFn(collectSavedObjects).mockResolvedValue({ errors: [], collectedObjects, @@ -281,21 +314,22 @@ describe('#importSavedObjectsFromStream', () => { }); await importSavedObjectsFromStream(options); - expect(regenerateIds).toHaveBeenCalledWith(collectedObjects); + expect(regenerateIds).toHaveBeenCalledWith(collectedObjects, undefined); }); - test('does not check conflicts or check origin conflicts', async () => { - const options = setupOptions(true); + test('does not check conflicts or check origin conflicts or check data source conflict', async () => { + const options = setupOptions(true, undefined); getMockFn(validateReferences).mockResolvedValue([]); await importSavedObjectsFromStream(options); expect(checkConflicts).not.toHaveBeenCalled(); expect(checkOriginConflicts).not.toHaveBeenCalled(); + expect(checkConflictsForDataSource).not.toHaveBeenCalled(); }); test('creates saved objects', async () => { - const options = setupOptions(true); - const collectedObjects = [createObject()]; + const options = setupOptions(true, undefined); + const collectedObjects = [createObject(undefined)]; const errors = [createError(), createError()]; getMockFn(collectSavedObjects).mockResolvedValue({ errors: [errors[0]], @@ -326,14 +360,14 @@ describe('#importSavedObjectsFromStream', () => { describe('results', () => { test('returns success=true if no errors occurred', async () => { - const options = setupOptions(); + const options = setupOptions(false, undefined); const result = await importSavedObjectsFromStream(options); expect(result).toEqual({ success: true, successCount: 0 }); }); test('returns success=false if an error occurred', async () => { - const options = setupOptions(); + const options = setupOptions(false, undefined); getMockFn(collectSavedObjects).mockResolvedValue({ errors: [createError()], collectedObjects: [], @@ -345,13 +379,20 @@ describe('#importSavedObjectsFromStream', () => { }); describe('handles a mix of successes and errors and injects metadata', () => { - const obj1 = createObject(); - const tmp = createObject(); + const obj1 = createObject(undefined); + const tmp = createObject(undefined); const obj2 = { ...tmp, destinationId: 'some-destinationId', originId: tmp.id }; - const obj3 = { ...createObject(), destinationId: 'another-destinationId' }; // empty originId + const obj3 = { ...createObject(undefined), destinationId: 'another-destinationId' }; // empty originId const createdObjects = [obj1, obj2, obj3]; const error1 = createError(); const error2 = createError(); + // add some objects with data source id + const dsObj1 = createObject(testDataSourceId); + const tmp2 = createObject(testDataSourceId); + const dsObj2 = { ...tmp2, destinationId: 'some-destinationId', originId: tmp.id }; + const dsObj3 = { ...createObject(testDataSourceId), destinationId: 'another-destinationId' }; + const createdDsObjects = [dsObj1, dsObj2, dsObj3]; + // results const success1 = { type: obj1.type, @@ -372,8 +413,28 @@ describe('#importSavedObjectsFromStream', () => { }; const errors = [error1, error2]; + const dsSuccess1 = { + type: dsObj1.type, + id: dsObj1.id, + meta: { title: dsObj1.attributes.title, icon: `${dsObj1.type}-icon` }, + }; + + const dsSuccess2 = { + type: dsObj2.type, + id: dsObj2.id, + meta: { title: dsObj2.attributes.title, icon: `${dsObj2.type}-icon` }, + destinationId: dsObj2.destinationId, + }; + + const dsSuccess3 = { + type: dsObj3.type, + id: dsObj3.id, + meta: { title: 
dsObj3.attributes.title, icon: `${dsObj3.type}-icon` }, + destinationId: dsObj3.destinationId, + }; + test('with createNewCopies disabled', async () => { - const options = setupOptions(); + const options = setupOptions(false, undefined); getMockFn(checkConflicts).mockResolvedValue({ errors: [], filteredObjects: [], @@ -410,7 +471,7 @@ describe('#importSavedObjectsFromStream', () => { test('with createNewCopies enabled', async () => { // however, we include it here for posterity - const options = setupOptions(true); + const options = setupOptions(true, undefined); getMockFn(createSavedObjects).mockResolvedValue({ errors, createdObjects }); const result = await importSavedObjectsFromStream(options); @@ -428,10 +489,73 @@ describe('#importSavedObjectsFromStream', () => { errors: errorResults, }); }); + + test('with createNewCopies disabled and with data source id', async () => { + const options = setupOptions(false, testDataSourceId); + getMockFn(checkConflictsForDataSource).mockResolvedValue({ + errors: [], + filteredObjects: [], + importIdMap: new Map(), + pendingOverwrites: new Set([`${dsSuccess2.type}:${dsSuccess2.id}`]), + }); + getMockFn(checkConflicts).mockResolvedValue({ + errors: [], + filteredObjects: [], + importIdMap: new Map(), + pendingOverwrites: new Set([ + `${dsSuccess2.type}:${dsSuccess2.id}`, // the dsSuccess2 object was overwritten + `${error2.type}:${error2.id}`, // an attempt was made to overwrite the error2 object + ]), + }); + getMockFn(createSavedObjects).mockResolvedValue({ + errors, + createdObjects: createdDsObjects, + }); + + const result = await importSavedObjectsFromStream(options); + // successResults only includes the imported object's type, id, and destinationId (if a new one was generated) + const successResults = [ + dsSuccess1, + { ...dsSuccess2, overwrite: true }, + { ...dsSuccess3, createNewCopy: true }, + ]; + const errorResults = [ + { ...error1, meta: { ...error1.meta, icon: `${error1.type}-icon` } }, + { ...error2, meta: { ...error2.meta, icon: `${error2.type}-icon` }, overwrite: true }, + ]; + expect(result).toEqual({ + success: false, + successCount: 3, + successResults, + errors: errorResults, + }); + }); + + test('with createNewCopies enabled and with data source id', async () => { + const options = setupOptions(true, testDataSourceId); + getMockFn(createSavedObjects).mockResolvedValue({ + errors, + createdObjects: createdDsObjects, + }); + + const result = await importSavedObjectsFromStream(options); + const successDsResults = [dsSuccess1, dsSuccess2, dsSuccess3]; + + const errorResults = [ + { ...error1, meta: { ...error1.meta, icon: `${error1.type}-icon` } }, + { ...error2, meta: { ...error2.meta, icon: `${error2.type}-icon` } }, + ]; + expect(result).toEqual({ + success: false, + successCount: 3, + successResults: successDsResults, + errors: errorResults, + }); + }); }); test('accumulates multiple errors', async () => { - const options = setupOptions(); + const options = setupOptions(false, undefined); const errors = [createError(), createError(), createError(), createError(), createError()]; getMockFn(collectSavedObjects).mockResolvedValue({ errors: [errors[0]], diff --git a/src/core/server/saved_objects/import/import_saved_objects.ts b/src/core/server/saved_objects/import/import_saved_objects.ts index cd250fc5f65f..b045c078d45b 100644 --- a/src/core/server/saved_objects/import/import_saved_objects.ts +++ b/src/core/server/saved_objects/import/import_saved_objects.ts @@ -39,6 +39,7 @@ import { checkOriginConflicts } from 
'./check_origin_conflicts';
 import { createSavedObjects } from './create_saved_objects';
 import { checkConflicts } from './check_conflicts';
 import { regenerateIds } from './regenerate_ids';
+import { checkConflictsForDataSource } from './check_conflict_for_data_source';
 /**
  * Import saved objects from given stream. See the {@link SavedObjectsImportOptions | options} for more
  *
@@ -54,6 +55,8 @@ export async function importSavedObjectsFromStream({
   savedObjectsClient,
   typeRegistry,
   namespace,
+  dataSourceId,
+  dataSourceTitle,
 }: SavedObjectsImportOptions): Promise<SavedObjectsImportResponse> {
   let errorAccumulator: SavedObjectsImportError[] = [];
   const supportedTypes = typeRegistry.getImportableAndExportableTypes().map((type) => type.name);
@@ -63,6 +66,7 @@
     readStream,
     objectLimit,
     supportedTypes,
+    dataSourceId,
   });
   errorAccumulator = [...errorAccumulator, ...collectSavedObjectsResult.errors];
   /** Map of all IDs for objects that we are attempting to import; each value is empty by default */
@@ -78,19 +82,40 @@
   errorAccumulator = [...errorAccumulator, ...validateReferencesResult];
 
   if (createNewCopies) {
-    importIdMap = regenerateIds(collectSavedObjectsResult.collectedObjects);
+    // randomly generated ids
+    importIdMap = regenerateIds(collectSavedObjectsResult.collectedObjects, dataSourceId);
   } else {
+    // check-conflict and overwrite mode
     // Check single-namespace objects for conflicts in this namespace, and check multi-namespace objects for conflicts across all namespaces
     const checkConflictsParams = {
       objects: collectSavedObjectsResult.collectedObjects,
       savedObjectsClient,
       namespace,
       ignoreRegularConflicts: overwrite,
+      dataSourceId,
    };
+
+    // When a data source id is present, resolve data source conflicts first and pass the filtered objects on to the next conflict check
+    if (dataSourceId) {
+      const checkConflictsForDataSourceResult = await checkConflictsForDataSource({
+        objects: collectSavedObjectsResult.collectedObjects,
+        ignoreRegularConflicts: overwrite,
+        dataSourceId,
+      });
+
+      checkConflictsParams.objects = checkConflictsForDataSourceResult.filteredObjects;
+
+      pendingOverwrites = new Set([
+        ...pendingOverwrites,
+        ...checkConflictsForDataSourceResult.pendingOverwrites,
+      ]);
+    }
+
+    // Check single-namespace objects for conflicts in this namespace, and check multi-namespace objects for conflicts across all namespaces
     const checkConflictsResult = await checkConflicts(checkConflictsParams);
     errorAccumulator = [...errorAccumulator, ...checkConflictsResult.errors];
     importIdMap = new Map([...importIdMap, ...checkConflictsResult.importIdMap]);
-    pendingOverwrites = checkConflictsResult.pendingOverwrites;
+    pendingOverwrites = new Set([...pendingOverwrites, ...checkConflictsResult.pendingOverwrites]);
 
     // Check multi-namespace object types for origin conflicts in this namespace
     const checkOriginConflictsParams = {
@@ -118,6 +143,8 @@
     importIdMap,
     overwrite,
     namespace,
+    dataSourceId,
+    dataSourceTitle,
   };
   const createSavedObjectsResult = await createSavedObjects(createSavedObjectsParams);
   errorAccumulator = [...errorAccumulator, ...createSavedObjectsResult.errors];
@@ -136,6 +163,7 @@
       };
     }
   );
+
   const errorResults = errorAccumulator.map((error) => {
     const icon = typeRegistry.getType(error.type)?.management?.icon;
     const attemptedOverwrite = pendingOverwrites.has(`${error.type}:${error.id}`);
diff --git 
a/src/core/server/saved_objects/import/regenerate_ids.test.ts b/src/core/server/saved_objects/import/regenerate_ids.test.ts index 11556c8a21c1..71125f388cd6 100644 --- a/src/core/server/saved_objects/import/regenerate_ids.test.ts +++ b/src/core/server/saved_objects/import/regenerate_ids.test.ts @@ -44,7 +44,7 @@ describe('#regenerateIds', () => { .mockReturnValueOnce('uuidv4 #1') .mockReturnValueOnce('uuidv4 #2') .mockReturnValueOnce('uuidv4 #3'); - expect(regenerateIds(objects)).toMatchInlineSnapshot(` + expect(regenerateIds(objects, '')).toMatchInlineSnapshot(` Map { "foo:1" => Object { "id": "uuidv4 #1", diff --git a/src/core/server/saved_objects/import/regenerate_ids.ts b/src/core/server/saved_objects/import/regenerate_ids.ts index 672a8f030620..d1b6a2a9992d 100644 --- a/src/core/server/saved_objects/import/regenerate_ids.ts +++ b/src/core/server/saved_objects/import/regenerate_ids.ts @@ -36,8 +36,15 @@ import { SavedObject } from '../types'; * * @param objects The saved objects to generate new IDs for. */ -export const regenerateIds = (objects: SavedObject[]) => { +export const regenerateIds = (objects: SavedObject[], dataSourceId: string | undefined) => { + // add datasource? const importIdMap = objects.reduce((acc, object) => { + if (dataSourceId) { + return acc.set(`${object.type}:${object.id}`, { + id: `${dataSourceId}_${uuidv4()}`, + omitOriginId: true, + }); + } return acc.set(`${object.type}:${object.id}`, { id: uuidv4(), omitOriginId: true }); }, new Map()); return importIdMap; diff --git a/src/core/server/saved_objects/import/resolve_import_errors.test.ts b/src/core/server/saved_objects/import/resolve_import_errors.test.ts index bd0ba1afdc9d..ef22155f046b 100644 --- a/src/core/server/saved_objects/import/resolve_import_errors.test.ts +++ b/src/core/server/saved_objects/import/resolve_import_errors.test.ts @@ -369,7 +369,7 @@ describe('#importSavedObjectsFromStream', () => { }); await resolveSavedObjectsImportErrors(options); - expect(regenerateIds).toHaveBeenCalledWith(collectedObjects); + expect(regenerateIds).toHaveBeenCalledWith(collectedObjects, undefined); }); test('creates saved objects', async () => { diff --git a/src/core/server/saved_objects/import/resolve_import_errors.ts b/src/core/server/saved_objects/import/resolve_import_errors.ts index 162410c4ce9b..09207c893043 100644 --- a/src/core/server/saved_objects/import/resolve_import_errors.ts +++ b/src/core/server/saved_objects/import/resolve_import_errors.ts @@ -59,6 +59,8 @@ export async function resolveSavedObjectsImportErrors({ typeRegistry, namespace, createNewCopies, + dataSourceId, + dataSourceTitle, }: SavedObjectsResolveImportErrorsOptions): Promise { // throw a BadRequest error if we see invalid retries validateRetries(retries); @@ -76,6 +78,7 @@ export async function resolveSavedObjectsImportErrors({ objectLimit, filter, supportedTypes, + dataSourceId, } ); errorAccumulator = [...errorAccumulator, ...collectorErrors]; @@ -116,7 +119,7 @@ export async function resolveSavedObjectsImportErrors({ if (createNewCopies) { // In case any missing reference errors were resolved, ensure that we regenerate those object IDs as well // This is because a retry to resolve a missing reference error may not necessarily specify a destinationId - importIdMap = regenerateIds(objectsToResolve); + importIdMap = regenerateIds(objectsToResolve, dataSourceId); } // Check single-namespace objects for conflicts in this namespace, and check multi-namespace objects for conflicts across all namespaces @@ -126,6 +129,7 @@ export async 
function resolveSavedObjectsImportErrors({ namespace, retries, createNewCopies, + dataSourceId, }; const checkConflictsResult = await checkConflicts(checkConflictsParams); errorAccumulator = [...errorAccumulator, ...checkConflictsResult.errors]; @@ -157,6 +161,8 @@ export async function resolveSavedObjectsImportErrors({ importIdMap, namespace, overwrite, + dataSourceId, + dataSourceTitle, }; const { createdObjects, errors: bulkCreateErrors } = await createSavedObjects( createSavedObjectsParams diff --git a/src/core/server/saved_objects/import/types.ts b/src/core/server/saved_objects/import/types.ts index 88beacb9d2fd..73bc548b1f24 100644 --- a/src/core/server/saved_objects/import/types.ts +++ b/src/core/server/saved_objects/import/types.ts @@ -187,6 +187,8 @@ export interface SavedObjectsImportOptions { namespace?: string; /** If true, will create new copies of import objects, each with a random `id` and undefined `originId`. */ createNewCopies: boolean; + dataSourceId?: string; + dataSourceTitle?: string; } /** @@ -208,6 +210,8 @@ export interface SavedObjectsResolveImportErrorsOptions { namespace?: string; /** If true, will create new copies of import objects, each with a random `id` and undefined `originId`. */ createNewCopies: boolean; + dataSourceId?: string; + dataSourceTitle?: string; } export type CreatedObject = SavedObject & { destinationId?: string }; diff --git a/src/core/server/saved_objects/routes/import.ts b/src/core/server/saved_objects/routes/import.ts index b157feb0860e..916caaa765a4 100644 --- a/src/core/server/saved_objects/routes/import.ts +++ b/src/core/server/saved_objects/routes/import.ts @@ -60,6 +60,7 @@ export const registerImportRoute = (router: IRouter, config: SavedObjectConfig) { overwrite: schema.boolean({ defaultValue: false }), createNewCopies: schema.boolean({ defaultValue: false }), + dataSourceId: schema.string({ defaultValue: '' }), }, { validate: (object) => { @@ -75,13 +76,26 @@ export const registerImportRoute = (router: IRouter, config: SavedObjectConfig) }, }, router.handleLegacyErrors(async (context, req, res) => { - const { overwrite, createNewCopies } = req.query; + const { overwrite, createNewCopies, dataSourceId } = req.query; const file = req.body.file as FileStream; const fileExtension = extname(file.hapi.filename).toLowerCase(); if (fileExtension !== '.ndjson') { return res.badRequest({ body: `Invalid file extension ${fileExtension}` }); } + // get datasource from saved object service + const dataSource = await context.core.savedObjects.client + .get('data-source', dataSourceId) + .then((response) => { + const attributes: any = response?.attributes || {}; + return { + id: response.id, + title: attributes.title, + }; + }); + + const dataSourceTitle = dataSource.title; + let readStream: Readable; try { readStream = await createSavedObjectsStreamFromNdJson(file); @@ -98,6 +112,8 @@ export const registerImportRoute = (router: IRouter, config: SavedObjectConfig) objectLimit: maxImportExportSize, overwrite, createNewCopies, + dataSourceId, + dataSourceTitle, }); return res.ok({ body: result }); diff --git a/src/core/server/saved_objects/routes/resolve_import_errors.ts b/src/core/server/saved_objects/routes/resolve_import_errors.ts index 5e07125671f1..d78255b6f5cd 100644 --- a/src/core/server/saved_objects/routes/resolve_import_errors.ts +++ b/src/core/server/saved_objects/routes/resolve_import_errors.ts @@ -58,6 +58,7 @@ export const registerResolveImportErrorsRoute = (router: IRouter, config: SavedO validate: { query: schema.object({ 
createNewCopies: schema.boolean({ defaultValue: false }), + dataSourceId: schema.string({ defaultValue: '' }), }), body: schema.object({ file: schema.stream(), @@ -89,6 +90,21 @@ export const registerResolveImportErrorsRoute = (router: IRouter, config: SavedO return res.badRequest({ body: `Invalid file extension ${fileExtension}` }); } + const dataSourceId = req.query.dataSourceId; + + // get datasource from saved object service + const dataSource = await context.core.savedObjects.client + .get('data-source', dataSourceId) + .then((response) => { + const attributes: any = response?.attributes || {}; + return { + id: response.id, + title: attributes.title, + }; + }); + + const dataSourceTitle = dataSource.title; + let readStream: Readable; try { readStream = await createSavedObjectsStreamFromNdJson(file); @@ -105,6 +121,8 @@ export const registerResolveImportErrorsRoute = (router: IRouter, config: SavedO retries: req.body.retries, objectLimit: maxImportExportSize, createNewCopies: req.query.createNewCopies, + dataSourceId, + dataSourceTitle, }); return res.ok({ body: result }); diff --git a/src/core/server/saved_objects/saved_objects_type_registry.ts b/src/core/server/saved_objects/saved_objects_type_registry.ts index b314d38b3298..0ee58a29bd91 100644 --- a/src/core/server/saved_objects/saved_objects_type_registry.ts +++ b/src/core/server/saved_objects/saved_objects_type_registry.ts @@ -79,6 +79,7 @@ export class SavedObjectTypeRegistry { * To only get the visible types (which is the most common use case), use `getVisibleTypes` instead. */ public getAllTypes() { + // console.log("[DEBUG] getAllTypes", this.types.values()); return [...this.types.values()]; }
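
For reference, a minimal standalone TypeScript sketch (not part of the patch above) of the `<dataSourceId>_<objectId>` id convention that checkConflictsForDataSource relies on; the function and sample values below are illustrative only:

// Sketch: how an imported object's id is interpreted against the current data source.
// Mirrors the split('_') logic in check_conflict_for_data_source.ts.
function rewriteIdForDataSource(
  id: string,
  currentDataSourceId: string,
  overwrite: boolean
): { conflict: boolean; newId?: string } {
  const parts = id.split('_');
  const previousDataSourceId = parts.length > 1 ? parts[0] : undefined;

  // No previous data source prefix, or it already matches: keep the object as-is.
  if (!previousDataSourceId || previousDataSourceId === currentDataSourceId) {
    return { conflict: false, newId: id };
  }

  // Prefix mismatch: either re-prefix with the current data source id (overwrite)
  // or report a conflict.
  return overwrite
    ? { conflict: false, newId: `${currentDataSourceId}_${parts[1]}` }
    : { conflict: true };
}

// rewriteIdForDataSource('old-ds_ff959d40', 'new-ds', true)  -> { conflict: false, newId: 'new-ds_ff959d40' }
// rewriteIdForDataSource('old-ds_ff959d40', 'new-ds', false) -> { conflict: true }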