
Commit

Re-enable & fix flaky test (#112260)
* Fixes #87968 Re-enable & fix flaky test

* Review comments

Co-authored-by: Kibana Machine <[email protected]>
rudolf and kibanamachine authored Sep 18, 2021
1 parent 2e0d2ba commit 7040b47
Showing 2 changed files with 253 additions and 122 deletions.
@@ -21,7 +21,7 @@ import { Root } from '../../../root';
 
 const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
 
-const logFilePath = Path.join(__dirname, 'migration_from_v1.log');
+const logFilePath = Path.join(__dirname, 'migration_from_older_v1.log');
 
 const asyncUnlink = Util.promisify(Fs.unlink);
 async function removeLogFile() {
@@ -53,7 +53,10 @@ async function fetchDocuments(esClient: ElasticsearchClient, index: string) {
     .sort(sortByTypeAndId);
 }
 
-describe('migration v2', () => {
+describe('migrating from 7.3.0-xpack which used v1 migrations', () => {
+  const migratedIndex = `.kibana_${kibanaVersion}_001`;
+  const originalIndex = `.kibana_1`; // v1 migrations index
+
   let esServer: kbnTestServer.TestElasticsearchUtils;
   let root: Root;
   let coreStart: InternalCoreStart;
@@ -159,136 +162,50 @@ describe('migration v2', () => {
     await new Promise((resolve) => setTimeout(resolve, 10000));
   };
 
-  // FLAKY: https://github.com/elastic/kibana/issues/87968
-  describe.skip('migrating from 7.3.0-xpack version', () => {
-    const migratedIndex = `.kibana_${kibanaVersion}_001`;
-
-    beforeAll(async () => {
-      await removeLogFile();
-      await startServers({
-        oss: false,
-        dataArchive: Path.join(__dirname, 'archives', '7.3.0_xpack_sample_saved_objects.zip'),
-      });
-    });
-
-    afterAll(async () => {
-      await stopServers();
-    });
-
-    it('creates the new index and the correct aliases', async () => {
-      const { body } = await esClient.indices.get(
-        {
-          index: migratedIndex,
-        },
-        { ignore: [404] }
-      );
-
-      const response = body[migratedIndex];
-
-      expect(response).toBeDefined();
-      expect(Object.keys(response.aliases!).sort()).toEqual([
-        '.kibana',
-        `.kibana_${kibanaVersion}`,
-      ]);
-    });
-
-    it('copies all the document of the previous index to the new one', async () => {
-      const migratedIndexResponse = await esClient.count({
-        index: migratedIndex,
-      });
-      const oldIndexResponse = await esClient.count({
-        index: '.kibana_1',
-      });
-
-      // Use a >= comparison since once Kibana has started it might create new
-      // documents like telemetry tasks
-      expect(migratedIndexResponse.body.count).toBeGreaterThanOrEqual(oldIndexResponse.body.count);
-    });
-
-    it('migrates the documents to the highest version', async () => {
-      const expectedVersions = getExpectedVersionPerType();
-      const res = await esClient.search({
-        index: migratedIndex,
-        body: {
-          sort: ['_doc'],
-        },
-        size: 10000,
-      });
-      const allDocuments = res.body.hits.hits as SavedObjectsRawDoc[];
-      allDocuments.forEach((doc) => {
-        assertMigrationVersion(doc, expectedVersions);
-      });
-    });
-  });
-
-  describe('migrating from the same Kibana version that used v1 migrations', () => {
-    const originalIndex = `.kibana_1`; // v1 migrations index
-    const migratedIndex = `.kibana_${kibanaVersion}_001`;
-
-    beforeAll(async () => {
-      await removeLogFile();
-      await startServers({
-        oss: false,
-        dataArchive: Path.join(
-          __dirname,
-          'archives',
-          '8.0.0_v1_migrations_sample_data_saved_objects.zip'
-        ),
-      });
-    });
-
-    afterAll(async () => {
-      await stopServers();
-    });
-
-    it('creates the new index and the correct aliases', async () => {
-      const { body } = await esClient.indices.get(
-        {
-          index: migratedIndex,
-        },
-        { ignore: [404] }
-      );
-      const response = body[migratedIndex];
-
-      expect(response).toBeDefined();
-      expect(Object.keys(response.aliases!).sort()).toEqual([
-        '.kibana',
-        `.kibana_${kibanaVersion}`,
-      ]);
-    });
-
-    it('copies the documents from the previous index to the new one', async () => {
-      // original assertion on document count comparison (how atteched are we to this assertion?)
-      const migratedIndexResponse = await esClient.count({
-        index: migratedIndex,
-      });
-      const oldIndexResponse = await esClient.count({
-        index: originalIndex,
-      });
-
-      // Use a >= comparison since once Kibana has started it might create new
-      // documents like telemetry tasks
-      expect(migratedIndexResponse.body.count).toBeGreaterThanOrEqual(oldIndexResponse.body.count);
-
-      // new assertion against a document array comparison
-      const originalDocs = await fetchDocuments(esClient, originalIndex);
-      const migratedDocs = await fetchDocuments(esClient, migratedIndex);
-      expect(assertMigratedDocuments(migratedDocs, originalDocs));
-    });
-
-    it('migrates the documents to the highest version', async () => {
-      const expectedVersions = getExpectedVersionPerType();
-      const res = await esClient.search({
-        index: migratedIndex,
-        body: {
-          sort: ['_doc'],
-        },
-        size: 10000,
-      });
-      const allDocuments = res.body.hits.hits as SavedObjectsRawDoc[];
-      allDocuments.forEach((doc) => {
-        assertMigrationVersion(doc, expectedVersions);
-      });
-    });
-  });
+  beforeAll(async () => {
+    await removeLogFile();
+    await startServers({
+      oss: false,
+      dataArchive: Path.join(__dirname, 'archives', '7.3.0_xpack_sample_saved_objects.zip'),
+    });
+  });
+
+  afterAll(async () => {
+    await stopServers();
+  });
+
+  it('creates the new index and the correct aliases', async () => {
+    const { body } = await esClient.indices.get(
+      {
+        index: migratedIndex,
+      },
+      { ignore: [404] }
+    );
+
+    const response = body[migratedIndex];
+
+    expect(response).toBeDefined();
+    expect(Object.keys(response.aliases!).sort()).toEqual(['.kibana', `.kibana_${kibanaVersion}`]);
+  });
+
+  it('copies all the document of the previous index to the new one', async () => {
+    const originalDocs = await fetchDocuments(esClient, originalIndex);
+    const migratedDocs = await fetchDocuments(esClient, migratedIndex);
+    expect(assertMigratedDocuments(migratedDocs, originalDocs));
+  });
+
+  it('migrates the documents to the highest version', async () => {
+    const expectedVersions = getExpectedVersionPerType();
+    const res = await esClient.search({
+      index: migratedIndex,
+      body: {
+        sort: ['_doc'],
+      },
+      size: 10000,
+    });
+    const allDocuments = res.body.hits.hits as SavedObjectsRawDoc[];
+    allDocuments.forEach((doc) => {
+      assertMigrationVersion(doc, expectedVersions);
+    });
+  });
 });
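
The re-enabled test drops the count-based comparison in favour of a document-level comparison built on the `fetchDocuments` and `assertMigratedDocuments` helpers it calls, presumably so that extra documents written by a running Kibana (e.g. telemetry tasks) can no longer affect the result. Those helpers are not visible in the rendered hunks above, so the following is only a minimal sketch of how such helpers could look, assuming Jest globals, the `ElasticsearchClient` type already used by the test, and a hypothetical `sortByTypeAndId` comparator inferred from the `.sort(sortByTypeAndId)` context line; it is not the implementation from this changeset.

// Illustrative sketch only: the real helpers live elsewhere in the changeset and may differ.
import type { ElasticsearchClient } from 'src/core/server'; // assumed import path

interface RawDoc {
  _id: string;
  _source: { type?: string; [key: string]: unknown };
}

// Deterministic order so two indices can be compared element by element.
const sortByTypeAndId = (a: RawDoc, b: RawDoc) =>
  (a._source.type ?? '').localeCompare(b._source.type ?? '') || a._id.localeCompare(b._id);

// Fetch every document from an index in a stable order
// (mirrors the fetchDocuments(esClient, index) signature shown in the hunk header).
async function fetchDocuments(esClient: ElasticsearchClient, index: string): Promise<RawDoc[]> {
  const { body } = await esClient.search({
    index,
    body: { query: { match_all: {} } },
    size: 10000,
  });
  return (body.hits.hits as RawDoc[])
    .map(({ _id, _source }) => ({ _id, _source }))
    .sort(sortByTypeAndId);
}

// Assert that every document of the original index is still present after the
// migration; extra documents created by the running Kibana are simply ignored.
function assertMigratedDocuments(migrated: RawDoc[], original: RawDoc[]) {
  const migratedIds = new Set(migrated.map((doc) => doc._id));
  original.forEach((doc) => {
    expect(migratedIds.has(doc._id)).toBe(true);
  });
}

Whether the actual helpers compare full sorted arrays or only check containment as above, the design point visible in the diff is the same: the assertion no longer depends on a document count that can drift while Kibana is running.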
[diff for the second changed file not rendered]
