[Enterprise Search] Display most recent crawl request status in Indices and Crawl Request tables (#137128)

Byron Hulcher authored Jul 26, 2022
1 parent d96fbc4 commit bb0365e
Showing 12 changed files with 249 additions and 66 deletions.
20 changes: 20 additions & 0 deletions x-pack/plugins/enterprise_search/common/types/crawler.ts
@@ -5,7 +5,27 @@
* 2.0.
*/

+// See SharedTogo::Crawler::Status for details on how these are generated
+export enum CrawlerStatus {
+  Pending = 'pending',
+  Suspended = 'suspended',
+  Starting = 'starting',
+  Running = 'running',
+  Suspending = 'suspending',
+  Canceling = 'canceling',
+  Success = 'success',
+  Failed = 'failed',
+  Canceled = 'canceled',
+  Skipped = 'skipped',
+}
+
+export interface CrawlRequest {
+  id: string;
+  configuration_oid: string;
+  status: CrawlerStatus;
+}
export interface Crawler {
id: string;
index_name: string;
+  most_recent_crawl_request_status?: CrawlerStatus;
}
x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/types.ts
@@ -5,6 +5,10 @@
* 2.0.
*/
import { Meta } from '../../../../../common/types';
+import { CrawlerStatus } from '../../../../../common/types/crawler';
+
+// TODO remove this proxy export, which will affect a lot of files
+export { CrawlerStatus };

export enum CrawlerPolicies {
allow = 'allow',
@@ -51,19 +55,6 @@ export type CrawlerDomainValidationStepName =
| 'networkConnectivity'
| 'indexingRestrictions'
| 'contentVerification';
-// See SharedTogo::Crawler::Status for details on how these are generated
-export enum CrawlerStatus {
-  Pending = 'pending',
-  Suspended = 'suspended',
-  Starting = 'starting',
-  Running = 'running',
-  Suspending = 'suspending',
-  Canceling = 'canceling',
-  Success = 'success',
-  Failed = 'failed',
-  Canceled = 'canceled',
-  Skipped = 'skipped',
-}

export type CrawlEventStage = 'crawl' | 'process';

x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/constants.ts
@@ -60,3 +60,16 @@ export const readableCrawlerStatuses: { [key in CrawlerStatus]: string } = {
{ defaultMessage: 'Skipped' }
),
};

+export const crawlStatusColors: { [key in CrawlerStatus]: 'default' | 'danger' | 'success' } = {
+  [CrawlerStatus.Pending]: 'default',
+  [CrawlerStatus.Suspended]: 'default',
+  [CrawlerStatus.Starting]: 'default',
+  [CrawlerStatus.Running]: 'default',
+  [CrawlerStatus.Suspending]: 'default',
+  [CrawlerStatus.Canceling]: 'default',
+  [CrawlerStatus.Success]: 'success',
+  [CrawlerStatus.Failed]: 'danger',
+  [CrawlerStatus.Canceled]: 'default',
+  [CrawlerStatus.Skipped]: 'default',
+};
x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_table.tsx
@@ -25,7 +25,7 @@ import { CrawlEvent } from '../../../../api/crawler/types';
import { CrawlDetailLogic } from '../crawl_details_flyout/crawl_detail_logic';
import { CrawlerLogic } from '../crawler_logic';

-import { readableCrawlerStatuses } from './constants';
+import { crawlStatusColors, readableCrawlerStatuses } from './constants';
import { CrawlEventTypeBadge } from './crawl_event_type_badge';

export const CrawlRequestsTable: React.FC = () => {
@@ -84,7 +84,9 @@ export const CrawlRequestsTable: React.FC = () => {
name: i18n.translate('xpack.enterpriseSearch.crawler.crawlRequestsTable.column.status', {
defaultMessage: 'Status',
}),
-      render: (status: CrawlEvent['status']) => readableCrawlerStatuses[status],
+      render: (status: CrawlEvent['status']) => (
+        <EuiBadge color={crawlStatusColors[status]}>{readableCrawlerStatuses[status]}</EuiBadge>
+      ),
},
];

x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_indices/ (search indices table)
@@ -24,8 +24,9 @@ import { Meta } from '../../../../../common/types';
import { EuiLinkTo, EuiButtonIconTo } from '../../../shared/react_router_helpers';
import { convertMetaToPagination } from '../../../shared/table_pagination';
import { SEARCH_INDEX_PATH } from '../../routes';
-import { ElasticsearchViewIndex, IngestionMethod, IngestionStatus } from '../../types';
-import { ingestionMethodToText } from '../../utils/indices';
+import { ElasticsearchViewIndex, IngestionMethod } from '../../types';
+import { crawlerStatusToColor, crawlerStatusToText } from '../../utils/crawler_status_helpers';
+import { ingestionMethodToText, isCrawlerIndex } from '../../utils/indices';
import {
ingestionStatusToColor,
ingestionStatusToText,
@@ -119,18 +120,22 @@
truncateText: true,
},
{
-    field: 'ingestionStatus',
    name: i18n.translate(
      'xpack.enterpriseSearch.content.searchIndices.ingestionStatus.columnTitle',
      {
        defaultMessage: 'Ingestion status',
      }
    ),
-    render: (ingestionStatus: IngestionStatus) => (
-      <EuiBadge color={ingestionStatusToColor(ingestionStatus)}>
-        {ingestionStatusToText(ingestionStatus)}
-      </EuiBadge>
-    ),
+    render: (index: ElasticsearchViewIndex) =>
+      isCrawlerIndex(index) ? (
+        <EuiBadge color={crawlerStatusToColor(index.crawler?.most_recent_crawl_request_status)}>
+          {crawlerStatusToText(index.crawler?.most_recent_crawl_request_status)}
+        </EuiBadge>
+      ) : (
+        <EuiBadge color={ingestionStatusToColor(index.ingestionStatus)}>
+          {ingestionStatusToText(index.ingestionStatus)}
+        </EuiBadge>
+      ),
+
truncateText: true,
},
28 changes: 28 additions & 0 deletions x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/utils/crawler_status_helpers.ts
@@ -0,0 +1,28 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { i18n } from '@kbn/i18n';

import { CrawlerStatus } from '../api/crawler/types';
import {
crawlStatusColors,
readableCrawlerStatuses,
} from '../components/search_index/crawler/crawl_requests_panel/constants';

export function crawlerStatusToText(crawlerStatus?: CrawlerStatus): string {
return crawlerStatus
? readableCrawlerStatuses[crawlerStatus]
: i18n.translate('xpack.enterpriseSearch.content.searchIndices.ingestionStatus.idle.label', {
defaultMessage: 'Idle',
});
}

export function crawlerStatusToColor(
crawlerStatus?: CrawlerStatus
): 'default' | 'danger' | 'success' {
return crawlerStatus ? crawlStatusColors[crawlerStatus] : 'default';
}
x-pack/plugins/enterprise_search/server/lib/connectors/fetch_connectors.ts
@@ -10,10 +10,10 @@ import { IScopedClusterClient } from '@kbn/core/server';

import { CONNECTORS_INDEX } from '../..';
import { Connector, ConnectorDocument } from '../../../common/types/connectors';
-import { isNotNullish } from '../../../common/utils/is_not_nullish';
import { setupConnectorsIndices } from '../../index_management/setup_indices';

import { isIndexNotFoundException } from '../../utils/identify_exceptions';
+import { fetchAll } from '../fetch_all';

export const fetchConnectorById = async (
client: IScopedClusterClient,
@@ -63,31 +63,12 @@ export const fetchConnectors = async (
client: IScopedClusterClient,
indexNames?: string[]
): Promise<Connector[]> => {
+  const query: QueryDslQueryContainer = indexNames
+    ? { terms: { index_name: indexNames } }
+    : { match_all: {} };
+
  try {
-    const connectorResult = await client.asCurrentUser.search<ConnectorDocument>({
-      from: 0,
-      index: CONNECTORS_INDEX,
-      query: { match_all: {} },
-      size: 1000,
-    });
-    let connectors = connectorResult.hits.hits;
-    let length = connectors.length;
-    const query: QueryDslQueryContainer = indexNames
-      ? { terms: { index_name: indexNames } }
-      : { match_all: {} };
-    while (length >= 1000) {
-      const newConnectorResult = await client.asCurrentUser.search<ConnectorDocument>({
-        from: 0,
-        index: CONNECTORS_INDEX,
-        query,
-        size: 1000,
-      });
-      connectors = connectors.concat(newConnectorResult.hits.hits);
-      length = newConnectorResult.hits.hits.length;
-    }
-    return connectors
-      .map(({ _source, _id }) => (_source ? { ..._source, id: _id } : undefined))
-      .filter(isNotNullish);
+    return await fetchAll<Connector>(client, CONNECTORS_INDEX, query);
} catch (error) {
if (isIndexNotFoundException(error)) {
await setupConnectorsIndices(client.asCurrentUser);
102 changes: 102 additions & 0 deletions x-pack/plugins/enterprise_search/server/lib/crawler/fetch_crawlers.ts
@@ -0,0 +1,102 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/types';
import { IScopedClusterClient } from '@kbn/core/server';

import { Crawler, CrawlRequest } from '../../../common/types/crawler';
import { fetchAll } from '../fetch_all';

const CRAWLER_CONFIGURATIONS_INDEX = '.ent-search-actastic-crawler2_configurations';
const CRAWLER_CRAWL_REQUESTS_INDEX = '.ent-search-actastic-crawler2_crawl_requests';

export const fetchMostRecentCrawlerRequestByConfigurationId = async (
client: IScopedClusterClient,
configurationId: string
): Promise<CrawlRequest | undefined> => {
try {
const crawlRequestResult = await client.asCurrentUser.search<CrawlRequest>({
index: CRAWLER_CRAWL_REQUESTS_INDEX,
query: { term: { configuration_oid: configurationId } },
sort: 'created_at:desc',
});
const result = crawlRequestResult.hits.hits[0]?._source;

return result;
} catch (error) {
return undefined;
}
};

export const fetchCrawlerByIndexName = async (
client: IScopedClusterClient,
indexName: string
): Promise<Crawler | undefined> => {
let crawler: Crawler | undefined;
try {
const crawlerResult = await client.asCurrentUser.search<Crawler>({
index: CRAWLER_CONFIGURATIONS_INDEX,
query: { term: { index_name: indexName } },
});
crawler = crawlerResult.hits.hits[0]?._source;
} catch (error) {
return undefined;
}

if (crawler) {
try {
const mostRecentCrawlRequest = await fetchMostRecentCrawlerRequestByConfigurationId(
client,
crawler.id
);

return {
...crawler,
most_recent_crawl_request_status: mostRecentCrawlRequest?.status,
};
} catch (error) {
return crawler;
}
}

return undefined;
};

export const fetchCrawlers = async (
client: IScopedClusterClient,
indexNames?: string[]
): Promise<Crawler[]> => {
const query: QueryDslQueryContainer = indexNames
? { terms: { index_name: indexNames } }
: { match_all: {} };
let crawlers: Crawler[];
try {
crawlers = await fetchAll<Crawler>(client, CRAWLER_CONFIGURATIONS_INDEX, query);
} catch (error) {
return [];
}

try {
// TODO replace this with an aggregation query
const crawlersWithStatuses = await Promise.all(
crawlers.map(async (crawler): Promise<Crawler> => {
const mostRecentCrawlRequest = await fetchMostRecentCrawlerRequestByConfigurationId(
client,
crawler.id
);

return {
...crawler,
most_recent_crawl_request_status: mostRecentCrawlRequest?.status,
};
})
);
return crawlersWithStatuses;
} catch (error) {
return crawlers;
}
};
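Note on the TODO above: replacing the per-crawler round trips with a single aggregation could look roughly like the sketch below, which is not part of this commit. A terms aggregation on configuration_oid with a top_hits sub-aggregation sorted by created_at returns the newest crawl request per configuration in one query. It assumes configuration_oid is mapped as a keyword field; the helper name and the loose bucket typing are hypothetical.

import { IScopedClusterClient } from '@kbn/core/server';

import { CrawlerStatus, CrawlRequest } from '../../../common/types/crawler';

// Same hidden index the helpers above query; redeclared so the sketch is self-contained
const CRAWLER_CRAWL_REQUESTS_INDEX = '.ent-search-actastic-crawler2_crawl_requests';

// Hypothetical shape of a terms bucket carrying a top_hits sub-aggregation
interface MostRecentBucket {
  key: string;
  most_recent: { hits: { hits: Array<{ _source?: CrawlRequest }> } };
}

export const fetchMostRecentCrawlRequestStatuses = async (
  client: IScopedClusterClient,
  configurationIds: string[]
): Promise<Record<string, CrawlerStatus | undefined>> => {
  const result = await client.asCurrentUser.search({
    aggs: {
      by_configuration: {
        aggs: {
          // Newest crawl request within each configuration bucket
          most_recent: { top_hits: { size: 1, sort: [{ created_at: { order: 'desc' } }] } },
        },
        terms: { field: 'configuration_oid', size: Math.max(configurationIds.length, 1) },
      },
    },
    index: CRAWLER_CRAWL_REQUESTS_INDEX,
    query: { terms: { configuration_oid: configurationIds } },
    size: 0, // hits come back inside the aggregation, not at the top level
  });

  const buckets =
    (result.aggregations?.by_configuration as unknown as { buckets?: MostRecentBucket[] })
      ?.buckets ?? [];

  return Object.fromEntries(
    buckets.map((bucket) => [bucket.key, bucket.most_recent.hits.hits[0]?._source?.status])
  );
};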
37 changes: 37 additions & 0 deletions x-pack/plugins/enterprise_search/server/lib/fetch_all.ts
@@ -0,0 +1,37 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { QueryDslQueryContainer, SearchHit } from '@elastic/elasticsearch/lib/api/types';
import { IScopedClusterClient } from '@kbn/core-elasticsearch-server';

import { isNotNullish } from '../../common/utils/is_not_nullish';

// TODO add safety to prevent an OOM error if the query results are too large

export const fetchAll = async <T>(
client: IScopedClusterClient,
index: string,
query: QueryDslQueryContainer
): Promise<T[]> => {
let hits: Array<SearchHit<T>> = [];
let accumulator: Array<SearchHit<T>> = [];

do {
const connectorResult = await client.asCurrentUser.search<T>({
from: accumulator.length,
index,
query,
size: 1000,
});
hits = connectorResult.hits.hits;
accumulator = accumulator.concat(hits);
} while (hits.length >= 1000);

return accumulator
.map(({ _source, _id }) => (_source ? { ..._source, id: _id } : undefined))
.filter(isNotNullish);
};
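Two caveats on fetchAll worth noting alongside the TODO: from/size pagination cannot read past index.max_result_window (10,000 documents by default), and concatenating every page into accumulator is exactly the memory risk the TODO describes. A possible follow-up, sketched here and not shipped in this commit, pages with a point in time plus search_after and hands each page to a callback instead of accumulating; the fetchAllStreaming name and callback shape are hypothetical.

import { QueryDslQueryContainer, SortResults } from '@elastic/elasticsearch/lib/api/types';
import { IScopedClusterClient } from '@kbn/core-elasticsearch-server';

import { isNotNullish } from '../../common/utils/is_not_nullish';

export const fetchAllStreaming = async <T>(
  client: IScopedClusterClient,
  index: string,
  query: QueryDslQueryContainer,
  onPage: (docs: Array<T & { id: string }>) => void | Promise<void>
): Promise<void> => {
  // A point in time pins a consistent view of the index for the whole scan
  const pit = await client.asCurrentUser.openPointInTime({ index, keep_alive: '1m' });
  try {
    let searchAfter: SortResults | undefined;
    while (true) {
      const result = await client.asCurrentUser.search<T>({
        pit: { id: pit.id, keep_alive: '1m' },
        query,
        search_after: searchAfter,
        size: 1000,
        sort: ['_shard_doc'], // stable tiebreaker; only valid together with a point in time
      });
      const hits = result.hits.hits;
      if (hits.length === 0) break;
      // Stream each page to the caller instead of accumulating in memory
      await onPage(
        hits
          .map(({ _source, _id }) => (_source ? { ..._source, id: _id } : undefined))
          .filter(isNotNullish)
      );
      // Resume the next page after the last hit's sort values
      searchAfter = hits[hits.length - 1].sort;
    }
  } finally {
    await client.asCurrentUser.closePointInTime({ id: pit.id });
  }
};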