diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/add_domain/add_domain_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/add_domain/add_domain_logic.test.ts
index 633c8de5e5655..4dd699a733b8c 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/add_domain/add_domain_logic.test.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/add_domain/add_domain_logic.test.ts
@@ -304,6 +304,8 @@ describe('AddDomainLogic', () => {
http.post.mockReturnValueOnce(
Promise.resolve({
domains: [],
+ events: [],
+ most_recent_crawl_request: null,
})
);
@@ -312,6 +314,8 @@ describe('AddDomainLogic', () => {
expect(CrawlerLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith({
domains: [],
+ events: [],
+ mostRecentCrawlRequest: null,
});
});
@@ -328,6 +332,8 @@ describe('AddDomainLogic', () => {
name: 'https://swiftype.co/site-search',
},
],
+ events: [],
+ most_recent_crawl_request: null,
})
);
jest.spyOn(AddDomainLogic.actions, 'onSubmitNewDomainSuccess');
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_requests_table.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_requests_table.test.tsx
index 13a7c641822b9..b36b92bc42847 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_requests_table.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_requests_table.test.tsx
@@ -16,16 +16,17 @@ import { EuiBasicTable, EuiEmptyPrompt } from '@elastic/eui';
import { mountWithIntl } from '../../../../test_helpers';
-import { CrawlerStatus, CrawlRequest } from '../types';
+import { CrawlEvent, CrawlerStatus } from '../types';
import { CrawlRequestsTable } from './crawl_requests_table';
-const values: { crawlRequests: CrawlRequest[] } = {
+const values: { events: CrawlEvent[] } = {
// CrawlerLogic
- crawlRequests: [
+ events: [
{
id: '618d0e66abe97bc688328900',
status: CrawlerStatus.Pending,
+ stage: 'crawl',
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
@@ -69,7 +70,7 @@ describe('CrawlRequestsTable', () => {
it('displays an empty prompt when there are no crawl requests', () => {
setMockValues({
...values,
- crawlRequests: [],
+ events: [],
});
    wrapper = shallow(<CrawlRequestsTable />);
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_requests_table.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_requests_table.tsx
index 8a2b08878ff78..6d14e35946adf 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_requests_table.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_requests_table.tsx
@@ -9,16 +9,21 @@ import React from 'react';
import { useValues } from 'kea';
-import { EuiBasicTable, EuiEmptyPrompt, EuiTableFieldDataColumnType } from '@elastic/eui';
+import {
+ EuiBasicTable,
+ EuiEmptyPrompt,
+ EuiIconTip,
+ EuiTableFieldDataColumnType,
+} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { CrawlerLogic } from '../crawler_logic';
-import { CrawlRequest, readableCrawlerStatuses } from '../types';
+import { CrawlEvent, readableCrawlerStatuses } from '../types';
import { CustomFormattedTimestamp } from './custom_formatted_timestamp';
-const columns: Array<EuiTableFieldDataColumnType<CrawlRequest>> = [
+const columns: Array<EuiTableFieldDataColumnType<CrawlEvent>> = [
{
field: 'id',
name: i18n.translate(
@@ -36,7 +41,7 @@ const columns: Array<EuiTableFieldDataColumnType<CrawlRequest>> = [
defaultMessage: 'Created',
}
),
-    render: (createdAt: CrawlRequest['createdAt']) => (
+    render: (createdAt: CrawlEvent['createdAt']) => (
      <CustomFormattedTimestamp timestamp={createdAt} />
    ),
},
@@ -48,17 +53,32 @@ const columns: Array<EuiTableFieldDataColumnType<CrawlRequest>> = [
defaultMessage: 'Status',
}
),
- render: (status: CrawlRequest['status']) => readableCrawlerStatuses[status],
+ align: 'right',
+ render: (status: CrawlEvent['status'], event: CrawlEvent) => (
+ <>
+ {event.stage === 'process' && (
+          <EuiIconTip
+            type="iInCircle"
+            content={i18n.translate(
+              'xpack.enterpriseSearch.appSearch.crawler.crawlRequestsTable.processCrawlTooltipContent',
+              { defaultMessage: 'Process crawl' }
+            )}
+          />
+ )}
+ {readableCrawlerStatuses[status]}
+      </>
+ ),
},
];
export const CrawlRequestsTable: React.FC = () => {
- const { crawlRequests } = useValues(CrawlerLogic);
+ const { events } = useValues(CrawlerLogic);
  return (
    <EuiBasicTable
      columns={columns}
-      items={crawlRequests}
+      items={events}
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.test.ts
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.test.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.test.ts
availableDeduplicationFields: ['title', 'description'],
},
],
+ events: [
+ {
+ id: '618d0e66abe97bc688328900',
+ status: CrawlerStatus.Pending,
+ stage: 'crawl',
+ createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ beganAt: null,
+ completedAt: null,
+ },
+ ],
+ mostRecentCrawlRequest: {
+ id: '618d0e66abe97bc688328900',
+ status: CrawlerStatus.Pending,
+ createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ beganAt: null,
+ completedAt: null,
+ },
};
beforeEach(() => {
@@ -127,32 +146,16 @@ describe('CrawlerLogic', () => {
it('should set all received data as top-level values', () => {
expect(CrawlerLogic.values.domains).toEqual(crawlerData.domains);
+ expect(CrawlerLogic.values.events).toEqual(crawlerData.events);
+ expect(CrawlerLogic.values.mostRecentCrawlRequest).toEqual(
+ crawlerData.mostRecentCrawlRequest
+ );
});
it('should set dataLoading to false', () => {
expect(CrawlerLogic.values.dataLoading).toEqual(false);
});
});
-
- describe('onReceiveCrawlRequests', () => {
- const crawlRequests: CrawlRequest[] = [
- {
- id: '618d0e66abe97bc688328900',
- status: CrawlerStatus.Pending,
- createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
- beganAt: null,
- completedAt: null,
- },
- ];
-
- beforeEach(() => {
- CrawlerLogic.actions.onReceiveCrawlRequests(crawlRequests);
- });
-
- it('should set the crawl requests', () => {
- expect(CrawlerLogic.values.crawlRequests).toEqual(crawlRequests);
- });
- });
});
describe('listeners', () => {
@@ -170,20 +173,90 @@ describe('CrawlerLogic', () => {
);
});
+ it('creates a new timeout when there is an active process crawl', async () => {
+ jest.spyOn(CrawlerLogic.actions, 'createNewTimeoutForCrawlerData');
+ http.get.mockReturnValueOnce(
+ Promise.resolve({
+ ...MOCK_SERVER_CRAWLER_DATA,
+ most_recent_crawl_request: null,
+ events: [
+ {
+ id: '618d0e66abe97bc688328900',
+ status: CrawlerStatus.Running,
+ stage: 'process',
+ createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ beganAt: null,
+ completedAt: null,
+ },
+ ],
+ })
+ );
+
+ CrawlerLogic.actions.fetchCrawlerData();
+ await nextTick();
+
+ expect(CrawlerLogic.actions.createNewTimeoutForCrawlerData).toHaveBeenCalled();
+ });
+
+ describe('on success', () => {
+ [
+ CrawlerStatus.Pending,
+ CrawlerStatus.Starting,
+ CrawlerStatus.Running,
+ CrawlerStatus.Canceling,
+ ].forEach((status) => {
+ it(`creates a new timeout for status ${status}`, async () => {
+ jest.spyOn(CrawlerLogic.actions, 'createNewTimeoutForCrawlerData');
+ http.get.mockReturnValueOnce(
+ Promise.resolve({
+ ...MOCK_SERVER_CRAWLER_DATA,
+ most_recent_crawl_request: { status },
+ })
+ );
+
+ CrawlerLogic.actions.fetchCrawlerData();
+ await nextTick();
+
+ expect(CrawlerLogic.actions.createNewTimeoutForCrawlerData).toHaveBeenCalled();
+ });
+ });
+
+ [CrawlerStatus.Success, CrawlerStatus.Failed, CrawlerStatus.Canceled].forEach((status) => {
+ it(`clears the timeout and fetches data for status ${status}`, async () => {
+ jest.spyOn(CrawlerLogic.actions, 'clearTimeoutId');
+ jest.spyOn(CrawlerLogic.actions, 'fetchCrawlerData');
+ http.get.mockReturnValueOnce(
+ Promise.resolve({
+ ...MOCK_SERVER_CRAWLER_DATA,
+ most_recent_crawl_request: { status },
+ })
+ );
+
+ CrawlerLogic.actions.fetchCrawlerData();
+ await nextTick();
+
+ expect(CrawlerLogic.actions.clearTimeoutId).toHaveBeenCalled();
+ expect(CrawlerLogic.actions.fetchCrawlerData).toHaveBeenCalled();
+ });
+ });
+ });
+
it('calls flashApiErrors when there is an error on the request for crawler data', async () => {
+ jest.spyOn(CrawlerLogic.actions, 'createNewTimeoutForCrawlerData');
http.get.mockReturnValueOnce(Promise.reject('error'));
CrawlerLogic.actions.fetchCrawlerData();
await nextTick();
expect(flashAPIErrors).toHaveBeenCalledWith('error');
+ expect(CrawlerLogic.actions.createNewTimeoutForCrawlerData).toHaveBeenCalled();
});
});
describe('startCrawl', () => {
describe('success path', () => {
- it('creates a new crawl request and then fetches the latest crawl requests', async () => {
- jest.spyOn(CrawlerLogic.actions, 'getLatestCrawlRequests');
+ it('creates a new crawl request and then fetches the latest crawler data', async () => {
+ jest.spyOn(CrawlerLogic.actions, 'fetchCrawlerData');
http.post.mockReturnValueOnce(Promise.resolve());
CrawlerLogic.actions.startCrawl();
@@ -192,7 +265,7 @@ describe('CrawlerLogic', () => {
expect(http.post).toHaveBeenCalledWith(
'/internal/app_search/engines/some-engine/crawler/crawl_requests'
);
- expect(CrawlerLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
+ expect(CrawlerLogic.actions.fetchCrawlerData).toHaveBeenCalled();
});
});
@@ -210,8 +283,8 @@ describe('CrawlerLogic', () => {
describe('stopCrawl', () => {
describe('success path', () => {
- it('stops the crawl starts and then fetches the latest crawl requests', async () => {
- jest.spyOn(CrawlerLogic.actions, 'getLatestCrawlRequests');
+      it('stops the crawl and then fetches the latest crawler data', async () => {
+ jest.spyOn(CrawlerLogic.actions, 'fetchCrawlerData');
http.post.mockReturnValueOnce(Promise.resolve());
CrawlerLogic.actions.stopCrawl();
@@ -220,13 +293,13 @@ describe('CrawlerLogic', () => {
expect(http.post).toHaveBeenCalledWith(
'/internal/app_search/engines/some-engine/crawler/crawl_requests/cancel'
);
- expect(CrawlerLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
+ expect(CrawlerLogic.actions.fetchCrawlerData).toHaveBeenCalled();
});
});
describe('on failure', () => {
it('flashes an error message', async () => {
- jest.spyOn(CrawlerLogic.actions, 'getLatestCrawlRequests');
+ jest.spyOn(CrawlerLogic.actions, 'fetchCrawlerData');
http.post.mockReturnValueOnce(Promise.reject('error'));
CrawlerLogic.actions.stopCrawl();
@@ -237,19 +310,19 @@ describe('CrawlerLogic', () => {
});
});
- describe('createNewTimeoutForCrawlRequests', () => {
+ describe('createNewTimeoutForCrawlerData', () => {
it('saves the timeout ID in the logic', () => {
jest.spyOn(CrawlerLogic.actions, 'onCreateNewTimeout');
- jest.spyOn(CrawlerLogic.actions, 'getLatestCrawlRequests');
+ jest.spyOn(CrawlerLogic.actions, 'fetchCrawlerData');
- CrawlerLogic.actions.createNewTimeoutForCrawlRequests(2000);
+ CrawlerLogic.actions.createNewTimeoutForCrawlerData(2000);
expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), 2000);
expect(CrawlerLogic.actions.onCreateNewTimeout).toHaveBeenCalled();
jest.runAllTimers();
- expect(CrawlerLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
+ expect(CrawlerLogic.actions.fetchCrawlerData).toHaveBeenCalled();
});
it('clears a timeout if one already exists', () => {
@@ -258,130 +331,32 @@ describe('CrawlerLogic', () => {
timeoutId,
});
- CrawlerLogic.actions.createNewTimeoutForCrawlRequests(2000);
+ CrawlerLogic.actions.createNewTimeoutForCrawlerData(2000);
expect(clearTimeout).toHaveBeenCalledWith(timeoutId);
});
});
-
- describe('getLatestCrawlRequests', () => {
- describe('on success', () => {
- [
- CrawlerStatus.Pending,
- CrawlerStatus.Starting,
- CrawlerStatus.Running,
- CrawlerStatus.Canceling,
- ].forEach((status) => {
- it(`creates a new timeout for status ${status}`, async () => {
- jest.spyOn(CrawlerLogic.actions, 'createNewTimeoutForCrawlRequests');
- http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
-
- CrawlerLogic.actions.getLatestCrawlRequests();
- await nextTick();
-
- expect(CrawlerLogic.actions.createNewTimeoutForCrawlRequests).toHaveBeenCalled();
- });
- });
-
- [CrawlerStatus.Success, CrawlerStatus.Failed, CrawlerStatus.Canceled].forEach((status) => {
- it(`clears the timeout and fetches data for status ${status}`, async () => {
- jest.spyOn(CrawlerLogic.actions, 'clearTimeoutId');
- jest.spyOn(CrawlerLogic.actions, 'fetchCrawlerData');
- http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
-
- CrawlerLogic.actions.getLatestCrawlRequests();
- await nextTick();
-
- expect(CrawlerLogic.actions.clearTimeoutId).toHaveBeenCalled();
- expect(CrawlerLogic.actions.fetchCrawlerData).toHaveBeenCalled();
- });
-
- it(`optionally supresses fetching data for status ${status}`, async () => {
- jest.spyOn(CrawlerLogic.actions, 'clearTimeoutId');
- jest.spyOn(CrawlerLogic.actions, 'fetchCrawlerData');
- http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
-
- CrawlerLogic.actions.getLatestCrawlRequests(false);
- await nextTick();
-
- expect(CrawlerLogic.actions.clearTimeoutId).toHaveBeenCalled();
- expect(CrawlerLogic.actions.fetchCrawlerData).toHaveBeenCalledTimes(0);
- });
- });
- });
-
- describe('on failure', () => {
- it('creates a new timeout', async () => {
- jest.spyOn(CrawlerLogic.actions, 'createNewTimeoutForCrawlRequests');
- http.get.mockReturnValueOnce(Promise.reject());
-
- CrawlerLogic.actions.getLatestCrawlRequests();
- await nextTick();
-
- expect(CrawlerLogic.actions.createNewTimeoutForCrawlRequests).toHaveBeenCalled();
- });
- });
- });
});
describe('selectors', () => {
describe('mostRecentCrawlRequestStatus', () => {
- it('is Success when there are no crawl requests', () => {
+ it('is Success when there is no recent crawl request', () => {
mount({
- crawlRequests: [],
+ mostRecentCrawlRequest: null,
});
expect(CrawlerLogic.values.mostRecentCrawlRequestStatus).toEqual(CrawlerStatus.Success);
});
- it('is Success when there are only crawl requests', () => {
+ it('is the most recent crawl request status', () => {
mount({
- crawlRequests: [
- {
- id: '2',
- status: CrawlerStatus.Skipped,
- createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
- beganAt: null,
- completedAt: null,
- },
- {
- id: '1',
- status: CrawlerStatus.Skipped,
- createdAt: 'Mon, 30 Aug 2020 17:00:00 +0000',
- beganAt: null,
- completedAt: null,
- },
- ],
- });
-
- expect(CrawlerLogic.values.mostRecentCrawlRequestStatus).toEqual(CrawlerStatus.Success);
- });
-
- it('is the first non-skipped crawl request status', () => {
- mount({
- crawlRequests: [
- {
- id: '3',
- status: CrawlerStatus.Skipped,
- createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
- beganAt: null,
- completedAt: null,
- },
- {
- id: '2',
- status: CrawlerStatus.Failed,
- createdAt: 'Mon, 30 Aug 2020 17:00:00 +0000',
- beganAt: null,
- completedAt: null,
- },
- {
- id: '1',
- status: CrawlerStatus.Success,
- createdAt: 'Mon, 29 Aug 2020 17:00:00 +0000',
- beganAt: null,
- completedAt: null,
- },
- ],
+ mostRecentCrawlRequest: {
+ id: '2',
+ status: CrawlerStatus.Failed,
+ createdAt: 'Mon, 30 Aug 2020 17:00:00 +0000',
+ beganAt: null,
+ completedAt: null,
+ },
});
expect(CrawlerLogic.values.mostRecentCrawlRequestStatus).toEqual(CrawlerStatus.Failed);
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.ts
index 972532597e344..5b9960ddf54e0 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_logic.ts
@@ -12,34 +12,33 @@ import { flashAPIErrors } from '../../../shared/flash_messages';
import { HttpLogic } from '../../../shared/http';
import { EngineLogic } from '../engine';
-import {
- CrawlerData,
- CrawlerDomain,
- CrawlRequest,
- CrawlRequestFromServer,
- CrawlerStatus,
-} from './types';
-import { crawlerDataServerToClient, crawlRequestServerToClient } from './utils';
+import { CrawlerData, CrawlerDomain, CrawlEvent, CrawlRequest, CrawlerStatus } from './types';
+import { crawlerDataServerToClient } from './utils';
const POLLING_DURATION = 1000;
const POLLING_DURATION_ON_FAILURE = 5000;
+const ACTIVE_STATUSES = [
+ CrawlerStatus.Pending,
+ CrawlerStatus.Starting,
+ CrawlerStatus.Running,
+ CrawlerStatus.Canceling,
+];
export interface CrawlerValues {
- crawlRequests: CrawlRequest[];
+ events: CrawlEvent[];
dataLoading: boolean;
domains: CrawlerDomain[];
- mostRecentCrawlRequestStatus: CrawlerStatus;
+ mostRecentCrawlRequest: CrawlRequest | null;
+ mostRecentCrawlRequestStatus: CrawlerStatus | null;
timeoutId: NodeJS.Timeout | null;
}
interface CrawlerActions {
clearTimeoutId(): void;
- createNewTimeoutForCrawlRequests(duration: number): { duration: number };
+ createNewTimeoutForCrawlerData(duration: number): { duration: number };
fetchCrawlerData(): void;
- getLatestCrawlRequests(refreshData?: boolean): { refreshData?: boolean };
onCreateNewTimeout(timeoutId: NodeJS.Timeout): { timeoutId: NodeJS.Timeout };
onReceiveCrawlerData(data: CrawlerData): { data: CrawlerData };
- onReceiveCrawlRequests(crawlRequests: CrawlRequest[]): { crawlRequests: CrawlRequest[] };
startCrawl(): void;
stopCrawl(): void;
}
@@ -48,12 +47,10 @@ export const CrawlerLogic = kea<MakeLogicType<CrawlerValues, CrawlerActions>>({
path: ['enterprise_search', 'app_search', 'crawler_logic'],
actions: {
clearTimeoutId: true,
- createNewTimeoutForCrawlRequests: (duration) => ({ duration }),
+ createNewTimeoutForCrawlerData: (duration) => ({ duration }),
fetchCrawlerData: true,
- getLatestCrawlRequests: (refreshData) => ({ refreshData }),
onCreateNewTimeout: (timeoutId) => ({ timeoutId }),
onReceiveCrawlerData: (data) => ({ data }),
- onReceiveCrawlRequests: (crawlRequests) => ({ crawlRequests }),
startCrawl: () => null,
stopCrawl: () => null,
},
@@ -70,10 +67,16 @@ export const CrawlerLogic = kea<MakeLogicType<CrawlerValues, CrawlerActions>>({
onReceiveCrawlerData: (_, { data: { domains } }) => domains,
},
],
- crawlRequests: [
+ events: [
[],
{
- onReceiveCrawlRequests: (_, { crawlRequests }) => crawlRequests,
+ onReceiveCrawlerData: (_, { data: { events } }) => events,
+ },
+ ],
+ mostRecentCrawlRequest: [
+ null,
+ {
+ onReceiveCrawlerData: (_, { data: { mostRecentCrawlRequest } }) => mostRecentCrawlRequest,
},
],
timeoutId: [
@@ -86,15 +89,12 @@ export const CrawlerLogic = kea<MakeLogicType<CrawlerValues, CrawlerActions>>({
},
selectors: ({ selectors }) => ({
mostRecentCrawlRequestStatus: [
- () => [selectors.crawlRequests],
- (crawlRequests: CrawlerValues['crawlRequests']) => {
- const eligibleCrawlRequests = crawlRequests.filter(
- (req) => req.status !== CrawlerStatus.Skipped
- );
- if (eligibleCrawlRequests.length === 0) {
- return CrawlerStatus.Success;
+ () => [selectors.mostRecentCrawlRequest],
+ (crawlRequest: CrawlerValues['mostRecentCrawlRequest']) => {
+ if (crawlRequest) {
+ return crawlRequest.status;
}
- return eligibleCrawlRequests[0].status;
+ return CrawlerStatus.Success;
},
],
}),
@@ -107,10 +107,21 @@ export const CrawlerLogic = kea<MakeLogicType<CrawlerValues, CrawlerActions>>({
const response = await http.get(`/internal/app_search/engines/${engineName}/crawler`);
const crawlerData = crawlerDataServerToClient(response);
-
actions.onReceiveCrawlerData(crawlerData);
+
+ const continuePoll =
+ (crawlerData.mostRecentCrawlRequest &&
+ ACTIVE_STATUSES.includes(crawlerData.mostRecentCrawlRequest.status)) ||
+ crawlerData.events.find((event) => ACTIVE_STATUSES.includes(event.status));
+
+ if (continuePoll) {
+ actions.createNewTimeoutForCrawlerData(POLLING_DURATION);
+ } else {
+ actions.clearTimeoutId();
+ }
} catch (e) {
flashAPIErrors(e);
+ actions.createNewTimeoutForCrawlerData(POLLING_DURATION_ON_FAILURE);
}
},
startCrawl: async () => {
@@ -119,7 +130,7 @@ export const CrawlerLogic = kea<MakeLogicType<CrawlerValues, CrawlerActions>>({
try {
await http.post(`/internal/app_search/engines/${engineName}/crawler/crawl_requests`);
- actions.getLatestCrawlRequests();
+ actions.fetchCrawlerData();
} catch (e) {
flashAPIErrors(e);
}
@@ -130,55 +141,22 @@ export const CrawlerLogic = kea<MakeLogicType<CrawlerValues, CrawlerActions>>({
try {
await http.post(`/internal/app_search/engines/${engineName}/crawler/crawl_requests/cancel`);
- actions.getLatestCrawlRequests();
+ actions.fetchCrawlerData();
} catch (e) {
flashAPIErrors(e);
}
},
- createNewTimeoutForCrawlRequests: ({ duration }) => {
+ createNewTimeoutForCrawlerData: ({ duration }) => {
if (values.timeoutId) {
clearTimeout(values.timeoutId);
}
const timeoutIdId = setTimeout(() => {
- actions.getLatestCrawlRequests();
+ actions.fetchCrawlerData();
}, duration);
actions.onCreateNewTimeout(timeoutIdId);
},
- getLatestCrawlRequests: async ({ refreshData = true }) => {
- const { http } = HttpLogic.values;
- const { engineName } = EngineLogic.values;
-
- try {
- const crawlRequestsFromServer: CrawlRequestFromServer[] = await http.get(
- `/internal/app_search/engines/${engineName}/crawler/crawl_requests`
- );
- const crawlRequests = crawlRequestsFromServer.map(crawlRequestServerToClient);
- actions.onReceiveCrawlRequests(crawlRequests);
- if (
- [
- CrawlerStatus.Pending,
- CrawlerStatus.Starting,
- CrawlerStatus.Running,
- CrawlerStatus.Canceling,
- ].includes(crawlRequests[0]?.status)
- ) {
- actions.createNewTimeoutForCrawlRequests(POLLING_DURATION);
- } else if (
- [CrawlerStatus.Success, CrawlerStatus.Failed, CrawlerStatus.Canceled].includes(
- crawlRequests[0]?.status
- )
- ) {
- actions.clearTimeoutId();
- if (refreshData) {
- actions.fetchCrawlerData();
- }
- }
- } catch (e) {
- actions.createNewTimeoutForCrawlRequests(POLLING_DURATION_ON_FAILURE);
- }
- },
}),
events: ({ values }) => ({
beforeUnmount: () => {
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.test.tsx
index 705dfc44baa88..67f8826dace8a 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.test.tsx
@@ -28,7 +28,7 @@ import {
CrawlerPolicies,
CrawlerRules,
CrawlerStatus,
- CrawlRequestFromServer,
+ CrawlEventFromServer,
} from './types';
const domains: CrawlerDomainFromServer[] = [
@@ -65,9 +65,10 @@ const domains: CrawlerDomainFromServer[] = [
},
];
-const crawlRequests: CrawlRequestFromServer[] = [
+const events: CrawlEventFromServer[] = [
{
id: 'a',
+ stage: 'crawl',
status: CrawlerStatus.Canceled,
created_at: 'Mon, 31 Aug 2020 11:00:00 +0000',
began_at: 'Mon, 31 Aug 2020 12:00:00 +0000',
@@ -75,6 +76,7 @@ const crawlRequests: CrawlRequestFromServer[] = [
},
{
id: 'b',
+ stage: 'crawl',
status: CrawlerStatus.Success,
created_at: 'Mon, 31 Aug 2020 14:00:00 +0000',
began_at: 'Mon, 31 Aug 2020 15:00:00 +0000',
@@ -86,7 +88,8 @@ describe('CrawlerOverview', () => {
const mockValues = {
dataLoading: false,
domains,
- crawlRequests,
+ events,
+ mostRecentCrawlRequest: null,
};
beforeEach(() => {
@@ -118,7 +121,7 @@ describe('CrawlerOverview', () => {
});
it('hides the domain and crawl request tables when there are no domains, and no crawl requests', () => {
- setMockValues({ ...mockValues, domains: [], crawlRequests: [] });
+ setMockValues({ ...mockValues, domains: [], events: [] });
    const wrapper = shallow(<CrawlerOverview />);
@@ -130,7 +133,7 @@ describe('CrawlerOverview', () => {
});
it('shows the domain and the crawl request tables when there are domains, but no crawl requests', () => {
- setMockValues({ ...mockValues, crawlRequests: [] });
+ setMockValues({ ...mockValues, events: [] });
    const wrapper = shallow(<CrawlerOverview />);
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.tsx
index be4e1743748b7..b6fa50e06c904 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview.tsx
@@ -29,7 +29,7 @@ import { CRAWLER_TITLE } from './constants';
import { CrawlerLogic } from './crawler_logic';
export const CrawlerOverview: React.FC = () => {
- const { crawlRequests, dataLoading, domains } = useValues(CrawlerLogic);
+ const { events, dataLoading, domains } = useValues(CrawlerLogic);
return (
)}
- {(crawlRequests.length > 0 || domains.length > 0) && (
+ {(events.length > 0 || domains.length > 0) && (
<>
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview_logic.test.ts
index 9a5d99abdd469..a701c43d4775c 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview_logic.test.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_overview_logic.test.ts
@@ -43,6 +43,8 @@ const MOCK_SERVER_CRAWLER_DATA: CrawlerDataFromServer = {
available_deduplication_fields: ['title', 'description'],
},
],
+ events: [],
+ most_recent_crawl_request: null,
};
const MOCK_CLIENT_CRAWLER_DATA = crawlerDataServerToClient(MOCK_SERVER_CRAWLER_DATA);
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.test.tsx
index 8c49e97d6462b..da2b3cf2261b7 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.test.tsx
@@ -19,7 +19,6 @@ import { CrawlerSingleDomain } from './crawler_single_domain';
describe('CrawlerRouter', () => {
const mockActions = {
fetchCrawlerData: jest.fn(),
- getLatestCrawlRequests: jest.fn(),
};
let wrapper: ShallowWrapper;
@@ -32,7 +31,6 @@ describe('CrawlerRouter', () => {
it('calls fetchCrawlerData and starts polling on page load', () => {
expect(mockActions.fetchCrawlerData).toHaveBeenCalledTimes(1);
- expect(mockActions.getLatestCrawlRequests).toHaveBeenCalledWith(false);
});
it('renders a crawler views', () => {
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.tsx
index f95423cd2c704..2cebb28d962f0 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.tsx
@@ -18,11 +18,10 @@ import { CrawlerOverview } from './crawler_overview';
import { CrawlerSingleDomain } from './crawler_single_domain';
export const CrawlerRouter: React.FC = () => {
- const { fetchCrawlerData, getLatestCrawlRequests } = useActions(CrawlerLogic);
+ const { fetchCrawlerData } = useActions(CrawlerLogic);
useEffect(() => {
fetchCrawlerData();
- getLatestCrawlRequests(false);
}, []);
return (
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx
index 76612ee913c48..beb1e65af47a4 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx
@@ -36,7 +36,6 @@ const MOCK_VALUES = {
const MOCK_ACTIONS = {
fetchCrawlerData: jest.fn(),
fetchDomainData: jest.fn(),
- getLatestCrawlRequests: jest.fn(),
};
describe('CrawlerSingleDomain', () => {
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/types.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/types.ts
index 8cfbce6c10315..a4d5a984faaec 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/types.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/types.ts
@@ -120,10 +120,14 @@ export interface CrawlerDomainFromServer {
export interface CrawlerData {
domains: CrawlerDomain[];
+ events: CrawlEvent[];
+ mostRecentCrawlRequest: CrawlRequest | null;
}
export interface CrawlerDataFromServer {
domains: CrawlerDomainFromServer[];
+ events: CrawlEventFromServer[];
+ most_recent_crawl_request: CrawlRequestFromServer | null;
}
export interface CrawlerDomainValidationResultFromServer {
@@ -191,6 +195,26 @@ export interface CrawlRequest {
completedAt: string | null;
}
+export type CrawlEventStage = 'crawl' | 'process';
+
+export interface CrawlEventFromServer {
+ id: string;
+ stage: CrawlEventStage;
+ status: CrawlerStatus;
+ created_at: string;
+ began_at: string | null;
+ completed_at: string | null;
+}
+
+export interface CrawlEvent {
+ id: string;
+ stage: CrawlEventStage;
+ status: CrawlerStatus;
+ createdAt: string;
+ beganAt: string | null;
+ completedAt: string | null;
+}
+
export const readableCrawlerStatuses: { [key in CrawlerStatus]: string } = {
[CrawlerStatus.Pending]: i18n.translate(
'xpack.enterpriseSearch.appSearch.crawler.crawlerStatusOptions.pending',
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/utils.test.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/utils.test.ts
index b679a7cc9c12c..fc810ba8fd7cb 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/utils.test.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/utils.test.ts
@@ -153,10 +153,27 @@ describe('crawlerDataServerToClient', () => {
beforeAll(() => {
output = crawlerDataServerToClient({
domains,
+ events: [
+ {
+ id: '618d0e66abe97bc688328900',
+ status: CrawlerStatus.Pending,
+ stage: 'crawl',
+ created_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ began_at: null,
+ completed_at: null,
+ },
+ ],
+ most_recent_crawl_request: {
+ id: '618d0e66abe97bc688328900',
+ status: CrawlerStatus.Pending,
+ created_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ began_at: null,
+ completed_at: null,
+ },
});
});
- it('converts all domains from the server form to their client form', () => {
+ it('converts all data from the server form to their client form', () => {
expect(output.domains).toEqual([
{
id: 'x',
@@ -185,6 +202,23 @@ describe('crawlerDataServerToClient', () => {
availableDeduplicationFields: ['title', 'description'],
},
]);
+ expect(output.events).toEqual([
+ {
+ id: '618d0e66abe97bc688328900',
+ status: CrawlerStatus.Pending,
+ stage: 'crawl',
+ createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ beganAt: null,
+ completedAt: null,
+ },
+ ]);
+ expect(output.mostRecentCrawlRequest).toEqual({
+ id: '618d0e66abe97bc688328900',
+ status: CrawlerStatus.Pending,
+ createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ beganAt: null,
+ completedAt: null,
+ });
});
});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/utils.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/utils.ts
index e44e6c0e652fa..9c94040355d47 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/utils.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/utils.ts
@@ -16,6 +16,8 @@ import {
CrawlerDomainValidationStep,
CrawlRequestFromServer,
CrawlRequest,
+ CrawlEventFromServer,
+ CrawlEvent,
} from './types';
export function crawlerDomainServerToClient(payload: CrawlerDomainFromServer): CrawlerDomain {
@@ -76,11 +78,34 @@ export function crawlRequestServerToClient(crawlRequest: CrawlRequestFromServer)
};
}
+export function crawlerEventServerToClient(event: CrawlEventFromServer): CrawlEvent {
+ const {
+ id,
+ stage,
+ status,
+ created_at: createdAt,
+ began_at: beganAt,
+ completed_at: completedAt,
+ } = event;
+
+ return {
+ id,
+ stage,
+ status,
+ createdAt,
+ beganAt,
+ completedAt,
+ };
+}
+
export function crawlerDataServerToClient(payload: CrawlerDataFromServer): CrawlerData {
- const { domains } = payload;
+ const { domains, events, most_recent_crawl_request: mostRecentCrawlRequest } = payload;
return {
domains: domains.map((domain) => crawlerDomainServerToClient(domain)),
+ events: events.map((event) => crawlerEventServerToClient(event)),
+ mostRecentCrawlRequest:
+ mostRecentCrawlRequest && crawlRequestServerToClient(mostRecentCrawlRequest),
};
}