Split CrawlerLogic from CrawlerOverviewLogic (#110850)

Byron Hulcher 2021-09-02 08:56:20 -04:00 committed by GitHub
parent 814cf7a4fc
commit c7ee7d7898
15 changed files with 676 additions and 620 deletions
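
At a high level, the commit factors the shared crawler state machine out of the overview page: domains, crawl requests, loading state, and the crawl-request polling loop move into a new CrawlerLogic, while CrawlerOverviewLogic keeps only the overview-specific deleteDomain action and forwards any data it fetches back into CrawlerLogic. A rough sketch of the resulting division of labor, abridged from the diffs below:

  import { CrawlerDomain, CrawlerStatus, CrawlRequest } from './types';

  // CrawlerLogic (new, shared): owns crawler data and crawl-request polling.
  export interface CrawlerValues {
    crawlRequests: CrawlRequest[];
    dataLoading: boolean;
    domains: CrawlerDomain[];
    mostRecentCrawlRequestStatus: CrawlerStatus;
    timeoutId: NodeJS.Timeout | null;
  }

  // CrawlerOverviewLogic (slimmed): overview-only behavior, no values of its own.
  interface CrawlerOverviewActions {
    deleteDomain(domain: CrawlerDomain): { domain: CrawlerDomain };
  }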

View file

@@ -13,8 +13,8 @@ import {
} from '../../../../../__mocks__/kea_logic';
import '../../../../__mocks__/engine_logic.mock';
jest.mock('../../crawler_overview_logic', () => ({
CrawlerOverviewLogic: {
jest.mock('../../crawler_logic', () => ({
CrawlerLogic: {
actions: {
onReceiveCrawlerData: jest.fn(),
},
@@ -28,7 +28,7 @@ jest.mock('./utils', () => ({
import { nextTick } from '@kbn/test/jest';
import { CrawlerOverviewLogic } from '../../crawler_overview_logic';
import { CrawlerLogic } from '../../crawler_logic';
import { CrawlerDomain } from '../../types';
import { AddDomainLogic, AddDomainLogicValues } from './add_domain_logic';
@@ -310,7 +310,7 @@ describe('AddDomainLogic', () => {
AddDomainLogic.actions.submitNewDomain();
await nextTick();
expect(CrawlerOverviewLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith({
expect(CrawlerLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith({
domains: [],
});
});
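
These tests swap the real CrawlerLogic for a bare stub via jest.mock, so AddDomainLogic's listeners can be exercised without mounting the crawler logic, and the assertion then targets the stubbed action. A minimal sketch of the pattern (the listener trigger is elided; paths follow the file above):

  // jest.mock calls are hoisted above imports, so the factory must be
  // self-contained and must not close over variables declared later.
  jest.mock('../../crawler_logic', () => ({
    CrawlerLogic: {
      actions: {
        onReceiveCrawlerData: jest.fn(),
      },
    },
  }));

  import { CrawlerLogic } from '../../crawler_logic';

  it('forwards fetched crawler data to CrawlerLogic', async () => {
    // ...mount AddDomainLogic and trigger the listener under test here...
    expect(CrawlerLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith({ domains: [] });
  });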

View file

@@ -17,7 +17,7 @@ import { KibanaLogic } from '../../../../../shared/kibana';
import { ENGINE_CRAWLER_DOMAIN_PATH } from '../../../../routes';
import { EngineLogic, generateEnginePath } from '../../../engine';
import { CrawlerOverviewLogic } from '../../crawler_overview_logic';
import { CrawlerLogic } from '../../crawler_logic';
import {
CrawlerDataFromServer,
CrawlerDomain,
@@ -262,7 +262,7 @@ export const AddDomainLogic = kea<MakeLogicType<AddDomainLogicValues, AddDomainL
});
const crawlerData = crawlerDataServerToClient(response as CrawlerDataFromServer);
CrawlerOverviewLogic.actions.onReceiveCrawlerData(crawlerData);
CrawlerLogic.actions.onReceiveCrawlerData(crawlerData);
const newDomain = crawlerData.domains[crawlerData.domains.length - 1];
if (newDomain) {
actions.onSubmitNewDomainSuccess(newDomain);

View file

@@ -16,38 +16,12 @@ import { EuiBasicTable, EuiEmptyPrompt } from '@elastic/eui';
import { mountWithIntl } from '../../../../test_helpers';
import {
CrawlerDomain,
CrawlerPolicies,
CrawlerRules,
CrawlerStatus,
CrawlRequest,
} from '../types';
import { CrawlerStatus, CrawlRequest } from '../types';
import { CrawlRequestsTable } from './crawl_requests_table';
const values: { domains: CrawlerDomain[]; crawlRequests: CrawlRequest[] } = {
// CrawlerOverviewLogic
domains: [
{
id: '507f1f77bcf86cd799439011',
createdOn: 'Mon, 31 Aug 2020 17:00:00 +0000',
url: 'elastic.co',
documentCount: 13,
sitemaps: [],
entryPoints: [],
crawlRules: [],
defaultCrawlRule: {
id: '-',
policy: CrawlerPolicies.allow,
rule: CrawlerRules.regex,
pattern: '.*',
},
deduplicationEnabled: false,
deduplicationFields: ['title'],
availableDeduplicationFields: ['title', 'description'],
},
],
const values: { crawlRequests: CrawlRequest[] } = {
// CrawlerLogic
crawlRequests: [
{
id: '618d0e66abe97bc688328900',

View file

@@ -13,7 +13,7 @@ import { EuiBasicTable, EuiEmptyPrompt, EuiTableFieldDataColumnType } from '@ela
import { i18n } from '@kbn/i18n';
import { CrawlerOverviewLogic } from '../crawler_overview_logic';
import { CrawlerLogic } from '../crawler_logic';
import { CrawlRequest, readableCrawlerStatuses } from '../types';
import { CustomFormattedTimestamp } from './custom_formatted_timestamp';
@@ -53,7 +53,7 @@ const columns: Array<EuiTableFieldDataColumnType<CrawlRequest>> = [
];
export const CrawlRequestsTable: React.FC = () => {
const { crawlRequests } = useValues(CrawlerOverviewLogic);
const { crawlRequests } = useValues(CrawlerLogic);
return (
<EuiBasicTable

View file

@@ -13,11 +13,11 @@ import { EuiCallOut } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { CrawlerOverviewLogic } from '../crawler_overview_logic';
import { CrawlerLogic } from '../crawler_logic';
import { CrawlerStatus } from '../types';
export const CrawlerStatusBanner: React.FC = () => {
const { mostRecentCrawlRequestStatus } = useValues(CrawlerOverviewLogic);
const { mostRecentCrawlRequestStatus } = useValues(CrawlerLogic);
if (
mostRecentCrawlRequestStatus === CrawlerStatus.Running ||
mostRecentCrawlRequestStatus === CrawlerStatus.Starting ||

View file

@@ -13,14 +13,14 @@ import { EuiButton } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { CrawlerOverviewLogic } from '../../crawler_overview_logic';
import { CrawlerLogic } from '../../crawler_logic';
import { CrawlerStatus } from '../../types';
import { StopCrawlPopoverContextMenu } from './stop_crawl_popover_context_menu';
export const CrawlerStatusIndicator: React.FC = () => {
const { domains, mostRecentCrawlRequestStatus } = useValues(CrawlerOverviewLogic);
const { startCrawl, stopCrawl } = useActions(CrawlerOverviewLogic);
const { domains, mostRecentCrawlRequestStatus } = useValues(CrawlerLogic);
const { startCrawl, stopCrawl } = useActions(CrawlerLogic);
const disabledButton = (
<EuiButton disabled>

View file

@@ -20,6 +20,7 @@ import { KibanaLogic } from '../../../../shared/kibana';
import { AppLogic } from '../../../app_logic';
import { ENGINE_CRAWLER_DOMAIN_PATH } from '../../../routes';
import { generateEnginePath } from '../../engine';
import { CrawlerLogic } from '../crawler_logic';
import { CrawlerOverviewLogic } from '../crawler_overview_logic';
import { CrawlerDomain } from '../types';
@@ -28,7 +29,7 @@ import { getDeleteDomainConfirmationMessage } from '../utils';
import { CustomFormattedTimestamp } from './custom_formatted_timestamp';
export const DomainsTable: React.FC = () => {
const { domains } = useValues(CrawlerOverviewLogic);
const { domains } = useValues(CrawlerLogic);
const { deleteDomain } = useActions(CrawlerOverviewLogic);
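
DomainsTable is the clearest picture of the split: shared state now comes from CrawlerLogic, while the overview-only deleteDomain action still comes from CrawlerOverviewLogic. A component can read from any number of kea logics side by side; a minimal sketch (the markup is hypothetical):

  import React from 'react';
  import { useActions, useValues } from 'kea';

  import { CrawlerLogic } from '../crawler_logic';
  import { CrawlerOverviewLogic } from '../crawler_overview_logic';

  export const DomainsList: React.FC = () => {
    const { domains } = useValues(CrawlerLogic); // shared crawler state
    const { deleteDomain } = useActions(CrawlerOverviewLogic); // overview-only action

    return (
      <ul>
        {domains.map((domain) => (
          <li key={domain.id}>
            {domain.url} <button onClick={() => deleteDomain(domain)}>Delete</button>
          </li>
        ))}
      </ul>
    );
  };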

View file

@@ -0,0 +1,411 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
LogicMounter,
mockHttpValues,
mockFlashMessageHelpers,
} from '../../../__mocks__/kea_logic';
import '../../__mocks__/engine_logic.mock';
import { nextTick } from '@kbn/test/jest';
import { CrawlerLogic, CrawlerValues } from './crawler_logic';
import {
CrawlerData,
CrawlerDataFromServer,
CrawlerPolicies,
CrawlerRules,
CrawlerStatus,
CrawlRequest,
CrawlRule,
} from './types';
import { crawlerDataServerToClient } from './utils';
const DEFAULT_VALUES: CrawlerValues = {
crawlRequests: [],
dataLoading: true,
domains: [],
mostRecentCrawlRequestStatus: CrawlerStatus.Success,
timeoutId: null,
};
const DEFAULT_CRAWL_RULE: CrawlRule = {
id: '-',
policy: CrawlerPolicies.allow,
rule: CrawlerRules.regex,
pattern: '.*',
};
const MOCK_SERVER_CRAWLER_DATA: CrawlerDataFromServer = {
domains: [
{
id: '507f1f77bcf86cd799439011',
name: 'elastic.co',
created_on: 'Mon, 31 Aug 2020 17:00:00 +0000',
document_count: 13,
sitemaps: [],
entry_points: [],
crawl_rules: [],
deduplication_enabled: false,
deduplication_fields: ['title'],
available_deduplication_fields: ['title', 'description'],
},
],
};
const MOCK_CLIENT_CRAWLER_DATA = crawlerDataServerToClient(MOCK_SERVER_CRAWLER_DATA);
describe('CrawlerLogic', () => {
const { mount, unmount } = new LogicMounter(CrawlerLogic);
const { http } = mockHttpValues;
const { flashAPIErrors } = mockFlashMessageHelpers;
beforeEach(() => {
jest.clearAllMocks();
jest.useFakeTimers(); // this should be run before every test to reset these mocks
mount();
});
afterAll(() => {
jest.useRealTimers();
});
it('has expected default values', () => {
expect(CrawlerLogic.values).toEqual(DEFAULT_VALUES);
});
describe('actions', () => {
describe('clearTimeoutId', () => {
it('clears the timeout in the logic', () => {
mount({
timeoutId: setTimeout(() => {}, 1),
});
CrawlerLogic.actions.clearTimeoutId();
expect(CrawlerLogic.values.timeoutId).toEqual(null);
});
});
describe('onCreateNewTimeout', () => {
it('sets the timeout in the logic', () => {
const timeout = setTimeout(() => {}, 1);
CrawlerLogic.actions.onCreateNewTimeout(timeout);
expect(CrawlerLogic.values.timeoutId).toEqual(timeout);
});
});
describe('onReceiveCrawlerData', () => {
const crawlerData: CrawlerData = {
domains: [
{
id: '507f1f77bcf86cd799439011',
createdOn: 'Mon, 31 Aug 2020 17:00:00 +0000',
url: 'elastic.co',
documentCount: 13,
sitemaps: [],
entryPoints: [],
crawlRules: [],
defaultCrawlRule: DEFAULT_CRAWL_RULE,
deduplicationEnabled: false,
deduplicationFields: ['title'],
availableDeduplicationFields: ['title', 'description'],
},
],
};
beforeEach(() => {
CrawlerLogic.actions.onReceiveCrawlerData(crawlerData);
});
it('should set all received data as top-level values', () => {
expect(CrawlerLogic.values.domains).toEqual(crawlerData.domains);
});
it('should set dataLoading to false', () => {
expect(CrawlerLogic.values.dataLoading).toEqual(false);
});
});
describe('onReceiveCrawlRequests', () => {
const crawlRequests: CrawlRequest[] = [
{
id: '618d0e66abe97bc688328900',
status: CrawlerStatus.Pending,
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
];
beforeEach(() => {
CrawlerLogic.actions.onReceiveCrawlRequests(crawlRequests);
});
it('should set the crawl requests', () => {
expect(CrawlerLogic.values.crawlRequests).toEqual(crawlRequests);
});
});
});
describe('listeners', () => {
describe('fetchCrawlerData', () => {
it('updates logic with data that has been converted from server to client', async () => {
jest.spyOn(CrawlerLogic.actions, 'onReceiveCrawlerData');
http.get.mockReturnValueOnce(Promise.resolve(MOCK_SERVER_CRAWLER_DATA));
CrawlerLogic.actions.fetchCrawlerData();
await nextTick();
expect(http.get).toHaveBeenCalledWith('/api/app_search/engines/some-engine/crawler');
expect(CrawlerLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith(
MOCK_CLIENT_CRAWLER_DATA
);
});
it('calls flashAPIErrors when there is an error on the request for crawler data', async () => {
http.get.mockReturnValueOnce(Promise.reject('error'));
CrawlerLogic.actions.fetchCrawlerData();
await nextTick();
expect(flashAPIErrors).toHaveBeenCalledWith('error');
});
});
describe('startCrawl', () => {
describe('success path', () => {
it('creates a new crawl request and then fetches the latest crawl requests', async () => {
jest.spyOn(CrawlerLogic.actions, 'getLatestCrawlRequests');
http.post.mockReturnValueOnce(Promise.resolve());
CrawlerLogic.actions.startCrawl();
await nextTick();
expect(http.post).toHaveBeenCalledWith(
'/api/app_search/engines/some-engine/crawler/crawl_requests'
);
expect(CrawlerLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
});
});
describe('on failure', () => {
it('flashes an error message', async () => {
http.post.mockReturnValueOnce(Promise.reject('error'));
CrawlerLogic.actions.startCrawl();
await nextTick();
expect(flashAPIErrors).toHaveBeenCalledWith('error');
});
});
});
describe('stopCrawl', () => {
describe('success path', () => {
it('stops the crawl and then fetches the latest crawl requests', async () => {
jest.spyOn(CrawlerLogic.actions, 'getLatestCrawlRequests');
http.post.mockReturnValueOnce(Promise.resolve());
CrawlerLogic.actions.stopCrawl();
await nextTick();
expect(http.post).toHaveBeenCalledWith(
'/api/app_search/engines/some-engine/crawler/crawl_requests/cancel'
);
expect(CrawlerLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
});
});
describe('on failure', () => {
it('flashes an error message', async () => {
jest.spyOn(CrawlerLogic.actions, 'getLatestCrawlRequests');
http.post.mockReturnValueOnce(Promise.reject('error'));
CrawlerLogic.actions.stopCrawl();
await nextTick();
expect(flashAPIErrors).toHaveBeenCalledWith('error');
});
});
});
describe('createNewTimeoutForCrawlRequests', () => {
it('saves the timeout ID in the logic', () => {
jest.spyOn(CrawlerLogic.actions, 'onCreateNewTimeout');
jest.spyOn(CrawlerLogic.actions, 'getLatestCrawlRequests');
CrawlerLogic.actions.createNewTimeoutForCrawlRequests(2000);
expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), 2000);
expect(CrawlerLogic.actions.onCreateNewTimeout).toHaveBeenCalled();
jest.runAllTimers();
expect(CrawlerLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
});
it('clears a timeout if one already exists', () => {
const timeoutId = setTimeout(() => {}, 1);
mount({
timeoutId,
});
CrawlerLogic.actions.createNewTimeoutForCrawlRequests(2000);
expect(clearTimeout).toHaveBeenCalledWith(timeoutId);
});
});
describe('getLatestCrawlRequests', () => {
describe('on success', () => {
[
CrawlerStatus.Pending,
CrawlerStatus.Starting,
CrawlerStatus.Running,
CrawlerStatus.Canceling,
].forEach((status) => {
it(`creates a new timeout for status ${status}`, async () => {
jest.spyOn(CrawlerLogic.actions, 'createNewTimeoutForCrawlRequests');
http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
CrawlerLogic.actions.getLatestCrawlRequests();
await nextTick();
expect(CrawlerLogic.actions.createNewTimeoutForCrawlRequests).toHaveBeenCalled();
});
});
[CrawlerStatus.Success, CrawlerStatus.Failed, CrawlerStatus.Canceled].forEach((status) => {
it(`clears the timeout and fetches data for status ${status}`, async () => {
jest.spyOn(CrawlerLogic.actions, 'clearTimeoutId');
jest.spyOn(CrawlerLogic.actions, 'fetchCrawlerData');
http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
CrawlerLogic.actions.getLatestCrawlRequests();
await nextTick();
expect(CrawlerLogic.actions.clearTimeoutId).toHaveBeenCalled();
expect(CrawlerLogic.actions.fetchCrawlerData).toHaveBeenCalled();
});
it(`optionally suppresses fetching data for status ${status}`, async () => {
jest.spyOn(CrawlerLogic.actions, 'clearTimeoutId');
jest.spyOn(CrawlerLogic.actions, 'fetchCrawlerData');
http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
CrawlerLogic.actions.getLatestCrawlRequests(false);
await nextTick();
expect(CrawlerLogic.actions.clearTimeoutId).toHaveBeenCalled();
expect(CrawlerLogic.actions.fetchCrawlerData).toHaveBeenCalledTimes(0);
});
});
});
describe('on failure', () => {
it('creates a new timeout', async () => {
jest.spyOn(CrawlerLogic.actions, 'createNewTimeoutForCrawlRequests');
http.get.mockReturnValueOnce(Promise.reject());
CrawlerLogic.actions.getLatestCrawlRequests();
await nextTick();
expect(CrawlerLogic.actions.createNewTimeoutForCrawlRequests).toHaveBeenCalled();
});
});
});
});
describe('selectors', () => {
describe('mostRecentCrawlRequestStatus', () => {
it('is Success when there are no crawl requests', () => {
mount({
crawlRequests: [],
});
expect(CrawlerLogic.values.mostRecentCrawlRequestStatus).toEqual(CrawlerStatus.Success);
});
it('is Success when there are only skipped crawl requests', () => {
mount({
crawlRequests: [
{
id: '2',
status: CrawlerStatus.Skipped,
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
{
id: '1',
status: CrawlerStatus.Skipped,
createdAt: 'Mon, 30 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
],
});
expect(CrawlerLogic.values.mostRecentCrawlRequestStatus).toEqual(CrawlerStatus.Success);
});
it('is the first non-skipped crawl request status', () => {
mount({
crawlRequests: [
{
id: '3',
status: CrawlerStatus.Skipped,
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
{
id: '2',
status: CrawlerStatus.Failed,
createdAt: 'Mon, 30 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
{
id: '1',
status: CrawlerStatus.Success,
createdAt: 'Mon, 29 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
],
});
expect(CrawlerLogic.values.mostRecentCrawlRequestStatus).toEqual(CrawlerStatus.Failed);
});
});
});
describe('events', () => {
describe('beforeUnmount', () => {
it('clears the timeout if there is one', () => {
jest.spyOn(global, 'clearTimeout');
mount({
timeoutId: setTimeout(() => {}, 1),
});
unmount();
expect(clearTimeout).toHaveBeenCalled();
});
it('does not crash if no timeout exists', () => {
mount({ timeoutId: null });
unmount();
});
});
});
});
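
MOCK_CLIENT_CRAWLER_DATA above is produced by crawlerDataServerToClient, which maps the snake_case server payload onto the camelCase client shape used throughout these tests. A hedged reconstruction of what that util plausibly does; the real implementation lives in ./utils and may handle more fields (for example an optional default crawl rule):

  // Inferred from the mock shapes above: a field-by-field rename, nothing more.
  interface ServerDomain {
    id: string;
    name: string;
    created_on: string;
    document_count: number;
    sitemaps: string[];
    entry_points: string[];
    crawl_rules: unknown[];
    deduplication_enabled: boolean;
    deduplication_fields: string[];
    available_deduplication_fields: string[];
  }

  export const crawlerDataServerToClient = (data: { domains: ServerDomain[] }) => ({
    domains: data.domains.map((domain) => ({
      id: domain.id,
      url: domain.name,
      createdOn: domain.created_on,
      documentCount: domain.document_count,
      sitemaps: domain.sitemaps,
      entryPoints: domain.entry_points,
      crawlRules: domain.crawl_rules,
      deduplicationEnabled: domain.deduplication_enabled,
      deduplicationFields: domain.deduplication_fields,
      availableDeduplicationFields: domain.available_deduplication_fields,
    })),
  });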

View file

@@ -0,0 +1,190 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { kea, MakeLogicType } from 'kea';
import { flashAPIErrors } from '../../../shared/flash_messages';
import { HttpLogic } from '../../../shared/http';
import { EngineLogic } from '../engine';
import {
CrawlerData,
CrawlerDomain,
CrawlRequest,
CrawlRequestFromServer,
CrawlerStatus,
} from './types';
import { crawlerDataServerToClient, crawlRequestServerToClient } from './utils';
const POLLING_DURATION = 1000;
const POLLING_DURATION_ON_FAILURE = 5000;
export interface CrawlerValues {
crawlRequests: CrawlRequest[];
dataLoading: boolean;
domains: CrawlerDomain[];
mostRecentCrawlRequestStatus: CrawlerStatus;
timeoutId: NodeJS.Timeout | null;
}
interface CrawlerActions {
clearTimeoutId(): void;
createNewTimeoutForCrawlRequests(duration: number): { duration: number };
fetchCrawlerData(): void;
getLatestCrawlRequests(refreshData?: boolean): { refreshData?: boolean };
onCreateNewTimeout(timeoutId: NodeJS.Timeout): { timeoutId: NodeJS.Timeout };
onReceiveCrawlerData(data: CrawlerData): { data: CrawlerData };
onReceiveCrawlRequests(crawlRequests: CrawlRequest[]): { crawlRequests: CrawlRequest[] };
startCrawl(): void;
stopCrawl(): void;
}
export const CrawlerLogic = kea<MakeLogicType<CrawlerValues, CrawlerActions>>({
path: ['enterprise_search', 'app_search', 'crawler', 'crawler_overview'],
actions: {
clearTimeoutId: true,
createNewTimeoutForCrawlRequests: (duration) => ({ duration }),
fetchCrawlerData: true,
getLatestCrawlRequests: (refreshData) => ({ refreshData }),
onCreateNewTimeout: (timeoutId) => ({ timeoutId }),
onReceiveCrawlerData: (data) => ({ data }),
onReceiveCrawlRequests: (crawlRequests) => ({ crawlRequests }),
startCrawl: () => null,
stopCrawl: () => null,
},
reducers: {
dataLoading: [
true,
{
onReceiveCrawlerData: () => false,
},
],
domains: [
[],
{
onReceiveCrawlerData: (_, { data: { domains } }) => domains,
},
],
crawlRequests: [
[],
{
onReceiveCrawlRequests: (_, { crawlRequests }) => crawlRequests,
},
],
timeoutId: [
null,
{
clearTimeoutId: () => null,
onCreateNewTimeout: (_, { timeoutId }) => timeoutId,
},
],
},
selectors: ({ selectors }) => ({
mostRecentCrawlRequestStatus: [
() => [selectors.crawlRequests],
(crawlRequests: CrawlerValues['crawlRequests']) => {
const eligibleCrawlRequests = crawlRequests.filter(
(req) => req.status !== CrawlerStatus.Skipped
);
if (eligibleCrawlRequests.length === 0) {
return CrawlerStatus.Success;
}
return eligibleCrawlRequests[0].status;
},
],
}),
listeners: ({ actions, values }) => ({
fetchCrawlerData: async () => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
try {
const response = await http.get(`/api/app_search/engines/${engineName}/crawler`);
const crawlerData = crawlerDataServerToClient(response);
actions.onReceiveCrawlerData(crawlerData);
} catch (e) {
flashAPIErrors(e);
}
},
startCrawl: async () => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
try {
await http.post(`/api/app_search/engines/${engineName}/crawler/crawl_requests`);
actions.getLatestCrawlRequests();
} catch (e) {
flashAPIErrors(e);
}
},
stopCrawl: async () => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
try {
await http.post(`/api/app_search/engines/${engineName}/crawler/crawl_requests/cancel`);
actions.getLatestCrawlRequests();
} catch (e) {
flashAPIErrors(e);
}
},
createNewTimeoutForCrawlRequests: ({ duration }) => {
if (values.timeoutId) {
clearTimeout(values.timeoutId);
}
const timeoutId = setTimeout(() => {
actions.getLatestCrawlRequests();
}, duration);
actions.onCreateNewTimeout(timeoutId);
},
getLatestCrawlRequests: async ({ refreshData = true }) => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
try {
const crawlRequestsFromServer: CrawlRequestFromServer[] = await http.get(
`/api/app_search/engines/${engineName}/crawler/crawl_requests`
);
const crawlRequests = crawlRequestsFromServer.map(crawlRequestServerToClient);
actions.onReceiveCrawlRequests(crawlRequests);
if (
[
CrawlerStatus.Pending,
CrawlerStatus.Starting,
CrawlerStatus.Running,
CrawlerStatus.Canceling,
].includes(crawlRequests[0]?.status)
) {
actions.createNewTimeoutForCrawlRequests(POLLING_DURATION);
} else if (
[CrawlerStatus.Success, CrawlerStatus.Failed, CrawlerStatus.Canceled].includes(
crawlRequests[0]?.status
)
) {
actions.clearTimeoutId();
if (refreshData) {
actions.fetchCrawlerData();
}
}
} catch (e) {
actions.createNewTimeoutForCrawlRequests(POLLING_DURATION_ON_FAILURE);
}
},
}),
events: ({ values }) => ({
beforeUnmount: () => {
if (values.timeoutId) {
clearTimeout(values.timeoutId);
}
},
}),
});
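
The polling above is a self-rescheduling timeout rather than a setInterval: each getLatestCrawlRequests inspects the newest request and either queues another poll (active statuses), or stops and, when refreshData is true, refreshes the crawler data (terminal statuses); failures retry on a slower cadence instead of flashing an error. The same shape stripped of kea, as a standalone sketch with hypothetical names:

  const ACTIVE_STATUSES = ['pending', 'starting', 'running', 'canceling'];
  const POLL_MS = 1000;
  const POLL_MS_ON_FAILURE = 5000;

  let timeoutId: ReturnType<typeof setTimeout> | null = null;

  export async function pollCrawlRequests(fetchLatestStatus: () => Promise<string>) {
    if (timeoutId) {
      clearTimeout(timeoutId); // never let two polls stack up
    }
    try {
      const status = await fetchLatestStatus();
      if (ACTIVE_STATUSES.includes(status)) {
        timeoutId = setTimeout(() => pollCrawlRequests(fetchLatestStatus), POLL_MS);
      } else {
        timeoutId = null; // terminal status: stop polling and let the caller refresh
      }
    } catch {
      // transient failure: back off and try again
      timeoutId = setTimeout(() => pollCrawlRequests(fetchLatestStatus), POLL_MS_ON_FAILURE);
    }
  }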

View file

@@ -5,8 +5,7 @@
* 2.0.
*/
import { setMockActions, setMockValues } from '../../../__mocks__/kea_logic';
import '../../../__mocks__/shallow_useeffect.mock';
import { setMockValues } from '../../../__mocks__/kea_logic';
import '../../__mocks__/engine_logic.mock';
import React from 'react';
@@ -84,11 +83,6 @@ const crawlRequests: CrawlRequestFromServer[] = [
];
describe('CrawlerOverview', () => {
const mockActions = {
fetchCrawlerData: jest.fn(),
getLatestCrawlRequests: jest.fn(),
};
const mockValues = {
dataLoading: false,
domains,
@@ -97,32 +91,27 @@ describe('CrawlerOverview', () => {
beforeEach(() => {
jest.clearAllMocks();
setMockActions(mockActions);
});
it('calls fetchCrawlerData and starts polling on page load', () => {
setMockValues(mockValues);
shallow(<CrawlerOverview />);
expect(mockActions.fetchCrawlerData).toHaveBeenCalledTimes(1);
expect(mockActions.getLatestCrawlRequests).toHaveBeenCalledWith(false);
});
it('contains a crawler status banner', () => {
setMockValues(mockValues);
const wrapper = shallow(<CrawlerOverview />);
expect(wrapper.find(CrawlerStatusBanner)).toHaveLength(1);
});
it('contains a crawler status indicator', () => {
setMockValues(mockValues);
const wrapper = shallow(<CrawlerOverview />);
expect(getPageHeaderActions(wrapper).find(CrawlerStatusIndicator)).toHaveLength(1);
});
it('contains a popover to manage crawls', () => {
setMockValues(mockValues);
const wrapper = shallow(<CrawlerOverview />);
expect(getPageHeaderActions(wrapper).find(ManageCrawlsPopover)).toHaveLength(1);

View file

@@ -5,9 +5,9 @@
* 2.0.
*/
import React, { useEffect } from 'react';
import React from 'react';
import { useActions, useValues } from 'kea';
import { useValues } from 'kea';
import { EuiFlexGroup, EuiFlexItem, EuiLink, EuiSpacer, EuiText, EuiTitle } from '@elastic/eui';
@@ -26,17 +26,10 @@ import { CrawlerStatusIndicator } from './components/crawler_status_indicator/cr
import { DomainsTable } from './components/domains_table';
import { ManageCrawlsPopover } from './components/manage_crawls_popover/manage_crawls_popover';
import { CRAWLER_TITLE } from './constants';
import { CrawlerOverviewLogic } from './crawler_overview_logic';
import { CrawlerLogic } from './crawler_logic';
export const CrawlerOverview: React.FC = () => {
const { crawlRequests, dataLoading, domains } = useValues(CrawlerOverviewLogic);
const { fetchCrawlerData, getLatestCrawlRequests } = useActions(CrawlerOverviewLogic);
useEffect(() => {
fetchCrawlerData();
getLatestCrawlRequests(false);
}, []);
const { crawlRequests, dataLoading, domains } = useValues(CrawlerLogic);
return (
<AppSearchPageTemplate

View file

@@ -12,36 +12,22 @@ import {
} from '../../../__mocks__/kea_logic';
import '../../__mocks__/engine_logic.mock';
jest.mock('./crawler_logic', () => ({
CrawlerLogic: {
actions: {
onReceiveCrawlerData: jest.fn(),
},
},
}));
import { nextTick } from '@kbn/test/jest';
import { CrawlerOverviewLogic, CrawlerOverviewValues } from './crawler_overview_logic';
import {
CrawlerData,
CrawlerDataFromServer,
CrawlerDomain,
CrawlerPolicies,
CrawlerRules,
CrawlerStatus,
CrawlRequest,
CrawlRule,
} from './types';
import { CrawlerLogic } from './crawler_logic';
import { CrawlerOverviewLogic } from './crawler_overview_logic';
import { CrawlerDataFromServer, CrawlerDomain } from './types';
import { crawlerDataServerToClient } from './utils';
const DEFAULT_VALUES: CrawlerOverviewValues = {
crawlRequests: [],
dataLoading: true,
domains: [],
mostRecentCrawlRequestStatus: CrawlerStatus.Success,
timeoutId: null,
};
const DEFAULT_CRAWL_RULE: CrawlRule = {
id: '-',
policy: CrawlerPolicies.allow,
rule: CrawlerRules.regex,
pattern: '.*',
};
const MOCK_SERVER_CRAWLER_DATA: CrawlerDataFromServer = {
domains: [
{
@@ -62,128 +48,19 @@ const MOCK_SERVER_CRAWLER_DATA: CrawlerDataFromServer = {
const MOCK_CLIENT_CRAWLER_DATA = crawlerDataServerToClient(MOCK_SERVER_CRAWLER_DATA);
describe('CrawlerOverviewLogic', () => {
const { mount, unmount } = new LogicMounter(CrawlerOverviewLogic);
const { mount } = new LogicMounter(CrawlerOverviewLogic);
const { http } = mockHttpValues;
const { flashAPIErrors, flashSuccessToast } = mockFlashMessageHelpers;
beforeEach(() => {
jest.clearAllMocks();
jest.useFakeTimers(); // this should be run before every test to reset these mocks
mount();
});
afterAll(() => {
jest.useRealTimers();
});
it('has expected default values', () => {
expect(CrawlerOverviewLogic.values).toEqual(DEFAULT_VALUES);
});
describe('actions', () => {
describe('clearTimeoutId', () => {
it('clears the timeout in the logic', () => {
mount({
timeoutId: setTimeout(() => {}, 1),
});
CrawlerOverviewLogic.actions.clearTimeoutId();
expect(CrawlerOverviewLogic.values.timeoutId).toEqual(null);
});
});
describe('onCreateNewTimeout', () => {
it('sets the timeout in the logic', () => {
const timeout = setTimeout(() => {}, 1);
CrawlerOverviewLogic.actions.onCreateNewTimeout(timeout);
expect(CrawlerOverviewLogic.values.timeoutId).toEqual(timeout);
});
});
describe('onReceiveCrawlerData', () => {
const crawlerData: CrawlerData = {
domains: [
{
id: '507f1f77bcf86cd799439011',
createdOn: 'Mon, 31 Aug 2020 17:00:00 +0000',
url: 'elastic.co',
documentCount: 13,
sitemaps: [],
entryPoints: [],
crawlRules: [],
defaultCrawlRule: DEFAULT_CRAWL_RULE,
deduplicationEnabled: false,
deduplicationFields: ['title'],
availableDeduplicationFields: ['title', 'description'],
},
],
};
beforeEach(() => {
CrawlerOverviewLogic.actions.onReceiveCrawlerData(crawlerData);
});
it('should set all received data as top-level values', () => {
expect(CrawlerOverviewLogic.values.domains).toEqual(crawlerData.domains);
});
it('should set dataLoading to false', () => {
expect(CrawlerOverviewLogic.values.dataLoading).toEqual(false);
});
});
describe('onReceiveCrawlRequests', () => {
const crawlRequests: CrawlRequest[] = [
{
id: '618d0e66abe97bc688328900',
status: CrawlerStatus.Pending,
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
];
beforeEach(() => {
CrawlerOverviewLogic.actions.onReceiveCrawlRequests(crawlRequests);
});
it('should set the crawl requests', () => {
expect(CrawlerOverviewLogic.values.crawlRequests).toEqual(crawlRequests);
});
});
});
describe('listeners', () => {
describe('fetchCrawlerData', () => {
it('updates logic with data that has been converted from server to client', async () => {
jest.spyOn(CrawlerOverviewLogic.actions, 'onReceiveCrawlerData');
http.get.mockReturnValueOnce(Promise.resolve(MOCK_SERVER_CRAWLER_DATA));
CrawlerOverviewLogic.actions.fetchCrawlerData();
await nextTick();
expect(http.get).toHaveBeenCalledWith('/api/app_search/engines/some-engine/crawler');
expect(CrawlerOverviewLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith(
MOCK_CLIENT_CRAWLER_DATA
);
});
it('calls flashAPIErrors when there is an error on the request for crawler data', async () => {
http.get.mockReturnValueOnce(Promise.reject('error'));
CrawlerOverviewLogic.actions.fetchCrawlerData();
await nextTick();
expect(flashAPIErrors).toHaveBeenCalledWith('error');
});
});
describe('deleteDomain', () => {
it('calls onReceiveCrawlerData with retrieved data that has been converted from server to client', async () => {
jest.spyOn(CrawlerOverviewLogic.actions, 'onReceiveCrawlerData');
jest.spyOn(CrawlerLogic.actions, 'onReceiveCrawlerData');
http.delete.mockReturnValue(Promise.resolve(MOCK_SERVER_CRAWLER_DATA));
CrawlerOverviewLogic.actions.deleteDomain({ id: '1234' } as CrawlerDomain);
@@ -195,7 +72,7 @@ describe('CrawlerOverviewLogic', () => {
query: { respond_with: 'crawler_details' },
}
);
expect(CrawlerOverviewLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith(
expect(CrawlerLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith(
MOCK_CLIENT_CRAWLER_DATA
);
expect(flashSuccessToast).toHaveBeenCalled();
@@ -210,241 +87,5 @@ describe('CrawlerOverviewLogic', () => {
expect(flashAPIErrors).toHaveBeenCalledWith('error');
});
});
describe('startCrawl', () => {
describe('success path', () => {
it('creates a new crawl request and then fetches the latest crawl requests', async () => {
jest.spyOn(CrawlerOverviewLogic.actions, 'getLatestCrawlRequests');
http.post.mockReturnValueOnce(Promise.resolve());
CrawlerOverviewLogic.actions.startCrawl();
await nextTick();
expect(http.post).toHaveBeenCalledWith(
'/api/app_search/engines/some-engine/crawler/crawl_requests'
);
expect(CrawlerOverviewLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
});
});
describe('on failure', () => {
it('flashes an error message', async () => {
http.post.mockReturnValueOnce(Promise.reject('error'));
CrawlerOverviewLogic.actions.startCrawl();
await nextTick();
expect(flashAPIErrors).toHaveBeenCalledWith('error');
});
});
});
describe('stopCrawl', () => {
describe('success path', () => {
it('stops the crawl and then fetches the latest crawl requests', async () => {
jest.spyOn(CrawlerOverviewLogic.actions, 'getLatestCrawlRequests');
http.post.mockReturnValueOnce(Promise.resolve());
CrawlerOverviewLogic.actions.stopCrawl();
await nextTick();
expect(http.post).toHaveBeenCalledWith(
'/api/app_search/engines/some-engine/crawler/crawl_requests/cancel'
);
expect(CrawlerOverviewLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
});
});
describe('on failure', () => {
it('flashes an error message', async () => {
jest.spyOn(CrawlerOverviewLogic.actions, 'getLatestCrawlRequests');
http.post.mockReturnValueOnce(Promise.reject('error'));
CrawlerOverviewLogic.actions.stopCrawl();
await nextTick();
expect(flashAPIErrors).toHaveBeenCalledWith('error');
});
});
});
describe('createNewTimeoutForCrawlRequests', () => {
it('saves the timeout ID in the logic', () => {
jest.spyOn(CrawlerOverviewLogic.actions, 'onCreateNewTimeout');
jest.spyOn(CrawlerOverviewLogic.actions, 'getLatestCrawlRequests');
CrawlerOverviewLogic.actions.createNewTimeoutForCrawlRequests(2000);
expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), 2000);
expect(CrawlerOverviewLogic.actions.onCreateNewTimeout).toHaveBeenCalled();
jest.runAllTimers();
expect(CrawlerOverviewLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
});
it('clears a timeout if one already exists', () => {
const timeoutId = setTimeout(() => {}, 1);
mount({
timeoutId,
});
CrawlerOverviewLogic.actions.createNewTimeoutForCrawlRequests(2000);
expect(clearTimeout).toHaveBeenCalledWith(timeoutId);
});
});
describe('getLatestCrawlRequests', () => {
describe('on success', () => {
[
CrawlerStatus.Pending,
CrawlerStatus.Starting,
CrawlerStatus.Running,
CrawlerStatus.Canceling,
].forEach((status) => {
it(`creates a new timeout for status ${status}`, async () => {
jest.spyOn(CrawlerOverviewLogic.actions, 'createNewTimeoutForCrawlRequests');
http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
CrawlerOverviewLogic.actions.getLatestCrawlRequests();
await nextTick();
expect(
CrawlerOverviewLogic.actions.createNewTimeoutForCrawlRequests
).toHaveBeenCalled();
});
});
[CrawlerStatus.Success, CrawlerStatus.Failed, CrawlerStatus.Canceled].forEach((status) => {
it(`clears the timeout and fetches data for status ${status}`, async () => {
jest.spyOn(CrawlerOverviewLogic.actions, 'clearTimeoutId');
jest.spyOn(CrawlerOverviewLogic.actions, 'fetchCrawlerData');
http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
CrawlerOverviewLogic.actions.getLatestCrawlRequests();
await nextTick();
expect(CrawlerOverviewLogic.actions.clearTimeoutId).toHaveBeenCalled();
expect(CrawlerOverviewLogic.actions.fetchCrawlerData).toHaveBeenCalled();
});
it(`optionally suppresses fetching data for status ${status}`, async () => {
jest.spyOn(CrawlerOverviewLogic.actions, 'clearTimeoutId');
jest.spyOn(CrawlerOverviewLogic.actions, 'fetchCrawlerData');
http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
CrawlerOverviewLogic.actions.getLatestCrawlRequests(false);
await nextTick();
expect(CrawlerOverviewLogic.actions.clearTimeoutId).toHaveBeenCalled();
expect(CrawlerOverviewLogic.actions.fetchCrawlerData).toHaveBeenCalledTimes(0);
});
});
});
describe('on failure', () => {
it('creates a new timeout', async () => {
jest.spyOn(CrawlerOverviewLogic.actions, 'createNewTimeoutForCrawlRequests');
http.get.mockReturnValueOnce(Promise.reject());
CrawlerOverviewLogic.actions.getLatestCrawlRequests();
await nextTick();
expect(CrawlerOverviewLogic.actions.createNewTimeoutForCrawlRequests).toHaveBeenCalled();
});
});
});
});
describe('selectors', () => {
describe('mostRecentCrawlRequestStatus', () => {
it('is Success when there are no crawl requests', () => {
mount({
crawlRequests: [],
});
expect(CrawlerOverviewLogic.values.mostRecentCrawlRequestStatus).toEqual(
CrawlerStatus.Success
);
});
it('is Success when there are only skipped crawl requests', () => {
mount({
crawlRequests: [
{
id: '2',
status: CrawlerStatus.Skipped,
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
{
id: '1',
status: CrawlerStatus.Skipped,
createdAt: 'Mon, 30 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
],
});
expect(CrawlerOverviewLogic.values.mostRecentCrawlRequestStatus).toEqual(
CrawlerStatus.Success
);
});
it('is the first non-skipped crawl request status', () => {
mount({
crawlRequests: [
{
id: '3',
status: CrawlerStatus.Skipped,
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
{
id: '2',
status: CrawlerStatus.Failed,
createdAt: 'Mon, 30 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
{
id: '1',
status: CrawlerStatus.Success,
createdAt: 'Mon, 29 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
},
],
});
expect(CrawlerOverviewLogic.values.mostRecentCrawlRequestStatus).toEqual(
CrawlerStatus.Failed
);
});
});
});
describe('events', () => {
describe('beforeUnmount', () => {
it('clears the timeout if there is one', () => {
jest.spyOn(global, 'clearTimeout');
mount({
timeoutId: setTimeout(() => {}, 1),
});
unmount();
expect(clearTimeout).toHaveBeenCalled();
});
it('does not crash if no timeout exists', () => {
mount({ timeoutId: null });
unmount();
});
});
});
});

View file

@@ -12,115 +12,20 @@ import { flashAPIErrors, flashSuccessToast } from '../../../shared/flash_message
import { HttpLogic } from '../../../shared/http';
import { EngineLogic } from '../engine';
import {
CrawlerData,
CrawlerDomain,
CrawlRequest,
CrawlRequestFromServer,
CrawlerStatus,
} from './types';
import {
crawlerDataServerToClient,
crawlRequestServerToClient,
getDeleteDomainSuccessMessage,
} from './utils';
const POLLING_DURATION = 1000;
const POLLING_DURATION_ON_FAILURE = 5000;
export interface CrawlerOverviewValues {
crawlRequests: CrawlRequest[];
dataLoading: boolean;
domains: CrawlerDomain[];
mostRecentCrawlRequestStatus: CrawlerStatus;
timeoutId: NodeJS.Timeout | null;
}
import { CrawlerLogic } from './crawler_logic';
import { CrawlerDomain } from './types';
import { crawlerDataServerToClient, getDeleteDomainSuccessMessage } from './utils';
interface CrawlerOverviewActions {
clearTimeoutId(): void;
createNewTimeoutForCrawlRequests(duration: number): { duration: number };
deleteDomain(domain: CrawlerDomain): { domain: CrawlerDomain };
fetchCrawlerData(): void;
getLatestCrawlRequests(refreshData?: boolean): { refreshData?: boolean };
onCreateNewTimeout(timeoutId: NodeJS.Timeout): { timeoutId: NodeJS.Timeout };
onReceiveCrawlerData(data: CrawlerData): { data: CrawlerData };
onReceiveCrawlRequests(crawlRequests: CrawlRequest[]): { crawlRequests: CrawlRequest[] };
startCrawl(): void;
stopCrawl(): void;
}
export const CrawlerOverviewLogic = kea<
MakeLogicType<CrawlerOverviewValues, CrawlerOverviewActions>
>({
export const CrawlerOverviewLogic = kea<MakeLogicType<{}, CrawlerOverviewActions>>({
path: ['enterprise_search', 'app_search', 'crawler', 'crawler_overview'],
actions: {
clearTimeoutId: true,
createNewTimeoutForCrawlRequests: (duration) => ({ duration }),
deleteDomain: (domain) => ({ domain }),
fetchCrawlerData: true,
getLatestCrawlRequests: (refreshData) => ({ refreshData }),
onCreateNewTimeout: (timeoutId) => ({ timeoutId }),
onReceiveCrawlerData: (data) => ({ data }),
onReceiveCrawlRequests: (crawlRequests) => ({ crawlRequests }),
startCrawl: () => null,
stopCrawl: () => null,
},
reducers: {
dataLoading: [
true,
{
onReceiveCrawlerData: () => false,
},
],
domains: [
[],
{
onReceiveCrawlerData: (_, { data: { domains } }) => domains,
},
],
crawlRequests: [
[],
{
onReceiveCrawlRequests: (_, { crawlRequests }) => crawlRequests,
},
],
timeoutId: [
null,
{
clearTimeoutId: () => null,
onCreateNewTimeout: (_, { timeoutId }) => timeoutId,
},
],
},
selectors: ({ selectors }) => ({
mostRecentCrawlRequestStatus: [
() => [selectors.crawlRequests],
(crawlRequests: CrawlerOverviewValues['crawlRequests']) => {
const eligibleCrawlRequests = crawlRequests.filter(
(req) => req.status !== CrawlerStatus.Skipped
);
if (eligibleCrawlRequests.length === 0) {
return CrawlerStatus.Success;
}
return eligibleCrawlRequests[0].status;
},
],
}),
listeners: ({ actions, values }) => ({
fetchCrawlerData: async () => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
try {
const response = await http.get(`/api/app_search/engines/${engineName}/crawler`);
const crawlerData = crawlerDataServerToClient(response);
actions.onReceiveCrawlerData(crawlerData);
} catch (e) {
flashAPIErrors(e);
}
},
deleteDomain: async ({ domain }) => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
@@ -135,84 +40,11 @@ export const CrawlerOverviewLogic = kea<
}
);
const crawlerData = crawlerDataServerToClient(response);
actions.onReceiveCrawlerData(crawlerData);
CrawlerLogic.actions.onReceiveCrawlerData(crawlerData);
flashSuccessToast(getDeleteDomainSuccessMessage(domain.url));
} catch (e) {
flashAPIErrors(e);
}
},
startCrawl: async () => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
try {
await http.post(`/api/app_search/engines/${engineName}/crawler/crawl_requests`);
actions.getLatestCrawlRequests();
} catch (e) {
flashAPIErrors(e);
}
},
stopCrawl: async () => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
try {
await http.post(`/api/app_search/engines/${engineName}/crawler/crawl_requests/cancel`);
actions.getLatestCrawlRequests();
} catch (e) {
flashAPIErrors(e);
}
},
createNewTimeoutForCrawlRequests: ({ duration }) => {
if (values.timeoutId) {
clearTimeout(values.timeoutId);
}
const timeoutId = setTimeout(() => {
actions.getLatestCrawlRequests();
}, duration);
actions.onCreateNewTimeout(timeoutId);
},
getLatestCrawlRequests: async ({ refreshData = true }) => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
try {
const crawlRequestsFromServer: CrawlRequestFromServer[] = await http.get(
`/api/app_search/engines/${engineName}/crawler/crawl_requests`
);
const crawlRequests = crawlRequestsFromServer.map(crawlRequestServerToClient);
actions.onReceiveCrawlRequests(crawlRequests);
if (
[
CrawlerStatus.Pending,
CrawlerStatus.Starting,
CrawlerStatus.Running,
CrawlerStatus.Canceling,
].includes(crawlRequests[0]?.status)
) {
actions.createNewTimeoutForCrawlRequests(POLLING_DURATION);
} else if (
[CrawlerStatus.Success, CrawlerStatus.Failed, CrawlerStatus.Canceled].includes(
crawlRequests[0]?.status
)
) {
actions.clearTimeoutId();
if (refreshData) {
actions.fetchCrawlerData();
}
}
} catch (e) {
actions.createNewTimeoutForCrawlRequests(POLLING_DURATION_ON_FAILURE);
}
},
}),
events: ({ values }) => ({
beforeUnmount: () => {
if (values.timeoutId) {
clearTimeout(values.timeoutId);
}
},
}),
});
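
With domains now owned by CrawlerLogic, deleteDomain pushes the refreshed payload across logics by calling CrawlerLogic.actions.onReceiveCrawlerData directly. In kea this dispatches through the shared store, so it only works while CrawlerLogic is mounted; CrawlerRouter guarantees that here by subscribing to it via useActions for the lifetime of the crawler pages. The pattern in isolation (SomeLogic is a hypothetical example, not part of the commit):

  import { kea, MakeLogicType } from 'kea';

  import { CrawlerLogic } from './crawler_logic';

  interface SomeActions {
    refresh(): void;
  }

  export const SomeLogic = kea<MakeLogicType<{}, SomeActions>>({
    actions: { refresh: true },
    listeners: () => ({
      refresh: async () => {
        // write into the shared logic instead of duplicating its reducers here
        CrawlerLogic.actions.onReceiveCrawlRequests([]);
      },
    }),
  });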

View file

@@ -4,6 +4,9 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { setMockActions } from '../../../__mocks__/kea_logic';
import '../../../__mocks__/shallow_useeffect.mock';
import '../../__mocks__/engine_logic.mock';
import React from 'react';
@@ -14,14 +17,25 @@ import { CrawlerRouter } from './crawler_router';
import { CrawlerSingleDomain } from './crawler_single_domain';
describe('CrawlerRouter', () => {
const mockActions = {
fetchCrawlerData: jest.fn(),
getLatestCrawlRequests: jest.fn(),
};
let wrapper: ShallowWrapper;
beforeEach(() => {
jest.clearAllMocks();
setMockActions(mockActions);
wrapper = shallow(<CrawlerRouter />);
});
it('renders a crawler single domain view', () => {
it('calls fetchCrawlerData and starts polling on page load', () => {
expect(mockActions.fetchCrawlerData).toHaveBeenCalledTimes(1);
expect(mockActions.getLatestCrawlRequests).toHaveBeenCalledWith(false);
});
it('renders crawler views', () => {
expect(wrapper.find(CrawlerOverview)).toHaveLength(1);
expect(wrapper.find(CrawlerSingleDomain)).toHaveLength(1);
});
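
These tests rely on the shallow_useeffect mock imported at the top of the file: enzyme's shallow rendering does not run effects, so the mock makes useEffect execute its callback synchronously, letting the test observe the mount-time fetch. A plausible minimal version of such a mock (an assumption about the helper's shape, not its verbatim source):

  jest.mock('react', () => ({
    ...jest.requireActual('react'),
    // run effects immediately during shallow rendering; cleanups are ignored
    useEffect: (effect: () => void | (() => void)) => {
      effect();
    },
  }));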

View file

@@ -5,15 +5,26 @@
* 2.0.
*/
import React from 'react';
import React, { useEffect } from 'react';
import { Route, Switch } from 'react-router-dom';
import { useActions } from 'kea';
import { ENGINE_CRAWLER_DOMAIN_PATH, ENGINE_CRAWLER_PATH } from '../../routes';
import { CrawlerLogic } from './crawler_logic';
import { CrawlerOverview } from './crawler_overview';
import { CrawlerSingleDomain } from './crawler_single_domain';
export const CrawlerRouter: React.FC = () => {
const { fetchCrawlerData, getLatestCrawlRequests } = useActions(CrawlerLogic);
useEffect(() => {
fetchCrawlerData();
getLatestCrawlRequests(false);
}, []);
return (
<Switch>
<Route exact path={ENGINE_CRAWLER_PATH}>
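
Based on the imports and route constants above, the Switch plausibly finishes by rendering the two crawler views; a hedged reconstruction of the remainder, not the verbatim file:

  return (
    <Switch>
      <Route exact path={ENGINE_CRAWLER_PATH}>
        <CrawlerOverview />
      </Route>
      <Route path={ENGINE_CRAWLER_DOMAIN_PATH}>
        <CrawlerSingleDomain />
      </Route>
    </Switch>
  );
};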