[App Search] Initial logic for Crawler Overview (#101176)

* New CrawlerOverview component

* CrawlerRouter should use CrawlerOverview in dev mode

* New CrawlerOverviewLogic

* New crawler route

* Display domains data for CrawlerOverview in EuiCode

* Update types

* Clean up tests for Crawler utils

* Better todo commenting for CrawlerOverview tests

* Remove unused div from CrawlerOverview

* Rename CrawlerOverviewLogic.actions.setCrawlerData to onFetchCrawlerData

* Cleaning up CrawlerOverviewLogic

* Cleaning up CrawlerOverviewLogic tests

* Fix CrawlerPolicies capitalization

* Add Loading UX

* Cleaning up afterEachs across Crawler tests
This commit is contained in:
Byron Hulcher 2021-06-04 14:28:11 -04:00 committed by GitHub
parent 090d0abd11
commit 9a275de0f9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 589 additions and 7 deletions

View file

@ -19,14 +19,11 @@ describe('CrawlerLanding', () => {
let wrapper: ShallowWrapper;
beforeEach(() => {
jest.clearAllMocks();
setMockValues({ ...mockEngineValues });
wrapper = shallow(<CrawlerLanding />);
});
afterEach(() => {
jest.clearAllMocks();
});
it('contains an external documentation link', () => {
const externalDocumentationLink = wrapper.find('[data-test-subj="CrawlerDocumentationLink"]');

View file

@ -0,0 +1,66 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { rerender, setMockActions, setMockValues } from '../../../__mocks__';
import '../../../__mocks__/shallow_useeffect.mock';
import React from 'react';
import { shallow, ShallowWrapper } from 'enzyme';
import { EuiCode } from '@elastic/eui';
import { Loading } from '../../../shared/loading';
import { CrawlerOverview } from './crawler_overview';
// Mocked CrawlerOverviewLogic actions/values injected via setMockActions/setMockValues.
const actions = {
  fetchCrawlerData: jest.fn(),
};

const values = {
  dataLoading: false,
  domains: [],
};

describe('CrawlerOverview', () => {
  let wrapper: ShallowWrapper;

  beforeEach(() => {
    jest.clearAllMocks();
    setMockValues(values);
    setMockActions(actions);
    wrapper = shallow(<CrawlerOverview />);
  });

  it('renders', () => {
    expect(wrapper.find(EuiCode)).toHaveLength(1);
  });

  it('calls fetchCrawlerData on page load', () => {
    expect(actions.fetchCrawlerData).toHaveBeenCalledTimes(1);
  });

  // TODO after DomainsTable is built in a future PR
  // it('contains a DomainsTable', () => {})

  // TODO after CrawlRequestsTable is built in a future PR
  // it('contains a CrawlRequestsTable', () => {})

  // TODO after AddDomainForm is built in a future PR
  // it('contains an AddDomainForm', () => {})

  // TODO after empty state is added in a future PR
  // it('has an empty state', () => {})

  // NOTE(review): renamed from 'shows an empty state…' — this test asserts the
  // Loading component, not an empty state (the empty state test is still a TODO).
  it('shows a loading state when data is loading', () => {
    // Spread `values` so only dataLoading is overridden, keeping `domains` mocked
    setMockValues({ ...values, dataLoading: true });
    rerender(wrapper);

    expect(wrapper.find(Loading)).toHaveLength(1);
  });
});

View file

@ -0,0 +1,41 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { useEffect } from 'react';
import { useActions, useValues } from 'kea';
import { EuiCode, EuiPageHeader } from '@elastic/eui';
import { FlashMessages } from '../../../shared/flash_messages';
import { Loading } from '../../../shared/loading';
import { CRAWLER_TITLE } from './constants';
import { CrawlerOverviewLogic } from './crawler_overview_logic';
/**
 * Crawler Overview page: fetches crawler data for the current engine on mount
 * and, for now, dumps the domains as pretty-printed JSON inside an EuiCode
 * block (tables/forms arrive in follow-up PRs).
 */
export const CrawlerOverview: React.FC = () => {
  const { fetchCrawlerData } = useActions(CrawlerOverviewLogic);
  const { dataLoading, domains } = useValues(CrawlerOverviewLogic);

  // Kick off the data fetch exactly once, on first render
  useEffect(() => {
    fetchCrawlerData();
  }, []);

  return dataLoading ? (
    <Loading />
  ) : (
    <>
      <EuiPageHeader pageTitle={CRAWLER_TITLE} />
      <FlashMessages />
      <EuiCode language="json">{JSON.stringify(domains, null, 2)}</EuiCode>
    </>
  );
};

View file

@ -0,0 +1,121 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { LogicMounter, mockHttpValues, mockFlashMessageHelpers } from '../../../__mocks__';
import '../../__mocks__/engine_logic.mock';
import { nextTick } from '@kbn/test/jest';
import { CrawlerOverviewLogic } from './crawler_overview_logic';
import { CrawlerPolicies, CrawlerRules, CrawlRule } from './types';
// Expected state of CrawlerOverviewLogic immediately after mount, before any fetch.
const DEFAULT_VALUES = {
  dataLoading: true,
  domains: [],
};

// Fully-specified crawl rule fixture reused in the payloads below.
const DEFAULT_CRAWL_RULE: CrawlRule = {
  id: '-',
  policy: CrawlerPolicies.allow,
  rule: CrawlerRules.regex,
  pattern: '.*',
};

describe('CrawlerOverviewLogic', () => {
  const { mount } = new LogicMounter(CrawlerOverviewLogic);
  const { http } = mockHttpValues;
  const { flashAPIErrors } = mockFlashMessageHelpers;

  beforeEach(() => {
    jest.clearAllMocks();
    mount();
  });

  it('has expected default values', () => {
    expect(CrawlerOverviewLogic.values).toEqual(DEFAULT_VALUES);
  });

  describe('actions', () => {
    describe('onFetchCrawlerData', () => {
      // Client-shaped (camelCased) crawler data, as produced by crawlerDataServerToClient
      const crawlerData = {
        domains: [
          {
            id: '507f1f77bcf86cd799439011',
            createdOn: 'Mon, 31 Aug 2020 17:00:00 +0000',
            url: 'moviedatabase.com',
            documentCount: 13,
            sitemaps: [],
            entryPoints: [],
            crawlRules: [],
            defaultCrawlRule: DEFAULT_CRAWL_RULE,
          },
        ],
      };

      beforeEach(() => {
        CrawlerOverviewLogic.actions.onFetchCrawlerData(crawlerData);
      });

      it('should set all received data as top-level values', () => {
        expect(CrawlerOverviewLogic.values.domains).toEqual(crawlerData.domains);
      });

      it('should set dataLoading to false', () => {
        expect(CrawlerOverviewLogic.values.dataLoading).toEqual(false);
      });
    });
  });

  describe('listeners', () => {
    describe('fetchCrawlerData', () => {
      it('calls onFetchCrawlerData with retrieved data that has been converted from server to client', async () => {
        jest.spyOn(CrawlerOverviewLogic.actions, 'onFetchCrawlerData');
        // Server responses use snake_cased keys and `name` rather than `url`
        http.get.mockReturnValue(
          Promise.resolve({
            domains: [
              {
                id: '507f1f77bcf86cd799439011',
                name: 'moviedatabase.com',
                created_on: 'Mon, 31 Aug 2020 17:00:00 +0000',
                document_count: 13,
                sitemaps: [],
                entry_points: [],
                crawl_rules: [],
              },
            ],
          })
        );

        CrawlerOverviewLogic.actions.fetchCrawlerData();
        await nextTick();

        // 'some-engine' comes from the mocked EngineLogic (engine_logic.mock)
        expect(http.get).toHaveBeenCalledWith('/api/app_search/engines/some-engine/crawler');
        // The listener must hand onFetchCrawlerData the camelCased client shape
        expect(CrawlerOverviewLogic.actions.onFetchCrawlerData).toHaveBeenCalledWith({
          domains: [
            {
              id: '507f1f77bcf86cd799439011',
              createdOn: 'Mon, 31 Aug 2020 17:00:00 +0000',
              url: 'moviedatabase.com',
              documentCount: 13,
              sitemaps: [],
              entryPoints: [],
              crawlRules: [],
            },
          ],
        });
      });

      it('calls flashApiErrors when there is an error', async () => {
        http.get.mockReturnValue(Promise.reject('error'));

        CrawlerOverviewLogic.actions.fetchCrawlerData();
        await nextTick();

        expect(flashAPIErrors).toHaveBeenCalledWith('error');
      });
    });
  });
});

View file

@ -0,0 +1,64 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { kea, MakeLogicType } from 'kea';
import { flashAPIErrors } from '../../../shared/flash_messages';
import { HttpLogic } from '../../../shared/http';
import { EngineLogic } from '../engine';
import { CrawlerData, CrawlerDataFromServer, CrawlerDomain } from './types';
import { crawlerDataServerToClient } from './utils';
// State exposed to components by CrawlerOverviewLogic.
interface CrawlerOverviewValues {
  dataLoading: boolean; // true until the first crawler fetch resolves
  domains: CrawlerDomain[]; // client-shaped domains for the current engine
}

// Actions available on CrawlerOverviewLogic.
interface CrawlerOverviewActions {
  fetchCrawlerData(): void; // kicks off the HTTP request for crawler data
  onFetchCrawlerData(data: CrawlerData): { data: CrawlerData }; // stores fetched data
}

export const CrawlerOverviewLogic = kea<
  MakeLogicType<CrawlerOverviewValues, CrawlerOverviewActions>
>({
  path: ['enterprise_search', 'app_search', 'crawler', 'crawler_overview'],
  actions: {
    fetchCrawlerData: true,
    onFetchCrawlerData: (data) => ({ data }),
  },
  reducers: {
    // Starts true so the page renders a loading state on first mount;
    // flips to false once data arrives via onFetchCrawlerData.
    dataLoading: [
      true,
      {
        onFetchCrawlerData: () => false,
      },
    ],
    domains: [
      [],
      {
        onFetchCrawlerData: (_, { data: { domains } }) => domains,
      },
    ],
  },
  listeners: ({ actions }) => ({
    // Fetches crawler data for the current engine, converts the snake_cased
    // server payload to the client shape, and stores it. API errors are
    // surfaced as flash messages rather than rethrown.
    fetchCrawlerData: async () => {
      const { http } = HttpLogic.values;
      const { engineName } = EngineLogic.values;

      try {
        const response = await http.get(`/api/app_search/engines/${engineName}/crawler`);
        const crawlerData = crawlerDataServerToClient(response as CrawlerDataFromServer);
        actions.onFetchCrawlerData(crawlerData);
      } catch (e) {
        flashAPIErrors(e);
      }
    },
  }),
});

View file

@ -14,21 +14,32 @@ import { Switch } from 'react-router-dom';
import { shallow } from 'enzyme';
import { CrawlerLanding } from './crawler_landing';
import { CrawlerOverview } from './crawler_overview';
import { CrawlerRouter } from './crawler_router';
describe('CrawlerRouter', () => {
  const OLD_ENV = process.env;

  beforeEach(() => {
    jest.clearAllMocks();
    // Clone process.env before each test: OLD_ENV aliases the live object, so
    // without this, `process.env.NODE_ENV = 'development'` below would mutate
    // OLD_ENV itself and the afterEach "restore" would restore nothing.
    process.env = { ...OLD_ENV };
    setMockValues({ ...mockEngineValues });
  });

  afterEach(() => {
    jest.clearAllMocks();
    process.env = OLD_ENV;
  });

  it('renders a landing page by default', () => {
    const wrapper = shallow(<CrawlerRouter />);

    expect(wrapper.find(Switch)).toHaveLength(1);
    expect(wrapper.find(CrawlerLanding)).toHaveLength(1);
  });

  it('renders a crawler overview in dev', () => {
    process.env.NODE_ENV = 'development';
    const wrapper = shallow(<CrawlerRouter />);

    expect(wrapper.find(CrawlerOverview)).toHaveLength(1);
  });
});

View file

@ -14,13 +14,14 @@ import { getEngineBreadcrumbs } from '../engine';
import { CRAWLER_TITLE } from './constants';
import { CrawlerLanding } from './crawler_landing';
import { CrawlerOverview } from './crawler_overview';
export const CrawlerRouter: React.FC = () => {
return (
<Switch>
<Route>
<SetPageChrome trail={getEngineBreadcrumbs([CRAWLER_TITLE])} />
<CrawlerLanding />
{process.env.NODE_ENV === 'development' ? <CrawlerOverview /> : <CrawlerLanding />}
</Route>
</Switch>
);

View file

@ -0,0 +1,67 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
// Whether a crawl rule permits or blocks matching URLs.
export enum CrawlerPolicies {
  allow = 'allow',
  deny = 'deny',
}

// How a crawl rule's `pattern` is matched against a URL.
export enum CrawlerRules {
  beginsWith = 'begins',
  endsWith = 'ends',
  contains = 'contains',
  regex = 'regex',
}

// A single crawl rule: apply `policy` to URLs matched by `rule` + `pattern`.
export interface CrawlRule {
  id: string;
  policy: CrawlerPolicies;
  rule: CrawlerRules;
  pattern: string;
}

// A URL the crawler uses as a starting point within a domain.
export interface EntryPoint {
  id: string;
  value: string;
}

// A sitemap URL registered for a domain.
export interface Sitemap {
  id: string;
  url: string;
}

// Client-side (camelCased) representation of a crawler domain.
// Converted from CrawlerDomainFromServer by crawlerDomainServerToClient.
export interface CrawlerDomain {
  createdOn: string;
  documentCount: number;
  id: string;
  lastCrawl?: string; // omitted when the domain has never been crawled
  url: string;
  crawlRules: CrawlRule[];
  defaultCrawlRule?: CrawlRule;
  entryPoints: EntryPoint[];
  sitemaps: Sitemap[];
}

// Raw domain payload as returned by the Enterprise Search API (snake_cased;
// note the server calls the URL `name` and the last crawl `last_visited_at`).
export interface CrawlerDomainFromServer {
  id: string;
  name: string;
  created_on: string;
  last_visited_at?: string;
  document_count: number;
  crawl_rules: CrawlRule[];
  default_crawl_rule?: CrawlRule;
  entry_points: EntryPoint[];
  sitemaps: Sitemap[];
}

// Client-side crawler payload for an engine.
export interface CrawlerData {
  domains: CrawlerDomain[];
}

// Raw crawler payload as returned by the Enterprise Search API.
export interface CrawlerDataFromServer {
  domains: CrawlerDomainFromServer[];
}

View file

@ -0,0 +1,93 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { CrawlerPolicies, CrawlerRules, CrawlRule, CrawlerDomainFromServer } from './types';
import { crawlerDomainServerToClient, crawlerDataServerToClient } from './utils';
// Crawl rule fixture shared by the conversion tests below.
const DEFAULT_CRAWL_RULE: CrawlRule = {
  id: '-',
  policy: CrawlerPolicies.allow,
  rule: CrawlerRules.regex,
  pattern: '.*',
};

describe('crawlerDomainServerToClient', () => {
  it('converts the API payload into properties matching our code style', () => {
    const id = '507f1f77bcf86cd799439011';
    const name = 'moviedatabase.com';

    const serverPayload = {
      id,
      name,
      created_on: 'Mon, 31 Aug 2020 17:00:00 +0000',
      document_count: 13,
      sitemaps: [],
      entry_points: [],
      crawl_rules: [],
    };

    const clientPayload = {
      id,
      createdOn: 'Mon, 31 Aug 2020 17:00:00 +0000',
      url: name,
      documentCount: 13,
      sitemaps: [],
      entryPoints: [],
      crawlRules: [],
    };

    // Base payload converts 1:1 into camelCased client keys
    expect(crawlerDomainServerToClient(serverPayload)).toStrictEqual(clientPayload);

    // Optional last_visited_at maps to lastCrawl
    const visitedServerPayload = {
      ...serverPayload,
      last_visited_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
    };
    expect(crawlerDomainServerToClient(visitedServerPayload)).toStrictEqual({
      ...clientPayload,
      lastCrawl: 'Mon, 31 Aug 2020 17:00:00 +0000',
    });

    // Optional default_crawl_rule maps to defaultCrawlRule
    const ruledServerPayload = {
      ...serverPayload,
      default_crawl_rule: DEFAULT_CRAWL_RULE,
    };
    expect(crawlerDomainServerToClient(ruledServerPayload)).toStrictEqual({
      ...clientPayload,
      defaultCrawlRule: DEFAULT_CRAWL_RULE,
    });
  });
});
describe('crawlerDataServerToClient', () => {
  it('converts all domains from the server form to their client form', () => {
    const domains: CrawlerDomainFromServer[] = [
      {
        id: 'x',
        name: 'moviedatabase.com',
        document_count: 13,
        created_on: 'Mon, 31 Aug 2020 17:00:00 +0000',
        sitemaps: [],
        entry_points: [],
        crawl_rules: [],
        default_crawl_rule: DEFAULT_CRAWL_RULE,
      },
      {
        id: 'y',
        name: 'swiftype.com',
        last_visited_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
        document_count: 40,
        created_on: 'Mon, 31 Aug 2020 17:00:00 +0000',
        sitemaps: [],
        entry_points: [],
        crawl_rules: [],
      },
    ];

    const { domains: clientDomains } = crawlerDataServerToClient({ domains });

    // Every server domain should come back converted, in the same order
    expect(clientDomains).toHaveLength(domains.length);
    domains.forEach((domain, i) => {
      expect(clientDomains[i]).toEqual(crawlerDomainServerToClient(domain));
    });
  });
});

View file

@ -0,0 +1,55 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
CrawlerDomain,
CrawlerDomainFromServer,
CrawlerData,
CrawlerDataFromServer,
} from './types';
/**
 * Converts a snake_cased crawler domain payload from the Enterprise Search API
 * into the camelCased client shape. The server's `name` becomes `url` and
 * `last_visited_at` becomes `lastCrawl`; the two optional fields are only set
 * on the result when present in the payload.
 */
export function crawlerDomainServerToClient(payload: CrawlerDomainFromServer): CrawlerDomain {
  const domain: CrawlerDomain = {
    id: payload.id,
    url: payload.name,
    documentCount: payload.document_count,
    createdOn: payload.created_on,
    crawlRules: payload.crawl_rules,
    sitemaps: payload.sitemaps,
    entryPoints: payload.entry_points,
  };

  if (payload.last_visited_at) {
    domain.lastCrawl = payload.last_visited_at;
  }
  if (payload.default_crawl_rule) {
    domain.defaultCrawlRule = payload.default_crawl_rule;
  }

  return domain;
}
/**
 * Converts a full crawler API payload to its client shape by converting
 * each domain via crawlerDomainServerToClient.
 */
export function crawlerDataServerToClient(payload: CrawlerDataFromServer): CrawlerData {
  return {
    domains: payload.domains.map((domain) => crawlerDomainServerToClient(domain)),
  };
}

View file

@ -0,0 +1,35 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { mockDependencies, mockRequestHandler, MockRouter } from '../../__mocks__';
import { registerCrawlerRoutes } from './crawler';
describe('crawler routes', () => {
  describe('GET /api/app_search/engines/{name}/crawler', () => {
    let mockRouter: MockRouter;

    beforeEach(() => {
      jest.clearAllMocks();
      mockRouter = new MockRouter({
        method: 'get',
        path: '/api/app_search/engines/{name}/crawler',
      });

      registerCrawlerRoutes({
        ...mockDependencies,
        router: mockRouter.router,
      });
    });

    // Registering the routes should wire the Kibana route to a proxied request
    // against the corresponding Enterprise Search crawler endpoint.
    it('creates a request to enterprise search', () => {
      expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
        path: '/api/as/v0/engines/:name/crawler',
      });
    });
  });
});

View file

@ -0,0 +1,29 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { schema } from '@kbn/config-schema';
import { RouteDependencies } from '../../plugin';
/**
 * Registers App Search crawler routes.
 *
 * GET /api/app_search/engines/{name}/crawler proxies to the Enterprise Search
 * crawler API for the named engine.
 */
export function registerCrawlerRoutes({
  router,
  enterpriseSearchRequestHandler,
}: RouteDependencies) {
  // Proxy handler targeting the Enterprise Search crawler endpoint;
  // `:name` is filled from the validated route param below.
  const getCrawlerHandler = enterpriseSearchRequestHandler.createRequest({
    path: '/api/as/v0/engines/:name/crawler',
  });

  router.get(
    {
      path: '/api/app_search/engines/{name}/crawler',
      validate: {
        params: schema.object({ name: schema.string() }),
      },
    },
    getCrawlerHandler
  );
}

View file

@ -9,6 +9,7 @@ import { RouteDependencies } from '../../plugin';
import { registerAnalyticsRoutes } from './analytics';
import { registerApiLogsRoutes } from './api_logs';
import { registerCrawlerRoutes } from './crawler';
import { registerCredentialsRoutes } from './credentials';
import { registerCurationsRoutes } from './curations';
import { registerDocumentsRoutes, registerDocumentRoutes } from './documents';
@ -42,4 +43,5 @@ export const registerAppSearchRoutes = (dependencies: RouteDependencies) => {
registerResultSettingsRoutes(dependencies);
registerApiLogsRoutes(dependencies);
registerOnboardingRoutes(dependencies);
registerCrawlerRoutes(dependencies);
};