From a51787d5f63ffb8b0e7ce41a019133fd0078d650 Mon Sep 17 00:00:00 2001
From: Byron Hulcher
Date: Thu, 5 Aug 2021 13:09:28 -0400
Subject: [PATCH] [App Search] Empty Crawler Single Domain view (#107694)

* New route to retrieve data for a single domain

* New CrawlerSingleDomainLogic logic

* New CrawlerSingleDomain view component

* Add CrawlerSingleDomain to CrawlerRouter

* Use different default text for page title while loading

* Apply suggestions from code review

Co-authored-by: Orhan Toy

Co-authored-by: Orhan Toy
---
 .../crawler/crawler_router.test.tsx           | 15 ++-
 .../components/crawler/crawler_router.tsx     |  6 +-
 .../crawler/crawler_single_domain.test.tsx    | 56 +++++++++++
 .../crawler/crawler_single_domain.tsx         | 50 ++++++++++
 .../crawler_single_domain_logic.test.ts       | 98 +++++++++++++++++++
 .../crawler/crawler_single_domain_logic.ts    | 68 +++++++++++++
 .../server/routes/app_search/crawler.test.ts  | 38 +++++++
 .../server/routes/app_search/crawler.ts       | 15 +++
 8 files changed, 341 insertions(+), 5 deletions(-)
 create mode 100644 x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx
 create mode 100644 x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.tsx
 create mode 100644 x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain_logic.test.ts
 create mode 100644 x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain_logic.ts

diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.test.tsx
index 587ba61ce27e91..3fa01538613195 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.test.tsx
@@ -8,17 +8,22 @@ import React from 'react';
 
 import { Switch } from 'react-router-dom';
 
-import { shallow } from 'enzyme';
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { rerender } from '../../../test_helpers';
 
 import { CrawlerLanding } from './crawler_landing';
 import { CrawlerOverview } from './crawler_overview';
 import { CrawlerRouter } from './crawler_router';
+import { CrawlerSingleDomain } from './crawler_single_domain';
 
 describe('CrawlerRouter', () => {
+  let wrapper: ShallowWrapper;
   const OLD_ENV = process.env;
 
   beforeEach(() => {
     jest.clearAllMocks();
+    wrapper = shallow(<CrawlerRouter />);
   });
 
   afterEach(() => {
@@ -26,16 +31,18 @@ describe('CrawlerRouter', () => {
   });
 
   it('renders a landing page by default', () => {
-    const wrapper = shallow(<CrawlerRouter />);
-
     expect(wrapper.find(Switch)).toHaveLength(1);
     expect(wrapper.find(CrawlerLanding)).toHaveLength(1);
   });
 
   it('renders a crawler overview in dev', () => {
     process.env.NODE_ENV = 'development';
-    const wrapper = shallow(<CrawlerRouter />);
+    rerender(wrapper);
 
     expect(wrapper.find(CrawlerOverview)).toHaveLength(1);
   });
+
+  it('renders a crawler single domain view', () => {
+    expect(wrapper.find(CrawlerSingleDomain)).toHaveLength(1);
+  });
 });
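Note: the `rerender` helper imported above from '../../../test_helpers' is not part of this patch. A minimal sketch of what such a helper is assumed to do: force the shared enzyme wrapper to re-render so that state changed after the initial shallow() call (here, process.env.NODE_ENV) is picked up. The body below is an assumption, not the actual helper.

    // Hypothetical sketch of the shared `rerender` test helper (not included in this patch).
    import { ShallowWrapper } from 'enzyme';

    export const rerender = (wrapper: ShallowWrapper) => {
      wrapper.setProps({}); // re-invoke the component with unchanged props
      wrapper.update(); // flush the enzyme render tree
    };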
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.tsx
index c5dd3907c9019e..3919740b0c6cb0 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_router.tsx
@@ -8,10 +8,11 @@ import React from 'react';
 
 import { Route, Switch } from 'react-router-dom';
 
-import { ENGINE_CRAWLER_PATH } from '../../routes';
+import { ENGINE_CRAWLER_DOMAIN_PATH, ENGINE_CRAWLER_PATH } from '../../routes';
 
 import { CrawlerLanding } from './crawler_landing';
 import { CrawlerOverview } from './crawler_overview';
+import { CrawlerSingleDomain } from './crawler_single_domain';
 
 export const CrawlerRouter: React.FC = () => {
   return (
@@ -19,6 +20,9 @@ export const CrawlerRouter: React.FC = () => {
       <Route exact path={ENGINE_CRAWLER_PATH}>
         {process.env.NODE_ENV === 'development' ? <CrawlerOverview /> : <CrawlerLanding />}
       </Route>
+      <Route exact path={ENGINE_CRAWLER_DOMAIN_PATH}>
+        <CrawlerSingleDomain />
+      </Route>
     </Switch>
   );
 };
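Note: `ENGINE_CRAWLER_DOMAIN_PATH` is imported from '../../routes' but not defined in this patch. Since the CrawlerSingleDomain view below reads a `domainId` URL parameter, the constant is presumably shaped along these lines; the exact value is an assumption.

    // Assumed route constant; only the `:domainId` segment is implied by this patch.
    export const ENGINE_CRAWLER_DOMAIN_PATH = `${ENGINE_CRAWLER_PATH}/domains/:domainId`;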
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx
new file mode 100644
index 00000000000000..dd0966276dd68c
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.test.tsx
@@ -0,0 +1,56 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { setMockActions, setMockValues } from '../../../__mocks__/kea_logic';
+import '../../../__mocks__/shallow_useeffect.mock';
+import '../../__mocks__/engine_logic.mock';
+import { mockUseParams } from '../../../__mocks__/react_router';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiCode } from '@elastic/eui';
+
+import { CrawlerSingleDomain } from './crawler_single_domain';
+
+const MOCK_VALUES = {
+  dataLoading: false,
+  domain: {
+    url: 'https://elastic.co',
+  },
+};
+
+const MOCK_ACTIONS = {
+  fetchDomainData: jest.fn(),
+};
+
+describe('CrawlerSingleDomain', () => {
+  beforeEach(() => {
+    jest.clearAllMocks();
+    mockUseParams.mockReturnValue({ domainId: '507f1f77bcf86cd799439011' });
+    setMockActions(MOCK_ACTIONS);
+    setMockValues(MOCK_VALUES);
+  });
+
+  it('renders', () => {
+    const wrapper = shallow(<CrawlerSingleDomain />);
+
+    expect(wrapper.find(EuiCode).render().text()).toContain('https://elastic.co');
+    expect(wrapper.prop('pageHeader')).toEqual({ pageTitle: 'https://elastic.co' });
+  });
+
+  it('uses a placeholder for the page title and page chrome if a domain has not been set', () => {
+    setMockValues({
+      ...MOCK_VALUES,
+      domain: null,
+    });
+
+    const wrapper = shallow(<CrawlerSingleDomain />);
+
+    expect(wrapper.prop('pageHeader')).toEqual({ pageTitle: 'Loading...' });
+  });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.tsx
new file mode 100644
index 00000000000000..bdcfa465c8c320
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain.tsx
@@ -0,0 +1,50 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React, { useEffect } from 'react';
+
+import { useParams } from 'react-router-dom';
+
+import { useActions, useValues } from 'kea';
+
+import { EuiCode } from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { getEngineBreadcrumbs } from '../engine';
+import { AppSearchPageTemplate } from '../layout';
+
+import { CRAWLER_TITLE } from './constants';
+import { CrawlerSingleDomainLogic } from './crawler_single_domain_logic';
+
+export const CrawlerSingleDomain: React.FC = () => {
+  const { domainId } = useParams() as { domainId: string };
+
+  const { dataLoading, domain } = useValues(CrawlerSingleDomainLogic);
+
+  const { fetchDomainData } = useActions(CrawlerSingleDomainLogic);
+
+  const displayDomainUrl = domain
+    ? domain.url
+    : i18n.translate('xpack.enterpriseSearch.appSearch.crawler.singleDomain.loadingTitle', {
+        defaultMessage: 'Loading...',
+      });
+
+  useEffect(() => {
+    fetchDomainData(domainId);
+  }, []);
+
+  return (
+    <AppSearchPageTemplate
+      pageChrome={getEngineBreadcrumbs([CRAWLER_TITLE, displayDomainUrl])}
+      pageHeader={{ pageTitle: displayDomainUrl }}
+      isLoading={dataLoading}
+    >
+      <EuiCode language="json">{JSON.stringify(domain, null, 2)}</EuiCode>
+    </AppSearchPageTemplate>
+  );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain_logic.test.ts
new file mode 100644
index 00000000000000..bb478a30ee5ef2
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain_logic.test.ts
@@ -0,0 +1,98 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+  LogicMounter,
+  mockHttpValues,
+  mockFlashMessageHelpers,
+} from '../../../__mocks__/kea_logic';
+import '../../__mocks__/engine_logic.mock';
+
+import { nextTick } from '@kbn/test/jest';
+
+import { CrawlerSingleDomainLogic, CrawlerSingleDomainValues } from './crawler_single_domain_logic';
+import { CrawlerDomain } from './types';
+
+const DEFAULT_VALUES: CrawlerSingleDomainValues = {
+  dataLoading: true,
+  domain: null,
+};
+
+describe('CrawlerSingleDomainLogic', () => {
+  const { mount } = new LogicMounter(CrawlerSingleDomainLogic);
+  const { http } = mockHttpValues;
+  const { flashAPIErrors } = mockFlashMessageHelpers;
+
+  beforeEach(() => {
+    jest.clearAllMocks();
+    mount();
+  });
+
+  it('has expected default values', () => {
+    expect(CrawlerSingleDomainLogic.values).toEqual(DEFAULT_VALUES);
+  });
+
+  describe('actions', () => {
+    describe('onReceiveDomainData', () => {
+      const domain = {
+        id: '507f1f77bcf86cd799439011',
+      };
+
+      beforeEach(() => {
+        CrawlerSingleDomainLogic.actions.onReceiveDomainData(domain as CrawlerDomain);
+      });
+
+      it('should set the domain', () => {
+        expect(CrawlerSingleDomainLogic.values.domain).toEqual(domain);
+      });
+    });
+  });
+
+  describe('listeners', () => {
+    describe('fetchDomainData', () => {
+      it('updates logic with data that has been converted from server to client', async () => {
+        jest.spyOn(CrawlerSingleDomainLogic.actions, 'onReceiveDomainData');
+        http.get.mockReturnValueOnce(
+          Promise.resolve({
+            id: '507f1f77bcf86cd799439011',
+            name: 'https://elastic.co',
+            created_on: 'Mon, 31 Aug 2020 17:00:00 +0000',
+            document_count: 13,
+            sitemaps: [],
+            entry_points: [],
+            crawl_rules: [],
+          })
+        );
+
+        CrawlerSingleDomainLogic.actions.fetchDomainData('507f1f77bcf86cd799439011');
+        await nextTick();
+
+        expect(http.get).toHaveBeenCalledWith(
+          '/api/app_search/engines/some-engine/crawler/domains/507f1f77bcf86cd799439011'
+        );
+        expect(CrawlerSingleDomainLogic.actions.onReceiveDomainData).toHaveBeenCalledWith({
+          id: '507f1f77bcf86cd799439011',
+          createdOn: 'Mon, 31 Aug 2020 17:00:00 +0000',
+          url: 'https://elastic.co',
+          documentCount: 13,
+          sitemaps: [],
+          entryPoints: [],
+          crawlRules: [],
+        });
+      });
+
+      it('displays any errors to the user', async () => {
+        http.get.mockReturnValueOnce(Promise.reject('error'));
+
+        CrawlerSingleDomainLogic.actions.fetchDomainData('507f1f77bcf86cd799439011');
+        await nextTick();
+
+        expect(flashAPIErrors).toHaveBeenCalledWith('error');
+      });
+    });
+  });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain_logic.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain_logic.ts
new file mode 100644
index 00000000000000..bccd67a4921d15
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/crawler_single_domain_logic.ts
@@ -0,0 +1,68 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+import { flashAPIErrors } from '../../../shared/flash_messages';
+
+import { HttpLogic } from '../../../shared/http';
+import { EngineLogic } from '../engine';
+
+import { CrawlerDomain } from './types';
+import { crawlerDomainServerToClient } from './utils';
+
+export interface CrawlerSingleDomainValues {
+  dataLoading: boolean;
+  domain: CrawlerDomain | null;
+}
+
+interface CrawlerSingleDomainActions {
+  fetchDomainData(domainId: string): { domainId: string };
+  onReceiveDomainData(domain: CrawlerDomain): { domain: CrawlerDomain };
+}
+
+export const CrawlerSingleDomainLogic = kea<
+  MakeLogicType<CrawlerSingleDomainValues, CrawlerSingleDomainActions>
+>({
+  path: ['enterprise_search', 'app_search', 'crawler', 'crawler_single_domain'],
+  actions: {
+    fetchDomainData: (domainId) => ({ domainId }),
+    onReceiveDomainData: (domain) => ({ domain }),
+  },
+  reducers: {
+    dataLoading: [
+      true,
+      {
+        onReceiveDomainData: () => false,
+      },
+    ],
+    domain: [
+      null,
+      {
+        onReceiveDomainData: (_, { domain }) => domain,
+      },
+    ],
+  },
+  listeners: ({ actions }) => ({
+    fetchDomainData: async ({ domainId }) => {
+      const { http } = HttpLogic.values;
+      const { engineName } = EngineLogic.values;
+
+      try {
+        const response = await http.get(
+          `/api/app_search/engines/${engineName}/crawler/domains/${domainId}`
+        );
+
+        const domainData = crawlerDomainServerToClient(response);
+
+        actions.onReceiveDomainData(domainData);
+      } catch (e) {
+        flashAPIErrors(e);
+      }
+    },
+  }),
+});
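Note: `CrawlerDomain` and `crawlerDomainServerToClient` come from './types' and './utils', which this patch does not touch. Below is a sketch of the conversion, inferred from the fixtures in crawler_single_domain_logic.test.ts above; the `CrawlerDomainFromServer` name, the array element types, and any fields beyond those exercised by the test are assumptions.

    // Sketch only — inferred from the test fixtures, not copied from the actual './types' / './utils'.
    interface CrawlerDomainFromServer {
      id: string;
      name: string; // the domain URL
      created_on: string;
      document_count: number;
      sitemaps: unknown[];
      entry_points: unknown[];
      crawl_rules: unknown[];
    }

    interface CrawlerDomain {
      id: string;
      url: string;
      createdOn: string;
      documentCount: number;
      sitemaps: unknown[];
      entryPoints: unknown[];
      crawlRules: unknown[];
    }

    const crawlerDomainServerToClient = (domain: CrawlerDomainFromServer): CrawlerDomain => ({
      id: domain.id,
      url: domain.name, // server `name` becomes the client `url`
      createdOn: domain.created_on,
      documentCount: domain.document_count,
      sitemaps: domain.sitemaps,
      entryPoints: domain.entry_points,
      crawlRules: domain.crawl_rules,
    });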
diff --git a/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.test.ts b/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.test.ts
index f54771e2bef9a7..ee41dce661451a 100644
--- a/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.test.ts
+++ b/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.test.ts
@@ -178,6 +178,44 @@ describe('crawler routes', () => {
     });
   });
 
+  describe('GET /api/app_search/engines/{name}/crawler/domains/{id}', () => {
+    let mockRouter: MockRouter;
+
+    beforeEach(() => {
+      jest.clearAllMocks();
+      mockRouter = new MockRouter({
+        method: 'get',
+        path: '/api/app_search/engines/{name}/crawler/domains/{id}',
+      });
+
+      registerCrawlerRoutes({
+        ...mockDependencies,
+        router: mockRouter.router,
+      });
+    });
+
+    it('creates a request to enterprise search', () => {
+      expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+        path: '/api/as/v0/engines/:name/crawler/domains/:id',
+      });
+    });
+
+    it('validates correctly with name and id', () => {
+      const request = { params: { name: 'some-engine', id: '1234' } };
+      mockRouter.shouldValidate(request);
+    });
+
+    it('fails validation without name', () => {
+      const request = { params: { id: '1234' } };
+      mockRouter.shouldThrow(request);
+    });
+
+    it('fails validation without id', () => {
+      const request = { params: { name: 'test-engine' } };
+      mockRouter.shouldThrow(request);
+    });
+  });
+
   describe('POST /api/app_search/crawler/validate_url', () => {
     let mockRouter: MockRouter;
 
diff --git a/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.ts b/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.ts
index 5404a9a00bdac1..29c1dd74582a30 100644
--- a/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.ts
+++ b/x-pack/plugins/enterprise_search/server/routes/app_search/crawler.ts
@@ -66,6 +66,21 @@ export function registerCrawlerRoutes({
     })
   );
 
+  router.get(
+    {
+      path: '/api/app_search/engines/{name}/crawler/domains/{id}',
+      validate: {
+        params: schema.object({
+          name: schema.string(),
+          id: schema.string(),
+        }),
+      },
+    },
+    enterpriseSearchRequestHandler.createRequest({
+      path: '/api/as/v0/engines/:name/crawler/domains/:id',
+    })
+  );
+
   router.delete(
     {
       path: '/api/app_search/engines/{name}/crawler/domains/{id}',