diff --git a/x-pack/legacy/plugins/infra/common/http_api/log_entries/common.ts b/x-pack/legacy/plugins/infra/common/http_api/log_entries/common.ts
index 3eb7e278bf99c4..0b312223220071 100644
--- a/x-pack/legacy/plugins/infra/common/http_api/log_entries/common.ts
+++ b/x-pack/legacy/plugins/infra/common/http_api/log_entries/common.ts
@@ -10,3 +10,4 @@ export const logEntriesCursorRT = rt.type({
   time: rt.number,
   tiebreaker: rt.number,
 });
+export type LogEntriesCursor = rt.TypeOf<typeof logEntriesCursorRT>;
diff --git a/x-pack/legacy/plugins/infra/common/http_api/log_entries/entries.ts b/x-pack/legacy/plugins/infra/common/http_api/log_entries/entries.ts
new file mode 100644
index 00000000000000..97bdad23beb240
--- /dev/null
+++ b/x-pack/legacy/plugins/infra/common/http_api/log_entries/entries.ts
@@ -0,0 +1,94 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as rt from 'io-ts';
+import { logEntriesCursorRT } from './common';
+
+export const LOG_ENTRIES_PATH = '/api/log_entries/entries';
+
+export const logEntriesBaseRequestRT = rt.intersection([
+  rt.type({
+    sourceId: rt.string,
+    startDate: rt.number,
+    endDate: rt.number,
+  }),
+  rt.partial({
+    query: rt.string,
+    size: rt.number,
+  }),
+]);
+
+export const logEntriesBeforeRequestRT = rt.intersection([
+  logEntriesBaseRequestRT,
+  rt.type({ before: rt.union([logEntriesCursorRT, rt.literal('last')]) }),
+]);
+
+export const logEntriesAfterRequestRT = rt.intersection([
+  logEntriesBaseRequestRT,
+  rt.type({ after: rt.union([logEntriesCursorRT, rt.literal('first')]) }),
+]);
+
+export const logEntriesCenteredRT = rt.intersection([
+  logEntriesBaseRequestRT,
+  rt.type({ center: logEntriesCursorRT }),
+]);
+
+export const logEntriesRequestRT = rt.union([
+  logEntriesBaseRequestRT,
+  logEntriesBeforeRequestRT,
+  logEntriesAfterRequestRT,
+  logEntriesCenteredRT,
+]);
+
+export type LogEntriesRequest = rt.TypeOf<typeof logEntriesRequestRT>;
+
+// JSON value
+const valueRT = rt.union([rt.string, rt.number, rt.boolean, rt.object, rt.null, rt.undefined]);
+
+export const logMessagePartRT = rt.union([
+  rt.type({
+    constant: rt.string,
+  }),
+  rt.type({
+    field: rt.string,
+    value: valueRT,
+    highlights: rt.array(rt.string),
+  }),
+]);
+
+export const logColumnRT = rt.union([
+  rt.type({ columnId: rt.string, timestamp: rt.number }),
+  rt.type({
+    columnId: rt.string,
+    field: rt.string,
+    value: rt.union([rt.string, rt.undefined]),
+    highlights: rt.array(rt.string),
+  }),
+  rt.type({
+    columnId: rt.string,
+    message: rt.array(logMessagePartRT),
+  }),
+]);
+
+export const logEntryRT = rt.type({
+  id: rt.string,
+  cursor: logEntriesCursorRT,
+  columns: rt.array(logColumnRT),
+});
+
+export type LogMessagePart = rt.TypeOf<typeof logMessagePartRT>;
+export type LogColumn = rt.TypeOf<typeof logColumnRT>;
+export type LogEntry = rt.TypeOf<typeof logEntryRT>;
+
+export const logEntriesResponseRT = rt.type({
+  data: rt.type({
+    entries: rt.array(logEntryRT),
+    topCursor: logEntriesCursorRT,
+    bottomCursor: logEntriesCursorRT,
+  }),
+});
+
+export type LogEntriesResponse = rt.TypeOf<typeof logEntriesResponseRT>;
diff --git a/x-pack/legacy/plugins/infra/common/http_api/log_entries/highlights.ts b/x-pack/legacy/plugins/infra/common/http_api/log_entries/highlights.ts
new file mode 100644
index 00000000000000..516cd67f2764da
--- /dev/null
+++ b/x-pack/legacy/plugins/infra/common/http_api/log_entries/highlights.ts
@@ -0,0 +1,62 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as rt from 'io-ts';
+import {
+  logEntriesBaseRequestRT,
+  logEntriesBeforeRequestRT,
+  logEntriesAfterRequestRT,
+  logEntriesCenteredRT,
+  logEntryRT,
+} from './entries';
+import { logEntriesCursorRT } from './common';
+
+export const LOG_ENTRIES_HIGHLIGHTS_PATH = '/api/log_entries/highlights';
+
+const highlightsRT = rt.type({
+  highlightTerms: rt.array(rt.string),
+});
+
+export const logEntriesHighlightsBaseRequestRT = rt.intersection([
+  logEntriesBaseRequestRT,
+  highlightsRT,
+]);
+
+export const logEntriesHighlightsBeforeRequestRT = rt.intersection([
+  logEntriesBeforeRequestRT,
+  highlightsRT,
+]);
+
+export const logEntriesHighlightsAfterRequestRT = rt.intersection([
+  logEntriesAfterRequestRT,
+  highlightsRT,
+]);
+
+export const logEntriesHighlightsCenteredRequestRT = rt.intersection([
+  logEntriesCenteredRT,
+  highlightsRT,
+]);
+
+export const logEntriesHighlightsRequestRT = rt.union([
+  logEntriesHighlightsBaseRequestRT,
+  logEntriesHighlightsBeforeRequestRT,
+  logEntriesHighlightsAfterRequestRT,
+  logEntriesHighlightsCenteredRequestRT,
+]);
+
+export type LogEntriesHighlightsRequest = rt.TypeOf<typeof logEntriesHighlightsRequestRT>;
+
+export const logEntriesHighlightsResponseRT = rt.type({
+  data: rt.array(
+    rt.type({
+      topCursor: logEntriesCursorRT,
+      bottomCursor: logEntriesCursorRT,
+      entries: rt.array(logEntryRT),
+    })
+  ),
+});
+
+export type LogEntriesHighlightsResponse = rt.TypeOf<typeof logEntriesHighlightsResponseRT>;
diff --git a/x-pack/legacy/plugins/infra/common/http_api/log_entries/index.ts b/x-pack/legacy/plugins/infra/common/http_api/log_entries/index.ts
index 8fed914c3dc8c9..490f295cbff68d 100644
--- a/x-pack/legacy/plugins/infra/common/http_api/log_entries/index.ts
+++ b/x-pack/legacy/plugins/infra/common/http_api/log_entries/index.ts
@@ -4,6 +4,9 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
+export * from './common';
+export * from './entries';
+export * from './highlights';
 export * from './item';
 export * from './summary';
 export * from './summary_highlights';
diff --git a/x-pack/legacy/plugins/infra/server/infra_server.ts b/x-pack/legacy/plugins/infra/server/infra_server.ts
index 108e1b1e3f3925..f99589e1b52bd7 100644
--- a/x-pack/legacy/plugins/infra/server/infra_server.ts
+++ b/x-pack/legacy/plugins/infra/server/infra_server.ts
@@ -20,6 +20,8 @@ import { initMetadataRoute } from './routes/metadata';
 import { initSnapshotRoute } from './routes/snapshot';
 import { initNodeDetailsRoute } from './routes/node_details';
 import {
+  initLogEntriesRoute,
+  initLogEntriesHighlightsRoute,
   initLogEntriesSummaryRoute,
   initLogEntriesSummaryHighlightsRoute,
   initLogEntriesItemRoute,
@@ -43,6 +45,8 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
   initSnapshotRoute(libs);
   initNodeDetailsRoute(libs);
   initValidateLogAnalysisIndicesRoute(libs);
+  initLogEntriesRoute(libs);
+  initLogEntriesHighlightsRoute(libs);
   initLogEntriesSummaryRoute(libs);
   initLogEntriesSummaryHighlightsRoute(libs);
   initLogEntriesItemRoute(libs);
diff --git a/x-pack/legacy/plugins/infra/server/lib/adapters/log_entries/kibana_log_entries_adapter.ts b/x-pack/legacy/plugins/infra/server/lib/adapters/log_entries/kibana_log_entries_adapter.ts
index ec45171baa7b0a..b936d79a8edcdf 100644
--- a/x-pack/legacy/plugins/infra/server/lib/adapters/log_entries/kibana_log_entries_adapter.ts
+++ b/x-pack/legacy/plugins/infra/server/lib/adapters/log_entries/kibana_log_entries_adapter.ts
@@ -8,6 +8,7 @@
 import { timeMilliseconds } from 'd3-time';
 import * as runtimeTypes from 'io-ts';
+import { compact } from 'lodash';
 import first from 'lodash/fp/first';
 import get from 'lodash/fp/get';
 import has from 'lodash/fp/has';
@@ -17,12 +18,14 @@ import { map, fold } from 'fp-ts/lib/Either';
 import { identity, constant } from 'fp-ts/lib/function';
 import { RequestHandlerContext } from 'src/core/server';
 import { compareTimeKeys, isTimeKey, TimeKey } from '../../../../common/time';
-import { JsonObject } from '../../../../common/typed_json';
+import { JsonObject, JsonValue } from '../../../../common/typed_json';
 import {
   LogEntriesAdapter,
+  LogEntriesParams,
   LogEntryDocument,
   LogEntryQuery,
   LogSummaryBucket,
+  LOG_ENTRIES_PAGE_SIZE,
 } from '../../domains/log_entries_domain';
 import { InfraSourceConfiguration } from '../../sources';
 import { SortedSearchHit } from '../framework';
@@ -82,6 +85,84 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
     return direction === 'asc' ? documents : documents.reverse();
   }
 
+  public async getLogEntries(
+    requestContext: RequestHandlerContext,
+    sourceConfiguration: InfraSourceConfiguration,
+    fields: string[],
+    params: LogEntriesParams
+  ): Promise<LogEntryDocument[]> {
+    const { startDate, endDate, query, cursor, size, highlightTerm } = params;
+
+    const { sortDirection, searchAfterClause } = processCursor(cursor);
+
+    const highlightQuery = createHighlightQuery(highlightTerm, fields);
+
+    const highlightClause = highlightQuery
+      ? {
+          highlight: {
+            boundary_scanner: 'word',
+            fields: fields.reduce(
+              (highlightFieldConfigs, fieldName) => ({
+                ...highlightFieldConfigs,
+                [fieldName]: {},
+              }),
+              {}
+            ),
+            fragment_size: 1,
+            number_of_fragments: 100,
+            post_tags: [''],
+            pre_tags: [''],
+            highlight_query: highlightQuery,
+          },
+        }
+      : {};
+
+    const sort = {
+      [sourceConfiguration.fields.timestamp]: sortDirection,
+      [sourceConfiguration.fields.tiebreaker]: sortDirection,
+    };
+
+    const esQuery = {
+      allowNoIndices: true,
+      index: sourceConfiguration.logAlias,
+      ignoreUnavailable: true,
+      body: {
+        size: typeof size !== 'undefined' ? size : LOG_ENTRIES_PAGE_SIZE,
+        track_total_hits: false,
+        _source: fields,
+        query: {
+          bool: {
+            filter: [
+              ...createFilterClauses(query, highlightQuery),
+              {
+                range: {
+                  [sourceConfiguration.fields.timestamp]: {
+                    gte: startDate,
+                    lte: endDate,
+                    format: TIMESTAMP_FORMAT,
+                  },
+                },
+              },
+            ],
+          },
+        },
+        sort,
+        ...highlightClause,
+        ...searchAfterClause,
+      },
+    };
+
+    const esResult = await this.framework.callWithRequest<SortedSearchHit>(
+      requestContext,
+      'search',
+      esQuery
+    );
+
+    const hits = sortDirection === 'asc' ? esResult.hits.hits : esResult.hits.hits.reverse();
+    return mapHitsToLogEntryDocuments(hits, sourceConfiguration.fields.timestamp, fields);
+  }
+
+  /** @deprecated */
   public async getContainedLogEntryDocuments(
     requestContext: RequestHandlerContext,
     sourceConfiguration: InfraSourceConfiguration,
@@ -319,6 +400,34 @@ function getLookupIntervals(start: number, direction: 'asc' | 'desc'): Array<[nu
   return intervals;
 }
 
+function mapHitsToLogEntryDocuments(
+  hits: SortedSearchHit[],
+  timestampField: string,
+  fields: string[]
+): LogEntryDocument[] {
+  return hits.map(hit => {
+    const logFields = fields.reduce<{ [fieldName: string]: JsonValue }>(
+      (flattenedFields, field) => {
+        if (has(field, hit._source)) {
+          flattenedFields[field] = get(field, hit._source);
+        }
+        return flattenedFields;
+      },
+      {}
+    );
+
+    return {
+      gid: hit._id,
+      // timestamp: hit._source[timestampField],
+      // FIXME s/key/cursor/g
+      key: { time: hit.sort[0], tiebreaker: hit.sort[1] },
+      fields: logFields,
+      highlights: hit.highlight || {},
+    };
+  });
+}
+
+/** @deprecated */
 const convertHitToLogEntryDocument = (fields: string[]) => (
   hit: SortedSearchHit
 ): LogEntryDocument => ({
@@ -352,9 +461,62 @@ const convertDateRangeBucketToSummaryBucket = (
   })),
 });
 
+const createHighlightQuery = (
+  highlightTerm: string | undefined,
+  fields: string[]
+): LogEntryQuery | undefined => {
+  if (highlightTerm) {
+    return {
+      multi_match: {
+        fields,
+        lenient: true,
+        query: highlightTerm,
+        type: 'phrase',
+      },
+    };
+  }
+};
+
+const createFilterClauses = (
+  filterQuery?: LogEntryQuery,
+  highlightQuery?: LogEntryQuery
+): LogEntryQuery[] => {
+  if (filterQuery && highlightQuery) {
+    return [{ bool: { filter: [filterQuery, highlightQuery] } }];
+  }
+
+  return compact([filterQuery, highlightQuery]) as LogEntryQuery[];
+};
+
 const createQueryFilterClauses = (filterQuery: LogEntryQuery | undefined) =>
   filterQuery ? [filterQuery] : [];
 
+function processCursor(
+  cursor: LogEntriesParams['cursor']
+): {
+  sortDirection: 'asc' | 'desc';
+  searchAfterClause: { search_after?: readonly [number, number] };
+} {
+  if (cursor) {
+    if ('before' in cursor) {
+      return {
+        sortDirection: 'desc',
+        searchAfterClause:
+          cursor.before !== 'last'
+            ? { search_after: [cursor.before.time, cursor.before.tiebreaker] as const }
+            : {},
+      };
+    } else if (cursor.after !== 'first') {
+      return {
+        sortDirection: 'asc',
+        searchAfterClause: { search_after: [cursor.after.time, cursor.after.tiebreaker] as const },
+      };
+    }
+  }
+
+  return { sortDirection: 'asc', searchAfterClause: {} };
+}
+
 const LogSummaryDateRangeBucketRuntimeType = runtimeTypes.intersection([
   runtimeTypes.type({
     doc_count: runtimeTypes.number,
diff --git a/x-pack/legacy/plugins/infra/server/lib/domains/log_entries_domain/log_entries_domain.ts b/x-pack/legacy/plugins/infra/server/lib/domains/log_entries_domain/log_entries_domain.ts
index 347f0dcf795bce..2f71d56e1e0e31 100644
--- a/x-pack/legacy/plugins/infra/server/lib/domains/log_entries_domain/log_entries_domain.ts
+++ b/x-pack/legacy/plugins/infra/server/lib/domains/log_entries_domain/log_entries_domain.ts
@@ -13,7 +13,10 @@ import { JsonObject } from '../../../../common/typed_json';
 import {
   LogEntriesSummaryBucket,
   LogEntriesSummaryHighlightsBucket,
+  LogEntry,
   LogEntriesItem,
+  LogEntriesCursor,
+  LogColumn,
 } from '../../../../common/http_api';
 import { InfraLogEntry, InfraLogMessageSegment } from '../../../graphql/types';
 import {
@@ -32,12 +35,84 @@ import {
   compileFormattingRules,
 } from './message';
 
+export interface LogEntriesParams {
+  startDate: number;
+  endDate: number;
+  size?: number;
+  query?: JsonObject;
+  cursor?: { before: LogEntriesCursor | 'last' } | { after: LogEntriesCursor | 'first' };
+  highlightTerm?: string;
+}
+export interface LogEntriesAroundParams {
+  startDate: number;
+  endDate: number;
+  size?: number;
+  center: LogEntriesCursor;
+  query?: JsonObject;
+  highlightTerm?: string;
+}
+
+export const LOG_ENTRIES_PAGE_SIZE = 200;
+
 export class InfraLogEntriesDomain {
   constructor(
     private readonly adapter: LogEntriesAdapter,
     private readonly libs: { sources: InfraSources }
   ) {}
 
+  /* Name is temporary until we can clean up the GraphQL implementation */
+  /* eslint-disable-next-line @typescript-eslint/camelcase */
+  public async getLogEntriesAround__new(
+    requestContext: RequestHandlerContext,
+    sourceId: string,
+    params: LogEntriesAroundParams
+  ) {
+    const { startDate, endDate, center, query, size, highlightTerm } = params;
+
+    /*
+     * For odd sizes we will round this value down for the first half, and up
+     * for the second. This keeps the center cursor right in the center.
+     *
+     * For even sizes the half before is one entry bigger than the half after.
+     * [1, 2, 3, 4, 5, *6*, 7, 8, 9, 10]
+     *  | 5 entries |       |4 entries|
+     */
+    const halfSize = (size || LOG_ENTRIES_PAGE_SIZE) / 2;
+
+    const entriesBefore = await this.getLogEntries(requestContext, sourceId, {
+      startDate,
+      endDate,
+      query,
+      cursor: { before: center },
+      size: Math.floor(halfSize),
+      highlightTerm,
+    });
+
+    /*
+     * Elasticsearch's `search_after` returns documents after the specified cursor.
+     * - If we have documents before the center, we search after the last of
+     *   those. The first document of the new group is the center.
+     * - If there were no documents, we search one millisecond before the
+     *   center. It then becomes the first document.
+     */
+    const cursorAfter =
+      entriesBefore.length > 0
+        ? entriesBefore[entriesBefore.length - 1].cursor
+        : { time: center.time - 1, tiebreaker: 0 };
+
+    const entriesAfter = await this.getLogEntries(requestContext, sourceId, {
+      startDate,
+      endDate,
+      query,
+      cursor: { after: cursorAfter },
+      size: Math.ceil(halfSize),
+      highlightTerm,
+    });
+
+    return [...entriesBefore, ...entriesAfter];
+  }
+
+  /** @deprecated */
   public async getLogEntriesAround(
     requestContext: RequestHandlerContext,
     sourceId: string,
@@ -102,6 +177,62 @@ export class InfraLogEntriesDomain {
     };
   }
 
+  public async getLogEntries(
+    requestContext: RequestHandlerContext,
+    sourceId: string,
+    params: LogEntriesParams
+  ): Promise<LogEntry[]> {
+    const { configuration } = await this.libs.sources.getSourceConfiguration(
+      requestContext,
+      sourceId
+    );
+
+    const messageFormattingRules = compileFormattingRules(
+      getBuiltinRules(configuration.fields.message)
+    );
+
+    const requiredFields = getRequiredFields(configuration, messageFormattingRules);
+
+    const documents = await this.adapter.getLogEntries(
+      requestContext,
+      configuration,
+      requiredFields,
+      params
+    );
+
+    const entries = documents.map(doc => {
+      return {
+        id: doc.gid,
+        cursor: doc.key,
+        columns: configuration.logColumns.map(
+          (column): LogColumn => {
+            if ('timestampColumn' in column) {
+              return {
+                columnId: column.timestampColumn.id,
+                timestamp: doc.key.time,
+              };
+            } else if ('messageColumn' in column) {
+              return {
+                columnId: column.messageColumn.id,
+                message: messageFormattingRules.format(doc.fields, doc.highlights),
+              };
+            } else {
+              return {
+                columnId: column.fieldColumn.id,
+                field: column.fieldColumn.field,
+                value: stringify(doc.fields[column.fieldColumn.field]),
+                highlights: doc.highlights[column.fieldColumn.field] || [],
+              };
+            }
+          }
+        ),
+      };
+    });
+
+    return entries;
+  }
+
+  /** @deprecated */
   public async getLogEntriesBetween(
     requestContext: RequestHandlerContext,
     sourceId: string,
@@ -133,6 +264,7 @@ export class InfraLogEntriesDomain {
     return entries;
   }
 
+  /** @deprecated */
   public async getLogEntryHighlights(
     requestContext: RequestHandlerContext,
     sourceId: string,
@@ -324,6 +456,13 @@ export interface LogEntriesAdapter {
     highlightQuery?: LogEntryQuery
   ): Promise<LogEntryDocument[]>;
 
+  getLogEntries(
+    requestContext: RequestHandlerContext,
+    sourceConfiguration: InfraSourceConfiguration,
+    fields: string[],
+    params: LogEntriesParams
+  ): Promise<LogEntryDocument[]>;
+
   getContainedLogEntryDocuments(
     requestContext: RequestHandlerContext,
     sourceConfiguration: InfraSourceConfiguration,
@@ -366,6 +505,7 @@ export interface LogSummaryBucket {
   topEntryKeys: TimeKey[];
 }
 
+/** @deprecated */
 const convertLogDocumentToEntry = (
   sourceId: string,
   logColumns: InfraSourceConfiguration['logColumns'],
diff --git a/x-pack/legacy/plugins/infra/server/routes/log_entries/entries.ts b/x-pack/legacy/plugins/infra/server/routes/log_entries/entries.ts
new file mode 100644
index 00000000000000..361535886ab22d
--- /dev/null
+++ b/x-pack/legacy/plugins/infra/server/routes/log_entries/entries.ts
@@ -0,0 +1,85 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ + +import Boom from 'boom'; + +import { pipe } from 'fp-ts/lib/pipeable'; +import { fold } from 'fp-ts/lib/Either'; +import { identity } from 'fp-ts/lib/function'; +import { schema } from '@kbn/config-schema'; + +import { throwErrors } from '../../../common/runtime_types'; + +import { InfraBackendLibs } from '../../lib/infra_types'; +import { + LOG_ENTRIES_PATH, + logEntriesRequestRT, + logEntriesResponseRT, +} from '../../../common/http_api/log_entries'; +import { parseFilterQuery } from '../../utils/serialized_query'; +import { LogEntriesParams } from '../../lib/domains/log_entries_domain'; + +const escapeHatch = schema.object({}, { allowUnknowns: true }); + +export const initLogEntriesRoute = ({ framework, logEntries }: InfraBackendLibs) => { + framework.registerRoute( + { + method: 'post', + path: LOG_ENTRIES_PATH, + validate: { body: escapeHatch }, + }, + async (requestContext, request, response) => { + try { + const payload = pipe( + logEntriesRequestRT.decode(request.body), + fold(throwErrors(Boom.badRequest), identity) + ); + + const { startDate, endDate, sourceId, query, size } = payload; + + let entries; + if ('center' in payload) { + entries = await logEntries.getLogEntriesAround__new(requestContext, sourceId, { + startDate, + endDate, + query: parseFilterQuery(query), + center: payload.center, + size, + }); + } else { + let cursor: LogEntriesParams['cursor']; + if ('before' in payload) { + cursor = { before: payload.before }; + } else if ('after' in payload) { + cursor = { after: payload.after }; + } + + entries = await logEntries.getLogEntries(requestContext, sourceId, { + startDate, + endDate, + query: parseFilterQuery(query), + cursor, + size, + }); + } + + return response.ok({ + body: logEntriesResponseRT.encode({ + data: { + entries, + topCursor: entries[0].cursor, + bottomCursor: entries[entries.length - 1].cursor, + }, + }), + }); + } catch (error) { + return response.internalError({ + body: error.message, + }); + } + } + ); +}; diff --git a/x-pack/legacy/plugins/infra/server/routes/log_entries/highlights.ts b/x-pack/legacy/plugins/infra/server/routes/log_entries/highlights.ts new file mode 100644 index 00000000000000..8af81a6ee313dc --- /dev/null +++ b/x-pack/legacy/plugins/infra/server/routes/log_entries/highlights.ts @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import Boom from 'boom'; + +import { pipe } from 'fp-ts/lib/pipeable'; +import { fold } from 'fp-ts/lib/Either'; +import { identity } from 'fp-ts/lib/function'; +import { schema } from '@kbn/config-schema'; + +import { throwErrors } from '../../../common/runtime_types'; + +import { InfraBackendLibs } from '../../lib/infra_types'; +import { + LOG_ENTRIES_HIGHLIGHTS_PATH, + logEntriesHighlightsRequestRT, + logEntriesHighlightsResponseRT, +} from '../../../common/http_api/log_entries'; +import { parseFilterQuery } from '../../utils/serialized_query'; +import { LogEntriesParams } from '../../lib/domains/log_entries_domain'; + +const escapeHatch = schema.object({}, { allowUnknowns: true }); + +export const initLogEntriesHighlightsRoute = ({ framework, logEntries }: InfraBackendLibs) => { + framework.registerRoute( + { + method: 'post', + path: LOG_ENTRIES_HIGHLIGHTS_PATH, + validate: { body: escapeHatch }, + }, + async (requestContext, request, response) => { + try { + const payload = pipe( + logEntriesHighlightsRequestRT.decode(request.body), + fold(throwErrors(Boom.badRequest), identity) + ); + + const { startDate, endDate, sourceId, query, size, highlightTerms } = payload; + + let entriesPerHighlightTerm; + + if ('center' in payload) { + entriesPerHighlightTerm = await Promise.all( + highlightTerms.map(highlightTerm => + logEntries.getLogEntriesAround__new(requestContext, sourceId, { + startDate, + endDate, + query: parseFilterQuery(query), + center: payload.center, + size, + highlightTerm, + }) + ) + ); + } else { + let cursor: LogEntriesParams['cursor']; + if ('before' in payload) { + cursor = { before: payload.before }; + } else if ('after' in payload) { + cursor = { after: payload.after }; + } + + entriesPerHighlightTerm = await Promise.all( + highlightTerms.map(highlightTerm => + logEntries.getLogEntries(requestContext, sourceId, { + startDate, + endDate, + query: parseFilterQuery(query), + cursor, + size, + highlightTerm, + }) + ) + ); + } + + return response.ok({ + body: logEntriesHighlightsResponseRT.encode({ + data: entriesPerHighlightTerm.map(entries => ({ + entries, + topCursor: entries[0].cursor, + bottomCursor: entries[entries.length - 1].cursor, + })), + }), + }); + } catch (error) { + return response.internalError({ + body: error.message, + }); + } + } + ); +}; diff --git a/x-pack/legacy/plugins/infra/server/routes/log_entries/index.ts b/x-pack/legacy/plugins/infra/server/routes/log_entries/index.ts index 8fed914c3dc8c9..1090d35d89b851 100644 --- a/x-pack/legacy/plugins/infra/server/routes/log_entries/index.ts +++ b/x-pack/legacy/plugins/infra/server/routes/log_entries/index.ts @@ -4,6 +4,8 @@ * you may not use this file except in compliance with the Elastic License. 
  */
 
+export * from './entries';
+export * from './highlights';
 export * from './item';
 export * from './summary';
 export * from './summary_highlights';
diff --git a/x-pack/test/api_integration/apis/infra/log_entries.ts b/x-pack/test/api_integration/apis/infra/log_entries.ts
index 4020acc00c618e..8db1426a219d4e 100644
--- a/x-pack/test/api_integration/apis/infra/log_entries.ts
+++ b/x-pack/test/api_integration/apis/infra/log_entries.ts
@@ -9,6 +9,21 @@ import { ascending, pairs } from 'd3-array';
 import gql from 'graphql-tag';
 import { v4 as uuidv4 } from 'uuid';
 
+import { pipe } from 'fp-ts/lib/pipeable';
+import { identity } from 'fp-ts/lib/function';
+import { fold } from 'fp-ts/lib/Either';
+
+import {
+  createPlainError,
+  throwErrors,
+} from '../../../../legacy/plugins/infra/common/runtime_types';
+
+import {
+  LOG_ENTRIES_PATH,
+  logEntriesRequestRT,
+  logEntriesResponseRT,
+} from '../../../../legacy/plugins/infra/common/http_api';
+
 import { sharedFragments } from '../../../../legacy/plugins/infra/common/graphql/shared';
 import { InfraTimeKey } from '../../../../legacy/plugins/infra/public/graphql/types';
 import { FtrProviderContext } from '../../ftr_provider_context';
@@ -88,15 +103,209 @@ const logEntriesBetweenQuery = gql`
   ${sharedFragments.InfraLogEntryFields}
 `;
 
+const COMMON_HEADERS = {
+  'kbn-xsrf': 'some-xsrf-token',
+};
+
 export default function({ getService }: FtrProviderContext) {
   const esArchiver = getService('esArchiver');
   const client = getService('infraOpsGraphQLClient');
+  const supertest = getService('supertest');
   const sourceConfigurationService = getService('infraOpsSourceConfiguration');
 
   describe('log entry apis', () => {
     before(() => esArchiver.load('infra/metrics_and_logs'));
     after(() => esArchiver.unload('infra/metrics_and_logs'));
 
+    describe('/log_entries/entries', () => {
+      describe('with the default source', () => {
+        before(() => esArchiver.load('empty_kibana'));
+        after(() => esArchiver.unload('empty_kibana'));
+
+        it('works', async () => {
+          const { body } = await supertest
+            .post(LOG_ENTRIES_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesRequestRT.encode({
+                sourceId: 'default',
+                startDate: EARLIEST_KEY_WITH_DATA.time,
+                endDate: KEY_WITHIN_DATA_RANGE.time,
+              })
+            )
+            .expect(200);
+
+          const logEntriesResponse = pipe(
+            logEntriesResponseRT.decode(body),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          const entries = logEntriesResponse.data.entries;
+          const firstEntry = entries[0];
+          const lastEntry = entries[entries.length - 1];
+
+          // Has the default page size
+          expect(entries).to.have.length(200);
+
+          // Cursors are set correctly
+          expect(firstEntry.cursor).to.eql(logEntriesResponse.data.topCursor);
+          expect(lastEntry.cursor).to.eql(logEntriesResponse.data.bottomCursor);
+
+          // Entries fall within range
+          // @kbn/expect doesn't have `lessOrEqualThan` or `moreOrEqualThan` comparators
+          expect(firstEntry.cursor.time >= EARLIEST_KEY_WITH_DATA.time).to.be(true);
+          expect(lastEntry.cursor.time <= KEY_WITHIN_DATA_RANGE.time).to.be(true);
+        });
+
+        it('Paginates correctly with `after`', async () => {
+          const { body: firstPageBody } = await supertest
+            .post(LOG_ENTRIES_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesRequestRT.encode({
+                sourceId: 'default',
+                startDate: EARLIEST_KEY_WITH_DATA.time,
+                endDate: KEY_WITHIN_DATA_RANGE.time,
+                size: 10,
+              })
+            );
+          const firstPage = pipe(
+            logEntriesResponseRT.decode(firstPageBody),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          const { body: secondPageBody } = await supertest
+            .post(LOG_ENTRIES_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesRequestRT.encode({
+                sourceId: 'default',
+                startDate: EARLIEST_KEY_WITH_DATA.time,
+                endDate: KEY_WITHIN_DATA_RANGE.time,
+                after: firstPage.data.bottomCursor,
+                size: 10,
+              })
+            );
+          const secondPage = pipe(
+            logEntriesResponseRT.decode(secondPageBody),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          const { body: bothPagesBody } = await supertest
+            .post(LOG_ENTRIES_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesRequestRT.encode({
+                sourceId: 'default',
+                startDate: EARLIEST_KEY_WITH_DATA.time,
+                endDate: KEY_WITHIN_DATA_RANGE.time,
+                size: 20,
+              })
+            );
+          const bothPages = pipe(
+            logEntriesResponseRT.decode(bothPagesBody),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          expect(bothPages.data.entries).to.eql([
+            ...firstPage.data.entries,
+            ...secondPage.data.entries,
+          ]);
+
+          expect(bothPages.data.topCursor).to.eql(firstPage.data.topCursor);
+          expect(bothPages.data.bottomCursor).to.eql(secondPage.data.bottomCursor);
+        });
+
+        it('Paginates correctly with `before`', async () => {
+          const { body: lastPageBody } = await supertest
+            .post(LOG_ENTRIES_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesRequestRT.encode({
+                sourceId: 'default',
+                startDate: KEY_WITHIN_DATA_RANGE.time,
+                endDate: LATEST_KEY_WITH_DATA.time,
+                before: 'last',
+                size: 10,
+              })
+            );
+          const lastPage = pipe(
+            logEntriesResponseRT.decode(lastPageBody),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          const { body: secondToLastPageBody } = await supertest
+            .post(LOG_ENTRIES_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesRequestRT.encode({
+                sourceId: 'default',
+                startDate: KEY_WITHIN_DATA_RANGE.time,
+                endDate: LATEST_KEY_WITH_DATA.time,
+                before: lastPage.data.topCursor,
+                size: 10,
+              })
+            );
+          const secondToLastPage = pipe(
+            logEntriesResponseRT.decode(secondToLastPageBody),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          const { body: bothPagesBody } = await supertest
+            .post(LOG_ENTRIES_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesRequestRT.encode({
+                sourceId: 'default',
+                startDate: KEY_WITHIN_DATA_RANGE.time,
+                endDate: LATEST_KEY_WITH_DATA.time,
+                before: 'last',
+                size: 20,
+              })
+            );
+          const bothPages = pipe(
+            logEntriesResponseRT.decode(bothPagesBody),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          expect(bothPages.data.entries).to.eql([
+            ...secondToLastPage.data.entries,
+            ...lastPage.data.entries,
+          ]);
+
+          expect(bothPages.data.topCursor).to.eql(secondToLastPage.data.topCursor);
+          expect(bothPages.data.bottomCursor).to.eql(lastPage.data.bottomCursor);
+        });
+
+        it('centers entries around a point', async () => {
+          const { body } = await supertest
+            .post(LOG_ENTRIES_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesRequestRT.encode({
+                sourceId: 'default',
+                startDate: EARLIEST_KEY_WITH_DATA.time,
+                endDate: LATEST_KEY_WITH_DATA.time,
+                center: KEY_WITHIN_DATA_RANGE,
+              })
+            )
+            .expect(200);
+          const logEntriesResponse = pipe(
+            logEntriesResponseRT.decode(body),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          const entries = logEntriesResponse.data.entries;
+          const firstEntry = entries[0];
+          const lastEntry = entries[entries.length - 1];
+
+          expect(entries).to.have.length(200);
+          expect(firstEntry.cursor.time >= EARLIEST_KEY_WITH_DATA.time).to.be(true);
+          expect(lastEntry.cursor.time <= LATEST_KEY_WITH_DATA.time).to.be(true);
+        });
+      });
+    });
+
     describe('logEntriesAround', () => {
       describe('with the default source', () => {
         before(() => esArchiver.load('empty_kibana'));
diff --git a/x-pack/test/api_integration/apis/infra/log_entry_highlights.ts b/x-pack/test/api_integration/apis/infra/log_entry_highlights.ts
index 88d7d8716d0f54..67529c77f70f78 100644
--- a/x-pack/test/api_integration/apis/infra/log_entry_highlights.ts
+++ b/x-pack/test/api_integration/apis/infra/log_entry_highlights.ts
@@ -8,6 +8,21 @@ import expect from '@kbn/expect';
 import { ascending, pairs } from 'd3-array';
 import gql from 'graphql-tag';
 
+import { pipe } from 'fp-ts/lib/pipeable';
+import { identity } from 'fp-ts/lib/function';
+import { fold } from 'fp-ts/lib/Either';
+
+import {
+  createPlainError,
+  throwErrors,
+} from '../../../../legacy/plugins/infra/common/runtime_types';
+
+import {
+  LOG_ENTRIES_HIGHLIGHTS_PATH,
+  logEntriesHighlightsRequestRT,
+  logEntriesHighlightsResponseRT,
+} from '../../../../legacy/plugins/infra/common/http_api';
+
 import { FtrProviderContext } from '../../ftr_provider_context';
 import { sharedFragments } from '../../../../legacy/plugins/infra/common/graphql/shared';
 import { InfraTimeKey } from '../../../../legacy/plugins/infra/public/graphql/types';
@@ -29,14 +44,151 @@ const KEY_AFTER_END = {
   tiebreaker: 0,
 };
 
+const COMMON_HEADERS = {
+  'kbn-xsrf': 'some-xsrf-token',
+};
+
 export default function({ getService }: FtrProviderContext) {
   const esArchiver = getService('esArchiver');
+  const supertest = getService('supertest');
   const client = getService('infraOpsGraphQLClient');
 
   describe('log highlight apis', () => {
     before(() => esArchiver.load('infra/simple_logs'));
     after(() => esArchiver.unload('infra/simple_logs'));
 
+    describe('/log_entries/highlights', () => {
+      describe('with the default source', () => {
+        before(() => esArchiver.load('empty_kibana'));
+        after(() => esArchiver.unload('empty_kibana'));
+
+        it('highlights built-in message column', async () => {
+          const { body } = await supertest
+            .post(LOG_ENTRIES_HIGHLIGHTS_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesHighlightsRequestRT.encode({
+                sourceId: 'default',
+                startDate: KEY_BEFORE_START.time,
+                endDate: KEY_AFTER_END.time,
+                highlightTerms: ['message of document 0'],
+              })
+            )
+            .expect(200);
+
+          const logEntriesHighlightsResponse = pipe(
+            logEntriesHighlightsResponseRT.decode(body),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          expect(logEntriesHighlightsResponse.data).to.have.length(1);
+
+          const data = logEntriesHighlightsResponse.data[0];
+          const entries = data.entries;
+          const firstEntry = entries[0];
+          const lastEntry = entries[entries.length - 1];
+
+          // Finds expected entries
+          expect(entries).to.have.length(10);
+
+          // Cursors are set correctly
+          expect(firstEntry.cursor).to.eql(data.topCursor);
+          expect(lastEntry.cursor).to.eql(data.bottomCursor);
+
+          // Entries fall within range
+          // @kbn/expect doesn't have `lessOrEqualThan` or `moreOrEqualThan` comparators
+          expect(firstEntry.cursor.time >= KEY_BEFORE_START.time).to.be(true);
+          expect(lastEntry.cursor.time <= KEY_AFTER_END.time).to.be(true);
+
+          // All entries contain the highlights
+          entries.forEach(entry => {
+            entry.columns.forEach(column => {
+              if ('message' in column && 'highlights' in column.message[0]) {
+                expect(column.message[0].highlights).to.eql(['message', 'of', 'document', '0']);
+              }
+            });
+          });
+        });
+
+        it('highlights field columns', async () => {
+          const { body } = await supertest
+            .post(LOG_ENTRIES_HIGHLIGHTS_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesHighlightsRequestRT.encode({
+                sourceId: 'default',
+                startDate: KEY_BEFORE_START.time,
+                endDate: KEY_AFTER_END.time,
+                highlightTerms: ['generate_test_data/simple_logs'],
+              })
+            )
+            .expect(200);
+
+          const logEntriesHighlightsResponse = pipe(
+            logEntriesHighlightsResponseRT.decode(body),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          expect(logEntriesHighlightsResponse.data).to.have.length(1);
+
+          const entries = logEntriesHighlightsResponse.data[0].entries;
+
+          // Finds expected entries
+          expect(entries).to.have.length(50);
+
+          // All entries contain the highlights
+          entries.forEach(entry => {
+            entry.columns.forEach(column => {
+              if ('field' in column && 'highlights' in column && column.highlights.length > 0) {
+                // https://github.com/elastic/kibana/issues/49959
+                // expect(column.highlights).to.eql(['generate_test_data/simple_logs']);
+                expect(column.highlights).to.eql(['generate_test_data']);
+              }
+            });
+          });
+        });
+
+        it('applies the query as well as the highlight', async () => {
+          const { body } = await supertest
+            .post(LOG_ENTRIES_HIGHLIGHTS_PATH)
+            .set(COMMON_HEADERS)
+            .send(
+              logEntriesHighlightsRequestRT.encode({
+                sourceId: 'default',
+                startDate: KEY_BEFORE_START.time,
+                endDate: KEY_AFTER_END.time,
+                query: JSON.stringify({
+                  multi_match: { query: 'host-a', type: 'phrase', lenient: true },
+                }),
+                highlightTerms: ['message'],
+              })
+            )
+            .expect(200);
+
+          const logEntriesHighlightsResponse = pipe(
+            logEntriesHighlightsResponseRT.decode(body),
+            fold(throwErrors(createPlainError), identity)
+          );
+
+          expect(logEntriesHighlightsResponse.data).to.have.length(1);
+
+          const entries = logEntriesHighlightsResponse.data[0].entries;
+
+          // Finds expected entries
+          expect(entries).to.have.length(25);
+
+          // All entries contain the highlights
+          entries.forEach(entry => {
+            entry.columns.forEach(column => {
+              if ('message' in column && 'highlights' in column.message[0]) {
+                expect(column.message[0].highlights).to.eql(['message', 'message']);
+              }
+            });
+          });
+        });
+      });
+    });
+
     describe('logEntryHighlights', () => {
       describe('with the default source', () => {
         before(() => esArchiver.load('empty_kibana'));