Neutral-naming in reporting plugin (#77371) (#77406)
* refactor: πŸ’‘ remove "blacklist" reference in reporting plugin

* refactor: πŸ’‘ remove "whitelist" in reporting plugin

* fix: πŸ› correctly import function after refactor

* refactor: πŸ’‘ use "blocked" instead of "denied" terminology
streamich authored Sep 15, 2020
1 parent d8a0688 commit 4d4dd79
Showing 9 changed files with 32 additions and 32 deletions.
6 changes: 3 additions & 3 deletions x-pack/plugins/reporting/common/constants.ts
@@ -23,7 +23,7 @@ export const CSV_REPORTING_ACTION = 'downloadCsvReport';
export const CSV_BOM_CHARS = '\ufeff';
export const CSV_FORMULA_CHARS = ['=', '+', '-', '@'];

-export const WHITELISTED_JOB_CONTENT_TYPES = [
+export const ALLOWED_JOB_CONTENT_TYPES = [
'application/json',
'application/pdf',
CONTENT_TYPE_CSV,
@@ -34,7 +34,7 @@ export const WHITELISTED_JOB_CONTENT_TYPES = [
// See:
// https://github.com/chromium/chromium/blob/3611052c055897e5ebbc5b73ea295092e0c20141/services/network/public/cpp/header_util_unittest.cc#L50
// For a list of headers that chromium doesn't like
-export const KBN_SCREENSHOT_HEADER_BLACKLIST = [
+export const KBN_SCREENSHOT_HEADER_BLOCK_LIST = [
'accept-encoding',
'connection',
'content-length',
@@ -51,7 +51,7 @@ export const KBN_SCREENSHOT_HEADER_BLACKLIST = [
'keep-alive',
];

-export const KBN_SCREENSHOT_HEADER_BLACKLIST_STARTS_WITH_PATTERN = ['proxy-'];
+export const KBN_SCREENSHOT_HEADER_BLOCK_LIST_STARTS_WITH_PATTERN = ['proxy-'];

export const UI_SETTINGS_CUSTOM_PDF_LOGO = 'xpackReporting:customPdfLogo';

20 changes: 10 additions & 10 deletions x-pack/plugins/reporting/server/browsers/network_policy.test.ts
@@ -109,36 +109,36 @@ describe('Network Policy', () => {
expect(allowRequest(url, rules)).toEqual(false);
});

-it('allows requests when hosts are whitelisted IP addresses', () => {
+it('allows requests when hosts are allowed IP addresses', () => {
const url = 'http://192.168.1.1/cool/route/bro';
const rules = [{ allow: true, host: '192.168.1.1' }, { allow: false }];

expect(allowRequest(url, rules)).toEqual(true);
});

-it('denies requests when hosts are blacklisted IP addresses', () => {
+it('denies requests when hosts are from blocked IP addresses', () => {
const url = 'http://192.168.1.1/cool/route/bro';
const rules = [{ allow: false, host: '192.168.1.1' }, { allow: true }];

expect(allowRequest(url, rules)).toEqual(false);
});

-it('allows requests when hosts are IP addresses not blacklisted', () => {
+it('allows requests when hosts are IP addresses that are not blocked', () => {
const url = 'http://192.168.2.1/cool/route/bro';
const rules = [{ allow: false, host: '192.168.1.1' }, { allow: true }];

expect(allowRequest(url, rules)).toEqual(true);
});

-it('denies requests when hosts are IP addresses not whitelisted', () => {
+it('denies requests when hosts are IP addresses not explicitly allowed', () => {
const url = 'http://192.168.2.1/cool/route/bro';
const rules = [{ allow: true, host: '192.168.1.1' }, { allow: false }];

expect(allowRequest(url, rules)).toEqual(false);
});

describe('Common cases', () => {
-it('allows whitelisting of certain routes based upon protocol', () => {
+it('allows certain routes based upon protocol', () => {
const rules = [
{ allow: true, host: 'kibana.com', protocol: 'http:' },
{ allow: true, protocol: 'https:' },
@@ -150,39 +150,39 @@ describe('Network Policy', () => {
expect(allowRequest('http://bad.com/some/route', rules)).toEqual(false);
});

-it('allows blacklisting of certain IPs', () => {
+it('allows blocking of certain IPs', () => {
const rules = [{ allow: false, host: '169.254.0.0' }, { allow: true }];

expect(allowRequest('http://kibana.com/some/route', rules)).toEqual(true);
expect(allowRequest('http://bad.com/some/route', rules)).toEqual(true);
expect(allowRequest('https://169.254.0.0/some/route', rules)).toEqual(false);
});

-it('allows whitelisting a single host on https', () => {
+it('allows single host on https', () => {
const rules = [{ allow: true, host: 'kibana.com', protocol: 'https:' }, { allow: false }];

expect(allowRequest('http://kibana.com/some/route', rules)).toEqual(false);
expect(allowRequest('http://bad.com/some/route', rules)).toEqual(false);
expect(allowRequest('https://kibana.com/some/route', rules)).toEqual(true);
});

-it('allows whitelisting a single protocol to http', () => {
+it('allows single protocol to http', () => {
const rules = [{ allow: true, protocol: 'https:' }, { allow: false }];

expect(allowRequest('http://kibana.com/some/route', rules)).toEqual(false);
expect(allowRequest('http://bad.com/some/route', rules)).toEqual(false);
expect(allowRequest('https://good.com/some/route', rules)).toEqual(true);
});

-it('allows whitelisting a single domain', () => {
+it('allows single domain', () => {
const rules = [{ allow: true, host: 'kibana.com' }, { allow: false }];

expect(allowRequest('http://kibana.com/some/route', rules)).toEqual(true);
expect(allowRequest('http://www.kibana.com/some/route', rules)).toEqual(true);
expect(allowRequest('https://www-kibana.com/some/route', rules)).toEqual(false);
});

-it('can blacklist bad protocols', () => {
+it('can ban bad protocols', () => {
const rules = [
{ allow: true, protocol: 'http:' },
{ allow: true, protocol: 'https:' },
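These tests exercise allowRequest(url, rules) from the reporting network policy, where rules are evaluated top to bottom and the first rule whose host/protocol constraints match decides the outcome. The following is a minimal TypeScript sketch consistent with the behaviour the tests describe; it is an illustration only, not the actual network_policy.ts implementation.

// Illustrative sketch of a first-match-wins network policy check,
// inferred from the tests above; not the actual network_policy.ts source.
interface NetworkPolicyRule {
  allow: boolean;
  host?: string;
  protocol?: string; // e.g. 'https:'
}

// A rule host of 'kibana.com' should match 'www.kibana.com' but not
// 'www-kibana.com', so compare whole domain labels from the right.
const hostMatches = (actualHost: string, ruleHost: string): boolean => {
  const actual = actualHost.split('.').reverse();
  const rule = ruleHost.split('.').reverse();
  return rule.every((label, i) => label === actual[i]);
};

export const allowRequest = (url: string, rules: NetworkPolicyRule[]): boolean => {
  const { hostname, protocol } = new URL(url);
  for (const rule of rules) {
    const hostOk = rule.host === undefined || hostMatches(hostname, rule.host);
    const protocolOk = rule.protocol === undefined || protocol === rule.protocol;
    if (hostOk && protocolOk) {
      return rule.allow; // first matching rule wins
    }
  }
  return false; // nothing matched: deny by default
};

Note how the rule lists in the tests end with a catch-all ({ allow: true } or { allow: false }); with first-match-wins evaluation, that final rule sets the default when nothing more specific matches.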
@@ -8,5 +8,5 @@ export { decryptJobHeaders } from './decrypt_job_headers';
export { getConditionalHeaders } from './get_conditional_headers';
export { getCustomLogo } from './get_custom_logo';
export { getFullUrls } from './get_full_urls';
-export { omitBlacklistedHeaders } from './omit_blacklisted_headers';
+export { omitBlockedHeaders } from './omit_blocked_headers';
export { validateUrls } from './validate_urls';
@@ -4,15 +4,15 @@
* you may not use this file except in compliance with the Elastic License.
*/

-import { omitBlacklistedHeaders } from './index';
+import { omitBlockedHeaders } from './index';

-test(`omits blacklisted headers`, async () => {
+test(`omits blocked headers`, async () => {
const permittedHeaders = {
foo: 'bar',
baz: 'quix',
};

-const blacklistedHeaders = {
+const blockedHeaders = {
'accept-encoding': '',
connection: 'upgrade',
'content-length': '',
@@ -24,7 +24,7 @@ test(`omits blacklisted headers`, async () => {
trailer: 's are for trucks',
};

-const filteredHeaders = await omitBlacklistedHeaders({
+const filteredHeaders = await omitBlockedHeaders({
job: {
title: 'cool-job-bro',
type: 'csv',
@@ -36,7 +36,7 @@ test(`omits blacklisted headers`, async () => {
},
decryptedHeaders: {
...permittedHeaders,
-...blacklistedHeaders,
+...blockedHeaders,
},
});

@@ -5,11 +5,11 @@
*/
import { omitBy } from 'lodash';
import {
-KBN_SCREENSHOT_HEADER_BLACKLIST,
-KBN_SCREENSHOT_HEADER_BLACKLIST_STARTS_WITH_PATTERN,
+KBN_SCREENSHOT_HEADER_BLOCK_LIST,
+KBN_SCREENSHOT_HEADER_BLOCK_LIST_STARTS_WITH_PATTERN,
} from '../../../common/constants';

-export const omitBlacklistedHeaders = <TaskPayloadType>({
+export const omitBlockedHeaders = <TaskPayloadType>({
job,
decryptedHeaders,
}: {
@@ -20,8 +20,8 @@ export const omitBlacklistedHeaders = <TaskPayloadType>({
decryptedHeaders,
(_value, header: string) =>
header &&
-(KBN_SCREENSHOT_HEADER_BLACKLIST.includes(header) ||
-KBN_SCREENSHOT_HEADER_BLACKLIST_STARTS_WITH_PATTERN.some((pattern) =>
+(KBN_SCREENSHOT_HEADER_BLOCK_LIST.includes(header) ||
+KBN_SCREENSHOT_HEADER_BLOCK_LIST_STARTS_WITH_PATTERN.some((pattern) =>
header?.startsWith(pattern)
))
);
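The predicate passed to lodash's omitBy above returns true for headers that should be dropped: either an exact match against the block list or a prefix match against the starts-with patterns. As a dependency-free illustration of the same filtering — using only a small sample of the header names visible in the constants diff, not the full lists — the logic boils down to:

// Illustrative sample of blocked header names; the real constants in
// constants.ts contain more entries than shown here.
const BLOCK_LIST = ['accept-encoding', 'connection', 'content-length', 'keep-alive'];
const BLOCK_LIST_STARTS_WITH = ['proxy-'];

const isBlocked = (header: string): boolean =>
  BLOCK_LIST.includes(header) ||
  BLOCK_LIST_STARTS_WITH.some((pattern) => header.startsWith(pattern));

// Keep only the entries whose names are not blocked.
const omitBlocked = (headers: Record<string, string>): Record<string, string> =>
  Object.fromEntries(Object.entries(headers).filter(([name]) => !isBlocked(name)));

// Example: authorization and cookie survive; content-length and proxy-* are dropped.
console.log(
  omitBlocked({
    authorization: 'Basic ...',
    cookie: 'sid=abc',
    'content-length': '42',
    'proxy-authorization': 'secret',
  })
);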
@@ -13,7 +13,7 @@ import {
decryptJobHeaders,
getConditionalHeaders,
getFullUrls,
-omitBlacklistedHeaders,
+omitBlockedHeaders,
} from '../../common';
import { generatePngObservableFactory } from '../lib/generate_png';
import { TaskPayloadPNG } from '../types';
@@ -37,7 +37,7 @@ export const runTaskFnFactory: QueuedPngExecutorFactory = function executeJobFac
const jobLogger = logger.clone([jobId]);
const process$: Rx.Observable<TaskRunResult> = Rx.of(1).pipe(
mergeMap(() => decryptJobHeaders({ encryptionKey, job, logger })),
-map((decryptedHeaders) => omitBlacklistedHeaders({ job, decryptedHeaders })),
+map((decryptedHeaders) => omitBlockedHeaders({ job, decryptedHeaders })),
map((filteredHeaders) => getConditionalHeaders({ config, job, filteredHeaders })),
mergeMap((conditionalHeaders) => {
const urls = getFullUrls({ config, job });
@@ -14,7 +14,7 @@ import {
getConditionalHeaders,
getCustomLogo,
getFullUrls,
-omitBlacklistedHeaders,
+omitBlockedHeaders,
} from '../../common';
import { generatePdfObservableFactory } from '../lib/generate_pdf';
import { TaskPayloadPDF } from '../types';
@@ -40,7 +40,7 @@ export const runTaskFnFactory: QueuedPdfExecutorFactory = function executeJobFac
const jobLogger = logger.clone([jobId]);
const process$: Rx.Observable<TaskRunResult> = Rx.of(1).pipe(
mergeMap(() => decryptJobHeaders({ encryptionKey, job, logger })),
-map((decryptedHeaders) => omitBlacklistedHeaders({ job, decryptedHeaders })),
+map((decryptedHeaders) => omitBlockedHeaders({ job, decryptedHeaders })),
map((filteredHeaders) => getConditionalHeaders({ config, job, filteredHeaders })),
mergeMap((conditionalHeaders) =>
getCustomLogo({ reporting, config, job, conditionalHeaders })
@@ -7,7 +7,7 @@
import { i18n } from '@kbn/i18n';
import { ReportingCore } from '../..';
import { API_DIAGNOSE_URL } from '../../../common/constants';
-import { omitBlacklistedHeaders } from '../../export_types/common';
+import { omitBlockedHeaders } from '../../export_types/common';
import { getAbsoluteUrlFactory } from '../../export_types/common/get_absolute_url';
import { generatePngObservableFactory } from '../../export_types/png/lib/generate_png';
import { LevelLogger as Logger } from '../../lib';
@@ -65,7 +65,7 @@ export const registerDiagnoseScreenshot = (reporting: ReportingCore, logger: Log
};

const headers = {
-headers: omitBlacklistedHeaders({
+headers: omitBlockedHeaders({
job: null,
decryptedHeaders,
}),
@@ -6,7 +6,7 @@

import { kibanaResponseFactory } from 'kibana/server';
import { ReportingCore } from '../../';
-import { WHITELISTED_JOB_CONTENT_TYPES } from '../../../common/constants';
+import { ALLOWED_JOB_CONTENT_TYPES } from '../../../common/constants';
import { ReportingUser } from '../../types';
import { getDocumentPayloadFactory } from './get_document_payload';
import { jobsQueryFactory } from './jobs_query';
@@ -48,7 +48,7 @@ export function downloadJobResponseHandlerFactory(reporting: ReportingCore) {

const payload = getDocumentPayload(doc);

-if (!payload.contentType || !WHITELISTED_JOB_CONTENT_TYPES.includes(payload.contentType)) {
+if (!payload.contentType || !ALLOWED_JOB_CONTENT_TYPES.includes(payload.contentType)) {
return res.badRequest({
body: `Unsupported content-type of ${payload.contentType} specified by job output`,
});
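With the rename, the download route's guard reads: reject the job's stored output unless its content type appears in ALLOWED_JOB_CONTENT_TYPES. A standalone TypeScript version of that check, using only the allowed types visible in the constants diff above (the real constant has further entries hidden by the collapsed hunk), might look like this:

// Illustrative subset of ALLOWED_JOB_CONTENT_TYPES from constants.ts.
const ALLOWED_JOB_CONTENT_TYPES = ['application/json', 'application/pdf'];

const isDownloadableContentType = (contentType?: string): boolean =>
  typeof contentType === 'string' && ALLOWED_JOB_CONTENT_TYPES.includes(contentType);

console.log(isDownloadableContentType('application/pdf')); // true
console.log(isDownloadableContentType('text/html')); // false: the route responds with badRequest
console.log(isDownloadableContentType(undefined)); // false: the route responds with badRequest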
