
Commit

Address feedback.
lukeelmers committed Feb 3, 2021
1 parent f3067a6 commit 2283752
Showing 6 changed files with 45 additions and 60 deletions.
@@ -12,8 +12,6 @@ import { createReadStream } from 'fs';
 import { getResponsePayloadBytes } from './get_payload_size';
 
 describe('getPayloadSize', () => {
-  afterEach(() => mockFs.restore());
-
   describe('handles Buffers', () => {
     test('with ascii characters', () => {
       const payload = 'heya';
@@ -28,7 +26,9 @@ describe('getPayloadSize', () => {
     });
   });
 
-  describe('handles Streams', () => {
+  describe('handles fs streams', () => {
+    afterEach(() => mockFs.restore());
+
     test('with ascii characters', async () => {
       mockFs({ 'test.txt': 'heya' });
       const readStream = createReadStream('test.txt');
21 changes: 8 additions & 13 deletions packages/kbn-legacy-logging/src/utils/get_payload_size.ts
@@ -10,20 +10,12 @@ import type { ReadStream } from 'fs';
 import type { ResponseObject } from '@hapi/hapi';
 
 const isBuffer = (obj: unknown): obj is Buffer => Buffer.isBuffer(obj);
-const isObject = (obj: unknown): obj is Record<string, any> =>
+const isObject = (obj: unknown): obj is Record<string, unknown> =>
   typeof obj === 'object' && obj !== null;
-const isReadStream = (obj: unknown): obj is ReadStream =>
+const isFsReadStream = (obj: unknown): obj is ReadStream =>
   typeof obj === 'object' && obj !== null && 'bytesRead' in obj;
 const isString = (obj: unknown): obj is string => typeof obj === 'string';
 
-function getContentLength(headers: Record<string, any>): string | string[] | void {
-  for (const h of Object.keys(headers)) {
-    if (h.toLowerCase() === 'content-length') {
-      return headers[h];
-    }
-  }
-}
-
 /**
  * Attempts to determine the size (in bytes) of a hapi/good
  * responsePayload based on the payload type. Falls back to
@@ -42,8 +34,7 @@ export function getResponsePayloadBytes(
   payload: ResponseObject['source'],
   headers: Record<string, any> = {}
 ): number | undefined {
-  const contentLength = getContentLength(headers);
-
+  const contentLength = headers['content-length'];
   if (contentLength) {
     const val = parseInt(
       // hapi response headers can be `string | string[]`, so we need to handle both cases
@@ -52,15 +43,19 @@
     );
     return !isNaN(val) ? val : undefined;
   }
+
   if (isBuffer(payload)) {
     return payload.byteLength;
   }
-  if (isReadStream(payload)) {
+
+  if (isFsReadStream(payload)) {
     return payload.bytesRead;
   }
+
   if (isString(payload)) {
     return Buffer.byteLength(payload);
   }
+
   if (isObject(payload)) {
     return Buffer.byteLength(JSON.stringify(payload));
   }
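For reference, a rough usage sketch of the simplified legacy helper above. The payloads and header values are made up for illustration; the direct headers['content-length'] lookup assumes the header keys already arrive lowercased, which is what allowed the case-insensitive getContentLength scan to be dropped.

import { getResponsePayloadBytes } from './get_payload_size';

// An explicit content-length header always wins over inspecting the payload itself.
getResponsePayloadBytes('heya', { 'content-length': '4' }); // 4
// Otherwise the size is derived from the payload type.
getResponsePayloadBytes(Buffer.from('heya')); // 4, via Buffer#byteLength
getResponsePayloadBytes('heya'); // 4, via Buffer.byteLength of the string
getResponsePayloadBytes({ hello: 'world' }); // 17, byte length of the serialized JSON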
9 changes: 5 additions & 4 deletions src/core/server/http/logging/get_payload_size.test.ts
@@ -7,7 +7,7 @@
  */
 
 import type { Request } from '@hapi/hapi';
-import { Boom } from '@hapi/boom';
+import Boom from '@hapi/boom';
 
 import mockFs from 'mock-fs';
 import { createReadStream } from 'fs';
@@ -22,10 +22,9 @@ describe('getPayloadSize', () => {
   let logger: MockedLogger;
 
   beforeEach(() => (logger = loggerMock.create()));
-  afterEach(() => mockFs.restore());
 
   test('handles Boom errors', () => {
-    const boomError = new Boom('oops');
+    const boomError = Boom.badRequest();
     const payload = boomError.output.payload;
     const result = getResponsePayloadBytes(boomError, logger);
     expect(result).toBe(JSON.stringify(payload).length);
@@ -55,7 +54,9 @@
     });
   });
 
-  describe('handles Streams', () => {
+  describe('handles fs streams', () => {
+    afterEach(() => mockFs.restore());
+
     test('with ascii characters', async () => {
       mockFs({ 'test.txt': 'heya' });
       const source = createReadStream('test.txt');
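A note on the import change in this test: the default import of @hapi/boom exposes the error class as Boom.Boom and factory helpers such as Boom.badRequest(), so no class is constructed directly anymore. A minimal sketch of what the updated assertion relies on (the 'oops' message is illustrative):

import Boom from '@hapi/boom';

const err = Boom.badRequest('oops');
err.output.statusCode; // 400
err.output.payload; // { statusCode: 400, error: 'Bad Request', message: 'oops' }
// The test compares the computed size against this serialized payload:
JSON.stringify(err.output.payload).length;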
28 changes: 11 additions & 17 deletions src/core/server/http/logging/get_payload_size.ts
@@ -16,24 +16,10 @@ type Response = Request['response'];
 const isBuffer = (src: unknown, res: Response): src is Buffer => {
   return !isBoom(res) && res.variety === 'buffer' && res.source === src;
 };
-const isReadStream = (src: unknown, res: Response): src is ReadStream => {
+const isFsReadStream = (src: unknown, res: Response): src is ReadStream => {
   return !isBoom(res) && res.variety === 'stream' && res.source === src;
 };
 
-function getContentLength(res: Response): string | string[] | void {
-  const headers = isBoom(res)
-    ? (res.output.headers as Record<string, string | string[]>)
-    : res.headers;
-
-  if (headers) {
-    for (const h of Object.keys(headers)) {
-      if (h.toLowerCase() === 'content-length') {
-        return headers[h];
-      }
-    }
-  }
-}
-
 /**
  * Attempts to determine the size (in bytes) of a Hapi response
  * body based on the payload type. Falls back to `undefined`
@@ -45,7 +31,11 @@ function getContentLength(res: Response): string | string[] | void {
  */
 export function getResponsePayloadBytes(response: Response, log: Logger): number | undefined {
   try {
-    const contentLength = getContentLength(response);
+    const headers = isBoom(response)
+      ? (response.output.headers as Record<string, string | string[]>)
+      : response.headers;
+
+    const contentLength = headers && headers['content-length'];
     if (contentLength) {
       const val = parseInt(
         // hapi response headers can be `string | string[]`, so we need to handle both cases
@@ -54,15 +44,19 @@ export function getResponsePayloadBytes(response: Response, log: Logger): number
       );
       return !isNaN(val) ? val : undefined;
     }
+
     if (isBoom(response)) {
       return Buffer.byteLength(JSON.stringify(response.output.payload));
     }
+
     if (isBuffer(response.source, response)) {
       return response.source.byteLength;
     }
-    if (isReadStream(response.source, response)) {
+
+    if (isFsReadStream(response.source, response)) {
      return response.source.bytesRead;
     }
+
     if (response.variety === 'plain') {
       return typeof response.source === 'string'
         ? Buffer.byteLength(response.source)
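The inlined lookup above reads headers from response.output.headers for Boom errors and from response.headers otherwise. A small sketch of the Boom side, using an illustrative error with no content-length set, so the helper falls through to the serialized payload:

import Boom from '@hapi/boom';

const boomRes = Boom.badRequest('oops');
boomRes.output.headers['content-length']; // undefined, nothing to parse
Buffer.byteLength(JSON.stringify(boomRes.output.payload)); // what the isBoom branch returns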
19 changes: 4 additions & 15 deletions src/core/server/http/logging/get_response_log.test.ts
@@ -7,7 +7,7 @@
  */
 
 import type { Request } from '@hapi/hapi';
-import { Boom } from '@hapi/boom';
+import Boom from '@hapi/boom';
 import { loggerMock, MockedLogger } from '../../logging/logger.mock';
 import { getEcsResponseLog } from './get_response_log';
 
@@ -26,7 +26,7 @@ interface RequestFixtureOptions {
   mime?: string;
   path?: string;
   query?: Record<string, any>;
-  response?: Record<string, any> | Boom;
+  response?: Record<string, any> | Boom.Boom;
 }
 
 function createMockHapiRequest({
@@ -142,10 +142,10 @@ describe('getEcsResponseLog', () => {
 
   test('handles Boom errors in the response', () => {
     const req = createMockHapiRequest({
-      response: new Boom('oops'),
+      response: Boom.badRequest(),
     });
     const result = getEcsResponseLog(req, logger);
-    expect(result.http.response.status_code).toBe(500);
+    expect(result.http.response.status_code).toBe(400);
   });
 
   describe('filters sensitive headers', () => {
@@ -169,17 +169,6 @@
         }
       `);
     });
-
-    test('is case-insensitive when filtering headers', () => {
-      const req = createMockHapiRequest({
-        headers: { Authorization: 'a', COOKIE: 'b', 'user-agent': 'hi' },
-        response: { headers: { 'content-length': 123, 'Set-Cookie': 'c' } },
-      });
-      const result = getEcsResponseLog(req, logger);
-      expect(result.http.request.headers.Authorization).toBe('[REDACTED]');
-      expect(result.http.request.headers.COOKIE).toBe('[REDACTED]');
-      expect(result.http.response.headers['Set-Cookie']).toBe('[REDACTED]');
-    });
   });
 
   describe('ecs', () => {
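For context on the expected status change: constructing the error class directly defaults to a 500 response, while the factory pins the status explicitly, which is why the assertion moves from 500 to 400. A quick sketch, assuming @hapi/boom v9 semantics:

import Boom from '@hapi/boom';

new Boom.Boom('oops').output.statusCode; // 500, the old default the test leaned on
Boom.badRequest().output.statusCode; // 400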
22 changes: 14 additions & 8 deletions src/core/server/http/logging/get_response_log.ts
@@ -19,14 +19,20 @@ const FORBIDDEN_HEADERS = ['authorization', 'cookie', 'set-cookie'];
 const REDACTED_HEADER_TEXT = '[REDACTED]';
 
 // We are excluding sensitive headers by default, until we have a log filtering mechanism.
-function redactSensitiveHeaders(headers: Record<string, unknown>): Record<string, unknown> {
-  Object.keys(headers).forEach((key) => {
-    if (FORBIDDEN_HEADERS.includes(key.toLowerCase())) {
-      headers[key] = REDACTED_HEADER_TEXT;
-    }
-  });
-
-  return headers;
+function redactSensitiveHeaders(
+  headers: Record<string, string | string[]>
+): Record<string, string | string[]> {
+  return (
+    headers &&
+    Object.keys(headers).reduce(
+      (acc, key) => ({
+        // Create a shallow copy to prevent mutating the original headers
+        ...acc,
+        [key]: FORBIDDEN_HEADERS.includes(key) ? REDACTED_HEADER_TEXT : headers[key],
+      }),
+      {} as Record<string, string | string[]>
+    )
+  );
 }
 
 /**
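A brief usage sketch of the rewritten redaction helper above. redactSensitiveHeaders is module-private, so this is illustrative rather than something that can be imported as-is; note that the FORBIDDEN_HEADERS.includes(key) check is now case-sensitive, so header keys are expected to arrive already lowercased (the case-insensitivity test was removed accordingly).

const original = { authorization: 'Basic abc123', 'user-agent': 'Kibana' };
const redacted = redactSensitiveHeaders(original);
// redacted => { authorization: '[REDACTED]', 'user-agent': 'Kibana' }
// `original` is left untouched: the reduce builds a shallow copy instead of
// mutating the incoming headers object like the old forEach implementation did.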
