[7.x] typescript-ify portions of src/optimize (#64688) (#64729)
Spencer authored Apr 29, 2020
1 parent 94a1e16 commit be3b59e
Showing 10 changed files with 232 additions and 121 deletions.
@@ -18,10 +18,13 @@
*/

import { isAbsolute, extname, join } from 'path';
import LruCache from 'lru-cache';

import Hapi from 'hapi';
import * as UiSharedDeps from '@kbn/ui-shared-deps';

import { createDynamicAssetResponse } from './dynamic_asset_response';
import { assertIsNpUiPluginPublicDirs } from '../np_ui_plugin_public_dirs';
import { FileHashCache } from './file_hash_cache';
import { assertIsNpUiPluginPublicDirs, NpUiPluginPublicDirs } from '../np_ui_plugin_public_dirs';
import { fromRoot } from '../../core/server/utils';

/**
@@ -44,11 +47,17 @@ export function createBundlesRoute({
basePublicPath,
builtCssPath,
npUiPluginPublicDirs = [],
}: {
regularBundlesPath: string;
dllBundlesPath: string;
basePublicPath: string;
builtCssPath: string;
npUiPluginPublicDirs?: NpUiPluginPublicDirs;
}) {
// rather than calculate the fileHash on every request, we
// provide a cache object to `resolveDynamicAssetResponse()` that
// will store the 100 most recently used hashes.
const fileHashCache = new LruCache(100);
const fileHashCache = new FileHashCache();
assertIsNpUiPluginPublicDirs(npUiPluginPublicDirs);

if (typeof regularBundlesPath !== 'string' || !isAbsolute(regularBundlesPath)) {
@@ -122,6 +131,12 @@ function buildRouteForBundles({
bundlesPath,
fileHashCache,
replacePublicPath = true,
}: {
publicPath: string;
routePath: string;
bundlesPath: string;
fileHashCache: FileHashCache;
replacePublicPath?: boolean;
}) {
return {
method: 'GET',
@@ -130,7 +145,7 @@
auth: false,
ext: {
onPreHandler: {
method(request, h) {
method(request: Hapi.Request, h: Hapi.ResponseToolkit) {
const ext = extname(request.params.path);

if (ext !== '.js' && ext !== '.css') {
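
The hunks above rely on a pattern used throughout this commit: typing a destructured options object inline, with optional members getting both a "?" in the type and a default in the destructuring. A minimal sketch of that pattern; the function and property names are illustrative, not from the commit:

function buildExampleRoute({
  routePath,
  publicPath,
  replacePublicPath = true,
}: {
  routePath: string;
  publicPath: string;
  replacePublicPath?: boolean;
}) {
  return { path: `${routePath}{path*}`, publicPath, replacePublicPath };
}

// callers now get compile-time checking of the options bag
buildExampleRoute({ routePath: '/bundles/', publicPath: '/base/bundles/' });
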
@@ -18,14 +18,20 @@
*/

import { resolve } from 'path';
import { open, fstat, createReadStream, close } from 'fs';
import Fs from 'fs';
import { promisify } from 'util';

import Boom from 'boom';
import { fromNode as fcb } from 'bluebird';
import Hapi from 'hapi';

import { FileHashCache } from './file_hash_cache';
import { getFileHash } from './file_hash';
import { replacePlaceholder } from '../public_path_placeholder';

const asyncOpen = promisify(Fs.open);
const asyncClose = promisify(Fs.close);
const asyncFstat = promisify(Fs.fstat);

/**
* Create a Hapi response for the requested path. This is designed
* to replicate a subset of the features provided by Hapi's Inert
@@ -44,39 +50,46 @@ import { replacePlaceholder } from '../public_path_placeholder';
* - cached hash/etag is based on the file on disk, but modified
* by the public path so that individual public paths have
* different etags, but can share a cache
*
* @param {Object} options
* @property {Hapi.Request} options.request
* @property {string} options.bundlesPath
* @property {string} options.publicPath
* @property {LruCache} options.fileHashCache
*/
export async function createDynamicAssetResponse(options) {
const { request, h, bundlesPath, publicPath, fileHashCache, replacePublicPath } = options;
export async function createDynamicAssetResponse({
request,
h,
bundlesPath,
publicPath,
fileHashCache,
replacePublicPath,
}: {
request: Hapi.Request;
h: Hapi.ResponseToolkit;
bundlesPath: string;
publicPath: string;
fileHashCache: FileHashCache;
replacePublicPath: boolean;
}) {
let fd: number | undefined;

let fd;
try {
const path = resolve(bundlesPath, request.params.path);

// prevent path traversal, only process paths that resolve within bundlesPath
if (!path.startsWith(bundlesPath)) {
throw Boom.forbidden(null, 'EACCES');
throw Boom.forbidden(undefined, 'EACCES');
}

// we use and manage a file descriptor mostly because
// that's what Inert does, and since we are accessing
// the file 2 or 3 times per request it seems logical
fd = await fcb(cb => open(path, 'r', cb));
fd = await asyncOpen(path, 'r');

const stat = await fcb(cb => fstat(fd, cb));
const stat = await asyncFstat(fd);
const hash = await getFileHash(fileHashCache, path, stat, fd);

const read = createReadStream(null, {
const read = Fs.createReadStream(null as any, {
fd,
start: 0,
autoClose: true,
});
fd = null; // read stream is now responsible for fd
fd = undefined; // read stream is now responsible for fd

const content = replacePublicPath ? replacePlaceholder(read, publicPath) : read;
const etag = replacePublicPath ? `${hash}-${publicPath}` : hash;
@@ -91,8 +104,8 @@ export async function createDynamicAssetResponse(options) {
} catch (error) {
if (fd) {
try {
await fcb(cb => close(fd, cb));
} catch (error) {
await asyncClose(fd);
} catch (_) {
// ignore errors from close, we already have one to report
// and it's very likely they are the same
}
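
This file swaps bluebird's fromNode helper for util.promisify and manages the file descriptor explicitly. A self-contained sketch of that pattern, assuming only Node core APIs; the helper name and its error handling are illustrative:

import Fs from 'fs';
import { promisify } from 'util';

const asyncOpen = promisify(Fs.open);
const asyncFstat = promisify(Fs.fstat);
const asyncClose = promisify(Fs.close);

async function openAsStream(path: string) {
  let fd: number | undefined;
  try {
    fd = await asyncOpen(path, 'r');
    const stat = await asyncFstat(fd);
    // passing a null path plus an fd makes the stream read from the open descriptor
    const read = Fs.createReadStream(null as any, { fd, start: 0, autoClose: true });
    fd = undefined; // the stream now owns the descriptor and will close it
    return { read, stat };
  } catch (error) {
    if (fd !== undefined) {
      // close errors are ignored so the original error is the one reported
      await asyncClose(fd).catch(() => {});
    }
    throw error;
  }
}
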
@@ -18,20 +18,17 @@
*/

import { createHash } from 'crypto';
import { createReadStream } from 'fs';
import Fs from 'fs';

import * as Rx from 'rxjs';
import { merge, mergeMap, takeUntil } from 'rxjs/operators';
import { takeUntil, map } from 'rxjs/operators';

import { FileHashCache } from './file_hash_cache';

/**
* Get the hash of a file via a file descriptor
* @param {LruCache} cache
* @param {string} path
* @param {Fs.Stat} stat
* @param {Fs.FileDescriptor} fd
* @return {Promise<string>}
*/
export async function getFileHash(cache, path, stat, fd) {
export async function getFileHash(cache: FileHashCache, path: string, stat: Fs.Stats, fd: number) {
const key = `${path}:${stat.ino}:${stat.size}:${stat.mtime.getTime()}`;

const cached = cache.get(key);
@@ -40,17 +37,21 @@ export async function getFileHash(cache, path, stat, fd) {
}

const hash = createHash('sha1');
const read = createReadStream(null, {
const read = Fs.createReadStream(null as any, {
fd,
start: 0,
autoClose: false,
});

const promise = Rx.fromEvent(read, 'data')
.pipe(
merge(Rx.fromEvent(read, 'error').pipe(mergeMap(Rx.throwError))),
takeUntil(Rx.fromEvent(read, 'end'))
const promise = Rx.merge(
Rx.fromEvent<Buffer>(read, 'data'),
Rx.fromEvent<Error>(read, 'error').pipe(
map(error => {
throw error;
})
)
)
.pipe(takeUntil(Rx.fromEvent(read, 'end')))
.forEach(chunk => hash.update(chunk))
.then(() => hash.digest('hex'))
.catch(error => {
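
The RxJS rework above replaces the pipeable merge/mergeMap operators with a static Rx.merge plus a map that rethrows stream errors, so the returned promise rejects if the stream errors. A self-contained sketch of the same approach for hashing a readable stream; the helper name and its path-based signature are assumptions for illustration:

import { createHash } from 'crypto';
import Fs from 'fs';
import * as Rx from 'rxjs';
import { map, takeUntil } from 'rxjs/operators';

function hashFileStream(path: string): Promise<string> {
  const hash = createHash('sha1');
  const read = Fs.createReadStream(path);

  return Rx.merge(
    Rx.fromEvent<Buffer>(read, 'data'),
    Rx.fromEvent<Error>(read, 'error').pipe(
      map(error => {
        // rethrow so the merged observable, and the returned promise, reject
        throw error;
      })
    )
  )
    .pipe(takeUntil(Rx.fromEvent(read, 'end')))
    .forEach(chunk => hash.update(chunk))
    .then(() => hash.digest('hex'));
}
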
36 changes: 36 additions & 0 deletions src/optimize/bundles_route/file_hash_cache.ts
@@ -0,0 +1,36 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import LruCache from 'lru-cache';

export class FileHashCache {
private lru = new LruCache<string, Promise<string>>(100);

get(key: string) {
return this.lru.get(key);
}

set(key: string, value: Promise<string>) {
this.lru.set(key, value);
}

del(key: string) {
this.lru.del(key);
}
}
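
A note on how this cache is meant to be used (mirroring getFileHash above): the stored value is the promise of a hash, not the hash itself, so concurrent requests for the same file await a single in-flight computation. A sketch, where computeHash is a hypothetical stand-in for the real hashing code:

import Fs from 'fs';

async function getCachedHash(
  cache: FileHashCache,
  path: string,
  stat: Fs.Stats,
  computeHash: () => Promise<string>
) {
  const key = `${path}:${stat.ino}:${stat.size}:${stat.mtime.getTime()}`;

  const cached = cache.get(key);
  if (cached) {
    return await cached;
  }

  const promise = computeHash().catch(error => {
    // evict failed computations so later requests can retry
    cache.del(key);
    throw error;
  });

  cache.set(key, promise);
  return await promise;
}
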
File renamed without changes.
@@ -17,15 +17,15 @@
* under the License.
*/

export function createProxyBundlesRoute({ host, port }) {
export function createProxyBundlesRoute({ host, port }: { host: string; port: number }) {
return [
buildProxyRouteForBundles('/bundles/', host, port),
buildProxyRouteForBundles('/built_assets/dlls/', host, port),
buildProxyRouteForBundles('/built_assets/css/', host, port),
];
}

function buildProxyRouteForBundles(routePath, host, port) {
function buildProxyRouteForBundles(routePath: string, host: string, port: number) {
return {
path: `${routePath}{path*}`,
method: 'GET',
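
The proxy handler body itself is not shown in this capture. As an illustration only, a route like this is typically wired to Hapi's h2o2 proxy handler; the options below are assumptions, not the commit's code:

function exampleProxyRoute(routePath: string, host: string, port: number) {
  return {
    method: 'GET',
    path: `${routePath}{path*}`,
    handler: {
      // h2o2-style proxy configuration: forward the request to the optimizer server
      proxy: {
        host,
        port,
        passThrough: true,
      },
    },
  };
}
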
71 changes: 2 additions & 69 deletions src/optimize/index.js
@@ -17,72 +17,5 @@
* under the License.
*/

import FsOptimizer from './fs_optimizer';
import { createBundlesRoute } from './bundles_route';
import { DllCompiler } from './dynamic_dll_plugin';
import { fromRoot } from '../core/server/utils';
import { getNpUiPluginPublicDirs } from './np_ui_plugin_public_dirs';

export default async (kbnServer, server, config) => {
if (!config.get('optimize.enabled')) return;

// the watch optimizer sets up two threads, one is the server listening
// on 5601 and the other is a server listening on 5602 that builds the
// bundles in a "middleware" style.
//
// the server listening on 5601 may be restarted a number of times, depending
// on the watch setup managed by the cli. It proxies all bundles/* and built_assets/dlls/*
// requests to the other server. The server on 5602 is long running, in order
// to prevent complete rebuilds of the optimize content.
const watch = config.get('optimize.watch');
if (watch) {
return await kbnServer.mixin(require('./watch/watch'));
}

const { uiBundles } = kbnServer;
server.route(
createBundlesRoute({
regularBundlesPath: uiBundles.getWorkingDir(),
dllBundlesPath: DllCompiler.getRawDllConfig().outputPath,
basePublicPath: config.get('server.basePath'),
builtCssPath: fromRoot('built_assets/css'),
npUiPluginPublicDirs: getNpUiPluginPublicDirs(kbnServer),
})
);

// in prod, only bundle when something is missing or invalid
const reuseCache = config.get('optimize.useBundleCache')
? await uiBundles.areAllBundleCachesValid()
: false;

// we might not have any work to do
if (reuseCache) {
server.log(['debug', 'optimize'], `All bundles are cached and ready to go!`);
return;
}

await uiBundles.resetBundleDir();

// only require the FsOptimizer when we need to
const optimizer = new FsOptimizer({
logWithMetadata: (tags, message, metadata) => server.logWithMetadata(tags, message, metadata),
uiBundles,
profile: config.get('optimize.profile'),
sourceMaps: config.get('optimize.sourceMaps'),
workers: config.get('optimize.workers'),
});

server.log(
['info', 'optimize'],
`Optimizing and caching ${uiBundles.getDescription()}. This may take a few minutes`
);

const start = Date.now();
await optimizer.run();
const seconds = ((Date.now() - start) / 1000).toFixed(2);

server.log(
['info', 'optimize'],
`Optimization of ${uiBundles.getDescription()} complete in ${seconds} seconds`
);
};
import { optimizeMixin } from './optimize_mixin';
export default optimizeMixin;
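
The new optimize_mixin.ts did not render in this capture. For illustration only, a typed version of the removed mixin above would look roughly like this; the parameter types are assumptions, not the commit's code:

import Hapi from 'hapi';
import KbnServer from '../legacy/server/kbn_server';

export const optimizeMixin = async (
  kbnServer: KbnServer,
  server: Hapi.Server,
  config: { get(key: string): any }
) => {
  if (!config.get('optimize.enabled')) return;
  // ...same logic as the removed src/optimize/index.js body above
};
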
@@ -17,7 +17,14 @@
* under the License.
*/

export function getNpUiPluginPublicDirs(kbnServer) {
import KbnServer from '../legacy/server/kbn_server';

export type NpUiPluginPublicDirs = Array<{
id: string;
path: string;
}>;

export function getNpUiPluginPublicDirs(kbnServer: KbnServer): NpUiPluginPublicDirs {
return Array.from(kbnServer.newPlatform.__internals.uiPlugins.internal.entries()).map(
([id, { publicTargetDir }]) => ({
id,
@@ -26,17 +33,17 @@ export function getNpUiPluginPublicDirs(kbnServer) {
);
}

export function isNpUiPluginPublicDirs(something) {
export function isNpUiPluginPublicDirs(x: any): x is NpUiPluginPublicDirs {
return (
Array.isArray(something) &&
something.every(
Array.isArray(x) &&
x.every(
s => typeof s === 'object' && s && typeof s.id === 'string' && typeof s.path === 'string'
)
);
}

export function assertIsNpUiPluginPublicDirs(something) {
if (!isNpUiPluginPublicDirs(something)) {
export function assertIsNpUiPluginPublicDirs(x: any): asserts x is NpUiPluginPublicDirs {
if (!isNpUiPluginPublicDirs(x)) {
throw new TypeError(
'npUiPluginPublicDirs must be an array of objects with string `id` and `path` properties'
);
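
The asserts return type above is a TypeScript assertion signature: after the call, the compiler narrows the argument's type. A short sketch of what that buys callers, where loadConfigValue is a hypothetical source of untyped data:

declare function loadConfigValue(key: string): unknown;

const raw = loadConfigValue('npUiPluginPublicDirs');

assertIsNpUiPluginPublicDirs(raw);

// from here on, raw is narrowed to NpUiPluginPublicDirs, so this is type-safe
for (const { id, path } of raw) {
  console.log(`${id} -> ${path}`);
}
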