fix(vercel): Update request-transforms.ts (#5514)
* fix(vercel): Update set-cookie header handler

* Update from SvelteKit adapter

* Updated changeset
JuanM04 authored Dec 2, 2022
1 parent 3c44033 commit a1885ea
Showing 4 changed files with 152 additions and 57 deletions.
5 changes: 5 additions & 0 deletions .changeset/plenty-tigers-pretend.md
@@ -0,0 +1,5 @@
+---
+'@astrojs/vercel': patch
+---
+
+Updated request-transform methods
4 changes: 3 additions & 1 deletion packages/integrations/vercel/package.json
@@ -46,9 +46,11 @@
   "dependencies": {
     "@astrojs/webapi": "^1.1.1",
     "@vercel/nft": "^0.22.1",
-    "fast-glob": "^3.2.11"
+    "fast-glob": "^3.2.11",
+    "set-cookie-parser": "^2.5.1"
   },
   "devDependencies": {
+    "@types/set-cookie-parser": "^2.4.2",
     "astro": "workspace:*",
     "astro-scripts": "workspace:*",
     "chai": "^4.3.6",
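The new set-cookie-parser dependency exists to undo the comma-joining that the WHATWG Headers API applies to repeated Set-Cookie values. A minimal sketch of the problem, using illustrative cookie values that are not part of this commit (runs on Node 18+, where Headers is global):

// Headers.get('set-cookie') folds every cookie into one comma-joined string;
// a naive split on ',' would break cookies whose Expires attribute itself
// contains a comma. splitCookiesString splits the combined header safely.
import { splitCookiesString } from 'set-cookie-parser';

const headers = new Headers();
headers.append('set-cookie', 'session=abc; Path=/; Expires=Wed, 01 Nov 2023 07:28:00 GMT');
headers.append('set-cookie', 'theme=dark; Path=/');

const combined = headers.get('set-cookie')!; // one string containing both cookies
const cookies = splitCookiesString(combined); // ['session=abc; ...', 'theme=dark; Path=/']
console.log(cookies.length); // 2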
189 changes: 134 additions & 55 deletions packages/integrations/vercel/src/serverless/request-transform.ts
@@ -1,64 +1,104 @@
 import type { App } from 'astro/app';
 import type { IncomingMessage, ServerResponse } from 'node:http';
-import { Readable } from 'node:stream';
+import { splitCookiesString } from 'set-cookie-parser';

 const clientAddressSymbol = Symbol.for('astro.clientAddress');

 /*
   Credits to the SvelteKit team
-	https://github.com/sveltejs/kit/blob/69913e9fda054fa6a62a80e2bb4ee7dca1005796/packages/kit/src/node.js
+	https://github.com/sveltejs/kit/blob/dd380b38c322272b414a7ec3ac2911f2db353f5c/packages/kit/src/exports/node/index.js
 */

-function get_raw_body(req: IncomingMessage) {
-	return new Promise<Uint8Array | null>((fulfil, reject) => {
-		const h = req.headers;
+function get_raw_body(req: IncomingMessage, body_size_limit?: number): ReadableStream | null {
+	const h = req.headers;

-		if (!h['content-type']) {
-			return fulfil(null);
-		}
+	if (!h['content-type']) {
+		return null;
+	}

-		req.on('error', reject);
+	const content_length = Number(h['content-length']);

+	// check if no request body
+	if (
+		(req.httpVersionMajor === 1 && isNaN(content_length) && h['transfer-encoding'] == null) ||
+		content_length === 0
+	) {
+		return null;
+	}

-		const length = Number(h['content-length']);
+	let length = content_length;

-		// https://github.com/jshttp/type-is/blob/c1f4388c71c8a01f79934e68f630ca4a15fffcd6/index.js#L81-L95
-		if (isNaN(length) && h['transfer-encoding'] == null) {
-			return fulfil(null);
+	if (body_size_limit) {
+		if (!length) {
+			length = body_size_limit;
+		} else if (length > body_size_limit) {
+			throw new Error(
+				`Received content-length of ${length}, but only accept up to ${body_size_limit} bytes.`
+			);
 		}
+	}

-		let data = new Uint8Array(length || 0);
+	if (req.destroyed) {
+		const readable = new ReadableStream();
+		readable.cancel();
+		return readable;
+	}

-		if (length > 0) {
-			let offset = 0;
-			req.on('data', (chunk) => {
-				const new_len = offset + Buffer.byteLength(chunk);
+	let size = 0;
+	let cancelled = false;

-				if (new_len > length) {
-					return reject({
-						status: 413,
-						reason: 'Exceeded "Content-Length" limit',
-					});
-				}
+	return new ReadableStream({
+		start(controller) {
+			req.on('error', (error) => {
+				cancelled = true;
+				controller.error(error);
+			});

-				data.set(chunk, offset);
-				offset = new_len;
+			req.on('end', () => {
+				if (cancelled) return;
+				controller.close();
 			});
-		} else {

 			req.on('data', (chunk) => {
-				const new_data = new Uint8Array(data.length + chunk.length);
-				new_data.set(data, 0);
-				new_data.set(chunk, data.length);
-				data = new_data;
+				if (cancelled) return;

+				size += chunk.length;
+				if (size > length) {
+					cancelled = true;
+					controller.error(
+						new Error(
+							`request body size exceeded ${
+								content_length ? "'content-length'" : 'BODY_SIZE_LIMIT'
+							} of ${length}`
+						)
+					);
+					return;
+				}

+				controller.enqueue(chunk);

+				if (controller.desiredSize === null || controller.desiredSize <= 0) {
+					req.pause();
+				}
 			});
-		}
+		},

+		pull() {
+			req.resume();
+		},

-		req.on('end', () => {
-			fulfil(data);
-		});
+		cancel(reason) {
+			cancelled = true;
+			req.destroy(reason);
+		},
 	});
 }

-export async function getRequest(base: string, req: IncomingMessage): Promise<Request> {
+export async function getRequest(
+	base: string,
+	req: IncomingMessage,
+	bodySizeLimit?: number
+): Promise<Request> {
 	let headers = req.headers as Record<string, string>;
 	if (req.httpVersionMajor === 2) {
 		// we need to strip out the HTTP/2 pseudo-headers because node-fetch's
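Aside on the hunk above: get_raw_body no longer buffers the body into a Uint8Array but exposes it as a ReadableStream, and getRequest gains an optional bodySizeLimit. A hedged usage sketch follows (the base URL, relative import path, and toWebRequest wrapper are assumptions, not part of this commit): an oversized content-length makes getRequest reject up front, while a chunked body that outgrows the limit errors later, when the stream is read. The second hunk of the same file continues after the sketch.

import type { IncomingMessage, ServerResponse } from 'node:http';
import { getRequest } from './request-transform';

const BODY_SIZE_LIMIT = 1024 * 1024; // 1 MiB, an arbitrary example value

async function toWebRequest(req: IncomingMessage, res: ServerResponse): Promise<Request | null> {
	try {
		// A declared content-length above the limit makes get_raw_body throw inside getRequest.
		return await getRequest(`https://${req.headers.host}`, req, BODY_SIZE_LIMIT);
	} catch (err) {
		res.statusCode = 413; // Payload Too Large
		res.end(err instanceof Error ? err.message : 'Request body too large');
		return null;
	}
}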
@@ -72,40 +112,79 @@ export async function getRequest(base: string, req: IncomingMessage): Promise<Request> {
 	const request = new Request(base + req.url, {
 		method: req.method,
 		headers,
-		body: await get_raw_body(req), // TODO stream rather than buffer
+		body: get_raw_body(req, bodySizeLimit),
 	});
 	Reflect.set(request, clientAddressSymbol, headers['x-forwarded-for']);
 	return request;
 }

-export async function setResponse(
-	app: App,
-	res: ServerResponse,
-	response: Response
-): Promise<void> {
+export async function setResponse(app: App, res: ServerResponse, response: Response) {
 	const headers = Object.fromEntries(response.headers);
+	let cookies: string[] = [];

 	if (response.headers.has('set-cookie')) {
-		// @ts-expect-error (headers.raw() is non-standard)
-		headers['set-cookie'] = response.headers.raw()['set-cookie'];
+		const header = response.headers.get('set-cookie')!;
+		const split = splitCookiesString(header);
+		cookies = split;
 	}

 	if (app.setCookieHeaders) {
-		const setCookieHeaders: Array<string> = Array.from(app.setCookieHeaders(response));
-		if (setCookieHeaders.length) {
-			res.setHeader('Set-Cookie', setCookieHeaders);
-		}
+		const setCookieHeaders = Array.from(app.setCookieHeaders(response));
+		cookies.push(...setCookieHeaders);
 	}

-	res.writeHead(response.status, headers);
+	res.writeHead(response.status, { ...headers, 'set-cookie': cookies });

-	if (response.body instanceof Readable) {
-		response.body.pipe(res);
-	} else {
-		if (response.body) {
-			res.write(await response.arrayBuffer());
-		}
+	if (!response.body) {
 		res.end();
+		return;
+	}

+	if (response.body.locked) {
+		res.write(
+			'Fatal error: Response body is locked. ' +
+				`This can happen when the response was already read (for example through 'response.json()' or 'response.text()').`
+		);
+		res.end();
+		return;
+	}

+	const reader = response.body.getReader();

+	if (res.destroyed) {
+		reader.cancel();
+		return;
+	}

+	const cancel = (error?: Error) => {
+		res.off('close', cancel);
+		res.off('error', cancel);

+		// If the reader has already been interrupted with an error earlier,
+		// then it will appear here, it is useless, but it needs to be catch.
+		reader.cancel(error).catch(() => {});
+		if (error) res.destroy(error);
+	};

+	res.on('close', cancel);
+	res.on('error', cancel);

+	next();
+	async function next() {
+		try {
+			for (;;) {
+				const { done, value } = await reader.read();

+				if (done) break;

+				if (!res.write(value)) {
+					res.once('drain', next);
+					return;
+				}
+			}
+			res.end();
+		} catch (error) {
+			cancel(error instanceof Error ? error : new Error(String(error)));
+		}
 	}
 }
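For context, a hedged sketch of how these two helpers are typically wired together in a Node entrypoint. The createHandler wrapper, base URL, and error handling are assumptions rather than code from this commit; only getRequest, setResponse, and App#render are APIs taken from the sources above.

import { createServer } from 'node:http';
import type { App } from 'astro/app';
import { getRequest, setResponse } from './request-transform';

export function createHandler(app: App) {
	return createServer(async (req, res) => {
		try {
			// Convert the Node request into a web Request (its body is now streamed),
			// let the Astro app render it, then stream the Response back out,
			// with Set-Cookie headers split into individual values.
			const request = await getRequest(`https://${req.headers.host}`, req);
			const response = await app.render(request);
			await setResponse(app, res, response);
		} catch (err) {
			res.statusCode = 500;
			res.end(err instanceof Error ? err.message : 'Internal Server Error');
		}
	});
}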
11 changes: 10 additions & 1 deletion pnpm-lock.yaml

Some generated files are not rendered by default.
