Skip to content

Commit

Permalink
[Flight] Update stale blocked values in createModelResolver (facebook#28669)
Browse files Browse the repository at this point in the history

Alternative to facebook#28620.

Instead of emitting lazy references to not-yet-emitted models in the
Flight Server, this fixes the observed issue in
unstubbable/ai-rsc-test#1 by adjusting the lazy
model resolution in the Flight Client to update stale blocked root
models, before assigning them as chunk values. In addition, the element
props are not outlined anymore in the Flight Server to avoid having to
also handle their staleness in blocked elements.

Fixes facebook#28595
  • Loading branch information
unstubbable authored and AndyPengc12 committed Apr 15, 2024
1 parent 008f838 commit 084d1be
Show file tree
Hide file tree
Showing 3 changed files with 128 additions and 13 deletions.
7 changes: 7 additions & 0 deletions packages/react-client/src/ReactFlightClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -584,6 +584,13 @@ function createModelResolver<T>(
}
return value => {
parentObject[key] = value;

// If this is the root object for a model reference, where `blocked.value`
// is a stale `null`, the resolved value can be used directly.
if (key === '' && blocked.value === null) {
blocked.value = value;
}

blocked.deps--;
if (blocked.deps === 0) {
if (chunk.status !== BLOCKED) {
Expand Down
104 changes: 104 additions & 0 deletions packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -812,6 +812,110 @@ describe('ReactFlightDOM', () => {
expect(reportedErrors).toEqual([]);
});

it('should handle streaming async server components', async () => {
  // Errors reported by the Flight Server's onError callback. Expected to
  // remain empty — a streaming render of this tree should not fail.
  const reportedErrors = [];

  // Async server component that consumes a linked list of chunks shaped
  // {value, done, next}. Each non-final chunk becomes a Suspense boundary
  // whose fallback is the current value, recursing on the next promise.
  const Row = async ({current, next}) => {
    const chunk = await next;

    if (chunk.done) {
      return chunk.value;
    }

    return (
      <Suspense fallback={chunk.value}>
        <Row current={chunk.value} next={chunk.next} />
      </Suspense>
    );
  };

  // Creates a promise along with its resolve/reject handles so the test can
  // settle it at a precisely controlled point.
  function createResolvablePromise() {
    let _resolve, _reject;

    const promise = new Promise((resolve, reject) => {
      _resolve = resolve;
      _reject = reject;
    });

    return {promise, resolve: _resolve, reject: _reject};
  }

  // Builds the root row: a Suspense boundary showing `initialValue` until the
  // returned `resolve` handle delivers the first stream chunk.
  function createSuspendedChunk(initialValue) {
    const {promise, resolve, reject} = createResolvablePromise();

    return {
      row: (
        <Suspense fallback={initialValue}>
          <Row current={initialValue} next={promise} />
        </Suspense>
      ),
      resolve,
      reject,
    };
  }

  // Returns an async component that suspends on an external promise and
  // renders its resolved data in a <div>, plus the resolve/reject handles.
  function makeDelayedText() {
    const {promise, resolve, reject} = createResolvablePromise();
    async function DelayedText() {
      const data = await promise;
      return <div>{data}</div>;
    }
    return [DelayedText, resolve, reject];
  }

  const [Posts, resolvePostsData] = makeDelayedText();
  const [Photos, resolvePhotosData] = makeDelayedText();
  const suspendedChunk = createSuspendedChunk(<p>loading</p>);
  const {writable, readable} = getTestStream();
  // Render on the Flight Server side and pipe the RSC payload into the
  // writable end of the test stream.
  const {pipe} = ReactServerDOMServer.renderToPipeableStream(
    suspendedChunk.row,
    webpackMap,
    {
      onError(error) {
        reportedErrors.push(error);
      },
    },
  );
  pipe(writable);
  // Consume the payload on the Flight Client side and mount it into a DOM
  // container via the React DOM client.
  const response = ReactServerDOMClient.createFromReadableStream(readable);
  const container = document.createElement('div');
  const root = ReactDOMClient.createRoot(container);

  function ClientRoot() {
    return use(response);
  }

  await act(() => {
    root.render(<ClientRoot />);
  });

  // Nothing has been resolved yet, so only the root fallback is shown.
  expect(container.innerHTML).toBe('<p>loading</p>');

  const donePromise = createResolvablePromise();

  const value = (
    <Suspense fallback={<p>loading posts and photos</p>}>
      <Posts />
      <Photos />
    </Suspense>
  );

  // Deliver both stream chunks in the same act() pass: the first chunk
  // carries `value` and points at donePromise; the second marks the stream
  // done. The delayed Posts/Photos data is still pending at this point.
  await act(async () => {
    suspendedChunk.resolve({value, done: false, next: donePromise.promise});
    donePromise.resolve({value, done: true});
  });

  // The inner Suspense boundary's fallback shows while Posts/Photos suspend.
  expect(container.innerHTML).toBe('<p>loading posts and photos</p>');

  // Now resolve the data both delayed components are awaiting.
  await act(async () => {
    await resolvePostsData('posts');
    await resolvePhotosData('photos');
  });

  // Fully resolved content, and no errors reported along the way.
  expect(container.innerHTML).toBe('<div>posts</div><div>photos</div>');
  expect(reportedErrors).toEqual([]);
});

it('should preserve state of client components on refetch', async () => {
// Client

Expand Down
30 changes: 17 additions & 13 deletions packages/react-server/src/ReactFlightServer.js
Original file line number Diff line number Diff line change
Expand Up @@ -243,11 +243,16 @@ export type ReactClientValue =

type ReactClientObject = {+[key: string]: ReactClientValue};

// task status
const PENDING = 0;
const COMPLETED = 1;
const ABORTED = 3;
const ERRORED = 4;

// object reference status
const SEEN_BUT_NOT_YET_OUTLINED = -1;
const NEVER_OUTLINED = -2;

type Task = {
id: number,
status: 0 | 1 | 3 | 4,
Expand Down Expand Up @@ -280,7 +285,7 @@ export type Request = {
writtenSymbols: Map<symbol, number>,
writtenClientReferences: Map<ClientReferenceKey, number>,
writtenServerReferences: Map<ServerReference<any>, number>,
writtenObjects: WeakMap<Reference, number>, // -1 means "seen" but not outlined.
writtenObjects: WeakMap<Reference, number>,
identifierPrefix: string,
identifierCount: number,
taintCleanupQueue: Array<string | bigint>,
Expand Down Expand Up @@ -1125,8 +1130,7 @@ function serializeMap(
const writtenObjects = request.writtenObjects;
const existingId = writtenObjects.get(key);
if (existingId === undefined) {
// Mark all object keys as seen so that they're always outlined.
writtenObjects.set(key, -1);
writtenObjects.set(key, SEEN_BUT_NOT_YET_OUTLINED);
}
}
}
Expand All @@ -1142,8 +1146,7 @@ function serializeSet(request: Request, set: Set<ReactClientValue>): string {
const writtenObjects = request.writtenObjects;
const existingId = writtenObjects.get(key);
if (existingId === undefined) {
// Mark all object keys as seen so that they're always outlined.
writtenObjects.set(key, -1);
writtenObjects.set(key, SEEN_BUT_NOT_YET_OUTLINED);
}
}
}
Expand Down Expand Up @@ -1328,8 +1331,7 @@ function renderModelDestructive(
// This is the ID we're currently emitting so we need to write it
// once but if we discover it again, we refer to it by id.
modelRoot = null;
} else if (existingId === -1) {
// Seen but not yet outlined.
} else if (existingId === SEEN_BUT_NOT_YET_OUTLINED) {
// TODO: If we throw here we can treat this as suspending which causes an outline
// but that is able to reuse the same task if we're already in one but then that
// will be a lazy future value rather than guaranteed to exist but maybe that's good.
Expand All @@ -1348,7 +1350,10 @@ function renderModelDestructive(
} else {
// This is the first time we've seen this object. We may never see it again
// so we'll inline it. Mark it as seen. If we see it again, we'll outline.
writtenObjects.set(value, -1);
writtenObjects.set(value, SEEN_BUT_NOT_YET_OUTLINED);
// The element's props are marked as "never outlined" so that they are inlined into
// the same row as the element itself.
writtenObjects.set((value: any).props, NEVER_OUTLINED);
}

const element: React$Element<any> = (value: any);
Expand Down Expand Up @@ -1477,19 +1482,18 @@ function renderModelDestructive(
// This is the ID we're currently emitting so we need to write it
// once but if we discover it again, we refer to it by id.
modelRoot = null;
} else if (existingId === -1) {
// Seen but not yet outlined.
} else if (existingId === SEEN_BUT_NOT_YET_OUTLINED) {
const newId = outlineModel(request, (value: any));
return serializeByValueID(newId);
} else {
} else if (existingId !== NEVER_OUTLINED) {
// We've already emitted this as an outlined object, so we can
// just refer to that by its existing ID.
return serializeByValueID(existingId);
}
} else {
// This is the first time we've seen this object. We may never see it again
// so we'll inline it. Mark it as seen. If we see it again, we'll outline.
writtenObjects.set(value, -1);
writtenObjects.set(value, SEEN_BUT_NOT_YET_OUTLINED);
}

if (isArray(value)) {
Expand Down Expand Up @@ -2007,7 +2011,7 @@ function renderConsoleValue(
return serializeInfinitePromise();
}

if (existingId !== undefined && existingId !== -1) {
if (existingId !== undefined && existingId >= 0) {
// We've already emitted this as a real object, so we can
// just refer to that by its existing ID.
return serializeByValueID(existingId);
Expand Down

0 comments on commit 084d1be

Please sign in to comment.