Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Flight] model halted references explicitly #30731

Merged
merged 1 commit into from
Aug 19, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions packages/react-client/src/ReactFlightClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ import {
enableRefAsProp,
enableFlightReadableStream,
enableOwnerStacks,
enableHalt,
} from 'shared/ReactFeatureFlags';

import {
Expand Down Expand Up @@ -1995,6 +1996,20 @@ function resolvePostponeDev(
}
}

function resolveBlocked(response: Response, id: number): void {
const chunks = response._chunks;
const chunk = chunks.get(id);
if (!chunk) {
chunks.set(id, createBlockedChunk(response));
} else if (chunk.status === PENDING) {
    // This chunk was constructed via other means but it is actually a blocked chunk
// so we update it here. We check the status because it might have been aborted
// before we attempted to resolve it.
const blockedChunk: BlockedChunk<mixed> = (chunk: any);
blockedChunk.status = BLOCKED;
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think technically maybe you need to check if it is pending first. Because it could've maybe been rejected already if we aborted between the reference and this row.

}
}

function resolveHint<Code: HintCode>(
response: Response,
code: Code,
Expand Down Expand Up @@ -2621,6 +2636,13 @@ function processFullStringRow(
}
}
// Fallthrough
case 35 /* "#" */: {
if (enableHalt) {
resolveBlocked(response, id);
return;
}
}
// Fallthrough
default: /* """ "{" "[" "t" "f" "n" "0" - "9" */ {
// We assume anything else is JSON.
resolveModel(response, id, row);
Expand Down Expand Up @@ -2677,6 +2699,7 @@ export function processBinaryChunk(
i++;
} else if (
(resolvedRowTag > 64 && resolvedRowTag < 91) /* "A"-"Z" */ ||
resolvedRowTag === 35 /* "#" */ ||
resolvedRowTag === 114 /* "r" */ ||
resolvedRowTag === 120 /* "x" */
) {
Expand Down
101 changes: 101 additions & 0 deletions packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -2856,4 +2856,105 @@ describe('ReactFlightDOM', () => {
jest.advanceTimersByTime('100');
expect(await race).toBe('timeout');
});

  // @gate enableHalt
  it('will halt unfinished chunks inside Suspense when aborting a prerender', async () => {
    const controller = new AbortController();
    // Aborts the prerender from inside the render itself, so its enclosing
    // Suspense boundary can never finish.
    function ComponentThatAborts() {
      controller.abort();
      return null;
    }

    // Yields once before resolving, so it is still pending at the moment
    // ComponentThatAborts fires the abort.
    async function Greeting() {
      await 1;
      return 'hello world';
    }

    async function Farewell() {
      return 'goodbye world';
    }

    async function Wrapper() {
      return (
        <Suspense fallback="loading too...">
          <ComponentThatAborts />
        </Suspense>
      );
    }

    // Three sibling Suspense boundaries; all three are expected to end up
    // showing their fallbacks once the halted stream is consumed and aborted.
    function App() {
      return (
        <div>
          <Suspense fallback="loading...">
            <Greeting />
          </Suspense>
          <Wrapper />
          <Suspense fallback="loading three...">
            <Farewell />
          </Suspense>
        </div>
      );
    }

    const errors = [];
    const {pendingResult} = await serverAct(() => {
      return {
        pendingResult: ReactServerDOMStaticServer.prerenderToNodeStream(
          <App />,
          {},
          {
            onError(x) {
              errors.push(x);
            },
            signal: controller.signal,
          },
        ),
      };
    });

    // Abort again outside the render; with enableHalt the still-pending
    // chunks are emitted as blocked ('#') rows rather than error rows.
    controller.abort();

    const {prelude} = await pendingResult;
    // Halting a prerender is not reported through onError on the server.
    expect(errors).toEqual([]);

    // Consume the halted prelude on the client; the blocked chunks will
    // intentionally never resolve.
    const response = ReactServerDOMClient.createFromReadableStream(
      Readable.toWeb(prelude),
    );

    const {writable: fizzWritable, readable: fizzReadable} = getTestStream();

    function ClientApp() {
      return use(response);
    }
    let abortFizz;
    await serverAct(async () => {
      const {pipe, abort} = ReactDOMFizzServer.renderToPipeableStream(
        React.createElement(ClientApp),
        {
          onError(error, errorInfo) {
            errors.push(error);
          },
        },
      );
      pipe(fizzWritable);
      abortFizz = abort;
    });

    // Aborting the Fizz render rejects the still-blocked chunks, erroring
    // each of the three boundaries with the abort reason.
    await serverAct(() => {
      abortFizz('boom');
    });

    // one error per boundary
    expect(errors).toEqual(['boom', 'boom', 'boom']);

    const container = document.createElement('div');
    await readInto(container, fizzReadable);
    expect(getMeaningfulChildren(container)).toEqual(
      <div>
        {'loading...'}
        {'loading too...'}
        {'loading three...'}
      </div>,
    );
  });
});
49 changes: 25 additions & 24 deletions packages/react-server/src/ReactFlightServer.js
Original file line number Diff line number Diff line change
Expand Up @@ -615,7 +615,7 @@ function serializeThenable(
request.abortableTasks.delete(newTask);
newTask.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, newTask.id, reusableInfinitePromiseModel);
emitBlockedChunk(request, newTask.id);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -1818,7 +1818,6 @@ function serializeLazyID(id: number): string {
function serializeInfinitePromise(): string {
return '$@';
}
const reusableInfinitePromiseModel = stringify(serializeInfinitePromise());

function serializePromiseID(id: number): string {
return '$@' + id.toString(16);
Expand Down Expand Up @@ -2176,9 +2175,6 @@ function renderModel(
if (typeof x.then === 'function') {
if (request.status === ABORTING) {
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
return serializeInfinitePromise();
}
const errorId: number = (request.fatalError: any);
if (wasReactNode) {
return serializeLazyID(errorId);
Expand Down Expand Up @@ -2232,9 +2228,6 @@ function renderModel(

if (request.status === ABORTING) {
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
return serializeInfinitePromise();
}
const errorId: number = (request.fatalError: any);
if (wasReactNode) {
return serializeLazyID(errorId);
Expand Down Expand Up @@ -2976,6 +2969,12 @@ function emitPostponeChunk(
request.completedErrorChunks.push(processedChunk);
}

// Emits a row tagged '#' for the given chunk id. On the client this tag is
// handled by resolveBlocked, producing a chunk that intentionally never
// resolves — used when a request is halted (enableHalt) instead of errored.
function emitBlockedChunk(request: Request, id: number): void {
  const row = serializeRowHeader('#', id) + '\n';
  const processedChunk = stringToChunk(row);
  // NOTE(review): queued on completedErrorChunks even though this is not an
  // error row — presumably to reuse the error-chunk flush ordering; confirm.
  request.completedErrorChunks.push(processedChunk);
}

function emitErrorChunk(
request: Request,
id: number,
Expand Down Expand Up @@ -3725,7 +3724,7 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
emitBlockedChunk(request, task.id);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -3753,7 +3752,7 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
emitBlockedChunk(request, task.id);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -3798,6 +3797,7 @@ function performWork(request: Request): void {
currentRequest = request;
prepareToUseHooksForRequest(request);

const hadAbortableTasks = request.abortableTasks.size > 0;
try {
const pingedTasks = request.pingedTasks;
request.pingedTasks = [];
Expand All @@ -3808,10 +3808,11 @@ function performWork(request: Request): void {
if (request.destination !== null) {
flushCompletedChunks(request, request.destination);
}
if (request.abortableTasks.size === 0) {
// we're done rendering
const onAllReady = request.onAllReady;
onAllReady();
if (hadAbortableTasks && request.abortableTasks.size === 0) {
// We can ping after completing but if this happens there already
// wouldn't be any abortable tasks. So we only call allReady after
// the work which actually completed the last pending task
allReady(request);
}
} catch (error) {
logRecoverableError(request, error, null);
Expand All @@ -3836,15 +3837,6 @@ function abortTask(task: Task, request: Request, errorId: number): void {
request.completedErrorChunks.push(processedChunk);
}

function haltTask(task: Task, request: Request): void {
if (task.status === RENDERING) {
// This task will be aborted by the render
return;
}
task.status = ABORTED;
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
}

function flushCompletedChunks(
request: Request,
destination: Destination,
Expand Down Expand Up @@ -4023,6 +4015,7 @@ export function abort(request: Request, reason: mixed): void {
}
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
allReady(request);
}
const abortListeners = request.abortListeners;
if (abortListeners.size > 0) {
Expand Down Expand Up @@ -4078,8 +4071,11 @@ export function halt(request: Request, reason: mixed): void {
// to that row from every row that's still remaining.
if (abortableTasks.size > 0) {
request.pendingChunks++;
gnoff marked this conversation as resolved.
Show resolved Hide resolved
abortableTasks.forEach(task => haltTask(task, request));
const errorId = request.nextChunkId++;
emitBlockedChunk(request, errorId);
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
allReady(request);
}
const abortListeners = request.abortListeners;
if (abortListeners.size > 0) {
Expand All @@ -4094,3 +4090,8 @@ export function halt(request: Request, reason: mixed): void {
fatalError(request, error);
}
}

// Invokes the request's onAllReady callback. Called once no abortable tasks
// remain — after the last pending task completes (performWork) or after all
// remaining tasks are aborted/halted (abort/halt).
function allReady(request: Request) {
  const onAllReady = request.onAllReady;
  onAllReady();
}
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If we unify the rest of halt/abort, then this helper doesn't become necessary.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'll do in the next where we re-add the error and postpone logging

Loading