Skip to content

Commit

Permalink
[Flight] model halted references explicitly (#30731)
Browse files Browse the repository at this point in the history
Using infinitely suspending promises isn't right because a halted reference
would then parse as a promise, which is only appropriate if the value we're
halting at actually is a promise. Instead we need a special marker type that
says this reference will never resolve. Additionally, the Flight client must
not error halted references when the stream closes, because they would
otherwise appear as errors.

addresses:
#30705 (comment)
  • Loading branch information
gnoff authored Aug 19, 2024
1 parent d2413bf commit 9d082b5
Show file tree
Hide file tree
Showing 3 changed files with 149 additions and 24 deletions.
23 changes: 23 additions & 0 deletions packages/react-client/src/ReactFlightClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ import {
enableRefAsProp,
enableFlightReadableStream,
enableOwnerStacks,
enableHalt,
} from 'shared/ReactFeatureFlags';

import {
Expand Down Expand Up @@ -1986,6 +1987,20 @@ function resolvePostponeDev(
}
}

// Marks the row with this id as permanently blocked: the server emitted a "#"
// row signaling that this reference will never resolve. Unlike a PENDING
// chunk, a BLOCKED chunk must not be rejected when the stream closes.
function resolveBlocked(response: Response, id: number): void {
const chunks = response._chunks;
const chunk = chunks.get(id);
if (!chunk) {
// No chunk was created for this id yet; create it directly in the
// blocked state.
chunks.set(id, createBlockedChunk(response));
} else if (chunk.status === PENDING) {
// This chunk was constructed via other means but it is actually a blocked
// chunk so we update it here. We check the status because it might have
// been aborted before we attempted to resolve it, in which case we leave
// it alone.
const blockedChunk: BlockedChunk<mixed> = (chunk: any);
blockedChunk.status = BLOCKED;
}
}

function resolveHint<Code: HintCode>(
response: Response,
code: Code,
Expand Down Expand Up @@ -2612,6 +2627,13 @@ function processFullStringRow(
}
}
// Fallthrough
case 35 /* "#" */: {
if (enableHalt) {
resolveBlocked(response, id);
return;
}
}
// Fallthrough
default: /* """ "{" "[" "t" "f" "n" "0" - "9" */ {
// We assume anything else is JSON.
resolveModel(response, id, row);
Expand Down Expand Up @@ -2668,6 +2690,7 @@ export function processBinaryChunk(
i++;
} else if (
(resolvedRowTag > 64 && resolvedRowTag < 91) /* "A"-"Z" */ ||
resolvedRowTag === 35 /* "#" */ ||
resolvedRowTag === 114 /* "r" */ ||
resolvedRowTag === 120 /* "x" */
) {
Expand Down
101 changes: 101 additions & 0 deletions packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -2856,4 +2856,105 @@ describe('ReactFlightDOM', () => {
jest.advanceTimersByTime('100');
expect(await race).toBe('timeout');
});

// @gate enableHalt
it('will halt unfinished chunks inside Suspense when aborting a prerender', async () => {
const controller = new AbortController();
// Aborts the prerender from inside a render, so its enclosing Suspense
// boundary is still unfinished at abort time.
function ComponentThatAborts() {
controller.abort();
return null;
}

// Suspends once (await) before resolving; unfinished when the abort fires.
async function Greeting() {
await 1;
return 'hello world';
}

async function Farewell() {
return 'goodbye world';
}

async function Wrapper() {
return (
<Suspense fallback="loading too...">
<ComponentThatAborts />
</Suspense>
);
}

// Three Suspense boundaries, all left unfinished by the abort.
function App() {
return (
<div>
<Suspense fallback="loading...">
<Greeting />
</Suspense>
<Wrapper />
<Suspense fallback="loading three...">
<Farewell />
</Suspense>
</div>
);
}

const errors = [];
const {pendingResult} = await serverAct(() => {
return {
pendingResult: ReactServerDOMStaticServer.prerenderToNodeStream(
<App />,
{},
{
onError(x) {
errors.push(x);
},
signal: controller.signal,
},
),
};
});

// NOTE(review): the signal already fired inside ComponentThatAborts; this
// second abort looks redundant/defensive — confirm whether it is required.
controller.abort();

const {prelude} = await pendingResult;
// Halting must not report any errors on the server side.
expect(errors).toEqual([]);

const response = ReactServerDOMClient.createFromReadableStream(
Readable.toWeb(prelude),
);

const {writable: fizzWritable, readable: fizzReadable} = getTestStream();

function ClientApp() {
return use(response);
}
let abortFizz;
await serverAct(async () => {
const {pipe, abort} = ReactDOMFizzServer.renderToPipeableStream(
React.createElement(ClientApp),
{
onError(error, errorInfo) {
errors.push(error);
},
},
);
pipe(fizzWritable);
abortFizz = abort;
});

// The halted references never resolve, so the Fizz render must be aborted
// explicitly to flush the fallbacks.
await serverAct(() => {
abortFizz('boom');
});

// one error per boundary
expect(errors).toEqual(['boom', 'boom', 'boom']);

const container = document.createElement('div');
await readInto(container, fizzReadable);
// Every boundary client-renders its fallback; none errored from the
// stream closing on a halted reference.
expect(getMeaningfulChildren(container)).toEqual(
<div>
{'loading...'}
{'loading too...'}
{'loading three...'}
</div>,
);
});
});
49 changes: 25 additions & 24 deletions packages/react-server/src/ReactFlightServer.js
Original file line number Diff line number Diff line change
Expand Up @@ -617,7 +617,7 @@ function serializeThenable(
request.abortableTasks.delete(newTask);
newTask.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, newTask.id, reusableInfinitePromiseModel);
emitBlockedChunk(request, newTask.id);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -1820,7 +1820,6 @@ function serializeLazyID(id: number): string {
function serializeInfinitePromise(): string {
return '$@';
}
const reusableInfinitePromiseModel = stringify(serializeInfinitePromise());

function serializePromiseID(id: number): string {
return '$@' + id.toString(16);
Expand Down Expand Up @@ -2208,9 +2207,6 @@ function renderModel(
if (typeof x.then === 'function') {
if (request.status === ABORTING) {
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
return serializeInfinitePromise();
}
const errorId: number = (request.fatalError: any);
if (wasReactNode) {
return serializeLazyID(errorId);
Expand Down Expand Up @@ -2264,9 +2260,6 @@ function renderModel(

if (request.status === ABORTING) {
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
return serializeInfinitePromise();
}
const errorId: number = (request.fatalError: any);
if (wasReactNode) {
return serializeLazyID(errorId);
Expand Down Expand Up @@ -3008,6 +3001,12 @@ function emitPostponeChunk(
request.completedErrorChunks.push(processedChunk);
}

function emitBlockedChunk(request: Request, id: number): void {
const row = serializeRowHeader('#', id) + '\n';
const processedChunk = stringToChunk(row);
request.completedErrorChunks.push(processedChunk);
}

function emitErrorChunk(
request: Request,
id: number,
Expand Down Expand Up @@ -3757,7 +3756,7 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
emitBlockedChunk(request, task.id);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -3785,7 +3784,7 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
emitBlockedChunk(request, task.id);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -3830,6 +3829,7 @@ function performWork(request: Request): void {
currentRequest = request;
prepareToUseHooksForRequest(request);

const hadAbortableTasks = request.abortableTasks.size > 0;
try {
const pingedTasks = request.pingedTasks;
request.pingedTasks = [];
Expand All @@ -3840,10 +3840,11 @@ function performWork(request: Request): void {
if (request.destination !== null) {
flushCompletedChunks(request, request.destination);
}
if (request.abortableTasks.size === 0) {
// we're done rendering
const onAllReady = request.onAllReady;
onAllReady();
if (hadAbortableTasks && request.abortableTasks.size === 0) {
// We can ping after completing but if this happens there already
// wouldn't be any abortable tasks. So we only call allReady after
// the work which actually completed the last pending task
allReady(request);
}
} catch (error) {
logRecoverableError(request, error, null);
Expand All @@ -3868,15 +3869,6 @@ function abortTask(task: Task, request: Request, errorId: number): void {
request.completedErrorChunks.push(processedChunk);
}

function haltTask(task: Task, request: Request): void {
if (task.status === RENDERING) {
// This task will be aborted by the render
return;
}
task.status = ABORTED;
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
}

function flushCompletedChunks(
request: Request,
destination: Destination,
Expand Down Expand Up @@ -4055,6 +4047,7 @@ export function abort(request: Request, reason: mixed): void {
}
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
allReady(request);
}
const abortListeners = request.abortListeners;
if (abortListeners.size > 0) {
Expand Down Expand Up @@ -4110,8 +4103,11 @@ export function halt(request: Request, reason: mixed): void {
// to that row from every row that's still remaining.
if (abortableTasks.size > 0) {
request.pendingChunks++;
abortableTasks.forEach(task => haltTask(task, request));
const errorId = request.nextChunkId++;
emitBlockedChunk(request, errorId);
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
allReady(request);
}
const abortListeners = request.abortListeners;
if (abortListeners.size > 0) {
Expand All @@ -4126,3 +4122,8 @@ export function halt(request: Request, reason: mixed): void {
fatalError(request, error);
}
}

function allReady(request: Request) {
const onAllReady = request.onAllReady;
onAllReady();
}

0 comments on commit 9d082b5

Please sign in to comment.