Skip to content

Commit

Permalink
[Segment Cache] Remove task map
Browse files Browse the repository at this point in the history
Upon reconsideration, I don't think we need to store the tasks in a map.
I had thought we would use this to dedupe multiple prefetch tasks to the
same URL, but we already dedupe by URL when accessing the route entry.
In the future, there will also be multiple kinds of prefetch tasks, so
it will be totally possible for the same URL to have multiple tasks
associated with it; for example, a prefetch initiated by a <Link>
entering the viewport, versus a prefetch initiated by an imperative
prefetch() call. The <Link> prefetch should be cancelable, but the
imperative prefetch should not. So it just makes more sense for them to
be separate tasks.
  • Loading branch information
acdlite committed Dec 2, 2024
1 parent 7c24763 commit f42434f
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 55 deletions.
9 changes: 5 additions & 4 deletions packages/next/src/client/components/segment-cache/cache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,7 @@ export function requestRouteCacheEntryFromCache(
couldBeIntercepted: false,
}
const key = task.key
const requestPromise = fetchRouteOnCacheMiss(pendingEntry, key)
const requestPromise = fetchRouteOnCacheMiss(pendingEntry, task)
trackPrefetchRequestBandwidth(requestPromise)
routeCache.set(key.id, pendingEntry)
return pendingEntry
Expand Down Expand Up @@ -322,12 +322,13 @@ function rejectSegmentCacheEntry(

async function fetchRouteOnCacheMiss(
entry: PendingRouteCacheEntry,
key: RouteCacheKey
task: PrefetchTask
): Promise<void> {
// This function is allowed to use async/await because it contains the actual
// fetch that gets issued on a cache miss. Notice though that it does not
// return anything; it writes the result to the cache entry directly, then
// pings the scheduler to unblock the corresponding prefetch task.
const key = task.key
const href = key.href
try {
const response = await fetchSegmentPrefetchResponse(href, '/_tree')
Expand Down Expand Up @@ -373,13 +374,13 @@ async function fetchRouteOnCacheMiss(
// Either the connection itself failed, or something bad happened while
// decoding the response.
rejectRouteCacheEntry(entry, Date.now() + 10 * 1000)
pingPrefetchTask(key)
pingPrefetchTask(task)
} finally {
// The request for the route tree was blocking this task from prefetching
// the segments. Now that the route tree is ready, notify the scheduler to
// unblock the prefetch task. We do this even if the request failed, so the
// scheduler can dispose of the task.
pingPrefetchTask(key)
pingPrefetchTask(task)
}
}

Expand Down
73 changes: 22 additions & 51 deletions packages/next/src/client/components/segment-cache/scheduler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import {
type FulfilledRouteCacheEntry,
type RouteCacheEntry,
} from './cache'
import type { RouteCacheKey, RouteCacheKeyId } from './cache-key'
import type { RouteCacheKey } from './cache-key'

const scheduleMicrotask =
typeof queueMicrotask === 'function'
Expand Down Expand Up @@ -50,9 +50,9 @@ export type PrefetchTask = {
* True if the prefetch is blocked by network data. We remove tasks from the
* queue once they are blocked, and add them back when they receive data.
*
* isBlocked also indicates whether the task is currently in the queue;
* tasks are removed from the queue (but not the map) when they are blocked.
* Use this to avoid queueing the same task multiple times.
* isBlocked also indicates whether the task is currently in the queue; tasks
* are removed from the queue when they are blocked. Use this to avoid
* queueing the same task multiple times.
*/
isBlocked: boolean

Expand Down Expand Up @@ -88,7 +88,6 @@ const enum PrefetchTaskExitStatus {
}

const taskHeap: Array<PrefetchTask> = []
const taskMap: Map<RouteCacheKeyId, PrefetchTask> = new Map()

// This is intentionally low so that when a navigation happens, the browser's
// internal network queue is not already saturated with prefetch requests.
Expand All @@ -108,24 +107,6 @@ let didScheduleMicrotask = false
* @param key The RouteCacheKey to prefetch.
*/
export function schedulePrefetchTask(key: RouteCacheKey): void {
const existingTask = taskMap.get(key.id)
if (existingTask !== undefined) {
// A task for this URL already exists, but we should bump it to the top of
// the queue by assigning a new sortId. However, if it already has the
// highest sortId that's been assigned, then we can bail out.
//
// Note that this check is not equivalent to checking if it's at the front
// of the queue, because there could be tasks that have a higher sortId but
// are not currently queued (because they are blocked).
if (existingTask.sortId === sortIdCounter) {
// No-op
return
}
existingTask.sortId = sortIdCounter++
heapSift(taskHeap, existingTask)
return
}

// Spawn a new prefetch task
const task: PrefetchTask = {
key,
Expand All @@ -134,7 +115,6 @@ export function schedulePrefetchTask(key: RouteCacheKey): void {
_heapIndex: -1,
}
heapPush(taskHeap, task)
taskMap.set(key.id, task)

// Schedule an async task to process the queue.
//
Expand Down Expand Up @@ -200,15 +180,8 @@ function onPrefetchRequestCompletion(): void {
* prefetch. The corresponding task will be added back to the queue (unless the
* task has been canceled in the meantime).
*/
export function pingPrefetchTask(key: RouteCacheKey) {
export function pingPrefetchTask(task: PrefetchTask) {
// "Ping" a prefetch that's already in progress to notify it of new data.
const task = taskMap.get(key.id)
if (task === undefined) {
// There's no matching prefetch task, which means it must have either
// already completed or been canceled. This can also happen if an entry is
// read from the cache outside the context of a prefetch.
return
}
if (!task.isBlocked) {
// Prefetch is already queued.
return
Expand Down Expand Up @@ -246,11 +219,7 @@ function processQueueInMicrotask() {
task = heapPop(taskHeap)
continue
case PrefetchTaskExitStatus.Done:
// The prefetch is complete. Remove the task from the map so it can be
// garbage collected.
taskMap.delete(task.key.id)

// Continue to the next task
// The prefetch is complete. Continue to the next task.
task = heapPop(taskHeap)
continue
default: {
Expand Down Expand Up @@ -364,20 +333,22 @@ function heapPop(heap: Array<PrefetchTask>): PrefetchTask | null {
return first
}

function heapSift(heap: Array<PrefetchTask>, node: PrefetchTask) {
const index = node._heapIndex
if (index !== -1) {
const parentIndex = (index - 1) >>> 1
const parent = heap[parentIndex]
if (compareQueuePriority(parent, node) > 0) {
// The parent is larger. Sift up.
heapSiftUp(heap, node, index)
} else {
// The parent is smaller (or equal). Sift down.
heapSiftDown(heap, node, index)
}
}
}
// Not currently used, but will be once we add the ability to update a
// task's priority.
// function heapSift(heap: Array<PrefetchTask>, node: PrefetchTask) {
// const index = node._heapIndex
// if (index !== -1) {
// const parentIndex = (index - 1) >>> 1
// const parent = heap[parentIndex]
// if (compareQueuePriority(parent, node) > 0) {
// // The parent is larger. Sift up.
// heapSiftUp(heap, node, index)
// } else {
// // The parent is smaller (or equal). Sift down.
// heapSiftDown(heap, node, index)
// }
// }
// }

function heapSiftUp(
heap: Array<PrefetchTask>,
Expand Down

0 comments on commit f42434f

Please sign in to comment.