Skip to content

Commit

Permalink
[Turbopack] add more tracing to dirty flagging (#71482)
Browse files Browse the repository at this point in the history
### What?

Trace when a task is made dirty.

Add tracing to explain why tasks are marked as dirty.

Report the reason when a page is reloaded.
  • Loading branch information
sokra authored Oct 21, 2024
1 parent 24da437 commit dfc5331
Show file tree
Hide file tree
Showing 11 changed files with 187 additions and 44 deletions.
1 change: 1 addition & 0 deletions packages/next/src/server/dev/hot-reloader-turbopack.ts
Original file line number Diff line number Diff line change
Expand Up @@ -484,6 +484,7 @@ export async function createHotReloaderTurbopack(
// reload, only this client is out of date.
const reloadAction: ReloadPageAction = {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: `error in HMR event subscription for ${id}: ${e}`,
}
sendToClient(client, reloadAction)
client.close()
Expand Down
1 change: 1 addition & 0 deletions packages/next/src/server/dev/hot-reloader-types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,7 @@ interface RemovedPageAction {

export interface ReloadPageAction {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE
data: string
}

interface ServerComponentChangesAction {
Expand Down
10 changes: 8 additions & 2 deletions packages/next/src/server/dev/hot-reloader-webpack.ts
Original file line number Diff line number Diff line change
Expand Up @@ -408,7 +408,10 @@ export default class HotReloaderWebpack implements NextJsHotReloaderInterface {
public clearHmrServerError(): void {
if (this.hmrServerError) {
this.setHmrServerError(null)
this.send({ action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE })
this.send({
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: 'clear hmr server error',
})
}
}

Expand Down Expand Up @@ -1359,7 +1362,10 @@ export default class HotReloaderWebpack implements NextJsHotReloaderInterface {
this.serverPrevDocumentHash = documentChunk.hash || null

// Notify reload to reload the page, as _document.js was changed (different hash)
this.send({ action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE })
this.send({
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: '_document has changed',
})
}
)

Expand Down
46 changes: 34 additions & 12 deletions packages/next/src/server/dev/turbopack-utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -479,8 +479,11 @@ export async function handleRouteType({
pages: [page],
}
},
() => {
return { action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE }
(e) => {
return {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: `error in ${page} data subscription: ${e}`,
}
}
)
hooks?.subscribeToChanges(
Expand All @@ -492,8 +495,11 @@ export async function handleRouteType({
event: HMR_ACTIONS_SENT_TO_BROWSER.CLIENT_CHANGES,
}
},
() => {
return { action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE }
(e) => {
return {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: `error in ${page} html subscription: ${e}`,
}
}
)
if (entrypoints.global.document) {
Expand All @@ -502,10 +508,16 @@ export async function handleRouteType({
false,
entrypoints.global.document,
() => {
return { action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE }
return {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: '_document has changed (page route)',
}
},
() => {
return { action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE }
(e) => {
return {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: `error in _document subscription (page route): ${e}`,
}
}
)
}
Expand Down Expand Up @@ -1075,6 +1087,7 @@ export async function handlePagesErrorRoute({
() => {
return {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: '_app has changed (error route)',
}
}
)
Expand All @@ -1096,10 +1109,16 @@ export async function handlePagesErrorRoute({
false,
entrypoints.global.document,
() => {
return { action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE }
return {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: '_document has changed (error route)',
}
},
() => {
return { action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE }
(e) => {
return {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: `error in _document subscription (error route): ${e}`,
}
}
)
}
Expand All @@ -1122,8 +1141,11 @@ export async function handlePagesErrorRoute({
// https://github.com/vercel/next.js/blob/08d7a7e5189a835f5dcb82af026174e587575c0e/packages/next/src/client/page-bootstrap.ts#L69-L71
return { event: HMR_ACTIONS_SENT_TO_BROWSER.CLIENT_CHANGES }
},
() => {
return { action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE }
(e) => {
return {
action: HMR_ACTIONS_SENT_TO_BROWSER.RELOAD_PAGE,
data: `error in _error subscription: ${e}`,
}
}
)
}
Expand Down
22 changes: 15 additions & 7 deletions turbopack/crates/turbo-tasks-backend/src/backend/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ use crate::{
operation::{
get_aggregation_number, is_root_node, AggregatedDataUpdate, AggregationUpdateJob,
AggregationUpdateQueue, CleanupOldEdgesOperation, ConnectChildOperation,
ExecuteContext, ExecuteContextImpl, Operation, OutdatedEdge, TaskGuard,
ExecuteContext, ExecuteContextImpl, Operation, OutdatedEdge, TaskDirtyCause, TaskGuard,
},
storage::{get, get_many, get_mut, iter_many, remove, Storage},
},
Expand Down Expand Up @@ -779,7 +779,11 @@ impl<B: BackingStorage> TurboTasksBackendInner<B> {
task_id: TaskId,
turbo_tasks: &dyn TurboTasksBackendApi<TurboTasksBackend<B>>,
) {
operation::InvalidateOperation::run(smallvec![task_id], self.execute_context(turbo_tasks));
operation::InvalidateOperation::run(
smallvec![task_id],
TaskDirtyCause::Unknown,
self.execute_context(turbo_tasks),
);
}

fn invalidate_tasks(
Expand All @@ -789,6 +793,7 @@ impl<B: BackingStorage> TurboTasksBackendInner<B> {
) {
operation::InvalidateOperation::run(
tasks.iter().copied().collect(),
TaskDirtyCause::Unknown,
self.execute_context(turbo_tasks),
);
}
Expand All @@ -800,6 +805,7 @@ impl<B: BackingStorage> TurboTasksBackendInner<B> {
) {
operation::InvalidateOperation::run(
tasks.iter().copied().collect(),
TaskDirtyCause::Unknown,
self.execute_context(turbo_tasks),
);
}
Expand All @@ -817,9 +823,11 @@ impl<B: BackingStorage> TurboTasksBackendInner<B> {
task.invalidate_serialization();
}

fn get_task_description(&self, task: TaskId) -> std::string::String {
let task_type = self.lookup_task_type(task).expect("Task not found");
task_type.to_string()
fn get_task_description(&self, task_id: TaskId) -> std::string::String {
self.lookup_task_type(task_id).map_or_else(
|| format!("{task_id:?} transient"),
|task_type| task_type.to_string(),
)
}

fn try_get_function_id(&self, task_id: TaskId) -> Option<FunctionId> {
Expand Down Expand Up @@ -1163,7 +1171,7 @@ impl<B: BackingStorage> TurboTasksBackendInner<B> {
.get(&cell.type_id)
.map_or(false, |range| range.contains(&cell.index)) =>
{
Some(OutdatedEdge::RemovedCellDependent(task))
Some(OutdatedEdge::RemovedCellDependent(task, cell.type_id))
}
_ => None,
},
Expand Down Expand Up @@ -1194,7 +1202,7 @@ impl<B: BackingStorage> TurboTasksBackendInner<B> {
.get(&cell.type_id)
.map_or(false, |range| range.contains(&cell.index)) =>
{
Some(OutdatedEdge::RemovedCellDependent(task))
Some(OutdatedEdge::RemovedCellDependent(task, cell.type_id))
}
_ => None,
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,14 @@ use indexmap::map::Entry;
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use turbo_tasks::{FxIndexMap, FxIndexSet, SessionId, TaskId};
use turbo_tasks::{FxIndexMap, FxIndexSet, SessionId, TaskId, TraitTypeId};

use crate::{
backend::{
operation::{invalidate::make_task_dirty, ExecuteContext, Operation, TaskGuard},
operation::{
invalidate::{make_task_dirty, TaskDirtyCause},
ExecuteContext, Operation, TaskGuard,
},
storage::{get, get_many, iter_many, remove, update, update_count},
TaskDataCategory,
},
Expand Down Expand Up @@ -101,8 +104,9 @@ pub enum AggregationUpdateJob {
upper_ids: Vec<TaskId>,
update: AggregatedDataUpdate,
},
Invalidate {
InvalidateDueToCollectiblesChange {
task_ids: SmallVec<[TaskId; 4]>,
collectible_type: TraitTypeId,
},
BalanceEdge {
upper_id: TaskId,
Expand Down Expand Up @@ -284,8 +288,9 @@ impl AggregatedDataUpdate {
}
);
if !dependent.is_empty() {
queue.push(AggregationUpdateJob::Invalidate {
queue.push(AggregationUpdateJob::InvalidateDueToCollectiblesChange {
task_ids: dependent,
collectible_type: ty,
})
}
}
Expand Down Expand Up @@ -556,9 +561,17 @@ impl AggregationUpdateQueue {
AggregationUpdateJob::AggregatedDataUpdate { upper_ids, update } => {
self.aggregated_data_update(upper_ids, ctx, update);
}
AggregationUpdateJob::Invalidate { task_ids } => {
AggregationUpdateJob::InvalidateDueToCollectiblesChange {
task_ids,
collectible_type,
} => {
for task_id in task_ids {
make_task_dirty(task_id, self, ctx);
make_task_dirty(
task_id,
TaskDirtyCause::CollectiblesChange { collectible_type },
self,
ctx,
);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use std::mem::take;

use serde::{Deserialize, Serialize};
use turbo_tasks::TaskId;
use turbo_tasks::{TaskId, ValueTypeId};

use crate::{
backend::{
Expand All @@ -10,7 +10,7 @@ use crate::{
get_aggregation_number, get_uppers, is_aggregating_node, AggregationUpdateJob,
AggregationUpdateQueue,
},
invalidate::make_task_dirty,
invalidate::{make_task_dirty, TaskDirtyCause},
AggregatedDataUpdate, ExecuteContext, Operation, TaskGuard,
},
storage::{update, update_count},
Expand Down Expand Up @@ -41,7 +41,7 @@ pub enum OutdatedEdge {
CellDependency(CellRef),
OutputDependency(TaskId),
CollectiblesDependency(CollectiblesRef),
RemovedCellDependent(TaskId),
RemovedCellDependent(TaskId, ValueTypeId),
}

impl CleanupOldEdgesOperation {
Expand Down Expand Up @@ -177,8 +177,13 @@ impl Operation for CleanupOldEdgesOperation {
});
}
}
OutdatedEdge::RemovedCellDependent(task_id) => {
make_task_dirty(task_id, queue, ctx);
OutdatedEdge::RemovedCellDependent(task_id, value_type) => {
make_task_dirty(
task_id,
TaskDirtyCause::CellRemoved { value_type },
queue,
ctx,
);
}
}
}
Expand Down
Loading

0 comments on commit dfc5331

Please sign in to comment.