
[beta] backports #136017

Merged
merged 12 commits on Jan 26, 2025
18 changes: 9 additions & 9 deletions compiler/rustc_codegen_gcc/src/debuginfo.rs
@@ -113,15 +113,15 @@ fn make_mir_scope<'gcc, 'tcx>(
let scope_data = &mir.source_scopes[scope];
let parent_scope = if let Some(parent) = scope_data.parent_scope {
make_mir_scope(cx, _instance, mir, variables, debug_context, instantiated, parent);
debug_context.scopes[parent].unwrap()
debug_context.scopes[parent]
} else {
// The root is the function itself.
let file = cx.sess().source_map().lookup_source_file(mir.span.lo());
debug_context.scopes[scope] = Some(DebugScope {
debug_context.scopes[scope] = DebugScope {
file_start_pos: file.start_pos,
file_end_pos: file.end_position(),
..debug_context.scopes[scope].unwrap()
});
..debug_context.scopes[scope]
};
instantiated.insert(scope);
return;
};
@@ -130,7 +130,7 @@ fn make_mir_scope<'gcc, 'tcx>(
if !vars.contains(scope) && scope_data.inlined.is_none() {
// Do not create a DIScope if there are no variables defined in this
// MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat.
debug_context.scopes[scope] = Some(parent_scope);
debug_context.scopes[scope] = parent_scope;
instantiated.insert(scope);
return;
}
@@ -157,12 +157,12 @@ fn make_mir_scope<'gcc, 'tcx>(
// TODO(tempdragon): dbg_scope: Add support for scope extension here.
inlined_at.or(p_inlined_at);

debug_context.scopes[scope] = Some(DebugScope {
debug_context.scopes[scope] = DebugScope {
dbg_scope,
inlined_at,
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_position(),
});
};
instantiated.insert(scope);
}

@@ -232,12 +232,12 @@ impl<'gcc, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
}

// Initialize fn debug context (including scopes).
let empty_scope = Some(DebugScope {
let empty_scope = DebugScope {
dbg_scope: self.dbg_scope_fn(instance, fn_abi, Some(llfn)),
inlined_at: None,
file_start_pos: BytePos(0),
file_end_pos: BytePos(0),
});
};
let mut fn_debug_context = FunctionDebugContext {
scopes: IndexVec::from_elem(empty_scope, mir.source_scopes.as_slice()),
inlined_function_scopes: Default::default(),
59 changes: 22 additions & 37 deletions compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs
@@ -9,7 +9,7 @@ use rustc_middle::mir::{Body, SourceScope};
use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv};
use rustc_middle::ty::{self, Instance};
use rustc_session::config::DebugInfo;
use rustc_span::{BytePos, hygiene};
use rustc_span::{BytePos, DUMMY_SP, hygiene};

use super::metadata::file_metadata;
use super::utils::DIB;
@@ -85,23 +85,15 @@ fn make_mir_scope<'ll, 'tcx>(
discriminators,
parent,
);
if let Some(parent_scope) = debug_context.scopes[parent] {
parent_scope
} else {
// If the parent scope could not be represented then no children
// can be either.
debug_context.scopes[scope] = None;
instantiated.insert(scope);
return;
}
debug_context.scopes[parent]
} else {
// The root is the function itself.
let file = cx.sess().source_map().lookup_source_file(mir.span.lo());
debug_context.scopes[scope] = Some(DebugScope {
debug_context.scopes[scope] = DebugScope {
file_start_pos: file.start_pos,
file_end_pos: file.end_position(),
..debug_context.scopes[scope].unwrap()
});
..debug_context.scopes[scope]
};
instantiated.insert(scope);
return;
};
@@ -112,7 +104,7 @@
{
// Do not create a DIScope if there are no variables defined in this
// MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat.
debug_context.scopes[scope] = Some(parent_scope);
debug_context.scopes[scope] = parent_scope;
instantiated.insert(scope);
return;
}
@@ -145,14 +137,7 @@
},
};

let mut debug_scope = Some(DebugScope {
dbg_scope,
inlined_at: parent_scope.inlined_at,
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_position(),
});

if let Some((_, callsite_span)) = scope_data.inlined {
let inlined_at = scope_data.inlined.map(|(_, callsite_span)| {
let callsite_span = hygiene::walk_chain_collapsed(callsite_span, mir.span);
let callsite_scope = parent_scope.adjust_dbg_scope_for_span(cx, callsite_span);
let loc = cx.dbg_loc(callsite_scope, parent_scope.inlined_at, callsite_span);
@@ -175,29 +160,29 @@
// Note further that we can't key this hashtable on the span itself,
// because these spans could have distinct SyntaxContexts. We have
// to key on exactly what we're giving to LLVM.
let inlined_at = match discriminators.entry(callsite_span.lo()) {
match discriminators.entry(callsite_span.lo()) {
Entry::Occupied(mut o) => {
*o.get_mut() += 1;
// NB: We have to emit *something* here or we'll fail LLVM IR verification
// in at least some circumstances (see issue #135322) so if the required
// discriminant cannot be encoded fall back to the dummy location.
unsafe { llvm::LLVMRustDILocationCloneWithBaseDiscriminator(loc, *o.get()) }
.unwrap_or_else(|| {
cx.dbg_loc(callsite_scope, parent_scope.inlined_at, DUMMY_SP)
})
}
Entry::Vacant(v) => {
v.insert(0);
Some(loc)
}
};
match inlined_at {
Some(inlined_at) => {
debug_scope.as_mut().unwrap().inlined_at = Some(inlined_at);
}
None => {
// LLVM has a maximum discriminator that it can encode (currently
// it uses 12 bits for 4096 possible values). If we exceed that
// there is little we can do but drop the debug info.
debug_scope = None;
loc
}
}
}
});

debug_context.scopes[scope] = debug_scope;
debug_context.scopes[scope] = DebugScope {
dbg_scope,
inlined_at: inlined_at.or(parent_scope.inlined_at),
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_position(),
};
instantiated.insert(scope);
}
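Side note on the discriminator handling above: below is a minimal, self-contained sketch of the pattern with toy types (nothing here is rustc's real API beyond the names quoted in the diff). Repeated inlined callsites at the same source position get an incrementing discriminator, and when the value can no longer be encoded in LLVM's limited discriminator field, the new code falls back to a dummy location instead of dropping the scope's debug info entirely.

```rust
use std::collections::HashMap;
use std::collections::hash_map::Entry;

// Toy stand-in for an LLVM DILocation. The real code clones a DILocation via
// LLVMRustDILocationCloneWithBaseDiscriminator, which can fail when the
// discriminator no longer fits in LLVM's encoding.
#[derive(Clone, Copy, Debug)]
struct Loc {
    span_lo: u32,
    discriminator: u32,
}

// LLVM currently encodes the base discriminator in 12 bits (4096 values).
const MAX_DISCRIMINATOR: u32 = 4095;

fn clone_with_discriminator(loc: Loc, d: u32) -> Option<Loc> {
    (d <= MAX_DISCRIMINATOR).then_some(Loc { discriminator: d, ..loc })
}

// Assign a discriminator to repeated callsite positions; fall back to `dummy`
// when it cannot be encoded, rather than dropping the debug scope.
fn inlined_at_for(loc: Loc, dummy: Loc, discriminators: &mut HashMap<u32, u32>) -> Loc {
    match discriminators.entry(loc.span_lo) {
        Entry::Occupied(mut o) => {
            *o.get_mut() += 1;
            clone_with_discriminator(loc, *o.get()).unwrap_or(dummy)
        }
        Entry::Vacant(v) => {
            v.insert(0);
            loc
        }
    }
}

fn main() {
    let mut seen = HashMap::new();
    let loc = Loc { span_lo: 42, discriminator: 0 };
    let dummy = Loc { span_lo: 0, discriminator: 0 };
    // The first occurrence keeps the plain location; repeats get 1, 2, ...
    for _ in 0..3 {
        println!("{:?}", inlined_at_for(loc, dummy, &mut seen));
    }
}
```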
4 changes: 2 additions & 2 deletions compiler/rustc_codegen_llvm/src/debuginfo/mod.rs
@@ -293,12 +293,12 @@ impl<'ll, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
}

// Initialize fn debug context (including scopes).
let empty_scope = Some(DebugScope {
let empty_scope = DebugScope {
dbg_scope: self.dbg_scope_fn(instance, fn_abi, Some(llfn)),
inlined_at: None,
file_start_pos: BytePos(0),
file_end_pos: BytePos(0),
});
};
let mut fn_debug_context = FunctionDebugContext {
scopes: IndexVec::from_elem(empty_scope, &mir.source_scopes),
inlined_function_scopes: Default::default(),
6 changes: 2 additions & 4 deletions compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
@@ -19,9 +19,7 @@ use crate::traits::*;

pub struct FunctionDebugContext<'tcx, S, L> {
/// Maps from source code to the corresponding debug info scope.
/// May be None if the backend is not capable of representing the scope for
/// some reason.
pub scopes: IndexVec<mir::SourceScope, Option<DebugScope<S, L>>>,
pub scopes: IndexVec<mir::SourceScope, DebugScope<S, L>>,

/// Maps from an inlined function to its debug info declaration.
pub inlined_function_scopes: FxHashMap<Instance<'tcx>, S>,
@@ -232,7 +230,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
&self,
source_info: mir::SourceInfo,
) -> Option<(Bx::DIScope, Option<Bx::DILocation>, Span)> {
let scope = &self.debug_context.as_ref()?.scopes[source_info.scope]?;
let scope = &self.debug_context.as_ref()?.scopes[source_info.scope];
let span = hygiene::walk_chain_collapsed(source_info.span, self.mir.span);
Some((scope.adjust_dbg_scope_for_span(self.cx, span), scope.inlined_at, span))
}
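Most of the debuginfo changes in this backport follow from one data-structure simplification: `FunctionDebugContext::scopes` no longer stores `Option<DebugScope>`. Here is a compressed sketch with toy types (not rustc's `IndexVec` or `DebugScope`) of why the lookups become infallible:

```rust
// Toy stand-ins for rustc's DebugScope and the per-function scope table; this
// only illustrates the Option removal, not the real IndexVec-based API.
#[derive(Clone, Copy, Debug)]
struct DebugScope {
    file_start_pos: u32,
    file_end_pos: u32,
}

fn main() {
    let empty_scope = DebugScope { file_start_pos: 0, file_end_pos: 0 };

    // Before: the table held Option<DebugScope>, so every consumer needed
    // `?` or `.unwrap()`, and a None entry silently dropped debug info.
    let before: Vec<Option<DebugScope>> = vec![Some(empty_scope); 4];
    let _old_way = before[2].unwrap();

    // After: the table is pre-filled with a valid placeholder scope, so
    // indexing can no longer fail and the Option plumbing disappears.
    let after: Vec<DebugScope> = vec![empty_scope; 4];
    let _new_way = after[2];

    println!("{:?}", after[2]);
}
```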
9 changes: 1 addition & 8 deletions compiler/rustc_mir_build/src/thir/pattern/check_match.rs
@@ -1086,14 +1086,7 @@ fn find_fallback_pattern_typo<'tcx>(
let vis = cx.tcx.visibility(item.owner_id);
if vis.is_accessible_from(parent, cx.tcx) {
accessible.push(item_name);
let path = if item_name == name {
// We know that the const wasn't in scope because it has the exact
// same name, so we suggest the full path.
with_no_trimmed_paths!(cx.tcx.def_path_str(item.owner_id))
} else {
// The const is likely just typoed, and nothing else.
cx.tcx.def_path_str(item.owner_id)
};
let path = with_no_trimmed_paths!(cx.tcx.def_path_str(item.owner_id));
accessible_path.push(path);
} else if name == item_name {
// The const exists somewhere in this crate, but it can't be imported
5 changes: 3 additions & 2 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -189,8 +189,9 @@ pub struct Parser<'a> {
}

// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with
// nonterminals. Make sure it doesn't unintentionally get bigger.
#[cfg(all(target_pointer_width = "64", not(target_arch = "s390x")))]
// nonterminals. Make sure it doesn't unintentionally get bigger. We only check a few arches
// though, because `TokenTypeSet(u128)` alignment varies on others, changing the total size.
#[cfg(all(target_pointer_width = "64", any(target_arch = "aarch64", target_arch = "x86_64")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);

/// Stores span information about a closure.
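For readers unfamiliar with `static_assert_size!`, here is a rough sketch of what such a compile-time size check boils down to; the struct and the 288 below are illustrative stand-ins, not the real `Parser`:

```rust
// Illustrative struct sized to 288 bytes on a 64-bit target; the real check in
// the diff uses rustc's static_assert_size! macro on Parser itself.
struct Parser {
    _buf: [u8; 280],
    _flags: u64,
}

// A compile-time size check: the build fails if the type silently grows.
// Gate it with #[cfg(...)] when the size is target-dependent, as the diff does
// for the real Parser (its u128-based field changes alignment on some arches).
#[cfg(target_pointer_width = "64")]
const _: () = assert!(std::mem::size_of::<Parser>() == 288);

fn main() {
    println!("Parser is {} bytes", std::mem::size_of::<Parser>());
}
```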
69 changes: 45 additions & 24 deletions compiler/rustc_trait_selection/src/traits/coherence.rs
@@ -6,7 +6,7 @@

use std::fmt::Debug;

use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
use rustc_errors::{Diag, EmissionGuarantee};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
@@ -116,28 +116,39 @@ pub fn overlapping_impls(
return None;
}

let _overlap_with_bad_diagnostics = overlap(
tcx,
TrackAmbiguityCauses::No,
skip_leak_check,
impl1_def_id,
impl2_def_id,
overlap_mode,
)?;

// In the case where we detect an error, run the check again, but
// this time tracking intercrate ambiguity causes for better
// diagnostics. (These take time and can lead to false errors.)
let overlap = overlap(
tcx,
TrackAmbiguityCauses::Yes,
skip_leak_check,
impl1_def_id,
impl2_def_id,
overlap_mode,
)
.unwrap();
Some(overlap)
if tcx.next_trait_solver_in_coherence() {
overlap(
tcx,
TrackAmbiguityCauses::Yes,
skip_leak_check,
impl1_def_id,
impl2_def_id,
overlap_mode,
)
} else {
let _overlap_with_bad_diagnostics = overlap(
tcx,
TrackAmbiguityCauses::No,
skip_leak_check,
impl1_def_id,
impl2_def_id,
overlap_mode,
)?;

// In the case where we detect an error, run the check again, but
// this time tracking intercrate ambiguity causes for better
// diagnostics. (These take time and can lead to false errors.)
let overlap = overlap(
tcx,
TrackAmbiguityCauses::Yes,
skip_leak_check,
impl1_def_id,
impl2_def_id,
overlap_mode,
)
.unwrap();
Some(overlap)
}
}

fn fresh_impl_header<'tcx>(infcx: &InferCtxt<'tcx>, impl_def_id: DefId) -> ty::ImplHeader<'tcx> {
@@ -615,6 +626,7 @@ fn compute_intercrate_ambiguity_causes<'tcx>(
}

struct AmbiguityCausesVisitor<'a, 'tcx> {
cache: FxHashSet<Goal<'tcx, ty::Predicate<'tcx>>>,
causes: &'a mut FxIndexSet<IntercrateAmbiguityCause<'tcx>>,
}

@@ -624,6 +636,10 @@ impl<'a, 'tcx> ProofTreeVisitor<'tcx> for AmbiguityCausesVisitor<'a, 'tcx> {
}

fn visit_goal(&mut self, goal: &InspectGoal<'_, 'tcx>) {
if !self.cache.insert(goal.goal()) {
return;
}

let infcx = goal.infcx();
for cand in goal.candidates() {
cand.visit_nested_in_probe(self);
@@ -748,5 +764,10 @@ fn search_ambiguity_causes<'tcx>(
goal: Goal<'tcx, ty::Predicate<'tcx>>,
causes: &mut FxIndexSet<IntercrateAmbiguityCause<'tcx>>,
) {
infcx.probe(|_| infcx.visit_proof_tree(goal, &mut AmbiguityCausesVisitor { causes }));
infcx.probe(|_| {
infcx.visit_proof_tree(goal, &mut AmbiguityCausesVisitor {
cache: Default::default(),
causes,
})
});
}
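The `cache` field added to `AmbiguityCausesVisitor` deduplicates goals so that parts of the proof tree reachable through several candidates are only walked once. A minimal sketch of that guard pattern with toy types (not rustc's `Goal` or `ProofTreeVisitor`):

```rust
use std::collections::HashSet;

// Toy visitor: `cache` plays the role of the FxHashSet<Goal<..>> added in the
// diff, and `visit` the role of visit_goal.
struct Visitor {
    cache: HashSet<u32>,
    visited_order: Vec<u32>,
}

impl Visitor {
    fn visit(&mut self, goal: u32, children: &dyn Fn(u32) -> Vec<u32>) {
        // `insert` returns false if the goal was already present, so a goal
        // reachable through several candidates is only expanded once.
        if !self.cache.insert(goal) {
            return;
        }
        self.visited_order.push(goal);
        for child in children(goal) {
            self.visit(child, children);
        }
    }
}

fn main() {
    let mut v = Visitor { cache: HashSet::new(), visited_order: Vec::new() };
    // A tiny "proof tree" where goal 3 is reachable from both 1 and 2.
    let children = |g: u32| match g {
        0 => vec![1, 2],
        1 | 2 => vec![3],
        _ => vec![],
    };
    v.visit(0, &children);
    // Prints [0, 1, 3, 2]: goal 3 is visited only once.
    println!("{:?}", v.visited_order);
}
```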
3 changes: 0 additions & 3 deletions library/std/src/sys/pal/windows/fs.rs
@@ -328,9 +328,6 @@ impl File {
mem::size_of::<c::FILE_ALLOCATION_INFO>() as u32,
);
if result == 0 {
if api::get_last_error().code != 0 {
panic!("FILE_ALLOCATION_INFO failed!!!");
}
let eof = c::FILE_END_OF_FILE_INFO { EndOfFile: 0 };
let result = c::SetFileInformationByHandle(
handle.as_raw_handle(),
5 changes: 5 additions & 0 deletions src/bootstrap/bootstrap.py
@@ -1264,6 +1264,11 @@ def bootstrap(args):
config_toml = ""

profile = RustBuild.get_toml_static(config_toml, "profile")
is_non_git_source = not os.path.exists(os.path.join(rust_root, ".git"))

if profile is None and is_non_git_source:
profile = "dist"

if profile is not None:
# Allows creating alias for profile names, allowing
# profiles to be renamed while maintaining back compatibility
8 changes: 7 additions & 1 deletion src/bootstrap/src/core/build_steps/compile.rs
@@ -1790,7 +1790,13 @@ impl Step for Assemble {
// When using `download-ci-llvm`, some of the tools
// may not exist, so skip trying to copy them.
if src_path.exists() {
builder.copy_link(&src_path, &libdir_bin.join(&tool_exe));
// There is a chance that these tools are being installed from an external LLVM.
// Use `Builder::resolve_symlink_and_copy` instead of `Builder::copy_link` to ensure
// we are copying the original file not the symlinked path, which causes issues for
// tarball distribution.
//
// See https://github.com/rust-lang/rust/issues/135554.
builder.resolve_symlink_and_copy(&src_path, &libdir_bin.join(&tool_exe));
}
}
}
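A hedged sketch of what "resolve the symlink, then copy" amounts to; `Builder::resolve_symlink_and_copy` is bootstrap's own helper, so this standalone version and its paths are purely illustrative:

```rust
use std::fs;
use std::io;
use std::path::Path;

// Follow the symlink to the real file and copy those bytes, so a tarball ends
// up containing a regular file rather than a symlink pointing at a path that
// only exists on the build machine.
fn resolve_symlink_and_copy(src: &Path, dst: &Path) -> io::Result<u64> {
    // `canonicalize` resolves symlinks (and relative components) to the
    // underlying file's real path.
    let real_src = fs::canonicalize(src)?;
    fs::copy(real_src, dst)
}

fn main() -> io::Result<()> {
    // Hypothetical paths, purely for illustration.
    let copied = resolve_symlink_and_copy(
        Path::new("ci-llvm/bin/llvm-objcopy"),
        Path::new("dist/bin/llvm-objcopy"),
    )?;
    println!("copied {copied} bytes");
    Ok(())
}
```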