diff --git a/.mailmap b/.mailmap index f63b299ffd57e..a928606b693e5 100644 --- a/.mailmap +++ b/.mailmap @@ -78,6 +78,7 @@ Eric Holk Eric Holmes Eric Reed Erick Tryzelaar +Esteban Küber Evgeny Sologubov Falco Hirschenberger Felix S. Klock II Felix S Klock II diff --git a/RELEASES.md b/RELEASES.md index 0b394cc8e4c99..15ab97e26fc78 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -98,7 +98,7 @@ Misc [cargo/5877]: https://github.com/rust-lang/cargo/pull/5877/ [cargo/5878]: https://github.com/rust-lang/cargo/pull/5878/ [cargo/5995]: https://github.com/rust-lang/cargo/pull/5995/ -[proc-macros]: https://doc.rust-lang.org/book/2018-edition/ch19-06-macros.html +[proc-macros]: https://doc.rust-lang.org/nightly/book/2018-edition/ch19-06-macros.html [`Ipv4Addr::BROADCAST`]: https://doc.rust-lang.org/nightly/std/net/struct.Ipv4Addr.html#associatedconstant.BROADCAST [`Ipv4Addr::LOCALHOST`]: https://doc.rust-lang.org/nightly/std/net/struct.Ipv4Addr.html#associatedconstant.LOCALHOST diff --git a/config.toml.example b/config.toml.example index e8cb0cba6b1f9..093b8f9e526ec 100644 --- a/config.toml.example +++ b/config.toml.example @@ -277,6 +277,10 @@ # compiler. #codegen-units = 1 +# Sets the number of codegen units to build the standard library with, +# regardless of what the codegen-unit setting for the rest of the compiler is. +#codegen-units-std = 1 + # Whether or not debug assertions are enabled for the compiler and standard # library. Also enables compilation of debug! and trace! logging macros. #debug-assertions = false diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index ffc5adbebb34f..b5dc0090c8b9c 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -632,6 +632,9 @@ def build_bootstrap(self): target_features += ["-crt-static"] if target_features: env["RUSTFLAGS"] += "-C target-feature=" + (",".join(target_features)) + " " + target_linker = self.get_toml("linker", build_section) + if target_linker is not None: + env["RUSTFLAGS"] += "-C linker=" + target_linker + " " env["PATH"] = os.path.join(self.bin_root(), "bin") + \ os.pathsep + env["PATH"] diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index 71a89cd6d76b4..5abc0455b5871 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -1119,10 +1119,15 @@ impl<'a> Builder<'a> { cargo.arg("-v"); } - // This must be kept before the thinlto check, as we set codegen units - // to 1 forcibly there. - if let Some(n) = self.config.rust_codegen_units { - cargo.env("RUSTC_CODEGEN_UNITS", n.to_string()); + match (mode, self.config.rust_codegen_units_std, self.config.rust_codegen_units) { + (Mode::Std, Some(n), _) | + (Mode::Test, Some(n), _) | + (_, _, Some(n)) => { + cargo.env("RUSTC_CODEGEN_UNITS", n.to_string()); + } + _ => { + // Don't set anything + } } if self.config.rust_optimize { diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index 69d45acdedaf9..d009d9645c35a 100644 --- a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -69,7 +69,7 @@ impl Step for Std { if builder.config.keep_stage.contains(&compiler.stage) { builder.info("Warning: Using a potentially old libstd. This may not behave well."); builder.ensure(StdLink { - compiler: compiler, + compiler, target_compiler: compiler, target, }); @@ -358,7 +358,7 @@ impl Step for Test { if builder.config.keep_stage.contains(&compiler.stage) { builder.info("Warning: Using a potentially old libtest. 
This may not behave well."); builder.ensure(TestLink { - compiler: compiler, + compiler, target_compiler: compiler, target, }); @@ -480,7 +480,7 @@ impl Step for Rustc { if builder.config.keep_stage.contains(&compiler.stage) { builder.info("Warning: Using a potentially old librustc. This may not behave well."); builder.ensure(RustcLink { - compiler: compiler, + compiler, target_compiler: compiler, target, }); @@ -816,8 +816,8 @@ fn copy_codegen_backends_to_sysroot(builder: &Builder, let filename = file.file_name().unwrap().to_str().unwrap(); // change `librustc_codegen_llvm-xxxxxx.so` to `librustc_codegen_llvm-llvm.so` let target_filename = { - let dash = filename.find("-").unwrap(); - let dot = filename.find(".").unwrap(); + let dash = filename.find('-').unwrap(); + let dot = filename.find('.').unwrap(); format!("{}-{}{}", &filename[..dash], backend, diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index a9d330e06a15d..3eb6e8d84e877 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -95,6 +95,7 @@ pub struct Config { // rust codegen options pub rust_optimize: bool, pub rust_codegen_units: Option, + pub rust_codegen_units_std: Option, pub rust_debug_assertions: bool, pub rust_debuginfo: bool, pub rust_debuginfo_lines: bool, @@ -294,6 +295,7 @@ impl Default for StringOrBool { struct Rust { optimize: Option, codegen_units: Option, + codegen_units_std: Option, debug_assertions: Option, debuginfo: Option, debuginfo_lines: Option, @@ -580,6 +582,8 @@ impl Config { Some(n) => config.rust_codegen_units = Some(n), None => {} } + + config.rust_codegen_units_std = rust.codegen_units_std; } if let Some(ref t) = toml.target { diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py index 0cf84a62986a5..ddb894eb1f659 100755 --- a/src/bootstrap/configure.py +++ b/src/bootstrap/configure.py @@ -393,6 +393,13 @@ def set(key, value): targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", target) +def is_number(value): + try: + float(value) + return True + except: + return False + # Here we walk through the constructed configuration we have from the parsed # command line arguments. 
We then apply each piece of configuration by # basically just doing a `sed` to change the various configuration line to what @@ -406,7 +413,11 @@ def to_toml(value): elif isinstance(value, list): return '[' + ', '.join(map(to_toml, value)) + ']' elif isinstance(value, str): - return "'" + value + "'" + # Don't put quotes around numeric values + if is_number(value): + return value + else: + return "'" + value + "'" else: raise RuntimeError('no toml') diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index 4de899ee2d599..fea6302d0a119 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -1447,8 +1447,8 @@ impl Step for Extended { tarballs.extend(rls_installer.clone()); tarballs.extend(clippy_installer.clone()); tarballs.extend(rustfmt_installer.clone()); - tarballs.extend(llvm_tools_installer.clone()); - tarballs.extend(lldb_installer.clone()); + tarballs.extend(llvm_tools_installer); + tarballs.extend(lldb_installer); tarballs.push(analysis_installer); tarballs.push(std_installer); if builder.config.docs { diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index 2084b8bdb65ff..1211d485d1c12 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -93,8 +93,7 @@ impl Default for Subcommand { impl Flags { pub fn parse(args: &[String]) -> Flags { let mut extra_help = String::new(); - let mut subcommand_help = format!( - "\ + let mut subcommand_help = String::from("\ Usage: x.py [options] [...] Subcommands: @@ -365,8 +364,8 @@ Arguments: } let cmd = match subcommand.as_str() { - "build" => Subcommand::Build { paths: paths }, - "check" => Subcommand::Check { paths: paths }, + "build" => Subcommand::Build { paths }, + "check" => Subcommand::Check { paths }, "test" => Subcommand::Test { paths, bless: matches.opt_present("bless"), @@ -386,9 +385,9 @@ Arguments: paths, test_args: matches.opt_strs("test-args"), }, - "doc" => Subcommand::Doc { paths: paths }, + "doc" => Subcommand::Doc { paths }, "clean" => { - if paths.len() > 0 { + if !paths.is_empty() { println!("\nclean does not take a path argument\n"); usage(1, &opts, &subcommand_help, &extra_help); } @@ -413,11 +412,11 @@ Arguments: keep_stage: matches.opt_strs("keep-stage") .into_iter().map(|j| j.parse().unwrap()) .collect(), - host: split(matches.opt_strs("host")) + host: split(&matches.opt_strs("host")) .into_iter() .map(|x| INTERNER.intern_string(x)) .collect::>(), - target: split(matches.opt_strs("target")) + target: split(&matches.opt_strs("target")) .into_iter() .map(|x| INTERNER.intern_string(x)) .collect::>(), @@ -425,7 +424,7 @@ Arguments: jobs: matches.opt_str("jobs").map(|j| j.parse().unwrap()), cmd, incremental: matches.opt_present("incremental"), - exclude: split(matches.opt_strs("exclude")) + exclude: split(&matches.opt_strs("exclude")) .into_iter() .map(|p| p.into()) .collect::>(), @@ -488,7 +487,7 @@ impl Subcommand { } } -fn split(s: Vec) -> Vec { +fn split(s: &[String]) -> Vec { s.iter() .flat_map(|s| s.split(',')) .map(|s| s.to_string()) diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 4e26c98a62759..ba601249ea895 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -765,10 +765,10 @@ impl Build { let path = match which { GitRepo::Rustc => { - let sha = self.rust_info.sha().expect("failed to find sha"); + let sha = self.rust_sha().unwrap_or(channel::CFG_RELEASE_NUM); format!("/rustc/{}", sha) } - GitRepo::Llvm => format!("/rustc/llvm"), + GitRepo::Llvm => String::from("/rustc/llvm"), }; Some(format!("{}={}", self.src.display(), path)) } @@ -783,7 +783,7 @@ 
impl Build { fn cflags(&self, target: Interned, which: GitRepo) -> Vec { // Filter out -O and /O (the optimization flags) that we picked up from // cc-rs because the build scripts will determine that for themselves. - let mut base = self.cc[&target].args().iter() + let mut base: Vec = self.cc[&target].args().iter() .map(|s| s.to_string_lossy().into_owned()) .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")) .collect::>(); @@ -806,10 +806,10 @@ impl Build { if let Some(map) = self.debuginfo_map(which) { let cc = self.cc(target); if cc.ends_with("clang") || cc.ends_with("gcc") { - base.push(format!("-fdebug-prefix-map={}", map).into()); + base.push(format!("-fdebug-prefix-map={}", map)); } else if cc.ends_with("clang-cl.exe") { base.push("-Xclang".into()); - base.push(format!("-fdebug-prefix-map={}", map).into()); + base.push(format!("-fdebug-prefix-map={}", map)); } } base diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 30fb4a86b6af9..448967ef0c2e1 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -353,7 +353,7 @@ fn configure_cmake(builder: &Builder, // definitely causes problems since all the env vars are pointing to // 32-bit libraries. // - // To hack aroudn this... again... we pass an argument that's + // To hack around this... again... we pass an argument that's // unconditionally passed in the sccache shim. This'll get CMake to // correctly diagnose it's doing a 32-bit compilation and LLVM will // internally configure itself appropriately. @@ -361,7 +361,7 @@ fn configure_cmake(builder: &Builder, cfg.env("SCCACHE_EXTRA_ARGS", "-m32"); } - // If ccache is configured we inform the build a little differently hwo + // If ccache is configured we inform the build a little differently how // to invoke ccache while also invoking our compilers. } else if let Some(ref ccache) = builder.config.ccache { cfg.define("CMAKE_C_COMPILER", ccache) diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs index 724cb5841f48f..2eb11b06e4e08 100644 --- a/src/bootstrap/sanity.rs +++ b/src/bootstrap/sanity.rs @@ -74,7 +74,7 @@ pub fn check(build: &mut Build) { // one is present as part of the PATH then that can lead to the system // being unable to identify the files properly. See // https://github.com/rust-lang/rust/issues/34959 for more details. 
- if cfg!(windows) && path.to_string_lossy().contains("\"") { + if cfg!(windows) && path.to_string_lossy().contains('\"') { panic!("PATH contains invalid character '\"'"); } diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index fe04a91011ee5..e411e0d17342e 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -521,7 +521,7 @@ impl Step for RustdocTheme { fn make_run(run: RunConfig) { let compiler = run.builder.compiler(run.builder.top_stage, run.host); - run.builder.ensure(RustdocTheme { compiler: compiler }); + run.builder.ensure(RustdocTheme { compiler }); } fn run(self, builder: &Builder) { @@ -584,9 +584,9 @@ impl Step for RustdocJS { }); builder.run(&mut command); } else { - builder.info(&format!( + builder.info( "No nodejs found, skipping \"src/test/rustdoc-js\" tests" - )); + ); } } } @@ -653,7 +653,7 @@ impl Step for Tidy { } let _folder = builder.fold_output(|| "tidy"); - builder.info(&format!("tidy check")); + builder.info("tidy check"); try_run(builder, &mut cmd); } @@ -1052,7 +1052,7 @@ impl Step for Compiletest { let hostflags = flags.clone(); cmd.arg("--host-rustcflags").arg(hostflags.join(" ")); - let mut targetflags = flags.clone(); + let mut targetflags = flags; targetflags.push(format!( "-Lnative={}", builder.test_helpers_out(target).display() @@ -1168,9 +1168,9 @@ impl Step for Compiletest { } } if suite == "run-make-fulldeps" && !builder.config.llvm_enabled { - builder.info(&format!( + builder.info( "Ignoring run-make test suite as they generally don't work without LLVM" - )); + ); return; } @@ -1692,10 +1692,10 @@ impl Step for Crate { // The javascript shim implements the syscall interface so that test // output can be correctly reported. if !builder.config.wasm_syscall { - builder.info(&format!( + builder.info( "Libstd was built without `wasm_syscall` feature enabled: \ test output may not be visible." 
- )); + ); } // On the wasm32-unknown-unknown target we're using LTO which is @@ -1891,7 +1891,7 @@ impl Step for Distcheck { /// Run "distcheck", a 'make check' from a tarball fn run(self, builder: &Builder) { - builder.info(&format!("Distcheck")); + builder.info("Distcheck"); let dir = builder.out.join("tmp").join("distcheck"); let _ = fs::remove_dir_all(&dir); t!(fs::create_dir_all(&dir)); @@ -1919,7 +1919,7 @@ impl Step for Distcheck { ); // Now make sure that rust-src has all of libstd's dependencies - builder.info(&format!("Distcheck rust-src")); + builder.info("Distcheck rust-src"); let dir = builder.out.join("tmp").join("distcheck-src"); let _ = fs::remove_dir_all(&dir); t!(fs::create_dir_all(&dir)); diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs index 15d27c613423a..978e3602e7dbc 100644 --- a/src/bootstrap/tool.rs +++ b/src/bootstrap/tool.rs @@ -148,7 +148,7 @@ impl Step for ToolBuild { } }); - if is_expected && duplicates.len() != 0 { + if is_expected && !duplicates.is_empty() { println!("duplicate artfacts found when compiling a tool, this \ typically means that something was recompiled because \ a transitive dependency has different features activated \ @@ -170,7 +170,7 @@ impl Step for ToolBuild { println!(" `{}` additionally enabled features {:?} at {:?}", prev.0, &prev_features - &cur_features, prev.1); } - println!(""); + println!(); println!("to fix this you will probably want to edit the local \ src/tools/rustc-workspace-hack/Cargo.toml crate, as \ that will update the dependency graph to ensure that \ @@ -188,7 +188,7 @@ impl Step for ToolBuild { if !is_optional_tool { exit(1); } else { - return None; + None } } else { let cargo_out = builder.cargo_out(compiler, self.mode, target) @@ -251,7 +251,7 @@ pub fn prepare_tool_cargo( if let Some(date) = info.commit_date() { cargo.env("CFG_COMMIT_DATE", date); } - if features.len() > 0 { + if !features.is_empty() { cargo.arg("--features").arg(&features.join(", ")); } cargo diff --git a/src/ci/run.sh b/src/ci/run.sh index a2c271f0fc895..8e0eb8fec4325 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -40,6 +40,7 @@ RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-sccache" RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-manage-submodules" RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-locked-deps" RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-cargo-native-static" +RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.codegen-units-std=1" if [ "$DIST_SRC" = "" ]; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-dist-src" diff --git a/src/librustc/infer/combine.rs b/src/librustc/infer/combine.rs index de8f57ee79666..0ee03bc4c6e00 100644 --- a/src/librustc/infer/combine.rs +++ b/src/librustc/infer/combine.rs @@ -251,6 +251,7 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> { dir: RelationDir) -> RelateResult<'tcx, Generalization<'tcx>> { + debug!("generalize(ty={:?}, for_vid={:?}, dir={:?}", ty, for_vid, dir); // Determine the ambient variance within which `ty` appears. 
// The surrounding equation is: // @@ -273,8 +274,15 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> { root_ty: ty, }; - let ty = generalize.relate(&ty, &ty)?; + let ty = match generalize.relate(&ty, &ty) { + Ok(ty) => ty, + Err(e) => { + debug!("generalize: failure {:?}", e); + return Err(e); + } + }; let needs_wf = generalize.needs_wf; + debug!("generalize: success {{ {:?}, {:?} }}", ty, needs_wf); Ok(Generalization { ty, needs_wf }) } } diff --git a/src/librustc/infer/equate.rs b/src/librustc/infer/equate.rs index 854960492c9bd..c7b5ddb83410f 100644 --- a/src/librustc/infer/equate.rs +++ b/src/librustc/infer/equate.rs @@ -74,6 +74,9 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> let infcx = self.fields.infcx; let a = infcx.type_variables.borrow_mut().replace_if_possible(a); let b = infcx.type_variables.borrow_mut().replace_if_possible(b); + + debug!("{}.tys: replacements ({:?}, {:?})", self.tag(), a, b); + match (&a.sty, &b.sty) { (&ty::Infer(TyVar(a_id)), &ty::Infer(TyVar(b_id))) => { infcx.type_variables.borrow_mut().equate(a_id, b_id); diff --git a/src/librustc/infer/outlives/obligations.rs b/src/librustc/infer/outlives/obligations.rs index 332859d4f81db..5db850f1588b6 100644 --- a/src/librustc/infer/outlives/obligations.rs +++ b/src/librustc/infer/outlives/obligations.rs @@ -458,7 +458,7 @@ where ); debug!("projection_must_outlive: unique declared bound appears in trait ref"); self.delegate - .push_sub_region_constraint(origin.clone(), region, unique_bound); + .push_sub_region_constraint(origin, region, unique_bound); return; } diff --git a/src/librustc/infer/region_constraints/mod.rs b/src/librustc/infer/region_constraints/mod.rs index 96278e5140c8a..c82603bf56057 100644 --- a/src/librustc/infer/region_constraints/mod.rs +++ b/src/librustc/infer/region_constraints/mod.rs @@ -749,7 +749,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> { a // LUB(a,a) = a } - _ => self.combine_vars(tcx, Lub, a, b, origin.clone()), + _ => self.combine_vars(tcx, Lub, a, b, origin), } } @@ -771,7 +771,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> { a // GLB(a,a) = a } - _ => self.combine_vars(tcx, Glb, a, b, origin.clone()), + _ => self.combine_vars(tcx, Glb, a, b, origin), } } diff --git a/src/librustc/macros.rs b/src/librustc/macros.rs index 897e9cc2a381f..f21f949c9f5cd 100644 --- a/src/librustc/macros.rs +++ b/src/librustc/macros.rs @@ -83,7 +83,14 @@ macro_rules! __impl_stable_hash_field { macro_rules! impl_stable_hash_for { // FIXME(mark-i-m): Some of these should be `?` rather than `*`. See the git blame and change // them back when `?` is supported again. - (enum $enum_name:path { $( $variant:ident $( ( $($field:ident $(-> $delegate:tt)*),* ) )* ),* $(,)* }) => { + (enum $enum_name:path { + $( $variant:ident + // this incorrectly allows specifying both tuple-like and struct-like fields, as in `Variant(a,b){c,d}`, + // when it should be only one or the other + $( ( $($field:ident $(-> $delegate:tt)*),* ) )* + $( { $($named_field:ident $(-> $named_delegate:tt)*),* } )* + ),* $(,)* + }) => { impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $enum_name { #[inline] fn hash_stable(&self, @@ -94,8 +101,9 @@ macro_rules! 
impl_stable_hash_for { match *self { $( - $variant $( ( $(ref $field),* ) )* => { + $variant $( ( $(ref $field),* ) )* $( { $(ref $named_field),* } )* => { $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*)* + $($( __impl_stable_hash_field!($named_field, __ctx, __hasher $(, $named_delegate)*) );*)* } )* } @@ -133,10 +141,11 @@ macro_rules! impl_stable_hash_for { } }; - (impl<$tcx:lifetime $(, $T:ident)*> for struct $struct_name:path { - $($field:ident),* $(,)* + (impl<$tcx:lifetime $(, $lt:lifetime $(: $lt_bound:lifetime)*)* $(, $T:ident)*> for struct $struct_name:path { + $($field:ident $(-> $delegate:tt)*),* $(,)* }) => { - impl<'a, $tcx, $($T,)*> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name + impl<'a, $tcx, $($lt $(: $lt_bound)*,)* $($T,)*> + ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),* { #[inline] @@ -147,7 +156,7 @@ macro_rules! impl_stable_hash_for { $(ref $field),* } = *self; - $( $field.hash_stable(__ctx, __hasher));* + $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );* } } }; diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs index 9d296e67da1ae..7c3d4713572b9 100644 --- a/src/librustc/mir/mod.rs +++ b/src/librustc/mir/mod.rs @@ -1674,6 +1674,10 @@ impl<'tcx> Statement<'tcx> { /// Changes a statement to a nop. This is both faster than deleting instructions and avoids /// invalidating statement indices in `Location`s. pub fn make_nop(&mut self) { + // `Statement` contributes significantly to peak memory usage. Make + // sure it doesn't get bigger. + static_assert!(STATEMENT_IS_AT_MOST_56_BYTES: mem::size_of::>() <= 56); + self.kind = StatementKind::Nop } @@ -1737,7 +1741,7 @@ pub enum StatementKind<'tcx> { /// - `Contravariant` -- requires that `T_y :> T` /// - `Invariant` -- requires that `T_y == T` /// - `Bivariant` -- no effect - AscribeUserType(Place<'tcx>, ty::Variance, UserTypeAnnotation<'tcx>), + AscribeUserType(Place<'tcx>, ty::Variance, Box>), /// No-op. Useful for deleting instructions without affecting statement indices. Nop, diff --git a/src/librustc/mir/traversal.rs b/src/librustc/mir/traversal.rs index db1bc3e7519c2..a1e2b7a06468d 100644 --- a/src/librustc/mir/traversal.rs +++ b/src/librustc/mir/traversal.rs @@ -34,6 +34,7 @@ pub struct Preorder<'a, 'tcx: 'a> { mir: &'a Mir<'tcx>, visited: BitSet, worklist: Vec, + root_is_start_block: bool, } impl<'a, 'tcx> Preorder<'a, 'tcx> { @@ -44,6 +45,7 @@ impl<'a, 'tcx> Preorder<'a, 'tcx> { mir, visited: BitSet::new_empty(mir.basic_blocks().len()), worklist, + root_is_start_block: root == START_BLOCK, } } } @@ -75,15 +77,19 @@ impl<'a, 'tcx> Iterator for Preorder<'a, 'tcx> { fn size_hint(&self) -> (usize, Option) { // All the blocks, minus the number of blocks we've visited. - let remaining = self.mir.basic_blocks().len() - self.visited.count(); + let upper = self.mir.basic_blocks().len() - self.visited.count(); - // We will visit all remaining blocks exactly once. - (remaining, Some(remaining)) + let lower = if self.root_is_start_block { + // We will visit all remaining blocks exactly once. + upper + } else { + self.worklist.len() + }; + + (lower, Some(upper)) } } -impl<'a, 'tcx> ExactSizeIterator for Preorder<'a, 'tcx> {} - /// Postorder traversal of a graph. 
/// /// Postorder traversal is when each node is visited after all of it's @@ -105,7 +111,8 @@ impl<'a, 'tcx> ExactSizeIterator for Preorder<'a, 'tcx> {} pub struct Postorder<'a, 'tcx: 'a> { mir: &'a Mir<'tcx>, visited: BitSet, - visit_stack: Vec<(BasicBlock, Successors<'a>)> + visit_stack: Vec<(BasicBlock, Successors<'a>)>, + root_is_start_block: bool, } impl<'a, 'tcx> Postorder<'a, 'tcx> { @@ -113,7 +120,8 @@ impl<'a, 'tcx> Postorder<'a, 'tcx> { let mut po = Postorder { mir, visited: BitSet::new_empty(mir.basic_blocks().len()), - visit_stack: Vec::new() + visit_stack: Vec::new(), + root_is_start_block: root == START_BLOCK, }; @@ -214,15 +222,19 @@ impl<'a, 'tcx> Iterator for Postorder<'a, 'tcx> { fn size_hint(&self) -> (usize, Option) { // All the blocks, minus the number of blocks we've visited. - let remaining = self.mir.basic_blocks().len() - self.visited.count(); + let upper = self.mir.basic_blocks().len() - self.visited.count(); - // We will visit all remaining blocks exactly once. - (remaining, Some(remaining)) + let lower = if self.root_is_start_block { + // We will visit all remaining blocks exactly once. + upper + } else { + self.visit_stack.len() + }; + + (lower, Some(upper)) } } -impl<'a, 'tcx> ExactSizeIterator for Postorder<'a, 'tcx> {} - /// Reverse postorder traversal of a graph /// /// Reverse postorder is the reverse order of a postorder traversal. diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 0b6acb97e82dc..15a0adc3c0692 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -239,7 +239,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let msg = format!("type mismatch resolving `{}`", predicate); let error_id = (DiagnosticMessageId::ErrorId(271), - Some(obligation.cause.span), msg.clone()); + Some(obligation.cause.span), msg); let fresh = self.tcx.sess.one_time_diagnostics.borrow_mut().insert(error_id); if fresh { let mut diag = struct_span_err!( @@ -379,7 +379,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } if let Some(t) = self.get_parent_trait_ref(&obligation.cause.code) { - flags.push(("parent_trait".to_owned(), Some(t.to_string()))); + flags.push(("parent_trait".to_owned(), Some(t))); } if let Some(k) = obligation.cause.span.compiler_desugaring_kind() { diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index b266fbe0d1145..4eda47d31ebb5 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -594,7 +594,7 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( // But for now, let's classify this as an overflow: let recursion_limit = *selcx.tcx().sess.recursion_limit.get(); - let obligation = Obligation::with_depth(cause.clone(), + let obligation = Obligation::with_depth(cause, recursion_limit, param_env, projection_ty); diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 49f3717935493..2ea16823cc65d 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -1522,6 +1522,33 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { .map(|v| v.get(tcx)) } + /// Determines whether can we safely cache the result + /// of selecting an obligation. This is almost always 'true', + /// except when dealing with certain ParamCandidates. + /// + /// Ordinarily, a ParamCandidate will contain no inference variables, + /// since it was usually produced directly from a DefId. 
However, + /// certain cases (currently only librustdoc's blanket impl finder), + /// a ParamEnv may be explicitly constructed with inference types. + /// When this is the case, we do *not* want to cache the resulting selection + /// candidate. This is due to the fact that it might not always be possible + /// to equate the obligation's trait ref and the candidate's trait ref, + /// if more constraints end up getting added to an inference variable. + /// + /// Because of this, we always want to re-run the full selection + /// process for our obligation the next time we see it, since + /// we might end up picking a different SelectionCandidate (or none at all) + fn can_cache_candidate(&self, + result: &SelectionResult<'tcx, SelectionCandidate<'tcx>> + ) -> bool { + match result { + Ok(Some(SelectionCandidate::ParamCandidate(trait_ref))) => { + !trait_ref.skip_binder().input_types().any(|t| t.walk().any(|t_| t_.is_ty_infer())) + }, + _ => true + } + } + fn insert_candidate_cache( &mut self, param_env: ty::ParamEnv<'tcx>, @@ -1531,6 +1558,14 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { ) { let tcx = self.tcx(); let trait_ref = cache_fresh_trait_pred.skip_binder().trait_ref; + + if !self.can_cache_candidate(&candidate) { + debug!("insert_candidate_cache(trait_ref={:?}, candidate={:?} -\ + candidate is not cacheable", trait_ref, candidate); + return; + + } + if self.can_use_global_caches(param_env) { if let Err(Overflow) = candidate { // Don't cache overflow globally; we only produce this diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 1a9f86306325f..1686e3e0e0c09 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1200,7 +1200,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { cstore, global_arenas: &arenas.global, global_interners: interners, - dep_graph: dep_graph.clone(), + dep_graph, types: common_types, trait_map, export_map: resolutions.export_map.into_iter().map(|(k, v)| { diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index da5c5f47c08ac..bf8d02313035c 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -695,7 +695,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { let mut err = self.cannot_act_on_moved_value(use_span, verb, msg, - Some(nl.to_string()), + Some(nl), Origin::Ast); let need_note = match lp.ty.sty { ty::Closure(id, _) => { diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs index d9f223daf6091..8aa2e4641d4f9 100644 --- a/src/librustc_borrowck/borrowck/move_data.rs +++ b/src/librustc_borrowck/borrowck/move_data.rs @@ -347,7 +347,7 @@ impl<'a, 'tcx> MoveData<'tcx> { lp = base_lp.clone(); } - self.add_move_helper(tcx, orig_lp.clone(), id, kind); + self.add_move_helper(tcx, orig_lp, id, kind); } fn add_move_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, @@ -359,7 +359,7 @@ impl<'a, 'tcx> MoveData<'tcx> { id, kind); - let path_index = self.move_path(tcx, lp.clone()); + let path_index = self.move_path(tcx, lp); let move_index = MoveIndex(self.moves.borrow().len()); let next_move = self.path_first_move(path_index); @@ -402,7 +402,7 @@ impl<'a, 'tcx> MoveData<'tcx> { } } - self.add_assignment_helper(tcx, lp.clone(), assign_id, span); + self.add_assignment_helper(tcx, lp, assign_id, span); } fn add_assignment_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, diff --git a/src/librustc_codegen_llvm/base.rs b/src/librustc_codegen_llvm/base.rs index 614a562846e86..a9119d49e8b20 100644 --- 
a/src/librustc_codegen_llvm/base.rs +++ b/src/librustc_codegen_llvm/base.rs @@ -784,7 +784,7 @@ pub fn codegen_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, !tcx.sess.opts.output_types.should_codegen() { let ongoing_codegen = write::start_async_codegen( tcx, - time_graph.clone(), + time_graph, metadata, rx, 1); diff --git a/src/librustc_codegen_utils/link.rs b/src/librustc_codegen_utils/link.rs index 2a1fbe6ace541..66e98793f420d 100644 --- a/src/librustc_codegen_utils/link.rs +++ b/src/librustc_codegen_utils/link.rs @@ -138,7 +138,7 @@ pub fn filename_for_input(sess: &Session, let suffix = &sess.target.target.options.exe_suffix; let out_filename = outputs.path(OutputType::Exe); if suffix.is_empty() { - out_filename.to_path_buf() + out_filename } else { out_filename.with_extension(&suffix[1..]) } diff --git a/src/librustc_mir/borrow_check/error_reporting.rs b/src/librustc_mir/borrow_check/error_reporting.rs index b6bb130d3e27f..b2b92a6f85784 100644 --- a/src/librustc_mir/borrow_check/error_reporting.rs +++ b/src/librustc_mir/borrow_check/error_reporting.rs @@ -1615,7 +1615,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { ProjectionElem::Index(..) | ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. } => { - self.describe_field(&proj.base, field).to_string() + self.describe_field(&proj.base, field) } }, } diff --git a/src/librustc_mir/borrow_check/nll/type_check/mod.rs b/src/librustc_mir/borrow_check/nll/type_check/mod.rs index bbbe02fda6a84..ff4651dfb45cd 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/mod.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/mod.rs @@ -1225,7 +1225,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { ); }; } - StatementKind::AscribeUserType(ref place, variance, c_ty) => { + StatementKind::AscribeUserType(ref place, variance, box c_ty) => { let place_ty = place.ty(mir, tcx).to_ty(tcx); if let Err(terr) = self.relate_type_and_user_type( place_ty, diff --git a/src/librustc_mir/build/expr/as_place.rs b/src/librustc_mir/build/expr/as_place.rs index 820822b7f5bbd..8d6b6bb5c74f1 100644 --- a/src/librustc_mir/build/expr/as_place.rs +++ b/src/librustc_mir/build/expr/as_place.rs @@ -147,7 +147,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { kind: StatementKind::AscribeUserType( place.clone(), Variance::Invariant, - user_ty, + box user_ty, ), }, ); @@ -167,7 +167,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { kind: StatementKind::AscribeUserType( Place::Local(temp.clone()), Variance::Invariant, - user_ty, + box user_ty, ), }, ); diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index a864b39e15791..2f271e072c6e3 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -265,7 +265,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { block, Statement { source_info, - kind: StatementKind::FakeRead(FakeReadCause::ForLet, place.clone()), + kind: StatementKind::FakeRead(FakeReadCause::ForLet, place), }, ); @@ -314,9 +314,9 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { Statement { source_info: ty_source_info, kind: StatementKind::AscribeUserType( - place.clone(), + place, ty::Variance::Invariant, - ascription_user_ty, + box ascription_user_ty, ), }, ); @@ -1323,7 +1323,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { kind: StatementKind::AscribeUserType( ascription.source.clone(), ty::Variance::Covariant, - ascription.user_ty, + box ascription.user_ty, ), }, ); diff --git a/src/librustc_mir/build/matches/test.rs 
b/src/librustc_mir/build/matches/test.rs index c7da9c4fbd792..9e6f32909bdea 100644 --- a/src/librustc_mir/build/matches/test.rs +++ b/src/librustc_mir/build/matches/test.rs @@ -324,7 +324,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let ref_ty = self.hir.tcx().mk_ref(region, tam); // let lhs_ref_place = &lhs; - let ref_rvalue = Rvalue::Ref(region, BorrowKind::Shared, place.clone()); + let ref_rvalue = Rvalue::Ref(region, BorrowKind::Shared, place); let lhs_ref_place = self.temp(ref_ty, test.span); self.cfg.push_assign(block, source_info, &lhs_ref_place, ref_rvalue); let val = Operand::Move(lhs_ref_place); diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index e6dd0107e9198..6ea4628de24e9 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -644,7 +644,7 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, }).collect() }); - let mut builder = Builder::new(hir.clone(), + let mut builder = Builder::new(hir, span, arguments.len(), safety, @@ -714,7 +714,7 @@ fn construct_const<'a, 'gcx, 'tcx>( let ty = hir.tables().expr_ty_adjusted(ast_expr); let owner_id = tcx.hir.body_owner(body_id); let span = tcx.hir.span(owner_id); - let mut builder = Builder::new(hir.clone(), span, 0, Safety::Safe, ty, ty_span,vec![]); + let mut builder = Builder::new(hir, span, 0, Safety::Safe, ty, ty_span,vec![]); let mut block = START_BLOCK; let expr = builder.hir.mirror(ast_expr); diff --git a/src/librustc_mir/hair/pattern/_match.rs b/src/librustc_mir/hair/pattern/_match.rs index 77483ad184ba6..d53bb1dc4d63a 100644 --- a/src/librustc_mir/hair/pattern/_match.rs +++ b/src/librustc_mir/hair/pattern/_match.rs @@ -931,12 +931,37 @@ impl<'tcx> IntRange<'tcx> { } } -// Return a set of constructors equivalent to `all_ctors \ used_ctors`. +// A request for missing constructor data in terms of either: +// - whether or not there are any missing constructors; or +// - the actual set of missing constructors. +#[derive(PartialEq)] +enum MissingCtorsInfo { + Emptiness, + Ctors, +} + +// Used by `compute_missing_ctors`. +#[derive(Debug, PartialEq)] +enum MissingCtors<'tcx> { + Empty, + NonEmpty, + + // Note that the Vec can be empty. + Ctors(Vec>), +} + +// When `info` is `MissingCtorsInfo::Ctors`, compute a set of constructors +// equivalent to `all_ctors \ used_ctors`. When `info` is +// `MissingCtorsInfo::Emptiness`, just determine whether that set is empty or not. +// (The split logic gives a performance win, because we always need to know if +// the set is empty, but we rarely need the full set, and it can be expensive +// to compute the full set.) fn compute_missing_ctors<'a, 'tcx: 'a>( + info: MissingCtorsInfo, tcx: TyCtxt<'a, 'tcx, 'tcx>, all_ctors: &Vec>, used_ctors: &Vec>, -) -> Vec> { +) -> MissingCtors<'tcx> { let mut missing_ctors = vec![]; for req_ctor in all_ctors { @@ -965,10 +990,22 @@ fn compute_missing_ctors<'a, 'tcx: 'a>( // We add `refined_ctors` instead of `req_ctor`, because then we can // provide more detailed error information about precisely which // ranges have been omitted. - missing_ctors.extend(refined_ctors); + if info == MissingCtorsInfo::Emptiness { + if !refined_ctors.is_empty() { + // The set is non-empty; return early. + return MissingCtors::NonEmpty; + } + } else { + missing_ctors.extend(refined_ctors); + } } - missing_ctors + if info == MissingCtorsInfo::Emptiness { + // If we reached here, the set is empty.
+ MissingCtors::Empty + } else { + MissingCtors::Ctors(missing_ctors) + } } /// Algorithm from http://moscova.inria.fr/~maranget/papers/warn/index.html @@ -1081,20 +1118,23 @@ pub fn is_useful<'p, 'a: 'p, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>, // feature flag is not present, so this is only // needed for that case. - // Find those constructors that are not matched by any non-wildcard patterns in the - // current column. - let missing_ctors = compute_missing_ctors(cx.tcx, &all_ctors, &used_ctors); + // Missing constructors are those that are not matched by any + // non-wildcard patterns in the current column. We always determine if + // the set is empty, but we only fully construct them on-demand, + // because they're rarely used and can be big. + let cheap_missing_ctors = + compute_missing_ctors(MissingCtorsInfo::Emptiness, cx.tcx, &all_ctors, &used_ctors); let is_privately_empty = all_ctors.is_empty() && !cx.is_uninhabited(pcx.ty); let is_declared_nonexhaustive = cx.is_non_exhaustive_enum(pcx.ty) && !cx.is_local(pcx.ty); - debug!("missing_ctors={:#?} is_privately_empty={:#?} is_declared_nonexhaustive={:#?}", - missing_ctors, is_privately_empty, is_declared_nonexhaustive); + debug!("cheap_missing_ctors={:#?} is_privately_empty={:#?} is_declared_nonexhaustive={:#?}", + cheap_missing_ctors, is_privately_empty, is_declared_nonexhaustive); // For privately empty and non-exhaustive enums, we work as if there were an "extra" // `_` constructor for the type, so we can never match over all constructors. let is_non_exhaustive = is_privately_empty || is_declared_nonexhaustive; - if missing_ctors.is_empty() && !is_non_exhaustive { + if cheap_missing_ctors == MissingCtors::Empty && !is_non_exhaustive { split_grouped_constructors(cx.tcx, all_ctors, matrix, pcx.ty).into_iter().map(|c| { is_useful_specialized(cx, matrix, v, c, pcx.ty, witness) }).find(|result| result.is_useful()).unwrap_or(NotUseful) @@ -1165,15 +1205,22 @@ pub fn is_useful<'p, 'a: 'p, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>, witness }).collect() } else { - pats.into_iter().flat_map(|witness| { - missing_ctors.iter().map(move |ctor| { - // Extends the witness with a "wild" version of this - // constructor, that matches everything that can be built with - // it. For example, if `ctor` is a `Constructor::Variant` for - // `Option::Some`, this pushes the witness for `Some(_)`. - witness.clone().push_wild_constructor(cx, ctor, pcx.ty) - }) - }).collect() + let expensive_missing_ctors = + compute_missing_ctors(MissingCtorsInfo::Ctors, cx.tcx, &all_ctors, + &used_ctors); + if let MissingCtors::Ctors(missing_ctors) = expensive_missing_ctors { + pats.into_iter().flat_map(|witness| { + missing_ctors.iter().map(move |ctor| { + // Extends the witness with a "wild" version of this + // constructor, that matches everything that can be built with + // it. For example, if `ctor` is a `Constructor::Variant` for + // `Option::Some`, this pushes the witness for `Some(_)`. + witness.clone().push_wild_constructor(cx, ctor, pcx.ty) + }) + }).collect() + } else { + bug!("cheap missing ctors") + } }; UsefulWithWitness(new_witnesses) } diff --git a/src/librustc_mir/interpret/snapshot.rs b/src/librustc_mir/interpret/snapshot.rs index 047a0125f78af..cff2288fd8720 100644 --- a/src/librustc_mir/interpret/snapshot.rs +++ b/src/librustc_mir/interpret/snapshot.rs @@ -6,9 +6,8 @@ // it is not used by the general miri engine, just by CTFE. 
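A minimal standalone sketch of the pattern introduced in `compute_missing_ctors` above (illustrative types only, not the compiler's own `Constructor`/`MissingCtors`): answer the cheap "is anything missing?" question eagerly, and only materialize the full set when the caller asks for it.

#[derive(Debug, PartialEq)]
enum Missing {
    Empty,
    NonEmpty,
    // The full set; note that it can legitimately be empty.
    Ctors(Vec<u32>),
}

// `need_full_set` plays the role of `MissingCtorsInfo`: callers that only need
// to know emptiness never build (or allocate) the full vector.
fn missing_ctors(all: &[u32], used: &[u32], need_full_set: bool) -> Missing {
    let mut missing = Vec::new();
    for &ctor in all {
        if used.contains(&ctor) {
            continue;
        }
        if !need_full_set {
            // One missing constructor is enough to answer the emptiness query.
            return Missing::NonEmpty;
        }
        missing.push(ctor);
    }
    if need_full_set { Missing::Ctors(missing) } else { Missing::Empty }
}

fn main() {
    assert_eq!(missing_ctors(&[1, 2, 3], &[1, 2, 3], false), Missing::Empty);
    assert_eq!(missing_ctors(&[1, 2, 3], &[1], false), Missing::NonEmpty);
    assert_eq!(missing_ctors(&[1, 2, 3], &[1], true), Missing::Ctors(vec![2, 3]));
}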
use std::hash::{Hash, Hasher}; -use std::mem; -use rustc::ich::{StableHashingContext, StableHashingContextProvider}; +use rustc::ich::StableHashingContextProvider; use rustc::mir; use rustc::mir::interpret::{ AllocId, Pointer, Scalar, @@ -20,7 +19,7 @@ use rustc::ty::{self, TyCtxt}; use rustc::ty::layout::Align; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::indexed_vec::IndexVec; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use syntax::ast::Mutability; use syntax::source_map::Span; @@ -217,23 +216,10 @@ impl_snapshot_for!(struct MemPlace { align -> *align, // just copy alignment verbatim }); -// Can't use the macro here because that does not support named enum fields. -impl<'a> HashStable> for Place { - fn hash_stable( - &self, hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) - { - mem::discriminant(self).hash_stable(hcx, hasher); - match self { - Place::Ptr(mem_place) => mem_place.hash_stable(hcx, hasher), - - Place::Local { frame, local } => { - frame.hash_stable(hcx, hasher); - local.hash_stable(hcx, hasher); - }, - } - } -} +impl_stable_hash_for!(enum ::interpret::Place { + Ptr(mem_place), + Local { frame, local }, +}); impl<'a, Ctx> Snapshot<'a, Ctx> for Place where Ctx: SnapshotContext<'a>, { @@ -317,20 +303,10 @@ impl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation } } -// Can't use the macro here because that does not support named enum fields. -impl<'a> HashStable> for StackPopCleanup { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) - { - mem::discriminant(self).hash_stable(hcx, hasher); - match self { - StackPopCleanup::Goto(ref block) => block.hash_stable(hcx, hasher), - StackPopCleanup::None { cleanup } => cleanup.hash_stable(hcx, hasher), - } - } -} +impl_stable_hash_for!(enum ::interpret::eval_context::StackPopCleanup { + Goto(block), + None { cleanup }, +}); #[derive(Eq, PartialEq)] struct FrameSnapshot<'a, 'tcx: 'a> { @@ -343,28 +319,17 @@ struct FrameSnapshot<'a, 'tcx: 'a> { stmt: usize, } -// Not using the macro because that does not support types depending on two lifetimes -impl<'a, 'mir, 'tcx: 'mir> HashStable> for Frame<'mir, 'tcx> { - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { - - let Frame { - mir, - instance, - span, - return_to_block, - return_place, - locals, - block, - stmt, - } = self; +impl_stable_hash_for!(impl<'tcx, 'mir: 'tcx> for struct Frame<'mir, 'tcx> { + mir, + instance, + span, + return_to_block, + return_place -> (return_place.as_ref().map(|r| &**r)), + locals, + block, + stmt, +}); - (mir, instance, span, return_to_block).hash_stable(hcx, hasher); - (return_place.as_ref().map(|r| &**r), locals, block, stmt).hash_stable(hcx, hasher); - } -} impl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx> where Ctx: SnapshotContext<'a>, { @@ -443,21 +408,11 @@ impl<'a, 'mir, 'tcx> Hash for EvalSnapshot<'a, 'mir, 'tcx> } } -// Not using the macro because we need special handling for `memory`, which the macro -// does not support at the same time as the extra bounds on the type. 
-impl<'a, 'b, 'mir, 'tcx> HashStable> - for EvalSnapshot<'a, 'mir, 'tcx> -{ - fn hash_stable( - &self, - hcx: &mut StableHashingContext<'b>, - hasher: &mut StableHasher) - { - // Not hashing memory: Avoid hashing memory all the time during execution - let EvalSnapshot{ memory: _, stack } = self; - stack.hash_stable(hcx, hasher); - } -} +impl_stable_hash_for!(impl<'tcx, 'b, 'mir> for struct EvalSnapshot<'b, 'mir, 'tcx> { + // Not hashing memory: Avoid hashing memory all the time during execution + memory -> _, + stack, +}); impl<'a, 'mir, 'tcx> Eq for EvalSnapshot<'a, 'mir, 'tcx> {} diff --git a/src/librustc_mir/interpret/validity.rs b/src/librustc_mir/interpret/validity.rs index ac1ba0edc3b3b..25e2ff6edb763 100644 --- a/src/librustc_mir/interpret/validity.rs +++ b/src/librustc_mir/interpret/validity.rs @@ -463,7 +463,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> // Validate all fields of compound data structures let path_len = path.len(); // Remember the length, in case we need to truncate match dest.layout.fields { - layout::FieldPlacement::Union(..) => { + layout::FieldPlacement::Union(fields) => { + // Empty unions are not accepted by rustc. That's great, it means we can + // use that as an unambiguous signal for detecting primitives. Make sure + // we did not miss any primitive. + debug_assert!(fields > 0); // We can't check unions, their bits are allowed to be anything. // The fields don't need to correspond to any bit pattern of the union's fields. // See https://github.com/rust-lang/rust/issues/32836#issuecomment-406875389 diff --git a/src/librustc_mir/shim.rs b/src/librustc_mir/shim.rs index 4b26094b9fc28..d8f627fcf4dd7 100644 --- a/src/librustc_mir/shim.rs +++ b/src/librustc_mir/shim.rs @@ -547,7 +547,7 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> { // `dest[i] = Clone::clone(src[beg])`; // Goto #3 if ok, #5 if unwinding happens. let dest_field = dest.clone().index(beg); - let src_field = src.clone().index(beg); + let src_field = src.index(beg); self.make_clone_call(dest_field, src_field, ty, BasicBlock::new(3), BasicBlock::new(5)); diff --git a/src/librustc_mir/transform/promote_consts.rs b/src/librustc_mir/transform/promote_consts.rs index f4efe33da7080..5114fa879746b 100644 --- a/src/librustc_mir/transform/promote_consts.rs +++ b/src/librustc_mir/transform/promote_consts.rs @@ -333,6 +333,14 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { let operand = Operand::Copy(promoted_place(ty, span)); mem::replace(&mut args[index], operand) } + // We expected a `TerminatorKind::Call` for which we'd like to promote an + // argument. `qualify_consts` saw a `TerminatorKind::Call` here, but + // we are seeing a `Goto`. That means that the `promote_temps` method + // already promoted this call away entirely. This case occurs when calling + // a function that requires a constant argument, where the value provided + // for that constant argument is itself computed by another call to a + // function requiring a constant argument. + TerminatorKind::Goto { ..
} => return, _ => bug!() } } diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index 78547abf9d9d3..c4d8ec3586883 100644 --- a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -820,7 +820,9 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { let fn_ty = func.ty(self.mir, self.tcx); let mut callee_def_id = None; - let (mut is_shuffle, mut is_const_fn) = (false, false); + let mut is_shuffle = false; + let mut is_const_fn = false; + let mut is_promotable_const_fn = false; if let ty::FnDef(def_id, _) = fn_ty.sty { callee_def_id = Some(def_id); match self.tcx.fn_sig(def_id).abi() { @@ -881,6 +883,9 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { // functions without #[rustc_promotable] if self.tcx.is_promotable_const_fn(def_id) { is_const_fn = true; + is_promotable_const_fn = true; + } else if self.tcx.is_const_fn(def_id) { + is_const_fn = true; } } else { // stable const fn or unstable const fns with their feature gate @@ -982,7 +987,17 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { if !constant_arguments.contains(&i) { return } - if this.qualif.is_empty() { + // Since the argument is required to be constant, + // we care about constness, not promotability. + // If we checked for promotability, we'd miss out on + // the results of function calls (which are never promoted + // in runtime code) + // This is not a problem, because the argument explicitly + // requests constness, in contrast to regular promotion + // which happens even without the user requesting it. + // We can error out with a hard error if the argument is not + // constant here. + if (this.qualif - Qualif::NOT_PROMOTABLE).is_empty() { this.promotion_candidates.push(candidate); } else { this.tcx.sess.span_err(this.span, @@ -1011,7 +1026,11 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { // Be conservative about the returned value of a const fn. 
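A small standalone sketch of the `(this.qualif - Qualif::NOT_PROMOTABLE).is_empty()` idiom used above (hand-rolled flags with made-up names, not rustc's actual `Qualif` definition): subtract the one flag we are willing to tolerate and check whether any other qualification remains.

#[derive(Clone, Copy, PartialEq)]
struct Qualif(u8);

impl Qualif {
    const NEEDS_DROP: Qualif = Qualif(1 << 0);
    const NOT_PROMOTABLE: Qualif = Qualif(1 << 1);

    fn is_empty(self) -> bool {
        self.0 == 0
    }
}

// Set difference: keep the bits of `self` that are not set in `rhs`.
impl std::ops::Sub for Qualif {
    type Output = Qualif;
    fn sub(self, rhs: Qualif) -> Qualif {
        Qualif(self.0 & !rhs.0)
    }
}

// Set union.
impl std::ops::BitOr for Qualif {
    type Output = Qualif;
    fn bitor(self, rhs: Qualif) -> Qualif {
        Qualif(self.0 | rhs.0)
    }
}

fn main() {
    // Constant but not promotable: acceptable for an explicitly-constant argument.
    let q = Qualif::NOT_PROMOTABLE;
    assert!((q - Qualif::NOT_PROMOTABLE).is_empty());

    // Any other qualification means the argument is not a constant at all.
    let q = Qualif::NEEDS_DROP | Qualif::NOT_PROMOTABLE;
    assert!(!(q - Qualif::NOT_PROMOTABLE).is_empty());
}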
let tcx = self.tcx; let ty = dest.ty(self.mir, tcx).to_ty(tcx); - self.qualif = Qualif::empty(); + if is_const_fn && !is_promotable_const_fn && self.mode == Mode::Fn { + self.qualif = Qualif::NOT_PROMOTABLE; + } else { + self.qualif = Qualif::empty(); + } self.add_type(ty); } self.assign(dest, location); diff --git a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs index 1dca367ffdf5f..d9a1e4a0fd639 100644 --- a/src/librustc_mir/util/elaborate_drops.rs +++ b/src/librustc_mir/util/elaborate_drops.rs @@ -753,11 +753,11 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> self.place.clone() ))); drop_block_stmts.push(self.assign(&cur, Rvalue::Cast( - CastKind::Misc, Operand::Move(tmp.clone()), iter_ty + CastKind::Misc, Operand::Move(tmp), iter_ty ))); drop_block_stmts.push(self.assign(&length_or_end, Rvalue::BinaryOp(BinOp::Offset, - Operand::Copy(cur.clone()), Operand::Move(length.clone()) + Operand::Copy(cur), Operand::Move(length) ))); } else { // index = 0 (length already pushed) diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index b7ed3ef59b4c8..a43a3c6ac5a60 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -218,7 +218,7 @@ impl<'a, 'cl> Resolver<'a, 'cl> { }; this.add_import_directive( base.into_iter().collect(), - subclass.clone(), + subclass, source.ident.span, id, root_use_tree.span, diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index a7fe1bb421c37..c0b718e4863fc 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -105,7 +105,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> { tcx: save_ctxt.tcx, save_ctxt, dumper, - span: span_utils.clone(), + span: span_utils, cur_scope: CRATE_NODE_ID, // mac_defs: FxHashSet::default(), macro_calls: FxHashSet::default(), diff --git a/src/librustc_target/spec/mod.rs b/src/librustc_target/spec/mod.rs index 9ee4582fabf7b..d43d45f64a55c 100644 --- a/src/librustc_target/spec/mod.rs +++ b/src/librustc_target/spec/mod.rs @@ -1132,7 +1132,7 @@ impl ToJson for Target { macro_rules! 
target_val { ($attr:ident) => ( { let name = (stringify!($attr)).replace("_", "-"); - d.insert(name.to_string(), self.$attr.to_json()); + d.insert(name, self.$attr.to_json()); } ); ($attr:ident, $key_name:expr) => ( { let name = $key_name; @@ -1144,7 +1144,7 @@ impl ToJson for Target { ($attr:ident) => ( { let name = (stringify!($attr)).replace("_", "-"); if default.$attr != self.options.$attr { - d.insert(name.to_string(), self.options.$attr.to_json()); + d.insert(name, self.options.$attr.to_json()); } } ); ($attr:ident, $key_name:expr) => ( { @@ -1160,7 +1160,7 @@ impl ToJson for Target { .iter() .map(|(k, v)| (k.desc().to_owned(), v.clone())) .collect::>(); - d.insert(name.to_string(), obj.to_json()); + d.insert(name, obj.to_json()); } } ); (env - $attr:ident) => ( { @@ -1170,7 +1170,7 @@ impl ToJson for Target { .iter() .map(|&(ref k, ref v)| k.clone() + "=" + &v) .collect::>(); - d.insert(name.to_string(), obj.to_json()); + d.insert(name, obj.to_json()); } } ); diff --git a/src/librustc_typeck/check/autoderef.rs b/src/librustc_typeck/check/autoderef.rs index 743a2390ec43d..73489309d0742 100644 --- a/src/librustc_typeck/check/autoderef.rs +++ b/src/librustc_typeck/check/autoderef.rs @@ -61,7 +61,7 @@ impl<'a, 'gcx, 'tcx> Iterator for Autoderef<'a, 'gcx, 'tcx> { let suggested_limit = *tcx.sess.recursion_limit.get() * 2; let msg = format!("reached the recursion limit while auto-dereferencing {:?}", self.cur_ty); - let error_id = (DiagnosticMessageId::ErrorId(55), Some(self.span), msg.clone()); + let error_id = (DiagnosticMessageId::ErrorId(55), Some(self.span), msg); let fresh = tcx.sess.one_time_diagnostics.borrow_mut().insert(error_id); if fresh { struct_span_err!(tcx.sess, diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs index 967c710ac34a1..3bdd038bff19c 100644 --- a/src/librustc_typeck/check/coercion.rs +++ b/src/librustc_typeck/check/coercion.rs @@ -61,7 +61,7 @@ //! we may want to adjust precisely when coercions occur. use check::{FnCtxt, Needs}; - +use errors::DiagnosticBuilder; use rustc::hir; use rustc::hir::def_id::DefId; use rustc::infer::{Coercion, InferResult, InferOk}; @@ -72,14 +72,12 @@ use rustc::ty::{self, TypeAndMut, Ty, ClosureSubsts}; use rustc::ty::fold::TypeFoldable; use rustc::ty::error::TypeError; use rustc::ty::relate::RelateResult; -use errors::DiagnosticBuilder; +use smallvec::{smallvec, SmallVec}; +use std::ops::Deref; use syntax::feature_gate; use syntax::ptr::P; use syntax_pos; -use std::collections::VecDeque; -use std::ops::Deref; - struct Coerce<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, cause: ObligationCause<'tcx>, @@ -536,18 +534,23 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { let mut selcx = traits::SelectionContext::new(self); - // Use a FIFO queue for this custom fulfillment procedure. (The maximum - // length is almost always 1.) - let mut queue = VecDeque::with_capacity(1); - // Create an obligation for `Source: CoerceUnsized`. let cause = ObligationCause::misc(self.cause.span, self.body_id); - queue.push_back(self.tcx.predicate_for_trait_def(self.fcx.param_env, - cause, - coerce_unsized_did, - 0, - coerce_source, - &[coerce_target.into()])); + + // Use a FIFO queue for this custom fulfillment procedure. + // + // A Vec (or SmallVec) is not a natural choice for a queue. However, + // this code path is hot, and this queue usually has a max length of 1 + // and almost never more than 3. 
By using a SmallVec we avoid an + // allocation, at the (very small) cost of (occasionally) having to + // shift subsequent elements down when removing the front element. + let mut queue: SmallVec<[_; 4]> = + smallvec![self.tcx.predicate_for_trait_def(self.fcx.param_env, + cause, + coerce_unsized_did, + 0, + coerce_source, + &[coerce_target.into()])]; let mut has_unsized_tuple_coercion = false; @@ -555,7 +558,8 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { // emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where // inference might unify those two inner type variables later. let traits = [coerce_unsized_did, unsize_did]; - while let Some(obligation) = queue.pop_front() { + while !queue.is_empty() { + let obligation = queue.remove(0); debug!("coerce_unsized resolve step: {:?}", obligation); let trait_ref = match obligation.predicate { ty::Predicate::Trait(ref tr) if traits.contains(&tr.def_id()) => { diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs index 695812faafff1..54c6c8f7b9322 100644 --- a/src/librustc_typeck/check/compare_method.rs +++ b/src/librustc_typeck/check/compare_method.rs @@ -325,7 +325,7 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, diag.span_suggestion_with_applicability( impl_err_span, "consider change the type to match the mutability in trait", - trait_err_str.to_string(), + trait_err_str, Applicability::MachineApplicable, ); } diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index f4538dbd25e2f..c506f23078f25 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -1157,7 +1157,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { // Convert the bounds into obligations. 
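Circling back to the coercion.rs change above: a minimal standalone sketch of the SmallVec-as-FIFO pattern it adopts (assuming the same `smallvec` crate the file now imports; the element type here is made up). Items are pushed on the back and taken from the front with `remove(0)`, and the queue stays on the stack while it remains short.

use smallvec::{smallvec, SmallVec};

fn main() {
    // Inline capacity of 4, mirroring the `SmallVec<[_; 4]>` used above.
    let mut queue: SmallVec<[u32; 4]> = smallvec![1];

    while !queue.is_empty() {
        // O(len) front removal, which is cheap while the queue is tiny.
        let item = queue.remove(0);
        println!("processing obligation {}", item);

        // Processing an item may enqueue follow-up work, much as the
        // obligation loop above does for nested predicates.
        if item < 3 {
            queue.push(item + 1);
        }
    }
}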
let impl_obligations = traits::predicates_for_generics( - cause.clone(), self.param_env, &impl_bounds); + cause, self.param_env, &impl_bounds); debug!("impl_obligations={:?}", impl_obligations); impl_obligations.into_iter() @@ -1175,7 +1175,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { TraitCandidate(trait_ref) => { let predicate = trait_ref.to_predicate(); let obligation = - traits::Obligation::new(cause.clone(), self.param_env, predicate); + traits::Obligation::new(cause, self.param_env, predicate); if !self.predicate_may_hold(&obligation) { if self.probe(|_| self.select_trait_candidate(trait_ref).is_err()) { // This candidate's primary obligation doesn't even diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 1e18deb2a7d0d..845631475aeaf 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -4745,25 +4745,17 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } else if !self.check_for_cast(err, expr, found, expected) { let methods = self.get_conversion_methods(expr.span, expected, found); if let Ok(expr_text) = self.sess().source_map().span_to_snippet(expr.span) { - let suggestions = iter::repeat(expr_text).zip(methods.iter()) + let suggestions = iter::repeat(&expr_text).zip(methods.iter()) .filter_map(|(receiver, method)| { let method_call = format!(".{}()", method.ident); if receiver.ends_with(&method_call) { None // do not suggest code that is already there (#53348) } else { - /* - methods defined in `method_call_list` will overwrite - `.clone()` in copy of `receiver` - */ let method_call_list = [".to_vec()", ".to_string()"]; if receiver.ends_with(".clone()") - && method_call_list.contains(&method_call.as_str()){ - // created copy of `receiver` because we don't want other - // suggestion to get affected - let mut new_receiver = receiver.clone(); - let max_len = new_receiver.rfind(".").unwrap(); - new_receiver.truncate(max_len); - Some(format!("{}{}", new_receiver, method_call)) + && method_call_list.contains(&method_call.as_str()) { + let max_len = receiver.rfind(".").unwrap(); + Some(format!("{}{}", &receiver[..max_len], method_call)) } else { Some(format!("{}{}", receiver, method_call)) diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs index 26487605bac67..fde8648c0c40a 100644 --- a/src/librustdoc/clean/auto_trait.rs +++ b/src/librustdoc/clean/auto_trait.rs @@ -84,7 +84,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> { .into_iter() .chain(self.get_auto_trait_impl_for( def_id, - name.clone(), + name, generics.clone(), def_ctor, tcx.require_lang_item(lang_items::SyncTraitLangItem), diff --git a/src/librustdoc/clean/blanket_impl.rs b/src/librustdoc/clean/blanket_impl.rs index b6bc8d603d5ac..8246c7bab27af 100644 --- a/src/librustdoc/clean/blanket_impl.rs +++ b/src/librustdoc/clean/blanket_impl.rs @@ -50,6 +50,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> BlanketImplFinder <'a, 'tcx, 'rcx, 'cstore> { name: Option, ) -> Vec where F: Fn(DefId) -> Def { + debug!("get_blanket_impls(def_id={:?}, ...)", def_id); let mut impls = Vec::new(); if self.cx .tcx @@ -66,7 +67,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> BlanketImplFinder <'a, 'tcx, 'rcx, 'cstore> { } let ty = self.cx.tcx.type_of(def_id); let generics = self.cx.tcx.generics_of(def_id); - let real_name = name.clone().map(|name| Ident::from_str(&name)); + let real_name = name.map(|name| Ident::from_str(&name)); let param_env = self.cx.tcx.param_env(def_id); for &trait_def_id in self.cx.all_traits.iter() { if 
!self.cx.renderinfo.borrow().access_levels.is_doc_reachable(trait_def_id) || @@ -78,6 +79,8 @@ impl<'a, 'tcx, 'rcx, 'cstore> BlanketImplFinder <'a, 'tcx, 'rcx, 'cstore> { } self.cx.tcx.for_each_relevant_impl(trait_def_id, ty, |impl_def_id| { self.cx.tcx.infer_ctxt().enter(|infcx| { + debug!("get_blanet_impls: Considering impl for trait '{:?}' {:?}", + trait_def_id, impl_def_id); let t_generics = infcx.tcx.generics_of(impl_def_id); let trait_ref = infcx.tcx.impl_trait_ref(impl_def_id) .expect("Cannot get impl trait"); @@ -104,12 +107,12 @@ impl<'a, 'tcx, 'rcx, 'cstore> BlanketImplFinder <'a, 'tcx, 'rcx, 'cstore> { drop(obligations); debug!( - "invoking predicate_may_hold: {:?}", - trait_ref, + "invoking predicate_may_hold: param_env={:?}, trait_ref={:?}, ty={:?}", + param_env, trait_ref, ty ); let may_apply = match infcx.evaluate_obligation( &traits::Obligation::new( - cause.clone(), + cause, param_env, trait_ref.to_predicate(), ), @@ -117,6 +120,10 @@ impl<'a, 'tcx, 'rcx, 'cstore> BlanketImplFinder <'a, 'tcx, 'rcx, 'cstore> { Ok(eval_result) => eval_result.may_apply(), Err(traits::OverflowError) => true, // overflow doesn't mean yes *or* no }; + debug!("get_blanket_impls: found applicable impl: {}\ + for trait_ref={:?}, ty={:?}", + may_apply, trait_ref, ty); + if !may_apply { return } diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 9b2720717c9b3..88240e844edc2 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -3611,7 +3611,7 @@ impl ToSource for syntax_pos::Span { fn to_src(&self, cx: &DocContext) -> String { debug!("converting span {:?} to snippet", self.clean(cx)); let sn = match cx.sess().source_map().span_to_snippet(*self) { - Ok(x) => x.to_string(), + Ok(x) => x, Err(_) => String::new() }; debug!("got snippet {}", sn); diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 87b4527a2a739..6522261fe1e7c 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -28,9 +28,12 @@ use syntax::parse; use syntax_pos::{Span, FileName}; /// Highlights `src`, returning the HTML output. -pub fn render_with_highlighting(src: &str, class: Option<&str>, - extension: Option<&str>, - tooltip: Option<(&str, &str)>) -> String { +pub fn render_with_highlighting( + src: &str, + class: Option<&str>, + extension: Option<&str>, + tooltip: Option<(&str, &str)>, +) -> String { debug!("highlighting: ================\n{}\n==============", src); let sess = parse::ParseSess::new(FilePathMapping::empty()); let fm = sess.source_map().new_source_file(FileName::Custom("stdin".to_string()), @@ -384,9 +387,9 @@ impl Class { } fn write_header(class: Option<&str>, out: &mut dyn Write) -> io::Result<()> { - write!(out, "
\n", class.unwrap_or(""))
+    write!(out, "
\n", class.unwrap_or(""))
 }
 
 fn write_footer(out: &mut dyn Write) -> io::Result<()> {
-    write!(out, "
\n") + write!(out, "
\n") } diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index dda0f37c3f95b..8b35ede4a02c0 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -521,7 +521,7 @@ pub fn run(mut krate: clean::Crate, external_html: external_html.clone(), krate: krate.name.clone(), }, - css_file_extension: css_file_extension.clone(), + css_file_extension, created_dirs: Default::default(), sort_modules_alphabetically, themes, @@ -1343,7 +1343,7 @@ impl DocFolder for Cache { self.search_index.push(IndexItem { ty: item.type_(), name: s.to_string(), - path: path.join("::").to_string(), + path: path.join("::"), desc: plain_summary_line(item.doc_value()), parent, parent_idx: None, @@ -1709,6 +1709,7 @@ impl<'a> Settings<'a> { ("method-docs", "Auto-hide item methods' documentation", false), ("go-to-only-result", "Directly go to item in search if there is only one result", false), + ("line-numbers", "Show line numbers on code examples", false), ], root_path, suffix, @@ -2283,7 +2284,7 @@ fn document_short(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item, link format!("{} [Read more]({})", &plain_summary_line(Some(s)), naive_assoc_href(item, link)) } else { - plain_summary_line(Some(s)).to_string() + plain_summary_line(Some(s)) }; render_markdown(w, cx, &markdown, item.links(), prefix)?; } else if !prefix.is_empty() { @@ -2435,7 +2436,7 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, // (which is the position in the vector). indices.dedup_by_key(|i| (items[*i].def_id, if items[*i].name.as_ref().is_some() { - Some(full_path(cx, &items[*i]).clone()) + Some(full_path(cx, &items[*i])) } else { None }, diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js index 6307dda454da8..9d1a5c3837830 100644 --- a/src/librustdoc/html/static/main.js +++ b/src/librustdoc/html/static/main.js @@ -2093,6 +2093,7 @@ return wrapper; } + var hideItemDeclarations = getCurrentValue('rustdoc-item-declarations') === "false"; onEach(document.getElementsByClassName('docblock'), function(e) { if (hasClass(e, 'autohide')) { var wrap = e.previousElementSibling; @@ -2116,16 +2117,14 @@ } } if (e.parentNode.id === "main") { - var otherMessage; + var otherMessage = ''; var fontSize; var extraClass; - var show = true; if (hasClass(e, "type-decl")) { fontSize = "20px"; otherMessage = ' Show declaration'; - show = getCurrentValue('rustdoc-item-declarations') === "false"; - if (!show) { + if (hideItemDeclarations === false) { extraClass = 'collapsed'; } } else if (hasClass(e, "non-exhaustive")) { @@ -2142,8 +2141,12 @@ extraClass = "marg-left"; } - e.parentNode.insertBefore(createToggle(otherMessage, fontSize, extraClass, show), e); - if (otherMessage && show) { + e.parentNode.insertBefore(createToggle(otherMessage, + fontSize, + extraClass, + hideItemDeclarations), + e); + if (otherMessage.length > 0 && hideItemDeclarations === true) { collapseDocs(e.previousSibling.childNodes[0], "toggle"); } } @@ -2186,13 +2189,33 @@ }); } + // To avoid checking on "rustdoc-item-attributes" value on every loop... 
+ var itemAttributesFunc = function() {}; + if (getCurrentValue("rustdoc-item-attributes") !== "false") { + itemAttributesFunc = function(x) { + collapseDocs(x.previousSibling.childNodes[0], "toggle"); + }; + } onEach(document.getElementById('main').getElementsByClassName('attributes'), function(i_e) { i_e.parentNode.insertBefore(createToggleWrapper(toggle.cloneNode(true)), i_e); - if (getCurrentValue("rustdoc-item-attributes") !== "false") { - collapseDocs(i_e.previousSibling.childNodes[0], "toggle"); - } + itemAttributesFunc(i_e); }); + // To avoid checking on "rustdoc-line-numbers" value on every loop... + var lineNumbersFunc = function() {}; + if (getCurrentValue("rustdoc-line-numbers") === "true") { + lineNumbersFunc = function(x) { + var count = x.textContent.split('\n').length; + var elems = []; + for (var i = 0; i < count; ++i) { + elems.push(i + 1); + } + var node = document.createElement('pre'); + addClass(node, 'line-number'); + node.innerHTML = elems.join('\n'); + x.parentNode.insertBefore(node, x); + }; + } onEach(document.getElementsByClassName('rust-example-rendered'), function(e) { if (hasClass(e, 'compile_fail')) { e.addEventListener("mouseover", function(event) { @@ -2209,6 +2232,7 @@ e.previousElementSibling.childNodes[0].style.color = ''; }); } + lineNumbersFunc(e); }); function showModal(content) { diff --git a/src/librustdoc/html/static/rustdoc.css b/src/librustdoc/html/static/rustdoc.css index 63740ea729131..c0951d9f8401e 100644 --- a/src/librustdoc/html/static/rustdoc.css +++ b/src/librustdoc/html/static/rustdoc.css @@ -282,6 +282,24 @@ nav.sub { padding-left: 0; } +.example-wrap { + display: inline-flex; + width: 100%; +} + +.example-wrap > pre.line-number { + overflow: initial; + border: 1px solid; + border-top-left-radius: 5px; + border-bottom-left-radius: 5px; + padding: 13px 8px; + text-align: right; +} + +.example-wrap > pre.rust { + width: 100%; +} + #search { margin-left: 230px; position: relative; diff --git a/src/librustdoc/html/static/themes/dark.css b/src/librustdoc/html/static/themes/dark.css index 8e4890d905817..4a8950b236c62 100644 --- a/src/librustdoc/html/static/themes/dark.css +++ b/src/librustdoc/html/static/themes/dark.css @@ -233,6 +233,10 @@ pre.rust .question-mark { color: #ff9011; } +.example-wrap > pre.line-number { + border-color: #4a4949; +} + a.test-arrow { background-color: rgba(78, 139, 202, 0.2); } diff --git a/src/librustdoc/html/static/themes/light.css b/src/librustdoc/html/static/themes/light.css index 2742faab017bf..b3b0b6b2ea9e8 100644 --- a/src/librustdoc/html/static/themes/light.css +++ b/src/librustdoc/html/static/themes/light.css @@ -227,6 +227,10 @@ pre.rust .question-mark { color: #ff9011; } +.example-wrap > pre.line-number { + border-color: #c7c7c7; +} + a.test-arrow { background-color: rgba(78, 139, 202, 0.2); } diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 406180c09e8e4..45a0494849bfa 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -460,7 +460,7 @@ fn main_args(args: &[String]) -> isize { let externs = match parse_externs(&matches) { Ok(ex) => ex, Err(err) => { - diag.struct_err(&err.to_string()).emit(); + diag.struct_err(&err).emit(); return 1; } }; diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 85cd0f84e1cb0..2e6e76b5a4039 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -208,7 +208,7 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize, // needs to be the result of SourceMap::span_to_unmapped_path let input = 
config::Input::Str { name: filename.to_owned(), - input: test.to_owned(), + input: test, }; let outputs = OutputTypes::new(&[(OutputType::Exe, None)]); @@ -350,7 +350,7 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize, let newpath = { let path = env::var_os(var).unwrap_or(OsString::new()); let mut path = env::split_paths(&path).collect::>(); - path.insert(0, libdir.clone()); + path.insert(0, libdir); env::join_paths(path).unwrap() }; cmd.env(var, &newpath); diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 5bf1a7dd663cc..1701c8da2c5bd 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -247,8 +247,13 @@ impl AttrProcMacro for F /// Represents a thing that maps token trees to Macro Results pub trait TTMacroExpander { - fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream) - -> Box; + fn expand<'cx>( + &self, + ecx: &'cx mut ExtCtxt, + span: Span, + input: TokenStream, + def_span: Option, + ) -> Box; } pub type MacroExpanderFn = @@ -259,8 +264,13 @@ impl TTMacroExpander for F where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) -> Box { - fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream) - -> Box { + fn expand<'cx>( + &self, + ecx: &'cx mut ExtCtxt, + span: Span, + input: TokenStream, + _def_span: Option, + ) -> Box { struct AvoidInterpolatedIdents; impl Folder for AvoidInterpolatedIdents { diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 9e06384f5a804..33b651e1b3854 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -764,7 +764,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { edition) { dummy_span } else { - kind.make_from(expander.expand(self.cx, span, mac.node.stream())) + kind.make_from(expander.expand(self.cx, span, mac.node.stream(), None)) } } @@ -785,7 +785,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> { edition) { dummy_span } else { - kind.make_from(expander.expand(self.cx, span, mac.node.stream())) + kind.make_from(expander.expand( + self.cx, + span, + mac.node.stream(), + def_info.map(|(_, s)| s), + )) } } @@ -1036,10 +1041,28 @@ impl<'a> Parser<'a> { // Avoid emitting backtrace info twice. let def_site_span = self.span.with_ctxt(SyntaxContext::empty()); let mut err = self.diagnostic().struct_span_err(def_site_span, &msg); - let msg = format!("caused by the macro expansion here; the usage \ - of `{}!` is likely invalid in {} context", - macro_path, kind_name); - err.span_note(span, &msg).emit(); + err.span_label(span, "caused by the macro expansion here"); + let msg = format!( + "the usage of `{}!` is likely invalid in {} context", + macro_path, + kind_name, + ); + err.note(&msg); + let semi_span = self.sess.source_map().next_point(span); + + let semi_full_span = semi_span.to(self.sess.source_map().next_point(semi_span)); + match self.sess.source_map().span_to_snippet(semi_full_span) { + Ok(ref snippet) if &snippet[..] 
!= ";" && kind_name == "expression" => { + err.span_suggestion_with_applicability( + semi_span, + "you might be missing a semicolon here", + ";".to_owned(), + Applicability::MaybeIncorrect, + ); + } + _ => {} + } + err.emit(); } } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 805aa9bef227d..e9149054a55e8 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -50,7 +50,12 @@ pub struct ParserAnyMacro<'a> { impl<'a> ParserAnyMacro<'a> { pub fn make(mut self: Box>, kind: AstFragmentKind) -> AstFragment { let ParserAnyMacro { site_span, macro_ident, ref mut parser } = *self; - let fragment = panictry!(parser.parse_ast_fragment(kind, true)); + let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| { + if e.span.is_dummy() { // Get around lack of span in error (#30128) + e.set_span(site_span); + } + e + })); // We allow semicolons at the end of expressions -- e.g. the semicolon in // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`, @@ -74,16 +79,19 @@ struct MacroRulesMacroExpander { } impl TTMacroExpander for MacroRulesMacroExpander { - fn expand<'cx>(&self, - cx: &'cx mut ExtCtxt, - sp: Span, - input: TokenStream) - -> Box { + fn expand<'cx>( + &self, + cx: &'cx mut ExtCtxt, + sp: Span, + input: TokenStream, + def_span: Option, + ) -> Box { if !self.valid { return DummyResult::any(sp); } generic_extension(cx, sp, + def_span, self.name, input, &self.lhses, @@ -99,6 +107,7 @@ fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) { /// Given `lhses` and `rhses`, this is the new macro we create fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, sp: Span, + def_span: Option, name: ast::Ident, arg: TokenStream, lhses: &[quoted::TokenTree], @@ -133,8 +142,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, // Replace all the tokens for the corresponding positions in the macro, to maintain // proper positions in error reporting, while maintaining the macro_backtrace. 
if rhs_spans.len() == tts.len() { - tts = tts.map_enumerated(|i, tt| { - let mut tt = tt.clone(); + tts = tts.map_enumerated(|i, mut tt| { let mut sp = rhs_spans[i]; sp = sp.with_ctxt(tt.span().ctxt()); tt.set_span(sp); @@ -178,7 +186,14 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, } let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers")); - let mut err = cx.struct_span_err(best_fail_spot.substitute_dummy(sp), &best_fail_msg); + let span = best_fail_spot.substitute_dummy(sp); + let mut err = cx.struct_span_err(span, &best_fail_msg); + err.span_label(span, best_fail_msg); + if let Some(sp) = def_span { + if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() { + err.span_label(cx.source_map().def_span(sp), "when calling this macro"); + } + } // Check whether there's a missing comma in this macro call, like `println!("{}" a);` if let Some((arg, comma_span)) = arg.add_comma() { @@ -189,7 +204,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, }; match TokenTree::parse(cx, lhs_tt, arg.clone()) { Success(_) => { - if comma_span == DUMMY_SP { + if comma_span.is_dummy() { err.note("you might be missing a comma"); } else { err.span_suggestion_short_with_applicability( @@ -792,15 +807,15 @@ fn check_matcher_core(sess: &ParseSess, if let TokenTree::MetaVarDecl(_, ref name, ref frag_spec) = *token { for next_token in &suffix_first.tokens { match is_in_follow(next_token, &frag_spec.as_str()) { - Err((msg, help)) => { + IsInFollow::Invalid(msg, help) => { sess.span_diagnostic.struct_span_err(next_token.span(), &msg) .help(help).emit(); // don't bother reporting every source of // conflict for a particular element of `last`. continue 'each_last; } - Ok(true) => {} - Ok(false) => { + IsInFollow::Yes => {} + IsInFollow::No(ref possible) => { let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1 { @@ -809,15 +824,41 @@ fn check_matcher_core(sess: &ParseSess, "may be" }; - sess.span_diagnostic.span_err( - next_token.span(), + let sp = next_token.span(); + let mut err = sess.span_diagnostic.struct_span_err( + sp, &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \ is not allowed for `{frag}` fragments", name=name, frag=frag_spec, next=quoted_tt_to_string(next_token), - may_be=may_be) + may_be=may_be), ); + err.span_label( + sp, + format!("not allowed after `{}` fragments", frag_spec), + ); + let msg = "allowed there are: "; + match &possible[..] { + &[] => {} + &[t] => { + err.note(&format!( + "only {} is allowed after `{}` fragments", + t, + frag_spec, + )); + } + ts => { + err.note(&format!( + "{}{} or {}", + msg, + ts[..ts.len() - 1].iter().map(|s| *s) + .collect::>().join(", "), + ts[ts.len() - 1], + )); + } + } + err.emit(); } } } @@ -860,6 +901,12 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool { } } +enum IsInFollow { + Yes, + No(Vec<&'static str>), + Invalid(String, &'static str), +} + /// True if `frag` can legally be followed by the token `tok`. For /// fragments that can consume an unbounded number of tokens, `tok` /// must be within a well-defined follow set. This is intended to @@ -868,81 +915,99 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool { /// break macros that were relying on that binary operator as a /// separator. // when changing this do not forget to update doc/book/macros.md! 
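(Aside on the `IsInFollow` refactor above: the old `Result<bool, (String, &'static str)>` is replaced by a three-variant enum so the negative case can carry the tokens that would have been accepted, which the caller turns into an "allowed there are: …" note. The sketch below models that shape with simplified names and a trimmed-down `Invalid` payload; it is not the real parser code.)

```rust
// Tri-state answer: the follow token is fine, it is not (and we can say what
// would have been allowed), or the fragment specifier itself was invalid.
enum IsInFollow {
    Yes,
    No(Vec<&'static str>),
    Invalid(String),
}

fn check(frag: &str, next: &str) -> IsInFollow {
    match frag {
        "expr" => match next {
            "=>" | "," | ";" => IsInFollow::Yes,
            _ => IsInFollow::No(vec!["`=>`", "`,`", "`;`"]),
        },
        "ident" => IsInFollow::Yes,
        other => IsInFollow::Invalid(format!("invalid fragment specifier `{}`", other)),
    }
}

fn main() {
    match check("expr", "+") {
        IsInFollow::Yes => println!("ok"),
        IsInFollow::No(allowed) => {
            println!("not allowed; allowed there are: {}", allowed.join(", "))
        }
        IsInFollow::Invalid(msg) => println!("{}", msg),
    }
}
```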
-fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result { +fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> IsInFollow { use self::quoted::TokenTree; if let TokenTree::Token(_, token::CloseDelim(_)) = *tok { // closing a token tree can never be matched by any fragment; // iow, we always require that `(` and `)` match, etc. - Ok(true) + IsInFollow::Yes } else { match frag { "item" => { // since items *must* be followed by either a `;` or a `}`, we can // accept anything after them - Ok(true) + IsInFollow::Yes }, "block" => { // anything can follow block, the braces provide an easy boundary to // maintain - Ok(true) + IsInFollow::Yes }, - "stmt" | "expr" => match *tok { - TokenTree::Token(_, ref tok) => match *tok { - FatArrow | Comma | Semi => Ok(true), - _ => Ok(false) - }, - _ => Ok(false), + "stmt" | "expr" => { + let tokens = vec!["`=>`", "`,`", "`;`"]; + match *tok { + TokenTree::Token(_, ref tok) => match *tok { + FatArrow | Comma | Semi => IsInFollow::Yes, + _ => IsInFollow::No(tokens), + }, + _ => IsInFollow::No(tokens), + } }, - "pat" => match *tok { - TokenTree::Token(_, ref tok) => match *tok { - FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true), - Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true), - _ => Ok(false) - }, - _ => Ok(false), + "pat" => { + let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"]; + match *tok { + TokenTree::Token(_, ref tok) => match *tok { + FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes, + Ident(i, false) if i.name == "if" || i.name == "in" => IsInFollow::Yes, + _ => IsInFollow::No(tokens), + }, + _ => IsInFollow::No(tokens), + } }, - "path" | "ty" => match *tok { - TokenTree::Token(_, ref tok) => match *tok { - OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | - Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi | - BinOp(token::Or) => Ok(true), - Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true), - _ => Ok(false) - }, - TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true), - _ => Ok(false), + "path" | "ty" => { + let tokens = vec![ + "`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`", + "`where`", + ]; + match *tok { + TokenTree::Token(_, ref tok) => match *tok { + OpenDelim(token::DelimToken::Brace) | + OpenDelim(token::DelimToken::Bracket) | + Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi | + BinOp(token::Or) => IsInFollow::Yes, + Ident(i, false) if i.name == "as" || i.name == "where" => IsInFollow::Yes, + _ => IsInFollow::No(tokens), + }, + TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => IsInFollow::Yes, + _ => IsInFollow::No(tokens), + } }, "ident" | "lifetime" => { // being a single token, idents and lifetimes are harmless - Ok(true) + IsInFollow::Yes }, "literal" => { // literals may be of a single token, or two tokens (negative numbers) - Ok(true) + IsInFollow::Yes }, "meta" | "tt" => { // being either a single token or a delimited sequence, tt is // harmless - Ok(true) + IsInFollow::Yes }, "vis" => { // Explicitly disallow `priv`, on the off chance it comes back. 
+ let tokens = vec!["`,`", "an ident", "a type"]; match *tok { TokenTree::Token(_, ref tok) => match *tok { - Comma => Ok(true), - Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true), - ref tok => Ok(tok.can_begin_type()) + Comma => IsInFollow::Yes, + Ident(i, is_raw) if is_raw || i.name != "priv" => IsInFollow::Yes, + ref tok => if tok.can_begin_type() { + IsInFollow::Yes + } else { + IsInFollow::No(tokens) + } }, TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident" || frag.name == "ty" - || frag.name == "path" => Ok(true), - _ => Ok(false) + || frag.name == "path" => IsInFollow::Yes, + _ => IsInFollow::No(tokens), } }, - "" => Ok(true), // keywords::Invalid - _ => Err((format!("invalid fragment specifier `{}`", frag), - VALID_FRAGMENT_NAMES_MSG)) + "" => IsInFollow::Yes, // keywords::Invalid + _ => IsInFollow::Invalid(format!("invalid fragment specifier `{}`", frag), + VALID_FRAGMENT_NAMES_MSG), } } } diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index ae486158fee69..3d897d17e0b5c 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -219,9 +219,9 @@ impl Add for LockstepIterSize { LockstepIterSize::Unconstrained => other, LockstepIterSize::Contradiction(_) => self, LockstepIterSize::Constraint(l_len, ref l_id) => match other { - LockstepIterSize::Unconstrained => self.clone(), + LockstepIterSize::Unconstrained => self, LockstepIterSize::Contradiction(_) => other, - LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self.clone(), + LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self, LockstepIterSize::Constraint(r_len, r_id) => { let msg = format!("inconsistent lockstep iteration: \ '{}' has {} items, but '{}' has {}", diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 9f554a90afbc4..8ff4b0d025c8d 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -346,7 +346,7 @@ fn mk_main(cx: &mut TestCtxt) -> P { test_runner.span = sp; - let test_main_path_expr = ecx.expr_path(test_runner.clone()); + let test_main_path_expr = ecx.expr_path(test_runner); let call_test_main = ecx.expr_call(sp, test_main_path_expr, vec![mk_tests_slice(cx)]); let call_test_main = ecx.stmt_expr(call_test_main); diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs index 3705a245584d0..32a58de3529fd 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_ord.rs @@ -178,7 +178,7 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P< cx.expr_match(span, new, vec![eq_arm, neq_arm]) }, - equals_expr.clone(), + equals_expr, Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| { if self_args.len() != 2 { cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`") diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index 973ad631b8343..55b3928d68eee 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -75,7 +75,7 @@ impl MultiItemModifier for ProcMacroDerive { // Mark attributes as known, and used. 
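(Aside on a recurring cleanup in this patch — `cause.clone()`, `name.clone()`, `test_runner.clone()`, `equals_expr.clone()` and friends becoming plain moves: when the last use of a value is to pass it by value, cloning only adds work. A trivial illustration with a hypothetical `consume` helper:)

```rust
// Hypothetical helper that takes ownership of its argument.
fn consume(s: String) -> usize {
    s.len()
}

fn main() {
    let cause = String::from("obligation cause");
    // Before: `consume(cause.clone())` cloned even though `cause` was never
    // used again. After: just move it, as the diff does in the hunks above.
    let n = consume(cause);
    assert_eq!(n, 16);
}
```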
MarkAttrs(&self.attrs).visit_item(&item); - let input = __internal::new_token_stream(ecx.resolver.eliminate_crate_var(item.clone())); + let input = __internal::new_token_stream(ecx.resolver.eliminate_crate_var(item)); let res = __internal::set_sess(ecx, || { let inner = self.inner; panic::catch_unwind(panic::AssertUnwindSafe(|| inner(input))) diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 45eaf1d3190f2..639155636edfa 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -612,6 +612,17 @@ impl MultiSpan { &self.primary_spans } + /// Returns `true` if this contains only a dummy primary span with any hygienic context. + pub fn is_dummy(&self) -> bool { + let mut is_dummy = true; + for span in &self.primary_spans { + if !span.is_dummy() { + is_dummy = false; + } + } + is_dummy + } + /// Replaces all occurrences of one Span with another. Used to move Spans in areas that don't /// display well (like std macros). Returns true if replacements occurred. pub fn replace(&mut self, before: Span, after: Span) -> bool { diff --git a/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs index ac39118c5f1e0..27169299c8a69 100644 --- a/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs +++ b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs @@ -38,7 +38,8 @@ impl TTMacroExpander for Expander { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, sp: Span, - _: TokenStream) -> Box { + _: TokenStream, + _: Option) -> Box { let args = self.args.iter().map(|i| pprust::meta_list_item_to_string(i)) .collect::>().join(", "); MacEager::expr(ecx.expr_str(sp, Symbol::intern(&args))) diff --git a/src/test/rustdoc/deprecated-impls.rs b/src/test/rustdoc/deprecated-impls.rs index bcf0645766b30..3fb83bff91604 100644 --- a/src/test/rustdoc/deprecated-impls.rs +++ b/src/test/rustdoc/deprecated-impls.rs @@ -109,7 +109,7 @@ impl Bar for Foo2 { // @has - '//*[@class="stab deprecated"]' 'Deprecated since 1.0.5: fn_def_with_doc' // @has - 'fn_def_with_doc short' - // @!has - 'fn_def_with full' + // @!has - 'fn_def_with_doc full' fn fn_def_with_doc() {} // @has - '//*[@class="stab deprecated"]' 'Deprecated since 1.0.6: fn_def_without_doc' diff --git a/src/test/rustdoc/escape-rust-expr.rs b/src/test/rustdoc/dont-show-const-contents.rs similarity index 64% rename from src/test/rustdoc/escape-rust-expr.rs rename to src/test/rustdoc/dont-show-const-contents.rs index 4594eb95ea18e..1392c62b4abb1 100644 --- a/src/test/rustdoc/escape-rust-expr.rs +++ b/src/test/rustdoc/dont-show-const-contents.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// Test that we HTML-escape Rust expressions, where HTML special chars -// can occur, and we know it's definitely not markup. +// Test that the contents of constants are not displayed as part of the +// documentation. -// @!has escape_rust_expr/constant.CONST_S.html '//pre[@class="rust const"]' '"
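(Aside on the new `MultiSpan::is_dummy` above: it reports whether every primary span is a dummy span. The stand-alone model below uses simplified `Span`/`MultiSpan` types, not the real `syntax_pos` ones, and writes the same check with `Iterator::all`, which is equivalent to the loop-and-flag form in the patch, including returning `true` for an empty span list.)

```rust
#[derive(Clone, Copy)]
struct Span { lo: u32, hi: u32 }

impl Span {
    // Stand-in for the dummy-span check on the real type.
    fn is_dummy(self) -> bool { self.lo == 0 && self.hi == 0 }
}

struct MultiSpan { primary_spans: Vec<Span> }

impl MultiSpan {
    // True only if no real primary span is present.
    fn is_dummy(&self) -> bool {
        self.primary_spans.iter().all(|sp| sp.is_dummy())
    }
}

fn main() {
    let dummy = Span { lo: 0, hi: 0 };
    let real = Span { lo: 3, hi: 7 };
    assert!(MultiSpan { primary_spans: vec![dummy] }.is_dummy());
    assert!(!MultiSpan { primary_spans: vec![dummy, real] }.is_dummy());
}
```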