diff --git a/src/auditwheel/audit.rs b/src/auditwheel/audit.rs
index 930899958..3235f4d5c 100644
--- a/src/auditwheel/audit.rs
+++ b/src/auditwheel/audit.rs
@@ -107,7 +107,7 @@ fn find_incompliant_symbols(
         if sym.st_type() == STT_FUNC {
             let name = strtab.get_at(sym.st_name).unwrap_or("BAD NAME");
             for symbol_version in symbol_versions {
-                if name.ends_with(&format!("@{}", symbol_version)) {
+                if name.ends_with(&format!("@{symbol_version}")) {
                     symbols.push(name.to_string());
                 }
             }
@@ -184,7 +184,7 @@ fn policy_is_satisfied(
            .collect();
        let offending_symbol_versions: Vec<String> = offending_versions
            .iter()
-            .map(|v| format!("{}_{}", name, v))
+            .map(|v| format!("{name}_{v}"))
            .collect();
        let offending_symbols = find_incompliant_symbols(elf, &offending_symbol_versions)?;
        let offender = if offending_symbols.is_empty() {
@@ -242,8 +242,7 @@ fn get_default_platform_policies() -> Vec<Policy> {
        return MUSLLINUX_POLICIES
            .iter()
            .filter(|policy| {
-                policy.name == "linux"
-                    || policy.name == format!("musllinux_{}_{}", major, minor)
+                policy.name == "linux" || policy.name == format!("musllinux_{major}_{minor}")
            })
            .cloned()
            .collect();
@@ -290,9 +289,7 @@ pub fn auditwheel_rs(
        Some(PlatformTag::Musllinux { x, y }) => MUSLLINUX_POLICIES
            .clone()
            .into_iter()
-            .filter(|policy| {
-                policy.name == "linux" || policy.name == format!("musllinux_{}_{}", x, y)
-            })
+            .filter(|policy| policy.name == "linux" || policy.name == format!("musllinux_{x}_{y}"))
            .map(|mut policy| {
                policy.fixup_musl_libc_so_name(target.target_arch());
                policy
@@ -349,8 +346,7 @@ pub fn auditwheel_rs(
        if policy.priority < highest_policy.priority && highest_policy.name != "manylinux_2_5" {
            println!(
                "📦 Wheel is eligible for a higher priority tag. \
-                You requested {} but this wheel is eligible for {}",
-                policy, highest_policy,
+                You requested {policy} but this wheel is eligible for {highest_policy}",
            );
        }
    }
@@ -410,7 +406,7 @@ pub fn get_sysroot_path(target: &Target) -> Result<PathBuf> {
        .target(target_triple);
    let compiler = build
        .try_get_compiler()
-        .with_context(|| format!("Failed to get compiler for {}", target_triple))?;
+        .with_context(|| format!("Failed to get compiler for {target_triple}"))?;
    // Only GNU like compilers support `--print-sysroot`
    if !compiler.is_like_gnu() {
        return Ok(PathBuf::from("/"));
@@ -450,7 +446,7 @@ pub fn get_policy_and_libs(
    auditwheel_rs(artifact, target, platform_tag, allow_linking_libpython).with_context(
        || {
            if let Some(platform_tag) = platform_tag {
-                format!("Error ensuring {} compliance", platform_tag)
+                format!("Error ensuring {platform_tag} compliance")
            } else {
                "Error checking for manylinux/musllinux compliance".to_string()
            }
@@ -462,7 +458,7 @@ pub fn get_policy_and_libs(
    let external_libs = find_external_libs(&artifact.path, &policy, sysroot, ld_paths)
        .with_context(|| {
            if let Some(platform_tag) = platform_tag {
-                format!("Error repairing wheel for {} compliance", platform_tag)
+                format!("Error repairing wheel for {platform_tag} compliance")
            } else {
                "Error repairing wheel for manylinux/musllinux compliance".to_string()
            }
diff --git a/src/auditwheel/platform_tag.rs b/src/auditwheel/platform_tag.rs
index cb9a198e3..dacc6313f 100644
--- a/src/auditwheel/platform_tag.rs
+++ b/src/auditwheel/platform_tag.rs
@@ -85,8 +85,8 @@ impl PlatformTag {
 impl fmt::Display for PlatformTag {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match *self {
-            PlatformTag::Manylinux { x, y } => write!(f, "manylinux_{}_{}", x, y),
-            PlatformTag::Musllinux { x, y } => write!(f, "musllinux_{}_{}", x, y),
+            PlatformTag::Manylinux { x, y } => write!(f, "manylinux_{x}_{y}"),
+            PlatformTag::Musllinux { x, y } => write!(f, "musllinux_{x}_{y}"),
             PlatformTag::Linux => write!(f, "linux"),
         }
     }
diff --git a/src/build_context.rs b/src/build_context.rs
index ab2f2f2f5..c9e4a0d15 100644
--- a/src/build_context.rs
+++ b/src/build_context.rs
@@ -73,9 +73,9 @@ impl BridgeModel {
 impl Display for BridgeModel {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match self {
-            BridgeModel::Bin(Some((name, _))) => write!(f, "{} bin", name),
+            BridgeModel::Bin(Some((name, _))) => write!(f, "{name} bin"),
             BridgeModel::Bin(None) => write!(f, "bin"),
-            BridgeModel::Bindings(name, _) => write!(f, "{}", name),
+            BridgeModel::Bindings(name, _) => write!(f, "{name}"),
             BridgeModel::BindingsAbi3(..) => write!(f, "pyo3"),
             BridgeModel::Cffi => write!(f, "cffi"),
             BridgeModel::UniFfi => write!(f, "uniffi"),
@@ -288,8 +288,7 @@ impl BuildContext {
            && !python_interpreter.support_portable_wheels()
        {
            println!(
-                "🐍 Skipping auditwheel because {} does not support manylinux/musllinux wheels",
-                python_interpreter
+                "🐍 Skipping auditwheel because {python_interpreter} does not support manylinux/musllinux wheels"
            );
            return Ok((Policy::default(), Vec::new()));
        }
@@ -390,10 +389,10 @@ impl BuildContext {
        // Generate a new soname with a short hash
        let short_hash = &hash_file(&lib_path)?[..8];
        let (file_stem, file_ext) = lib.name.split_once('.').unwrap();
-        let new_soname = if !file_stem.ends_with(&format!("-{}", short_hash)) {
-            format!("{}-{}.{}", file_stem, short_hash, file_ext)
+        let new_soname = if !file_stem.ends_with(&format!("-{short_hash}")) {
+            format!("{file_stem}-{short_hash}.{file_ext}")
        } else {
-            format!("{}.{}", file_stem, file_ext)
+            format!("{file_stem}.{file_ext}")
        };

        // Copy the original lib to a tmpdir and modify some of its properties
@@ -509,7 +508,7 @@ impl BuildContext {
        let platform = self
            .target
            .get_platform_tag(platform_tags, self.universal2)?;
-        let tag = format!("cp{}{}-abi3-{}", major, min_minor, platform);
+        let tag = format!("cp{major}{min_minor}-abi3-{platform}");

        let mut writer = WheelWriter::new(
            &tag,
@@ -535,7 +534,7 @@ impl BuildContext {
        self.add_pth(&mut writer)?;
        add_data(&mut writer, self.project_layout.data.as_deref())?;
        let wheel_path = writer.finish()?;
-        Ok((wheel_path, format!("cp{}{}", major, min_minor)))
+        Ok((wheel_path, format!("cp{major}{min_minor}")))
    }

    /// For abi3 we only need to build a single wheel and we don't even need a python interpreter
diff --git a/src/build_options.rs b/src/build_options.rs
index ca531bb60..b712d429b 100644
--- a/src/build_options.rs
+++ b/src/build_options.rs
@@ -239,10 +239,7 @@ impl BuildOptions {
                min_python_minor,
            )?;
            let host_python = &host_interpreters[0];
-            println!(
-                "🐍 Using host {} for cross-compiling preparation",
-                host_python
-            );
+            println!("🐍 Using host {host_python} for cross-compiling preparation");
            // pyo3
            env::set_var("PYO3_PYTHON", &host_python.executable);
            // rust-cpython, and legacy pyo3 versions
@@ -322,14 +319,14 @@ impl BuildOptions {
                    .map(ToString::to_string)
                    .collect::<Vec<_>>()
                    .join(", ");
-                println!("🐍 Found {}", interpreters_str);
+                println!("🐍 Found {interpreters_str}");
                Ok(interpreters)
            }
            BridgeModel::Cffi => {
                let interpreter =
                    find_single_python_interpreter(bridge, interpreter, target, "cffi")?;
-                println!("🐍 Using {} to generate the cffi bindings", interpreter);
+                println!("🐍 Using {interpreter} to generate the cffi bindings");
                Ok(vec![interpreter])
            }
            BridgeModel::Bin(None) | BridgeModel::UniFfi => Ok(vec![]),
@@ -366,7 +363,7 @@ impl BuildOptions {
                soabi: None,
            }])
        } else if let Some(interp) = interpreters.get(0) {
-            println!("🐍 Using {} to generate to link bindings (With abi3, an interpreter is only required on windows)", interp);
+            println!("🐍 Using {interp} to generate to link bindings (With abi3, an interpreter is only required on windows)");
            Ok(interpreters)
        } else if generate_import_lib {
            println!("🐍 Not using a specific python interpreter (Automatically generating windows import library)");
@@ -623,10 +620,7 @@ impl BuildOptions {

        for platform_tag in &platform_tags {
            if !platform_tag.is_supported() {
-                eprintln!(
-                    "⚠️ Warning: {} is unsupported by the Rust compiler.",
-                    platform_tag
-                );
+                eprintln!("⚠️ Warning: {platform_tag} is unsupported by the Rust compiler.");
            }
        }

@@ -860,7 +854,7 @@ fn has_abi3(cargo_metadata: &Metadata) -> Result<Option<(u8, u8)>> {
                ))
            })
            .collect::<Result<Vec<_>>>()
-            .context(format!("Bogus {} cargo features", lib))?
+            .context(format!("Bogus {lib} cargo features"))?
            .into_iter()
            .min();
        if abi3_selected && min_abi3_version.is_none() {
@@ -1009,7 +1003,7 @@ pub fn find_bridge(cargo_metadata: &Metadata, bridge: Option<&str>) -> Result
 ) -> Result
        let ver_major = ver_major.parse::<usize>().with_context(|| {
-            format!(
-                "Invalid python interpreter major version '{}', expect a digit",
-                ver_major
-            )
+            format!("Invalid python interpreter major version '{ver_major}', expect a digit")
        })?;
        let ver_minor = ver_minor.parse::<usize>().with_context(|| {
-            format!(
-                "Invalid python interpreter minor version '{}', expect a digit",
-                ver_minor
-            )
+            format!("Invalid python interpreter minor version '{ver_minor}', expect a digit")
        })?;
        let sysconfig = InterpreterConfig::lookup(
            target.target_os(),
@@ -1181,10 +1165,7 @@ fn find_interpreter_in_sysconfig(
            (ver_major, ver_minor),
        )
        .with_context(|| {
-            format!(
-                "Failed to find a {} {}.{} interpreter",
-                python_impl, ver_major, ver_minor
-            )
+            format!("Failed to find a {python_impl} {ver_major}.{ver_minor} interpreter")
        })?;
        debug!(
            "Found {} {}.{} in bundled sysconfig",
diff --git a/src/compile.rs b/src/compile.rs
index 25364b15f..8873aa2e8 100644
--- a/src/compile.rs
+++ b/src/compile.rs
@@ -36,8 +36,8 @@ pub struct BuildArtifact {
 /// Builds the rust crate into a native module (i.e. an .so or .dll) for a
 /// specific python version. Returns a mapping from crate type (e.g. cdylib)
 /// to artifact location.
-pub fn compile<'a>(
-    context: &'a BuildContext,
+pub fn compile(
+    context: &BuildContext,
     python_interpreter: Option<&PythonInterpreter>,
     targets: &[CompileTarget],
 ) -> Result<Vec<HashMap<String, BuildArtifact>>> {
@@ -49,8 +49,8 @@ pub fn compile<'a>(
 }

 /// Build an universal2 wheel for macos which contains both an x86 and an aarch64 binary
-fn compile_universal2<'a>(
-    context: &'a BuildContext,
+fn compile_universal2(
+    context: &BuildContext,
     python_interpreter: Option<&PythonInterpreter>,
     targets: &[CompileTarget],
 ) -> Result<Vec<HashMap<String, BuildArtifact>>> {
@@ -214,13 +214,13 @@ fn compile_target(
                // See https://github.com/PyO3/setuptools-rust/issues/106 for detail
                let module_name = &context.module_name;
                let so_filename = match bridge_model {
-                    BridgeModel::BindingsAbi3(..) => format!("{base}.abi3.so", base = module_name),
+                    BridgeModel::BindingsAbi3(..) => format!("{module_name}.abi3.so"),
=> format!("{module_name}.abi3.so"), _ => python_interpreter .expect("missing python interpreter for non-abi3 wheel build") .get_library_name(module_name), }; let macos_dylib_install_name = - format!("link-args=-Wl,-install_name,@rpath/{}", so_filename); + format!("link-args=-Wl,-install_name,@rpath/{so_filename}"); let mac_args = [ "-C".to_string(), "link-arg=-undefined".to_string(), @@ -299,7 +299,7 @@ fn compile_target( let zig_triple = if target.is_linux() && !target.is_musl_target() { match context.platform_tag.iter().find(|tag| tag.is_manylinux()) { Some(PlatformTag::Manylinux { x, y }) => { - format!("{}.{}.{}", target_triple, x, y) + format!("{target_triple}.{x}.{y}") } _ => target_triple.to_string(), } @@ -411,10 +411,9 @@ fn compile_target( use crate::target::rustc_macosx_target_version; let (major, minor) = rustc_macosx_target_version(target_triple); - build_command.env("MACOSX_DEPLOYMENT_TARGET", format!("{}.{}", major, minor)); + build_command.env("MACOSX_DEPLOYMENT_TARGET", format!("{major}.{minor}")); eprintln!( - "💻 Using `MACOSX_DEPLOYMENT_TARGET={}.{}` for {} by default", - major, minor, target_triple + "💻 Using `MACOSX_DEPLOYMENT_TARGET={major}.{minor}` for {target_triple} by default" ); } @@ -452,8 +451,7 @@ fn compile_target( if should_warn { // This is a spurious error I don't really understand eprintln!( - "⚠️ Warning: The package {} wasn't listed in `cargo metadata`", - package_id + "⚠️ Warning: The package {package_id} wasn't listed in `cargo metadata`" ); } continue; @@ -522,7 +520,7 @@ fn compile_target( /// /// Currently the check is only run on linux, macOS and Windows pub fn warn_missing_py_init(artifact: &Path, module_name: &str) -> Result<()> { - let py_init = format!("PyInit_{}", module_name); + let py_init = format!("PyInit_{module_name}"); let mut fd = File::open(artifact)?; let mut buffer = Vec::new(); fd.read_to_end(&mut buffer)?; @@ -581,10 +579,9 @@ pub fn warn_missing_py_init(artifact: &Path, module_name: &str) -> Result<()> { if !found { eprintln!( - "⚠️ Warning: Couldn't find the symbol `{}` in the native library. \ + "⚠️ Warning: Couldn't find the symbol `{py_init}` in the native library. \ Python will fail to import this module. 
\ - If you're using pyo3, check that `#[pymodule]` uses `{}` as module name", - py_init, module_name + If you're using pyo3, check that `#[pymodule]` uses `{module_name}` as module name" ) } diff --git a/src/cross_compile.rs b/src/cross_compile.rs index 188fb0729..8e73c816d 100644 --- a/src/cross_compile.rs +++ b/src/cross_compile.rs @@ -152,7 +152,7 @@ fn search_lib_dir(path: impl AsRef, target: &Target) -> Vec { let (cpython_version_pat, pypy_version_pat) = if let Some(v) = env::var_os("PYO3_CROSS_PYTHON_VERSION").map(|s| s.into_string().unwrap()) { - (format!("python{}", v), format!("pypy{}", v)) + (format!("python{v}"), format!("pypy{v}")) } else { ("python3.".into(), "pypy3.".into()) }; diff --git a/src/develop.rs b/src/develop.rs index 7443cdb32..5222f555b 100644 --- a/src/develop.rs +++ b/src/develop.rs @@ -86,8 +86,8 @@ pub fn develop( // Remove extra marker to make it installable with pip for extra in &extras { pkg = pkg - .replace(&format!(" and extra == '{}'", extra), "") - .replace(&format!("; extra == '{}'", extra), ""); + .replace(&format!(" and extra == '{extra}'"), "") + .replace(&format!("; extra == '{extra}'"), ""); } pkg })); @@ -114,7 +114,7 @@ pub fn develop( .args(command) .arg(dunce::simplified(filename)) .output() - .context(format!("pip install failed with {:?}", python))?; + .context(format!("pip install failed with {python:?}"))?; if !output.status.success() { bail!( "pip install in {} failed running {:?}: {}\n--- Stdout:\n{}\n--- Stderr:\n{}\n---\n", diff --git a/src/main.rs b/src/main.rs index 8f1dea723..5e2d08146 100644 --- a/src/main.rs +++ b/src/main.rs @@ -234,7 +234,7 @@ fn pep517(subcommand: Pep517Command) -> Result<()> { let platform = context .target .get_platform_tag(&[PlatformTag::Linux], context.universal2)?; - vec![format!("cp{}{}-abi3-{}", major, minor, platform)] + vec![format!("cp{major}{minor}-abi3-{platform}")] } BridgeModel::Bin(None) | BridgeModel::Cffi | BridgeModel::UniFfi => { context @@ -355,7 +355,7 @@ fn run() -> Result<()> { }; println!("🐍 {} python interpreter found:", found.len()); for interpreter in found { - println!(" - {}", interpreter); + println!(" - {interpreter}"); } } Opt::Develop { @@ -429,7 +429,7 @@ fn main() { if let Err(e) = run() { eprintln!("💥 maturin failed"); for cause in e.chain() { - eprintln!(" Caused by: {}", cause); + eprintln!(" Caused by: {cause}"); } std::process::exit(1); } diff --git a/src/metadata.rs b/src/metadata.rs index 7a901d57f..482e24c31 100644 --- a/src/metadata.rs +++ b/src/metadata.rs @@ -180,7 +180,7 @@ impl Metadata21 { for author in authors { match (&author.name, &author.email) { (Some(name), Some(email)) => { - emails.push(format!("{} <{}>", name, email)); + emails.push(format!("{name} <{email}>")); } (Some(name), None) => { names.push(name.as_str()); @@ -205,7 +205,7 @@ impl Metadata21 { for maintainer in maintainers { match (&maintainer.name, &maintainer.email) { (Some(name), Some(email)) => { - emails.push(format!("{} <{}>", name, email)); + emails.push(format!("{name} <{email}>")); } (Some(name), None) => { names.push(name.as_str()); @@ -248,9 +248,9 @@ impl Metadata21 { // optional dependency already has environment markers let new_marker = format!("({}) and extra == '{}'", marker.trim(), extra); - format!("{}; {}", dep, new_marker) + format!("{dep}; {new_marker}") } else { - format!("{}; extra == '{}'", dep, extra) + format!("{dep}; extra == '{extra}'") }; self.requires_dist.push(dist); } @@ -433,7 +433,7 @@ impl Metadata21 { // "A string containing a browsable URL for the project and 
a label for it, separated by a comma." // `Project-URL: Bug Tracker, http://bitbucket.org/tarek/distribute/issues/` for (key, value) in self.project_url.iter() { - fields.push(("Project-URL", format!("{}, {}", key, value))) + fields.push(("Project-URL", format!("{key}, {value}"))) } // Description shall be last, so we can ignore RFC822 and just put the description @@ -464,11 +464,11 @@ impl Metadata21 { }; for (key, value) in fields { - writeln!(out, "{}: {}", key, value)?; + writeln!(out, "{key}: {value}")?; } if let Some(body) = body { - writeln!(out, "\n{}", body)?; + writeln!(out, "\n{body}")?; } Ok(out) diff --git a/src/module_writer.rs b/src/module_writer.rs index ba41d0fc7..e106a82f3 100644 --- a/src/module_writer.rs +++ b/src/module_writer.rs @@ -137,7 +137,7 @@ impl PathWriter { for (filename, hash, len) in self.record { buffer - .write_all(format!("{},sha256={},{}\n", filename, hash, len).as_bytes()) + .write_all(format!("{filename},sha256={hash},{len}\n").as_bytes()) .context(format!( "Failed to write to file at {}", record_file.display() @@ -299,7 +299,7 @@ impl WheelWriter { let absolute_path = project_layout.python_dir.normalize()?.into_path_buf(); if let Some(python_path) = absolute_path.to_str() { let name = metadata21.get_distribution_escaped(); - let target = format!("{}.pth", name); + let target = format!("{name}.pth"); debug!("Adding {} from {}", target, python_path); self.add_bytes(target, python_path.as_bytes())?; } else { @@ -349,11 +349,11 @@ impl WheelWriter { self.zip.start_file(&record_filename, options)?; for (filename, hash, len) in self.record { self.zip - .write_all(format!("{},sha256={},{}\n", filename, hash, len).as_bytes())?; + .write_all(format!("{filename},sha256={hash},{len}\n").as_bytes())?; } // Write the record for the RECORD file itself self.zip - .write_all(format!("{},,\n", record_filename).as_bytes())?; + .write_all(format!("{record_filename},,\n").as_bytes())?; self.zip.finish()?; Ok(self.wheel_path) @@ -487,7 +487,7 @@ Root-Is-Purelib: false ); for tag in tags { - writeln!(wheel_file, "Tag: {}", tag)?; + writeln!(wheel_file, "Tag: {tag}")?; } Ok(wheel_file) @@ -500,7 +500,7 @@ fn entry_points_txt( ) -> String { entrypoints .iter() - .fold(format!("[{}]\n", entry_type), |text, (k, v)| { + .fold(format!("[{entry_type}]\n"), |text, (k, v)| { text + k + "=" + v + "\n" }) } @@ -712,10 +712,10 @@ pub fn write_bindings_module( // abi3 None => { if target.is_unix() { - format!("{base}.abi3.so", base = ext_name) + format!("{ext_name}.abi3.so") } else { // Apparently there is no tag for abi3 on windows - format!("{base}.pyd", base = ext_name) + format!("{ext_name}.pyd") } } }; @@ -756,16 +756,15 @@ pub fn write_bindings_module( __doc__ = {module_name}.__doc__ if hasattr({module_name}, "__all__"): - __all__ = {module_name}.__all__"#, - module_name = module_name + __all__ = {module_name}.__all__"# ) .as_bytes(), )?; let type_stub = project_layout .rust_module - .join(format!("{}.pyi", module_name)); + .join(format!("{module_name}.pyi")); if type_stub.exists() { - println!("📖 Found type stub file at {}.pyi", module_name); + println!("📖 Found type stub file at {module_name}.pyi"); writer.add_file(&module.join("__init__.pyi"), type_stub)?; writer.add_bytes(&module.join("py.typed"), b"")?; } @@ -823,9 +822,9 @@ pub fn write_cffi_module( writer.add_directory(&module)?; let type_stub = project_layout .rust_module - .join(format!("{}.pyi", module_name)); + .join(format!("{module_name}.pyi")); if type_stub.exists() { - println!("📖 Found type stub file at {}.pyi", 
module_name); + println!("📖 Found type stub file at {module_name}.pyi"); writer.add_file(&module.join("__init__.pyi"), type_stub)?; writer.add_bytes(&module.join("py.typed"), b"")?; } @@ -921,12 +920,12 @@ fn generate_uniffi_bindings( // uniffi bindings hardcoded the extension filenames let cdylib_name = match cdylib_name { Some(name) => name, - None => format!("uniffi_{}", name), + None => format!("uniffi_{name}"), }; let cdylib = match target_os { - Os::Macos => format!("lib{}.dylib", cdylib_name), - Os::Windows => format!("{}.dll", cdylib_name), - _ => format!("lib{}.so", cdylib_name), + Os::Macos => format!("lib{cdylib_name}.dylib"), + Os::Windows => format!("{cdylib_name}.dll"), + _ => format!("lib{cdylib_name}.so"), }; Ok(UniFfiBindings { @@ -954,7 +953,7 @@ pub fn write_uniffi_module( cdylib, path: uniffi_binding, } = generate_uniffi_bindings(crate_dir, target_dir, target_os)?; - let py_init = format!("from .{} import * # NOQA\n", binding_name); + let py_init = format!("from .{binding_name} import * # NOQA\n"); if !editable { write_python_part(writer, project_layout, pyproject_toml) @@ -995,9 +994,9 @@ pub fn write_uniffi_module( writer.add_directory(&module)?; let type_stub = project_layout .rust_module - .join(format!("{}.pyi", module_name)); + .join(format!("{module_name}.pyi")); if type_stub.exists() { - println!("📖 Found type stub file at {}.pyi", module_name); + println!("📖 Found type stub file at {module_name}.pyi"); writer.add_file(&module.join("__init__.pyi"), type_stub)?; writer.add_bytes(&module.join("py.typed"), b"")?; } @@ -1053,7 +1052,7 @@ import sysconfig def main(): # The actual executable - program_location = Path(sysconfig.get_path("scripts")).joinpath("{}") + program_location = Path(sysconfig.get_path("scripts")).joinpath("{bin_name}") # wasmtime-py boilerplate engine = Engine() store = Store(engine) @@ -1079,8 +1078,7 @@ def main(): if __name__ == '__main__': main() - "#, - bin_name + "# ); // We can't use add_file since we want to mark the file as executable @@ -1140,7 +1138,7 @@ pub fn write_python_part( .iter() .filter_map(|glob_pattern| glob_pattern.targets(Format::Sdist)) { - println!("📦 Including files matching \"{}\"", pattern); + println!("📦 Including files matching \"{pattern}\""); for source in glob::glob(&pyproject_dir.join(pattern).to_string_lossy()) .expect("No files found for pattern") .filter_map(Result::ok) diff --git a/src/project_layout.rs b/src/project_layout.rs index 9d657b8d1..1ea1b3cec 100644 --- a/src/project_layout.rs +++ b/src/project_layout.rs @@ -373,8 +373,8 @@ impl ProjectLayout { bail!("No such data directory {}", data.display()); } Some(data) - } else if project_root.join(format!("{}.data", module_name)).is_dir() { - Some(project_root.join(format!("{}.data", module_name))) + } else if project_root.join(format!("{module_name}.data")).is_dir() { + Some(project_root.join(format!("{module_name}.data"))) } else { None }; diff --git a/src/pyproject_toml.rs b/src/pyproject_toml.rs index d8ec33918..cb21ddfd4 100644 --- a/src/pyproject_toml.rs +++ b/src/pyproject_toml.rs @@ -266,8 +266,7 @@ impl PyProjectToml { if self.build_system.build_backend.as_deref() != Some(maturin) { eprintln!( "⚠️ Warning: `build-backend` in pyproject.toml is not set to `{maturin}`, \ - packaging tools such as pip will not use maturin to build this project.", - maturin = maturin + packaging tools such as pip will not use maturin to build this project." 
            );
            return false;
        }
diff --git a/src/python_interpreter/config.rs b/src/python_interpreter/config.rs
index 0d6771079..4473f7e8e 100644
--- a/src/python_interpreter/config.rs
+++ b/src/python_interpreter/config.rs
@@ -147,25 +147,19 @@ impl InterpreterConfig {
            .split_once('.')
            .context("Invalid python interpreter version")?;
        let major = ver_major.parse::<usize>().with_context(|| {
-            format!(
-                "Invalid python interpreter major version '{}', expect a digit",
-                ver_major
-            )
+            format!("Invalid python interpreter major version '{ver_major}', expect a digit")
        })?;
        let minor = ver_minor.parse::<usize>().with_context(|| {
-            format!(
-                "Invalid python interpreter minor version '{}', expect a digit",
-                ver_minor
-            )
+            format!("Invalid python interpreter minor version '{ver_minor}', expect a digit")
        })?;
        let implementation = implementation.unwrap_or_else(|| "cpython".to_string());
        let interpreter_kind = implementation.parse().map_err(|e| format_err!("{}", e))?;
        let abi_tag = match interpreter_kind {
            InterpreterKind::CPython => {
                if (major, minor) >= (3, 8) {
-                    abi_tag.unwrap_or_else(|| format!("{}{}", major, minor))
+                    abi_tag.unwrap_or_else(|| format!("{major}{minor}"))
                } else {
-                    abi_tag.unwrap_or_else(|| format!("{}{}m", major, minor))
+                    abi_tag.unwrap_or_else(|| format!("{major}{minor}m"))
                }
            }
            InterpreterKind::PyPy => abi_tag.unwrap_or_else(|| "pp73".to_string()),
@@ -232,7 +226,7 @@ suppress_build_script_link_lines=false"#,
            minor = self.minor,
        );
        if let Some(pointer_width) = self.pointer_width {
-            write!(content, "\npointer_width={}", pointer_width).unwrap();
+            write!(content, "\npointer_width={pointer_width}").unwrap();
        }
        content
    }
diff --git a/src/python_interpreter/mod.rs b/src/python_interpreter/mod.rs
index a0e29e4af..287df81c8 100644
--- a/src/python_interpreter/mod.rs
+++ b/src/python_interpreter/mod.rs
@@ -45,8 +45,7 @@ fn windows_interpreter_no_build(
    // those for 64-bit targets
    if pointer_width != target_width {
        println!(
-            "👽 {}.{} is installed as {}-bit, while the target is {}-bit. Skipping.",
-            major, minor, pointer_width, target_width
+            "👽 {major}.{minor} is installed as {pointer_width}-bit, while the target is {target_width}-bit. Skipping."
        );
        return true;
    }
@@ -142,7 +141,7 @@ fn find_all_windows(target: &Target, min_python_minor: usize) -> Result
 Result
 Result
            "cpython" => Ok(InterpreterKind::CPython),
            "pypy" => Ok(InterpreterKind::PyPy),
-            unknown => Err(format!("Unknown interpreter kind '{}'", unknown)),
+            unknown => Err(format!("Unknown interpreter kind '{unknown}'")),
        }
    }
 }
@@ -571,7 +569,7 @@ impl PythonInterpreter {
                );
                return Ok(None);
            } else {
-                eprintln!("{}", stderr);
+                eprintln!("{stderr}");
                bail!(err_msg);
            }
        }
@@ -587,7 +585,7 @@ impl PythonInterpreter {
            };
            // Try py -x.y on Windows
            let mut metadata_py = tempfile::NamedTempFile::new()?;
-            write!(metadata_py, "{}", GET_INTERPRETER_METADATA)?;
+            write!(metadata_py, "{GET_INTERPRETER_METADATA}")?;
            let mut cmd = Command::new("cmd");
            cmd.arg("/c")
                .arg("py")
@@ -736,7 +734,7 @@ impl PythonInterpreter {
            find_all_windows(target, min_python_minor)?
        } else {
            let mut executables: Vec<String> = (min_python_minor..=MAXIMUM_PYTHON_MINOR)
-                .map(|minor| format!("python3.{}", minor))
+                .map(|minor| format!("python3.{minor}"))
                .collect();
            // Also try to find PyPy for cffi and pyo3 bindings
            if matches!(bridge, BridgeModel::Cffi)
                || bridge.is_bindings("pyo3")
                || bridge.is_bindings("pyo3-ffi")
            {
                executables.extend(
-                    (min_python_minor..=MAXIMUM_PYPY_MINOR).map(|minor| format!("pypy3.{}", minor)),
+                    (min_python_minor..=MAXIMUM_PYPY_MINOR).map(|minor| format!("pypy3.{minor}")),
                );
            }
            executables
diff --git a/src/source_distribution.rs b/src/source_distribution.rs
index 01bc7794d..052b003ac 100644
--- a/src/source_distribution.rs
+++ b/src/source_distribution.rs
@@ -150,10 +150,10 @@ fn rewrite_cargo_toml(
            }
            // This is the location of the targeted crate in the source distribution
            table[&dep_name]["path"] = if root_crate {
-                toml_edit::value(format!("{}/{}", local_deps_folder, dep_name))
+                toml_edit::value(format!("{local_deps_folder}/{dep_name}"))
            } else {
                // Cargo.toml contains relative paths, and we're already in LOCAL_DEPENDENCIES_FOLDER
-                toml_edit::value(format!("../{}", dep_name))
+                toml_edit::value(format!("../{dep_name}"))
            };
            if workspace_inherit {
                // Remove workspace inheritance now that we converted it into a path dependency
@@ -207,8 +207,7 @@ fn rewrite_cargo_toml(
                    let path = Path::new(s.value());
                    if let Some(name) = path.file_name().and_then(|x| x.to_str()) {
                        if known_path_deps.contains_key(name) {
-                            new_members
-                                .push(format!("{}/{}", LOCAL_DEPENDENCIES_FOLDER, name));
+                            new_members.push(format!("{LOCAL_DEPENDENCIES_FOLDER}/{name}"));
                        }
                    }
                }
@@ -443,7 +442,7 @@ fn add_crate_to_source_distribution(
            .count();
        format!("{}{}", "../".repeat(level), LOCAL_DEPENDENCIES_FOLDER)
    } else if cargo_toml_in_rust_src {
-        format!("../../{}", LOCAL_DEPENDENCIES_FOLDER)
+        format!("../../{LOCAL_DEPENDENCIES_FOLDER}")
    } else {
        LOCAL_DEPENDENCIES_FOLDER.to_string()
    };
@@ -671,7 +670,7 @@ pub fn source_distribution(
    }

    let mut include = |pattern| -> Result<()> {
-        println!("📦 Including files matching \"{}\"", pattern);
+        println!("📦 Including files matching \"{pattern}\"");
        for source in glob::glob(&pyproject_dir.join(pattern).to_string_lossy())
            .expect("No files found for pattern")
            .filter_map(Result::ok)
diff --git a/src/target.rs b/src/target.rs
index 0d25278f0..e7641b8e0 100644
--- a/src/target.rs
+++ b/src/target.rs
@@ -282,9 +282,9 @@ impl Target {
                platform_tags.sort();
                let mut tags = vec![];
                for platform_tag in platform_tags {
-                    tags.push(format!("{}_{}", platform_tag, arch));
+                    tags.push(format!("{platform_tag}_{arch}"));
                    for alias in platform_tag.aliases() {
-                        tags.push(format!("{}_{}", alias, arch));
+                        tags.push(format!("{alias}_{arch}"));
                    }
                }
                tags.join(".")
@@ -294,16 +294,12 @@ impl Target {
                let ((x86_64_major, x86_64_minor), (arm64_major, arm64_minor)) = macosx_deployment_target(env::var("MACOSX_DEPLOYMENT_TARGET").ok().as_deref(), universal2)?;
                if universal2 {
                    format!(
-                        "macosx_{x86_64_major}_{x86_64_minor}_x86_64.macosx_{arm64_major}_{arm64_minor}_arm64.macosx_{x86_64_major}_{x86_64_minor}_universal2",
-                        x86_64_major = x86_64_major,
-                        x86_64_minor = x86_64_minor,
-                        arm64_major = arm64_major,
-                        arm64_minor = arm64_minor
+                        "macosx_{x86_64_major}_{x86_64_minor}_x86_64.macosx_{arm64_major}_{arm64_minor}_arm64.macosx_{x86_64_major}_{x86_64_minor}_universal2"
                    )
                } else if self.arch == Arch::Aarch64 {
-                    format!("macosx_{}_{}_arm64", arm64_major, arm64_minor)
+                    format!("macosx_{arm64_major}_{arm64_minor}_arm64")
                } else {
-                    format!("macosx_{}_{}_x86_64", x86_64_major, x86_64_minor)
format!("macosx_{}_{}_x86_64", x86_64_major, x86_64_minor) + format!("macosx_{x86_64_major}_{x86_64_minor}_x86_64") } } // FreeBSD @@ -340,7 +336,7 @@ impl Target { Err(_) => emcc_version()?, }; let release = release.replace(['.', '-'], "_"); - format!("emscripten_{}_wasm32", release) + format!("emscripten_{release}_wasm32") } (Os::Wasi, Arch::Wasm32) => { "any".to_string() @@ -361,15 +357,12 @@ impl Target { // SunOS 5 == Solaris 2 os = "solaris".to_string(); release = format!("{}_{}", major_ver - 3, other); - machine = format!("{}_64bit", machine); + machine = format!("{machine}_64bit"); } } } format!( - "{}_{}_{}", - os, - release, - machine + "{os}_{release}_{machine}" ) } }; diff --git a/src/upload.rs b/src/upload.rs index 1466f33c1..80589acdc 100644 --- a/src/upload.rs +++ b/src/upload.rs @@ -382,7 +382,7 @@ pub fn upload(registry: &Registry, wheel_path: &Path) -> Result<(), UploadError> "User-Agent", &format!("{}/{}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")), ) - .set("Authorization", &format!("Basic {}", encoded)) + .set("Authorization", &format!("Basic {encoded}")) .send(multipart_data); match response { @@ -391,8 +391,7 @@ pub fn upload(registry: &Registry, wheel_path: &Path) -> Result<(), UploadError> let err_text = response.into_string().unwrap_or_else(|e| { format!( "The registry should return some text, \ - even in case of an error, but didn't ({})", - e + even in case of an error, but didn't ({e})" ) }); debug!("Upload error response: {}", err_text); @@ -444,7 +443,7 @@ pub fn upload_ui(items: &[PathBuf], publish: &PublishOpt) -> Result<()> { .map(|m| m.as_str()); match title { Some(title) => { - println!("⛔ {}", title); + println!("⛔ {title}"); } None => println!("⛔ Username and/or password are wrong"), } @@ -462,7 +461,7 @@ pub fn upload_ui(items: &[PathBuf], publish: &PublishOpt) -> Result<()> { | Err(keyring::Error::NoStorageAccess(_)) | Err(keyring::Error::PlatformFailure(_)) => {} Err(err) => { - eprintln!("⚠️ Warning: Failed to remove password from keyring: {}", err) + eprintln!("⚠️ Warning: Failed to remove password from keyring: {err}") } } } @@ -474,8 +473,7 @@ pub fn upload_ui(items: &[PathBuf], publish: &PublishOpt) -> Result<()> { if let UploadError::FileExistsError(_) = err { if publish.skip_existing { println!( - "⚠️ Note: Skipping {:?} because it appears to already exist", - filename + "⚠️ Note: Skipping {filename:?} because it appears to already exist" ); continue; } @@ -483,8 +481,7 @@ pub fn upload_ui(items: &[PathBuf], publish: &PublishOpt) -> Result<()> { let filesize = fs::metadata(i) .map(|x| ByteSize(x.len()).to_string()) .unwrap_or_else(|e| format!("Failed to get the filesize of {:?}: {}", &i, e)); - return Err(err) - .context(format!("💥 Failed to upload {:?} ({})", filename, filesize)); + return Err(err).context(format!("💥 Failed to upload {filename:?} ({filesize})")); } } } @@ -502,10 +499,7 @@ pub fn upload_ui(items: &[PathBuf], publish: &PublishOpt) -> Result<()> { | Err(keyring::Error::NoStorageAccess(_)) | Err(keyring::Error::PlatformFailure(_)) => {} Err(err) => { - eprintln!( - "⚠️ Warning: Failed to store the password in the keyring: {:?}", - err - ); + eprintln!("⚠️ Warning: Failed to store the password in the keyring: {err:?}"); } } } diff --git a/tests/common/develop.rs b/tests/common/develop.rs index 7288088ee..735c5cbad 100644 --- a/tests/common/develop.rs +++ b/tests/common/develop.rs @@ -17,7 +17,7 @@ pub fn test_develop( let package = package.as_ref(); let (venv_dir, python) = if conda { - 
create_conda_env(&format!("maturin-{}", unique_name), 3, 10)? + create_conda_env(&format!("maturin-{unique_name}"), 3, 10)? } else { create_virtualenv(unique_name, None)? }; @@ -49,10 +49,7 @@ pub fn test_develop( CargoOptions { manifest_path: Some(manifest_file), quiet: true, - target_dir: Some(PathBuf::from(format!( - "test-crates/targets/{}", - unique_name - ))), + target_dir: Some(PathBuf::from(format!("test-crates/targets/{unique_name}"))), ..Default::default() }, &venv_dir, diff --git a/tests/common/integration.rs b/tests/common/integration.rs index ce82fcba5..17493d64b 100644 --- a/tests/common/integration.rs +++ b/tests/common/integration.rs @@ -29,8 +29,8 @@ pub fn test_integration( let package_string = package.as_ref().join("Cargo.toml").display().to_string(); // The first argument is ignored by clap - let shed = format!("test-crates/wheels/{}", unique_name); - let target_dir = format!("test-crates/targets/{}", unique_name); + let shed = format!("test-crates/wheels/{unique_name}"); + let target_dir = format!("test-crates/targets/{unique_name}"); let python_interp = test_python_path(); let mut cli = vec![ "build", @@ -103,7 +103,7 @@ pub fn test_integration( assert!(filename.to_string_lossy().ends_with(file_suffix)) } let mut venv_name = if supported_version == "py3" { - format!("{}-py3", unique_name) + format!("{unique_name}-py3") } else { format!( "{}-py{}.{}", @@ -111,7 +111,7 @@ pub fn test_integration( ) }; if let Some(target) = target { - venv_name = format!("{}-{}", venv_name, target); + venv_name = format!("{venv_name}-{target}"); } let (venv_dir, python) = create_virtualenv(&venv_name, Some(python_interpreter.executable.clone()))?; @@ -128,7 +128,7 @@ pub fn test_integration( .args(command) .arg(dunce::simplified(filename)) .output() - .context(format!("pip install failed with {:?}", python))?; + .context(format!("pip install failed with {python:?}"))?; if !output.status.success() { let full_command = format!("{} {}", python.display(), command.join(" ")); bail!( @@ -167,7 +167,7 @@ pub fn test_integration_conda(package: impl AsRef, bindings: Option Result<()> { let message = str::from_utf8(&output.stdout).unwrap().trim(); if message != "SUCCESS" { - panic!("Not SUCCESS: {}", message); + panic!("Not SUCCESS: {message}"); } Ok(()) @@ -83,7 +83,7 @@ pub fn handle_result(result: Result) -> T { match result { Err(e) => { for cause in e.chain().rev() { - eprintln!("Cause: {}", cause); + eprintln!("Cause: {cause}"); } panic!("{}", e); } @@ -107,7 +107,7 @@ pub fn create_virtualenv(name: &str, python_interp: Option) -> Result<( target.get_python() }); let venv_name = match get_python_implementation(&venv_interp) { - Ok(python_impl) => format!("{}-{}", name, python_impl), + Ok(python_impl) => format!("{name}-{python_impl}"), Err(_) => name.to_string(), }; let venv_dir = PathBuf::from("test-crates") @@ -164,7 +164,7 @@ pub fn create_conda_env(name: &str, major: usize, minor: usize) -> Result<(PathB .arg("create") .arg("-n") .arg(name) - .arg(format!("python={}.{}", major, minor)) + .arg(format!("python={major}.{minor}")) .arg("-q") .arg("-y") .arg("--json") diff --git a/tests/common/other.rs b/tests/common/other.rs index d3d23cd56..095ea4ec2 100644 --- a/tests/common/other.rs +++ b/tests/common/other.rs @@ -181,10 +181,7 @@ fn build_wheel_files(package: impl AsRef, unique_name: &str) -> Result