Enable our standard lints #28

Merged
merged 6 commits on Nov 23, 2022
81 changes: 81 additions & 0 deletions .cargo/config.toml
@@ -0,0 +1,81 @@
# add the below section to `.cargo/config.toml`

[target.'cfg(all())']
rustflags = [
# BEGIN - Embark standard lints v6 for Rust 1.55+
# do not change or add/remove here, but one can add exceptions after this section
# for more info see: <https://github.com/EmbarkStudios/rust-ecosystem/issues/59>
"-Dunsafe_code",
"-Wclippy::all",
"-Wclippy::await_holding_lock",
"-Wclippy::char_lit_as_u8",
"-Wclippy::checked_conversions",
"-Wclippy::dbg_macro",
"-Wclippy::debug_assert_with_mut_call",
"-Wclippy::doc_markdown",
"-Wclippy::empty_enum",
"-Wclippy::enum_glob_use",
"-Wclippy::exit",
"-Wclippy::expl_impl_clone_on_copy",
"-Wclippy::explicit_deref_methods",
"-Wclippy::explicit_into_iter_loop",
"-Wclippy::fallible_impl_from",
"-Wclippy::filter_map_next",
"-Wclippy::flat_map_option",
"-Wclippy::float_cmp_const",
"-Wclippy::fn_params_excessive_bools",
"-Wclippy::from_iter_instead_of_collect",
"-Wclippy::if_let_mutex",
"-Wclippy::implicit_clone",
"-Wclippy::imprecise_flops",
"-Wclippy::inefficient_to_string",
"-Wclippy::invalid_upcast_comparisons",
"-Wclippy::large_digit_groups",
"-Wclippy::large_stack_arrays",
"-Wclippy::large_types_passed_by_value",
"-Wclippy::let_unit_value",
"-Wclippy::linkedlist",
"-Wclippy::lossy_float_literal",
"-Wclippy::macro_use_imports",
"-Wclippy::manual_ok_or",
"-Wclippy::map_err_ignore",
"-Wclippy::map_flatten",
"-Wclippy::map_unwrap_or",
"-Wclippy::match_on_vec_items",
"-Wclippy::match_same_arms",
"-Wclippy::match_wild_err_arm",
"-Wclippy::match_wildcard_for_single_variants",
"-Wclippy::mem_forget",
"-Wclippy::mismatched_target_os",
"-Wclippy::missing_enforced_import_renames",
"-Wclippy::mut_mut",
"-Wclippy::mutex_integer",
"-Wclippy::needless_borrow",
"-Wclippy::needless_continue",
"-Wclippy::needless_for_each",
"-Wclippy::option_option",
"-Wclippy::path_buf_push_overwrite",
"-Wclippy::ptr_as_ptr",
"-Wclippy::rc_mutex",
"-Wclippy::ref_option_ref",
"-Wclippy::rest_pat_in_fully_bound_structs",
"-Wclippy::same_functions_in_if_condition",
"-Wclippy::semicolon_if_nothing_returned",
"-Wclippy::single_match_else",
"-Wclippy::string_add_assign",
"-Wclippy::string_add",
"-Wclippy::string_lit_as_bytes",
"-Wclippy::string_to_string",
"-Wclippy::todo",
"-Wclippy::trait_duplication_in_bounds",
"-Wclippy::unimplemented",
"-Wclippy::unnested_or_patterns",
"-Wclippy::unused_self",
"-Wclippy::useless_transmute",
"-Wclippy::verbose_file_reads",
"-Wclippy::zero_sized_map_values",
"-Wfuture_incompatible",
"-Wnonstandard_style",
"-Wrust_2018_idioms",
# END - Embark standard lints v6 for Rust 1.55+
]
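The header comment above notes that exceptions belong after the BEGIN/END block rather than inside it; in practice that means either appending extra entries to `rustflags` below the END marker (for example `-A` flags) or allowing a lint locally at the offending item. A minimal sketch of the local form, using a hypothetical function:

// Opt a single item out of one lint instead of editing the shared Embark block.
#[allow(clippy::todo)]
fn not_wired_up_yet() -> u32 {
    todo!("fill in once the asset format settles")
}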
8 changes: 4 additions & 4 deletions crates/cervo-asset/src/lib.rs
@@ -26,7 +26,7 @@ use std::io::{Cursor, Read, Write};
/// Magic used to ensure assets are valid.
pub const MAGIC: [u8; 4] = [b'C', b'R', b'V', b'O'];

/// AssetKind denotes what kind of policy is contained inside an [`AssetData`].
/// `AssetKind` denotes what kind of policy is contained inside an [`AssetData`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum AssetKind {
@@ -66,7 +66,7 @@ pub struct AssetData {
}

impl AssetData {
/// Create a new AssetData from parts.
/// Create a new `AssetData` from parts.
///
/// Note: Does not validate the data.
pub fn new<Data: Into<Vec<u8>>>(kind: AssetKind, data: Data) -> Self {
@@ -76,7 +76,7 @@ impl AssetData {
}
}

/// Create a new AssetData from a reader and a kind.
/// Create a new `AssetData` from a reader and a kind.
///
/// Note: Does not validate the data.
pub fn from_reader<Reader: Read>(kind: AssetKind, mut reader: Reader) -> Result<Self> {
@@ -146,7 +146,7 @@ impl AssetData {

/// Load a simple unbatching inferer from this asset.
///
/// See ['BasicInferer'] for more details.
/// See ['`BasicInferer`'] for more details.
pub fn load_basic(&self) -> Result<BasicInferer> {
let mut cursor = Cursor::new(&self.data);
match self.kind {
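The doc-comment edits in this file (backticks around `AssetKind`, `AssetData`, `BasicInferer`) look like fixes for `clippy::doc_markdown`, which wants code-like identifiers in rustdoc wrapped in backticks. A minimal sketch of the pattern on a hypothetical type:

/// `FrameBudget` tracks how much inference time remains this tick.
/// (Without the backticks, clippy::doc_markdown flags the bare identifier.)
pub struct FrameBudget {
    pub remaining_micros: u64,
}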
5 changes: 4 additions & 1 deletion crates/cervo-asset/tests/helpers.rs
@@ -20,7 +20,7 @@ pub fn get_file(name: &'static str) -> std::io::Result<File> {
std::fs::File::open(path)
}

pub fn build_inputs_from_desc(count: u64, inputs: &[(String, Vec<usize>)]) -> HashMap<u64, State> {
pub fn build_inputs_from_desc(
count: u64,
inputs: &[(String, Vec<usize>)],
) -> HashMap<u64, State<'_>> {
(0..count)
.map(|idx| {
(
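Writing the return type as `State<'_>` (here and in the other test helpers below) appears to address `elided_lifetimes_in_paths` from the `rust_2018_idioms` group enabled above, which warns when a type that carries a lifetime parameter is named without one. A minimal sketch with a hypothetical borrowed type:

struct Snapshot<'a> {
    data: &'a [f32],
}

// Warned under rust_2018_idioms:  fn first(s: &Snapshot) -> f32
// Preferred, with the elided lifetime made explicit:
fn first(s: &Snapshot<'_>) -> f32 {
    s.data[0]
}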
9 changes: 6 additions & 3 deletions crates/cervo-core/src/batcher.rs
@@ -117,9 +117,12 @@ impl Batcher {
}
}

Ok(HashMap::from_iter(
self.scratch.ids.drain(..).zip(outputs.into_iter()),
))
Ok(self
.scratch
.ids
.drain(..)
.zip(outputs.into_iter())
.collect::<_>())
}

/// Check if there is any data to run on here.
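Building the result map with `.collect()` instead of `HashMap::from_iter(..)` matches `clippy::from_iter_instead_of_collect`. A minimal sketch of the same pattern with made-up ids and outputs:

use std::collections::HashMap;

fn pair_up(ids: Vec<u64>, outputs: Vec<f32>) -> HashMap<u64, f32> {
    // Flagged form: HashMap::from_iter(ids.into_iter().zip(outputs))
    // Preferred: let `collect` infer the target collection from the return type.
    ids.into_iter().zip(outputs).collect()
}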
4 changes: 2 additions & 2 deletions crates/cervo-core/src/batcher/scratch.rs
@@ -83,15 +83,15 @@ impl ScratchPad {
.iter()
.map(|(name, shape)| {
let count = shape.iter().product();
ScratchPadData::new(name.to_owned(), count, capacity)
ScratchPadData::new(name.clone(), count, capacity)
})
.collect();

let outputs = outputs
.iter()
.map(|(name, shape)| {
let count = shape.iter().product();
ScratchPadData::new(name.to_owned(), count, capacity)
ScratchPadData::new(name.clone(), count, capacity)
})
.collect();

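Swapping `name.to_owned()` for `name.clone()` on a `&String` looks like a fix for `clippy::implicit_clone`, which prefers an explicit clone when `to_owned` produces the same type anyway. A small sketch over the same shape of input:

fn names(pairs: &[(String, Vec<usize>)]) -> Vec<String> {
    pairs
        .iter()
        // Flagged form: name.to_owned() -- on a &String this is just a clone.
        .map(|(name, _)| name.clone())
        .collect()
}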
2 changes: 1 addition & 1 deletion crates/cervo-core/src/batcher/wrapper.rs
@@ -10,7 +10,7 @@ use std::collections::HashMap;
/// data-insertion and execution, which generally improves
/// performance.
///
/// Can be easily constructed using [InfererExt::into_batched](crate::prelude::InfererExt::into_batched).
/// Can be easily constructed using [`InfererExt::into_batched`](crate::prelude::InfererExt::into_batched).
pub struct Batched<Inf: Inferer> {
inner: Inf,
batcher: Batcher,
4 changes: 2 additions & 2 deletions crates/cervo-core/src/epsilon.rs
@@ -14,7 +14,7 @@ use perchance::PerchanceContext;
use rand::thread_rng;
use rand_distr::{Distribution, StandardNormal};

/// NoiseGenerators are consumed by the [`EpsilonInjector`] by generating noise sampled for a standard normal
/// `NoiseGenerators` are consumed by the [`EpsilonInjector`] by generating noise sampled for a standard normal
/// distribution. Custom noise-generators can be implemented and passed via [`EpsilonInjector::with_generator`].
pub trait NoiseGenerator {
fn generate(&self, count: usize, out: &mut [f32]);
@@ -89,7 +89,7 @@ impl NoiseGenerator for LowQualityNoiseGenerator {
/// use-cases.
///
/// This implementation uses [`rand::thread_rng`] internally as the entropy source, and uses the optimized
/// StandardNormal distribution for sampling.
/// `StandardNormal` distribution for sampling.
pub struct HighQualityNoiseGenerator {
normal_distribution: StandardNormal,
}
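Since the hunk above shows the `NoiseGenerator` trait's `generate` method, a custom generator is a small amount of code. A rough sketch under the assumption that `generate` is expected to fill the first `count` slots of `out` (the hunk does not show that contract, so treat it as an assumption), and without showing the wiring into `EpsilonInjector::with_generator`:

/// Deterministic generator that emits zeros -- handy for reproducible tests.
struct ZeroNoise;

impl NoiseGenerator for ZeroNoise {
    fn generate(&self, count: usize, out: &mut [f32]) {
        // Assumed contract: write `count` samples into `out`.
        for v in out.iter_mut().take(count) {
            *v = 0.0;
        }
    }
}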
8 changes: 4 additions & 4 deletions crates/cervo-core/src/inferer.rs
@@ -195,10 +195,10 @@ pub trait InfererExt: Inferer + Sized {
}

/// Execute the model on the provided pre-batched data.
fn infer_single<'this>(&'this self, input: State<'_>) -> Result<Response<'this>, anyhow::Error>
where
Self: Sized,
{
fn infer_single<'this>(
&'this self,
input: State<'_>,
) -> Result<Response<'this>, anyhow::Error> {
let mut batcher = Batcher::new_sized(self, 1);
batcher.push(0, input)?;

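Removing `where Self: Sized` from `infer_single` works because the trait is already declared as `InfererExt: Inferer + Sized`, so the per-method bound was redundant (the kind of duplication `clippy::trait_duplication_in_bounds` is aimed at). A minimal sketch of the same situation with a hypothetical trait:

trait Render: Sized {
    // Redundant and therefore dropped:
    //     fn boxed(self) -> Box<Self> where Self: Sized { Box::new(self) }
    // The supertrait bound above already guarantees Self: Sized.
    fn boxed(self) -> Box<Self> {
        Box::new(self)
    }
}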
2 changes: 1 addition & 1 deletion crates/cervo-core/src/model_api.rs
@@ -9,7 +9,7 @@
use tract_core::{model::TypedModel, tract_data::TractResult};
use tract_hir::{infer::Factoid, prelude::InferenceModel};

/// The ModelApi describes the inputs and outputs for a model.
/// The `ModelApi` describes the inputs and outputs for a model.
pub struct ModelApi {
/// The named model inputs.
pub inputs: Vec<(String, Vec<usize>)>,
8 changes: 4 additions & 4 deletions crates/cervo-core/tests/batcher.rs
@@ -8,7 +8,7 @@ use cervo_core::prelude::{Batcher, Inferer, InfererExt, State};

struct TestInferer<
B: Fn(usize) -> usize,
R: Fn(cervo_core::batcher::ScratchPadView) -> anyhow::Result<(), anyhow::Error>,
R: Fn(cervo_core::batcher::ScratchPadView<'_>) -> anyhow::Result<(), anyhow::Error>,
> {
batch_size: B,
raw: R,
@@ -19,15 +19,15 @@ struct TestInferer<
impl<B, R> Inferer for TestInferer<B, R>
where
B: Fn(usize) -> usize,
R: Fn(cervo_core::batcher::ScratchPadView) -> anyhow::Result<(), anyhow::Error>,
R: Fn(cervo_core::batcher::ScratchPadView<'_>) -> anyhow::Result<(), anyhow::Error>,
{
fn select_batch_size(&self, max_count: usize) -> usize {
(self.batch_size)(max_count)
}

fn infer_raw(
&self,
batch: cervo_core::batcher::ScratchPadView,
batch: cervo_core::batcher::ScratchPadView<'_>,
) -> anyhow::Result<(), anyhow::Error> {
(self.raw)(batch)
}
@@ -278,7 +278,7 @@ fn test_values() {

let r = batcher.execute(&inf).unwrap();
assert_eq!(r.len(), 4);
dbg!(&r);
let _ = &r;
for (id, vals) in r {
assert_eq!(vals.data["out"].len(), 11);
assert_eq!(vals.data["out"][0], id as f32);
2 changes: 1 addition & 1 deletion crates/cervo-nnef/src/lib.rs
@@ -45,7 +45,7 @@ lazy_static::lazy_static! {
/// ahead of time.
pub fn init() {
use lazy_static::LazyStatic;
NNEF::initialize(&NNEF)
NNEF::initialize(&NNEF);
}

/// Utility function to check if a file name is `.nnef.tar`.
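The trailing semicolon added to `NNEF::initialize(&NNEF)` (and the similar semicolon-only changes later in this diff) matches `clippy::semicolon_if_nothing_returned`, which asks for unit-returning tail expressions to be terminated explicitly. A minimal sketch:

fn log_startup(registry_name: &str) {
    // Flagged: println!("initializing {}", registry_name)  -- returns () with no semicolon.
    println!("initializing {}", registry_name);
}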
5 changes: 4 additions & 1 deletion crates/cervo-nnef/tests/helpers.rs
@@ -19,7 +19,7 @@ pub fn get_file(name: &'static str) -> std::io::Result<File> {
std::fs::File::open(path)
}

pub fn build_inputs_from_desc(count: u64, inputs: &[(String, Vec<usize>)]) -> HashMap<u64, State> {
pub fn build_inputs_from_desc(
count: u64,
inputs: &[(String, Vec<usize>)],
) -> HashMap<u64, State<'_>> {
(0..count)
.map(|idx| {
(
4 changes: 1 addition & 3 deletions crates/cervo-onnx/src/lib.rs
@@ -90,9 +90,7 @@ pub fn builder<T: Read>(read: T) -> InfererBuilder<OnnxData<T>> {
pub fn to_nnef(reader: &mut dyn Read, batch_size: Option<usize>) -> Result<Vec<u8>> {
let mut model = model_for_reader(reader)?;

let batch = batch_size
.map(|v| v.to_dim())
.unwrap_or_else(|| Symbol::from('N').to_dim());
let batch = batch_size.map_or_else(|| Symbol::from('N').to_dim(), |v| v.to_dim());

let input_outlets = model.input_outlets()?.to_vec();

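Collapsing `.map(..).unwrap_or_else(..)` into `.map_or_else(..)` corresponds to `clippy::map_unwrap_or`. A small sketch of the same `Option` pattern with a made-up default:

fn batch_dim_label(batch_size: Option<usize>) -> String {
    // Flagged form:
    //     batch_size.map(|v| v.to_string()).unwrap_or_else(|| "N".to_string())
    // map_or_else takes the default closure first, then the mapping closure.
    batch_size.map_or_else(|| "N".to_string(), |v| v.to_string())
}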
5 changes: 4 additions & 1 deletion crates/cervo-onnx/tests/helpers.rs
@@ -20,7 +20,7 @@ pub fn get_file(name: &'static str) -> std::io::Result<File> {
std::fs::File::open(path)
}

pub fn build_inputs_from_desc(count: u64, inputs: &[(String, Vec<usize>)]) -> HashMap<u64, State> {
pub fn build_inputs_from_desc(
count: u64,
inputs: &[(String, Vec<usize>)],
) -> HashMap<u64, State<'_>> {
(0..count)
.map(|idx| {
(
6 changes: 3 additions & 3 deletions crates/cervo-runtime/src/runtime.rs
@@ -135,7 +135,7 @@ impl Runtime {
match res {
Some(res) => {
result.insert(ticket.1, res);
executed.push(ticket.1)
executed.push(ticket.1);
}
None => {
non_executed.push(ticket);
@@ -420,7 +420,7 @@ mod tests {
let err = res.unwrap_err();

if let CervoError::OrphanedData(keys) = err {
assert_eq!(keys, vec![k])
assert_eq!(keys, vec![k]);
} else {
panic!("expected CervoError::OrphanedData")
}
@@ -439,7 +439,7 @@
let err = res.unwrap_err();

if let CervoError::OrphanedData(keys) = err {
assert_eq!(keys, vec![k])
assert_eq!(keys, vec![k]);
} else {
panic!("expected CervoError::OrphanedData")
}
22 changes: 10 additions & 12 deletions crates/cervo-runtime/src/state.rs
@@ -96,12 +96,11 @@ impl ModelState {

let elapsed = start.elapsed();
let mut timings = self.timings.borrow_mut();
match timings.iter_mut().find(|b| b.size == 1) {
Some(bucket) => bucket.add(elapsed),
None => {
timings.push(TimingBucket::new(1, elapsed));
timings.sort_by_key(|b| b.size);
}
if let Some(bucket) = timings.iter_mut().find(|b| b.size == 1) {
bucket.add(elapsed);
} else {
timings.push(TimingBucket::new(1, elapsed));
timings.sort_by_key(|b| b.size);
}

Ok(res)
@@ -123,12 +122,11 @@

let elapsed = start.elapsed();
let mut timings = self.timings.borrow_mut();
match timings.iter_mut().find(|b| b.size == batch_size) {
Some(bucket) => bucket.add(elapsed),
None => {
timings.push(TimingBucket::new(batch_size, elapsed));
timings.sort_by_key(|b| b.size);
}
if let Some(bucket) = timings.iter_mut().find(|b| b.size == batch_size) {
bucket.add(elapsed);
} else {
timings.push(TimingBucket::new(batch_size, elapsed));
timings.sort_by_key(|b| b.size);
}

Ok(res)
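Rewriting the two-armed `match` over an `Option` as `if let .. else` follows the shape `clippy::single_match_else` pushes for. A minimal sketch of the same bookkeeping pattern with a hypothetical bucket list:

fn record(buckets: &mut Vec<(usize, u32)>, size: usize) {
    // Flagged form:
    //     match buckets.iter_mut().find(|(s, _)| *s == size) {
    //         Some(entry) => entry.1 += 1,
    //         None => {
    //             buckets.push((size, 1));
    //             buckets.sort_by_key(|(s, _)| *s);
    //         }
    //     }
    if let Some(entry) = buckets.iter_mut().find(|(s, _)| *s == size) {
        entry.1 += 1;
    } else {
        buckets.push((size, 1));
        buckets.sort_by_key(|(s, _)| *s);
    }
}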
8 changes: 4 additions & 4 deletions crates/cervo-runtime/src/timing.rs
@@ -78,7 +78,7 @@ mod tests {
#[test]
fn initial_mean_initial_value() {
let state = WelfordState::new(Duration::from_secs_f32(1.0));
assert!(is_close(state.mean().as_secs_f32(), 1.0))
assert!(is_close(state.mean().as_secs_f32(), 1.0));
}

#[test]
@@ -89,7 +89,7 @@
state.update(Duration::from_secs_f32(1.0));
}

assert_eq!(state.mean().as_secs_f32(), 1.0)
assert_eq!(state.mean().as_secs_f32(), 1.0);
}

#[test]
@@ -100,7 +100,7 @@
state.update(Duration::from_secs_f32(v as f32));
}

assert_eq!(state.mean().as_secs_f32(), 4.5)
assert_eq!(state.mean().as_secs_f32(), 4.5);
}

#[test]
@@ -111,6 +111,6 @@
state.update(Duration::from_secs_f32(v as f32));
}

assert_eq!(state.mean().as_secs_f32(), 49.5)
assert_eq!(state.mean().as_secs_f32(), 49.5);
}
}