Skip to content

Commit

Permalink
Clippy
Browse files Browse the repository at this point in the history
  • Loading branch information
magbak committed Dec 19, 2024
1 parent 76de500 commit f568057
Show file tree
Hide file tree
Showing 53 changed files with 258 additions and 339 deletions.
21 changes: 9 additions & 12 deletions lib/bigquery-polars/src/querying.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,25 +71,25 @@ impl BigQueryExecutor {
.query_response()
.job_reference
.as_ref()
.ok_or_else(|| return BigQueryExecutorError::JobReferenceMissingError)?;
.ok_or_else(|| BigQueryExecutorError::JobReferenceMissingError)?;

let job_id = job_info
.job_id
.as_ref()
.ok_or_else(|| return BigQueryExecutorError::JobIdNoneError)?
.ok_or_else(|| BigQueryExecutorError::JobIdNoneError)?
.clone();
let location = &job_info.location;

let mut rs = loop {
let rs = self
.get_query_results(&job, &job_id, location.clone(), None)
.get_query_results(job, &job_id, location.clone(), None)
.await?;

if let Some(complete) = &rs.job_complete {
if *complete {
break rs;
}
} else if let Some(_) = &rs.schema {
} else if rs.schema.is_some() {
break rs;
}
sleep(Duration::from_millis(500)).await;
Expand Down Expand Up @@ -130,7 +130,7 @@ impl BigQueryExecutor {
));
}
}
return any_values;
any_values
})
.collect();
rows_processed += rows.len();
Expand All @@ -152,7 +152,7 @@ impl BigQueryExecutor {
}
let page_token = rs.page_token.clone();
rs = self
.get_query_results(&job, &job_id, location.clone(), page_token)
.get_query_results(job, &job_id, location.clone(), page_token)
.await?;
}
if !all_lfs.is_empty() {
Expand Down Expand Up @@ -181,10 +181,10 @@ impl BigQueryExecutor {
start_index: None,
timeout_ms: None,
};
Ok(job
job
.get_query_results(self.project_id.as_str(), job_id, params.clone())
.await
.map_err(map_bqerr)?)
.map_err(map_bqerr)
}
}

Expand Down Expand Up @@ -215,10 +215,7 @@ fn table_cell_to_any<'a>(
AnyValue::Boolean(value_as_ref.as_str().unwrap().parse::<bool>().unwrap())
}
FieldType::Timestamp => {
let some_utc = match some_utc {
None => {None}
Some(tz) => {Some(Arc::new(PlSmallStr::from_str(tz)))}
};
let some_utc = some_utc.as_ref().map(|tz| Arc::new(PlSmallStr::from_str(tz)));
let ts_str = value_as_ref.as_str().unwrap();
let timestamp_ns = (ts_str.parse::<f64>().unwrap() * (1e9f64)) as i64;
AnyValue::DatetimeOwned(timestamp_ns, TimeUnit::Nanoseconds, some_utc)
Expand Down
2 changes: 1 addition & 1 deletion lib/chrontext/src/combiner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ impl Combiner {
let mut new_solution_mappings =
self.execute_static_query(&static_query, None).await?;
let new_virtualized_queries =
self.prepper.prepare(&query, &mut new_solution_mappings);
self.prepper.prepare(query, &mut new_solution_mappings);
// Combination assumes there is something to combine!
// If there are no time series queries, we are done.
if new_virtualized_queries.is_empty() {
Expand Down
18 changes: 9 additions & 9 deletions lib/chrontext/src/combiner/lazy_expressions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -322,9 +322,9 @@ impl Combiner {
let expr = right.get(i).unwrap();
let expr_context = right_contexts.get(i).unwrap();
let expr_prepared_virtualized_queries =
split_virtualized_queries(&mut prepared_virtualized_queries, &expr_context);
split_virtualized_queries(&mut prepared_virtualized_queries, expr_context);
let expr_static_query_map =
split_static_queries_opt(&mut static_query_map, &expr_context);
split_static_queries_opt(&mut static_query_map, expr_context);
output_solution_mappings = self
.lazy_expression(
expr,
Expand All @@ -339,7 +339,7 @@ impl Combiner {
output_solution_mappings,
&left_context,
&right_contexts,
&context,
context,
)?
}
Expression::Add(left, right) => {
Expand Down Expand Up @@ -546,7 +546,7 @@ impl Combiner {
.alias(exists_context.as_str()),
);

let new_inner = rewrite_exists_graph_pattern(inner, &exists_context.as_str());
let new_inner = rewrite_exists_graph_pattern(inner, exists_context.as_str());
output_solution_mappings.rdf_node_types.insert(
exists_context.as_str().to_string(),
RDFNodeType::Literal(xsd::BOOLEAN.into_owned()),
Expand Down Expand Up @@ -620,7 +620,7 @@ impl Combiner {
&left_context,
&middle_context,
&right_context,
&context,
context,
)?
}
Expression::Coalesce(inner) => {
Expand All @@ -632,10 +632,10 @@ impl Combiner {
let inner_context = inner_contexts.get(i).unwrap();
let inner_prepared_virtualized_queries = split_virtualized_queries(
&mut prepared_virtualized_queries,
&inner_context,
inner_context,
);
let inner_static_query_map =
split_static_queries_opt(&mut static_query_map, &inner_context);
split_static_queries_opt(&mut static_query_map, inner_context);
output_solution_mappings = self
.lazy_expression(
inner.get(i).unwrap(),
Expand All @@ -647,7 +647,7 @@ impl Combiner {
.await?;
}

coalesce_expression(output_solution_mappings, inner_contexts, &context)?
coalesce_expression(output_solution_mappings, inner_contexts, context)?
}
Expression::FunctionCall(func, args) => {
let mut args_contexts: HashMap<usize, Context> = HashMap::new();
Expand All @@ -674,7 +674,7 @@ impl Combiner {
func,
args,
args_contexts,
&context,
context,
)?
}
};
Expand Down
6 changes: 3 additions & 3 deletions lib/chrontext/src/combiner/lazy_graph_patterns.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ impl Combiner {

//We have to eagerly evaluate static queries contained in the group by pattern since otherwise we are unable to push down the group by into the time series database.
let mut found_group_by_pushdown = false;
let mut static_query_opt = static_query_map.remove(&context);
let mut static_query_opt = static_query_map.remove(context);
if static_query_opt.is_none() {
debug!("No static query found");
let groupby_inner_context = context.extension_with(PathEntry::GroupInner);
Expand All @@ -65,7 +65,7 @@ impl Combiner {
graph_pattern,
false,
&mut new_solution_mappings,
&context,
context,
);
debug!(
"Finshed preparing time series queries, {} were created",
Expand Down Expand Up @@ -151,7 +151,7 @@ impl Combiner {
updated_solution_mappings,
static_query_map,
new_prepared_virtualized_queries,
&context,
context,
)
.await
}
Expand Down
2 changes: 1 addition & 1 deletion lib/chrontext/src/combiner/lazy_graph_patterns/union.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ impl Combiner {
});
let left_solution_mappings = self
.lazy_graph_pattern(
&left,
left,
solution_mappings.clone(),
left_static_query_map,
left_prepared_virtualized_queries,
Expand Down
16 changes: 4 additions & 12 deletions lib/chrontext/src/combiner/static_subqueries.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ impl Combiner {
.sparql_database
.execute(&use_query)
.await
.map_err(|x| CombinerError::StaticQueryExecutionError(x))?;
.map_err(CombinerError::StaticQueryExecutionError)?;
complete_basic_virtualized_queries(
&solutions,
&mut self.prepper.basic_virtualized_queries,
Expand Down Expand Up @@ -77,11 +77,7 @@ pub(crate) fn split_static_queries_opt(
static_queries: &mut Option<HashMap<Context, Query>>,
context: &Context,
) -> Option<HashMap<Context, Query>> {
if let Some(static_queries) = static_queries {
Some(split_static_queries(static_queries, context))
} else {
None
}
static_queries.as_mut().map(|static_queries| split_static_queries(static_queries, context))
}

fn constrain_query(
Expand Down Expand Up @@ -122,8 +118,7 @@ fn constrain_query(
.map(|x| {
x.into_iter()
.map(|y: Option<Term>| {
if let Some(y) = y {
Some(match y {
y.map(|y| match y {
Term::NamedNode(nn) => GroundTerm::NamedNode(nn),
Term::BlankNode(_) => {
panic!()
Expand All @@ -133,9 +128,6 @@ fn constrain_query(
todo!()
}
})
} else {
None
}
})
.collect()
})
Expand Down Expand Up @@ -192,7 +184,7 @@ fn constrain_pattern_with_values(
fn get_variable_set(query: &Query) -> Vec<&Variable> {
if let Query::Select { pattern, .. } = query {
if let GraphPattern::Project { variables, .. } = pattern {
return variables.iter().collect();
variables.iter().collect()
} else {
panic!("Non project graph pattern in query")
}
Expand Down
8 changes: 4 additions & 4 deletions lib/chrontext/src/combiner/virtualized_queries.rs
Original file line number Diff line number Diff line change
Expand Up @@ -79,15 +79,15 @@ impl Combiner {
.virtualized_database
.query(&vq)
.await
.map_err(|x| CombinerError::VirtualizedDatabaseError(x))?;
.map_err(CombinerError::VirtualizedDatabaseError)?;

// We allow empty (no columns & rows) result for compatibility with e.g. Azure Kusto.
if mappings.height() == 0 && mappings.get_columns().is_empty() {
return Ok(self.attach_expected_empty_results(&vq, solution_mappings));
}

vq.validate(&mappings)
.map_err(|x| CombinerError::TimeseriesValidationError(x))?;
.map_err(CombinerError::TimeseriesValidationError)?;
let mut mappings = mappings.lazy();
let drop_cols = get_drop_cols(&vq);
let mut groupby_cols: Vec<_> = vq
Expand Down Expand Up @@ -160,11 +160,11 @@ impl Combiner {
on_cols.as_slice(),
JoinArgs::new(JoinType::Inner),
)
.drop(drop_cols.iter().map(|x|col(x)));
.drop(drop_cols.iter().map(col));
for c in &drop_cols {
solution_mappings.rdf_node_types.remove(c);
}
return Ok(solution_mappings);
Ok(solution_mappings)
}
}

Expand Down
10 changes: 8 additions & 2 deletions lib/chrontext/src/constraints.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,12 @@ pub struct VariableConstraints {
variable_constraints: Vec<(VariableInContext, Constraint)>,
}

// Clippy `new_without_default`: types with a no-argument `new()` should also
// implement `Default` so callers can use `VariableConstraints::default()`,
// `#[derive(Default)]` on containing structs, `mem::take`, etc.
// Simply delegates to the existing constructor, which builds an empty
// `variable_constraints` list.
impl Default for VariableConstraints {
fn default() -> Self {
Self::new()
}
}

impl VariableConstraints {
pub fn get_constraint(&self, variable: &Variable, context: &Context) -> Option<&Constraint> {
let mut constraint = None;
Expand Down Expand Up @@ -40,8 +46,8 @@ impl VariableConstraints {
}

pub fn new() -> VariableConstraints {
return VariableConstraints {
VariableConstraints {
variable_constraints: vec![],
};
}
}
}
6 changes: 3 additions & 3 deletions lib/chrontext/src/engine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,8 @@ impl Engine {
) -> Engine {
Engine {
pushdown_settings,
virtualized_database: virtualized_database,
sparql_database: sparql_database,
virtualized_database,
sparql_database,
virtualization,
}
}
Expand Down Expand Up @@ -115,7 +115,7 @@ impl Engine {
let mut solution_mappings = combiner
.combine_static_and_time_series_results(static_queries_map, &preprocessed_query)
.await
.map_err(|x| ChrontextError::CombinerError(x))?;
.map_err(ChrontextError::CombinerError)?;
for (original, renamed) in rename_map {
if let Some(dt) = solution_mappings.rdf_node_types.remove(&renamed) {
solution_mappings.mappings = solution_mappings
Expand Down
8 changes: 4 additions & 4 deletions lib/chrontext/src/preparing/expressions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -89,12 +89,12 @@ impl TimeseriesQueryPrepper {
) -> EXPrepReturn {
match expression {
Expression::NamedNode(..) => {
let exr = EXPrepReturn::new(HashMap::new());
exr

EXPrepReturn::new(HashMap::new())
}
Expression::Literal(..) => {
let exr = EXPrepReturn::new(HashMap::new());
exr

EXPrepReturn::new(HashMap::new())
}
Expression::Variable(..) => EXPrepReturn::new(HashMap::new()),
Expression::Or(left, right) => self.prepare_or_expression(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ impl TimeseriesQueryPrepper {
solution_mappings: &mut SolutionMappings,
context: &Context,
) -> EXPrepReturn {
let (left_path_entry, right_path_entry) = match { operation } {
let (left_path_entry, right_path_entry) = match operation {
BinaryOrdinaryOperator::Add => (PathEntry::AddLeft, PathEntry::AddRight),
BinaryOrdinaryOperator::Subtract => (PathEntry::SubtractLeft, PathEntry::SubtractRight),
BinaryOrdinaryOperator::Multiply => (PathEntry::MultiplyLeft, PathEntry::MultiplyRight),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ impl TimeseriesQueryPrepper {
if args_prepared.iter().any(|x| x.fail_groupby_complex_query) {
return EXPrepReturn::fail_groupby_complex_query();
}
if args_prepared.len() > 0 {
if !args_prepared.is_empty() {
let mut first_prepared = args_prepared.remove(0);
for p in args_prepared {
first_prepared.with_virtualized_queries_from(p)
Expand Down
6 changes: 3 additions & 3 deletions lib/chrontext/src/preparing/expressions/not_expression.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,12 @@ impl TimeseriesQueryPrepper {
solution_mappings: &mut SolutionMappings,
context: &Context,
) -> EXPrepReturn {
let wrapped_prepare = self.prepare_expression(

self.prepare_expression(
wrapped,
try_groupby_complex_query,
solution_mappings,
&context.extension_with(PathEntry::Not),
);
wrapped_prepare
)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,12 @@ impl TimeseriesQueryPrepper {
UnaryOrdinaryOperator::UnaryPlus => PathEntry::UnaryPlus,
UnaryOrdinaryOperator::UnaryMinus => PathEntry::UnaryMinus,
};
let wrapped_prepare = self.prepare_expression(

self.prepare_expression(
wrapped,
try_groupby_complex_query,
solution_mappings,
&context.extension_with(path_entry),
);
wrapped_prepare
)
}
}
4 changes: 2 additions & 2 deletions lib/chrontext/src/preparing/graph_patterns.rs
Original file line number Diff line number Diff line change
Expand Up @@ -163,8 +163,8 @@ impl TimeseriesQueryPrepper {
start,
length,
} => self.prepare_slice(
start.clone(),
length.clone(),
*start,
*length,
inner,
try_groupby_complex_query,
solution_mappings,
Expand Down
Loading

0 comments on commit f568057

Please sign in to comment.