Update Rust version to 1.57 #1395

Merged: 5 commits, Dec 4, 2021

Changes from 2 commits
2 changes: 1 addition & 1 deletion ballista-examples/Cargo.toml
@@ -26,7 +26,7 @@ license = "Apache-2.0"
keywords = [ "arrow", "distributed", "query", "sql" ]
edition = "2021"
publish = false
rust-version = "1.56"
rust-version = "1.57"

[dependencies]
datafusion = { path = "../datafusion" }
2 changes: 1 addition & 1 deletion ballista/rust/client/Cargo.toml
@@ -24,7 +24,7 @@ homepage = "https://github.com/apache/arrow-datafusion"
repository = "https://github.com/apache/arrow-datafusion"
authors = ["Apache Arrow <[email protected]>"]
edition = "2021"
rust-version = "1.56"
rust-version = "1.57"

[dependencies]
ballista-core = { path = "../core", version = "0.6.0" }
2 changes: 1 addition & 1 deletion ballista/rust/scheduler/src/state/mod.rs
@@ -567,7 +567,7 @@ fn find_unresolved_shuffles(
Ok(plan
.children()
.iter()
.map(|child| find_unresolved_shuffles(child))
.map(find_unresolved_shuffles)
.collect::<Result<Vec<_>>>()?
.into_iter()
.flatten()
2 changes: 1 addition & 1 deletion benchmarks/Cargo.toml
@@ -25,7 +25,7 @@ homepage = "https://github.com/apache/arrow-datafusion"
repository = "https://github.com/apache/arrow-datafusion"
license = "Apache-2.0"
publish = false
rust-version = "1.56"
rust-version = "1.57"

[features]
simd = ["datafusion/simd"]
2 changes: 1 addition & 1 deletion datafusion-cli/Cargo.toml
@@ -24,7 +24,7 @@ keywords = [ "arrow", "datafusion", "ballista", "query", "sql" ]
license = "Apache-2.0"
homepage = "https://github.com/apache/arrow-datafusion"
repository = "https://github.com/apache/arrow-datafusion"
rust-version = "1.56"
rust-version = "1.57"

[dependencies]
clap = "2.33"
2 changes: 1 addition & 1 deletion datafusion-cli/Dockerfile
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.

FROM rust:1.56 as builder
FROM rust:1.57 as builder

COPY ./datafusion /usr/src/datafusion

2 changes: 1 addition & 1 deletion datafusion-examples/Cargo.toml
@@ -26,7 +26,7 @@ license = "Apache-2.0"
keywords = [ "arrow", "query", "sql" ]
edition = "2021"
publish = false
rust-version = "1.56"
rust-version = "1.57"

[[example]]
name = "avro_sql"
2 changes: 1 addition & 1 deletion datafusion/Cargo.toml
@@ -31,7 +31,7 @@ include = [
"Cargo.toml",
]
edition = "2021"
rust-version = "1.56"
rust-version = "1.57"

[lib]
name = "datafusion"
6 changes: 5 additions & 1 deletion datafusion/src/physical_plan/datetime_expressions.rs
@@ -42,6 +42,7 @@ use arrow::{
};
use chrono::prelude::*;
use chrono::Duration;
use std::borrow::Borrow;

/// given a function `op` that maps a `&str` to a Result of an arrow native type,
/// returns a `PrimitiveArray` after the application
@@ -77,7 +78,10 @@ where
})?;

// first map is the iterator, second is for the `Option<_>`
array.iter().map(|x| x.map(|x| op(x)).transpose()).collect()
array
.iter()
.map(|x| x.map(op.borrow()).transpose())
.collect()
}

// given an function that maps a `&str` to a arrow native type,
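A note on the `op.borrow()` change above: clippy's `redundant_closure` lint flags `|x| op(x)`, but `op` cannot simply be passed by value inside the outer closure, since that would move it out of the captured environment on every iteration. `op.borrow()` yields a `&F`, which is `Copy` and still implements `Fn`; the `&op` change in string_expressions.rs further down relies on the same idea. A minimal standalone sketch of the pattern (names are illustrative, not DataFusion APIs):

```rust
use std::borrow::Borrow;

// Hypothetical reduction of the pattern: `op` is captured by the outer
// closure, so it cannot be moved into `Option::map` on each iteration.
// `op.borrow()` produces a `&F`, which is `Copy` and also implements `Fn`.
fn apply_optional<F>(values: &[Option<&str>], op: F) -> Vec<Option<usize>>
where
    F: Fn(&str) -> usize,
{
    values
        .iter()
        .copied()
        .map(|v| v.map(op.borrow()))
        .collect()
}

fn main() {
    let lengths = apply_optional(&[Some("hello"), None, Some("rs")], str::len);
    assert_eq!(lengths, vec![Some(5), None, Some(2)]);
}
```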
3 changes: 1 addition & 2 deletions datafusion/src/physical_plan/expressions/average.rs
@@ -37,8 +37,8 @@ use super::{format_state_name, sum};
#[derive(Debug)]
pub struct Avg {
name: String,
#[allow(dead_code)]
Contributor: I will look into removing this field as well.

Contributor: This PR might be the reason for the different dead_code behavior: rust-lang/rust#85200

Contributor: I think you are right.

data_type: DataType,
nullable: bool,
expr: Arc<dyn PhysicalExpr>,
}

@@ -73,7 +73,6 @@ impl Avg {
name: name.into(),
expr,
data_type,
nullable: true,
}
}
}
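For context on the dead_code discussion above: on the newer toolchain a struct field that is assigned but never read is reported by the `dead_code` lint, which is presumably why `nullable` either needs `#[allow(dead_code)]` or has to be removed. A minimal reproduction of the pattern (assumed, not taken from the PR):

```rust
// Assumed minimal reproduction: the field is written in the constructor but
// never read, so the newer `dead_code` lint reports it as never read.
struct Avg {
    name: String,
    nullable: bool, // only ever written, never read -> dead_code warning
}

impl Avg {
    fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            nullable: true,
        }
    }
}

fn main() {
    let avg = Avg::new("avg(x)");
    // Only `name` is read anywhere in the program.
    println!("{}", avg.name);
}
```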
6 changes: 3 additions & 3 deletions datafusion/src/physical_plan/sort_preserving_merge.rs
@@ -346,7 +346,7 @@ struct SortPreservingMergeStream
receivers: Vec<mpsc::Receiver<ArrowResult<RecordBatch>>>,

/// Drop helper for tasks feeding the [`receivers`](Self::receivers)
drop_helper: AbortOnDropMany<()>,
_drop_helper: AbortOnDropMany<()>,
Contributor: this is used (its drop() impl is used), so change its name rather than removing it.

/// For each input stream maintain a dequeue of SortKeyCursor
///
@@ -379,7 +379,7 @@ struct SortPreservingMergeStream
impl SortPreservingMergeStream {
fn new(
receivers: Vec<mpsc::Receiver<ArrowResult<RecordBatch>>>,
drop_helper: AbortOnDropMany<()>,
_drop_helper: AbortOnDropMany<()>,
schema: SchemaRef,
expressions: &[PhysicalSortExpr],
target_batch_size: usize,
@@ -394,7 +394,7 @@ impl SortPreservingMergeStream {
schema,
cursors,
receivers,
drop_helper,
_drop_helper,
column_expressions: expressions.iter().map(|x| x.expr.clone()).collect(),
sort_options: expressions.iter().map(|x| x.options).collect(),
target_batch_size,
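To illustrate the `_drop_helper` comment above: the underscore prefix silences the unused-field warning, but the value is still stored in the struct, so its `Drop` impl still runs when the stream is dropped; deleting the field would lose that cleanup. A rough sketch with made-up stand-in types:

```rust
// Made-up stand-ins for AbortOnDropMany and the merge stream, showing that
// an underscore-prefixed field is still owned and therefore still dropped.
struct AbortOnDrop;

impl Drop for AbortOnDrop {
    fn drop(&mut self) {
        println!("aborting background tasks");
    }
}

struct MergeStream {
    // Never read directly; kept only for its Drop side effect.
    _drop_helper: AbortOnDrop,
}

fn main() {
    let stream = MergeStream { _drop_helper: AbortOnDrop };
    println!("stream alive");
    drop(stream); // prints "aborting background tasks" here, not earlier
}
```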
5 changes: 1 addition & 4 deletions datafusion/src/physical_plan/string_expressions.rs
@@ -117,10 +117,7 @@ where
let string_array = downcast_string_arg!(args[0], "string", T);

// first map is the iterator, second is for the `Option<_>`
Ok(string_array
.iter()
.map(|string| string.map(|s| op(s)))
.collect())
Ok(string_array.iter().map(|string| string.map(&op)).collect())
}

fn handle<'a, F, R>(args: &'a [ColumnarValue], op: F, name: &str) -> Result<ColumnarValue>
4 changes: 2 additions & 2 deletions datafusion/src/scalar.rs
@@ -68,7 +68,7 @@ pub enum ScalarValue {
/// large binary
LargeBinary(Option<Vec<u8>>),
/// list of nested ScalarValue (boxed to reduce size_of(ScalarValue))
#[allow(clippy::box_vec)]
#[allow(clippy::box_collection)]
Contributor: Oh clippy, how wrong you are :)

The benefit is that the overall enum is smaller: the size of a Vec<ScalarValue> is three pointers (24 bytes), while a Box<Vec<ScalarValue>> is 8 bytes.

List(Option<Box<Vec<ScalarValue>>>, Box<DataType>),
/// Date stored as a signed 32bit int
Date32(Option<i32>),
@@ -87,7 +87,7 @@
/// Interval with DayTime unit
IntervalDayTime(Option<i64>),
/// struct of nested ScalarValue (boxed to reduce size_of(ScalarValue))
#[allow(clippy::box_vec)]
#[allow(clippy::box_collection)]
Struct(Option<Box<Vec<ScalarValue>>>, Box<Vec<Field>>),
}

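A quick illustration of the size argument in the comment above, as a standalone toy (not DataFusion's actual `ScalarValue`): boxing the `Vec` leaves a single pointer inside the variant instead of the `Vec`'s three words.

```rust
use std::mem::size_of;

// Toy enum mirroring the boxed-list pattern; sizes assume a 64-bit target.
#[allow(dead_code)]
enum Scalar {
    Int64(Option<i64>),
    #[allow(clippy::box_collection)]
    List(Option<Box<Vec<Scalar>>>),
}

fn main() {
    println!("Vec<Scalar>:      {}", size_of::<Vec<Scalar>>()); // 24
    println!("Box<Vec<Scalar>>: {}", size_of::<Box<Vec<Scalar>>>()); // 8
    println!("Scalar:           {}", size_of::<Scalar>());
}
```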
12 changes: 8 additions & 4 deletions datafusion/src/sql/parser.rs
@@ -85,7 +85,7 @@ pub struct CreateExternalTable {
#[derive(Debug, Clone, PartialEq)]
pub enum Statement {
/// ANSI SQL AST node
Statement(SQLStatement),
Statement(Box<SQLStatement>),
/// Extension: `CREATE EXTERNAL TABLE`
CreateExternalTable(CreateExternalTable),
}
@@ -167,13 +167,17 @@ impl<'a> DFParser<'a> {
}
_ => {
// use the native parser
Ok(Statement::Statement(self.parser.parse_statement()?))
Ok(Statement::Statement(Box::from(
self.parser.parse_statement()?,
)))
}
}
}
_ => {
// use the native parser
Ok(Statement::Statement(self.parser.parse_statement()?))
Ok(Statement::Statement(Box::from(
self.parser.parse_statement()?,
)))
}
}
}
@@ -183,7 +187,7 @@ impl<'a> DFParser<'a> {
if self.parser.parse_keyword(Keyword::EXTERNAL) {
self.parse_create_external_table()
} else {
Ok(Statement::Statement(self.parser.parse_create()?))
Ok(Statement::Statement(Box::from(self.parser.parse_create()?)))
}
}

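The thread records no rationale for boxing `SQLStatement`, but it presumably follows the same size reasoning as the `ScalarValue` change above: every enum variant must fit in the same storage, so boxing a large payload (as clippy's `large_enum_variant` lint usually suggests) keeps the whole `Statement` enum small. A generic sketch with invented types, not the actual sqlparser AST:

```rust
use std::mem::size_of;

// Invented types: the point is only that boxing the large variant shrinks
// the enum, since every variant shares the same storage.
#[allow(dead_code)]
struct BigAstNode {
    _data: [u64; 16], // stand-in for a large parsed statement
}

#[allow(dead_code)]
enum UnboxedStatement {
    Statement(BigAstNode),
    CreateExternalTable(u8),
}

#[allow(dead_code)]
enum BoxedStatement {
    Statement(Box<BigAstNode>),
    CreateExternalTable(u8),
}

fn main() {
    // The boxed version is far smaller (a pointer plus a tag).
    println!("unboxed: {} bytes", size_of::<UnboxedStatement>());
    println!("boxed:   {} bytes", size_of::<BoxedStatement>());
}
```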
4 changes: 2 additions & 2 deletions datafusion/src/test/exec.rs
@@ -549,7 +549,7 @@ impl ExecutionPlan for BlockingExec {
async fn execute(&self, _partition: usize) -> Result<SendableRecordBatchStream> {
Ok(Box::pin(BlockingStream {
schema: Arc::clone(&self.schema),
refs: Arc::clone(&self.refs),
_refs: Arc::clone(&self.refs),
}))
}

@@ -577,7 +577,7 @@ pub struct BlockingStream {
schema: SchemaRef,

/// Ref-counting helper to check if the stream are still in memory.
refs: Arc<()>,
_refs: Arc<()>,
}

impl Stream for BlockingStream {
2 changes: 1 addition & 1 deletion dev/docker/ballista-base.dockerfile
@@ -23,7 +23,7 @@


# Base image extends debian:buster-slim
FROM rust:1.56.0-buster AS builder
FROM rust:1.57.0-buster AS builder

RUN apt update && apt -y install musl musl-dev musl-tools libssl-dev openssl

2 changes: 1 addition & 1 deletion python/Cargo.toml
@@ -25,7 +25,7 @@ description = "Build and run queries against data"
readme = "README.md"
license = "Apache-2.0"
edition = "2021"
rust-version = "1.56"
rust-version = "1.57"

[dependencies]
tokio = { version = "1.0", features = ["macros", "rt", "rt-multi-thread", "sync"] }