Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: dependency cleanup #1150

Merged
merged 11 commits on Dec 14, 2024
2 changes: 0 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ members = ["ballista-cli", "ballista/client", "ballista/core", "ballista/executo
resolver = "2"

[workspace.dependencies]
anyhow = "1"
arrow = { version = "53", features = ["ipc_compression"] }
arrow-flight = { version = "53", features = ["flight-sql-experimental"] }
clap = { version = "4.5", features = ["derive", "cargo"] }
Expand Down Expand Up @@ -58,7 +57,6 @@ dashmap = { version = "6.1" }
async-trait = { version = "0.1.4" }
serde = { version = "1.0" }
tokio-stream = { version = "0.1" }
parse_arg = { version = "0.1" }
url = { version = "2.5" }

# cargo build --profile release-lto
Expand Down
3 changes: 1 addition & 2 deletions ballista/core/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -44,15 +44,14 @@ arrow-flight = { workspace = true }
async-trait = { workspace = true }
chrono = { version = "0.4", default-features = false }
clap = { workspace = true }
configure_me = { workspace = true }
datafusion = { workspace = true }
datafusion-proto = { workspace = true }
datafusion-proto-common = { workspace = true }
futures = { workspace = true }

itertools = "0.13"
log = { workspace = true }
md-5 = { version = "^0.10.0" }
parse_arg = { workspace = true }
prost = { workspace = true }
prost-types = { workspace = true }
rand = { workspace = true }
Expand Down
4 changes: 2 additions & 2 deletions ballista/core/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -267,7 +267,7 @@ impl std::str::FromStr for TaskSchedulingPolicy {
}
}

impl parse_arg::ParseArgFromStr for TaskSchedulingPolicy {
impl configure_me::parse_arg::ParseArgFromStr for TaskSchedulingPolicy {
fn describe_type<W: fmt::Write>(mut writer: W) -> fmt::Result {
write!(writer, "The scheduler policy for the scheduler")
}
Expand All @@ -292,7 +292,7 @@ impl std::str::FromStr for LogRotationPolicy {
}
}

impl parse_arg::ParseArgFromStr for LogRotationPolicy {
impl configure_me::parse_arg::ParseArgFromStr for LogRotationPolicy {
fn describe_type<W: fmt::Write>(mut writer: W) -> fmt::Result {
write!(writer, "The log rotation policy")
}
Expand Down
148 changes: 148 additions & 0 deletions ballista/core/src/diagram.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,148 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

use crate::error::Result;
use crate::execution_plans::{ShuffleWriterExec, UnresolvedShuffleExec};

use datafusion::datasource::physical_plan::{CsvExec, ParquetExec};
use datafusion::physical_plan::aggregates::AggregateExec;
use datafusion::physical_plan::coalesce_batches::CoalesceBatchesExec;
use datafusion::physical_plan::coalesce_partitions::CoalescePartitionsExec;
use datafusion::physical_plan::filter::FilterExec;
use datafusion::physical_plan::joins::HashJoinExec;
use datafusion::physical_plan::projection::ProjectionExec;
use datafusion::physical_plan::sorts::sort::SortExec;
use datafusion::physical_plan::ExecutionPlan;
use std::fs::File;
use std::io::{BufWriter, Write};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;

/// Render the given query stages as a Graphviz DOT diagram and write it to
/// `filename`.
///
/// Each stage is emitted as its own `subgraph cluster` containing one box per
/// operator; shuffle boundaries between stages become cross-cluster edges.
/// The stages are walked twice: the first pass draws the nodes, the second
/// draws only the edges.
///
/// # Errors
/// Returns an error if the file cannot be created or any write fails.
pub fn produce_diagram(filename: &str, stages: &[Arc<ShuffleWriterExec>]) -> Result<()> {
    let out_file = File::create(filename)?;
    let mut writer = BufWriter::new(&out_file);
    writeln!(writer, "digraph G {{")?;

    // Pass 1 (draw_entity == true): emit one cluster per stage with its nodes.
    // Pass 2 (draw_entity == false): emit the edges, including the
    // cross-stage shuffle relationships.
    for draw_entity in [true, false] {
        for stage in stages {
            if draw_entity {
                writeln!(writer, "\tsubgraph cluster{} {{", stage.stage_id())?;
                writeln!(writer, "\t\tlabel = \"Stage {}\";", stage.stage_id())?;
            }
            // Node ids restart at 0 for every stage in every pass so that the
            // two passes assign identical ids to identical operators.
            let mut node_counter = AtomicUsize::new(0);
            build_exec_plan_diagram(
                &mut writer,
                stage.children()[0].as_ref(),
                stage.stage_id(),
                &mut node_counter,
                draw_entity,
            )?;
            if draw_entity {
                writeln!(writer, "\t}}")?;
            }
        }
    }

    write!(writer, "}}")?;
    Ok(())
}

/// Emit the DOT representation of one execution-plan subtree.
///
/// When `draw_entity` is true, a labelled box node is written for every
/// operator; when false, only the edges between previously emitted nodes are
/// written (the caller runs both passes with the counter reset so ids match).
///
/// Returns the node id assigned to `plan`, so a parent can draw an edge to
/// this child.
///
/// # Errors
/// Returns an error if any write to `w` fails.
fn build_exec_plan_diagram(
    w: &mut BufWriter<&File>,
    plan: &dyn ExecutionPlan,
    stage_id: usize,
    id: &mut AtomicUsize,
    draw_entity: bool,
) -> Result<usize> {
    // Map the concrete operator type to a short display label.
    let operator_str = if plan.as_any().downcast_ref::<AggregateExec>().is_some() {
        "AggregateExec"
    } else if plan.as_any().downcast_ref::<SortExec>().is_some() {
        "SortExec"
    } else if plan.as_any().downcast_ref::<ProjectionExec>().is_some() {
        "ProjectionExec"
    } else if plan.as_any().downcast_ref::<HashJoinExec>().is_some() {
        "HashJoinExec"
    } else if plan.as_any().downcast_ref::<ParquetExec>().is_some() {
        "ParquetExec"
    } else if plan.as_any().downcast_ref::<CsvExec>().is_some() {
        "CsvExec"
    } else if plan.as_any().downcast_ref::<FilterExec>().is_some() {
        "FilterExec"
    } else if plan.as_any().downcast_ref::<ShuffleWriterExec>().is_some() {
        "ShuffleWriterExec"
    } else if plan
        .as_any()
        .downcast_ref::<UnresolvedShuffleExec>()
        .is_some()
    {
        "UnresolvedShuffleExec"
    } else if plan
        .as_any()
        .downcast_ref::<CoalesceBatchesExec>()
        .is_some()
    {
        "CoalesceBatchesExec"
    } else if plan
        .as_any()
        .downcast_ref::<CoalescePartitionsExec>()
        .is_some()
    {
        "CoalescePartitionsExec"
    } else {
        // Best-effort fallback for operators we do not recognise; the debug
        // print keeps the diagram generation going instead of failing.
        println!("Unknown: {plan:?}");
        "Unknown"
    };

    // Claim the next node id atomically. A single fetch_add replaces the
    // previous non-atomic load-then-store pair, which was not actually atomic
    // and would race if the counter were ever shared.
    let node_id = id.fetch_add(1, Ordering::SeqCst);

    if draw_entity {
        writeln!(
            w,
            "\t\tstage_{stage_id}_exec_{node_id} [shape=box, label=\"{operator_str}\"];"
        )?;
    }
    for child in plan.children() {
        if let Some(shuffle) = child.as_any().downcast_ref::<UnresolvedShuffleExec>() {
            if !draw_entity {
                // NOTE(review): the edge source is hard-coded to node 1 of the
                // producing stage; presumably that is where the producing
                // stage's root operator lands — confirm against the id
                // assignment order in the entity pass.
                writeln!(
                    w,
                    "\tstage_{}_exec_1 -> stage_{}_exec_{};",
                    shuffle.stage_id, stage_id, node_id
                )?;
            }
        } else {
            // Recurse for relationships within the same stage/entity.
            let child_id =
                build_exec_plan_diagram(w, child.as_ref(), stage_id, id, draw_entity)?;
            if draw_entity {
                writeln!(
                    w,
                    "\t\tstage_{stage_id}_exec_{child_id} -> stage_{stage_id}_exec_{node_id};"
                )?;
            }
        }
    }
    Ok(node_id)
}
48 changes: 4 additions & 44 deletions ballista/core/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,15 +37,11 @@ pub enum BallistaError {
NotImplemented(String),
General(String),
Internal(String),
Configuration(String),
ArrowError(ArrowError),
DataFusionError(DataFusionError),
SqlError(parser::ParserError),
IoError(io::Error),
// ReqwestError(reqwest::Error),
// HttpError(http::Error),
// KubeAPIError(kube::error::Error),
// KubeAPIRequestError(k8s_openapi::RequestError),
// KubeAPIResponseError(k8s_openapi::ResponseError),
TonicError(tonic::transport::Error),
GrpcError(tonic::Status),
GrpcConnectionError(String),
Expand Down Expand Up @@ -112,36 +108,6 @@ impl From<io::Error> for BallistaError {
}
}

// impl From<reqwest::Error> for BallistaError {
// fn from(e: reqwest::Error) -> Self {
// BallistaError::ReqwestError(e)
// }
// }
//
// impl From<http::Error> for BallistaError {
// fn from(e: http::Error) -> Self {
// BallistaError::HttpError(e)
// }
// }

// impl From<kube::error::Error> for BallistaError {
// fn from(e: kube::error::Error) -> Self {
// BallistaError::KubeAPIError(e)
// }
// }

// impl From<k8s_openapi::RequestError> for BallistaError {
// fn from(e: k8s_openapi::RequestError) -> Self {
// BallistaError::KubeAPIRequestError(e)
// }
// }

// impl From<k8s_openapi::ResponseError> for BallistaError {
// fn from(e: k8s_openapi::ResponseError) -> Self {
// BallistaError::KubeAPIResponseError(e)
// }
// }

impl From<tonic::transport::Error> for BallistaError {
fn from(e: tonic::transport::Error) -> Self {
BallistaError::TonicError(e)
Expand Down Expand Up @@ -191,15 +157,6 @@ impl Display for BallistaError {
}
BallistaError::SqlError(ref desc) => write!(f, "SQL error: {desc}"),
BallistaError::IoError(ref desc) => write!(f, "IO error: {desc}"),
// BallistaError::ReqwestError(ref desc) => write!(f, "Reqwest error: {}", desc),
// BallistaError::HttpError(ref desc) => write!(f, "HTTP error: {}", desc),
// BallistaError::KubeAPIError(ref desc) => write!(f, "Kube API error: {}", desc),
// BallistaError::KubeAPIRequestError(ref desc) => {
// write!(f, "KubeAPI request error: {}", desc)
// }
// BallistaError::KubeAPIResponseError(ref desc) => {
// write!(f, "KubeAPI response error: {}", desc)
// }
BallistaError::TonicError(desc) => write!(f, "Tonic error: {desc}"),
BallistaError::GrpcError(desc) => write!(f, "Grpc error: {desc}"),
BallistaError::GrpcConnectionError(desc) => {
Expand All @@ -220,6 +177,9 @@ impl Display for BallistaError {
)
}
BallistaError::Cancelled => write!(f, "Task cancelled"),
BallistaError::Configuration(desc) => {
write!(f, "Configuration error: {desc}")
}
}
}
}
Expand Down
5 changes: 2 additions & 3 deletions ballista/core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,15 +29,14 @@ pub fn print_version() {
pub mod client;
pub mod config;
pub mod consistent_hash;
pub mod diagram;
pub mod error;
pub mod event_loop;
pub mod execution_plans;
pub mod extension;
pub mod registry;
pub mod utils;

#[macro_use]
pub mod serde;
pub mod utils;

///
/// [RuntimeProducer] is a factory which creates runtime [RuntimeEnv]
Expand Down
Loading
Loading