docs
comphead committed Oct 15, 2023
1 parent b4f8b9f commit 4c6252c
Showing 25 changed files with 123 additions and 99 deletions.
4 changes: 2 additions & 2 deletions datafusion-cli/src/exec.rs
@@ -26,7 +26,7 @@ use crate::{
},
print_options::{MaxRows, PrintOptions},
};
use datafusion::common::plan_err_raw;
use datafusion::common::plan_datafusion_err;
use datafusion::sql::{parser::DFParser, sqlparser::dialect::dialect_from_str};
use datafusion::{
datasource::listing::ListingTableUrl,
@@ -203,7 +203,7 @@ async fn exec_and_print(
let task_ctx = ctx.task_ctx();
let dialect = &task_ctx.session_config().options().sql_parser.dialect;
let dialect = dialect_from_str(dialect).ok_or_else(|| {
plan_err_raw!(
plan_datafusion_err!(
"Unsupported SQL dialect: {dialect}. Available dialects: \
Generic, MySQL, PostgreSQL, Hive, SQLite, Snowflake, Redshift, \
MsSQL, ClickHouse, BigQuery, Ansi."
20 changes: 13 additions & 7 deletions datafusion/common/src/error.rs
@@ -477,19 +477,25 @@ macro_rules! with_dollar_sign {
/// plan_err!("Error {:?}", val)
/// plan_err!("Error {val}")
/// plan_err!("Error {val:?}")
///
/// [`NAME_ERR`] - macro name for wrapping Err(DataFusionError::*)
/// [`NAME_DF_ERR`] - macro name for wrapping DataFusionError::*. Needed to keep backtrace opportunity
/// in construction where DataFusionError::* used directly, like `map_err`, `ok_or_else`, etc
macro_rules! make_error {
($NAME:ident, $NAME_RAW: ident, $ERR:ident) => {
($NAME_ERR:ident, $NAME_DF_ERR: ident, $ERR:ident) => {
with_dollar_sign! {
($d:tt) => {
/// Macro wraps `$ERR` to add backtrace feature
#[macro_export]
macro_rules! $NAME_RAW {
macro_rules! $NAME_DF_ERR {
($d($d args:expr),*) => {
DataFusionError::$ERR(format!("{}{}", format!($d($d args),*), DataFusionError::get_back_trace()).into())
}
}

/// Macro wraps Err(`$ERR`) to add backtrace feature
#[macro_export]
macro_rules! $NAME {
macro_rules! $NAME_ERR {
($d($d args:expr),*) => {
Err(DataFusionError::$ERR(format!("{}{}", format!($d($d args),*), DataFusionError::get_back_trace()).into()))
}
@@ -500,16 +506,16 @@ macro_rules! make_error {
}

// Exposes a macro to create `DataFusionError::Plan`
make_error!(plan_err, plan_err_raw, Plan);
make_error!(plan_err, plan_datafusion_err, Plan);

// Exposes a macro to create `DataFusionError::Internal`
make_error!(internal_err, internal_err_raw, Internal);
make_error!(internal_err, internal_datafusion_err, Internal);

// Exposes a macro to create `DataFusionError::NotImplemented`
make_error!(not_impl_err, not_impl_err_raw, NotImplemented);
make_error!(not_impl_err, not_impl_datafusion_err, NotImplemented);

// Exposes a macro to create `DataFusionError::Execution`
make_error!(exec_err, exec_err_raw, Execution);
make_error!(exec_err, exec_datafusion_err, Execution);

// Exposes a macro to create `DataFusionError::SQL`
#[macro_export]
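Aside (not part of the diff): the two macros generated by `make_error!` differ only in what they return. The `*_err` form (e.g. `plan_err!`) expands to `Err(DataFusionError::Plan(...))` and is meant to be returned directly, while the renamed `*_datafusion_err` form (e.g. `plan_datafusion_err!`) yields the bare `DataFusionError`, which is what closures passed to `ok_or_else` or `map_err` expect. A minimal usage sketch follows; the helper functions and the lookup map are hypothetical, while the imports and macros are the real `datafusion_common` items shown in the diff.

use std::collections::HashMap;

use datafusion_common::{plan_datafusion_err, plan_err, Result};

// plan_err! already wraps the error in Err(..), so it can be returned as-is.
fn require_positive(n: i64) -> Result<i64> {
    if n <= 0 {
        return plan_err!("expected a positive value, got {n}");
    }
    Ok(n)
}

// plan_datafusion_err! builds a bare DataFusionError (with backtrace appended),
// so it fits ok_or_else / map_err without an extra Err(..) wrapper.
// `tables` is a stand-in lookup map for illustration only.
fn lookup_table(tables: &HashMap<String, u32>, name: &str) -> Result<u32> {
    tables
        .get(name)
        .copied()
        .ok_or_else(|| plan_datafusion_err!("table '{name}' not found"))
}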
19 changes: 11 additions & 8 deletions datafusion/core/src/execution/context.rs
@@ -30,7 +30,7 @@ use crate::{
};
use datafusion_common::{
alias::AliasGenerator,
exec_err, not_impl_err, plan_err, plan_err_raw,
exec_err, not_impl_err, plan_datafusion_err, plan_err,
tree_node::{TreeNode, TreeNodeVisitor, VisitRecursion},
};
use datafusion_execution::registry::SerializerRegistry;
@@ -1577,11 +1577,14 @@ impl SessionState {
self.catalog_list
.catalog(&resolved_ref.catalog)
.ok_or_else(|| {
plan_err_raw!("failed to resolve catalog: {}", resolved_ref.catalog)
plan_datafusion_err!(
"failed to resolve catalog: {}",
resolved_ref.catalog
)
})?
.schema(&resolved_ref.schema)
.ok_or_else(|| {
plan_err_raw!("failed to resolve schema: {}", resolved_ref.schema)
plan_datafusion_err!("failed to resolve schema: {}", resolved_ref.schema)
})
}

@@ -1683,7 +1686,7 @@ impl SessionState {
dialect: &str,
) -> Result<datafusion_sql::parser::Statement> {
let dialect = dialect_from_str(dialect).ok_or_else(|| {
plan_err_raw!(
plan_datafusion_err!(
"Unsupported SQL dialect: {dialect}. Available dialects: \
Generic, MySQL, PostgreSQL, Hive, SQLite, Snowflake, Redshift, \
MsSQL, ClickHouse, BigQuery, Ansi."
@@ -2016,7 +2019,7 @@ impl<'a> ContextProvider for SessionContextProvider<'a> {
self.tables
.get(&name)
.cloned()
.ok_or_else(|| plan_err_raw!("table '{name}' not found"))
.ok_or_else(|| plan_datafusion_err!("table '{name}' not found"))
}

fn get_function_meta(&self, name: &str) -> Option<Arc<ScalarUDF>> {
Expand Down Expand Up @@ -2063,23 +2066,23 @@ impl FunctionRegistry for SessionState {
let result = self.scalar_functions.get(name);

result.cloned().ok_or_else(|| {
plan_err_raw!("There is no UDF named \"{name}\" in the registry")
plan_datafusion_err!("There is no UDF named \"{name}\" in the registry")
})
}

fn udaf(&self, name: &str) -> Result<Arc<AggregateUDF>> {
let result = self.aggregate_functions.get(name);

result.cloned().ok_or_else(|| {
plan_err_raw!("There is no UDAF named \"{name}\" in the registry")
plan_datafusion_err!("There is no UDAF named \"{name}\" in the registry")
})
}

fn udwf(&self, name: &str) -> Result<Arc<WindowUDF>> {
let result = self.window_functions.get(name);

result.cloned().ok_or_else(|| {
plan_err_raw!("There is no UDWF named \"{name}\" in the registry")
plan_datafusion_err!("There is no UDWF named \"{name}\" in the registry")
})
}
}
7 changes: 4 additions & 3 deletions datafusion/core/src/physical_optimizer/pruning.rs
@@ -35,7 +35,7 @@ use arrow::{
datatypes::{DataType, Field, Schema, SchemaRef},
record_batch::RecordBatch,
};
use datafusion_common::{downcast_value, plan_err_raw, ScalarValue};
use datafusion_common::{downcast_value, plan_datafusion_err, ScalarValue};
use datafusion_common::{
internal_err, plan_err,
tree_node::{Transformed, TreeNode},
@@ -450,8 +450,9 @@ fn build_statistics_record_batch<S: PruningStatistics>(
arrays
);

RecordBatch::try_new_with_options(schema, arrays, &options)
.map_err(|err| plan_err_raw!("Can not create statistics record batch: {err}"))
RecordBatch::try_new_with_options(schema, arrays, &options).map_err(|err| {
plan_datafusion_err!("Can not create statistics record batch: {err}")
})
}

struct PruningExpressionBuilder<'a> {
6 changes: 3 additions & 3 deletions datafusion/core/src/physical_optimizer/sort_pushdown.rs
@@ -29,7 +29,7 @@ use crate::physical_plan::sorts::sort::SortExec;
use crate::physical_plan::{with_new_children_if_necessary, ExecutionPlan};

use datafusion_common::tree_node::{Transformed, TreeNode, VisitRecursion};
use datafusion_common::{plan_err, plan_err_raw, DataFusionError, Result};
use datafusion_common::{plan_datafusion_err, plan_err, DataFusionError, Result};
use datafusion_expr::JoinType;
use datafusion_physical_expr::expressions::Column;
use datafusion_physical_expr::utils::{
@@ -127,7 +127,7 @@ pub(crate) fn pushdown_sorts(
let plan = &requirements.plan;
let parent_required = requirements.required_ordering.as_deref();
const ERR_MSG: &str = "Expects parent requirement to contain something";
let err = || plan_err_raw!("{}", ERR_MSG);
let err = || plan_datafusion_err!("{}", ERR_MSG);
if let Some(sort_exec) = plan.as_any().downcast_ref::<SortExec>() {
let mut new_plan = plan.clone();
if !ordering_satisfy_requirement(
@@ -199,7 +199,7 @@ fn pushdown_requirement_to_children(
parent_required: Option<&[PhysicalSortRequirement]>,
) -> Result<Option<Vec<Option<Vec<PhysicalSortRequirement>>>>> {
const ERR_MSG: &str = "Expects parent requirement to contain something";
let err = || plan_err_raw!("{}", ERR_MSG);
let err = || plan_datafusion_err!("{}", ERR_MSG);
let maintains_input_order = plan.maintains_input_order();
if is_window(plan) {
let required_input_ordering = plan.required_input_ordering();
6 changes: 3 additions & 3 deletions datafusion/execution/src/task.rs
@@ -22,7 +22,7 @@ use std::{

use datafusion_common::{
config::{ConfigOptions, Extensions},
plan_err_raw, DataFusionError, Result,
plan_datafusion_err, DataFusionError, Result,
};
use datafusion_expr::{AggregateUDF, ScalarUDF, WindowUDF};

@@ -182,15 +182,15 @@ impl FunctionRegistry for TaskContext {
let result = self.scalar_functions.get(name);

result.cloned().ok_or_else(|| {
plan_err_raw!("There is no UDF named \"{name}\" in the TaskContext")
plan_datafusion_err!("There is no UDF named \"{name}\" in the TaskContext")
})
}

fn udaf(&self, name: &str) -> Result<Arc<AggregateUDF>> {
let result = self.aggregate_functions.get(name);

result.cloned().ok_or_else(|| {
plan_err_raw!("There is no UDAF named \"{name}\" in the TaskContext")
plan_datafusion_err!("There is no UDAF named \"{name}\" in the TaskContext")
})
}

4 changes: 2 additions & 2 deletions datafusion/expr/src/aggregate_function.rs
@@ -20,7 +20,7 @@
use crate::utils;
use crate::{type_coercion::aggregates::*, Signature, TypeSignature, Volatility};
use arrow::datatypes::{DataType, Field};
use datafusion_common::{plan_err, plan_err_raw, DataFusionError, Result};
use datafusion_common::{plan_datafusion_err, plan_err, DataFusionError, Result};
use std::sync::Arc;
use std::{fmt, str::FromStr};
use strum_macros::EnumIter;
@@ -232,7 +232,7 @@ impl AggregateFunction {
// original errors are all related to wrong function signature
// aggregate them for better error message
.map_err(|_| {
plan_err_raw!(
plan_datafusion_err!(
"{}",
utils::generate_signature_error_msg(
&format!("{self}"),
6 changes: 4 additions & 2 deletions datafusion/expr/src/built_in_function.rs
@@ -25,7 +25,9 @@ use crate::{
Volatility,
};
use arrow::datatypes::{DataType, Field, Fields, IntervalUnit, TimeUnit};
use datafusion_common::{internal_err, plan_err, plan_err_raw, DataFusionError, Result};
use datafusion_common::{
internal_err, plan_datafusion_err, plan_err, DataFusionError, Result,
};
use std::collections::HashMap;
use std::fmt;
use std::str::FromStr;
@@ -501,7 +503,7 @@ impl BuiltinScalarFunction {

// verify that this is a valid set of data types for this function
data_types(input_expr_types, &self.signature()).map_err(|_| {
plan_err_raw!(
plan_datafusion_err!(
"{}",
utils::generate_signature_error_msg(
&format!("{self}"),
12 changes: 7 additions & 5 deletions datafusion/expr/src/expr_schema.rs
@@ -27,8 +27,8 @@ use crate::{LogicalPlan, Projection, Subquery};
use arrow::compute::can_cast_types;
use arrow::datatypes::{DataType, Field};
use datafusion_common::{
internal_err, plan_err, plan_err_raw, Column, DFField, DFSchema, DataFusionError,
ExprSchema, Result,
internal_err, plan_datafusion_err, plan_err, Column, DFField, DFSchema,
DataFusionError, ExprSchema, Result,
};
use std::collections::HashMap;
use std::sync::Arc;
@@ -139,9 +139,11 @@ impl ExprSchemable for Expr {
ref op,
}) => get_result_type(&left.get_type(schema)?, op, &right.get_type(schema)?),
Expr::Like { .. } | Expr::SimilarTo { .. } => Ok(DataType::Boolean),
Expr::Placeholder(Placeholder { data_type, .. }) => data_type
.clone()
.ok_or_else(|| plan_err_raw!("Placeholder type could not be resolved")),
Expr::Placeholder(Placeholder { data_type, .. }) => {
data_type.clone().ok_or_else(|| {
plan_datafusion_err!("Placeholder type could not be resolved")
})
}
Expr::Wildcard => {
// Wildcard do not really have a type and do not appear in projections
Ok(DataType::Null)
6 changes: 4 additions & 2 deletions datafusion/expr/src/field_util.rs
@@ -18,7 +18,9 @@
//! Utility functions for complex field access
use arrow::datatypes::{DataType, Field};
use datafusion_common::{plan_err, plan_err_raw, DataFusionError, Result, ScalarValue};
use datafusion_common::{
plan_datafusion_err, plan_err, DataFusionError, Result, ScalarValue,
};

/// Types of the field access expression of a nested type, such as `Field` or `List`
pub enum GetFieldAccessSchema {
@@ -52,7 +54,7 @@ impl GetFieldAccessSchema {
)
} else {
let field = fields.iter().find(|f| f.name() == s);
field.ok_or(plan_err_raw!("Field {s} not found in struct")).map(|f| f.as_ref().clone())
field.ok_or(plan_datafusion_err!("Field {s} not found in struct")).map(|f| f.as_ref().clone())
}
}
(DataType::Struct(_), _) => plan_err!(
6 changes: 3 additions & 3 deletions datafusion/expr/src/logical_plan/builder.rs
@@ -49,7 +49,7 @@ use datafusion_common::{
FileType, FunctionalDependencies, OwnedTableReference, Result, ScalarValue,
TableReference, ToDFSchema,
};
use datafusion_common::{plan_err, plan_err_raw};
use datafusion_common::{plan_datafusion_err, plan_err};
use std::any::Any;
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
@@ -1026,7 +1026,7 @@ impl LogicalPlanBuilder {
self.plan.schema().clone(),
right.schema().clone(),
)?.ok_or_else(||
plan_err_raw!(
plan_datafusion_err!(
"can't create join plan, join key should belong to one input, error key: ({normalized_left_key},{normalized_right_key})"
))
})
@@ -1206,7 +1206,7 @@ pub fn union(left_plan: LogicalPlan, right_plan: LogicalPlan) -> Result<LogicalP
let data_type =
comparison_coercion(left_field.data_type(), right_field.data_type())
.ok_or_else(|| {
plan_err_raw!(
plan_datafusion_err!(
"UNION Column {} (type: {}) is not compatible with column {} (type: {})",
right_field.name(),
right_field.data_type(),
16 changes: 8 additions & 8 deletions datafusion/expr/src/type_coercion/binary.rs
@@ -24,8 +24,8 @@ use arrow::datatypes::{
DECIMAL256_MAX_PRECISION, DECIMAL256_MAX_SCALE,
};

use datafusion_common::{plan_datafusion_err, Result};
use datafusion_common::{plan_err, DataFusionError};
use datafusion_common::{plan_err_raw, Result};

use crate::Operator;

@@ -71,7 +71,7 @@ fn signature(lhs: &DataType, op: &Operator, rhs: &DataType) -> Result<Signature>
Operator::IsDistinctFrom |
Operator::IsNotDistinctFrom => {
comparison_coercion(lhs, rhs).map(Signature::comparison).ok_or_else(|| {
plan_err_raw!(
plan_datafusion_err!(
"Cannot infer common argument type for comparison operation {lhs} {op} {rhs}"
)
})
@@ -91,7 +91,7 @@ fn signature(lhs: &DataType, op: &Operator, rhs: &DataType) -> Result<Signature>
Operator::RegexNotMatch |
Operator::RegexNotIMatch => {
regex_coercion(lhs, rhs).map(Signature::comparison).ok_or_else(|| {
plan_err_raw!(
plan_datafusion_err!(
"Cannot infer common argument type for regex operation {lhs} {op} {rhs}"
)
})
@@ -102,22 +102,22 @@ fn signature(lhs: &DataType, op: &Operator, rhs: &DataType) -> Result<Signature>
| Operator::BitwiseShiftRight
| Operator::BitwiseShiftLeft => {
bitwise_coercion(lhs, rhs).map(Signature::uniform).ok_or_else(|| {
plan_err_raw!(
plan_datafusion_err!(
"Cannot infer common type for bitwise operation {lhs} {op} {rhs}"
)
})
}
Operator::StringConcat => {
string_concat_coercion(lhs, rhs).map(Signature::uniform).ok_or_else(|| {
plan_err_raw!(
plan_datafusion_err!(
"Cannot infer common string type for string concat operation {lhs} {op} {rhs}"
)
})
}
Operator::AtArrow
| Operator::ArrowAt => {
array_coercion(lhs, rhs).map(Signature::uniform).ok_or_else(|| {
plan_err_raw!(
plan_datafusion_err!(
"Cannot infer common array type for arrow operation {lhs} {op} {rhs}"
)
})
@@ -154,7 +154,7 @@ fn signature(lhs: &DataType, op: &Operator, rhs: &DataType) -> Result<Signature>
// Temporal arithmetic by first coercing to a common time representation
// e.g. Date32 - Timestamp
let ret = get_result(&coerced, &coerced).map_err(|e| {
plan_err_raw!(
plan_datafusion_err!(
"Cannot get result type for temporal operation {coerced} {op} {coerced}: {e}"
)
})?;
@@ -166,7 +166,7 @@ fn signature(lhs: &DataType, op: &Operator, rhs: &DataType) -> Result<Signature>
} else if let Some((lhs, rhs)) = math_decimal_coercion(lhs, rhs) {
// Decimal arithmetic, e.g. Decimal(10, 2) + Decimal(10, 0)
let ret = get_result(&lhs, &rhs).map_err(|e| {
plan_err_raw!(
plan_datafusion_err!(
"Cannot get result type for decimal operation {lhs} {op} {rhs}: {e}"
)
})?;