chore: cleanup deprecated API since version <= 40 #15027

Merged · 1 commit · Mar 6, 2025
13 changes: 0 additions & 13 deletions datafusion/common/src/dfschema.rs
@@ -159,22 +159,9 @@ impl DFSchema {
}

/// Create a new `DFSchema` from a list of Arrow [Field]s
#[allow(deprecated)]
pub fn from_unqualified_fields(
fields: Fields,
metadata: HashMap<String, String>,
) -> Result<Self> {
Self::from_unqualifed_fields(fields, metadata)
}

/// Create a new `DFSchema` from a list of Arrow [Field]s
#[deprecated(
since = "40.0.0",
note = "Please use `from_unqualified_fields` instead (this one's name is a typo). This method is subject to be removed soon"
)]
pub fn from_unqualifed_fields(
fields: Fields,
metadata: HashMap<String, String>,
) -> Result<Self> {
let field_count = fields.len();
let schema = Arc::new(Schema::new_with_metadata(fields, metadata));
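The removed wrapper delegated to the correctly spelled `DFSchema::from_unqualified_fields`, which stays. A minimal sketch of calling the surviving API (the field definitions are illustrative; imports follow the usual `arrow` / `datafusion_common` layout):

```rust
use std::collections::HashMap;

use arrow::datatypes::{DataType, Field, Fields};
use datafusion_common::{DFSchema, Result};

fn unqualified_schema() -> Result<DFSchema> {
    let fields = Fields::from(vec![
        Field::new("id", DataType::Int64, false),
        Field::new("name", DataType::Utf8, true),
    ]);
    // Correctly spelled replacement for the removed `from_unqualifed_fields`.
    DFSchema::from_unqualified_fields(fields, HashMap::new())
}
```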
115 changes: 0 additions & 115 deletions datafusion/core/src/execution/session_state.rs
@@ -280,22 +280,6 @@ impl SessionState {
.build()
}

/// Returns new [`SessionState`] using the provided
/// [`SessionConfig`], [`RuntimeEnv`], and [`CatalogProviderList`]
#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
pub fn new_with_config_rt_and_catalog_list(
config: SessionConfig,
runtime: Arc<RuntimeEnv>,
catalog_list: Arc<dyn CatalogProviderList>,
) -> Self {
SessionStateBuilder::new()
.with_config(config)
.with_runtime_env(runtime)
.with_catalog_list(catalog_list)
.with_default_features()
.build()
}

pub(crate) fn resolve_table_ref(
&self,
table_ref: impl Into<TableReference>,
@@ -334,53 +318,6 @@ impl SessionState {
})
}

#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
/// Replace the random session id.
pub fn with_session_id(mut self, session_id: String) -> Self {
self.session_id = session_id;
self
}

#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
/// override default query planner with `query_planner`
pub fn with_query_planner(
mut self,
query_planner: Arc<dyn QueryPlanner + Send + Sync>,
) -> Self {
self.query_planner = query_planner;
self
}

#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
/// Override the [`AnalyzerRule`]s optimizer plan rules.
pub fn with_analyzer_rules(
mut self,
rules: Vec<Arc<dyn AnalyzerRule + Send + Sync>>,
) -> Self {
self.analyzer = Analyzer::with_rules(rules);
self
}

#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
/// Replace the entire list of [`OptimizerRule`]s used to optimize plans
pub fn with_optimizer_rules(
mut self,
rules: Vec<Arc<dyn OptimizerRule + Send + Sync>>,
) -> Self {
self.optimizer = Optimizer::with_rules(rules);
self
}

#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
/// Replace the entire list of [`PhysicalOptimizerRule`]s used to optimize plans
pub fn with_physical_optimizer_rules(
mut self,
physical_optimizers: Vec<Arc<dyn PhysicalOptimizerRule + Send + Sync>>,
) -> Self {
self.physical_optimizers = PhysicalOptimizer::with_rules(physical_optimizers);
self
}

/// Add `analyzer_rule` to the end of the list of
/// [`AnalyzerRule`]s used to rewrite queries.
pub fn add_analyzer_rule(
@@ -391,17 +328,6 @@ impl SessionState {
self
}

#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
/// Add `optimizer_rule` to the end of the list of
/// [`OptimizerRule`]s used to rewrite queries.
pub fn add_optimizer_rule(
mut self,
optimizer_rule: Arc<dyn OptimizerRule + Send + Sync>,
) -> Self {
self.optimizer.rules.push(optimizer_rule);
self
}

// the add_optimizer_rule takes an owned reference
// it should probably be renamed to `with_optimizer_rule` to follow builder style
// and `add_optimizer_rule` that takes &mut self added instead of this
@@ -412,52 +338,11 @@ impl SessionState {
self.optimizer.rules.push(optimizer_rule);
}

#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
/// Add `physical_optimizer_rule` to the end of the list of
/// [`PhysicalOptimizerRule`]s used to rewrite queries.
pub fn add_physical_optimizer_rule(
mut self,
physical_optimizer_rule: Arc<dyn PhysicalOptimizerRule + Send + Sync>,
) -> Self {
self.physical_optimizers.rules.push(physical_optimizer_rule);
self
}

#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
/// Adds a new [`ConfigExtension`] to TableOptions
pub fn add_table_options_extension<T: ConfigExtension>(
mut self,
extension: T,
) -> Self {
self.table_options.extensions.insert(extension);
self
}

#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
/// Registers a [`FunctionFactory`] to handle `CREATE FUNCTION` statements
pub fn with_function_factory(
mut self,
function_factory: Arc<dyn FunctionFactory>,
) -> Self {
self.function_factory = Some(function_factory);
self
}

/// Registers a [`FunctionFactory`] to handle `CREATE FUNCTION` statements
pub fn set_function_factory(&mut self, function_factory: Arc<dyn FunctionFactory>) {
self.function_factory = Some(function_factory);
}

#[deprecated(since = "40.0.0", note = "Use SessionStateBuilder")]
/// Replace the extension [`SerializerRegistry`]
pub fn with_serializer_registry(
mut self,
registry: Arc<dyn SerializerRegistry>,
) -> Self {
self.serializer_registry = registry;
self
}

/// Get the function factory
pub fn function_factory(&self) -> Option<&Arc<dyn FunctionFactory>> {
self.function_factory.as_ref()
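Every removed `SessionState::with_*` / `new_with_*` setter pointed at `SessionStateBuilder`. A hedged sketch of the builder-based equivalent, using only the builder methods the removed code itself delegated to (import paths are assumptions):

```rust
use std::sync::Arc;

use datafusion::execution::runtime_env::RuntimeEnv;
use datafusion::execution::session_state::{SessionState, SessionStateBuilder};
use datafusion::prelude::SessionConfig;

fn build_session_state() -> SessionState {
    // Replaces SessionState::new_with_config_rt_and_catalog_list and the
    // removed with_* setters; RuntimeEnv::default() stands in for a real runtime.
    SessionStateBuilder::new()
        .with_config(SessionConfig::new())
        .with_runtime_env(Arc::new(RuntimeEnv::default()))
        .with_default_features()
        .build()
}
```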
15 changes: 0 additions & 15 deletions datafusion/datasource-parquet/src/file_format.rs
@@ -802,21 +802,6 @@ fn get_col_stats(
.collect()
}

/// Deprecated
/// Use [`statistics_from_parquet_meta_calc`] instead.
/// This method was deprecated because it didn't need to be async so a new method was created
/// that exposes a synchronous API.
#[deprecated(
since = "40.0.0",
note = "please use `statistics_from_parquet_meta_calc` instead"
)]
pub async fn statistics_from_parquet_meta(
metadata: &ParquetMetaData,
table_schema: SchemaRef,
) -> Result<Statistics> {
statistics_from_parquet_meta_calc(metadata, table_schema)
}

fn summarize_min_max_null_counts(
min_accs: &mut [Option<MinAccumulator>],
max_accs: &mut [Option<MaxAccumulator>],
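The removed async wrapper simply forwarded to `statistics_from_parquet_meta_calc`, which is synchronous. A hedged call-site sketch; the import path is an assumption based on the crate layout shown in this diff:

```rust
use arrow::datatypes::SchemaRef;
use datafusion_common::{Result, Statistics};
use datafusion_datasource_parquet::file_format::statistics_from_parquet_meta_calc;
use parquet::file::metadata::ParquetMetaData;

fn table_statistics(metadata: &ParquetMetaData, schema: SchemaRef) -> Result<Statistics> {
    // Same arguments as the removed async fn, but no `.await` is needed.
    statistics_from_parquet_meta_calc(metadata, schema)
}
```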
26 changes: 1 addition & 25 deletions datafusion/expr/src/expr.rs
@@ -25,7 +25,6 @@ use std::sync::Arc;

use crate::expr_fn::binary_expr;
use crate::logical_plan::Subquery;
use crate::utils::expr_to_columns;
use crate::Volatility;
use crate::{udaf, ExprSchemable, Operator, Signature, WindowFrame, WindowUDF};

@@ -35,7 +34,7 @@ use datafusion_common::tree_node::{
Transformed, TransformedResult, TreeNode, TreeNodeContainer, TreeNodeRecursion,
};
use datafusion_common::{
plan_err, Column, DFSchema, HashMap, Result, ScalarValue, Spans, TableReference,
Column, DFSchema, HashMap, Result, ScalarValue, Spans, TableReference,
};
use datafusion_functions_window_common::field::WindowUDFFieldArgs;
use sqlparser::ast::{
@@ -1090,11 +1089,6 @@ impl PlannedReplaceSelectItem {
}

impl Expr {
#[deprecated(since = "40.0.0", note = "use schema_name instead")]
pub fn display_name(&self) -> Result<String> {
Ok(self.schema_name().to_string())
}

/// The name of the column (field) that this `Expr` will produce.
///
/// For example, for a projection (e.g. `SELECT <expr>`) the resulting arrow
@@ -1444,15 +1438,6 @@ impl Expr {
Box::new(high),
))
}

#[deprecated(since = "39.0.0", note = "use try_as_col instead")]
pub fn try_into_col(&self) -> Result<Column> {
match self {
Expr::Column(it) => Ok(it.clone()),
_ => plan_err!("Could not coerce '{self}' into Column!"),
}
}

/// Return a reference to the inner `Column` if any
///
/// returns `None` if the expression is not a `Column`
@@ -1495,15 +1480,6 @@ impl Expr {
}
}

/// Return all referenced columns of this expression.
#[deprecated(since = "40.0.0", note = "use Expr::column_refs instead")]
pub fn to_columns(&self) -> Result<HashSet<Column>> {
let mut using_columns = HashSet::new();
expr_to_columns(self, &mut using_columns)?;

Ok(using_columns)
}

/// Return all references to columns in this expression.
///
/// # Example
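Each removed `Expr` helper has the replacement named in its deprecation note: `schema_name` for `display_name`, `try_as_col` for `try_into_col`, and `column_refs` for `to_columns`. A small sketch of the surviving calls, assuming the usual `datafusion_expr` helpers `col` and `lit`:

```rust
use datafusion_expr::{col, lit, Expr};

fn expr_helpers() {
    let expr: Expr = col("a") + lit(1);

    // display_name() -> schema_name().to_string()
    let name = expr.schema_name().to_string();

    // to_columns() -> column_refs(): references to the columns the expression uses
    let referenced = expr.column_refs();

    // try_into_col() -> try_as_col(): Some(&Column) only for a plain column expression
    let as_col = col("a").try_as_col().cloned();

    println!("{name}: {} column(s), {as_col:?}", referenced.len());
}
```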
18 changes: 0 additions & 18 deletions datafusion/expr/src/logical_plan/extension.rs
@@ -82,17 +82,6 @@ pub trait UserDefinedLogicalNode: fmt::Debug + Send + Sync {
/// For example: `TopK: k=10`
fn fmt_for_explain(&self, f: &mut fmt::Formatter) -> fmt::Result;

#[deprecated(since = "39.0.0", note = "use with_exprs_and_inputs instead")]
#[allow(clippy::wrong_self_convention)]
fn from_template(
&self,
exprs: &[Expr],
inputs: &[LogicalPlan],
) -> Arc<dyn UserDefinedLogicalNode> {
self.with_exprs_and_inputs(exprs.to_vec(), inputs.to_vec())
.unwrap()
}

/// Create a new `UserDefinedLogicalNode` with the specified children
/// and expressions. This function is used during optimization
/// when the plan is being rewritten and a new instance of the
@@ -282,13 +271,6 @@ pub trait UserDefinedLogicalNodeCore:
/// For example: `TopK: k=10`
fn fmt_for_explain(&self, f: &mut fmt::Formatter) -> fmt::Result;

#[deprecated(since = "39.0.0", note = "use with_exprs_and_inputs instead")]
#[allow(clippy::wrong_self_convention)]
fn from_template(&self, exprs: &[Expr], inputs: &[LogicalPlan]) -> Self {
self.with_exprs_and_inputs(exprs.to_vec(), inputs.to_vec())
.unwrap()
}

/// Create a new `UserDefinedLogicalNode` with the specified children
/// and expressions. This function is used during optimization
/// when the plan is being rewritten and a new instance of the
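On both traits, `from_template` forwarded to `with_exprs_and_inputs`, which takes owned `Vec`s and returns a `Result` instead of unwrapping. A hedged caller-side sketch (import paths assumed):

```rust
use std::sync::Arc;

use datafusion_common::Result;
use datafusion_expr::{Expr, LogicalPlan, UserDefinedLogicalNode};

fn rebuild_node(
    node: &dyn UserDefinedLogicalNode,
    exprs: &[Expr],
    inputs: &[LogicalPlan],
) -> Result<Arc<dyn UserDefinedLogicalNode>> {
    // Instead of node.from_template(exprs, inputs), which unwrapped internally,
    // pass owned copies and let the caller handle the error.
    node.with_exprs_and_inputs(exprs.to_vec(), inputs.to_vec())
}
```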
10 changes: 0 additions & 10 deletions datafusion/expr/src/utils.rs
@@ -48,16 +48,6 @@ pub use datafusion_functions_aggregate_common::order::AggregateOrderSensitivity;
/// `COUNT(<constant>)` expressions
pub use datafusion_common::utils::expr::COUNT_STAR_EXPANSION;

/// Recursively walk a list of expression trees, collecting the unique set of columns
/// referenced in the expression
#[deprecated(since = "40.0.0", note = "Expr::add_column_refs instead")]
pub fn exprlist_to_columns(expr: &[Expr], accum: &mut HashSet<Column>) -> Result<()> {
for e in expr {
expr_to_columns(e, accum)?;
}
Ok(())
}

/// Count the number of distinct exprs in a list of group by expressions. If the
/// first element is a `GroupingSet` expression then it must be the only expr.
pub fn grouping_set_expr_count(group_expr: &[Expr]) -> Result<usize> {
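The removed free function is covered by `Expr::add_column_refs`, per its deprecation note. A sketch of collecting the unique columns referenced by a slice of expressions; the `add_column_refs` signature is assumed from that note:

```rust
use std::collections::HashSet;

use datafusion_common::Column;
use datafusion_expr::{col, Expr};

fn referenced_columns(exprs: &[Expr]) -> HashSet<&Column> {
    let mut accum = HashSet::new();
    for e in exprs {
        // Replaces exprlist_to_columns(exprs, &mut accum)
        e.add_column_refs(&mut accum);
    }
    accum
}

fn main() {
    let exprs = vec![col("a") + col("b"), col("a")];
    // Columns "a" and "b" are each counted once.
    assert_eq!(referenced_columns(&exprs).len(), 2);
}
```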