Fix clippy warnings with toolchain 1.63 (#2717)
* fix clippy warnings

Signed-off-by: Ruihang Xia <waynestxia@gmail.com>

* fix format_push_string

Signed-off-by: Ruihang Xia <waynestxia@gmail.com>
waynexia authored Jun 10, 2022
1 parent 10058f6 commit 080c324
Showing 21 changed files with 65 additions and 73 deletions.
4 changes: 2 additions & 2 deletions datafusion/common/src/error.rs
@@ -295,7 +295,7 @@ mod test {
#[allow(clippy::try_err)]
fn return_arrow_error() -> arrow::error::Result<()> {
// Expect the '?' to work
- let _foo = Err(DataFusionError::Plan("foo".to_string()))?;
+ Err(DataFusionError::Plan("foo".to_string()))?;
Ok(())
}

@@ -304,7 +304,7 @@ mod test {
#[allow(clippy::try_err)]
fn return_datafusion_error() -> crate::error::Result<()> {
// Expect the '?' to work
- let _bar = Err(ArrowError::SchemaError("bar".to_string()))?;
+ Err(ArrowError::SchemaError("bar".to_string()))?;
Ok(())
}
}
4 changes: 2 additions & 2 deletions datafusion/common/src/scalar.rs
@@ -544,10 +544,10 @@ impl ScalarValue {
if precision <= DECIMAL_MAX_PRECISION && scale <= precision {
return Ok(ScalarValue::Decimal128(Some(value), precision, scale));
}
- return Err(DataFusionError::Internal(format!(
+ Err(DataFusionError::Internal(format!(
"Can not new a decimal type ScalarValue for precision {} and scale {}",
precision, scale
- )));
+ )))
}
/// Getter for the `DataType` of the value
pub fn get_datatype(&self) -> DataType {
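The scalar.rs change above, like the later limit_push_down.rs and binary.rs hunks, is the usual fix for clippy's needless_return lint: a trailing `return expr;` statement becomes the function's tail expression. A minimal self-contained sketch of the before/after shape, using a stand-in error type rather than DataFusionError:

#[derive(Debug)]
struct InternalError(String);

// Before (flagged by clippy::needless_return): explicit `return` on the final statement.
fn validate_before(precision: usize, max_precision: usize) -> Result<usize, InternalError> {
    if precision <= max_precision {
        return Ok(precision);
    }
    return Err(InternalError(format!(
        "precision {} exceeds the maximum {}",
        precision, max_precision
    )));
}

// After: the last expression is returned implicitly, so `return` and the trailing `;` go away.
fn validate_after(precision: usize, max_precision: usize) -> Result<usize, InternalError> {
    if precision <= max_precision {
        return Ok(precision);
    }
    Err(InternalError(format!(
        "precision {} exceeds the maximum {}",
        precision, max_precision
    )))
}

fn main() {
    assert!(validate_before(5, 38).is_ok());
    assert_eq!(
        validate_before(40, 38).unwrap_err().0,
        validate_after(40, 38).unwrap_err().0
    );
}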
13 changes: 5 additions & 8 deletions datafusion/core/src/physical_optimizer/pruning.rs
@@ -532,14 +532,11 @@ fn rewrite_expr_to_prunable(
};
}

- _ => {
-     return Err(DataFusionError::Plan(format!(
-         "column expression {:?} is not supported",
-         column_expr
-     )))
- }
+ _ => Err(DataFusionError::Plan(format!(
+     "column expression {:?} is not supported",
+     column_expr
+ ))),
}
- // Ok((column_expr.clone(), op, scalar_expr.clone()))
}

fn is_compare_op(op: Operator) -> bool {
@@ -784,7 +781,7 @@ fn build_statistics_expr(expr_builder: &mut PruningExpressionBuilder) -> Result<
Ok(statistics_expr)
}

- #[derive(Debug, Copy, Clone, PartialEq)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum StatisticsType {
Min,
Max,
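The same match-arm cleanup seen in pruning.rs recurs in the approx_percentile_cont.rs, tdigest.rs, and in_list.rs hunks below: a fallback arm that wrapped its error in a block and returned early now simply evaluates to the `Err(...)` value, so the whole `match` becomes the returned expression. A rough sketch of that shape with placeholder types (the operator codes and messages are illustrative, not from the commit):

// Before: the catch-all arm returns early from inside a block.
fn describe_op_before(op: u8) -> Result<&'static str, String> {
    match op {
        0 => Ok("eq"),
        1 => Ok("not-eq"),
        other => {
            return Err(format!("operator {} is not supported", other))
        }
    }
}

// After: every arm yields a Result, and the match itself is the tail expression.
fn describe_op_after(op: u8) -> Result<&'static str, String> {
    match op {
        0 => Ok("eq"),
        1 => Ok("not-eq"),
        other => Err(format!("operator {} is not supported", other)),
    }
}

fn main() {
    assert_eq!(describe_op_before(0), describe_op_after(0));
    assert!(describe_op_after(9).is_err());
}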
2 changes: 1 addition & 1 deletion datafusion/core/src/physical_plan/hash_join.rs
@@ -176,7 +176,7 @@ impl HashJoinMetrics {
}
}

- #[derive(Clone, Copy, Debug, PartialEq)]
+ #[derive(Clone, Copy, Debug, PartialEq, Eq)]
/// Partitioning mode to use for hash join
pub enum PartitionMode {
/// Left/right children are partitioned using the left and right keys
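Most of the remaining hunks only add `Eq` next to an existing `#[derive(PartialEq)]`. That is the fix suggested by clippy's derive_partial_eq_without_eq lint when every field or variant already has total equality. A small illustrative sketch (the enum here is made up, not a DataFusion type):

// Before (flagged): #[derive(Clone, Copy, Debug, PartialEq)]
// After: also derive Eq, since equality over these variants is total.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum JoinSide {
    Left,
    Right,
}

// Eq is a marker trait with no methods, so deriving it costs nothing at runtime,
// but it lets the type satisfy `Eq` bounds in generic code.
fn all_same<T: Eq>(items: &[T]) -> bool {
    items.windows(2).all(|w| w[0] == w[1])
}

fn main() {
    assert!(all_same(&[JoinSide::Left, JoinSide::Left]));
    assert!(!all_same(&[JoinSide::Left, JoinSide::Right]));
}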
4 changes: 2 additions & 2 deletions datafusion/core/src/physical_plan/mod.rs
@@ -91,7 +91,7 @@ pub use self::planner::PhysicalPlanner;
/// Fields are optional and can be inexact because the sources
/// sometimes provide approximate estimates for performance reasons
/// and the transformations output are not always predictable.
- #[derive(Debug, Clone, Default, PartialEq)]
+ #[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct Statistics {
/// The number of table rows
pub num_rows: Option<usize>,
@@ -105,7 +105,7 @@ pub struct Statistics {
pub is_exact: bool,
}
/// This table statistics are estimates about column
- #[derive(Clone, Debug, Default, PartialEq)]
+ #[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct ColumnStatistics {
/// Number of null values on column
pub null_count: Option<usize>,
7 changes: 4 additions & 3 deletions datafusion/core/src/physical_plan/planner.rs
@@ -64,6 +64,7 @@ use futures::future::BoxFuture;
use futures::{FutureExt, StreamExt, TryStreamExt};
use log::{debug, trace};
use std::collections::{HashMap, HashSet};
+ use std::fmt::Write;
use std::sync::Arc;

fn create_function_physical_name(
@@ -111,13 +112,13 @@ fn create_physical_name(e: &Expr, is_first_expr: bool) -> Result<String> {
} => {
let mut name = "CASE ".to_string();
if let Some(e) = expr {
name += &format!("{:?} ", e);
let _ = write!(name, "{:?} ", e);
}
for (w, t) in when_then_expr {
name += &format!("WHEN {:?} THEN {:?} ", w, t);
let _ = write!(name, "WHEN {:?} THEN {:?} ", w, t);
}
if let Some(e) = else_expr {
name += &format!("ELSE {:?} ", e);
let _ = write!(name, "ELSE {:?} ", e);
}
name += "END";
Ok(name)
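The new `use std::fmt::Write;` import and the switch from `name += &format!(...)` to `let _ = write!(name, ...)` here (and again in expr.rs and common_subexpr_eliminate.rs below) address the format_push_string lint named in the commit message: appending `format!` output allocates a temporary String just to copy it into the buffer, while `write!` formats into the existing String directly. A minimal sketch with an invented helper, not the DataFusion function itself:

use std::fmt::Write;

// Before (flagged by clippy::format_push_string): every iteration allocates
// a temporary String via format! and immediately copies and drops it.
fn case_name_before(when_then: &[(String, String)]) -> String {
    let mut name = "CASE ".to_string();
    for (w, t) in when_then {
        name += &format!("WHEN {} THEN {} ", w, t);
    }
    name + "END"
}

// After: write! formats straight into the existing buffer. Writing to a String
// cannot fail, so the returned fmt::Result is intentionally discarded with `let _ =`.
fn case_name_after(when_then: &[(String, String)]) -> String {
    let mut name = "CASE ".to_string();
    for (w, t) in when_then {
        let _ = write!(name, "WHEN {} THEN {} ", w, t);
    }
    name + "END"
}

fn main() {
    let arms = vec![("a = 1".to_string(), "'one'".to_string())];
    assert_eq!(case_name_before(&arms), case_name_after(&arms));
}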
2 changes: 1 addition & 1 deletion datafusion/core/src/physical_plan/sorts/cursor.rs
@@ -52,7 +52,7 @@ pub struct SortKeyCursor {
sort_options: Arc<Vec<SortOptions>>,
}

- impl<'a> std::fmt::Debug for SortKeyCursor {
+ impl std::fmt::Debug for SortKeyCursor {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
f.debug_struct("SortKeyCursor")
.field("sort_columns", &self.sort_columns)
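The cursor.rs hunk drops a lifetime parameter that the impl never uses; this is most likely clippy's extra_unused_lifetimes lint (the exact lint name is my assumption, it is not stated in the commit). A cut-down sketch of the struct and the corrected impl:

struct SortKeyCursor {
    batch_id: usize,
}

// Before: `impl<'a> std::fmt::Debug for SortKeyCursor { ... }` declared a
// lifetime that appears nowhere in the trait, the type, or the body.
// After: the parameter is simply removed.
impl std::fmt::Debug for SortKeyCursor {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SortKeyCursor")
            .field("batch_id", &self.batch_id)
            .finish()
    }
}

fn main() {
    println!("{:?}", SortKeyCursor { batch_id: 0 });
}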
4 changes: 2 additions & 2 deletions datafusion/data-access/src/lib.rs
@@ -34,7 +34,7 @@ pub enum ListEntry {
}

/// The path and size of the file.
- #[derive(Debug, Clone, PartialEq)]
+ #[derive(Debug, Clone, PartialEq, Eq)]
pub struct SizedFile {
/// Path of the file. It is relative to the current object
/// store (it does not specify the `xx://` scheme).
@@ -46,7 +46,7 @@ pub struct SizedFile {
/// Description of a file as returned by the listing command of a
/// given object store. The resulting path is relative to the
/// object store that generated it.
- #[derive(Debug, Clone, PartialEq)]
+ #[derive(Debug, Clone, PartialEq, Eq)]
pub struct FileMeta {
/// The path and size of the file.
pub sized_file: SizedFile,
7 changes: 4 additions & 3 deletions datafusion/expr/src/expr.rs
@@ -31,6 +31,7 @@ use datafusion_common::Column;
use datafusion_common::{DFSchema, Result};
use datafusion_common::{DataFusionError, ScalarValue};
use std::fmt;
+ use std::fmt::Write;
use std::hash::{BuildHasher, Hash, Hasher};
use std::ops::Not;
use std::sync::Arc;
@@ -715,16 +716,16 @@ fn create_name(e: &Expr, input_schema: &DFSchema) -> Result<String> {
let mut name = "CASE ".to_string();
if let Some(e) = expr {
let e = create_name(e, input_schema)?;
name += &format!("{} ", e);
let _ = write!(name, "{} ", e);
}
for (w, t) in when_then_expr {
let when = create_name(w, input_schema)?;
let then = create_name(t, input_schema)?;
name += &format!("WHEN {} THEN {} ", when, then);
let _ = write!(name, "WHEN {} THEN {} ", when, then);
}
if let Some(e) = else_expr {
let e = create_name(e, input_schema)?;
name += &format!("ELSE {} ", e);
let _ = write!(name, "ELSE {} ", e);
}
name += "END";
Ok(name)
8 changes: 4 additions & 4 deletions datafusion/expr/src/logical_plan/plan.rs
@@ -620,7 +620,7 @@ impl LogicalPlan {
struct Wrapper<'a>(&'a LogicalPlan);
impl<'a> fmt::Display for Wrapper<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match &*self.0 {
+ match self.0 {
LogicalPlan::EmptyRelation(_) => write!(f, "EmptyRelation"),
LogicalPlan::Values(Values { ref values, .. }) => {
let str_values: Vec<_> = values
@@ -1098,7 +1098,7 @@ pub struct CreateView {
}

/// Types of files to parse as DataFrames
- #[derive(Debug, Clone, Copy, PartialEq)]
+ #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FileType {
/// Newline-delimited JSON
NdJson,
@@ -1273,7 +1273,7 @@ pub enum Partitioning {

/// Represents which type of plan, when storing multiple
/// for use in EXPLAIN plans
- #[derive(Debug, Clone, PartialEq)]
+ #[derive(Debug, Clone, PartialEq, Eq)]
pub enum PlanType {
/// The initial LogicalPlan provided to DataFusion
InitialLogicalPlan,
@@ -1313,7 +1313,7 @@ impl fmt::Display for PlanType {
}

/// Represents some sort of execution plan, in String form
- #[derive(Debug, Clone, PartialEq)]
+ #[derive(Debug, Clone, PartialEq, Eq)]
#[allow(clippy::rc_buffer)]
pub struct StringifiedPlan {
/// An identifier of what type of plan this string represents
4 changes: 2 additions & 2 deletions datafusion/expr/src/table_source.rs
@@ -23,7 +23,7 @@ use std::any::Any;
/// Indicates whether and how a filter expression can be handled by a
/// TableProvider for table scans.
- #[derive(Debug, Clone, PartialEq)]
+ #[derive(Debug, Clone, PartialEq, Eq)]
pub enum TableProviderFilterPushDown {
/// The expression cannot be used by the provider.
Unsupported,
@@ -39,7 +39,7 @@ pub enum TableProviderFilterPushDown {
}

/// Indicates the type of this table for metadata/catalog purposes.
- #[derive(Debug, Clone, Copy, PartialEq)]
+ #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TableType {
/// An ordinary physical table.
Base,
9 changes: 5 additions & 4 deletions datafusion/optimizer/src/common_subexpr_eliminate.rs
@@ -30,6 +30,7 @@ use datafusion_expr::{
Expr, ExprSchemable,
};
use std::collections::{HashMap, HashSet};
+ use std::fmt::Write;
use std::sync::Arc;

/// A map from expression's identifier to tuple including
@@ -424,17 +425,17 @@ impl ExprIdentifierVisitor<'_> {
}
Expr::Cast { data_type, .. } => {
desc.push_str("Cast-");
- desc.push_str(&format!("{:?}", data_type));
+ let _ = write!(desc, "{:?}", data_type);
}
Expr::TryCast { data_type, .. } => {
desc.push_str("TryCast-");
- desc.push_str(&format!("{:?}", data_type));
+ let _ = write!(desc, "{:?}", data_type);
}
Expr::Sort {
asc, nulls_first, ..
} => {
desc.push_str("Sort-");
- desc.push_str(&format!("{}{}", asc, nulls_first));
+ let _ = write!(desc, "{}{}", asc, nulls_first);
}
Expr::ScalarFunction { fun, .. } => {
desc.push_str("ScalarFunction-");
@@ -449,7 +450,7 @@ impl ExprIdentifierVisitor<'_> {
} => {
desc.push_str("WindowFunction-");
desc.push_str(&fun.to_string());
- desc.push_str(&format!("{:?}", window_frame));
+ let _ = write!(desc, "{:?}", window_frame);
}
Expr::AggregateFunction { fun, distinct, .. } => {
desc.push_str("AggregateFunction-");
4 changes: 2 additions & 2 deletions datafusion/optimizer/src/limit_push_down.rs
@@ -278,7 +278,7 @@ fn generate_push_down_join(
null_equals_null,
}) = join
{
- return Ok(LogicalPlan::Join(Join {
+ Ok(LogicalPlan::Join(Join {
left: Arc::new(limit_push_down(
_optimizer,
Ancestor::FromLimit {
@@ -303,7 +303,7 @@ fn generate_push_down_join(
join_constraint: *join_constraint,
schema: schema.clone(),
null_equals_null: *null_equals_null,
- }));
+ }))
} else {
Err(DataFusionError::Internal(format!(
"{:?} must be join type",
10 changes: 4 additions & 6 deletions datafusion/physical-expr/src/aggregate/approx_percentile_cont.rs
@@ -278,12 +278,10 @@ impl ApproxPercentileAccumulator {
.filter_map(|v| v.try_as_f64().transpose())
.collect::<Result<Vec<_>>>()?)
}
- e => {
-     return Err(DataFusionError::Internal(format!(
-         "APPROX_PERCENTILE_CONT is not expected to receive the type {:?}",
-         e
-     )));
- }
+ e => Err(DataFusionError::Internal(format!(
+     "APPROX_PERCENTILE_CONT is not expected to receive the type {:?}",
+     e
+ ))),
}
}
}
10 changes: 4 additions & 6 deletions datafusion/physical-expr/src/aggregate/tdigest.rs
@@ -96,12 +96,10 @@ impl TryIntoOrderedF64 for ScalarValue {
ScalarValue::UInt32(v) => Ok(v.map(|v| OrderedFloat::from(v as f64))),
ScalarValue::UInt64(v) => Ok(v.map(|v| OrderedFloat::from(v as f64))),

- got => {
-     return Err(DataFusionError::NotImplemented(format!(
-         "Support for 'TryIntoOrderedF64' for data type {} is not implemented",
-         got
-     )))
- }
+ got => Err(DataFusionError::NotImplemented(format!(
+     "Support for 'TryIntoOrderedF64' for data type {} is not implemented",
+     got
+ ))),
}
}
}
8 changes: 4 additions & 4 deletions datafusion/physical-expr/src/expressions/binary.rs
@@ -1246,22 +1246,22 @@ impl BinaryExpr {
if left_data_type == &DataType::Boolean {
boolean_op!(left, right, and_kleene)
} else {
- return Err(DataFusionError::Internal(format!(
+ Err(DataFusionError::Internal(format!(
"Cannot evaluate binary expression {:?} with types {:?} and {:?}",
self.op,
left.data_type(),
right.data_type()
- )));
+ )))
}
}
Operator::Or => {
if left_data_type == &DataType::Boolean {
boolean_op!(left, right, or_kleene)
} else {
- return Err(DataFusionError::Internal(format!(
+ Err(DataFusionError::Internal(format!(
"Cannot evaluate binary expression {:?} with types {:?} and {:?}",
self.op, left_data_type, right_data_type
- )));
+ )))
}
}
Operator::RegexMatch => {
12 changes: 5 additions & 7 deletions datafusion/physical-expr/src/expressions/in_list.rs
@@ -504,13 +504,11 @@ impl PhysicalExpr for InListExpr {
.unwrap();
set_contains_with_negated!(array, set, self.negated)
}
- datatype => {
-     return Result::Err(DataFusionError::NotImplemented(format!(
-         "InSet does not support datatype {:?}.",
-         datatype
-     )))
- }
- };
+ datatype => Result::Err(DataFusionError::NotImplemented(format!(
+     "InSet does not support datatype {:?}.",
+     datatype
+ ))),
+ }
} else {
let list_values = self
.list
2 changes: 1 addition & 1 deletion datafusion/physical-expr/src/physical_expr.rs
@@ -76,7 +76,7 @@ fn scatter(mask: &BooleanArray, truthy: &dyn Array) -> Result<ArrayRef> {
// (SlicesIterator doesn't respect nulls)
let mask = and_kleene(mask, &is_not_null(mask)?)?;

- let mut mutable = MutableArrayData::new(vec![&*truthy], true, mask.len());
+ let mut mutable = MutableArrayData::new(vec![truthy], true, mask.len());

// the SlicesIterator slices only the true values. So the gaps left by this iterator we need to
// fill with falsy values
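The `vec![&*truthy]` to `vec![truthy]` change here, like `match &*self.0` to `match self.0` in plan.rs above, removes a reborrow that produces exactly the type it started from; this is likely clippy's borrow_deref_ref lint from the 1.63 toolchain (the lint name is my inference, the commit does not say). A small sketch with stand-in types rather than the arrow-rs API:

trait Array {
    fn num_rows(&self) -> usize;
}

struct Int32Array(Vec<i32>);

impl Array for Int32Array {
    fn num_rows(&self) -> usize {
        self.0.len()
    }
}

fn total_rows(arrays: Vec<&dyn Array>) -> usize {
    arrays.iter().map(|a| a.num_rows()).sum()
}

fn scatter(truthy: &dyn Array) -> usize {
    // Before: total_rows(vec![&*truthy]) -- `&*` just reborrows a `&dyn Array`
    // back into a `&dyn Array`, so the explicit deref/borrow adds nothing.
    // After: pass the reference through unchanged.
    total_rows(vec![truthy])
}

fn main() {
    let ints = Int32Array(vec![1, 2, 3]);
    assert_eq!(scatter(&ints), 3);
}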
6 changes: 3 additions & 3 deletions datafusion/sql/src/parser.rs
@@ -49,7 +49,7 @@ fn parse_file_type(s: &str) -> Result<FileType, ParserError> {
}

/// DataFusion extension DDL for `CREATE EXTERNAL TABLE`
- #[derive(Debug, Clone, PartialEq)]
+ #[derive(Debug, Clone, PartialEq, Eq)]
pub struct CreateExternalTable {
/// Table name
pub name: String,
@@ -70,7 +70,7 @@ pub struct CreateExternalTable {
}

/// DataFusion extension DDL for `DESCRIBE TABLE`
- #[derive(Debug, Clone, PartialEq)]
+ #[derive(Debug, Clone, PartialEq, Eq)]
pub struct DescribeTable {
/// Table name
pub table_name: String,
Expand All @@ -79,7 +79,7 @@ pub struct DescribeTable {
/// DataFusion Statement representations.
///
/// Tokens parsed by `DFParser` are converted into these values.
- #[derive(Debug, Clone, PartialEq)]
+ #[derive(Debug, Clone, PartialEq, Eq)]
pub enum Statement {
/// ANSI SQL AST node
Statement(Box<SQLStatement>),