Apply clippy fixes
`dev/rust_lint.sh` no longer passes for me, perhaps because of a `rustup
update`. This is the first portion of the fixes suggested by clippy.
findepi committed Nov 28, 2024
1 parent 5818732 commit 95acf32
Showing 56 changed files with 83 additions and 84 deletions.
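Most of the changes in this commit share one shape: lifetime parameters on `impl` blocks that can now be elided, which newer clippy releases flag (likely via the `needless_lifetimes` lint). As a rough sketch of the before/after, using a made-up wrapper type rather than anything from the DataFusion codebase:

```rust
use std::fmt::{self, Display, Formatter};

// A made-up wrapper type, standing in for the various structs touched here.
struct Wrapper<'a>(&'a str);

// Before (flagged by clippy): the lifetime is named on the impl header even
// though it never needs to be referred to by name.
//
//     impl<'a> Display for Wrapper<'a> { ... }
//
// After: the lifetime is elided with `'_`, the form this commit switches to.
impl Display for Wrapper<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

fn main() {
    println!("{}", Wrapper("hello"));
}
```

The remaining fixes are smaller one-offs: replacing hand-rolled rounding-up division with `div_ceil`, dropping redundant `return`s, and switching `unwrap_or_else` to `unwrap_or` where the fallback is cheap to build; a couple of these are noted below where they first appear.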
14 changes: 8 additions & 6 deletions datafusion/common/src/column.rs
@@ -109,21 +109,23 @@ impl Column {
/// where `"foo.BAR"` would be parsed to a reference to column named `foo.BAR`
pub fn from_qualified_name(flat_name: impl Into<String>) -> Self {
let flat_name = flat_name.into();
Self::from_idents(&mut parse_identifiers_normalized(&flat_name, false))
.unwrap_or_else(|| Self {
Self::from_idents(&mut parse_identifiers_normalized(&flat_name, false)).unwrap_or(
Self {
relation: None,
name: flat_name,
})
},
)
}

/// Deserialize a fully qualified name string into a column preserving column text case
pub fn from_qualified_name_ignore_case(flat_name: impl Into<String>) -> Self {
let flat_name = flat_name.into();
Self::from_idents(&mut parse_identifiers_normalized(&flat_name, true))
.unwrap_or_else(|| Self {
Self::from_idents(&mut parse_identifiers_normalized(&flat_name, true)).unwrap_or(
Self {
relation: None,
name: flat_name,
})
},
)
}

/// return the column's name.
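The `column.rs` change above is likely driven by clippy's `unnecessary_lazy_evaluations` lint: the closure handed to `unwrap_or_else` only built a struct literal, so there is nothing expensive to defer and plain `unwrap_or` suffices. A minimal illustration with hypothetical values:

```rust
fn main() {
    let maybe: Option<i32> = None;

    // Laziness only pays off when the fallback is costly to construct;
    // for a plain value or a simple struct literal, clippy prefers `unwrap_or`.
    let lazy = maybe.unwrap_or_else(|| 42);
    let eager = maybe.unwrap_or(42);

    assert_eq!(lazy, eager);
    println!("both are {eager}");
}
```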
2 changes: 1 addition & 1 deletion datafusion/common/src/hash_utils.rs
@@ -63,7 +63,7 @@ pub trait HashValue {
fn hash_one(&self, state: &RandomState) -> u64;
}

impl<'a, T: HashValue + ?Sized> HashValue for &'a T {
impl<T: HashValue + ?Sized> HashValue for &T {
fn hash_one(&self, state: &RandomState) -> u64 {
T::hash_one(self, state)
}
2 changes: 1 addition & 1 deletion datafusion/common/src/utils/mod.rs
@@ -569,7 +569,7 @@ pub mod datafusion_strsim {

struct StringWrapper<'a>(&'a str);

impl<'a, 'b> IntoIterator for &'a StringWrapper<'b> {
impl<'b> IntoIterator for &StringWrapper<'b> {
type Item = char;
type IntoIter = Chars<'b>;

@@ -60,7 +60,7 @@ pub struct AvroArrowArrayReader<'a, R: Read> {
schema_lookup: BTreeMap<String, usize>,
}

impl<'a, R: Read> AvroArrowArrayReader<'a, R> {
impl<R: Read> AvroArrowArrayReader<'_, R> {
pub fn try_new(
reader: R,
schema: SchemaRef,
4 changes: 2 additions & 2 deletions datafusion/core/src/datasource/avro_to_arrow/reader.rs
@@ -128,7 +128,7 @@ pub struct Reader<'a, R: Read> {
batch_size: usize,
}

impl<'a, R: Read> Reader<'a, R> {
impl<R: Read> Reader<'_, R> {
/// Create a new Avro Reader from any value that implements the `Read` trait.
///
/// If reading a `File`, you can customise the Reader, such as to enable schema
@@ -157,7 +157,7 @@ impl<'a, R: Read> Reader<'a, R> {
}
}

impl<'a, R: Read> Iterator for Reader<'a, R> {
impl<R: Read> Iterator for Reader<'_, R> {
type Item = ArrowResult<RecordBatch>;

/// Returns the next batch of results (defined by `self.batch_size`), or `None` if there
10 changes: 5 additions & 5 deletions datafusion/core/src/datasource/file_format/options.rs
@@ -89,7 +89,7 @@ pub struct CsvReadOptions<'a> {
pub file_sort_order: Vec<Vec<SortExpr>>,
}

impl<'a> Default for CsvReadOptions<'a> {
impl Default for CsvReadOptions<'_> {
fn default() -> Self {
Self::new()
}
@@ -243,7 +243,7 @@ pub struct ParquetReadOptions<'a> {
pub file_sort_order: Vec<Vec<SortExpr>>,
}

impl<'a> Default for ParquetReadOptions<'a> {
impl Default for ParquetReadOptions<'_> {
fn default() -> Self {
Self {
file_extension: DEFAULT_PARQUET_EXTENSION,
@@ -323,7 +323,7 @@ pub struct ArrowReadOptions<'a> {
pub table_partition_cols: Vec<(String, DataType)>,
}

impl<'a> Default for ArrowReadOptions<'a> {
impl Default for ArrowReadOptions<'_> {
fn default() -> Self {
Self {
schema: None,
@@ -368,7 +368,7 @@ pub struct AvroReadOptions<'a> {
pub table_partition_cols: Vec<(String, DataType)>,
}

impl<'a> Default for AvroReadOptions<'a> {
impl Default for AvroReadOptions<'_> {
fn default() -> Self {
Self {
schema: None,
@@ -420,7 +420,7 @@ pub struct NdJsonReadOptions<'a> {
pub file_sort_order: Vec<Vec<SortExpr>>,
}

impl<'a> Default for NdJsonReadOptions<'a> {
impl Default for NdJsonReadOptions<'_> {
fn default() -> Self {
Self {
schema: None,
2 changes: 1 addition & 1 deletion datafusion/core/src/datasource/file_format/parquet.rs
@@ -477,7 +477,7 @@ impl<'a> ObjectStoreFetch<'a> {
}
}

impl<'a> MetadataFetch for ObjectStoreFetch<'a> {
impl MetadataFetch for ObjectStoreFetch<'_> {
fn fetch(
&mut self,
range: Range<usize>,
2 changes: 1 addition & 1 deletion datafusion/core/src/datasource/listing/helpers.rs
@@ -135,7 +135,7 @@ pub fn split_files(
partitioned_files.sort_by(|a, b| a.path().cmp(b.path()));

// effectively this is div with rounding up instead of truncating
let chunk_size = (partitioned_files.len() + n - 1) / n;
let chunk_size = partitioned_files.len().div_ceil(n);
let mut chunks = Vec::with_capacity(n);
let mut current_chunk = Vec::with_capacity(chunk_size);
for file in partitioned_files.drain(..) {
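The `split_files` change just above (and a similar one in `file_groups.rs` below) swaps the hand-rolled rounding-up division `(len + n - 1) / n` for the standard library's `usize::div_ceil`, stable since Rust 1.73 and presumably flagged by clippy's `manual_div_ceil` lint. A quick sanity check that the two forms agree, using arbitrary values rather than anything from the codebase:

```rust
fn main() {
    // Arbitrary (file count, partition count) pairs, not taken from DataFusion.
    for (len, n) in [(10_usize, 3_usize), (12, 4), (1, 5), (7, 7)] {
        let manual = (len + n - 1) / n; // old form: division rounding up
        let builtin = len.div_ceil(n); // new form suggested by clippy
        assert_eq!(manual, builtin);
        println!("{len} items split into {n} chunks of up to {builtin}");
    }
}
```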
3 changes: 1 addition & 2 deletions datafusion/core/src/datasource/physical_plan/file_groups.rs
@@ -217,8 +217,7 @@ impl FileGroupPartitioner {
return None;
}

let target_partition_size =
(total_size as usize + (target_partitions) - 1) / (target_partitions);
let target_partition_size = (total_size as usize).div_ceil(target_partitions);

let current_partition_index: usize = 0;
let current_partition_size: usize = 0;
4 changes: 2 additions & 2 deletions datafusion/core/src/datasource/physical_plan/mod.rs
@@ -139,7 +139,7 @@ impl DisplayAs for FileScanConfig {
#[derive(Debug)]
struct FileGroupsDisplay<'a>(&'a [Vec<PartitionedFile>]);

impl<'a> DisplayAs for FileGroupsDisplay<'a> {
impl DisplayAs for FileGroupsDisplay<'_> {
fn fmt_as(&self, t: DisplayFormatType, f: &mut Formatter) -> FmtResult {
let n_groups = self.0.len();
let groups = if n_groups == 1 { "group" } else { "groups" };
@@ -171,7 +171,7 @@ impl<'a> DisplayAs for FileGroupsDisplay<'a> {
#[derive(Debug)]
pub(crate) struct FileGroupDisplay<'a>(pub &'a [PartitionedFile]);

impl<'a> DisplayAs for FileGroupDisplay<'a> {
impl DisplayAs for FileGroupDisplay<'_> {
fn fmt_as(&self, t: DisplayFormatType, f: &mut Formatter) -> FmtResult {
write!(f, "[")?;
match t {
@@ -449,7 +449,7 @@ impl<'a> PagesPruningStatistics<'a> {
Some(vec)
}
}
impl<'a> PruningStatistics for PagesPruningStatistics<'a> {
impl PruningStatistics for PagesPruningStatistics<'_> {
fn min_values(&self, _column: &datafusion_common::Column) -> Option<ArrayRef> {
match self.converter.data_page_mins(
self.column_index,
@@ -336,7 +336,7 @@ impl<'schema> PushdownChecker<'schema> {
}
}

impl<'schema> TreeNodeRewriter for PushdownChecker<'schema> {
impl TreeNodeRewriter for PushdownChecker<'_> {
type Node = Arc<dyn PhysicalExpr>;

fn f_down(
@@ -374,7 +374,7 @@ impl<'a> RowGroupPruningStatistics<'a> {
}
}

impl<'a> PruningStatistics for RowGroupPruningStatistics<'a> {
impl PruningStatistics for RowGroupPruningStatistics<'_> {
fn min_values(&self, column: &Column) -> Option<ArrayRef> {
self.statistics_converter(column)
.and_then(|c| Ok(c.row_group_mins(self.metadata_iter())?))
2 changes: 1 addition & 1 deletion datafusion/core/src/execution/context/mod.rs
@@ -1764,7 +1764,7 @@ impl<'a> BadPlanVisitor<'a> {
}
}

impl<'n, 'a> TreeNodeVisitor<'n> for BadPlanVisitor<'a> {
impl<'n> TreeNodeVisitor<'n> for BadPlanVisitor<'_> {
type Node = LogicalPlan;

fn f_down(&mut self, node: &'n Self::Node) -> Result<TreeNodeRecursion> {
4 changes: 2 additions & 2 deletions datafusion/core/src/execution/session_state.rs
@@ -1636,7 +1636,7 @@ struct SessionContextProvider<'a> {
tables: HashMap<ResolvedTableReference, Arc<dyn TableSource>>,
}

impl<'a> ContextProvider for SessionContextProvider<'a> {
impl ContextProvider for SessionContextProvider<'_> {
fn get_expr_planners(&self) -> &[Arc<dyn ExprPlanner>] {
&self.state.expr_planners
}
@@ -1931,7 +1931,7 @@ impl<'a> SessionSimplifyProvider<'a> {
}
}

impl<'a> SimplifyInfo for SessionSimplifyProvider<'a> {
impl SimplifyInfo for SessionSimplifyProvider<'_> {
fn is_boolean_type(&self, expr: &Expr) -> datafusion_common::Result<bool> {
Ok(expr.get_type(self.df_schema)? == DataType::Boolean)
}
8 changes: 4 additions & 4 deletions datafusion/expr-common/src/type_coercion/aggregates.rs
@@ -294,19 +294,19 @@ pub fn coerce_avg_type(func_name: &str, arg_types: &[DataType]) -> Result<Vec<Da
// Supported types smallint, int, bigint, real, double precision, decimal, or interval
// Refer to https://www.postgresql.org/docs/8.2/functions-aggregate.html doc
fn coerced_type(func_name: &str, data_type: &DataType) -> Result<DataType> {
return match &data_type {
match &data_type {
DataType::Decimal128(p, s) => Ok(DataType::Decimal128(*p, *s)),
DataType::Decimal256(p, s) => Ok(DataType::Decimal256(*p, *s)),
d if d.is_numeric() => Ok(DataType::Float64),
DataType::Dictionary(_, v) => return coerced_type(func_name, v.as_ref()),
DataType::Dictionary(_, v) => coerced_type(func_name, v.as_ref()),
_ => {
return plan_err!(
plan_err!(
"The function {:?} does not support inputs of type {:?}.",
func_name,
data_type
)
}
};
}
}
Ok(vec![coerced_type(func_name, &arg_types[0])?])
}
4 changes: 2 additions & 2 deletions datafusion/functions-aggregate-common/src/merge_arrays.rs
@@ -65,7 +65,7 @@ impl<'a> CustomElement<'a> {
// Overwrite ordering implementation such that
// - `self.ordering` values are used for comparison,
// - When used inside `BinaryHeap` it is a min-heap.
impl<'a> Ord for CustomElement<'a> {
impl Ord for CustomElement<'_> {
fn cmp(&self, other: &Self) -> Ordering {
// Compares according to custom ordering
self.ordering(&self.ordering, &other.ordering)
@@ -78,7 +78,7 @@ impl<'a> Ord for CustomElement<'a> {
}
}

impl<'a> PartialOrd for CustomElement<'a> {
impl PartialOrd for CustomElement<'_> {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
2 changes: 1 addition & 1 deletion datafusion/functions-nested/src/planner.rs
@@ -185,5 +185,5 @@ impl ExprPlanner for FieldAccessPlanner {
}

fn is_array_agg(agg_func: &datafusion_expr::expr::AggregateFunction) -> bool {
return agg_func.func.name() == "array_agg";
agg_func.func.name() == "array_agg"
}
2 changes: 1 addition & 1 deletion datafusion/functions/src/strings.rs
@@ -395,7 +395,7 @@ pub enum ColumnarValueRef<'a> {
NonNullableStringViewArray(&'a StringViewArray),
}

impl<'a> ColumnarValueRef<'a> {
impl ColumnarValueRef<'_> {
#[inline]
pub fn is_valid(&self, i: usize) -> bool {
match &self {
2 changes: 1 addition & 1 deletion datafusion/optimizer/src/analyzer/type_coercion.rs
@@ -290,7 +290,7 @@ impl<'a> TypeCoercionRewriter<'a> {
}
}

impl<'a> TreeNodeRewriter for TypeCoercionRewriter<'a> {
impl TreeNodeRewriter for TypeCoercionRewriter<'_> {
type Node = Expr;

fn f_up(&mut self, expr: Expr) -> Result<Transformed<Expr>> {
2 changes: 1 addition & 1 deletion datafusion/optimizer/src/join_key_set.rs
@@ -148,7 +148,7 @@ impl<'a> ExprPair<'a> {
}
}

impl<'a> Equivalent<(Expr, Expr)> for ExprPair<'a> {
impl Equivalent<(Expr, Expr)> for ExprPair<'_> {
fn equivalent(&self, other: &(Expr, Expr)) -> bool {
self.0 == &other.0 && self.1 == &other.1
}
2 changes: 1 addition & 1 deletion datafusion/optimizer/src/optimizer.rs
@@ -302,7 +302,7 @@ impl<'a> Rewriter<'a> {
}
}

impl<'a> TreeNodeRewriter for Rewriter<'a> {
impl TreeNodeRewriter for Rewriter<'_> {
type Node = LogicalPlan;

fn f_down(&mut self, node: LogicalPlan) -> Result<Transformed<LogicalPlan>> {
@@ -489,7 +489,7 @@ enum ConstSimplifyResult {
SimplifyRuntimeError(DataFusionError, Expr),
}

impl<'a> TreeNodeRewriter for ConstEvaluator<'a> {
impl TreeNodeRewriter for ConstEvaluator<'_> {
type Node = Expr;

fn f_down(&mut self, expr: Expr) -> Result<Transformed<Expr>> {
@@ -710,7 +710,7 @@ impl<'a, S> Simplifier<'a, S> {
}
}

impl<'a, S: SimplifyInfo> TreeNodeRewriter for Simplifier<'a, S> {
impl<S: SimplifyInfo> TreeNodeRewriter for Simplifier<'_, S> {
type Node = Expr;

/// rewrite the expression simplifying any constant expressions
@@ -57,7 +57,7 @@ impl<'a> GuaranteeRewriter<'a> {
}
}

impl<'a> TreeNodeRewriter for GuaranteeRewriter<'a> {
impl TreeNodeRewriter for GuaranteeRewriter<'_> {
type Node = Expr;

fn f_up(&mut self, expr: Expr) -> Result<Transformed<Expr>> {
2 changes: 1 addition & 1 deletion datafusion/physical-expr-common/src/physical_expr.rs
@@ -219,7 +219,7 @@ pub fn with_new_children_if_necessary(
/// Example output: `[a + 1, b]`
pub fn format_physical_expr_list(exprs: &[Arc<dyn PhysicalExpr>]) -> impl Display + '_ {
struct DisplayWrapper<'a>(&'a [Arc<dyn PhysicalExpr>]);
impl<'a> Display for DisplayWrapper<'a> {
impl Display for DisplayWrapper<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let mut iter = self.0.iter();
write!(f, "[")?;
2 changes: 1 addition & 1 deletion datafusion/physical-expr-common/src/sort_expr.rs
@@ -251,7 +251,7 @@ pub fn format_physical_sort_requirement_list(
exprs: &[PhysicalSortRequirement],
) -> impl Display + '_ {
struct DisplayWrapper<'a>(&'a [PhysicalSortRequirement]);
impl<'a> Display for DisplayWrapper<'a> {
impl Display for DisplayWrapper<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let mut iter = self.0.iter();
write!(f, "[")?;
2 changes: 1 addition & 1 deletion datafusion/physical-expr/src/equivalence/class.rs
@@ -125,7 +125,7 @@ impl ConstExpr {
/// Returns a [`Display`]able list of `ConstExpr`.
pub fn format_list(input: &[ConstExpr]) -> impl Display + '_ {
struct DisplayableList<'a>(&'a [ConstExpr]);
impl<'a> Display for DisplayableList<'a> {
impl Display for DisplayableList<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
let mut first = true;
for const_expr in self.0 {
2 changes: 1 addition & 1 deletion datafusion/physical-expr/src/expressions/in_list.rs
@@ -244,7 +244,7 @@ trait IsEqual: HashValue {
fn is_equal(&self, other: &Self) -> bool;
}

impl<'a, T: IsEqual + ?Sized> IsEqual for &'a T {
impl<T: IsEqual + ?Sized> IsEqual for &T {
fn is_equal(&self, other: &Self) -> bool {
T::is_equal(self, other)
}
2 changes: 1 addition & 1 deletion datafusion/physical-expr/src/utils/guarantee.rs
@@ -124,7 +124,7 @@ impl LiteralGuarantee {
// for an `AND` conjunction to be true, all terms individually must be true
.fold(GuaranteeBuilder::new(), |builder, expr| {
if let Some(cel) = ColOpLit::try_new(expr) {
return builder.aggregate_conjunct(cel);
builder.aggregate_conjunct(cel)
} else if let Some(inlist) = expr
.as_any()
.downcast_ref::<crate::expressions::InListExpr>()
4 changes: 1 addition & 3 deletions datafusion/physical-expr/src/utils/mod.rs
@@ -146,9 +146,7 @@ struct PhysicalExprDAEGBuilder<'a, T, F: Fn(&ExprTreeNode<NodeIndex>) -> Result<
constructor: &'a F,
}

impl<'a, T, F: Fn(&ExprTreeNode<NodeIndex>) -> Result<T>>
PhysicalExprDAEGBuilder<'a, T, F>
{
impl<T, F: Fn(&ExprTreeNode<NodeIndex>) -> Result<T>> PhysicalExprDAEGBuilder<'_, T, F> {
// This method mutates an expression node by transforming it to a physical expression
// and adding it to the graph. The method returns the mutated expression node.
fn mutate(