fix clippy in nightly
jackwener committed Feb 10, 2023
1 parent a1b6f50 commit c2c81f3
Showing 18 changed files with 19 additions and 51 deletions.
5 changes: 1 addition & 4 deletions benchmarks/src/bin/tpch.rs
@@ -445,10 +445,7 @@ impl RunContext {
                 .duration_since(SystemTime::UNIX_EPOCH)
                 .expect("current time is later than UNIX_EPOCH")
                 .as_secs(),
-            arguments: std::env::args()
-                .skip(1)
-                .into_iter()
-                .collect::<Vec<String>>(),
+            arguments: std::env::args().skip(1).collect::<Vec<String>>(),
         }
     }
 }
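Nearly every hunk in this commit is the same fix: clippy's `useless_conversion` lint (stricter on nightly) flags `.into_iter()` calls on values that are already iterators, where the call converts an iterator into itself and does nothing. A minimal standalone sketch of the pattern above (`std::env::args()` is real std API; the rest is illustrative):

```rust
fn main() {
    // `std::env::args()` returns `std::env::Args`, which is already an
    // iterator, and `.skip(1)` is an iterator adapter, so a trailing
    // `.into_iter()` would be a no-op that clippy flags.
    let args: Vec<String> = std::env::args().skip(1).collect();
    println!("{args:?}");
}
```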
5 changes: 1 addition & 4 deletions datafusion/common/src/dfschema.rs
@@ -1135,9 +1135,6 @@ mod tests {
     }
 
     fn test_metadata_n(n: usize) -> HashMap<String, String> {
-        (0..n)
-            .into_iter()
-            .map(|i| (format!("k{i}"), format!("v{i}")))
-            .collect()
+        (0..n).map(|i| (format!("k{i}"), format!("v{i}"))).collect()
     }
 }
2 changes: 0 additions & 2 deletions datafusion/common/src/scalar.rs
@@ -1526,7 +1526,6 @@ impl ScalarValue {
             DataType::Dictionary(key_type, value_type) => {
                 // create the values array
                 let value_scalars = scalars
-                    .into_iter()
                     .map(|scalar| match scalar {
                         ScalarValue::Dictionary(inner_key_type, scalar) => {
                             if &inner_key_type == key_type {
@@ -1704,7 +1703,6 @@ impl ScalarValue {
     ) -> Decimal128Array {
         std::iter::repeat(value)
             .take(size)
-            .into_iter()
             .collect::<Decimal128Array>()
             .with_precision_and_scale(precision, scale)
             .unwrap()
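The same lint fires on `std::iter::repeat(..).take(..)`, since `Take<Repeat<T>>` is itself an iterator. A sketch with a plain `Vec` in place of arrow's `Decimal128Array` (the `with_precision_and_scale` step is arrow-specific and omitted here):

```rust
fn main() {
    // `repeat(value).take(size)` is already an iterator; collect it
    // directly, with no `.into_iter()` in between.
    let repeated: Vec<Option<i128>> = std::iter::repeat(Some(42i128)).take(3).collect();
    assert_eq!(repeated, vec![Some(42), Some(42), Some(42)]);
}
```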
2 changes: 1 addition & 1 deletion datafusion/common/src/table_reference.rs
@@ -200,7 +200,7 @@ impl<'a> TableReference<'a> {
     /// a single double quote in the identifier)
     pub fn parse_str(s: &'a str) -> Self {
         let mut parts = parse_identifiers(s)
-            .unwrap_or(vec![])
+            .unwrap_or_default()
             .into_iter()
             .map(|id| match id.quote_style {
                 Some(_) => id.value,
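This hunk is a different cleanup: `unwrap_or(vec![])` evaluates its argument eagerly, allocating the empty `Vec` even when the `Result` is `Ok`, and clippy (the `or_fun_call`/`unwrap_or_default` family of lints) suggests `unwrap_or_default()` instead. A sketch with a hypothetical parse result standing in for `parse_identifiers(s)`:

```rust
fn main() {
    // Hypothetical stand-in for a fallible parse returning Result<Vec<_>, _>.
    let parsed: Result<Vec<String>, ()> = Err(());

    // `unwrap_or(vec![])` would build the Vec up front on every call;
    // `unwrap_or_default()` calls `Vec::default()` only on the Err path.
    let parts = parsed.unwrap_or_default();
    assert!(parts.is_empty());
}
```

Note that the `.into_iter()` on the following line survives the commit: there it is called on a `Vec`, where it genuinely converts a collection into an iterator.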
2 changes: 0 additions & 2 deletions datafusion/core/src/physical_plan/empty.rs
@@ -74,14 +74,12 @@ impl EmptyExec {
         vec![RecordBatch::try_new(
             Arc::new(Schema::new(
                 (0..n_field)
-                    .into_iter()
                     .map(|i| {
                         Field::new(format!("placeholder_{i}"), DataType::Null, true)
                     })
                     .collect(),
             )),
             (0..n_field)
-                .into_iter()
                 .map(|_i| {
                     let ret: ArrayRef = Arc::new(NullArray::new(1));
                     ret
1 change: 0 additions & 1 deletion datafusion/core/src/physical_plan/sorts/sort.rs
@@ -295,7 +295,6 @@ fn in_mem_partial_sort(
     let (sorted_arrays, batches): (Vec<Vec<ArrayRef>>, Vec<RecordBatch>) =
         buffered_batches
             .drain(..)
-            .into_iter()
             .map(|b| {
                 let BatchWithSortArray {
                     sort_arrays,
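`Vec::drain(..)` already returns an iterator (`std::vec::Drain`), so this is the same no-op in another guise. A small sketch:

```rust
fn main() {
    let mut buffered_batches = vec![10, 20, 30];
    // `drain(..)` removes the elements and yields them as an iterator,
    // so `map`/`collect` chain onto it directly.
    let doubled: Vec<i32> = buffered_batches.drain(..).map(|b| b * 2).collect();
    assert!(buffered_batches.is_empty());
    assert_eq!(doubled, vec![20, 40, 60]);
}
```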
9 changes: 2 additions & 7 deletions datafusion/core/src/physical_plan/sorts/sort_preserving_merge.rs
@@ -191,7 +191,6 @@ impl ExecutionPlan for SortPreservingMergeExec {
         // Use tokio only if running from a tokio context (#2201)
         let receivers = match tokio::runtime::Handle::try_current() {
             Ok(_) => (0..input_partitions)
-                .into_iter()
                 .map(|part_i| {
                     let (sender, receiver) = mpsc::channel(1);
                     let join_handle = spawn_execution(
@@ -355,10 +354,7 @@ impl SortPreservingMergeStream {
         batch_size: usize,
     ) -> Result<Self> {
         let stream_count = streams.len();
-        let batches = (0..stream_count)
-            .into_iter()
-            .map(|_| VecDeque::new())
-            .collect();
+        let batches = (0..stream_count).map(|_| VecDeque::new()).collect();
         tracking_metrics.init_mem_used(streams.iter().map(|s| s.mem_used).sum());
         let wrappers = streams.into_iter().map(|s| s.stream.fuse()).collect();
 
@@ -380,7 +376,7 @@ impl SortPreservingMergeStream {
             aborted: false,
             in_progress: vec![],
             next_batch_id: 0,
-            cursors: (0..stream_count).into_iter().map(|_| None).collect(),
+            cursors: (0..stream_count).map(|_| None).collect(),
             loser_tree: Vec::with_capacity(stream_count),
             loser_tree_adjusted: false,
             batch_size,
@@ -1060,7 +1056,6 @@ mod tests {
 
         // Split the sorted RecordBatch into multiple
         (0..batches)
-            .into_iter()
             .map(|batch_idx| {
                 let columns = (0..sorted.num_columns())
                     .map(|column_idx| {
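A side note on the `(0..stream_count).map(|_| VecDeque::new()).collect()` shape the fix leaves behind: `vec![elem; n]` is shorter but requires `elem: Clone`, so map-and-collect is the usual way to build `n` independently owned values, and it also works for non-`Clone` element types. Sketch:

```rust
use std::collections::VecDeque;

fn main() {
    let stream_count: usize = 4;
    // One fresh deque per input stream; no Clone bound required.
    let batches: Vec<VecDeque<u64>> = (0..stream_count).map(|_| VecDeque::new()).collect();
    assert_eq!(batches.len(), stream_count);
}
```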
8 changes: 3 additions & 5 deletions datafusion/core/src/physical_plan/unnest.rs
@@ -248,11 +248,9 @@ fn build_batch(
                 .unwrap();
             unnest_batch(batch, schema, column, list_array)
         }
-        _ => {
-            return Err(DataFusionError::Execution(format!(
-                "Invalid unnest column {column}"
-            )));
-        }
+        _ => Err(DataFusionError::Execution(format!(
+            "Invalid unnest column {column}"
+        ))),
     }
 }
 
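The unnest.rs change is clippy's `needless_return`: when a `match` is the tail expression of a function, each arm can evaluate to the result directly rather than `return`-ing it from inside a block. A minimal sketch of the after-shape (the error type is a stand-in, not DataFusion's):

```rust
fn check_column(column: &str) -> Result<(), String> {
    // The match is the function's tail expression, so the arms are
    // values: no `return`, no trailing semicolon.
    match column {
        "list" => Ok(()),
        _ => Err(format!("Invalid unnest column {column}")),
    }
}

fn main() {
    assert!(check_column("list").is_ok());
    assert!(check_column("x").is_err());
}
```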
1 change: 0 additions & 1 deletion datafusion/core/src/physical_plan/windows/bounded_window_agg_exec.rs
@@ -518,7 +518,6 @@ impl SortedPartitionByBoundedWindowStream {
                 }
                 cur_window_expr_out_result_len
             })
-            .into_iter()
             .min()
             .unwrap_or(0)
     }
1 change: 0 additions & 1 deletion datafusion/physical-expr/src/aggregate/average.rs
@@ -334,7 +334,6 @@ mod tests {
         let array: ArrayRef = Arc::new(
             std::iter::repeat::<Option<i128>>(None)
                 .take(6)
-                .into_iter()
                 .collect::<Decimal128Array>()
                 .with_precision_and_scale(10, 0)?,
         );
1 change: 0 additions & 1 deletion datafusion/physical-expr/src/aggregate/count_distinct.rs
@@ -193,7 +193,6 @@ impl Accumulator for DistinctCountAccumulator {
                     "cols_out should only consist of ScalarValue::List. {t:?} is found"
                 ))),
             })
-            .into_iter()
             .collect::<Result<Vec<_>>>()?;
 
         self.values.iter().for_each(|distinct_values| {
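Alongside the no-op `.into_iter()` removal, this hunk shows the `collect::<Result<Vec<_>>>()?` idiom: `Result` implements `FromIterator`, so collecting an iterator of `Result<T, E>` short-circuits at the first `Err` and otherwise yields `Ok(Vec<T>)`. A sketch with std types:

```rust
fn main() {
    let inputs = ["1", "2", "3"];
    // `map` already yields an iterator; collecting into Result<Vec<_>, _>
    // propagates the first parse error, if any.
    let parsed: Result<Vec<i32>, _> = inputs.iter().map(|s| s.parse::<i32>()).collect();
    assert_eq!(parsed.unwrap(), vec![1, 2, 3]);
}
```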
4 changes: 0 additions & 4 deletions datafusion/physical-expr/src/aggregate/min_max.rs
@@ -943,7 +943,6 @@ mod tests {
         let array: ArrayRef = Arc::new(
             std::iter::repeat::<Option<i128>>(None)
                 .take(0)
-                .into_iter()
                 .collect::<Decimal128Array>()
                 .with_precision_and_scale(10, 0)?,
         );
@@ -971,7 +970,6 @@ mod tests {
         let array: ArrayRef = Arc::new(
             std::iter::repeat::<Option<i128>>(None)
                 .take(6)
-                .into_iter()
                 .collect::<Decimal128Array>()
                 .with_precision_and_scale(10, 0)?,
         );
@@ -1031,7 +1029,6 @@ mod tests {
         let array: ArrayRef = Arc::new(
             std::iter::repeat::<Option<i128>>(None)
                 .take(0)
-                .into_iter()
                 .collect::<Decimal128Array>()
                 .with_precision_and_scale(10, 0)?,
         );
@@ -1074,7 +1071,6 @@ mod tests {
         let array: ArrayRef = Arc::new(
             std::iter::repeat::<Option<i128>>(None)
                 .take(6)
-                .into_iter()
                 .collect::<Decimal128Array>()
                 .with_precision_and_scale(10, 0)?,
         );
1 change: 0 additions & 1 deletion datafusion/physical-expr/src/aggregate/sum.rs
@@ -383,7 +383,6 @@ mod tests {
         let array: ArrayRef = Arc::new(
             std::iter::repeat::<Option<i128>>(None)
                 .take(6)
-                .into_iter()
                 .collect::<Decimal128Array>()
                 .with_precision_and_scale(10, 0)?,
         );
1 change: 0 additions & 1 deletion datafusion/physical-expr/src/expressions/binary.rs
@@ -3187,7 +3187,6 @@ mod tests {
         // build a left deep tree ((((a + a) + a) + a ....
         let tree_depth: i32 = 100;
         let expr = (0..tree_depth)
-            .into_iter()
             .map(|_| col("a", schema.as_ref()).unwrap())
             .reduce(|l, r| binary_simple(l, Operator::Plus, r, &schema))
             .unwrap();
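The test above builds its left-deep `((a + a) + a) + ...` tree with `Iterator::reduce`, which folds the first item with each subsequent one (returning `None` only for an empty iterator), and a `Range` is already an iterator, hence the removed line. A string-based sketch of the same folding shape:

```rust
fn main() {
    let tree_depth = 4;
    // reduce folds left-to-right, so the result nests to the left.
    let expr = (0..tree_depth)
        .map(|_| "a".to_string())
        .reduce(|l, r| format!("({l} + {r})"))
        .unwrap();
    assert_eq!(expr, "(((a + a) + a) + a)");
}
```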
2 changes: 1 addition & 1 deletion datafusion/physical-expr/src/expressions/binary/kernels_arrow.rs
@@ -134,7 +134,7 @@ pub(crate) fn is_not_distinct_from_null(
 }
 
 fn make_boolean_array(length: usize, value: bool) -> Result<BooleanArray> {
-    Ok((0..length).into_iter().map(|_| Some(value)).collect())
+    Ok((0..length).map(|_| Some(value)).collect())
 }
 
 pub(crate) fn is_not_distinct_from_utf8<OffsetSize: OffsetSizeTrait>(
2 changes: 1 addition & 1 deletion datafusion/physical-expr/src/expressions/in_list.rs
@@ -924,7 +924,7 @@ mod tests {
 
         // test the optimization: set
         // expression: "a in (99..300), the data type of list is INT32
-        let list = (99i32..300).into_iter().map(lit).collect::<Vec<_>>();
+        let list = (99i32..300).map(lit).collect::<Vec<_>>();
 
         in_list!(
             batch,
1 change: 0 additions & 1 deletion datafusion/physical-expr/src/window/nth_value.rs
@@ -237,7 +237,6 @@ mod tests {
         let result = ranges
             .iter()
             .map(|range| evaluator.evaluate_inside_range(&values, range))
-            .into_iter()
             .collect::<Result<Vec<ScalarValue>>>()?;
         let result = ScalarValue::iter_to_array(result.into_iter())?;
         let result = as_int32_array(&result)?;
22 changes: 9 additions & 13 deletions datafusion/substrait/src/logical_plan/consumer.rs
@@ -440,16 +440,14 @@ fn from_substrait_jointype(join_type: i32) -> Result<JoinType> {
             join_rel::JoinType::Outer => Ok(JoinType::Full),
             join_rel::JoinType::Anti => Ok(JoinType::LeftAnti),
             join_rel::JoinType::Semi => Ok(JoinType::LeftSemi),
-            _ => {
-                return Err(DataFusionError::Internal(format!(
-                    "unsupported join type {substrait_join_type:?}"
-                )))
-            }
+            _ => Err(DataFusionError::Internal(format!(
+                "unsupported join type {substrait_join_type:?}"
+            ))),
         }
     } else {
-        return Err(DataFusionError::Internal(format!(
+        Err(DataFusionError::Internal(format!(
             "invalid join type variant {join_type:?}"
-        )));
+        )))
     }
 }
 
@@ -671,12 +669,10 @@ pub async fn from_substrait_rex(
             Some(LiteralType::Null(ntype)) => {
                 Ok(Arc::new(Expr::Literal(from_substrait_null(ntype)?)))
             }
-            _ => {
-                return Err(DataFusionError::NotImplemented(format!(
-                    "Unsupported literal_type: {:?}",
-                    lit.literal_type
-                )))
-            }
+            _ => Err(DataFusionError::NotImplemented(format!(
+                "Unsupported literal_type: {:?}",
+                lit.literal_type
+            ))),
         }
     }
     _ => Err(DataFusionError::NotImplemented(
