Clippy fix on nightly (#1907)
yjshen authored Mar 3, 2022
1 parent a1a1815 commit 6f3ca5b
Showing 10 changed files with 16 additions and 15 deletions.
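The commit itself does not record the exact clippy invocation. As a reproduction hint only (an assumption, not part of this commit), warnings like the ones fixed below typically surface by running clippy with the nightly toolchain across the workspace:

    cargo +nightly clippy --workspace --all-targets -- -D warnings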
4 changes: 2 additions & 2 deletions ballista/rust/core/src/execution_plans/distributed_query.rs
@@ -287,13 +287,13 @@ async fn fetch_partition(
         BallistaClient::try_new(metadata.host.as_str(), metadata.port as u16)
             .await
             .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
-    Ok(ballista_client
+    ballista_client
         .fetch_partition(
             &partition_id.job_id,
             partition_id.stage_id as usize,
             partition_id.partition_id as usize,
             &location.path,
         )
         .await
-        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?)
+        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))
 }
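Both this change and the matching one in shuffle_reader.rs below remove a redundant Ok(...) around an expression that already ends in `?`, which is what clippy's needless_question_mark lint flags. A minimal standalone sketch of the pattern (illustrative names, not the Ballista code):

    // Before the fix the body of `run` would read `Ok(fetch()?)`: the `?`
    // unwraps the Result only for Ok(...) to rewrap it immediately.
    fn fetch() -> Result<u32, String> {
        Ok(42)
    }

    // After: when the error type already matches, return the call directly.
    fn run() -> Result<u32, String> {
        fetch()
    }

    fn main() {
        assert_eq!(run(), Ok(42));
    }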
4 changes: 2 additions & 2 deletions ballista/rust/core/src/execution_plans/shuffle_reader.rs
@@ -208,15 +208,15 @@ async fn fetch_partition(
         BallistaClient::try_new(metadata.host.as_str(), metadata.port as u16)
             .await
             .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
-    Ok(ballista_client
+    ballista_client
         .fetch_partition(
             &partition_id.job_id,
             partition_id.stage_id as usize,
             partition_id.partition_id as usize,
             &location.path,
         )
         .await
-        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?)
+        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))
 }
 
 #[cfg(test)]
2 changes: 1 addition & 1 deletion ballista/rust/executor/src/cpu_bound_executor.rs
@@ -203,7 +203,7 @@ fn set_current_thread_priority(prio: i32) {
 }
 
 #[cfg(not(unix))]
-fn set_current_thread_priority(prio: i32) {
+fn set_current_thread_priority(_prio: i32) {
     warn!("Setting worker thread priority not supported on this platform");
 }

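Here the non-Unix stub keeps the same signature as the Unix implementation but never reads its argument, so the parameter is renamed with a leading underscore to silence rustc's unused_variables warning. A minimal sketch (illustrative, not the Ballista code):

    // An unused parameter named `prio` would produce an unused-variable warning;
    // prefixing it with `_` documents that it is intentionally ignored while
    // leaving the function signature unchanged.
    fn set_priority_stub(_prio: i32) {
        // no-op: thread priorities unsupported on this hypothetical platform
    }

    fn main() {
        set_priority_stub(10); // compiles cleanly, no warning
    }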
2 changes: 1 addition & 1 deletion ballista/rust/scheduler/src/lib.rs
@@ -227,7 +227,7 @@ impl<T: 'static + AsLogicalPlan, U: 'static + AsExecutionPlan> SchedulerServer<T
 
     async fn fetch_tasks(
         &self,
-        available_executors: &mut Vec<ExecutorData>,
+        available_executors: &mut [ExecutorData],
         job_id: &str,
     ) -> Result<(Vec<Vec<TaskDefinition>>, usize), BallistaError> {
         let mut ret: Vec<Vec<TaskDefinition>> =
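This signature change, and the same edit repeated in tdigest/mod.rs, parquet.rs, datasource/mod.rs, and hash_utils.rs below, relaxes `&mut Vec<T>` parameters to `&mut [T]`; nightly clippy's ptr_arg lint flags a mutable Vec reference when the function only mutates elements and never resizes the vector. A small sketch of why the slice form is preferable (illustrative names, not from this repository):

    // Taking `&mut [i64]` still lets the function mutate elements in place,
    // but callers are no longer forced to hand over a `Vec` specifically.
    fn double_all(values: &mut [i64]) {
        for v in values.iter_mut() {
            *v *= 2;
        }
    }

    fn main() {
        let mut from_vec = vec![1, 2, 3];
        double_all(&mut from_vec); // &mut Vec<i64> coerces to &mut [i64]
        assert_eq!(from_vec, vec![2, 4, 6]);

        let mut from_array = [4, 5];
        double_all(&mut from_array); // plain arrays work too
        assert_eq!(from_array, [8, 10]);
    }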
4 changes: 2 additions & 2 deletions ballista/rust/scheduler/src/main.rs
@@ -106,7 +106,7 @@ async fn start_server(
         ),
     };
 
-    Ok(Server::bind(&addr)
+    Server::bind(&addr)
         .serve(make_service_fn(move |request: &AddrStream| {
             let scheduler_grpc_server =
                 SchedulerGrpcServer::new(scheduler_server.clone());
@@ -145,7 +145,7 @@ async fn start_server(
             ))
         }))
         .await
-        .context("Could not start grpc server")?)
+        .context("Could not start grpc server")
 }
 
 #[tokio::main]
2 changes: 1 addition & 1 deletion datafusion-physical-expr/src/tdigest/mod.rs
@@ -357,7 +357,7 @@ impl TDigest {
     }
 
     fn external_merge(
-        centroids: &mut Vec<Centroid>,
+        centroids: &mut [Centroid],
         first: usize,
         middle: usize,
         last: usize,
4 changes: 2 additions & 2 deletions datafusion/src/datasource/file_format/parquet.rs
@@ -122,8 +122,8 @@ impl FileFormat for ParquetFormat {
 }
 
 fn summarize_min_max(
-    max_values: &mut Vec<Option<MaxAccumulator>>,
-    min_values: &mut Vec<Option<MinAccumulator>>,
+    max_values: &mut [Option<MaxAccumulator>],
+    min_values: &mut [Option<MinAccumulator>],
     fields: &[Field],
     i: usize,
     stat: &ParquetStatistics,
4 changes: 2 additions & 2 deletions datafusion/src/datasource/mod.rs
@@ -177,8 +177,8 @@ fn create_max_min_accs(
 fn get_col_stats(
     schema: &Schema,
     null_counts: Vec<usize>,
-    max_values: &mut Vec<Option<MaxAccumulator>>,
-    min_values: &mut Vec<Option<MinAccumulator>>,
+    max_values: &mut [Option<MaxAccumulator>],
+    min_values: &mut [Option<MinAccumulator>],
 ) -> Vec<ColumnStatistics> {
     (0..schema.fields().len())
         .map(|i| {
1 change: 1 addition & 0 deletions datafusion/src/logical_plan/extension.rs
@@ -71,6 +71,7 @@ pub trait UserDefinedLogicalNode: fmt::Debug {
     /// of self.inputs and self.exprs.
     ///
     /// So, `self.from_template(exprs, ..).expressions() == exprs
+    #[allow(clippy::wrong_self_convention)]
     fn from_template(
         &self,
         exprs: &[Expr],
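clippy's wrong_self_convention lint expects a method named `from_*` to be a constructor that does not take `self` by reference, but `from_template` is an established trait method that genuinely needs `&self`, so the commit suppresses the lint at the definition rather than renaming the method. A simplified sketch of the same situation (not the actual DataFusion trait):

    // A `from_*` method taking `&self` trips clippy::wrong_self_convention;
    // the #[allow] keeps the existing public API intact.
    trait Template {
        #[allow(clippy::wrong_self_convention)]
        fn from_template(&self, values: &[i32]) -> Box<dyn Template>;
    }

    struct Sum;

    impl Template for Sum {
        fn from_template(&self, _values: &[i32]) -> Box<dyn Template> {
            // a real node would rebuild itself from `_values`
            Box::new(Sum)
        }
    }

    fn main() {
        let _node = Sum.from_template(&[1, 2, 3]);
    }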
4 changes: 2 additions & 2 deletions datafusion/src/physical_plan/hash_utils.rs
@@ -42,7 +42,7 @@ fn combine_hashes(l: u64, r: u64) -> u64 {
 fn hash_decimal128<'a>(
     array: &ArrayRef,
     random_state: &RandomState,
-    hashes_buffer: &'a mut Vec<u64>,
+    hashes_buffer: &'a mut [u64],
     mul_col: bool,
 ) {
     let array = array.as_any().downcast_ref::<DecimalArray>().unwrap();
@@ -207,7 +207,7 @@ macro_rules! hash_array_float {
 fn create_hashes_dictionary<K: ArrowDictionaryKeyType>(
     array: &ArrayRef,
     random_state: &RandomState,
-    hashes_buffer: &mut Vec<u64>,
+    hashes_buffer: &mut [u64],
     multi_col: bool,
 ) -> Result<()> {
     let dict_array = array.as_any().downcast_ref::<DictionaryArray<K>>().unwrap();
