fix clippy (#4190)
* fix clippy

* fix more

* fix fmt
jackwener committed Nov 13, 2022
1 parent 4272d96 commit 96512ac
Showing 28 changed files with 48 additions and 50 deletions.
2 changes: 1 addition & 1 deletion benchmarks/src/bin/tpch.rs
@@ -247,7 +247,7 @@ async fn benchmark_query(
}

let elapsed = start.elapsed().as_secs_f64() * 1000.0;
- millis.push(elapsed as f64);
+ millis.push(elapsed);
let row_count = result.iter().map(|b| b.num_rows()).sum();
println!(
"Query {} iteration {} took {:.1} ms and returned {} rows",
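This hunk clears clippy's `unnecessary_cast` lint: `elapsed` is already an `f64`, so casting it to `f64` again is a no-op. A minimal standalone sketch of the pattern (illustrative, not the benchmark code itself):

```rust
fn main() {
    let start = std::time::Instant::now();
    // `as_secs_f64` already yields f64, so a later `as f64` cast is a no-op.
    let elapsed = start.elapsed().as_secs_f64() * 1000.0;

    let mut millis: Vec<f64> = Vec::new();
    // millis.push(elapsed as f64); // clippy: casting to the same type is unnecessary
    millis.push(elapsed); // identical behavior, lint-clean
    println!("{:?}", millis);
}
```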
2 changes: 1 addition & 1 deletion datafusion/core/benches/data_utils/mod.rs
@@ -131,7 +131,7 @@ fn create_record_batch(
schema,
vec![
Arc::new(StringArray::from(keys)),
- Arc::new(Float32Array::from_slice(&vec![i as f32; batch_size])),
+ Arc::new(Float32Array::from_slice(vec![i as f32; batch_size])),
Arc::new(Float64Array::from(values)),
Arc::new(UInt64Array::from(integer_values_wide)),
Arc::new(UInt64Array::from(integer_values_narrow)),
4 changes: 2 additions & 2 deletions datafusion/core/benches/filter_query_sql.rs
@@ -49,8 +49,8 @@ fn create_context(array_len: usize, batch_size: usize) -> Result<SessionContext>
RecordBatch::try_new(
schema.clone(),
vec![
- Arc::new(Float32Array::from_slice(&vec![i as f32; batch_size])),
- Arc::new(Float64Array::from_slice(&vec![i as f64; batch_size])),
+ Arc::new(Float32Array::from_slice(vec![i as f32; batch_size])),
+ Arc::new(Float64Array::from_slice(vec![i as f64; batch_size])),
],
)
.unwrap()
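The `from_slice(&vec![...])` → `from_slice(vec![...])` changes drop a needless borrow: when a parameter accepts the value generically, the owned `Vec` can be passed directly. A hedged sketch, with `take_slice` as a hypothetical stand-in for `Float32Array::from_slice` and the `impl AsRef<[f32]>` bound as an assumption about the arrow-rs signature of that era:

```rust
// Hypothetical stand-in for Float32Array::from_slice; the AsRef bound is assumed.
fn take_slice(values: impl AsRef<[f32]>) -> usize {
    values.as_ref().len()
}

fn main() {
    let (i, batch_size) = (7usize, 4usize);
    // let n = take_slice(&vec![i as f32; batch_size]); // clippy: needless borrow
    let n = take_slice(vec![i as f32; batch_size]); // pass the Vec by value
    assert_eq!(n, batch_size);
}
```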
4 changes: 2 additions & 2 deletions datafusion/core/benches/math_query_sql.rs
@@ -61,8 +61,8 @@ fn create_context(
RecordBatch::try_new(
schema.clone(),
vec![
- Arc::new(Float32Array::from_slice(&vec![i as f32; batch_size])),
- Arc::new(Float64Array::from_slice(&vec![i as f64; batch_size])),
+ Arc::new(Float32Array::from_slice(vec![i as f32; batch_size])),
+ Arc::new(Float64Array::from_slice(vec![i as f64; batch_size])),
],
)
.unwrap()
2 changes: 1 addition & 1 deletion datafusion/core/src/catalog/information_schema.rs
@@ -264,7 +264,7 @@ impl SchemaProvider for InformationSchemaProvider {
}

fn table_exist(&self, name: &str) -> bool {
- return matches!(name.to_ascii_lowercase().as_str(), TABLES | VIEWS | COLUMNS);
+ matches!(name.to_ascii_lowercase().as_str(), TABLES | VIEWS | COLUMNS)
}
}

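This one is clippy's `needless_return`: a trailing `return expr;` in tail position becomes a bare expression (note the semicolon also goes away). A self-contained sketch of the same function shape:

```rust
const TABLES: &str = "tables";
const VIEWS: &str = "views";
const COLUMNS: &str = "columns";

fn table_exist(name: &str) -> bool {
    // return matches!(...); // clippy: unneeded `return` statement
    matches!(name.to_ascii_lowercase().as_str(), TABLES | VIEWS | COLUMNS)
}

fn main() {
    assert!(table_exist("Tables"));
    assert!(!table_exist("functions"));
}
```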
2 changes: 1 addition & 1 deletion datafusion/core/src/datasource/listing/url.rs
@@ -235,7 +235,7 @@ mod tests {
let root = std::env::current_dir().unwrap();
let root = root.to_string_lossy();

- let url = ListingTableUrl::parse(&root).unwrap();
+ let url = ListingTableUrl::parse(root).unwrap();
let child = url.prefix.child("partition").child("file");

let prefix: Vec<_> = url.strip_prefix(&child).unwrap().collect();
6 changes: 3 additions & 3 deletions datafusion/core/src/datasource/mod.rs
@@ -125,8 +125,8 @@ pub async fn get_statistics_with_limit(
};

let statistics = Statistics {
- num_rows: Some(num_rows as usize),
- total_byte_size: Some(total_byte_size as usize),
+ num_rows: Some(num_rows),
+ total_byte_size: Some(total_byte_size),
column_statistics: column_stats,
is_exact,
};
@@ -167,7 +167,7 @@ fn get_col_stats(
None => None,
};
ColumnStatistics {
- null_count: Some(null_counts[i] as usize),
+ null_count: Some(null_counts[i]),
max_value,
min_value,
distinct_count: None,
4 changes: 2 additions & 2 deletions datafusion/core/src/execution/context.rs
@@ -1630,7 +1630,7 @@ impl SessionState {
Some(host) => format!("{}://{}", url.scheme(), host),
None => format!("{}://", url.scheme()),
};
- let path = &url.as_str()[authority.len() as usize..];
+ let path = &url.as_str()[authority.len()..];
let path = object_store::path::Path::parse(path).expect("Can't parse path");
let store = ObjectStoreUrl::parse(authority.as_str())
.expect("Invalid default catalog url");
@@ -2649,7 +2649,7 @@ mod tests {
// generate a partitioned file
for partition in 0..partition_count {
let filename = format!("partition-{}.{}", partition, file_extension);
- let file_path = tmp_dir.path().join(&filename);
+ let file_path = tmp_dir.path().join(filename);
let mut file = File::create(file_path)?;

// generate some data
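The `join(&filename)` → `join(filename)` fix here recurs throughout the csv, json, parquet, and test-helper files below. `Path::join` takes `impl AsRef<Path>`, which `String` already implements, and `filename` is not used again afterwards, so the borrow is needless. A minimal sketch:

```rust
use std::path::Path;

fn main() {
    let tmp_dir = Path::new("/tmp/data");
    let filename = format!("partition-{}.{}", 0, "csv");
    // let file_path = tmp_dir.join(&filename); // clippy: needless borrow
    let file_path = tmp_dir.join(filename); // String moves in via AsRef<Path>
    println!("{}", file_path.display());
}
```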
4 changes: 2 additions & 2 deletions datafusion/core/src/physical_plan/common.rs
@@ -328,8 +328,8 @@ mod tests {
RecordBatch::try_new(
Arc::clone(&schema),
vec![
- Arc::new(Float32Array::from_slice(&vec![i as f32; batch_size])),
- Arc::new(Float64Array::from_slice(&vec![i as f64; batch_size])),
+ Arc::new(Float32Array::from_slice(vec![i as f32; batch_size])),
+ Arc::new(Float64Array::from_slice(vec![i as f64; batch_size])),
],
)
.unwrap()
4 changes: 2 additions & 2 deletions datafusion/core/src/physical_plan/file_format/csv.rs
@@ -252,7 +252,7 @@ pub async fn plan_to_csv(
for i in 0..plan.output_partitioning().partition_count() {
let plan = plan.clone();
let filename = format!("part-{}.csv", i);
- let path = fs_path.join(&filename);
+ let path = fs_path.join(filename);
let file = fs::File::create(path)?;
let mut writer = csv::Writer::new(file);
let task_ctx = Arc::new(TaskContext::from(state));
@@ -539,7 +539,7 @@ mod tests {
// generate a partitioned file
for partition in 0..partition_count {
let filename = format!("partition-{}.{}", partition, file_extension);
- let file_path = tmp_dir.path().join(&filename);
+ let file_path = tmp_dir.path().join(filename);
let mut file = File::create(file_path)?;

// generate some data
2 changes: 1 addition & 1 deletion datafusion/core/src/physical_plan/file_format/json.rs
@@ -215,7 +215,7 @@ pub async fn plan_to_json(
for i in 0..plan.output_partitioning().partition_count() {
let plan = plan.clone();
let filename = format!("part-{}.json", i);
- let path = fs_path.join(&filename);
+ let path = fs_path.join(filename);
let file = fs::File::create(path)?;
let mut writer = json::LineDelimitedWriter::new(file);
let task_ctx = Arc::new(TaskContext::from(state));
4 changes: 2 additions & 2 deletions datafusion/core/src/physical_plan/file_format/parquet.rs
@@ -601,7 +601,7 @@ pub async fn plan_to_parquet(
for i in 0..plan.output_partitioning().partition_count() {
let plan = plan.clone();
let filename = format!("part-{}.parquet", i);
- let path = fs_path.join(&filename);
+ let path = fs_path.join(filename);
let file = fs::File::create(path)?;
let mut writer =
ArrowWriter::try_new(file, plan.schema(), writer_properties.clone())?;
@@ -1535,7 +1535,7 @@ mod tests {
// generate a partitioned file
for partition in 0..partition_count {
let filename = format!("partition-{}.{}", partition, file_extension);
- let file_path = tmp_dir.path().join(&filename);
+ let file_path = tmp_dir.path().join(filename);
let mut file = File::create(file_path)?;

// generate some data
@@ -399,7 +399,7 @@ mod test {
#[should_panic(expected = "building candidate failed")]
fn test_filter_candidate_builder_ignore_projected_columns() {
let testdata = crate::test_util::parquet_test_data();
- let file = std::fs::File::open(&format!("{}/alltypes_plain.parquet", testdata))
+ let file = std::fs::File::open(format!("{}/alltypes_plain.parquet", testdata))
.expect("opening file");

let reader = SerializedFileReader::new(file).expect("creating reader");
@@ -423,7 +423,7 @@
#[test]
fn test_filter_candidate_builder_ignore_complex_types() {
let testdata = crate::test_util::parquet_test_data();
- let file = std::fs::File::open(&format!("{}/list_columns.parquet", testdata))
+ let file = std::fs::File::open(format!("{}/list_columns.parquet", testdata))
.expect("opening file");

let reader = SerializedFileReader::new(file).expect("creating reader");
@@ -447,7 +447,7 @@
#[test]
fn test_filter_candidate_builder_rewrite_missing_column() {
let testdata = crate::test_util::parquet_test_data();
- let file = std::fs::File::open(&format!("{}/alltypes_plain.parquet", testdata))
+ let file = std::fs::File::open(format!("{}/alltypes_plain.parquet", testdata))
.expect("opening file");

let reader = SerializedFileReader::new(file).expect("creating reader");
4 changes: 2 additions & 2 deletions datafusion/core/src/test/mod.rs
@@ -106,7 +106,7 @@ pub fn partitioned_file_groups(
.get_ext_with_compression(file_compression_type.to_owned())
.unwrap()
);
- let filename = tmp_dir.join(&filename);
+ let filename = tmp_dir.join(filename);

let file = File::create(&filename).unwrap();

@@ -125,7 +125,7 @@ pub fn partitioned_file_groups(
files.push(filename);
}

- let f = File::open(&path)?;
+ let f = File::open(path)?;
let f = BufReader::new(f);
for (i, line) in f.lines().enumerate() {
let line = line.unwrap();
2 changes: 1 addition & 1 deletion datafusion/core/tests/path_partition.rs
@@ -548,7 +548,7 @@ impl ObjectStore for MirroringObjectStore {
) -> object_store::Result<Bytes> {
self.files.iter().find(|x| *x == location.as_ref()).unwrap();
let path = std::path::PathBuf::from(&self.mirrored_file);
- let mut file = File::open(&path).unwrap();
+ let mut file = File::open(path).unwrap();
file.seek(SeekFrom::Start(range.start as u64)).unwrap();

let to_read = range.end - range.start;
2 changes: 1 addition & 1 deletion datafusion/core/tests/provider_filter_pushdown.rs
@@ -152,7 +152,7 @@ impl TableProvider for CustomProvider {
Expr::Literal(ScalarValue::Int8(Some(i))) => *i as i64,
Expr::Literal(ScalarValue::Int16(Some(i))) => *i as i64,
Expr::Literal(ScalarValue::Int32(Some(i))) => *i as i64,
- Expr::Literal(ScalarValue::Int64(Some(i))) => *i as i64,
+ Expr::Literal(ScalarValue::Int64(Some(i))) => *i,
Expr::Cast(Cast { expr, data_type: _ }) => match expr.deref() {
Expr::Literal(lit_value) => match lit_value {
ScalarValue::Int8(Some(v)) => *v as i64,
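Only the `Int64` arm changes: `i` is already `i64` there, so `*i as i64` is an identity cast, while the `Int8`/`Int16`/`Int32` arms genuinely need the widening cast and keep it. A sketch of that distinction, using a simplified enum rather than DataFusion's `ScalarValue`:

```rust
// Simplified stand-in for ScalarValue's integer variants.
enum Scalar {
    Int8(i8),
    Int64(i64),
}

fn to_i64(s: &Scalar) -> i64 {
    match s {
        Scalar::Int8(i) => *i as i64, // widening cast: still required
        // Scalar::Int64(i) => *i as i64, // clippy: casting i64 to i64 is unnecessary
        Scalar::Int64(i) => *i,
    }
}

fn main() {
    assert_eq!(to_i64(&Scalar::Int8(5)), 5);
    assert_eq!(to_i64(&Scalar::Int64(-1)), -1);
}
```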
2 changes: 1 addition & 1 deletion datafusion/core/tests/sql/aggregates.rs
@@ -2185,7 +2185,7 @@ async fn run_count_distinct_integers_aggregated_scenario(

for (i, partition) in partitions.iter().enumerate() {
let filename = format!("partition-{}.csv", i);
- let file_path = tmp_dir.path().join(&filename);
+ let file_path = tmp_dir.path().join(filename);
let mut file = File::create(file_path)?;
for row in partition {
let row_str = format!(
2 changes: 1 addition & 1 deletion datafusion/core/tests/sql/group_by.rs
@@ -501,7 +501,7 @@ async fn group_by_date_trunc() -> Result<()> {
// generate a partitioned file
for partition in 0..4 {
let filename = format!("partition-{}.{}", partition, "csv");
- let file_path = tmp_dir.path().join(&filename);
+ let file_path = tmp_dir.path().join(filename);
let mut file = File::create(file_path)?;

// generate some data
2 changes: 1 addition & 1 deletion datafusion/core/tests/sql/mod.rs
@@ -871,7 +871,7 @@ fn populate_csv_partitions(
// generate a partitioned file
for partition in 0..partition_count {
let filename = format!("partition-{}.{}", partition, file_extension);
- let file_path = tmp_dir.path().join(&filename);
+ let file_path = tmp_dir.path().join(filename);
let mut file = File::create(file_path)?;

// generate some data
2 changes: 1 addition & 1 deletion datafusion/core/tests/sql/parquet_schema.rs
@@ -186,7 +186,7 @@ fn write_files(table_path: &Path, schemas: Vec<Schema>) {
for (i, schema) in schemas.into_iter().enumerate() {
let schema = Arc::new(schema);
let filename = format!("part-{}.parquet", i);
- let path = table_path.join(&filename);
+ let path = table_path.join(filename);
let file = fs::File::create(path).unwrap();
let mut writer = ArrowWriter::try_new(file, schema.clone(), None).unwrap();

2 changes: 1 addition & 1 deletion datafusion/core/tests/sql/partitioned_csv.rs
@@ -60,7 +60,7 @@ fn populate_csv_partitions(
// generate a partitioned file
for partition in 0..partition_count {
let filename = format!("partition-{}.{}", partition, file_extension);
- let file_path = tmp_dir.path().join(&filename);
+ let file_path = tmp_dir.path().join(filename);
let mut file = std::fs::File::create(file_path)?;

// generate some data
2 changes: 1 addition & 1 deletion datafusion/optimizer/src/common_subexpr_eliminate.rs
@@ -543,7 +543,7 @@ impl ExprRewriter for CommonSubexprRewriter<'_> {
// Alias this `Column` expr to it original "expr name",
// `projection_push_down` optimizer use "expr name" to eliminate useless
// projections.
- Ok(col(id).alias(&expr_name))
+ Ok(col(id).alias(expr_name))
}
}

2 changes: 1 addition & 1 deletion datafusion/optimizer/src/filter_push_down.rs
@@ -716,7 +716,7 @@ fn optimize(plan: &LogicalPlan, mut state: State) -> Result<LogicalPlan> {
// replace keys in join_cols_to_replace with values in resulting column
// set
.filter(|c| !join_cols_to_replace.contains_key(c))
- .chain(join_cols_to_replace.iter().map(|(_, v)| (*v).clone()))
+ .chain(join_cols_to_replace.values().map(|v| (*v).clone()))
.collect();

Some(Ok((join_side_predicate, join_side_columns)))
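Iterating a map and discarding the key with `.iter().map(|(_, v)| ...)` is what clippy nudges toward `.values()`. A sketch with simplified types standing in for the planner's column map:

```rust
use std::collections::HashMap;

fn main() {
    let mut join_cols_to_replace: HashMap<&str, &str> = HashMap::new();
    join_cols_to_replace.insert("t1.id", "id");

    // Flagged form: the key is bound only to be ignored.
    // let cols: Vec<String> = join_cols_to_replace.iter().map(|(_, v)| v.to_string()).collect();
    let cols: Vec<String> = join_cols_to_replace.values().map(|v| v.to_string()).collect();
    assert_eq!(cols, vec!["id".to_string()]);
}
```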
@@ -128,7 +128,7 @@ fn validate_input_percentile_expr(expr: &Arc<dyn PhysicalExpr>) -> Result<f64> {
.value();
let percentile = match lit {
ScalarValue::Float32(Some(q)) => *q as f64,
- ScalarValue::Float64(Some(q)) => *q as f64,
+ ScalarValue::Float64(Some(q)) => *q,
got => return Err(DataFusionError::NotImplemented(format!(
"Percentile value for 'APPROX_PERCENTILE_CONT' must be Float32 or Float64 literal (got data type {})",
got.get_datatype()
@@ -388,7 +388,7 @@ impl Accumulator for ApproxPercentileAccumulator {
DataType::UInt32 => ScalarValue::UInt32(Some(q as u32)),
DataType::UInt64 => ScalarValue::UInt64(Some(q as u64)),
DataType::Float32 => ScalarValue::Float32(Some(q as f32)),
- DataType::Float64 => ScalarValue::Float64(Some(q as f64)),
+ DataType::Float64 => ScalarValue::Float64(Some(q)),
v => unreachable!("unexpected return type {:?}", v),
})
}
8 changes: 3 additions & 5 deletions datafusion/physical-expr/src/aggregate/tdigest.rs
@@ -420,8 +420,7 @@ impl TDigest {
let mut compressed: Vec<Centroid> = Vec::with_capacity(max_size);

let mut k_limit: f64 = 1.0;
- let mut q_limit_times_count =
-     Self::k_to_q(k_limit, max_size as f64) * (count as f64);
+ let mut q_limit_times_count = Self::k_to_q(k_limit, max_size as f64) * (count);

let mut iter_centroids = centroids.iter_mut();
let mut curr = iter_centroids.next().unwrap();
@@ -440,8 +439,7 @@
sums_to_merge = 0_f64;
weights_to_merge = 0_f64;
compressed.push(curr.clone());
- q_limit_times_count =
-     Self::k_to_q(k_limit, max_size as f64) * (count as f64);
+ q_limit_times_count = Self::k_to_q(k_limit, max_size as f64) * (count);
k_limit += 1.0;
curr = centroid;
}
@@ -452,7 +450,7 @@
compressed.shrink_to_fit();
compressed.sort();

- result.count = count as f64;
+ result.count = count;
result.min = min;
result.max = max;
result.centroids = compressed;
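In all three tdigest hunks, `count` is already an `f64`, so `count as f64` was an identity cast; `max_size as f64` stays because `max_size` is a `usize`. Dropping the cast also shortens the expression enough for rustfmt to rejoin it onto one line, which is the "fix fmt" part of the commit message. A sketch with a simplified stand-in for `TDigest::k_to_q`:

```rust
// Hypothetical stand-in for TDigest::k_to_q, for illustration only.
fn k_to_q(k: f64, d: f64) -> f64 {
    (k / d).clamp(0.0, 1.0)
}

fn main() {
    let (k_limit, max_size, count): (f64, usize, f64) = (1.0, 100, 42.0);
    // Before: k_to_q(k_limit, max_size as f64) * (count as f64) — the second
    // cast is f64 -> f64 and gets flagged; the first is usize -> f64 and stays.
    let q_limit_times_count = k_to_q(k_limit, max_size as f64) * count;
    println!("{q_limit_times_count}");
}
```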
10 changes: 5 additions & 5 deletions datafusion/physical-expr/src/expressions/binary.rs
@@ -2437,10 +2437,10 @@ mod tests {
let value: i128 = 123;
let decimal_array = Arc::new(create_decimal_array(
&[
- Some(value as i128), // 1.23
+ Some(value), // 1.23
None,
Some((value - 1) as i128), // 1.22
- Some((value + 1) as i128), // 1.24
+ Some(value + 1), // 1.24
],
10,
2,
@@ -2563,7 +2563,7 @@
&[
Some(value as i128), // 1.23
None,
- Some((value - 1) as i128), // 1.22
+ Some(value - 1), // 1.22
Some((value + 1) as i128), // 1.24
],
10,
@@ -2680,10 +2680,10 @@
let value: i128 = 123;
let decimal_array = Arc::new(create_decimal_array(
&[
- Some(value as i128), // 1.23
+ Some(value), // 1.23
None,
Some((value - 1) as i128), // 1.22
- Some((value + 1) as i128), // 1.24
+ Some(value + 1), // 1.24
],
10,
2,
2 changes: 1 addition & 1 deletion datafusion/physical-expr/src/expressions/datetime.rs
@@ -373,7 +373,7 @@ mod tests {
match res {
ColumnarValue::Scalar(ScalarValue::Date64(Some(d))) => {
let epoch = NaiveDate::from_ymd_opt(1970, 1, 1).unwrap();
- let res = epoch.add(Duration::milliseconds(d as i64));
+ let res = epoch.add(Duration::milliseconds(d));
assert_eq!(format!("{:?}", res).as_str(), "1969-12-16");
}
_ => Err(DataFusionError::NotImplemented(
6 changes: 3 additions & 3 deletions datafusion/sql/src/planner.rs
@@ -209,7 +209,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
.project(
plan.schema().fields().iter().zip(columns.into_iter()).map(
|(field, ident)| {
- col(field.name()).alias(&normalize_ident(&ident))
+ col(field.name()).alias(normalize_ident(&ident))
},
),
)
@@ -717,7 +717,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
JoinConstraint::Using(idents) => {
let keys: Vec<Column> = idents
.into_iter()
- .map(|x| Column::from_name(&normalize_ident(&x)))
+ .map(|x| Column::from_name(normalize_ident(&x)))
.collect();
LogicalPlanBuilder::from(left)
.join_using(&right, join_type, keys)?
@@ -840,7 +840,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
Ok(LogicalPlanBuilder::from(plan.clone())
.project_with_alias(
plan.schema().fields().iter().zip(columns_alias.iter()).map(
- |(field, ident)| col(field.name()).alias(&normalize_ident(ident)),
+ |(field, ident)| col(field.name()).alias(normalize_ident(ident)),
),
Some(normalize_ident(&alias.name)),
)?
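The planner changes drop borrows of freshly built `String`s passed to `alias` and `Column::from_name`. Assuming those parameters are `impl Into<String>` (an assumption about the DataFusion API of that era, not confirmed by the diff), the owned value can move straight in. A hedged sketch:

```rust
// Hypothetical stand-in mirroring an `impl Into<String>` parameter.
fn alias(name: impl Into<String>) -> String {
    name.into()
}

fn normalize_ident(ident: &str) -> String {
    ident.to_ascii_lowercase()
}

fn main() {
    // let a = alias(&normalize_ident("ColumnA")); // clippy: needless borrow
    let a = alias(normalize_ident("ColumnA")); // move the String directly
    assert_eq!(a, "columna");
}
```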
