style: fix warnings on latest clippy (#5876)
Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
xxchan and mergify[bot] committed Oct 17, 2022
1 parent 7072ee1 commit a00021f
Showing 45 changed files with 90 additions and 119 deletions.
4 changes: 2 additions & 2 deletions src/batch/src/executor/join/lookup_join.rs
@@ -224,7 +224,7 @@ impl<C: BatchTaskContext> LookupExecutorBuilder for InnerSideExecutorBuilder<C>
let list = self
.pu_to_scan_range_mapping
.entry(parallel_unit_id)
- .or_insert(vec![]);
+ .or_default();
list.push((scan_range, vnode));

Ok(())
@@ -585,7 +585,7 @@ impl BoxedExecutorBuilder for LookupJoinExecutorBuilder {

let inner_side_key_types = inner_side_key_idxs
.iter()
- .map(|&i| inner_side_schema.fields[i as usize].data_type.clone())
+ .map(|&i| inner_side_schema.fields[i].data_type.clone())
.collect_vec();

let null_safe = lookup_join_node.get_null_safe().to_vec();
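Note on this change: replacing `.or_insert(vec![])` with `.or_default()` follows clippy's preference for `Entry::or_default` when the inserted value is just the type's default, and the `[i as usize]` → `[i]` edit drops a cast to a type the index already has. A minimal standalone sketch of the `or_default` pattern (the map and key names are illustrative, not taken from the commit):

```rust
use std::collections::HashMap;

fn main() {
    // Hypothetical stand-in for the pu_to_scan_range_mapping field.
    let mut scan_ranges: HashMap<u32, Vec<(u64, u16)>> = HashMap::new();
    let parallel_unit_id = 42;

    // Before: `.entry(id).or_insert(vec![])` spells out the default value.
    // After: `.or_default()` inserts `Vec::default()` only if the key is vacant.
    let list = scan_ranges.entry(parallel_unit_id).or_default();
    list.push((1, 0));

    assert_eq!(scan_ranges[&parallel_unit_id], vec![(1, 0)]);
}
```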
4 changes: 2 additions & 2 deletions src/common/src/array/mod.rs
@@ -659,7 +659,7 @@ mod tests {
fn test_filter() {
let mut builder = PrimitiveArrayBuilder::<i32>::new(0);
for i in 0..=60 {
- builder.append(Some(i as i32));
+ builder.append(Some(i));
}
let array = filter(&builder.finish(), |x| x.unwrap_or(0) >= 60).unwrap();
assert_eq!(array.iter().collect::<Vec<Option<i32>>>(), vec![Some(60)]);
@@ -692,7 +692,7 @@ mod tests {
fn test_vectorized_add() {
let mut builder = PrimitiveArrayBuilder::<i32>::new(0);
for i in 0..=60 {
- builder.append(Some(i as i32));
+ builder.append(Some(i));
}
let array1 = builder.finish();

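Note on this change: most of the small edits in this commit, like `builder.append(Some(i as i32))` → `builder.append(Some(i))`, remove casts of a value to its own type, which current clippy reports as `unnecessary_cast`. A minimal sketch of the lint outside the RisingWave codebase:

```rust
fn main() {
    let mut values: Vec<i32> = Vec::new();
    for i in 0..=60 {
        // `i` is already inferred as i32 from the Vec<i32> it feeds, so the old
        // `values.push(i as i32)` contained a no-op cast (clippy::unnecessary_cast).
        values.push(i);
    }
    assert_eq!(values.len(), 61);
}
```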
2 changes: 1 addition & 1 deletion src/common/src/buffer/bitmap.rs
@@ -212,7 +212,7 @@ impl Bitmap {
}

fn num_bytes(num_bits: usize) -> usize {
- num_bits / 8 + if num_bits % 8 > 0 { 1 } else { 0 }
+ num_bits / 8 + usize::from(num_bits % 8 > 0)
}

/// Returns the number of valid bits in the bitmap,
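Note on this change: an `if cond { 1 } else { 0 }` that only converts a boolean to an integer is what clippy's `bool_to_int_with_if` lint targets; `usize::from(cond)` does the same conversion explicitly. A self-contained version of the byte-count helper:

```rust
/// Number of whole bytes needed to store `num_bits` bits (rounding up).
fn num_bytes(num_bits: usize) -> usize {
    // usize::from(bool) is 1 for true and 0 for false, replacing the
    // `if num_bits % 8 > 0 { 1 } else { 0 }` form flagged by clippy.
    num_bits / 8 + usize::from(num_bits % 8 > 0)
}

fn main() {
    assert_eq!(num_bytes(0), 0);
    assert_eq!(num_bytes(8), 1);
    assert_eq!(num_bytes(9), 2);
}
```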
4 changes: 2 additions & 2 deletions src/common/src/cache.rs
@@ -338,12 +338,12 @@ unsafe impl<K: LruKey, T: LruValue> Send for LruCacheShard<K, T> {}

impl<K: LruKey, T: LruValue> LruCacheShard<K, T> {
fn new(capacity: usize, object_capacity: usize) -> Self {
- let mut lru = Box::new(LruHandle::default());
+ let mut lru = Box::<LruHandle<K, T>>::default();
lru.prev = lru.as_mut();
lru.next = lru.as_mut();
let mut object_pool = Vec::with_capacity(object_capacity);
for _ in 0..object_capacity {
- object_pool.push(Box::new(LruHandle::default()));
+ object_pool.push(Box::default());
}
Self {
capacity,
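Note on this change: `Box::new(T::default())` can be written as `Box::<T>::default()` (or just `Box::default()` where the type is inferred), which is what clippy's `box_default` lint suggests. A sketch with a stand-in struct, since `LruHandle` itself is not reproduced here:

```rust
// Illustrative stand-in for a pooled handle type; not the real LruHandle.
#[derive(Default)]
struct Handle {
    key: u64,
    value: u64,
}

fn main() {
    // Before: Box::new(Handle::default())
    // After: Box::<Handle>::default() builds the same boxed default value.
    let head = Box::<Handle>::default();

    // Where the element type is fixed by the collection, a bare Box::default() suffices.
    let mut pool: Vec<Box<Handle>> = Vec::with_capacity(4);
    for _ in 0..4 {
        pool.push(Box::default());
    }

    assert_eq!(head.key + head.value, 0);
    assert_eq!(pool.len(), 4);
}
```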
16 changes: 8 additions & 8 deletions src/common/src/types/interval.rs
@@ -138,7 +138,7 @@ impl IntervalUnit {
IntervalUnit {
months: (months as i32),
days: (days as i32),
- ms: (remaining_ms as i64),
+ ms: remaining_ms,
}
}

@@ -856,8 +856,8 @@ mod tests {
];

for (lhs, rhs, expected) in cases {
- let lhs = IntervalUnit::new(lhs.0 as i32, lhs.1 as i32, lhs.2 as i64);
- let rhs = IntervalUnit::new(rhs.0 as i32, rhs.1 as i32, rhs.2 as i64);
+ let lhs = IntervalUnit::new(lhs.0, lhs.1, lhs.2 as i64);
+ let rhs = IntervalUnit::new(rhs.0, rhs.1, rhs.2 as i64);
let result = std::panic::catch_unwind(|| {
let actual = lhs.exact_div(&rhs);
assert_eq!(actual, expected);
@@ -886,22 +886,22 @@ mod tests {
];

for (lhs, rhs, expected) in cases_int {
- let lhs = IntervalUnit::new(lhs.0 as i32, lhs.1 as i32, lhs.2 as i64);
- let expected = expected.map(|x| IntervalUnit::new(x.0 as i32, x.1 as i32, x.2 as i64));
+ let lhs = IntervalUnit::new(lhs.0, lhs.1, lhs.2 as i64);
+ let expected = expected.map(|x| IntervalUnit::new(x.0, x.1, x.2 as i64));

let actual = lhs.div_float(rhs as i16);
assert_eq!(actual, expected);

- let actual = lhs.div_float(rhs as i32);
+ let actual = lhs.div_float(rhs);
assert_eq!(actual, expected);

let actual = lhs.div_float(rhs as i64);
assert_eq!(actual, expected);
}

for (lhs, rhs, expected) in cases_float {
- let lhs = IntervalUnit::new(lhs.0 as i32, lhs.1 as i32, lhs.2 as i64);
- let expected = expected.map(|x| IntervalUnit::new(x.0 as i32, x.1 as i32, x.2 as i64));
+ let lhs = IntervalUnit::new(lhs.0, lhs.1, lhs.2 as i64);
+ let expected = expected.map(|x| IntervalUnit::new(x.0, x.1, x.2 as i64));

let actual = lhs.div_float(OrderedFloat::<f32>(rhs));
assert_eq!(actual, expected);
2 changes: 1 addition & 1 deletion src/common/src/types/ordered_float.rs
@@ -944,7 +944,7 @@ fn raw_double_bits<F: Float>(f: &F) -> u64 {
}

let exp_u64 = exp as u16 as u64;
- let sign_u64 = if sign > 0 { 1u64 } else { 0u64 };
+ let sign_u64 = (sign > 0) as u64;
(man & MAN_MASK) | ((exp_u64 << 52) & EXP_MASK) | ((sign_u64 << 63) & SIGN_MASK)
}

8 changes: 3 additions & 5 deletions src/connector/src/source/nexmark/source/generator.rs
@@ -60,8 +60,7 @@ impl NexmarkEventGenerator {

if let Some(event) = last_event.take() {
msgs.push(
- NexmarkMessage::new(self.split_id.clone(), self.events_so_far as u64, event)
- .into(),
+ NexmarkMessage::new(self.split_id.clone(), self.events_so_far, event).into(),
);
}

@@ -101,8 +100,7 @@ impl NexmarkEventGenerator {
}

msgs.push(
- NexmarkMessage::new(self.split_id.clone(), self.events_so_far as u64, event)
- .into(),
+ NexmarkMessage::new(self.split_id.clone(), self.events_so_far, event).into(),
);
}

@@ -114,7 +112,7 @@ impl NexmarkEventGenerator {

if !self.use_real_time && self.min_event_gap_in_ns > 0 {
tokio::time::sleep(Duration::from_nanos(
- (self.events_so_far - old_events_so_far) as u64 * self.min_event_gap_in_ns,
+ (self.events_so_far - old_events_so_far) * self.min_event_gap_in_ns,
))
.await;
}
2 changes: 1 addition & 1 deletion src/expr/src/expr/expr_unary.rs
@@ -343,7 +343,7 @@ mod tests {
for i in 0..100i16 {
if i % 2 == 0 {
target.push(Some(i as i32));
- input.push(Some(i as i16));
+ input.push(Some(i));
} else {
input.push(None);
target.push(None);
4 changes: 2 additions & 2 deletions src/expr/src/vector_op/cast.rs
@@ -65,7 +65,7 @@ fn parse_naive_datetime(s: &str) -> Result<NaiveDateTime> {
res.time.hour as u32,
res.time.minute as u32,
res.time.second as u32,
- res.time.microsecond as u32,
+ res.time.microsecond,
);
Ok(NaiveDateTime::new(date, time))
} else {
@@ -94,7 +94,7 @@ fn parse_naive_time(s: &str) -> Result<NaiveTime> {
res.hour as u32,
res.minute as u32,
res.second as u32,
- res.microsecond as u32,
+ res.microsecond,
))
}

6 changes: 3 additions & 3 deletions src/frontend/src/catalog/schema_catalog.rs
@@ -186,7 +186,7 @@ impl SchemaCatalog {

/// Iterate all indices
pub fn iter_index(&self) -> impl Iterator<Item = &Arc<IndexCatalog>> {
- self.index_by_name.iter().map(|(_, v)| v)
+ self.index_by_name.values()
}

/// Iterate all sources, including the materialized sources.
@@ -206,11 +206,11 @@ impl SchemaCatalog {
}

pub fn iter_sink(&self) -> impl Iterator<Item = &Arc<SinkCatalog>> {
- self.sink_by_name.iter().map(|(_, v)| v)
+ self.sink_by_name.values()
}

pub fn iter_system_tables(&self) -> impl Iterator<Item = &SystemCatalog> {
- self.system_table_by_name.iter().map(|(_, v)| v)
+ self.system_table_by_name.values()
}

pub fn get_table_by_name(&self, table_name: &str) -> Option<&Arc<TableCatalog>> {
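Note on this change: when a map iterator is only used for its values, clippy (most likely via the newer `iter_kv_map` lint) suggests `.values()` instead of `.iter().map(|(_, v)| v)`. A minimal sketch over a plain `HashMap`:

```rust
use std::collections::HashMap;

fn main() {
    let mut index_by_name: HashMap<String, u32> = HashMap::new();
    index_by_name.insert("idx_a".to_string(), 1);
    index_by_name.insert("idx_b".to_string(), 2);

    // Before: iterate (key, value) pairs and discard the key.
    // After: `.values()` states the intent directly.
    let mut ids: Vec<u32> = index_by_name.values().copied().collect();
    ids.sort_unstable();
    assert_eq!(ids, vec![1, 2]);
}
```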
2 changes: 1 addition & 1 deletion src/frontend/src/catalog/table_catalog.rs
@@ -164,7 +164,7 @@ impl TableCatalog {

pub fn to_prost(&self, schema_id: SchemaId, database_id: DatabaseId) -> ProstTable {
ProstTable {
- id: self.id.table_id as u32,
+ id: self.id.table_id,
schema_id,
database_id,
name: self.name.clone(),
2 changes: 1 addition & 1 deletion src/frontend/src/handler/drop_schema.rs
@@ -60,7 +60,7 @@ pub async fn handle_drop_schema(
};
let schema_id = {
// If the mode is `Restrict` or `None`, the `schema` need to be empty.
- if Some(DropMode::Restrict) == mode || None == mode {
+ if Some(DropMode::Restrict) == mode || mode.is_none() {
if let Some(table) = schema.iter_table().next() {
return Err(CatalogError::NotEmpty(
"schema",
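Note on this change: comparing an `Option` against `None` with `==` is flagged by clippy's `partialeq_to_none` lint; `.is_none()` says the same thing without needing a `PartialEq` comparison. The same rewrite appears in several planner files below. A small sketch with a simplified `DropMode`:

```rust
#[derive(PartialEq)]
enum DropMode {
    Restrict,
    Cascade,
}

// Restrict-or-unspecified check, mirroring the handle_drop_schema condition.
fn requires_empty_schema(mode: &Option<DropMode>) -> bool {
    // Before: `Some(DropMode::Restrict) == mode || None == mode`
    // After: the None comparison becomes `.is_none()`.
    *mode == Some(DropMode::Restrict) || mode.is_none()
}

fn main() {
    assert!(requires_empty_schema(&None));
    assert!(requires_empty_schema(&Some(DropMode::Restrict)));
    assert!(!requires_empty_schema(&Some(DropMode::Cascade)));
}
```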
4 changes: 1 addition & 3 deletions src/frontend/src/optimizer/plan_node/batch_sort_agg.rs
@@ -122,9 +122,7 @@ impl ToBatchProst for BatchSortAgg {
.group_key()
.iter()
.clone()
- .map(|idx| {
- ExprImpl::InputRef(Box::new(InputRef::new(*idx as usize, DataType::Int32)))
- })
+ .map(|idx| ExprImpl::InputRef(Box::new(InputRef::new(*idx, DataType::Int32))))
.map(|expr| expr.to_expr_proto())
.collect::<Vec<ExprNode>>(),
})
8 changes: 4 additions & 4 deletions src/frontend/src/optimizer/plan_node/logical_join.rs
@@ -1140,13 +1140,13 @@ impl ToStream for LogicalJoin {
.logical_pk()
.iter()
.cloned()
- .filter(|i| l2i.try_map(*i) == None);
+ .filter(|i| l2i.try_map(*i).is_none());

let right_to_add = right
.logical_pk()
.iter()
.cloned()
- .filter(|i| r2i.try_map(*i) == None)
+ .filter(|i| r2i.try_map(*i).is_none())
.map(|i| i + left_len);

// NOTE(st1page): add join keys in the pk_indices a work around before we really have stream
@@ -1159,15 +1159,15 @@
eq_predicate
.left_eq_indexes()
.into_iter()
- .filter(|i| l2i.try_map(*i) == None),
+ .filter(|i| l2i.try_map(*i).is_none()),
)
.unique();
let right_to_add = right_to_add
.chain(
eq_predicate
.right_eq_indexes()
.into_iter()
- .filter(|i| r2i.try_map(*i) == None)
+ .filter(|i| r2i.try_map(*i).is_none())
.map(|i| i + left_len),
)
.unique();
5 changes: 4 additions & 1 deletion src/frontend/src/optimizer/plan_node/logical_project.rs
@@ -438,7 +438,7 @@ impl ToStream for LogicalProject {
// Add missing columns of input_pk into the select list.
let input_pk = input.logical_pk();
let i2o = Self::i2o_col_mapping_inner(input.schema().len(), proj.exprs());
- let col_need_to_add = input_pk.iter().cloned().filter(|i| i2o.try_map(*i) == None);
+ let col_need_to_add = input_pk
+ .iter()
+ .cloned()
+ .filter(|i| i2o.try_map(*i).is_none());
let input_schema = input.schema();
let exprs =
proj.exprs()
5 changes: 4 additions & 1 deletion src/frontend/src/optimizer/plan_node/logical_project_set.rs
@@ -339,7 +339,7 @@ impl ToStream for LogicalProjectSet {
// Add missing columns of input_pk into the select list.
let input_pk = input.logical_pk();
let i2o = Self::i2o_col_mapping_inner(input.schema().len(), project_set.select_list());
- let col_need_to_add = input_pk.iter().cloned().filter(|i| i2o.try_map(*i) == None);
+ let col_need_to_add = input_pk
+ .iter()
+ .cloned()
+ .filter(|i| i2o.try_map(*i).is_none());
let input_schema = input.schema();
let select_list =
project_set
40 changes: 11 additions & 29 deletions src/frontend/src/optimizer/rule/apply_join.rs
@@ -199,23 +199,14 @@ impl ApplyJoinRule {
let mut d_t1_bit_set = FixedBitSet::with_capacity(apply_len);
d_t1_bit_set.set_range(0..apply_left_len + join_left_len, true);

- for (key, group) in &apply_on.into_iter().group_by(|expr| {
+ let (other, left): (Vec<_>, Vec<_>) = apply_on.into_iter().partition(|expr| {
let mut visitor = CollectInputRef::with_capacity(apply_len);
visitor.visit_expr(expr);
let collect_bit_set = FixedBitSet::from(visitor);
- if collect_bit_set.is_subset(&d_t1_bit_set) {
- 0
- } else {
- 1
- }
- }) {
- let vec = group.collect_vec();
- match key {
- 0 => left_apply_condition.extend(vec),
- 1 => other_condition.extend(vec),
- _ => unreachable!(),
- }
- }
+ collect_bit_set.is_subset(&d_t1_bit_set)
+ });
+ left_apply_condition.extend(left);
+ other_condition.extend(other);
}
JoinType::RightSemi | JoinType::RightAnti | JoinType::Unspecified => unreachable!(),
}
@@ -297,23 +288,14 @@ impl ApplyJoinRule {
d_t2_bit_set.set_range(0..apply_left_len, true);
d_t2_bit_set.set_range(apply_left_len + join_left_len..apply_len, true);

- for (key, group) in &apply_on.into_iter().group_by(|expr| {
+ let (other, right): (Vec<_>, Vec<_>) = apply_on.into_iter().partition(|expr| {
let mut visitor = CollectInputRef::with_capacity(apply_len);
visitor.visit_expr(expr);
- let collect_bit_set = FixedBitSet::from(visitor);
- if collect_bit_set.is_subset(&d_t2_bit_set) {
- 0
- } else {
- 1
- }
- }) {
- let vec = group.collect_vec();
- match key {
- 0 => right_apply_condition.extend(vec),
- 1 => other_condition.extend(vec),
- _ => unreachable!(),
- }
- }
+ let collected = FixedBitSet::from(visitor);
+ collected.is_subset(&d_t2_bit_set)
+ });
+ right_apply_condition.extend(right);
+ other_condition.extend(other);

// rewrite right condition
let mut right_apply_condition_rewriter = Rewriter {
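Note on this change: the old code grouped predicates with itertools' `group_by` on a 0/1 key and then matched on the key to route each group; the new code gets the same two buckets from a single `Iterator::partition` call. A simplified sketch of that reshaping, using integers in place of the expression type and a plain closure in place of the input-ref bitset test:

```rust
fn main() {
    let apply_on = vec![1, 2, 3, 4, 5, 6];

    let mut left_apply_condition: Vec<i32> = Vec::new();
    let mut other_condition: Vec<i32> = Vec::new();

    // Stand-in for "all referenced columns fall inside the domain bitset".
    let refers_only_to_left = |expr: &i32| expr % 2 == 0;

    // Before (roughly): group_by(|e| if pred(e) { 0 } else { 1 }) plus a match on the key.
    // After: partition() returns (elements where the predicate is true, the rest) in one pass.
    let (left, other): (Vec<_>, Vec<_>) = apply_on.into_iter().partition(refers_only_to_left);
    left_apply_condition.extend(left);
    other_condition.extend(other);

    assert_eq!(left_apply_condition, vec![2, 4, 6]);
    assert_eq!(other_condition, vec![1, 3, 5]);
}
```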
2 changes: 1 addition & 1 deletion src/frontend/src/scheduler/plan_fragmenter.rs
@@ -529,7 +529,7 @@ fn vnode_mapping_to_owner_mapping(vnode_mapping: VnodeMapping) -> HashMap<Parall

fn bitmap_with_single_vnode(vnode: usize, num_vnodes: usize) -> Bitmap {
let mut bitmap = BitmapBuilder::zeroed(num_vnodes);
- bitmap.set(vnode as usize, true);
+ bitmap.set(vnode, true);
bitmap.finish()
}

3 changes: 1 addition & 2 deletions src/frontend/src/session.rs
@@ -387,9 +387,8 @@ impl FrontendEnv {
&self.user_info_reader
}

- #[expect(clippy::explicit_auto_deref)]
pub fn worker_node_manager(&self) -> &WorkerNodeManager {
- &*self.worker_node_manager
+ &self.worker_node_manager
}

pub fn worker_node_manager_ref(&self) -> WorkerNodeManagerRef {
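Note on this change: dropping `#[expect(clippy::explicit_auto_deref)]` goes together with removing the `&*` itself: deref coercion already turns a `&Arc<T>` into a `&T` at the return position, so neither the explicit re-borrow nor the lint expectation is needed. A sketch of the coercion; the `Arc<WorkerNodeManager>` field shape is an assumption here (suggested by the `worker_node_manager_ref` accessor next to it), not confirmed by the diff:

```rust
use std::sync::Arc;

struct WorkerNodeManager {
    worker_count: usize,
}

struct FrontendEnv {
    worker_node_manager: Arc<WorkerNodeManager>,
}

impl FrontendEnv {
    // `&self.worker_node_manager` is a `&Arc<WorkerNodeManager>`; deref coercion
    // converts it to `&WorkerNodeManager` to satisfy the return type, so the
    // explicit `&*self.worker_node_manager` form is redundant.
    fn worker_node_manager(&self) -> &WorkerNodeManager {
        &self.worker_node_manager
    }
}

fn main() {
    let env = FrontendEnv {
        worker_node_manager: Arc::new(WorkerNodeManager { worker_count: 3 }),
    };
    assert_eq!(env.worker_node_manager().worker_count, 3);
}
```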
6 changes: 1 addition & 5 deletions src/frontend/src/utils/condition.rs
@@ -230,11 +230,7 @@ impl Condition {
pub fn split_disjoint(self, columns: &FixedBitSet) -> (Self, Self) {
self.group_by::<_, 2>(|expr| {
let input_bits = expr.collect_input_refs(columns.len());
- if input_bits.is_disjoint(columns) {
- 1
- } else {
- 0
- }
+ input_bits.is_disjoint(columns) as usize
})
.into_iter()
.next_tuple()
@@ -63,7 +63,7 @@ impl CompactionPicker for LevelCompactionPicker {
table_infos: l0.sub_levels[0].table_infos.clone(),
},
InputLevel {
- level_idx: target_level as u32,
+ level_idx: target_level,
level_type: LevelType::Nonoverlapping as i32,
table_infos: vec![],
},
@@ -131,7 +131,7 @@
// reverse because the ix of low sub-level is smaller.
input_levels.reverse();
input_levels.push(InputLevel {
- level_idx: target_level as u32,
+ level_idx: target_level,
level_type: LevelType::Nonoverlapping as i32,
table_infos: levels.get_level(self.target_level).table_infos.clone(),
});
