Replace &Option<T> with Option<&T> #4446

Merged: 4 commits, Dec 1, 2022
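Context for the diff below: the PR replaces `&Option<T>` parameters and return types with `Option<&T>` throughout the codebase. A minimal sketch of the pattern, using made-up names (`Config`, `label`, `print_label`) rather than DataFusion types:

```rust
// Hypothetical types illustrating the `&Option<T>` -> `Option<&T>` change;
// `Config` and `label` are not DataFusion names.
struct Config {
    label: Option<String>,
}

impl Config {
    // Before: the caller always receives a reference to the whole Option.
    fn label_old(&self) -> &Option<String> {
        &self.label
    }

    // After: the caller receives either a borrowed value or None.
    fn label_new(&self) -> Option<&String> {
        self.label.as_ref()
    }
}

// With `Option<&T>`, a caller that has no value can simply pass `None`
// instead of constructing an owned `Option` just to borrow it.
fn print_label(label: Option<&String>) {
    if let Some(l) = label {
        println!("label = {l}");
    }
}

fn main() {
    let c = Config {
        label: Some("view".to_string()),
    };
    print_label(c.label_new());
    print_label(None);
    // The old shape still works, but only via an Option that can be borrowed:
    print_label(c.label_old().as_ref());
}
```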
12 changes: 9 additions & 3 deletions datafusion/common/src/scalar.rs
@@ -2160,7 +2160,7 @@ impl ScalarValue {
     fn eq_array_decimal(
         array: &ArrayRef,
         index: usize,
-        value: &Option<i128>,
+        value: Option<&i128>,
         precision: u8,
         scale: i8,
     ) -> Result<bool> {
@@ -2196,8 +2196,14 @@ impl ScalarValue {
     pub fn eq_array(&self, array: &ArrayRef, index: usize) -> bool {
         match self {
             ScalarValue::Decimal128(v, precision, scale) => {
-                ScalarValue::eq_array_decimal(array, index, v, *precision, *scale)
-                    .unwrap()
+                ScalarValue::eq_array_decimal(
+                    array,
+                    index,
+                    v.as_ref(),
+                    *precision,
+                    *scale,
+                )
+                .unwrap()
             }
             ScalarValue::Boolean(val) => {
                 eq_array_primitive!(array, index, BooleanArray, val)
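In `eq_array`, the variant field `v` is bound from a borrowed `ScalarValue`, so it has type `&Option<i128>`; `Option::as_ref` converts it into the `Option<&i128>` the updated helper expects. A self-contained sketch of that conversion, with made-up names (`Scalar`, `eq_zero`, `check`) and only the standard library:

```rust
// Matching on a borrowed enum binds Option fields by reference, so the
// binding has type `&Option<i128>`; `.as_ref()` turns it into `Option<&i128>`.
enum Scalar {
    Decimal(Option<i128>),
    Other,
}

fn eq_zero(value: Option<&i128>) -> bool {
    value.map(|v| *v == 0).unwrap_or(false)
}

fn check(scalar: &Scalar) -> bool {
    match scalar {
        // `v: &Option<i128>` here because `scalar` is borrowed.
        Scalar::Decimal(v) => eq_zero(v.as_ref()),
        Scalar::Other => false,
    }
}

fn main() {
    assert!(check(&Scalar::Decimal(Some(0))));
    assert!(!check(&Scalar::Decimal(None)));
    assert!(!check(&Scalar::Other));
}
```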
4 changes: 2 additions & 2 deletions datafusion/core/src/datasource/view.rs
@@ -61,8 +61,8 @@ impl ViewTable {
     }

     /// Get definition ref
-    pub fn definition(&self) -> &Option<String> {
-        &self.definition
+    pub fn definition(&self) -> Option<&String> {
+        self.definition.as_ref()
     }

     /// Get logical_plan ref
10 changes: 5 additions & 5 deletions datafusion/core/src/physical_optimizer/join_selection.rs
@@ -181,8 +181,8 @@ fn swap_reverting_projection(
 }

 /// Swaps join sides for filter column indices and produces new JoinFilter
-fn swap_join_filter(filter: &Option<JoinFilter>) -> Option<JoinFilter> {
-    filter.as_ref().map(|filter| {
+fn swap_join_filter(filter: Option<&JoinFilter>) -> Option<JoinFilter> {
+    filter.map(|filter| {
         let column_indices = filter
             .column_indices()
             .iter()
@@ -334,7 +334,7 @@ fn try_collect_left(
             Arc::clone(left),
             Arc::clone(right),
             hash_join.on().to_vec(),
-            hash_join.filter().clone(),
+            hash_join.filter().cloned(),
             hash_join.join_type(),
             PartitionMode::CollectLeft,
             hash_join.null_equals_null(),
@@ -345,7 +345,7 @@
             Arc::clone(left),
             Arc::clone(right),
             hash_join.on().to_vec(),
-            hash_join.filter().clone(),
+            hash_join.filter().cloned(),
             hash_join.join_type(),
             PartitionMode::CollectLeft,
             hash_join.null_equals_null(),
@@ -377,7 +377,7 @@ fn partitioned_hash_join(hash_join: &HashJoinExec) -> Result<Arc<dyn ExecutionPlan>> {
             Arc::clone(left),
             Arc::clone(right),
             hash_join.on().to_vec(),
-            hash_join.filter().clone(),
+            hash_join.filter().cloned(),
             hash_join.join_type(),
             PartitionMode::Partitioned,
             hash_join.null_equals_null(),
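Because `filter()` now returns `Option<&JoinFilter>`, these call sites switch from `.clone()` to `.cloned()`: `Option::<&T>::cloned` clones the referenced value to produce an owned `Option<T>`, which is what `.clone()` on the old `&Option<T>` used to do. A small sketch of the equivalence, using a made-up `Filter` type:

```rust
// `Option<&T>::cloned()` clones the inner value to give `Option<T>`,
// matching what `.clone()` did when the getter returned `&Option<T>`.
#[derive(Clone, Debug, PartialEq)]
struct Filter {
    expr: String,
}

fn main() {
    let filter: Option<Filter> = Some(Filter {
        expr: "a > 1".to_string(),
    });

    // Old shape: a reference to the whole Option, cloned directly.
    let old_style: Option<Filter> = (&filter).clone();

    // New shape: a borrowed inner value, turned into an owned Option with `.cloned()`.
    let new_style: Option<Filter> = filter.as_ref().cloned();

    assert_eq!(old_style, new_style);
}
```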
8 changes: 4 additions & 4 deletions datafusion/core/src/physical_plan/joins/hash_join.rs
@@ -249,8 +249,8 @@ impl HashJoinExec {
     }

     /// Filters applied before join output
-    pub fn filter(&self) -> &Option<JoinFilter> {
-        &self.filter
+    pub fn filter(&self) -> Option<&JoinFilter> {
+        self.filter.as_ref()
     }

     /// How the join is performed
@@ -701,7 +701,7 @@ fn build_batch(
     left_data: &JoinLeftData,
     on_left: &[Column],
     on_right: &[Column],
-    filter: &Option<JoinFilter>,
+    filter: Option<&JoinFilter>,
     join_type: JoinType,
     schema: &Schema,
     column_indices: &[ColumnIndex],
@@ -1529,7 +1529,7 @@ impl HashJoinStream {
             left_data,
             &self.on_left,
             &self.on_right,
-            &self.filter,
+            self.filter.as_ref(),
             self.join_type,
             &self.schema,
             &self.column_indices,
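A convenient property of the new `build_batch` signature: `Option<&JoinFilter>` is `Copy` (because shared references are `Copy`), so the stream can pass `self.filter.as_ref()` for every batch without cloning the filter. A sketch of that behavior under stated assumptions, with made-up names (`Filter`, `describe`, `apply`):

```rust
// `Option<&T>` is `Copy`, so the same borrowed filter can be passed to
// several helpers and reused afterwards without cloning.
struct Filter {
    expr: String,
}

fn describe(filter: Option<&Filter>) -> String {
    filter
        .map(|f| f.expr.clone())
        .unwrap_or_else(|| "no filter".to_string())
}

fn apply(filter: Option<&Filter>, rows: usize) -> usize {
    // Pretend any filter halves the row count.
    if filter.is_some() {
        rows / 2
    } else {
        rows
    }
}

fn main() {
    let filter = Some(Filter {
        expr: "a > 1".to_string(),
    });
    let f = filter.as_ref();
    println!("{}", describe(f)); // `f` is copied into the call...
    println!("{}", apply(f, 10)); // ...and is still usable here.
}
```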
2 changes: 1 addition & 1 deletion datafusion/expr/src/type_coercion/other.rs
@@ -39,7 +39,7 @@ pub fn get_coerce_type_for_list(
 /// Returns the common data type for `then_types` and `else_type`
 pub fn get_coerce_type_for_case_when(
     then_types: &[DataType],
-    else_type: &Option<DataType>,
+    else_type: Option<&DataType>,
 ) -> Option<DataType> {
     let else_type = match else_type {
         None => then_types[0].clone(),
2 changes: 1 addition & 1 deletion datafusion/optimizer/src/type_coercion.rs
@@ -319,7 +319,7 @@ impl ExprRewriter for TypeCoercionRewriter {
                     Some(expr) => expr.get_type(&self.schema).map(Some),
                 }?;
                 let case_when_coerce_type =
                     get_coerce_type_for_case_when(&then_types, else_type.as_ref());
                 match case_when_coerce_type {
                     None => Err(DataFusionError::Internal(format!(
                         "Failed to coerce then ({:?}) and else ({:?}) to common types in CASE WHEN expression",
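Passing `else_type.as_ref()` borrows rather than moves, so the local `Option<DataType>` stays available afterwards; the error message just below appears to format the else type, which is where that matters. A self-contained sketch of the pattern, using a stand-in `DataType` enum and a hypothetical `get_coerce_type` helper rather than the real DataFusion items:

```rust
// Borrowing with `.as_ref()` lets the Option be passed to a helper and then
// reused afterwards (for example, in an error message).
#[derive(Debug, Clone)]
enum DataType {
    Int64,
    Utf8,
}

fn get_coerce_type(then_types: &[DataType], else_type: Option<&DataType>) -> Option<DataType> {
    // Placeholder logic: prefer the else type, otherwise the first then type.
    else_type.cloned().or_else(|| then_types.first().cloned())
}

fn main() {
    let then_types = vec![DataType::Int64];
    let else_type: Option<DataType> = Some(DataType::Utf8);

    let coerced = get_coerce_type(&then_types, else_type.as_ref());

    // `else_type` was only borrowed above, so it is still usable here.
    println!("coerced = {:?}, else = {:?}", coerced, else_type);
}
```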
6 changes: 3 additions & 3 deletions datafusion/physical-expr/src/window/lead_lag.rs
@@ -120,7 +120,7 @@ pub(crate) struct WindowShiftEvaluator {
 }

 fn create_empty_array(
-    value: &Option<ScalarValue>,
+    value: Option<&ScalarValue>,
     data_type: &DataType,
     size: usize,
 ) -> Result<ArrayRef> {
@@ -140,7 +140,7 @@ fn create_empty_array(
 fn shift_with_default_value(
     array: &ArrayRef,
     offset: i64,
-    value: &Option<ScalarValue>,
+    value: Option<&ScalarValue>,
 ) -> Result<ArrayRef> {
     use arrow::compute::concat;

@@ -172,7 +172,7 @@ impl PartitionEvaluator for WindowShiftEvaluator {
     fn evaluate_partition(&self, partition: Range<usize>) -> Result<ArrayRef> {
         let value = &self.values[0];
         let value = value.slice(partition.start, partition.end - partition.start);
-        shift_with_default_value(&value, self.shift_offset, &self.default_value)
+        shift_with_default_value(&value, self.shift_offset, self.default_value.as_ref())
     }
 }
2 changes: 1 addition & 1 deletion datafusion/proto/src/logical_plan.rs
@@ -929,7 +929,7 @@ impl AsLogicalPlan for LogicalPlanNode {
                     projection,
                     definition: view_table
                         .definition()
-                        .clone()
+                        .map(|s| s.to_string())
                         .unwrap_or_default(),
                 },
             ))),
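The proto serialization code needs an owned `String`, and `definition()` now hands back `Option<&String>`, so `.clone()` becomes `.map(|s| s.to_string())` ahead of `unwrap_or_default()`. A minimal sketch of that call chain, assuming only the standard library:

```rust
// Turning an `Option<&String>` into an owned `String`, defaulting to "".
fn main() {
    let definition: Option<String> = Some("SELECT 1".to_string());

    // Old shape: `&Option<String>` cloned into an owned Option, then defaulted.
    let old_style: String = (&definition).clone().unwrap_or_default();

    // New shape: borrow the inner value, convert it to an owned String, then default.
    let new_style: String = definition
        .as_ref()
        .map(|s| s.to_string())
        .unwrap_or_default();

    assert_eq!(old_style, new_style);

    // When the definition is absent, both shapes yield an empty string.
    let empty: Option<String> = None;
    assert_eq!(empty.as_ref().map(|s| s.to_string()).unwrap_or_default(), "");
}
```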
4 changes: 2 additions & 2 deletions datafusion/sql/src/planner.rs
@@ -1129,7 +1129,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
             self.aggregate(
                 plan,
                 &select_exprs,
-                &having_expr_opt,
+                having_expr_opt.as_ref(),
                 group_by_exprs,
                 aggr_exprs,
             )?
@@ -1267,7 +1267,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
         &self,
         input: LogicalPlan,
         select_exprs: &[Expr],
-        having_expr_opt: &Option<Expr>,
+        having_expr_opt: Option<&Expr>,
         group_by_exprs: Vec<Expr>,
         aggr_exprs: Vec<Expr>,
     ) -> Result<(LogicalPlan, Vec<Expr>, Option<Expr>)> {