
Commit 525a3fc: Clippy

jorgecarleitao committed Dec 18, 2022
1 parent: 1b190fc

Showing 20 changed files with 49 additions and 61 deletions.
2 changes: 1 addition & 1 deletion src/array/fixed_size_binary/ffi.rs
@@ -16,7 +16,7 @@ unsafe impl ToFfi for FixedSizeBinaryArray {
     }

     fn offset(&self) -> Option<usize> {
-        let offset = self.values.offset() / self.size as usize;
+        let offset = self.values.offset() / self.size;
         if let Some(bitmap) = self.validity.as_ref() {
             if bitmap.offset() == offset {
                 Some(offset)
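
A note on the pattern in this and the next few files: the `size` field is a plain `usize`, so casting it with `as usize` is a same-type cast that clippy flags, most likely via the `unnecessary_cast` lint. A minimal sketch with a hypothetical `FixedWidth` struct, not the arrow2 type:

```rust
// Minimal sketch (hypothetical type, not the arrow2 struct): casting a
// `usize` field to `usize` is a no-op and triggers clippy::unnecessary_cast.
struct FixedWidth {
    values: Vec<u8>,
    size: usize,
}

impl FixedWidth {
    fn len(&self) -> usize {
        // warning: casting to the same type is unnecessary (`usize` -> `usize`)
        // self.values.len() / self.size as usize
        self.values.len() / self.size
    }
}

fn main() {
    let a = FixedWidth { values: vec![0; 12], size: 4 };
    assert_eq!(a.len(), 3);
}
```
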
4 changes: 2 additions & 2 deletions src/array/fixed_size_binary/mod.rs
@@ -160,7 +160,7 @@ impl FixedSizeBinaryArray {
     /// Returns the length of this array
     #[inline]
     pub fn len(&self) -> usize {
-        self.values.len() / self.size as usize
+        self.values.len() / self.size
     }

     /// The optional validity.
@@ -329,6 +329,6 @@ impl FixedSizeBinaryValues for FixedSizeBinaryArray {

     #[inline]
     fn size(&self) -> usize {
-        self.size as usize
+        self.size
     }
 }
2 changes: 1 addition & 1 deletion src/array/fixed_size_binary/mutable.rs
@@ -146,7 +146,7 @@ impl MutableFixedSizeBinaryArray {
     /// Returns the length of this array
     #[inline]
     pub fn len(&self) -> usize {
-        self.values.len() / self.size as usize
+        self.values.len() / self.size
     }

     /// Pop the last entry from [`MutableFixedSizeBinaryArray`].
12 changes: 5 additions & 7 deletions src/array/fixed_size_list/mod.rs
@@ -140,7 +140,7 @@ impl FixedSizeListArray {
         let values = self
             .values
             .clone()
-            .slice_unchecked(offset * self.size as usize, length * self.size as usize);
+            .slice_unchecked(offset * self.size, length * self.size);
         Self {
             data_type: self.data_type.clone(),
             size: self.size,
@@ -174,7 +174,7 @@ impl FixedSizeListArray {
     /// Returns the length of this array
     #[inline]
     pub fn len(&self) -> usize {
-        self.values.len() / self.size as usize
+        self.values.len() / self.size
     }

     /// The optional validity.
@@ -193,17 +193,15 @@ impl FixedSizeListArray {
     /// panics iff `i >= self.len()`
     #[inline]
     pub fn value(&self, i: usize) -> Box<dyn Array> {
-        self.values
-            .slice(i * self.size as usize, self.size as usize)
+        self.values.slice(i * self.size, self.size)
     }

     /// Returns the `Vec<T>` at position `i`.
     /// # Safety
     /// Caller must ensure that `i < self.len()`
     #[inline]
     pub unsafe fn value_unchecked(&self, i: usize) -> Box<dyn Array> {
-        self.values
-            .slice_unchecked(i * self.size as usize, self.size as usize)
+        self.values.slice_unchecked(i * self.size, self.size)
     }
 }

@@ -214,7 +212,7 @@ impl FixedSizeListArray {
                 if *size == 0 {
                     return Err(Error::oos("FixedSizeBinaryArray expects a positive size"));
                 }
-                Ok((child.as_ref(), *size as usize))
+                Ok((child.as_ref(), *size))
             }
             _ => Err(Error::oos(
                 "FixedSizeListArray expects DataType::FixedSizeList",
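
For the arithmetic simplified in `value` and `value_unchecked`: a fixed-size list keeps all child values in one flat buffer, so element `i` is the window of `size` values starting at `i * size`. A standalone sketch of that indexing with a plain slice (`value_window` is a made-up helper, not the arrow2 API):

```rust
// Sketch of fixed-size-list indexing (plain Vec stand-in, not the arrow2 API):
// element `i` of a list with fixed size `size` occupies the flat range
// [i * size, i * size + size).
fn value_window(flat: &[i32], size: usize, i: usize) -> &[i32] {
    &flat[i * size..i * size + size]
}

fn main() {
    let flat = [1, 2, 3, 4, 5, 6];
    // two "rows" of size 3
    assert_eq!(value_window(&flat, 3, 0), &[1, 2, 3]);
    assert_eq!(value_window(&flat, 3, 1), &[4, 5, 6]);
}
```
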
2 changes: 1 addition & 1 deletion src/array/growable/fixed_size_list.rs
@@ -40,7 +40,7 @@ impl<'a> GrowableFixedSizeList<'a> {

         let size =
             if let DataType::FixedSizeList(_, size) = &arrays[0].data_type().to_logical_type() {
-                *size as usize
+                *size
             } else {
                 unreachable!("`GrowableFixedSizeList` expects `DataType::FixedSizeList`")
             };
8 changes: 2 additions & 6 deletions src/compute/cast/primitive_to.rs
@@ -38,7 +38,7 @@ pub fn primitive_to_binary<T: NativeType + lexical_core::ToLexical, O: Offset>(
             let len = lexical_core::write_unchecked(*x, bytes).len();

             offset += len;
-            offsets.push(O::from_usize(offset as usize).unwrap());
+            offsets.push(O::from_usize(offset).unwrap());
         }
         values.set_len(offset);
         values.shrink_to_fit();
@@ -398,11 +398,7 @@ pub fn time64_to_time32(
     let from_size = time_unit_multiple(from_unit);
     let to_size = time_unit_multiple(to_unit);
     let divisor = from_size / to_size;
-    unary(
-        from,
-        |x| (x as i64 / divisor) as i32,
-        DataType::Time32(to_unit),
-    )
+    unary(from, |x| (x / divisor) as i32, DataType::Time32(to_unit))
 }

 /// Conversion of timestamp
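
The `time64_to_time32` hunk only drops an `as i64` on a value that is already `i64`; the conversion itself divides by the ratio of the two unit sizes. A rough sketch of that scaling (the string-keyed `unit_multiple` helper is an illustration, not arrow2's `time_unit_multiple`):

```rust
// Rough sketch of the scaling in `time64_to_time32`; the multipliers mirror
// standard time units, and the function shape is an assumption rather than
// arrow2's exact implementation.
fn unit_multiple(unit: &str) -> i64 {
    match unit {
        "s" => 1,
        "ms" => 1_000,
        "us" => 1_000_000,
        "ns" => 1_000_000_000,
        _ => panic!("unknown unit"),
    }
}

fn time64_to_time32(x: i64, from: &str, to: &str) -> i32 {
    let divisor = unit_multiple(from) / unit_multiple(to);
    (x / divisor) as i32
}

fn main() {
    // 1_500_000 microseconds == 1_500 milliseconds
    assert_eq!(time64_to_time32(1_500_000, "us", "ms"), 1_500);
}
```
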
13 changes: 5 additions & 8 deletions src/compute/comparison/primitive.rs
@@ -335,10 +335,7 @@ mod tests {
         let b = Int64Array::from_slice([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
         let c = b.slice(5, 5);
         let d = eq(&c, &a);
-        assert_eq!(
-            d,
-            BooleanArray::from_slice(&vec![true, true, true, false, true])
-        );
+        assert_eq!(d, BooleanArray::from_slice([true, true, true, false, true]));
     }

     #[test]
@@ -584,11 +581,11 @@ mod tests {
     #[test]
     fn test_length_of_result_buffer() {
         // `item_count` is chosen to not be a multiple of 64.
-        let item_count = 130;
+        const ITEM_COUNT: usize = 130;

-        let array_a = Int8Array::from_slice(&vec![1; item_count]);
-        let array_b = Int8Array::from_slice(&vec![2; item_count]);
-        let expected = BooleanArray::from_slice(&vec![false; item_count]);
+        let array_a = Int8Array::from_slice([1; ITEM_COUNT]);
+        let array_b = Int8Array::from_slice([2; ITEM_COUNT]);
+        let expected = BooleanArray::from_slice([false; ITEM_COUNT]);
         let result = gt_eq(&array_a, &array_b);

         assert_eq!(result, expected)
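
Two things happen in the second test hunk: the `&vec![v; n]` arguments become plain array literals (the allocation-free form clippy's `useless_vec` lint nudges towards), and `item_count` becomes a `const` because the length in an array repeat expression `[v; N]` must be a constant. A standalone sketch:

```rust
fn main() {
    // `[v; N]` needs a constant length, so the count is promoted to a const.
    const ITEM_COUNT: usize = 130;

    // Before: a temporary Vec was allocated just to borrow it as a slice.
    let from_vec: Vec<i8> = vec![1; ITEM_COUNT];
    // After: a stack array expresses the same data without the allocation
    // (this is what clippy::useless_vec nudges towards).
    let from_array: [i8; ITEM_COUNT] = [1; ITEM_COUNT];

    assert_eq!(from_vec.as_slice(), from_array.as_slice());
}
```
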
2 changes: 1 addition & 1 deletion src/compute/sort/binary.rs
@@ -10,7 +10,7 @@ pub(super) fn indices_sorted_unstable_by<I: Index, O: Offset>(
     options: &SortOptions,
     limit: Option<usize>,
 ) -> PrimitiveArray<I> {
-    let get = |idx| unsafe { array.value_unchecked(idx as usize) };
+    let get = |idx| unsafe { array.value_unchecked(idx) };
     let cmp = |lhs: &&[u8], rhs: &&[u8]| lhs.cmp(rhs);
     common::indices_sorted_unstable_by(array.validity(), get, cmp, array.len(), options, limit)
 }
2 changes: 1 addition & 1 deletion src/compute/sort/utf8.rs
@@ -10,7 +10,7 @@ pub(super) fn indices_sorted_unstable_by<I: Index, O: Offset>(
     options: &SortOptions,
     limit: Option<usize>,
 ) -> PrimitiveArray<I> {
-    let get = |idx| unsafe { array.value_unchecked(idx as usize) };
+    let get = |idx| unsafe { array.value_unchecked(idx) };
     let cmp = |lhs: &&str, rhs: &&str| lhs.cmp(rhs);
     common::indices_sorted_unstable_by(array.validity(), get, cmp, array.len(), options, limit)
 }
2 changes: 1 addition & 1 deletion src/compute/substring.rs
@@ -90,7 +90,7 @@ fn binary_substring<O: Offset>(
         } else {
             length_i + start
         };
-        let start = start.max(windows[0]).min(windows[1]);
+        let start = start.clamp(windows[0], windows[1]);

         let length: O = length
             .unwrap_or(length_i)
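
Replacing `.max(lo).min(hi)` with `.clamp(lo, hi)` is what clippy's `manual_clamp` lint suggests. The two forms agree whenever `lo <= hi`, which should hold here because `windows[0]` and `windows[1]` come from the array's monotonically increasing offsets; the one behavioural difference is that `Ord::clamp` panics when `min > max`, while the max/min chain silently returns the upper bound. A quick check:

```rust
fn main() {
    let (lo, hi) = (2_i64, 10_i64);

    // Equivalent ways of restricting a value to [lo, hi] when lo <= hi.
    let x = 15_i64;
    assert_eq!(x.max(lo).min(hi), 10);
    assert_eq!(x.clamp(lo, hi), 10);

    let y = -3_i64;
    assert_eq!(y.max(lo).min(hi), 2);
    assert_eq!(y.clamp(lo, hi), 2);

    // Caveat: `clamp` panics if lo > hi, while the max/min chain would
    // silently return the upper bound.
}
```
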
2 changes: 1 addition & 1 deletion src/compute/window.rs
@@ -52,7 +52,7 @@ pub fn shift(array: &dyn Array, offset: i64) -> Result<Box<dyn Array>> {
     let slice = array.slice(slice_offset, length);

     // Generate array with remaining `null` items
-    let nulls = abs(offset as i64) as usize;
+    let nulls = abs(offset) as usize;

     let null_array = new_null_array(array.data_type().clone(), nulls);

12 changes: 6 additions & 6 deletions src/io/avro/read/deserialize.rs
@@ -51,10 +51,10 @@ fn make_mutable(
                 capacity,
             )) as Box<dyn MutableArray>
         }
-        DataType::FixedSizeBinary(size) => Box::new(MutableFixedSizeBinaryArray::with_capacity(
-            *size as usize,
-            capacity,
-        )) as Box<dyn MutableArray>,
+        DataType::FixedSizeBinary(size) => {
+            Box::new(MutableFixedSizeBinaryArray::with_capacity(*size, capacity))
+                as Box<dyn MutableArray>
+        }
         DataType::Struct(fields) => {
             let values = fields
                 .iter()
@@ -195,7 +195,7 @@ fn deserialize_value<'a>(
                 array.push(Some(value))
             }
             PrimitiveType::Int64 => {
-                let value = util::zigzag_i64(&mut block)? as i64;
+                let value = util::zigzag_i64(&mut block)?;
                 let array = array
                     .as_mut_any()
                     .downcast_mut::<MutablePrimitiveArray<i64>>()
@@ -274,7 +274,7 @@ fn deserialize_value<'a>(
                     .as_mut_any()
                     .downcast_mut::<MutablePrimitiveArray<i128>>()
                     .unwrap();
-                array.push(Some(data as i128))
+                array.push(Some(data))
             }
             _ => unreachable!(),
         },
8 changes: 4 additions & 4 deletions src/io/ipc/read/stream.rs
@@ -48,7 +48,7 @@ pub fn read_stream_metadata<R: Read>(reader: &mut R) -> Result<StreamMetadata> {
         .map_err(|_| Error::from(OutOfSpecKind::NegativeFooterLength))?;

     let mut buffer = vec![];
-    buffer.try_reserve(length as usize)?;
+    buffer.try_reserve(length)?;
     reader
         .by_ref()
         .take(length as u64)
@@ -135,7 +135,7 @@ fn read_next<R: Read>(
     }

     message_buffer.clear();
-    message_buffer.try_reserve(meta_length as usize)?;
+    message_buffer.try_reserve(meta_length)?;
     reader
         .by_ref()
         .take(meta_length as u64)
@@ -158,7 +158,7 @@
     match header {
         arrow_format::ipc::MessageHeaderRef::RecordBatch(batch) => {
             data_buffer.clear();
-            data_buffer.try_reserve(block_length as usize)?;
+            data_buffer.try_reserve(block_length)?;
             reader
                 .by_ref()
                 .take(block_length as u64)
@@ -193,7 +193,7 @@ fn read_next<R: Read>(
         }
         arrow_format::ipc::MessageHeaderRef::DictionaryBatch(batch) => {
             data_buffer.clear();
-            data_buffer.try_reserve(block_length as usize)?;
+            data_buffer.try_reserve(block_length)?;
             reader
                 .by_ref()
                 .take(block_length as u64)
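
The IPC hunks here and in `stream_async.rs` only drop redundant casts, but the surrounding pattern is worth spelling out: the length comes from the file, so the buffer is grown with the fallible `try_reserve` (a corrupt or hostile length becomes an `Err` instead of an allocation abort) and the reader is capped with `take` before `read_to_end`. A generic sketch of that pattern (`read_block` is an illustration, not the arrow2 function):

```rust
use std::io::{self, Read};

// Generic sketch: read `length` bytes declared by an untrusted header.
// `try_reserve` makes over-large lengths fail gracefully, and `take`
// guarantees we never read past the declared size.
fn read_block<R: Read>(reader: &mut R, length: usize) -> io::Result<Vec<u8>> {
    let mut buffer = Vec::new();
    buffer
        .try_reserve(length)
        .map_err(|e| io::Error::new(io::ErrorKind::OutOfMemory, e))?;
    reader.by_ref().take(length as u64).read_to_end(&mut buffer)?;
    Ok(buffer)
}

fn main() -> io::Result<()> {
    let data = [1u8, 2, 3, 4, 5];
    let block = read_block(&mut &data[..], 3)?;
    assert_eq!(block, vec![1, 2, 3]);
    Ok(())
}
```
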
8 changes: 4 additions & 4 deletions src/io/ipc/read/stream_async.rs
@@ -58,7 +58,7 @@ pub async fn read_stream_metadata_async<R: AsyncRead + Unpin + Send>(
         .map_err(|_| Error::from(OutOfSpecKind::NegativeFooterLength))?;

     let mut meta_buffer = vec![];
-    meta_buffer.try_reserve(meta_len as usize)?;
+    meta_buffer.try_reserve(meta_len)?;
     reader
         .take(meta_len as u64)
         .read_to_end(&mut meta_buffer)
@@ -109,7 +109,7 @@ async fn maybe_next<R: AsyncRead + Unpin + Send>(
     }

     state.message_buffer.clear();
-    state.message_buffer.try_reserve(meta_length as usize)?;
+    state.message_buffer.try_reserve(meta_length)?;
     (&mut state.reader)
         .take(meta_length as u64)
         .read_to_end(&mut state.message_buffer)
@@ -132,7 +132,7 @@ async fn maybe_next<R: AsyncRead + Unpin + Send>(
     match header {
         arrow_format::ipc::MessageHeaderRef::RecordBatch(batch) => {
             state.data_buffer.clear();
-            state.data_buffer.try_reserve(block_length as usize)?;
+            state.data_buffer.try_reserve(block_length)?;
             (&mut state.reader)
                 .take(block_length as u64)
                 .read_to_end(&mut state.data_buffer)
@@ -155,7 +155,7 @@ async fn maybe_next<R: AsyncRead + Unpin + Send>(
         }
         arrow_format::ipc::MessageHeaderRef::DictionaryBatch(batch) => {
             state.data_buffer.clear();
-            state.data_buffer.try_reserve(block_length as usize)?;
+            state.data_buffer.try_reserve(block_length)?;
             (&mut state.reader)
                 .take(block_length as u64)
                 .read_to_end(&mut state.data_buffer)
2 changes: 1 addition & 1 deletion src/io/ipc/write/common.rs
@@ -371,7 +371,7 @@ pub struct EncodedData {
 /// Calculate an 8-byte boundary and return the number of bytes needed to pad to 8 bytes
 #[inline]
 pub(crate) fn pad_to_64(len: usize) -> usize {
-    (((len + 63) & !63) - len) as usize
+    ((len + 63) & !63) - len
 }

 /// An array [`Chunk`] with optional accompanying IPC fields.
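
The removed cast wrapped an expression that is already `usize`. The expression itself is the standard power-of-two round-up: `(len + 63) & !63` rounds `len` up to the next multiple of 64, and subtracting `len` leaves the padding byte count. A quick check of the function as it now stands:

```rust
// The power-of-two round-up trick used by `pad_to_64`.
fn pad_to_64(len: usize) -> usize {
    ((len + 63) & !63) - len
}

fn main() {
    assert_eq!(pad_to_64(0), 0);    // already aligned
    assert_eq!(pad_to_64(1), 63);   // 1 + 63 = 64
    assert_eq!(pad_to_64(64), 0);   // already aligned
    assert_eq!(pad_to_64(65), 63);  // next boundary is 128
    assert_eq!(pad_to_64(100), 28); // 128 - 100
}
```
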
2 changes: 1 addition & 1 deletion src/io/odbc/read/schema.rs
@@ -23,7 +23,7 @@ pub fn infer_schema(resut_set_metadata: &impl ResultSetMetadata) -> Result<Vec<F

 fn column_to_field(column_description: &api::ColumnDescription) -> Field {
     Field::new(
-        &column_description
+        column_description
             .name_to_string()
             .expect("Column name must be representable in utf8"),
         column_to_data_type(&column_description.data_type),
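
Dropping the leading `&` on `column_description` has the shape of clippy's `needless_borrow` lint: the argument already is, or coerces to, the reference the callee wants, so the extra borrow is immediately dereferenced again. A minimal illustration with a plain function, not the odbc/arrow2 API:

```rust
fn print_name(name: &str) {
    println!("{name}");
}

fn main() {
    let name: &str = "column_a";
    // `name` is already a `&str`; `&name` creates a `&&str` that the compiler
    // immediately dereferences, which is what clippy::needless_borrow flags.
    print_name(&name);
    // Preferred:
    print_name(name);
}
```
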
6 changes: 3 additions & 3 deletions src/io/parquet/read/deserialize/simple.rs
@@ -127,7 +127,7 @@ pub fn page_iter_to_arrays<'a, I: Pages + 'a>(
             data_type,
             num_rows,
             chunk_size,
-            |x: i32| x as i32,
+            |x: i32| x,
         ))),

         Timestamp(time_unit, _) => {
@@ -255,7 +255,7 @@ pub fn page_iter_to_arrays<'a, I: Pages + 'a>(
             data_type,
             num_rows,
             chunk_size,
-            |x: i64| x as i64,
+            |x: i64| x,
         ))),
         UInt64 => dyn_iter(iden(primitive::IntegerIter::new(
             pages,
@@ -493,7 +493,7 @@ fn dict_read<'a, K: DictionaryKey, I: Pages + 'a>(
             data_type,
             num_rows,
             chunk_size,
-            |x: i32| x as i32,
+            |x: i32| x,
         ))
     }

7 changes: 2 additions & 5 deletions src/io/parquet/read/row_group.rs
@@ -134,10 +134,7 @@ where

     let mut chunk = vec![];
     chunk.try_reserve(length as usize)?;
-    reader
-        .by_ref()
-        .take(length as u64)
-        .read_to_end(&mut chunk)?;
+    reader.by_ref().take(length).read_to_end(&mut chunk)?;
     Ok((meta, chunk))
 }

@@ -155,7 +152,7 @@

     let mut chunk = vec![];
     chunk.try_reserve(length as usize)?;
-    reader.take(length as u64).read_to_end(&mut chunk).await?;
+    reader.take(length).read_to_end(&mut chunk).await?;
     Result::Ok((meta, chunk))
 }

10 changes: 5 additions & 5 deletions src/io/parquet/read/statistics/mod.rs
@@ -441,7 +441,7 @@ fn push(
         Boolean => boolean::push(from, min, max),
         Int8 => primitive::push(from, min, max, |x: i32| Ok(x as i8)),
         Int16 => primitive::push(from, min, max, |x: i32| Ok(x as i16)),
-        Date32 | Time32(_) => primitive::push(from, min, max, |x: i32| Ok(x as i32)),
+        Date32 | Time32(_) => primitive::push::<i32, i32, _>(from, min, max, Ok),
         Interval(IntervalUnit::YearMonth) => fixlen::push_year_month(from, min, max),
         Interval(IntervalUnit::DayTime) => fixlen::push_days_ms(from, min, max),
         UInt8 => primitive::push(from, min, max, |x: i32| Ok(x as u8)),
@@ -455,9 +455,9 @@
                 other
             ))),
         },
-        Int32 => primitive::push(from, min, max, |x: i32| Ok(x as i32)),
+        Int32 => primitive::push::<i32, i32, _>(from, min, max, Ok),
         Int64 | Date64 | Time64(_) | Duration(_) => {
-            primitive::push(from, min, max, |x: i64| Ok(x as i64))
+            primitive::push::<i64, i64, _>(from, min, max, Ok)
         }
         UInt64 => primitive::push(from, min, max, |x: i64| Ok(x as u64)),
         Timestamp(time_unit, _) => {
@@ -498,8 +498,8 @@ fn push(
                 })
             }
         }
-        Float32 => primitive::push(from, min, max, |x: f32| Ok(x as f32)),
-        Float64 => primitive::push(from, min, max, |x: f64| Ok(x as f64)),
+        Float32 => primitive::push::<f32, f32, _>(from, min, max, Ok),
+        Float64 => primitive::push::<f64, f64, _>(from, min, max, Ok),
         Decimal(_, _) => match physical_type {
             ParquetPhysicalType::Int32 => primitive::push(from, min, max, |x: i32| Ok(x as i128)),
             ParquetPhysicalType::Int64 => primitive::push(from, min, max, |x: i64| Ok(x as i128)),
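
In the statistics hunks the closures only cast a value to its own type and wrap it in `Ok`, so they can be replaced by the `Ok` constructor used directly as the mapping function; the turbofish (`::<i32, i32, _>`) is then needed because inference no longer gets the types from a closure annotation. A reduced sketch with a hypothetical `push` helper (not arrow2's `primitive::push` signature):

```rust
// Hypothetical stand-in for the statistics `push` helper: it maps parquet
// values of type P into arrow values of type A through a fallible function.
fn push<P, A, F>(values: &[P], map: F) -> Result<Vec<A>, String>
where
    P: Copy,
    F: Fn(P) -> Result<A, String>,
{
    values.iter().copied().map(map).collect()
}

fn main() -> Result<(), String> {
    let raw: Vec<i32> = vec![1, 2, 3];

    // With a converting closure, inference works out P and A on its own.
    let widened: Vec<i64> = push(&raw, |x: i32| Ok(x as i64))?;

    // When no conversion happens, `Ok` itself is the mapping function; the
    // turbofish pins the types that the closure annotation used to supply.
    let same: Vec<i32> = push::<i32, i32, _>(&raw, Ok)?;

    assert_eq!(widened, vec![1, 2, 3]);
    assert_eq!(same, raw);
    Ok(())
}
```
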
4 changes: 2 additions & 2 deletions src/io/parquet/write/nested/def.rs
@@ -117,7 +117,7 @@ impl<'a> Iterator for DefLevelsIter<'a> {
         if *self.remaining.last().unwrap() > 0 {
             *self.remaining.last_mut().unwrap() -= 1;

-            let primitive = self.primitive_validity.next()?.0 as u32;
+            let primitive = self.primitive_validity.next()?.0;
             let r = Some(self.total + primitive);

             for level in 0..self.current_level - 1 {
@@ -130,7 +130,7 @@ impl<'a> Iterator for DefLevelsIter<'a> {
         }
         if self.remaining[0] == 0 {
             self.current_level -= 1;
-            self.total -= self.validity[0] as u32;
+            self.total -= self.validity[0];
         }
         self.remaining_values -= 1;
         return r;
