From 16c089e0c4c2b079da10b6fb8fbec19cdabaeab3 Mon Sep 17 00:00:00 2001
From: Jorge Leitao
Date: Fri, 23 Jul 2021 06:48:15 +0200
Subject: [PATCH] Fixed clippy from 1.53 (#214)

---
 benches/take_kernels.rs | 2 +-
 examples/csv_read.rs | 2 +-
 examples/parquet_read.rs | 2 +-
 src/array/binary/mod.rs | 6 +-
 src/array/boolean/mod.rs | 16 ++---
 src/array/equal/mod.rs | 4 +-
 src/array/growable/dictionary.rs | 2 +-
 src/array/ord.rs | 2 +-
 src/array/primitive/mod.rs | 6 +-
 src/array/utf8/mod.rs | 6 +-
 src/bitmap/bitmap_ops.rs | 14 ++--
 src/bitmap/mutable.rs | 10 +--
 src/bitmap/utils/mod.rs | 16 ++---
 src/buffer/immutable.rs | 4 +-
 src/buffer/mutable.rs | 6 +-
 src/compute/aggregate/min_max.rs | 4 +-
 src/compute/arithmetics/time.rs | 4 +-
 src/compute/arity.rs | 2 +-
 src/compute/boolean.rs | 8 +--
 src/compute/boolean_kleene.rs | 24 +++----
 src/compute/cast/primitive_to.rs | 8 +--
 src/compute/cast/timestamps.rs | 2 +-
 src/compute/comparison/mod.rs | 3 +-
 src/compute/filter.rs | 14 ++--
 src/compute/regex_match.rs | 2 +-
 src/compute/sort/mod.rs | 84 +++++++++++-------------
 src/compute/take/mod.rs | 3 +-
 src/datatypes/field.rs | 2 +-
 src/ffi/ffi.rs | 2 +-
 src/io/csv/read/deserialize.rs | 2 +-
 src/io/csv/read/infer_schema.rs | 8 +--
 src/io/ipc/read/deserialize.rs | 16 ++---
 src/io/ipc/read/reader.rs | 6 +-
 src/io/ipc/read/stream.rs | 2 +-
 src/io/ipc/write/common.rs | 2 +-
 src/io/json/read/reader.rs | 40 +++++------
 src/io/json_integration/read.rs | 6 +-
 src/io/parquet/mod.rs | 2 +-
 src/io/parquet/read/binary/basic.rs | 8 +--
 src/io/parquet/read/boolean/basic.rs | 2 +-
 src/io/parquet/read/fixed_size_binary.rs | 6 +-
 src/io/parquet/read/primitive/basic.rs | 6 +-
 src/io/parquet/read/statistics/binary.rs | 4 +-
 src/io/parquet/write/levels.rs | 10 +--
 src/io/parquet/write/mod.rs | 16 ++---
 src/io/parquet/write/schema.rs | 2 +-
 src/io/print.rs | 2 +-
 47 files changed, 192 insertions(+), 208 deletions(-)

diff --git a/benches/take_kernels.rs b/benches/take_kernels.rs
index 7b2c32749b8..2ab746b443c 100644
--- a/benches/take_kernels.rs
+++ b/benches/take_kernels.rs
@@ -46,7 +46,7 @@ fn create_random_index(size: usize, null_density: f32) -> PrimitiveArray {
}
fn bench_take(values: &dyn Array, indices: &PrimitiveArray) {
- criterion::black_box(take::take(values, &indices).unwrap());
+ criterion::black_box(take::take(values, indices).unwrap());
}
fn add_benchmark(c: &mut Criterion) {
diff --git a/examples/csv_read.rs b/examples/csv_read.rs
index 2f7a66c7c13..2999f0d8b5a 100644
--- a/examples/csv_read.rs
+++ b/examples/csv_read.rs
@@ -37,7 +37,7 @@ fn main() -> Result<()> {
let file_path = &args[1];
- let batch = read_path(&file_path, None)?;
+ let batch = read_path(file_path, None)?;
println!("{:?}", batch);
Ok(())
}
diff --git a/examples/parquet_read.rs b/examples/parquet_read.rs
index dceacdaf6a8..e3b82b4714d 100644
--- a/examples/parquet_read.rs
+++ b/examples/parquet_read.rs
@@ -45,7 +45,7 @@ fn main() -> Result<()> {
let column = args[2].parse::().unwrap();
let row_group = args[3].parse::().unwrap();
- let array = read_column_chunk(&file_path, row_group, column)?;
+ let array = read_column_chunk(file_path, row_group, column)?;
println!("{}", array);
Ok(())
}
diff --git a/src/array/binary/mod.rs b/src/array/binary/mod.rs
index 11d79878eed..96425bac9e0 100644
--- a/src/array/binary/mod.rs
+++ b/src/array/binary/mod.rs
@@ -168,9 +168,9 @@ mod tests {
array.validity(),
&Some(Bitmap::from_u8_slice(&[0b00000101], 3))
);
- assert_eq!(array.is_valid(0), true);
- assert_eq!(array.is_valid(1), false);
- assert_eq!(array.is_valid(2), true);
+ assert!(array.is_valid(0));
+ assert!(!array.is_valid(1));
+ assert!(array.is_valid(2));
let array2 = BinaryArray::::from_data(
array.offsets().clone(),
diff --git a/src/array/boolean/mod.rs b/src/array/boolean/mod.rs
index d0c95df0c13..af5087ee93f 100644
--- a/src/array/boolean/mod.rs
+++ b/src/array/boolean/mod.rs
@@ -141,24 +141,24 @@ mod tests {
let array: BooleanArray = data.into_iter().collect();
- assert_eq!(array.value(0), true);
- assert_eq!(array.value(1), false);
- assert_eq!(array.value(2), false);
+ assert!(array.value(0));
+ assert!(!array.value(1));
+ assert!(!array.value(2));
assert_eq!(array.values(), &Bitmap::from_u8_slice(&[0b00000001], 3));
assert_eq!(
array.validity(),
&Some(Bitmap::from_u8_slice(&[0b00000101], 3))
);
- assert_eq!(array.is_valid(0), true);
- assert_eq!(array.is_valid(1), false);
- assert_eq!(array.is_valid(2), true);
+ assert!(array.is_valid(0));
+ assert!(!array.is_valid(1));
+ assert!(array.is_valid(2));
let array2 = BooleanArray::from_data(array.values().clone(), array.validity().clone());
assert_eq!(array, array2);
let array = array.slice(1, 2);
- assert_eq!(array.value(0), false);
- assert_eq!(array.value(1), false);
+ assert!(!array.value(0));
+ assert!(!array.value(1));
}
#[test]
diff --git a/src/array/equal/mod.rs b/src/array/equal/mod.rs
index 5dfdbf812b8..45062e27c4f 100644
--- a/src/array/equal/mod.rs
+++ b/src/array/equal/mod.rs
@@ -424,8 +424,8 @@ mod tests {
pub(super) fn test_equal(lhs: &dyn Array, rhs: &dyn Array, expected: bool) {
// equality is symmetric
- assert_eq!(equal(lhs, lhs), true, "\n{:?}\n{:?}", lhs, lhs);
- assert_eq!(equal(rhs, rhs), true, "\n{:?}\n{:?}", rhs, rhs);
+ assert!(equal(lhs, lhs), "\n{:?}\n{:?}", lhs, lhs);
+ assert!(equal(rhs, rhs), "\n{:?}\n{:?}", rhs, rhs);
assert_eq!(equal(lhs, rhs), expected, "\n{:?}\n{:?}", lhs, rhs);
assert_eq!(equal(rhs, lhs), expected, "\n{:?}\n{:?}", rhs, lhs);
diff --git a/src/array/growable/dictionary.rs b/src/array/growable/dictionary.rs
index e67aa72bf99..94667b87852 100644
--- a/src/array/growable/dictionary.rs
+++ b/src/array/growable/dictionary.rs
@@ -25,7 +25,7 @@ fn concatenate_values(
arrays_values: &[&dyn Array],
capacity: usize,
) -> (Arc, Vec) {
- let mut mutable = make_growable(&arrays_values, false, capacity);
+ let mut mutable = make_growable(arrays_values, false, capacity);
let mut offsets = Vec::with_capacity(arrays_keys.len() + 1);
offsets.push(0);
for (i, values) in arrays_values.iter().enumerate() {
diff --git a/src/array/ord.rs b/src/array/ord.rs
index 2529ea423d4..8157dfbf72c 100644
--- a/src/array/ord.rs
+++ b/src/array/ord.rs
@@ -92,7 +92,7 @@ fn compare_f64<'a>(left: &'a dyn Array, right: &'a dyn Array) -> DynComparator<'
fn compare_string<'a, O: Offset>(left: &'a dyn Array, right: &'a dyn Array) -> DynComparator<'a> {
let left = left.as_any().downcast_ref::>().unwrap();
let right = right.as_any().downcast_ref::>().unwrap();
- Box::new(move |i, j| left.value(i).cmp(&right.value(j)))
+ Box::new(move |i, j| left.value(i).cmp(right.value(j)))
}
fn compare_dict<'a, K>(
diff --git a/src/array/primitive/mod.rs b/src/array/primitive/mod.rs
index c299957a849..5a394e817c4 100644
--- a/src/array/primitive/mod.rs
+++ b/src/array/primitive/mod.rs
@@ -225,9 +225,9 @@ mod tests {
array.validity(),
&Some(Bitmap::from_u8_slice(&[0b00000101], 3))
);
- assert_eq!(array.is_valid(0), true);
- assert_eq!(array.is_valid(1), false);
- assert_eq!(array.is_valid(2), true);
+ assert!(array.is_valid(0));
+ assert!(!array.is_valid(1));
+ assert!(array.is_valid(2));
let array2 = PrimitiveArray::::from_data(
DataType::Int32,
diff --git a/src/array/utf8/mod.rs b/src/array/utf8/mod.rs
index 314b0c4e150..11f6ebdd10e 100644
--- a/src/array/utf8/mod.rs
+++ b/src/array/utf8/mod.rs
@@ -222,9 +222,9 @@ mod tests {
array.validity(),
&Some(Bitmap::from_u8_slice(&[0b00000101], 3))
);
- assert_eq!(array.is_valid(0), true);
- assert_eq!(array.is_valid(1), false);
- assert_eq!(array.is_valid(2), true);
+ assert!(array.is_valid(0));
+ assert!(!array.is_valid(1));
+ assert!(array.is_valid(2));
let array2 = Utf8Array::::from_data(
array.offsets().clone(),
diff --git a/src/bitmap/bitmap_ops.rs b/src/bitmap/bitmap_ops.rs
index 2b491c2e678..67b036ff004 100644
--- a/src/bitmap/bitmap_ops.rs
+++ b/src/bitmap/bitmap_ops.rs
@@ -174,7 +174,7 @@ impl Not for &Bitmap {
type Output = Bitmap;
fn not(self) -> Bitmap {
- unary(&self, |a| !a)
+ unary(self, |a| !a)
}
}
@@ -194,28 +194,28 @@ mod test {
fn test_eq() {
let lhs = create_bitmap([0b01101010], 8);
let rhs = create_bitmap([0b01001110], 8);
- assert_eq!(eq(&lhs, &rhs), false);
- assert_eq!(eq(&lhs, &lhs), true);
+ assert!(!eq(&lhs, &rhs));
+ assert!(eq(&lhs, &lhs));
}
#[test]
fn test_eq_len() {
let lhs = create_bitmap([0b01101010], 6);
let rhs = create_bitmap([0b00101010], 6);
- assert_eq!(eq(&lhs, &rhs), true);
+ assert!(eq(&lhs, &rhs));
let rhs = create_bitmap([0b00001010], 6);
- assert_eq!(eq(&lhs, &rhs), false);
+ assert!(!eq(&lhs, &rhs));
}
#[test]
fn test_eq_slice() {
let lhs = create_bitmap([0b10101010], 8).slice(1, 7);
let rhs = create_bitmap([0b10101011], 8).slice(1, 7);
- assert_eq!(eq(&lhs, &rhs), true);
+ assert!(eq(&lhs, &rhs));
let lhs = create_bitmap([0b10101010], 8).slice(2, 6);
let rhs = create_bitmap([0b10101110], 8).slice(2, 6);
- assert_eq!(eq(&lhs, &rhs), false);
+ assert!(!eq(&lhs, &rhs));
}
#[test]
diff --git a/src/bitmap/mutable.rs b/src/bitmap/mutable.rs
index 509d8f6cec9..f2b94e2709c 100644
--- a/src/bitmap/mutable.rs
+++ b/src/bitmap/mutable.rs
@@ -560,7 +560,7 @@ mod tests {
unsafe { b.extend_from_trusted_len_iter_unchecked(iter) };
let b: Bitmap = b.into();
let mut iter = b.iter().enumerate();
- assert_eq!(iter.next().unwrap().1, true);
+ assert!(iter.next().unwrap().1);
for (i, v) in iter {
assert_eq!((i - 1) % 6 == 0, v);
}
@@ -570,14 +570,14 @@ mod tests {
fn test_set() {
let mut bitmap = MutableBitmap::from_len_zeroed(12);
bitmap.set(0, true);
- assert_eq!(bitmap.get(0), true);
+ assert!(bitmap.get(0));
bitmap.set(0, false);
- assert_eq!(bitmap.get(0), false);
+ assert!(!bitmap.get(0));
bitmap.set(11, true);
- assert_eq!(bitmap.get(11), true);
+ assert!(bitmap.get(11));
bitmap.set(11, false);
- assert_eq!(bitmap.get(11), false);
+ assert!(!bitmap.get(11));
bitmap.set(11, true);
let bitmap: Option = bitmap.into();
diff --git a/src/bitmap/utils/mod.rs b/src/bitmap/utils/mod.rs
index ab43b25d829..db284b28d76 100644
--- a/src/bitmap/utils/mod.rs
+++ b/src/bitmap/utils/mod.rs
@@ -91,21 +91,21 @@ mod tests {
0b01000000, 0b11111111,
];
for i in 0..8 {
- assert_eq!(get_bit(input, i), false);
+ assert!(!get_bit(input, i));
}
- assert_eq!(get_bit(input, 8), true);
+ assert!(get_bit(input, 8));
for i in 8 + 1..2 * 8 {
- assert_eq!(get_bit(input, i), false);
+ assert!(!get_bit(input, i));
}
- assert_eq!(get_bit(input, 2 * 8 + 1), true);
+ assert!(get_bit(input, 2 * 8 + 1));
for i in 2 * 8 + 2..3 * 8 {
- assert_eq!(get_bit(input, i), false);
+ assert!(!get_bit(input, i));
}
- assert_eq!(get_bit(input, 3 * 8 + 2), true);
+ assert!(get_bit(input, 3 * 8 + 2));
for i in 3 * 8 + 3..4 * 8 {
- assert_eq!(get_bit(input, i), false);
+ assert!(!get_bit(input, i));
}
- assert_eq!(get_bit(input, 4 * 8 + 3), true);
+ assert!(get_bit(input, 4 * 8 + 3));
}
#[test]
diff --git a/src/buffer/immutable.rs b/src/buffer/immutable.rs
index 0a7ef6a6e08..065bfa47ba1 100644
--- a/src/buffer/immutable.rs
+++ b/src/buffer/immutable.rs
@@ -171,14 +171,14 @@ mod tests {
fn test_new() {
let buffer = Buffer::::new();
assert_eq!(buffer.len(), 0);
- assert_eq!(buffer.is_empty(), true);
+ assert!(buffer.is_empty());
}
#[test]
fn test_new_zeroed() {
let buffer = Buffer::::new_zeroed(2);
assert_eq!(buffer.len(), 2);
- assert_eq!(buffer.is_empty(), false);
+ assert!(!buffer.is_empty());
assert_eq!(buffer.as_slice(), &[0, 0]);
}
diff --git a/src/buffer/mutable.rs b/src/buffer/mutable.rs
index 911bf7f18f1..96e19879602 100644
--- a/src/buffer/mutable.rs
+++ b/src/buffer/mutable.rs
@@ -621,21 +621,21 @@ mod tests {
fn default() {
let b = MutableBuffer::::default();
assert_eq!(b.len(), 0);
- assert_eq!(b.is_empty(), true);
+ assert!(b.is_empty());
}
#[test]
fn with_capacity() {
let b = MutableBuffer::::with_capacity(6);
assert!(b.capacity() >= 6);
- assert_eq!(b.is_empty(), true);
+ assert!(b.is_empty());
}
#[test]
fn from_len_zeroed() {
let b = MutableBuffer::::from_len_zeroed(3);
assert_eq!(b.len(), 3);
- assert_eq!(b.is_empty(), false);
+ assert!(!b.is_empty());
assert_eq!(b.as_slice(), &[0, 0, 0]);
}
diff --git a/src/compute/aggregate/min_max.rs b/src/compute/aggregate/min_max.rs
index b47573781a2..fdc08f05a30 100644
--- a/src/compute/aggregate/min_max.rs
+++ b/src/compute/aggregate/min_max.rs
@@ -34,7 +34,7 @@ fn min_max_string bool>(
for i in 0..array.len() {
let item = array.value(i);
- if validity.get_bit(i) && (!has_value || cmp(&n, item)) {
+ if validity.get_bit(i) && (!has_value || cmp(n, item)) {
has_value = true;
n = item;
}
@@ -45,7 +45,7 @@ fn min_max_string bool>(
for i in 1..array.len() {
// loop is up to `len`.
let item = unsafe { array.value_unchecked(i) };
- if cmp(&n, item) {
+ if cmp(n, item) {
n = item;
}
}
diff --git a/src/compute/arithmetics/time.rs b/src/compute/arithmetics/time.rs
index 4bdb5354ef5..509fbc4a38f 100644
--- a/src/compute/arithmetics/time.rs
+++ b/src/compute/arithmetics/time.rs
@@ -496,7 +496,7 @@ mod tests {
let expected = PrimitiveArray::from(&vec![Some(10i64), Some(20i64), None, Some(30i64)])
.to(DataType::Duration(TimeUnit::Second));
- let result = subtract_timestamps(&timestamp_a, &&timestamp_b).unwrap();
+ let result = subtract_timestamps(&timestamp_a, &timestamp_b).unwrap();
assert_eq!(result, expected);
}
@@ -526,7 +526,7 @@ mod tests {
])
.to(DataType::Duration(TimeUnit::Millisecond));
- let result = subtract_timestamps(&timestamp_a, &&timestamp_b).unwrap();
+ let result = subtract_timestamps(&timestamp_a, &timestamp_b).unwrap();
assert_eq!(result, expected);
}
diff --git a/src/compute/arity.rs b/src/compute/arity.rs
index 2cd56d9a88a..0e99c488e16 100644
--- a/src/compute/arity.rs
+++ b/src/compute/arity.rs
@@ -120,7 +120,7 @@ where
// the iteration, then the validity is changed to None to mark the value
// as Null
let bitmap: Bitmap = mut_bitmap.into();
- let validity = combine_validities(&array.validity(), &Some(bitmap));
+ let validity = combine_validities(array.validity(), &Some(bitmap));
PrimitiveArray::::from_data(data_type, values, validity)
}
diff --git a/src/compute/boolean.rs b/src/compute/boolean.rs
index 1ce6032e72d..9fe1d0aac6f 100644
--- a/src/compute/boolean.rs
+++ b/src/compute/boolean.rs
@@ -37,7 +37,7 @@ where
let left_buffer = lhs.values();
let right_buffer = rhs.values();
- let values = op(&left_buffer, &right_buffer);
+ let values = op(left_buffer, right_buffer);
Ok(BooleanArray::from_data(values, validity))
}
@@ -60,7 +60,7 @@ where
/// # }
/// ```
pub fn and(lhs: &BooleanArray, rhs: &BooleanArray) -> Result {
- binary_boolean_kernel(&lhs, &rhs, |lhs, rhs| lhs & rhs)
+ binary_boolean_kernel(lhs, rhs, |lhs, rhs| lhs & rhs)
}
/// Performs `OR` operation on two arrays. If either left or right value is null then the
@@ -81,7 +81,7 @@ pub fn and(lhs: &BooleanArray, rhs: &BooleanArray) -> Result {
/// # }
/// ```
pub fn or(lhs: &BooleanArray, rhs: &BooleanArray) -> Result {
- binary_boolean_kernel(&lhs, &rhs, |lhs, rhs| lhs | rhs)
+ binary_boolean_kernel(lhs, rhs, |lhs, rhs| lhs | rhs)
}
/// Performs unary `NOT` operation on an arrays. If value is null then the result is also
@@ -352,7 +352,7 @@ mod tests {
let b = b.slice(2, 4);
let b = b.as_any().downcast_ref::().unwrap();
- let c = and(&a, &b).unwrap();
+ let c = and(a, b).unwrap();
let expected = BooleanArray::from(vec![Some(false), Some(false), None, Some(true)]);
diff --git a/src/compute/boolean_kleene.rs b/src/compute/boolean_kleene.rs
index 788a6344f9a..1cdd2b41c7e 100644
--- a/src/compute/boolean_kleene.rs
+++ b/src/compute/boolean_kleene.rs
@@ -37,8 +37,8 @@ pub fn or(lhs: &BooleanArray, rhs: &BooleanArray) -> Result {
let validity = match (lhs_validity, rhs_validity) {
(Some(lhs_validity), Some(rhs_validity)) => {
Some(quaternary(
- &lhs_values,
- &rhs_values,
+ lhs_values,
+ rhs_values,
lhs_validity,
rhs_validity,
// see https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_and_Priest_logics
@@ -55,8 +55,8 @@ pub fn or(lhs: &BooleanArray, rhs: &BooleanArray) -> Result {
(Some(lhs_validity), None) => {
// B != U
Some(ternary(
- &lhs_values,
- &rhs_values,
+ lhs_values,
+ rhs_values,
lhs_validity,
// see https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_and_Priest_logics
|lhs, rhs, lhs_v| {
@@ -71,8 +71,8 @@ pub fn or(lhs: &BooleanArray, rhs: &BooleanArray) -> Result {
}
(None, Some(rhs_validity)) => {
Some(ternary(
- &lhs_values,
- &rhs_values,
+ lhs_values,
+ rhs_values,
rhs_validity,
// see https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_and_Priest_logics
|lhs, rhs, rhs_v| {
@@ -123,8 +123,8 @@ pub fn and(lhs: &BooleanArray, rhs: &BooleanArray) -> Result {
let validity = match (lhs_validity, rhs_validity) {
(Some(lhs_validity), Some(rhs_validity)) => {
Some(quaternary(
- &lhs_values,
- &rhs_values,
+ lhs_values,
+ rhs_values,
lhs_validity,
rhs_validity,
// see https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_and_Priest_logics
@@ -140,8 +140,8 @@ pub fn and(lhs: &BooleanArray, rhs: &BooleanArray) -> Result {
}
(Some(lhs_validity), None) => {
Some(ternary(
- &lhs_values,
- &rhs_values,
+ lhs_values,
+ rhs_values,
lhs_validity,
// see https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_and_Priest_logics
|lhs, rhs, lhs_v| {
@@ -156,8 +156,8 @@ pub fn and(lhs: &BooleanArray, rhs: &BooleanArray) -> Result {
(None, Some(rhs_validity)) => {
Some(ternary(
- &lhs_values,
- &rhs_values,
+ lhs_values,
+ rhs_values,
rhs_validity,
// see https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_and_Priest_logics
|lhs, rhs, rhs_v| {
diff --git a/src/compute/cast/primitive_to.rs b/src/compute/cast/primitive_to.rs
index 8041d498855..dd34d93461a 100644
--- a/src/compute/cast/primitive_to.rs
+++ b/src/compute/cast/primitive_to.rs
@@ -205,8 +205,8 @@ pub fn time64_to_time32(
from_unit: &TimeUnit,
to_unit: &TimeUnit,
) -> PrimitiveArray {
- let from_size = time_unit_multiple(&from_unit);
- let to_size = time_unit_multiple(&to_unit);
+ let from_size = time_unit_multiple(from_unit);
+ let to_size = time_unit_multiple(to_unit);
let divisor = from_size / to_size;
unary(
from,
@@ -221,8 +221,8 @@ pub fn timestamp_to_timestamp(
to_unit: &TimeUnit,
tz: &Option,
) -> PrimitiveArray {
- let from_size = time_unit_multiple(&from_unit);
- let to_size = time_unit_multiple(&to_unit);
+ let from_size = time_unit_multiple(from_unit);
+ let to_size = time_unit_multiple(to_unit);
let to_type = DataType::Timestamp(to_unit.clone(), tz.clone());
// we either divide or multiply, depending on size of each unit
if from_size >= to_size {
diff --git a/src/compute/cast/timestamps.rs b/src/compute/cast/timestamps.rs
index 79335fee51b..354dd652a80 100644
--- a/src/compute/cast/timestamps.rs
+++ b/src/compute/cast/timestamps.rs
@@ -208,7 +208,7 @@ mod tests {
// Note: Use chrono APIs that are different than
// naive_datetime_to_timestamp to compute the utc offset to
// try and double check the logic
- let utc_offset_secs = match Local.offset_from_local_datetime(&naive_datetime) {
+ let utc_offset_secs = match Local.offset_from_local_datetime(naive_datetime) {
LocalResult::Single(local_offset) => local_offset.fix().local_minus_utc() as i64,
_ => panic!("Unexpected failure converting to local datetime"),
};
diff --git a/src/compute/comparison/mod.rs b/src/compute/comparison/mod.rs
index d7d1857f3fd..8a8697e6a26 100644
--- a/src/compute/comparison/mod.rs
+++ b/src/compute/comparison/mod.rs
@@ -236,11 +236,10 @@ mod tests {
datatypes.into_iter().for_each(|d1| {
let array = new_null_array(d1.clone(), 10);
+ let op = Operator::Eq;
if can_compare(&d1) {
- let op = Operator::Eq;
assert!(compare(array.as_ref(), array.as_ref(), op).is_ok());
} else {
- let op = Operator::Eq;
assert!(compare(array.as_ref(), array.as_ref(), op).is_err());
}
});
diff --git a/src/compute/filter.rs b/src/compute/filter.rs
index d69d0b9b9c0..cc3d837c7a7 100644
--- a/src/compute/filter.rs
+++ b/src/compute/filter.rs
@@ -316,9 +316,9 @@ mod tests {
assert_eq!(67, d.len());
assert_eq!(3, d.null_count());
assert_eq!(1, d.value(0));
- assert_eq!(true, d.is_null(1));
+ assert!(d.is_null(1));
assert_eq!(64, d.value(63));
- assert_eq!(true, d.is_null(64));
+ assert!(d.is_null(64));
assert_eq!(67, d.value(65));
}
@@ -344,7 +344,7 @@ mod tests {
let c = filter(&a, &b).unwrap();
let d = c.as_ref().as_any().downcast_ref::().unwrap();
assert_eq!(1, d.len());
- assert_eq!(true, d.is_null(0));
+ assert!(d.is_null(0));
}
#[test]
@@ -359,8 +359,8 @@ mod tests {
.unwrap();
assert_eq!(2, d.len());
assert_eq!("hello", d.value(0));
- assert_eq!(false, d.is_null(0));
- assert_eq!(true, d.is_null(1));
+ assert!(!d.is_null(0));
+ assert!(d.is_null(1));
}
#[test]
@@ -376,8 +376,8 @@ mod tests {
.unwrap();
assert_eq!(2, d.len());
assert_eq!(b"hello", d.value(0));
- assert_eq!(false, d.is_null(0));
- assert_eq!(true, d.is_null(1));
+ assert!(!d.is_null(0));
+ assert!(d.is_null(1));
}
/*
diff --git a/src/compute/regex_match.rs b/src/compute/regex_match.rs
index f8e2f9f2b59..3e8d8220b67 100644
--- a/src/compute/regex_match.rs
+++ b/src/compute/regex_match.rs
@@ -104,7 +104,7 @@ mod tests {
) {
let lhs = Utf8Array::::from_slice(lhs);
let expected = BooleanArray::from_slice(expected);
- let result = op(&lhs, &pattern).unwrap();
+ let result = op(&lhs, pattern).unwrap();
assert_eq!(result, expected);
}
diff --git a/src/compute/sort/mod.rs b/src/compute/sort/mod.rs
index 12734505694..4c28712a327 100644
--- a/src/compute/sort/mod.rs
+++ b/src/compute/sort/mod.rs
@@ -112,7 +112,7 @@ pub fn sort_to_indices(values: &dyn Array, options: &SortOptions) -> Result {
let (v, n) = partition_validity(values);
- Ok(sort_boolean(values, v, n, &options))
+ Ok(sort_boolean(values, v, n, options))
}
DataType::Int8 => dyn_sort_indices!(i8, values, ord::total_cmp, options),
DataType::Int16 => dyn_sort_indices!(i16, values, ord::total_cmp, options),
@@ -138,23 +138,23 @@ pub fn sort_to_indices(values: &dyn Array, options: &SortOptions) -> Result {
let (v, n) = partition_validity(values);
- Ok(sort_utf8::(values, v, n, &options))
+ Ok(sort_utf8::(values, v, n, options))
}
DataType::LargeUtf8 => {
let (v, n) = partition_validity(values);
- Ok(sort_utf8::(values, v, n, &options))
+ Ok(sort_utf8::(values, v, n, options))
}
DataType::List(field) => {
let (v, n) = partition_validity(values);
match field.data_type() {
- DataType::Int8 => Ok(sort_list::(values, v, n, &options)),
- DataType::Int16 => Ok(sort_list::(values, v, n, &options)),
- DataType::Int32 => Ok(sort_list::(values, v, n, &options)),
- DataType::Int64 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt8 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt16 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt32 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt64 => Ok(sort_list::(values, v, n, &options)),
+ DataType::Int8 => Ok(sort_list::(values, v, n, options)),
+ DataType::Int16 => Ok(sort_list::(values, v, n, options)),
+ DataType::Int32 => Ok(sort_list::(values, v, n, options)),
+ DataType::Int64 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt8 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt16 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt32 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt64 => Ok(sort_list::(values, v, n, options)),
t => Err(ArrowError::NotYetImplemented(format!(
"Sort not supported for list type {:?}",
t
@@ -164,14 +164,14 @@ pub fn sort_to_indices(values: &dyn Array, options: &SortOptions) -> Result {
let (v, n) = partition_validity(values);
match field.data_type() {
- DataType::Int8 => Ok(sort_list::(values, v, n, &options)),
- DataType::Int16 => Ok(sort_list::(values, v, n, &options)),
- DataType::Int32 => Ok(sort_list::(values, v, n, &options)),
- DataType::Int64 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt8 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt16 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt32 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt64 => Ok(sort_list::(values, v, n, &options)),
+ DataType::Int8 => Ok(sort_list::(values, v, n, options)),
+ DataType::Int16 => Ok(sort_list::(values, v, n, options)),
+ DataType::Int32 => Ok(sort_list::(values, v, n, options)),
+ DataType::Int64 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt8 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt16 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt32 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt64 => Ok(sort_list::(values, v, n, options)),
t => Err(ArrowError::NotYetImplemented(format!(
"Sort not supported for list type {:?}",
t
@@ -181,14 +181,14 @@ pub fn sort_to_indices(values: &dyn Array, options: &SortOptions) -> Result {
let (v, n) = partition_validity(values);
match field.data_type() {
- DataType::Int8 => Ok(sort_list::(values, v, n, &options)),
- DataType::Int16 => Ok(sort_list::(values, v, n, &options)),
- DataType::Int32 => Ok(sort_list::(values, v, n, &options)),
- DataType::Int64 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt8 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt16 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt32 => Ok(sort_list::(values, v, n, &options)),
- DataType::UInt64 => Ok(sort_list::(values, v, n, &options)),
+ DataType::Int8 => Ok(sort_list::(values, v, n, options)),
+ DataType::Int16 => Ok(sort_list::(values, v, n, options)),
+ DataType::Int32 => Ok(sort_list::(values, v, n, options)),
+ DataType::Int64 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt8 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt16 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt32 => Ok(sort_list::(values, v, n, options)),
+ DataType::UInt64 => Ok(sort_list::(values, v, n, options)),
t => Err(ArrowError::NotYetImplemented(format!(
"Sort not supported for list type {:?}",
t
@@ -198,14 +198,14 @@ pub fn sort_to_indices(values: &dyn Array, options: &SortOptions) -> Result {
let (v, n) = partition_validity(values);
match key_type.as_ref() {
- DataType::Int8 => Ok(sort_string_dictionary::(values, v, n, &options)),
- DataType::Int16 => Ok(sort_string_dictionary::(values, v, n, &options)),
- DataType::Int32 => Ok(sort_string_dictionary::(values, v, n, &options)),
- DataType::Int64 => Ok(sort_string_dictionary::(values, v, n, &options)),
- DataType::UInt8 => Ok(sort_string_dictionary::(values, v, n, &options)),
- DataType::UInt16 => Ok(sort_string_dictionary::(values, v, n, &options)),
- DataType::UInt32 => Ok(sort_string_dictionary::(values, v, n, &options)),
- DataType::UInt64 => Ok(sort_string_dictionary::(values, v, n, &options)),
+ DataType::Int8 => Ok(sort_string_dictionary::(values, v, n, options)),
+ DataType::Int16 => Ok(sort_string_dictionary::(values, v, n, options)),
+ DataType::Int32 => Ok(sort_string_dictionary::(values, v, n, options)),
+ DataType::Int64 => Ok(sort_string_dictionary::(values, v, n, options)),
+ DataType::UInt8 => Ok(sort_string_dictionary::(values, v, n, options)),
+ DataType::UInt16 => Ok(sort_string_dictionary::(values, v, n, options)),
+ DataType::UInt32 => Ok(sort_string_dictionary::(values, v, n, options)),
+ DataType::UInt64 => Ok(sort_string_dictionary::(values, v, n, options)),
t => Err(ArrowError::NotYetImplemented(format!(
"Sort not supported for dictionary key type {:?}",
t
@@ -407,7 +407,7 @@ where
{
let mut valids = value_indices
.into_iter()
- .map(|index| (index, value_fn(&values, index)))
+ .map(|index| (index, value_fn(values, index)))
.collect::>();
let mut nulls = null_indices;
if !options.descending {
@@ -1095,17 +1095,13 @@ mod tests {
datatypes.into_iter().for_each(|d1| {
let array = new_null_array(d1.clone(), 10);
+ let options = SortOptions {
+ descending: true,
+ nulls_first: true,
+ };
if can_sort(&d1) {
- let options = SortOptions {
- descending: true,
- nulls_first: true,
- };
assert!(sort(array.as_ref(), &options).is_ok());
} else {
- let options = SortOptions {
- descending: true,
- nulls_first: true,
- };
assert!(sort(array.as_ref(), &options).is_err());
}
});
diff --git a/src/compute/take/mod.rs b/src/compute/take/mod.rs
index 06dc7e990b4..8723167f230 100644
--- a/src/compute/take/mod.rs
+++ b/src/compute/take/mod.rs
@@ -334,11 +334,10 @@ mod tests {
datatypes.into_iter().for_each(|d1| {
let array = new_null_array(d1.clone(), 10);
+ let indices = Int32Array::from(&[Some(1), Some(2), None, Some(3)]);
if can_take(&d1) {
- let indices = Int32Array::from(&[Some(1), Some(2), None, Some(3)]);
assert!(take(array.as_ref(), &indices).is_ok());
} else {
- let indices = Int32Array::from(&[Some(1), Some(2), None, Some(3)]);
assert!(take(array.as_ref(), &indices).is_err());
}
});
diff --git a/src/datatypes/field.rs b/src/datatypes/field.rs
index b83bb960b05..205a9a041dc 100644
--- a/src/datatypes/field.rs
+++ b/src/datatypes/field.rs
@@ -187,7 +187,7 @@ impl Field {
continue;
}
is_new_field = false;
- self_field.try_merge(&from_field)?;
+ self_field.try_merge(from_field)?;
}
if is_new_field {
nested_fields.push(from_field.clone());
diff --git a/src/ffi/ffi.rs b/src/ffi/ffi.rs
index 725dc42cbec..70ca45409b0 100644
--- a/src/ffi/ffi.rs
+++ b/src/ffi/ffi.rs
@@ -607,7 +607,7 @@ impl ArrowArrayRef for Arc {
}
fn parent(&self) -> &Arc {
- &self
+ self
}
fn array(&self) -> &Ffi_ArrowArray {
diff --git a/src/io/csv/read/deserialize.rs b/src/io/csv/read/deserialize.rs
index 351674ead75..84fc1bead6e 100644
--- a/src/io/csv/read/deserialize.rs
+++ b/src/io/csv/read/deserialize.rs
@@ -174,7 +174,7 @@ where
F: Fn(&[ByteRecord], usize, DataType, usize) -> Result>,
{
let projection: Vec = match projection {
- Some(ref v) => v.to_vec(),
+ Some(v) => v.to_vec(),
None => fields.iter().enumerate().map(|(i, _)| i).collect(),
};
let projected_fields: Vec = projection.iter().map(|i| fields[*i].clone()).collect();
diff --git a/src/io/csv/read/infer_schema.rs b/src/io/csv/read/infer_schema.rs
index b52f1bf721f..be48e3ef677 100644
--- a/src/io/csv/read/infer_schema.rs
+++ b/src/io/csv/read/infer_schema.rs
@@ -69,7 +69,7 @@ pub fn infer_schema DataType>(
match possibilities.len() {
1 => {
for dtype in possibilities.iter() {
- fields.push(Field::new(&field_name, dtype.clone(), true));
+ fields.push(Field::new(field_name, dtype.clone(), true));
}
}
2 => {
@@ -77,13 +77,13 @@ pub fn infer_schema DataType>(
&& possibilities.contains(&DataType::Float64)
{
// we have an integer and double, fall down to double
- fields.push(Field::new(&field_name, DataType::Float64, true));
+ fields.push(Field::new(field_name, DataType::Float64, true));
} else {
// default to Utf8 for conflicting datatypes (e.g bool and int)
- fields.push(Field::new(&field_name, DataType::Utf8, true));
+ fields.push(Field::new(field_name, DataType::Utf8, true));
}
}
- _ => fields.push(Field::new(&field_name, DataType::Utf8, true)),
+ _ => fields.push(Field::new(field_name, DataType::Utf8, true)),
}
}
diff --git a/src/io/ipc/read/deserialize.rs b/src/io/ipc/read/deserialize.rs
index a8105270f58..8393a668c95 100644
--- a/src/io/ipc/read/deserialize.rs
+++ b/src/io/ipc/read/deserialize.rs
@@ -303,7 +303,7 @@ where
let validity = read_validity(
buffers,
- &field_node,
+ field_node,
reader,
block_offset,
is_little_endian,
@@ -336,7 +336,7 @@ fn read_boolean(
let length = field_node.length() as usize;
let validity = read_validity(
buffers,
- &field_node,
+ field_node,
reader,
block_offset,
is_little_endian,
@@ -371,7 +371,7 @@ where
let validity = read_validity(
buffers,
- &field_node,
+ field_node,
reader,
block_offset,
is_little_endian,
@@ -417,7 +417,7 @@ where
let validity = read_validity(
buffers,
- &field_node,
+ field_node,
reader,
block_offset,
is_little_endian,
@@ -461,7 +461,7 @@ fn read_fixed_size_binary(
let validity = read_validity(
buffers,
- &field_node,
+ field_node,
reader,
block_offset,
is_little_endian,
@@ -502,7 +502,7 @@ where
let validity = read_validity(
buffers,
- &field_node,
+ field_node,
reader,
block_offset,
is_little_endian,
@@ -549,7 +549,7 @@ fn read_fixed_size_list(
let validity = read_validity(
buffers,
- &field_node,
+ field_node,
reader,
block_offset,
is_little_endian,
@@ -585,7 +585,7 @@ fn read_struct(
let validity = read_validity(
buffers,
- &field_node,
+ field_node,
reader,
block_offset,
is_little_endian,
diff --git a/src/io/ipc/read/reader.rs b/src/io/ipc/read/reader.rs
index 870de58f472..8860426baed 100644
--- a/src/io/ipc/read/reader.rs
+++ b/src/io/ipc/read/reader.rs
@@ -115,12 +115,10 @@ pub fn read_file_metadata(reader: &mut R) -> Result(
batch,
metadata.schema.clone(),
metadata.is_little_endian,
- &dictionaries_by_field,
+ dictionaries_by_field,
&mut reader,
0,
)
diff --git a/src/io/ipc/write/common.rs b/src/io/ipc/write/common.rs
index 2b54e4a98dd..fd7f820379d 100644
--- a/src/io/ipc/write/common.rs
+++ b/src/io/ipc/write/common.rs
@@ -386,7 +386,7 @@ pub fn write_message(
let aligned_size = (flatbuf_size + prefix_size + a) & !a;
let padding_bytes = aligned_size - flatbuf_size - prefix_size;
- write_continuation(writer, &write_options, (aligned_size - prefix_size) as i32)?;
+ write_continuation(writer, write_options, (aligned_size - prefix_size) as i32)?;
// write the flatbuf
if flatbuf_size > 0 {
diff --git a/src/io/json/read/reader.rs b/src/io/json/read/reader.rs
index 69c1ed721ae..c78fb92e2e6 100644
--- a/src/io/json/read/reader.rs
+++ b/src/io/json/read/reader.rs
@@ -341,8 +341,8 @@ mod tests {
.as_any()
.downcast_ref::()
.unwrap();
- assert_eq!(false, cc.value(0));
- assert_eq!(true, cc.value(10));
+ assert!(!cc.value(0));
+ assert!(cc.value(10));
let dd = batch
.column(d.0)
.as_any()
.downcast_ref::()
.unwrap();
@@ -381,34 +381,34 @@ mod tests {
.as_any()
.downcast_ref::>()
.unwrap();
- assert_eq!(true, aa.is_valid(0));
- assert_eq!(false, aa.is_valid(1));
- assert_eq!(false, aa.is_valid(11));
+ assert!(aa.is_valid(0));
+ assert!(!aa.is_valid(1));
+ assert!(!aa.is_valid(11));
let bb = batch
.column(b.0)
.as_any()
.downcast_ref::>()
.unwrap();
- assert_eq!(true, bb.is_valid(0));
- assert_eq!(false, bb.is_valid(2));
- assert_eq!(false, bb.is_valid(11));
+ assert!(bb.is_valid(0));
+ assert!(!bb.is_valid(2));
+ assert!(!bb.is_valid(11));
let cc = batch
.column(c.0)
.as_any()
.downcast_ref::()
.unwrap();
- assert_eq!(true, cc.is_valid(0));
- assert_eq!(false, cc.is_valid(4));
- assert_eq!(false, cc.is_valid(11));
+ assert!(cc.is_valid(0));
+ assert!(!cc.is_valid(4));
+ assert!(!cc.is_valid(11));
let dd = batch
.column(d.0)
.as_any()
.downcast_ref::>()
.unwrap();
- assert_eq!(false, dd.is_valid(0));
- assert_eq!(true, dd.is_valid(1));
- assert_eq!(false, dd.is_valid(4));
- assert_eq!(false, dd.is_valid(11));
+ assert!(!dd.is_valid(0));
+ assert!(dd.is_valid(1));
+ assert!(!dd.is_valid(4));
+ assert!(!dd.is_valid(11));
}
#[test]
@@ -451,7 +451,7 @@ mod tests {
.unwrap();
assert_eq!(1, aa.value(0));
// test that a 64bit value is returned as null due to overflowing
- assert_eq!(false, aa.is_valid(11));
+ assert!(!aa.is_valid(11));
let bb = batch
.column(b.0)
.as_any()
@@ -546,7 +546,7 @@ mod tests {
assert_eq!(9, bb.len());
assert!((2.0 - bb.value(0)).abs() < f64::EPSILON);
assert!((-6.1 - bb.value(5)).abs() < f64::EPSILON);
- assert_eq!(false, bb.is_valid(7));
+ assert!(!bb.is_valid(7));
let cc = batch
.column(c.0)
.as_any()
.downcast_ref::()
.unwrap();
let cc = cc.values();
let cc = cc.as_any().downcast_ref::().unwrap();
assert_eq!(6, cc.len());
- assert_eq!(false, cc.value(0));
- assert_eq!(false, cc.value(4));
- assert_eq!(false, cc.is_valid(5));
+ assert!(!cc.value(0));
+ assert!(!cc.value(4));
+ assert!(!cc.is_valid(5));
}
#[test]
diff --git a/src/io/json_integration/read.rs b/src/io/json_integration/read.rs
index 266c35aa2df..6063335a95b 100644
--- a/src/io/json_integration/read.rs
+++ b/src/io/json_integration/read.rs
@@ -181,7 +181,7 @@ fn to_list(
let child_field = ListArray::::get_child_field(&data_type);
let children = &json_col.children.as_ref().unwrap()[0];
- let values = to_array(&child_field, children, dictionaries)?;
+ let values = to_array(child_field, children, dictionaries)?;
let offsets = to_offsets::(json_col.offset.as_ref());
Ok(Arc::new(ListArray::::from_data(
data_type, offsets, values, validity,
@@ -290,7 +290,7 @@ pub fn to_array(
let validity = to_validity(&json_col.validity);
let children = &json_col.children.as_ref().unwrap()[0];
- let values = to_array(&child_field, children, dictionaries)?;
+ let values = to_array(child_field, children, dictionaries)?;
Ok(Arc::new(FixedSizeListArray::from_data(
data_type.clone(),
@@ -337,7 +337,7 @@ pub fn to_record_batch(
.fields()
.iter()
.zip(&json_batch.columns)
- .map(|(field, json_col)| to_array(field, &json_col, json_dictionaries))
+ .map(|(field, json_col)| to_array(field, json_col, json_dictionaries))
.collect::>>()?;
RecordBatch::try_new(Arc::new(schema.clone()), columns)
diff --git a/src/io/parquet/mod.rs b/src/io/parquet/mod.rs
index def7c45e529..1150fec4db3 100644
--- a/src/io/parquet/mod.rs
+++ b/src/io/parquet/mod.rs
@@ -419,7 +419,7 @@ mod tests_integration {
version: Version::V1,
};
- let parquet_schema = to_parquet_schema(&schema)?;
+ let parquet_schema = to_parquet_schema(schema)?;
let descritors = parquet_schema.columns().to_vec().into_iter();
let row_groups = batches.iter().map(|batch| {
diff --git a/src/io/parquet/read/binary/basic.rs b/src/io/parquet/read/binary/basic.rs
index 2ca0ec03e19..535741798a7 100644
--- a/src/io/parquet/read/binary/basic.rs
+++ b/src/io/parquet/read/binary/basic.rs
@@ -35,14 +35,14 @@ fn read_dict_buffer(
let bit_width = indices_buffer[0];
let indices_buffer = &indices_buffer[1..];
- let (_, consumed) = uleb128::decode(&indices_buffer);
+ let (_, consumed) = uleb128::decode(indices_buffer);
let indices_buffer = &indices_buffer[consumed..];
let non_null_indices_len = indices_buffer.len() * 8 / bit_width as usize;
let mut indices = bitpacking::Decoder::new(indices_buffer, bit_width, non_null_indices_len);
- let validity_iterator = hybrid_rle::Decoder::new(&validity_buffer, 1);
+ let validity_iterator = hybrid_rle::Decoder::new(validity_buffer, 1);
for run in validity_iterator {
match run {
@@ -96,7 +96,7 @@ fn read_delta_optional(
// values_buffer: first 4 bytes are len, remaining is values
let mut values_iterator = delta_length_byte_array::Decoder::new(values_buffer);
- let validity_iterator = hybrid_rle::Decoder::new(&validity_buffer, 1);
+ let validity_iterator = hybrid_rle::Decoder::new(validity_buffer, 1);
// offsets:
for run in validity_iterator {
match run {
@@ -148,7 +148,7 @@ fn read_plain_optional(
// values_buffer: first 4 bytes are len, remaining is values
let mut values_iterator = utils::BinaryIter::new(values_buffer);
- let validity_iterator = hybrid_rle::Decoder::new(&validity_buffer, 1);
+ let validity_iterator = hybrid_rle::Decoder::new(validity_buffer, 1);
for run in validity_iterator {
match run {
diff --git a/src/io/parquet/read/boolean/basic.rs b/src/io/parquet/read/boolean/basic.rs
index 06d3caf5483..29f959e49e3 100644
--- a/src/io/parquet/read/boolean/basic.rs
+++ b/src/io/parquet/read/boolean/basic.rs
@@ -23,7 +23,7 @@ fn read_optional(
values: &mut MutableBitmap,
validity: &mut MutableBitmap,
) {
- let validity_iterator = hybrid_rle::Decoder::new(&validity_buffer, 1);
+ let validity_iterator = hybrid_rle::Decoder::new(validity_buffer, 1);
// in PLAIN, booleans are LSB bitpacked and thus we can read them as if they were a bitmap.
// note that `values_buffer` contains only non-null values.
diff --git a/src/io/parquet/read/fixed_size_binary.rs b/src/io/parquet/read/fixed_size_binary.rs
index 9356d49395b..9408aaa7d2a 100644
--- a/src/io/parquet/read/fixed_size_binary.rs
+++ b/src/io/parquet/read/fixed_size_binary.rs
@@ -34,7 +34,7 @@ pub(crate) fn read_dict_buffer(
let bit_width = indices_buffer[0];
let indices_buffer = &indices_buffer[1..];
- let (_, consumed) = uleb128::decode(&indices_buffer);
+ let (_, consumed) = uleb128::decode(indices_buffer);
let indices_buffer = &indices_buffer[consumed..];
let non_null_indices_len = (indices_buffer.len() * 8 / bit_width as usize) as u32;
@@ -42,7 +42,7 @@ pub(crate) fn read_dict_buffer(
let mut indices = bitpacking::Decoder::new(indices_buffer, bit_width, non_null_indices_len as usize);
- let validity_iterator = hybrid_rle::Decoder::new(&validity_buffer, 1);
+ let validity_iterator = hybrid_rle::Decoder::new(validity_buffer, 1);
for run in validity_iterator {
match run {
@@ -89,7 +89,7 @@ pub(crate) fn read_optional(
assert_eq!(values_buffer.len() % size, 0);
let mut values_iterator = values_buffer.chunks_exact(size);
- let validity_iterator = hybrid_rle::Decoder::new(&validity_buffer, 1);
+ let validity_iterator = hybrid_rle::Decoder::new(validity_buffer, 1);
for run in validity_iterator {
match run {
diff --git a/src/io/parquet/read/primitive/basic.rs b/src/io/parquet/read/primitive/basic.rs
index d136874c992..9cfaa7ced25 100644
--- a/src/io/parquet/read/primitive/basic.rs
+++ b/src/io/parquet/read/primitive/basic.rs
@@ -34,14 +34,14 @@ fn read_dict_buffer_optional(
let bit_width = indices_buffer[0];
let indices_buffer = &indices_buffer[1..];
- let (_, consumed) = uleb128::decode(&indices_buffer);
+ let (_, consumed) = uleb128::decode(indices_buffer);
let indices_buffer = &indices_buffer[consumed..];
let non_null_indices_len = indices_buffer.len() * 8 / bit_width as usize;
let mut indices = bitpacking::Decoder::new(indices_buffer, bit_width, non_null_indices_len);
- let validity_iterator = hybrid_rle::Decoder::new(&validity_buffer, 1);
+ let validity_iterator = hybrid_rle::Decoder::new(validity_buffer, 1);
for run in validity_iterator {
match run {
@@ -89,7 +89,7 @@ fn read_nullable(
{
let mut chunks = ExactChunksIter::::new(values_buffer);
- let validity_iterator = hybrid_rle::Decoder::new(&validity_buffer, 1);
+ let validity_iterator = hybrid_rle::Decoder::new(validity_buffer, 1);
for run in validity_iterator {
match run {
diff --git a/src/io/parquet/read/statistics/binary.rs b/src/io/parquet/read/statistics/binary.rs
index 682b7572b27..4f883fa6e11 100644
--- a/src/io/parquet/read/statistics/binary.rs
+++ b/src/io/parquet/read/statistics/binary.rs
@@ -57,12 +57,12 @@ impl TryFrom<&ParquetByteArrayStatistics> for Utf8Statistics {
min_value: stats
.min_value
.as_ref()
- .map(|x| std::str::from_utf8(&x).map(|x| x.to_string()))
+ .map(|x| std::str::from_utf8(x).map(|x| x.to_string()))
.transpose()?,
max_value: stats
.max_value
.as_ref()
- .map(|x| std::str::from_utf8(&x).map(|x| x.to_string()))
+ .map(|x| std::str::from_utf8(x).map(|x| x.to_string()))
.transpose()?,
})
}
diff --git a/src/io/parquet/write/levels.rs b/src/io/parquet/write/levels.rs
index f81422bced0..59b74b80d8f 100644
--- a/src/io/parquet/write/levels.rs
+++ b/src/io/parquet/write/levels.rs
@@ -164,7 +164,7 @@ impl<'a, O: Offset> NestedInfo<'a, O> {
}
pub fn offsets(&self) -> &'a [O] {
- &self.offsets
+ self.offsets
}
}
@@ -197,13 +197,13 @@ pub fn write_rep_levels(
match version {
Version::V1 => {
write_levels_v1(buffer, |buffer: &mut Vec| {
- let levels = RepLevelsIter::new(&nested.offsets);
+ let levels = RepLevelsIter::new(nested.offsets);
encode_u32(buffer, levels, num_bits)?;
Ok(())
})?;
}
Version::V2 => {
- let levels = RepLevelsIter::new(&nested.offsets);
+ let levels = RepLevelsIter::new(nested.offsets);
encode_u32(buffer, levels, num_bits)?;
}
@@ -224,13 +224,13 @@ pub fn write_def_levels(
match version {
Version::V1 => {
write_levels_v1(buffer, |buffer: &mut Vec| {
- let levels = DefLevelsIter::new(&nested.offsets, &nested.validity, validity);
+ let levels = DefLevelsIter::new(nested.offsets, nested.validity, validity);
encode_u32(buffer, levels, num_bits)?;
Ok(())
})?;
}
Version::V2 => {
- let levels = DefLevelsIter::new(&nested.offsets, &nested.validity, validity);
+ let levels = DefLevelsIter::new(nested.offsets, nested.validity, validity);
encode_u32(buffer, levels, num_bits)?;
}
}
diff --git a/src/io/parquet/write/mod.rs b/src/io/parquet/write/mod.rs
index deabba2742e..612c9f7b0f0 100644
--- a/src/io/parquet/write/mod.rs
+++ b/src/io/parquet/write/mod.rs
@@ -248,11 +248,11 @@ pub fn array_to_page(
),
DataType::Decimal(precision, _) => {
let precision = *precision;
+ let array = array
+ .as_any()
+ .downcast_ref::>()
+ .unwrap();
if precision <= 9 {
- let array = array
- .as_any()
- .downcast_ref::>()
- .unwrap();
let values = array.values().iter().map(|x| *x as i32);
let values = Buffer::from_trusted_len_iter(values);
let array = PrimitiveArray::::from_data(
@@ -262,10 +262,6 @@ pub fn array_to_page(
);
primitive::array_to_page::(&array, options, descriptor)
} else if precision <= 18 {
- let array = array
- .as_any()
- .downcast_ref::>()
- .unwrap();
let values = array.values().iter().map(|x| *x as i64);
let values = Buffer::from_trusted_len_iter(values);
let array = PrimitiveArray::::from_data(
@@ -275,10 +271,6 @@ pub fn array_to_page(
);
primitive::array_to_page::(&array, options, descriptor)
} else {
- let array = array
- .as_any()
- .downcast_ref::>()
- .unwrap();
let size = decimal_length_from_precision(precision);
let mut values = MutableBuffer::::new();
// todo: this can be estimated
diff --git a/src/io/parquet/write/schema.rs b/src/io/parquet/write/schema.rs
index a25aca99b29..b3169d8785e 100644
--- a/src/io/parquet/write/schema.rs
+++ b/src/io/parquet/write/schema.rs
@@ -13,7 +13,7 @@ use crate::{
use super::super::ARROW_SCHEMA_META_KEY;
pub fn schema_to_metadata_key(schema: &Schema) -> KeyValue {
- let serialized_schema = schema_to_bytes(&schema, MetadataVersion::V5);
+ let serialized_schema = schema_to_bytes(schema, MetadataVersion::V5);
// manually prepending the length to the schema as arrow uses the legacy IPC format
// TODO: change after addressing ARROW-9777
diff --git a/src/io/print.rs b/src/io/print.rs
index 9d4a6054407..cc4b9ac26ae 100644
--- a/src/io/print.rs
+++ b/src/io/print.rs
@@ -44,7 +44,7 @@ fn create_table(results: &[RecordBatch]) -> Result {
let mut header = Vec::new();
for field in schema.fields() {
- header.push(Cell::new(&field.name()));
+ header.push(Cell::new(field.name()));
}
table.set_titles(Row::new(header));