Fix up clippy for Rust 1.78 (#5710)
* Fix up clippy for Rust 1.78

* Should not be pub
alamb authored May 4, 2024
1 parent eb2d00b commit b3f06f6
Showing 9 changed files with 54 additions and 39 deletions.
11 changes: 10 additions & 1 deletion arrow-cast/src/parse.rs
@@ -435,11 +435,20 @@ impl Parser for Float64Type {
}
}

+/// This API is only stable since 1.70 so can't use it when current MSRV is lower
+#[inline(always)]
+fn is_some_and<T>(opt: Option<T>, f: impl FnOnce(T) -> bool) -> bool {
+    match opt {
+        None => false,
+        Some(x) => f(x),
+    }
+}

macro_rules! parser_primitive {
($t:ty) => {
impl Parser for $t {
fn parse(string: &str) -> Option<Self::Native> {
-if !string.as_bytes().last().is_some_and(|x| x.is_ascii_digit()) {
+if !is_some_and(string.as_bytes().last(), |x| x.is_ascii_digit()) {
return None;
}
match atoi::FromRadix10SignedChecked::from_radix_10_signed_checked(
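The helper added above is a local copy of `Option::is_some_and`, which only became stable in Rust 1.70 and therefore cannot be called directly while the crate's MSRV is lower; Rust 1.78's clippy appears to be what forced the change (likely its MSRV-aware check, though the lint name is not stated in the commit). A minimal, self-contained sketch of the equivalence:

```rust
// Minimal sketch: a local stand-in for Option::is_some_and (stable since Rust 1.70),
// for crates whose MSRV is older. The body mirrors the helper added in this commit.
fn is_some_and<T>(opt: Option<T>, f: impl FnOnce(T) -> bool) -> bool {
    match opt {
        None => false,
        Some(x) => f(x),
    }
}

fn main() {
    // On Rust >= 1.70 this is equivalent to e.g.
    //     "42".as_bytes().last().is_some_and(|b| b.is_ascii_digit())
    assert!(is_some_and("42".as_bytes().last(), |b| b.is_ascii_digit()));
    assert!(!is_some_and("abc".as_bytes().last(), |b| b.is_ascii_digit()));
    assert!(!is_some_and("".as_bytes().last(), |b| b.is_ascii_digit()));
}
```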
Expand Up @@ -40,7 +40,13 @@ struct Args {
arrow: String,
#[clap(short, long, help("Path to JSON file"))]
json: String,
-#[clap(value_enum, short, long, default_value_t = Mode::Validate, help="Mode of integration testing tool")]
+#[clap(
+    value_enum,
+    short,
+    long,
+    default_value = "VALIDATE",
+    help = "Mode of integration testing tool"
+)]
mode: Mode,
#[clap(short, long)]
verbose: bool,
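The `Args` change above replaces `default_value_t = Mode::Validate`, which relies on a `Display`/`ToString` implementation for the enum, with a string default parsed through clap's `ValueEnum` machinery. A hedged sketch of the same pattern, assuming clap 4 with the `derive` feature; the enum, variants, and value string below are illustrative rather than taken from the repository:

```rust
// Sketch: defaulting a clap ValueEnum argument with a string literal
// (`default_value = "..."`) instead of `default_value_t = Mode::Validate`.
use clap::{Parser, ValueEnum};

#[derive(Clone, Debug, ValueEnum)]
enum Mode {
    Validate,
    JsonToArrow,
}

#[derive(Parser, Debug)]
struct Args {
    // The default string is checked against the enum's possible values at parse time.
    #[clap(value_enum, short, long, default_value = "validate")]
    mode: Mode,
}

fn main() {
    let args = Args::parse();
    println!("mode = {:?}", args.mode);
}
```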
28 changes: 12 additions & 16 deletions arrow-json/src/reader/serializer.rs
@@ -309,16 +309,16 @@ impl<'a, 'b> SerializeMap for ObjectSerializer<'a, 'b> {
type Ok = ();
type Error = SerializerError;

-fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error>
+fn serialize_key<T>(&mut self, key: &T) -> Result<(), Self::Error>
where
-T: Serialize,
+T: Serialize + ?Sized,
{
key.serialize(&mut *self.serializer)
}

-fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
+fn serialize_value<T>(&mut self, value: &T) -> Result<(), Self::Error>
where
-T: Serialize,
+T: Serialize + ?Sized,
{
value.serialize(&mut *self.serializer)
}
@@ -333,13 +333,9 @@ impl<'a, 'b> SerializeStruct for ObjectSerializer<'a, 'b> {
type Ok = ();
type Error = SerializerError;

-fn serialize_field<T: ?Sized>(
-    &mut self,
-    key: &'static str,
-    value: &T,
-) -> Result<(), Self::Error>
+fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error>
where
-T: Serialize,
+T: Serialize + ?Sized,
{
key.serialize(&mut *self.serializer)?;
value.serialize(&mut *self.serializer)
@@ -376,9 +372,9 @@ impl<'a, 'b> SerializeSeq for ListSerializer<'a, 'b> {
type Ok = ();
type Error = SerializerError;

-fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
+fn serialize_element<T>(&mut self, value: &T) -> Result<(), Self::Error>
where
-T: Serialize,
+T: Serialize + ?Sized,
{
value.serialize(&mut *self.serializer)
}
@@ -393,9 +389,9 @@ impl<'a, 'b> SerializeTuple for ListSerializer<'a, 'b> {
type Ok = ();
type Error = SerializerError;

-fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
+fn serialize_element<T>(&mut self, value: &T) -> Result<(), Self::Error>
where
-T: Serialize,
+T: Serialize + ?Sized,
{
value.serialize(&mut *self.serializer)
}
@@ -410,9 +406,9 @@ impl<'a, 'b> SerializeTupleStruct for ListSerializer<'a, 'b> {
type Ok = ();
type Error = SerializerError;

-fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
+fn serialize_field<T>(&mut self, value: &T) -> Result<(), Self::Error>
where
-T: Serialize,
+T: Serialize + ?Sized,
{
value.serialize(&mut *self.serializer)
}
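Every change in this file follows the same shape: the `?Sized` bound moves out of the generic parameter list and into the `where` clause, so that all bounds on `T` are declared in one place. This is presumably what clippy 1.78 flags (likely `multiple_bound_locations`; the lint name is an inference, not stated in the commit). A standalone before/after sketch:

```rust
use std::fmt::Debug;

// Before: bounds on T are split between the parameter list and the where clause.
fn debug_all_split<T: ?Sized>(value: &T)
where
    T: Debug,
{
    println!("{value:?}");
}

// After: all bounds on T live together in the where clause.
fn debug_all<T>(value: &T)
where
    T: Debug + ?Sized,
{
    println!("{value:?}");
}

fn main() {
    debug_all_split("hello");    // str is unsized behind the reference
    debug_all(&[1, 2, 3][..]);   // so are slices
}
```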
17 changes: 13 additions & 4 deletions arrow-json/src/writer/encoder.rs
@@ -155,12 +155,21 @@ struct StructArrayEncoder<'a> {
explicit_nulls: bool,
}

+/// This API is only stable since 1.70 so can't use it when current MSRV is lower
+#[inline(always)]
+fn is_some_and<T>(opt: Option<T>, f: impl FnOnce(T) -> bool) -> bool {
+    match opt {
+        None => false,
+        Some(x) => f(x),
+    }
+}

impl<'a> Encoder for StructArrayEncoder<'a> {
fn encode(&mut self, idx: usize, out: &mut Vec<u8>) {
out.push(b'{');
let mut is_first = true;
for field_encoder in &mut self.encoders {
-let is_null = field_encoder.nulls.as_ref().is_some_and(|n| n.is_null(idx));
+let is_null = is_some_and(field_encoder.nulls.as_ref(), |n| n.is_null(idx));
if is_null && !self.explicit_nulls {
continue;
}
@@ -447,13 +456,13 @@ impl<'a> MapEncoder<'a> {
let (values, value_nulls) = make_encoder_impl(values, options)?;

// We sanity check nulls as these are currently not enforced by MapArray (#1697)
-if key_nulls.is_some_and(|x| x.null_count() != 0) {
+if is_some_and(key_nulls, |x| x.null_count() != 0) {
return Err(ArrowError::InvalidArgumentError(
"Encountered nulls in MapArray keys".to_string(),
));
}

-if array.entries().nulls().is_some_and(|x| x.null_count() != 0) {
+if is_some_and(array.entries().nulls(), |x| x.null_count() != 0) {
return Err(ArrowError::InvalidArgumentError(
"Encountered nulls in MapArray entries".to_string(),
));
@@ -478,7 +487,7 @@ impl<'a> Encoder for MapEncoder<'a> {

out.push(b'{');
for idx in start..end {
-let is_null = self.value_nulls.as_ref().is_some_and(|n| n.is_null(idx));
+let is_null = is_some_and(self.value_nulls.as_ref(), |n| n.is_null(idx));
if is_null && !self.explicit_nulls {
continue;
}
4 changes: 2 additions & 2 deletions arrow-select/src/take.rs
@@ -1401,9 +1401,9 @@ mod tests {
);
}

-fn _test_take_string<'a, K: 'static>()
+fn _test_take_string<'a, K>()
where
-K: Array + PartialEq + From<Vec<Option<&'a str>>>,
+K: Array + PartialEq + From<Vec<Option<&'a str>>> + 'static,
{
let index = UInt32Array::from(vec![Some(3), None, Some(1), Some(3), Some(4)]);

8 changes: 4 additions & 4 deletions arrow/src/util/string_writer.rs
@@ -63,6 +63,7 @@
//! }
//! ```
+use std::fmt::Formatter;
use std::io::{Error, ErrorKind, Result, Write};

#[derive(Debug)]
@@ -83,10 +84,9 @@ impl Default for StringWriter {
Self::new()
}
}

-impl ToString for StringWriter {
-    fn to_string(&self) -> String {
-        self.data.clone()
+impl std::fmt::Display for StringWriter {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.data)
}
}

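Switching from a direct `ToString` implementation to `Display` keeps `to_string()` available through the standard blanket `impl<T: Display> ToString for T`, while satisfying clippy (presumably `to_string_trait_impl`; the lint name is inferred). A self-contained sketch of the same migration on a hypothetical wrapper type:

```rust
use std::fmt::{self, Display, Formatter};

// Hypothetical stand-in for StringWriter: a type that buffers CSV text in a String.
struct Buffer {
    data: String,
}

// Implementing Display (rather than ToString directly) still provides
// to_string() via the blanket impl in the standard library.
impl Display for Buffer {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.data)
    }
}

fn main() {
    let b = Buffer {
        data: String::from("a,b\n1,2\n"),
    };
    assert_eq!(b.to_string(), "a,b\n1,2\n");
    println!("{b}");
}
```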
7 changes: 0 additions & 7 deletions parquet/src/column/writer/mod.rs
@@ -1172,13 +1172,6 @@ fn compare_greater<T: ParquetValueType>(descr: &ColumnDescriptor, a: &T, b: &T)
// https://github.com/apache/parquet-mr/blob/master/parquet-column/src/main/java/org/apache/parquet/column/values/factory/DefaultV1ValuesWriterFactory.java
// https://github.com/apache/parquet-mr/blob/master/parquet-column/src/main/java/org/apache/parquet/column/values/factory/DefaultV2ValuesWriterFactory.java

-/// Trait to define default encoding for types, including whether or not the type
-/// supports dictionary encoding.
-trait EncodingWriteSupport {
-    /// Returns true if dictionary is supported for column writer, false otherwise.
-    fn has_dictionary_support(props: &WriterProperties) -> bool;
-}

/// Returns encoding for a column when no other encoding is provided in writer properties.
fn fallback_encoding(kind: Type, props: &WriterProperties) -> Encoding {
match (kind, props.writer_version()) {
6 changes: 4 additions & 2 deletions parquet/src/encodings/encoding/mod.rs
@@ -24,7 +24,7 @@ use crate::data_type::private::ParquetValueType;
use crate::data_type::*;
use crate::encodings::rle::RleEncoder;
use crate::errors::{ParquetError, Result};
-use crate::util::bit_util::{self, num_required_bits, BitWriter};
+use crate::util::bit_util::{num_required_bits, BitWriter};

use bytes::Bytes;
pub use dict_encoder::DictEncoder;
@@ -47,12 +47,13 @@ pub trait Encoder<T: DataType>: Send {
/// identified by `valid_bits`.
///
/// Returns the number of non-null values encoded.
+#[cfg(test)]
fn put_spaced(&mut self, values: &[T::T], valid_bits: &[u8]) -> Result<usize> {
let num_values = values.len();
let mut buffer = Vec::with_capacity(num_values);
// TODO: this is pretty inefficient. Revisit in future.
for (i, item) in values.iter().enumerate().take(num_values) {
-if bit_util::get_bit(valid_bits, i) {
+if crate::util::bit_util::get_bit(valid_bits, i) {
buffer.push(item.clone());
}
}
@@ -727,6 +728,7 @@ mod tests {
use crate::encodings::decoding::{get_decoder, Decoder, DictDecoder, PlainDecoder};
use crate::schema::types::{ColumnDescPtr, ColumnDescriptor, ColumnPath, Type as SchemaType};
use crate::util::test_common::rand_gen::{random_bytes, RandGen};
+use crate::util::bit_util;

const TEST_SET_SIZE: usize = 1024;

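Gating the default `put_spaced` implementation behind `#[cfg(test)]` compiles it only for test builds, which is one way to silence warnings about code that only tests exercise (the exact lint motivating the change is not stated in the commit). A minimal sketch of the pattern with hypothetical names:

```rust
// Sketch: a trait method that only tests call, compiled only under #[cfg(test)].
trait Sink {
    fn put(&mut self, values: &[i32]);

    #[cfg(test)]
    fn put_spaced(&mut self, values: &[i32], valid: &[bool]) {
        // Keep only values whose validity flag is set, then delegate to put().
        let dense: Vec<i32> = values
            .iter()
            .zip(valid)
            .filter(|(_, ok)| **ok)
            .map(|(v, _)| *v)
            .collect();
        self.put(&dense);
    }
}

struct VecSink(Vec<i32>);

impl Sink for VecSink {
    fn put(&mut self, values: &[i32]) {
        self.0.extend_from_slice(values);
    }
}

fn main() {
    let mut sink = VecSink(Vec::new());
    sink.put(&[7, 8]);
    assert_eq!(sink.0, vec![7, 8]);
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn put_spaced_skips_invalid_slots() {
        let mut sink = VecSink(Vec::new());
        sink.put_spaced(&[1, 2, 3], &[true, false, true]);
        assert_eq!(sink.0, vec![1, 3]);
    }
}
```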
4 changes: 2 additions & 2 deletions parquet/src/file/statistics.rs
@@ -271,8 +271,8 @@ pub fn to_thrift(stats: Option<&Statistics>) -> Option<TStatistics> {

if stats.is_min_max_backwards_compatible() {
// Copy to deprecated min, max values for compatibility with older readers
-thrift_stats.min = min.clone();
-thrift_stats.max = max.clone();
+thrift_stats.min.clone_from(&min);
+thrift_stats.max.clone_from(&max);
}

if !stats.is_min_max_deprecated() {
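`clone_from` copies into the destination's existing allocation when capacity allows, instead of building a fresh clone and dropping the old value, which is what clippy 1.78 nudges toward here (likely `assigning_clones`; the lint name is an inference). A small sketch of the difference:

```rust
// Sketch: assigning a clone vs. cloning in place.
fn main() {
    let src = String::from("column statistics");
    let mut dst = String::new();

    // Flagged form (allocates a new String, then drops dst's old buffer):
    //     dst = src.clone();
    // Preferred form: clone_from can reuse dst's existing allocation.
    dst.clone_from(&src);

    assert_eq!(dst, src);
}
```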
