build(rust): Update Rust toolchain (#15353)
stinodego authored Mar 28, 2024
1 parent 4b0c86f commit 2e46f8d
Showing 30 changed files with 76 additions and 87 deletions.
2 changes: 1 addition & 1 deletion Makefile
@@ -83,7 +83,7 @@ build-release-native: .venv ## Same as build-release, except with native CPU op

.PHONY: check
check: ## Run cargo check with all features
cargo clippy --workspace --all-targets --all-features
cargo check --workspace --all-targets --all-features

.PHONY: clippy
clippy: ## Run clippy with all features
24 changes: 0 additions & 24 deletions crates/polars-arrow/src/array/list/iterator.rs
@@ -49,27 +49,3 @@ impl<'a, O: Offset> ListArray<O> {
NonNullValuesIter::new(self, self.validity())
}
}

struct Iter<T, I: Iterator<Item = Option<T>>> {
current: i32,
offsets: std::vec::IntoIter<i32>,
values: I,
}

impl<T, I: Iterator<Item = Option<T>> + Clone> Iterator for Iter<T, I> {
type Item = Option<std::iter::Take<std::iter::Skip<I>>>;

fn next(&mut self) -> Option<Self::Item> {
let next = self.offsets.next();
next.map(|next| {
let length = next - self.current;
let iter = self
.values
.clone()
.skip(self.current as usize)
.take(length as usize);
self.current = next;
Some(iter)
})
}
}
6 changes: 6 additions & 0 deletions crates/polars-arrow/src/compute/arity.rs
@@ -105,6 +105,9 @@
{
let mut mut_bitmap = MutableBitmap::with_capacity(array.len());

// TODO: Clippy lint is broken, remove attr once fixed.
// https://github.com/rust-lang/rust-clippy/issues/12580
#[cfg_attr(feature = "nightly", allow(clippy::manual_unwrap_or_default))]
let values = array
.values()
.iter()
@@ -254,6 +257,9 @@

let mut mut_bitmap = MutableBitmap::with_capacity(lhs.len());

// TODO: Clippy lint is broken, remove attr once fixed.
// https://github.com/rust-lang/rust-clippy/issues/12580
#[cfg_attr(feature = "nightly", allow(clippy::manual_unwrap_or_default))]
let values = lhs
.values()
.iter()
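This is the first of several hunks (the same attribute appears again in upstream_traits.rs, group_by/aggregations/mod.rs, and substring.rs below) working around a broken Clippy lint in the updated toolchain: clippy::manual_unwrap_or_default suggests unwrap_or_default() in places where that rewrite is not equivalent (see the rust-clippy issue linked in the TODO). The allow is wrapped in cfg_attr(feature = "nightly", ...) so it only applies to builds using the newer Clippy, presumably to avoid unknown-lint warnings on older toolchains. A minimal illustrative sketch, with hypothetical functions rather than the Polars kernels, assuming a crate feature named "nightly":

// What the lint legitimately targets: a match that is just `v.unwrap_or_default()`.
fn plain_unwrap(v: Option<i32>) -> i32 {
    match v {
        Some(x) => x,
        None => i32::default(),
    }
}

// A case of the same shape as the kernels above: the arms also record
// validity, so the suggested rewrite does not apply. The attribute silences
// the false positive, and only when the (assumed) `nightly` feature is on.
#[cfg_attr(feature = "nightly", allow(clippy::manual_unwrap_or_default))]
fn value_with_validity(v: Option<i32>, validity: &mut Vec<bool>) -> i32 {
    match v {
        Some(x) => {
            validity.push(true);
            x
        },
        None => {
            validity.push(false);
            i32::default()
        },
    }
}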
6 changes: 6 additions & 0 deletions crates/polars-arrow/src/compute/decimal.rs
@@ -112,6 +112,12 @@ pub struct FormatBuffer {
len: usize,
}

impl Default for FormatBuffer {
fn default() -> Self {
Self::new()
}
}

impl FormatBuffer {
#[inline]
pub const fn new() -> Self {
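The added Default impl is the standard fix for Clippy's new_without_default lint, presumably reported here by the updated toolchain: a type that exposes an argument-less new() should also implement Default, and delegating keeps the two constructors in sync. A generic sketch of the pattern with a hypothetical Buffer type (not the real FormatBuffer):

// Hypothetical stand-in for a small fixed-capacity buffer.
pub struct Buffer {
    data: [u8; 32],
    len: usize,
}

impl Buffer {
    // Argument-less constructor; const so it can be used in const contexts.
    pub const fn new() -> Self {
        Self { data: [0; 32], len: 0 }
    }
}

// clippy::new_without_default asks for this impl; delegating to new()
// avoids duplicating the initialization logic.
impl Default for Buffer {
    fn default() -> Self {
        Self::new()
    }
}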
7 changes: 2 additions & 5 deletions crates/polars-arrow/src/legacy/kernels/atan2.rs
@@ -4,12 +4,9 @@ use crate::array::PrimitiveArray;
use crate::compute::arity::binary;
use crate::types::NativeType;

pub fn atan2<T: NativeType>(
arr_1: &PrimitiveArray<T>,
arr_2: &PrimitiveArray<T>,
) -> PrimitiveArray<T>
pub fn atan2<T>(arr_1: &PrimitiveArray<T>, arr_2: &PrimitiveArray<T>) -> PrimitiveArray<T>
where
T: Float,
T: Float + NativeType,
{
binary(arr_1, arr_2, arr_1.data_type().clone(), |a, b| a.atan2(b))
}
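This signature change is the first of a pattern repeated throughout the commit (var.rs, float_sum.rs, aggregate/mod.rs, series_trait.rs, the parquet readers, and the DSL functions below): bounds that were split between the generic parameter list and the where clause are consolidated into the where clause. This most likely addresses Clippy's multiple_bound_locations lint in the newer toolchain, which flags a type parameter bounded in both places (an inference; the commit itself does not say). A minimal sketch with stand-in traits, not the Polars signatures:

use std::ops::Add;

// Before: T is bounded both inline and in the where clause, which
// clippy::multiple_bound_locations flags on recent toolchains.
fn pairwise_sum_before<T: Copy>(pairs: &[(T, T)]) -> Vec<T>
where
    T: Add<Output = T>,
{
    pairs.iter().map(|&(a, b)| a + b).collect()
}

// After: all bounds on T live in one place.
fn pairwise_sum_after<T>(pairs: &[(T, T)]) -> Vec<T>
where
    T: Copy + Add<Output = T>,
{
    pairs.iter().map(|&(a, b)| a + b).collect()
}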
16 changes: 6 additions & 10 deletions crates/polars-arrow/src/legacy/kernels/take_agg/var.rs
@@ -42,16 +42,14 @@
/// Take kernel for single chunk and an iterator as index.
/// # Safety
/// caller must ensure iterators indexes are in bounds
pub unsafe fn take_var_no_null_primitive_iter_unchecked<
T: NativeType + ToPrimitive,
I: IntoIterator<Item = usize>,
>(
pub unsafe fn take_var_no_null_primitive_iter_unchecked<T, I>(
arr: &PrimitiveArray<T>,
indices: I,
ddof: u8,
) -> Option<f64>
where
T: ToPrimitive,
T: NativeType + ToPrimitive,
I: IntoIterator<Item = usize>,
{
debug_assert!(arr.null_count() == 0);
let array_values = arr.values().as_slice();
@@ -67,16 +65,14 @@ where
/// Take kernel for single chunk and an iterator as index.
/// # Safety
/// caller must ensure iterators indexes are in bounds
pub unsafe fn take_var_nulls_primitive_iter_unchecked<
T: NativeType + ToPrimitive,
I: IntoIterator<Item = usize>,
>(
pub unsafe fn take_var_nulls_primitive_iter_unchecked<T, I>(
arr: &PrimitiveArray<T>,
indices: I,
ddof: u8,
) -> Option<f64>
where
T: ToPrimitive,
T: NativeType + ToPrimitive,
I: IntoIterator<Item = usize>,
{
debug_assert!(arr.null_count() > 0);
let array_values = arr.values().as_slice();
8 changes: 4 additions & 4 deletions crates/polars-compute/src/float_sum.rs
@@ -256,9 +256,9 @@
}
}

pub fn sum_arr_as_f32<T: NativeType>(arr: &PrimitiveArray<T>) -> f32
pub fn sum_arr_as_f32<T>(arr: &PrimitiveArray<T>) -> f32
where
T: FloatSum<f32>,
T: NativeType + FloatSum<f32>,
{
let validity = arr.validity().filter(|_| arr.null_count() > 0);
if let Some(mask) = validity {
@@ -268,9 +268,9 @@ where
}
}

pub fn sum_arr_as_f64<T: NativeType>(arr: &PrimitiveArray<T>) -> f64
pub fn sum_arr_as_f64<T>(arr: &PrimitiveArray<T>) -> f64
where
T: FloatSum<f64>,
T: NativeType + FloatSum<f64>,
{
let validity = arr.validity().filter(|_| arr.null_count() > 0);
if let Some(mask) = validity {
2 changes: 1 addition & 1 deletion crates/polars-core/src/chunked_array/ops/aggregate/mod.rs
@@ -43,7 +43,7 @@ pub trait ChunkAggSeries {
}
}

fn sum<T: NumericNative + NativeType>(array: &PrimitiveArray<T>) -> T
fn sum<T>(array: &PrimitiveArray<T>) -> T
where
T: NumericNative + NativeType,
<T as Simd>::Simd: Add<Output = <T as Simd>::Simd> + compute::aggregate::Sum<T>,
1 change: 1 addition & 0 deletions crates/polars-core/src/chunked_array/ops/mod.rs
@@ -29,6 +29,7 @@ mod interpolate;
pub(crate) mod min_max_binary;
pub(crate) mod nulls;
mod reverse;
#[cfg(feature = "rolling_window")]
pub(crate) mod rolling_window;
pub mod search_sorted;
mod set;
3 changes: 3 additions & 0 deletions crates/polars-core/src/chunked_array/upstream_traits.rs
@@ -303,6 +303,9 @@ impl<T: PolarsObject> FromIterator<Option<T>> for ObjectChunked<T> {
let size = iter.size_hint().0;
let mut null_mask_builder = MutableBitmap::with_capacity(size);

// TODO: Clippy lint is broken, remove attr once fixed.
// https://github.com/rust-lang/rust-clippy/issues/12580
#[cfg_attr(feature = "nightly", allow(clippy::manual_unwrap_or_default))]
let values: Vec<T> = iter
.map(|value| match value {
Some(value) => {
3 changes: 3 additions & 0 deletions crates/polars-core/src/frame/group_by/aggregations/mod.rs
@@ -101,6 +101,9 @@
unsafe { agg_window.update(start as usize, end as usize) }
};

// TODO: Clippy lint is broken, remove attr once fixed.
// https://github.com/rust-lang/rust-clippy/issues/12580
#[cfg_attr(feature = "nightly", allow(clippy::manual_unwrap_or_default))]
match agg {
Some(val) => val,
None => {
4 changes: 2 additions & 2 deletions crates/polars-core/src/series/series_trait.rs
@@ -493,9 +493,9 @@ pub trait SeriesTrait:
}

impl<'a> (dyn SeriesTrait + 'a) {
pub fn unpack<N: 'static>(&self) -> PolarsResult<&ChunkedArray<N>>
pub fn unpack<N>(&self) -> PolarsResult<&ChunkedArray<N>>
where
N: PolarsDataType,
N: 'static + PolarsDataType,
{
polars_ensure!(&N::get_dtype() == self.dtype(), unpack);
Ok(self.as_ref())
10 changes: 0 additions & 10 deletions crates/polars-io/src/csv/write_impl.rs
@@ -266,16 +266,6 @@ impl Default for SerializeOptions {
}
}

/// Utility to write to `&mut Vec<u8>` buffer.
struct StringWrap<'a>(pub &'a mut Vec<u8>);

impl<'a> std::fmt::Write for StringWrap<'a> {
fn write_str(&mut self, s: &str) -> std::fmt::Result {
self.0.extend_from_slice(s.as_bytes());
Ok(())
}
}

pub(crate) fn write<W: Write>(
writer: &mut W,
df: &DataFrame,
3 changes: 3 additions & 0 deletions crates/polars-ops/src/chunked_array/strings/substring.rs
@@ -28,6 +28,9 @@ fn substring_ternary(
// If we didn't find our char that means our offset was so negative it
// is before the start of our string. This means our length must be
// reduced, assuming it is finite.
// TODO: Clippy lint is broken, remove attr once fixed.
// https://github.com/rust-lang/rust-clippy/issues/12580
#[cfg_attr(feature = "nightly", allow(clippy::manual_unwrap_or_default))]
if let Some(off) = found {
off
} else {
8 changes: 4 additions & 4 deletions crates/polars-parquet/src/arrow/read/deserialize/mod.rs
@@ -127,7 +127,7 @@ fn is_primitive(data_type: &ArrowDataType) -> bool {
)
}

fn columns_to_iter_recursive<'a, I: 'a>(
fn columns_to_iter_recursive<'a, I>(
mut columns: Vec<I>,
mut types: Vec<&PrimitiveType>,
field: Field,
@@ -136,7 +136,7 @@ fn columns_to_iter_recursive<'a, I: 'a>(
chunk_size: Option<usize>,
) -> PolarsResult<NestedArrayIter<'a>>
where
I: PagesIter,
I: 'a + PagesIter,
{
if init.is_empty() && is_primitive(&field.data_type) {
return Ok(Box::new(
@@ -197,15 +197,15 @@ pub fn n_columns(data_type: &ArrowDataType) -> usize {
/// For nested types, `columns` must be composed by all parquet columns with associated types `types`.
///
/// The arrays are guaranteed to be at most of size `chunk_size` and data type `field.data_type`.
pub fn column_iter_to_arrays<'a, I: 'a>(
pub fn column_iter_to_arrays<'a, I>(
columns: Vec<I>,
types: Vec<&PrimitiveType>,
field: Field,
chunk_size: Option<usize>,
num_rows: usize,
) -> PolarsResult<ArrayIter<'a>>
where
I: PagesIter,
I: 'a + PagesIter,
{
Ok(Box::new(
columns_to_iter_recursive(columns, types, field, vec![], num_rows, chunk_size)?
4 changes: 2 additions & 2 deletions crates/polars-parquet/src/arrow/read/deserialize/nested.rs
@@ -34,7 +34,7 @@
}))
}

pub fn columns_to_iter_recursive<'a, I: 'a>(
pub fn columns_to_iter_recursive<'a, I>(
mut columns: Vec<I>,
mut types: Vec<&PrimitiveType>,
field: Field,
@@ -43,7 +43,7 @@ pub fn columns_to_iter_recursive<'a, I: 'a>(
chunk_size: Option<usize>,
) -> PolarsResult<NestedArrayIter<'a>>
where
I: PagesIter,
I: 'a + PagesIter,
{
use arrow::datatypes::PhysicalType::*;
use arrow::datatypes::PrimitiveType::*;
3 changes: 1 addition & 2 deletions crates/polars-parquet/src/arrow/write/nested/rep.rs
@@ -35,8 +35,7 @@ pub fn num_values(nested: &[Nested]) -> usize {

iter(nested)
.into_iter()
.enumerate()
.map(|(_, lengths)| {
.map(|lengths| {
lengths
.map(|length| if length == 0 { 1 } else { 0 })
.sum::<usize>()
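Dropping the .enumerate() whose index was immediately discarded matches Clippy's unused_enumerate_index lint, presumably surfaced by the toolchain update. A reduced sketch of the before/after shape, using a hypothetical helper rather than the real rep.rs code:

// Before: the enumerate index is bound and immediately thrown away.
fn count_empty_before(lengths: impl Iterator<Item = usize>) -> usize {
    lengths
        .enumerate()
        .map(|(_, len)| if len == 0 { 1 } else { 0 })
        .sum()
}

// After: iterate over the lengths directly.
fn count_empty_after(lengths: impl Iterator<Item = usize>) -> usize {
    lengths.map(|len| if len == 0 { 1 } else { 0 }).sum()
}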
1 change: 1 addition & 0 deletions crates/polars-parquet/src/parquet/write/page.rs
@@ -40,6 +40,7 @@ fn maybe_bytes(uncompressed: usize, compressed: usize) -> Result<(i32, i32)> {
/// Contains page write metrics.
pub struct PageWriteSpec {
pub header: ParquetPageHeader,
#[allow(dead_code)]
pub num_values: usize,
pub num_rows: Option<usize>,
pub header_size: u64,
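The #[allow(dead_code)] here, like the removal of the unused Iter and StringWrap types earlier in the diff, deals with code the updated compiler reports as unused; the field is presumably populated by the page writer but never read back inside the crate, so it is kept and the warning silenced. A hypothetical sketch of the same pattern:

// Hypothetical sketch: num_values is filled in when a page is written but
// nothing reads it back, so the dead-code lint would flag it; the attribute
// keeps the field without a warning.
struct PageStats {
    header_size: u64,
    #[allow(dead_code)]
    num_values: usize,
}

fn header_size_after_write(header_size: u64, num_values: usize) -> u64 {
    let stats = PageStats { header_size, num_values };
    stats.header_size // only header_size is ever read
}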
8 changes: 4 additions & 4 deletions crates/polars-plan/src/dsl/functions/arity.rs
@@ -14,9 +14,9 @@ macro_rules! prepare_binary_function {
/// Apply a closure on the two columns that are evaluated from [`Expr`] a and [`Expr`] b.
///
/// The closure takes two arguments, each a [`Series`]. `output_type` must be the output dtype of the resulting [`Series`].
pub fn map_binary<F: 'static>(a: Expr, b: Expr, f: F, output_type: GetOutput) -> Expr
pub fn map_binary<F>(a: Expr, b: Expr, f: F, output_type: GetOutput) -> Expr
where
F: Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync,
F: 'static + Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync,
{
let function = prepare_binary_function!(f);
a.map_many(function, &[b], output_type)
@@ -25,9 +25,9 @@ where
/// Like [`map_binary`], but used in a group_by-aggregation context.
///
/// See [`Expr::apply`] for the difference between [`map`](Expr::map) and [`apply`](Expr::apply).
pub fn apply_binary<F: 'static>(a: Expr, b: Expr, f: F, output_type: GetOutput) -> Expr
pub fn apply_binary<F>(a: Expr, b: Expr, f: F, output_type: GetOutput) -> Expr
where
F: Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync,
F: 'static + Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync,
{
let function = prepare_binary_function!(f);
a.apply_many(function, &[b], output_type)
25 changes: 12 additions & 13 deletions crates/polars-plan/src/dsl/functions/horizontal.rs
@@ -20,9 +20,10 @@ fn cum_fold_dtype() -> GetOutput {
}

/// Accumulate over multiple columns horizontally / row wise.
pub fn fold_exprs<F: 'static, E: AsRef<[Expr]>>(acc: Expr, f: F, exprs: E) -> Expr
pub fn fold_exprs<F, E>(acc: Expr, f: F, exprs: E) -> Expr
where
F: Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync + Clone,
F: 'static + Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync + Clone,
E: AsRef<[Expr]>,
{
let mut exprs = exprs.as_ref().to_vec();
exprs.push(acc);
@@ -58,9 +59,10 @@ where
/// An accumulator is initialized to the series given by the first expression in `exprs`, and then each subsequent value
/// of the accumulator is computed from `f(acc, next_expr_series)`. If `exprs` is empty, an error is returned when
/// `collect` is called.
pub fn reduce_exprs<F: 'static, E: AsRef<[Expr]>>(f: F, exprs: E) -> Expr
pub fn reduce_exprs<F, E>(f: F, exprs: E) -> Expr
where
F: Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync + Clone,
F: 'static + Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync + Clone,
E: AsRef<[Expr]>,
{
let exprs = exprs.as_ref().to_vec();

@@ -98,9 +100,10 @@ where

/// Accumulate over multiple columns horizontally / row wise.
#[cfg(feature = "dtype-struct")]
pub fn cum_reduce_exprs<F: 'static, E: AsRef<[Expr]>>(f: F, exprs: E) -> Expr
pub fn cum_reduce_exprs<F, E>(f: F, exprs: E) -> Expr
where
F: Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync + Clone,
F: 'static + Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync + Clone,
E: AsRef<[Expr]>,
{
let exprs = exprs.as_ref().to_vec();

@@ -143,14 +146,10 @@ where

/// Accumulate over multiple columns horizontally / row wise.
#[cfg(feature = "dtype-struct")]
pub fn cum_fold_exprs<F: 'static, E: AsRef<[Expr]>>(
acc: Expr,
f: F,
exprs: E,
include_init: bool,
) -> Expr
pub fn cum_fold_exprs<F, E>(acc: Expr, f: F, exprs: E, include_init: bool) -> Expr
where
F: Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync + Clone,
F: 'static + Fn(Series, Series) -> PolarsResult<Option<Series>> + Send + Sync + Clone,
E: AsRef<[Expr]>,
{
let mut exprs = exprs.as_ref().to_vec();
exprs.push(acc);
2 changes: 2 additions & 0 deletions crates/polars-plan/src/logical_plan/aexpr/mod.rs
@@ -1,8 +1,10 @@
#[cfg(feature = "cse")]
mod hash;
mod schema;

use std::hash::{Hash, Hasher};

#[cfg(feature = "cse")]
pub(super) use hash::traverse_and_hash_aexpr;
use polars_core::prelude::*;
use polars_core::utils::{get_time_units, try_get_supertype};
(Diffs for the remaining changed files were not loaded.)