Skip to content

feat: nan_value_counts support #907

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 19 commits into from
Mar 25, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 5 additions & 0 deletions crates/iceberg/src/arrow/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,12 @@

mod schema;
pub use schema::*;

mod nan_val_cnt_visitor;
pub(crate) use nan_val_cnt_visitor::*;

pub(crate) mod delete_file_manager;

mod reader;
pub(crate) mod record_batch_projector;
pub(crate) mod record_batch_transformer;
Expand Down
177 changes: 177 additions & 0 deletions crates/iceberg/src/arrow/nan_val_cnt_visitor.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,177 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

//! This module contains the visitor for counting NaN values in a given Arrow record batch.

use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::sync::Arc;

use arrow_array::{ArrayRef, Float32Array, Float64Array, RecordBatch, StructArray};
use arrow_schema::DataType;

use crate::arrow::ArrowArrayAccessor;
use crate::spec::{
visit_struct_with_partner, ListType, MapType, NestedFieldRef, PrimitiveType, Schema, SchemaRef,
SchemaWithPartnerVisitor, StructType,
};
use crate::Result;

/// Downcasts `$col` to the concrete arrow float array type `$t`, counts the
/// NaN entries it contains, and accumulates that count into
/// `$self.nan_value_counts` under `$field_id`.
///
/// Callers must have already verified that `$col`'s data type matches `$t`
/// (see `count_float_nans!`), otherwise the downcast panics.
macro_rules! cast_and_update_cnt_map {
    ($t:ty, $col:ident, $self:ident, $field_id:ident) => {
        // Null slots iterate as `None`; `flatten` skips them so only values
        // actually present in the column are tested for NaN.
        let nan_val_cnt = $col
            .as_any()
            .downcast_ref::<$t>()
            .expect("array data type checked by caller")
            .iter()
            .flatten()
            .filter(|v| v.is_nan())
            .count() as u64;

        // Accumulate into the running total for this field id: the same
        // field may be visited once per record batch.
        match $self.nan_value_counts.entry($field_id) {
            Entry::Occupied(mut e) => *e.get_mut() += nan_val_cnt,
            Entry::Vacant(v) => {
                v.insert(nan_val_cnt);
            }
        }
    };
}

/// Dispatches on the arrow data type of `$col`: for float32/float64 columns
/// it delegates NaN counting to `cast_and_update_cnt_map!`; every other
/// data type is silently ignored (only floats can hold NaN).
macro_rules! count_float_nans {
    ($col:ident, $self:ident, $field_id:ident) => {
        if let DataType::Float32 = $col.data_type() {
            cast_and_update_cnt_map!(Float32Array, $col, $self, $field_id);
        } else if let DataType::Float64 = $col.data_type() {
            cast_and_update_cnt_map!(Float64Array, $col, $self, $field_id);
        }
    };
}

/// Visitor which counts and keeps track of NaN value counts in given record batch(s).
///
/// Feed record batches through [`NanValueCountVisitor::compute`]; counts
/// accumulate across calls, so one visitor can summarize a whole file made
/// of multiple batches.
pub struct NanValueCountVisitor {
    /// Stores field ID to NaN value count mapping.
    /// Keyed by Iceberg field id; values are running totals across all
    /// batches seen so far.
    pub nan_value_counts: HashMap<i32, u64>,
}

impl SchemaWithPartnerVisitor<ArrayRef> for NanValueCountVisitor {
    // The visitor carries no per-node result; all state lives in
    // `self.nan_value_counts`.
    type T = ();

    // The structural callbacks below are deliberate no-ops: NaN counting is
    // done in the `after_*` hooks, where a concrete field's arrow column is
    // available together with its Iceberg field id.

    fn schema(
        &mut self,
        _schema: &Schema,
        _partner: &ArrayRef,
        _value: Self::T,
    ) -> Result<Self::T> {
        Ok(())
    }

    fn field(
        &mut self,
        _field: &NestedFieldRef,
        _partner: &ArrayRef,
        _value: Self::T,
    ) -> Result<Self::T> {
        Ok(())
    }

    fn r#struct(
        &mut self,
        _struct: &StructType,
        _partner: &ArrayRef,
        _results: Vec<Self::T>,
    ) -> Result<Self::T> {
        Ok(())
    }

    fn list(&mut self, _list: &ListType, _list_arr: &ArrayRef, _value: Self::T) -> Result<Self::T> {
        Ok(())
    }

    fn map(
        &mut self,
        _map: &MapType,
        _partner: &ArrayRef,
        _key_value: Self::T,
        _value: Self::T,
    ) -> Result<Self::T> {
        Ok(())
    }

    fn primitive(&mut self, _p: &PrimitiveType, _col: &ArrayRef) -> Result<Self::T> {
        Ok(())
    }

    /// Counts NaNs in a struct field's column (float32/float64 only; other
    /// types are ignored by `count_float_nans!`).
    fn after_struct_field(&mut self, field: &NestedFieldRef, partner: &ArrayRef) -> Result<()> {
        let field_id = field.id;
        count_float_nans!(partner, self, field_id);
        Ok(())
    }

    /// Counts NaNs in a list's element column.
    fn after_list_element(&mut self, field: &NestedFieldRef, partner: &ArrayRef) -> Result<()> {
        let field_id = field.id;
        count_float_nans!(partner, self, field_id);
        Ok(())
    }

    /// Counts NaNs in a map's key column.
    fn after_map_key(&mut self, field: &NestedFieldRef, partner: &ArrayRef) -> Result<()> {
        let field_id = field.id;
        count_float_nans!(partner, self, field_id);
        Ok(())
    }

    /// Counts NaNs in a map's value column.
    fn after_map_value(&mut self, field: &NestedFieldRef, partner: &ArrayRef) -> Result<()> {
        let field_id = field.id;
        count_float_nans!(partner, self, field_id);
        Ok(())
    }
}

impl NanValueCountVisitor {
    /// Creates a new `NanValueCountVisitor` with an empty count map.
    pub fn new() -> Self {
        Self {
            nan_value_counts: HashMap::default(),
        }
    }

    /// Walks `schema` alongside `batch` and accumulates NaN counts for every
    /// float32/float64 column into `self.nan_value_counts`.
    ///
    /// May be called once per record batch; counts for a field id add up
    /// across calls.
    pub fn compute(&mut self, schema: SchemaRef, batch: RecordBatch) -> Result<()> {
        let accessor = ArrowArrayAccessor {};

        // View the whole batch as a single struct array so it can serve as
        // the partner of the schema's root struct during the walk.
        let root: ArrayRef = Arc::new(StructArray::from(batch));
        visit_struct_with_partner(schema.as_struct(), &root, self, &accessor).map(|_| ())
    }
}

impl Default for NanValueCountVisitor {
fn default() -> Self {
Self::new()
}
}
6 changes: 5 additions & 1 deletion crates/iceberg/src/arrow/value.rs
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,8 @@ impl SchemaWithPartnerVisitor<ArrayRef> for ArrowArrayToIcebergStructConverter {
}
}

struct ArrowArrayAccessor;
/// Partner type representing accessing and walking arrow arrays alongside iceberg schema
pub struct ArrowArrayAccessor;

impl PartnerAccessor<ArrayRef> for ArrowArrayAccessor {
fn struct_parner<'a>(&self, schema_partner: &'a ArrayRef) -> Result<&'a ArrayRef> {
Expand All @@ -435,6 +436,7 @@ impl PartnerAccessor<ArrayRef> for ArrowArrayAccessor {
"The schema partner is not a struct type",
));
}

Ok(schema_partner)
}

Expand All @@ -452,6 +454,7 @@ impl PartnerAccessor<ArrayRef> for ArrowArrayAccessor {
"The struct partner is not a struct array",
)
})?;

let field_pos = struct_array
.fields()
.iter()
Expand All @@ -466,6 +469,7 @@ impl PartnerAccessor<ArrayRef> for ArrowArrayAccessor {
format!("Field id {} not found in struct array", field.id),
)
})?;

Ok(struct_array.column(field_pos))
}

Expand Down
22 changes: 18 additions & 4 deletions crates/iceberg/src/writer/base_writer/data_file_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -103,11 +103,13 @@ impl<B: FileWriterBuilder> CurrentFileStatus for DataFileWriter<B> {

#[cfg(test)]
mod test {
use std::collections::HashMap;
use std::sync::Arc;

use arrow_array::{Int32Array, StringArray};
use arrow_schema::{DataType, Field};
use parquet::arrow::arrow_reader::{ArrowReaderMetadata, ArrowReaderOptions};
use parquet::arrow::PARQUET_FIELD_ID_META_KEY;
use parquet::file::properties::WriterProperties;
use tempfile::TempDir;

Expand Down Expand Up @@ -153,8 +155,14 @@ mod test {
.unwrap();

let arrow_schema = arrow_schema::Schema::new(vec![
Field::new("foo", DataType::Int32, false),
Field::new("bar", DataType::Utf8, false),
Field::new("foo", DataType::Int32, false).with_metadata(HashMap::from([(
PARQUET_FIELD_ID_META_KEY.to_string(),
3.to_string(),
)])),
Field::new("bar", DataType::Utf8, false).with_metadata(HashMap::from([(
PARQUET_FIELD_ID_META_KEY.to_string(),
4.to_string(),
)])),
]);
let batch = RecordBatch::try_new(Arc::new(arrow_schema.clone()), vec![
Arc::new(Int32Array::from(vec![1, 2, 3])),
Expand Down Expand Up @@ -224,8 +232,14 @@ mod test {
.await?;

let arrow_schema = arrow_schema::Schema::new(vec![
Field::new("id", DataType::Int32, false),
Field::new("name", DataType::Utf8, false),
Field::new("id", DataType::Int32, false).with_metadata(HashMap::from([(
PARQUET_FIELD_ID_META_KEY.to_string(),
5.to_string(),
)])),
Field::new("name", DataType::Utf8, false).with_metadata(HashMap::from([(
PARQUET_FIELD_ID_META_KEY.to_string(),
6.to_string(),
)])),
]);
let batch = RecordBatch::try_new(Arc::new(arrow_schema.clone()), vec![
Arc::new(Int32Array::from(vec![1, 2, 3])),
Expand Down
Loading
Loading