// parquet/column/writer/encoder.rs

// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

use bytes::Bytes;
use half::f16;

use crate::basic::{ConvertedType, Encoding, LogicalType, Type};
use crate::bloom_filter::Sbbf;
use crate::column::writer::{
    compare_greater, fallback_encoding, has_dictionary_support, is_nan, update_max, update_min,
};
use crate::data_type::DataType;
use crate::data_type::private::ParquetValueType;
use crate::encodings::encoding::{DictEncoder, Encoder, get_encoder};
use crate::errors::{ParquetError, Result};
use crate::file::properties::{EnabledStatistics, WriterProperties};
use crate::geospatial::accumulator::{GeoStatsAccumulator, try_new_geo_stats_accumulator};
use crate::geospatial::statistics::GeospatialStatistics;
use crate::schema::types::{ColumnDescPtr, ColumnDescriptor};

/// A collection of [`ParquetValueType`] encoded by a [`ColumnValueEncoder`]
pub trait ColumnValues {
    /// The number of values in this collection
    fn len(&self) -> usize;
}

#[cfg(feature = "arrow")]
impl ColumnValues for dyn arrow_array::Array {
    fn len(&self) -> usize {
        arrow_array::Array::len(self)
    }
}

impl<T: ParquetValueType> ColumnValues for [T] {
    fn len(&self) -> usize {
        self.len()
    }
}
/// The encoded data for a dictionary page
pub struct DictionaryPage {
    /// The encoded dictionary values
    pub buf: Bytes,
    /// The number of entries in the dictionary
    pub num_values: usize,
    /// Whether the dictionary entries are sorted
    pub is_sorted: bool,
}

/// The encoded values for a data page, with optional statistics
pub struct DataPageValues<T> {
    /// The encoded values
    pub buf: Bytes,
    /// The number of values encoded
    pub num_values: usize,
    /// The encoding used for `buf`
    pub encoding: Encoding,
    /// The minimum value, if statistics were collected
    pub min_value: Option<T>,
    /// The maximum value, if statistics were collected
    pub max_value: Option<T>,
    /// The total size of variable-length values in bytes, if any were written
    pub variable_length_bytes: Option<i64>,
}

/// A generic encoder of [`ColumnValues`] to data and dictionary pages used by
/// [`super::GenericColumnWriter`]
pub trait ColumnValueEncoder {
    /// The underlying value type of [`Self::Values`]
    ///
    /// Note: this avoids needing to fully qualify `<Self::Values as ColumnValues>::T`
    type T: ParquetValueType;

    /// The values encoded by this encoder
    type Values: ColumnValues + ?Sized;

    /// Create a new [`ColumnValueEncoder`]
    fn try_new(descr: &ColumnDescPtr, props: &WriterProperties) -> Result<Self>
    where
        Self: Sized;

    /// Write the corresponding values to this [`ColumnValueEncoder`]
    fn write(&mut self, values: &Self::Values, offset: usize, len: usize) -> Result<()>;

    /// Write the values at the indexes in `indices` to this [`ColumnValueEncoder`]
    fn write_gather(&mut self, values: &Self::Values, indices: &[usize]) -> Result<()>;

    /// Returns the number of buffered values
    fn num_values(&self) -> usize;
    /// Returns true if this encoder has a dictionary page
    fn has_dictionary(&self) -> bool;

    /// Returns the estimated total memory usage of the encoder
    fn estimated_memory_size(&self) -> usize;

    /// Returns an estimate of the encoded size of the dictionary page in bytes,
    /// or `None` if there is no dictionary
    fn estimated_dict_page_size(&self) -> Option<usize>;

    /// Returns an estimate of the encoded data page size in bytes
    ///
    /// This should include:
    /// `already_written_encoded_byte_size + estimated_encoded_size_of_unflushed_bytes`
    fn estimated_data_page_size(&self) -> usize;

    /// Flush the dictionary page for this column chunk if any. Any subsequent calls to
    /// [`Self::write`] will not be dictionary encoded
    ///
    /// Note: [`Self::flush_data_page`] must be called first, as this will error if there
    /// are any pending page values
    fn flush_dict_page(&mut self) -> Result<Option<DictionaryPage>>;

    /// Flush the next data page for this column chunk
    fn flush_data_page(&mut self) -> Result<DataPageValues<Self::T>>;

    /// Flushes the bloom filter if enabled and returns it, otherwise returns `None`. Subsequent
    /// writes will *not* be tracked by the bloom filter, as it is taken by this call. This should
    /// be called once near the end of encoding.
    fn flush_bloom_filter(&mut self) -> Option<Sbbf>;

    /// Computes [`GeospatialStatistics`], if any, and resets internal state such that any internal
    /// accumulator is prepared to accumulate statistics for the next column chunk.
    fn flush_geospatial_statistics(&mut self) -> Option<Box<GeospatialStatistics>>;
}
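
// A minimal sketch (not part of the original file) of the call order a caller
// is expected to follow for one column chunk, assuming the whole chunk fits in
// a single data page: values are written first, the data page is flushed
// (leaving no pending values), and only then may the dictionary page and bloom
// filter be flushed. The function name is hypothetical.
#[allow(dead_code)]
fn encode_single_page_chunk<E: ColumnValueEncoder>(
    encoder: &mut E,
    values: &E::Values,
) -> Result<(DataPageValues<E::T>, Option<DictionaryPage>, Option<Sbbf>)> {
    encoder.write(values, 0, values.len())?;
    let data_page = encoder.flush_data_page()?; // resets the pending value count
    let dict_page = encoder.flush_dict_page()?; // would error with values still pending
    let bloom = encoder.flush_bloom_filter(); // called once, near the end of encoding
    Ok((data_page, dict_page, bloom))
}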

pub struct ColumnValueEncoderImpl<T: DataType> {
    encoder: Box<dyn Encoder<T>>,
    dict_encoder: Option<DictEncoder<T>>,
    descr: ColumnDescPtr,
    num_values: usize,
    statistics_enabled: EnabledStatistics,
    min_value: Option<T::T>,
    max_value: Option<T::T>,
    bloom_filter: Option<Sbbf>,
    variable_length_bytes: Option<i64>,
    geo_stats_accumulator: Option<Box<dyn GeoStatsAccumulator>>,
}

impl<T: DataType> ColumnValueEncoderImpl<T> {
    fn min_max(&self, values: &[T::T], value_indices: Option<&[usize]>) -> Option<(T::T, T::T)> {
        match value_indices {
            Some(indices) => get_min_max(&self.descr, indices.iter().map(|x| &values[*x])),
            None => get_min_max(&self.descr, values.iter()),
        }
    }

    fn write_slice(&mut self, slice: &[T::T]) -> Result<()> {
        if self.statistics_enabled != EnabledStatistics::None
            // INTERVAL, Geometry, and Geography have undefined sort order, so don't write min/max stats for them
            && self.descr.converted_type() != ConvertedType::INTERVAL
        {
            if let Some(accumulator) = self.geo_stats_accumulator.as_deref_mut() {
                update_geo_stats_accumulator(accumulator, slice.iter());
            } else if let Some((min, max)) = self.min_max(slice, None) {
                update_min(&self.descr, &min, &mut self.min_value);
                update_max(&self.descr, &max, &mut self.max_value);
            }

            if let Some(var_bytes) = T::T::variable_length_bytes(slice) {
                *self.variable_length_bytes.get_or_insert(0) += var_bytes;
            }
        }

        // encode the values into bloom filter if enabled
        if let Some(bloom_filter) = &mut self.bloom_filter {
            for value in slice {
                bloom_filter.insert(value);
            }
        }

        match &mut self.dict_encoder {
            Some(encoder) => encoder.put(slice),
            _ => self.encoder.put(slice),
        }
    }
}

impl<T: DataType> ColumnValueEncoder for ColumnValueEncoderImpl<T> {
    type T = T::T;

    type Values = [T::T];

    fn flush_bloom_filter(&mut self) -> Option<Sbbf> {
        self.bloom_filter.take()
    }

    fn try_new(descr: &ColumnDescPtr, props: &WriterProperties) -> Result<Self> {
        let dict_supported = props.dictionary_enabled(descr.path())
            && has_dictionary_support(T::get_physical_type(), props);
        let dict_encoder = dict_supported.then(|| DictEncoder::new(descr.clone()));

        // Set either main encoder or fallback encoder.
        let encoder = get_encoder(
            props
                .encoding(descr.path())
                .unwrap_or_else(|| fallback_encoding(T::get_physical_type(), props)),
            descr,
        )?;

        let statistics_enabled = props.statistics_enabled(descr.path());

        let bloom_filter = props
            .bloom_filter_properties(descr.path())
            .map(|props| Sbbf::new_with_ndv_fpp(props.ndv, props.fpp))
            .transpose()?;

        let geo_stats_accumulator = try_new_geo_stats_accumulator(descr);

        Ok(Self {
            encoder,
            dict_encoder,
            descr: descr.clone(),
            num_values: 0,
            statistics_enabled,
            bloom_filter,
            min_value: None,
            max_value: None,
            variable_length_bytes: None,
            geo_stats_accumulator,
        })
    }

    fn write(&mut self, values: &[T::T], offset: usize, len: usize) -> Result<()> {
        self.num_values += len;

        let slice = values.get(offset..offset + len).ok_or_else(|| {
            general_err!(
                "Expected to write {} values, but have only {}",
                len,
                values.len() - offset
            )
        })?;

        self.write_slice(slice)
    }

    fn write_gather(&mut self, values: &Self::Values, indices: &[usize]) -> Result<()> {
        self.num_values += indices.len();
        let slice: Vec<_> = indices.iter().map(|idx| values[*idx].clone()).collect();
        self.write_slice(&slice)
    }

    fn num_values(&self) -> usize {
        self.num_values
    }

    fn has_dictionary(&self) -> bool {
        self.dict_encoder.is_some()
    }

    fn estimated_memory_size(&self) -> usize {
        let encoder_size = self.encoder.estimated_memory_size();

        let dict_encoder_size = self
            .dict_encoder
            .as_ref()
            .map(|encoder| encoder.estimated_memory_size())
            .unwrap_or_default();

        let bloom_filter_size = self
            .bloom_filter
            .as_ref()
            .map(|bf| bf.estimated_memory_size())
            .unwrap_or_default();

        encoder_size + dict_encoder_size + bloom_filter_size
    }

    fn estimated_dict_page_size(&self) -> Option<usize> {
        Some(self.dict_encoder.as_ref()?.dict_encoded_size())
    }

    fn estimated_data_page_size(&self) -> usize {
        match &self.dict_encoder {
            Some(encoder) => encoder.estimated_data_encoded_size(),
            _ => self.encoder.estimated_data_encoded_size(),
        }
    }

    fn flush_dict_page(&mut self) -> Result<Option<DictionaryPage>> {
        match self.dict_encoder.take() {
            Some(encoder) => {
                if self.num_values != 0 {
                    return Err(general_err!(
                        "Must flush data pages before flushing dictionary"
                    ));
                }

                let buf = encoder.write_dict()?;

                Ok(Some(DictionaryPage {
                    buf,
                    num_values: encoder.num_entries(),
                    is_sorted: encoder.is_sorted(),
                }))
            }
            _ => Ok(None),
        }
    }

    fn flush_data_page(&mut self) -> Result<DataPageValues<T::T>> {
        let (buf, encoding) = match &mut self.dict_encoder {
            Some(encoder) => (encoder.write_indices()?, Encoding::RLE_DICTIONARY),
            _ => (self.encoder.flush_buffer()?, self.encoder.encoding()),
        };

        Ok(DataPageValues {
            buf,
            encoding,
            num_values: std::mem::take(&mut self.num_values),
            min_value: self.min_value.take(),
            max_value: self.max_value.take(),
            variable_length_bytes: self.variable_length_bytes.take(),
        })
    }

    fn flush_geospatial_statistics(&mut self) -> Option<Box<GeospatialStatistics>> {
        self.geo_stats_accumulator.as_mut().map(|a| a.finish())?
    }
}
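
// A minimal sketch (not part of the original file) of how `WriterProperties`
// drives the choices made in `try_new` above; the specific settings are
// illustrative assumptions, not defaults. Dictionary support, the encoding
// (with its type-dependent fallback), the statistics level checked by
// `write_slice`, and bloom filter construction are all resolved from
// properties like these, per column path.
#[allow(dead_code)]
fn example_writer_properties() -> WriterProperties {
    WriterProperties::builder()
        // disable dictionary encoding, forcing the non-dictionary encoder
        .set_dictionary_enabled(false)
        // explicit encoding instead of `fallback_encoding(...)`
        .set_encoding(Encoding::PLAIN)
        // statistics level consulted by `write_slice`
        .set_statistics_enabled(EnabledStatistics::Page)
        // causes `try_new` to construct an `Sbbf`
        .set_bloom_filter_enabled(true)
        .build()
}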

fn get_min_max<'a, T, I>(descr: &ColumnDescriptor, mut iter: I) -> Option<(T, T)>
where
    T: ParquetValueType + 'a,
    I: Iterator<Item = &'a T>,
{
    let first = loop {
        let next = iter.next()?;
        if !is_nan(descr, next) {
            break next;
        }
    };

    let mut min = first;
    let mut max = first;
    for val in iter {
        if is_nan(descr, val) {
            continue;
        }
        if compare_greater(descr, min, val) {
            min = val;
        }
        if compare_greater(descr, val, max) {
            max = val;
        }
    }

    // Float/Double statistics have a special case for zero.
    //
    // If the computed min is zero, whether negative or positive,
    // the spec states that the min should be written as -0.0
    // (negative zero)
    //
    // For max, it has similar logic but will be written as 0.0
    // (positive zero)
    let min = replace_zero(min, descr, -0.0);
    let max = replace_zero(max, descr, 0.0);

    Some((min, max))
}

#[inline]
fn replace_zero<T: ParquetValueType>(val: &T, descr: &ColumnDescriptor, replace: f32) -> T {
    match T::PHYSICAL_TYPE {
        Type::FLOAT if f32::from_le_bytes(val.as_bytes().try_into().unwrap()) == 0.0 => {
            T::try_from_le_slice(&f32::to_le_bytes(replace)).unwrap()
        }
        Type::DOUBLE if f64::from_le_bytes(val.as_bytes().try_into().unwrap()) == 0.0 => {
            T::try_from_le_slice(&f64::to_le_bytes(replace as f64)).unwrap()
        }
        Type::FIXED_LEN_BYTE_ARRAY
            if descr.logical_type() == Some(LogicalType::Float16)
                && f16::from_le_bytes(val.as_bytes().try_into().unwrap()) == f16::NEG_ZERO =>
        {
            T::try_from_le_slice(&f16::to_le_bytes(f16::from_f32(replace))).unwrap()
        }
        _ => val.clone(),
    }
}
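
// A minimal sketch (a hypothetical test, not part of the original file) of why
// `replace_zero` must normalize the sign explicitly: -0.0 and 0.0 compare equal
// under IEEE 754, so the min/max scan above can surface either sign, while the
// spec requires min to be written as -0.0 and max as 0.0.
#[cfg(test)]
mod zero_sign_sketch {
    #[test]
    fn zero_signs_compare_equal_but_encode_differently() {
        // comparison cannot distinguish the two zeros...
        assert!(-0.0f32 == 0.0f32);
        // ...but their little-endian encodings differ, which is what
        // `replace_zero` corrects for in the written statistics
        assert_ne!((-0.0f32).to_le_bytes(), 0.0f32.to_le_bytes());
    }
}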

fn update_geo_stats_accumulator<'a, T, I>(bounder: &mut dyn GeoStatsAccumulator, iter: I)
where
    T: ParquetValueType + 'a,
    I: Iterator<Item = &'a T>,
{
    if bounder.is_valid() {
        for val in iter {
            bounder.update_wkb(val.as_bytes());
        }
    }
}