parquet_show_bloom_filter/parquet-show-bloom-filter.rs

//! Command line tool that reads bloom filter data from a Parquet file and
//! reports whether the supplied values may be present in a given column.

use clap::Parser;
use parquet::basic::Type;
use parquet::bloom_filter::Sbbf;
use parquet::file::metadata::ColumnChunkMetaData;
use parquet::file::{
    properties::ReaderProperties,
    reader::{FileReader, SerializedFileReader},
    serialized_reader::ReadOptionsBuilder,
};
use std::{fs::File, path::Path};

#[derive(Debug, Parser)]
#[clap(author, version, about("Binary to read bloom filter data from a Parquet file"), long_about = None)]
struct Args {
    #[clap(help("Path to the parquet file"))]
    file_name: String,
    #[clap(help("Check the bloom filter indexes for the given column. Only string-typed columns or columns with an Int32 or Int64 physical type are supported"))]
    column: String,
    #[clap(
        help(
            "Check if the given values match the bloom filter; the values are parsed to the physical type of the column"
        ),
        required = true
    )]
    values: Vec<String>,
}

fn main() {
    let args = Args::parse();
    let file_name = args.file_name;
    let path = Path::new(&file_name);
    let file = File::open(path).expect("Unable to open file");

    // Bloom filters are not read by default; opt in via the reader properties.
    let file_reader = SerializedFileReader::new_with_options(
        file,
        ReadOptionsBuilder::new()
            .with_reader_properties(
                ReaderProperties::builder()
                    .set_read_bloom_filter(true)
                    .build(),
            )
            .build(),
    )
    .expect("Unable to open file as Parquet");
    let metadata = file_reader.metadata();
    for (ri, row_group) in metadata.row_groups().iter().enumerate() {
        println!("Row group #{ri}");
        println!("{}", "=".repeat(80));
        // Locate the requested column by its dotted path within this row group.
        if let Some((column_index, column)) = row_group
            .columns()
            .iter()
            .enumerate()
            .find(|(_, column)| column.column_path().string() == args.column)
        {
            let row_group_reader = file_reader
                .get_row_group(ri)
                .expect("Unable to read row group");
            if let Some(sbbf) = row_group_reader.get_column_bloom_filter(column_index) {
                // A positive check only means the value may be present;
                // a negative check means it is definitely absent.
                args.values.iter().for_each(|value| {
                    match check_filter(sbbf, value, column) {
                        Ok(present) => {
                            println!(
                                "Value {} is {} in bloom filter",
                                value,
                                if present { "present" } else { "absent" }
                            )
                        }
                        Err(err) => {
                            println!("{err}");
                        }
                    };
                });
            } else {
                println!("No bloom filter found for column {}", args.column);
            }
        } else {
            println!(
                "No column named {} found, candidate columns are: {}",
                args.column,
                row_group
                    .columns()
                    .iter()
                    .map(|c| c.column_path().string())
                    .collect::<Vec<_>>()
                    .join(", ")
            );
        }
    }
}

/// Parses `value` according to the column's physical type and checks it
/// against the bloom filter.
fn check_filter(sbbf: &Sbbf, value: &String, column: &ColumnChunkMetaData) -> Result<bool, String> {
    match column.column_type() {
        Type::INT32 => {
            let value: i32 = value
                .parse()
                .map_err(|e| format!("Unable to parse value '{}' to i32: {}", value, e))?;
            Ok(sbbf.check(&value))
        }
        Type::INT64 => {
            let value: i64 = value
                .parse()
                .map_err(|e| format!("Unable to parse value '{}' to i64: {}", value, e))?;
            Ok(sbbf.check(&value))
        }
        Type::BYTE_ARRAY => Ok(sbbf.check(&value.as_str())),
        _ => Err(format!(
            "Unsupported column type for checking bloom filter: {}",
            column.column_type()
        )),
    }
}
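
The tool only has something to report if the file was written with bloom filters enabled. The following separate, minimal sketch shows one way such a file could be produced; the file name ids.parquet, the column name id, and the sample values are made up for this example, and the sketch assumes the arrow-array crate plus the parquet crate built with its arrow feature.

use std::{fs::File, sync::Arc};

use arrow_array::{ArrayRef, Int64Array, RecordBatch};
use parquet::arrow::ArrowWriter;
use parquet::file::properties::WriterProperties;

fn main() {
    // Hypothetical data: a single Int64 column named "id".
    let ids: ArrayRef = Arc::new(Int64Array::from(vec![1, 2, 3, 42]));
    let batch = RecordBatch::try_from_iter([("id", ids)]).expect("valid batch");

    // Bloom filters are disabled by default; enable them in the writer properties.
    let props = WriterProperties::builder()
        .set_bloom_filter_enabled(true)
        .build();

    let file = File::create("ids.parquet").expect("Unable to create file");
    let mut writer =
        ArrowWriter::try_new(file, batch.schema(), Some(props)).expect("Unable to create writer");
    writer.write(&batch).expect("Unable to write batch");
    writer.close().expect("Unable to close writer");
}

With such a file in place, the tool can then be run along the lines of parquet-show-bloom-filter ids.parquet id 1 5, which prints, per row group, whether each value is present or absent according to the column's bloom filter.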