Jefffrey commented on code in PR #6290: URL: https://github.com/apache/arrow-rs/pull/6290#discussion_r1778096161
########## parquet/tests/arrow_reader/checksum.rs: ########## @@ -0,0 +1,73 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +//! This file contains an end to end test for verifying checksums when reading parquet files. + +use std::path::PathBuf; + +use arrow::util::test_util::parquet_test_data; +use parquet::arrow::arrow_reader::ArrowReaderBuilder; + +#[test] +fn test_datapage_v1_corrupt_checksum() { + let errors = read_file_batch_errors("datapage_v1-corrupt-checksum.parquet"); + assert_eq!(errors, [ + Err("Parquet argument error: Parquet error: Page CRC checksum mismatch".to_string()), + Ok(()), + Ok(()), + Err("Parquet argument error: Parquet error: Page CRC checksum mismatch".to_string()), + Err("Parquet argument error: Parquet error: Not all children array length are the same!".to_string()) + ]); +} + +#[test] +fn test_datapage_v1_uncompressed_checksum() { + let errors = read_file_batch_errors("datapage_v1-uncompressed-checksum.parquet"); + assert_eq!(errors, [Ok(()), Ok(()), Ok(()), Ok(()), Ok(())]); +} + +#[test] +fn test_datapage_v1_snappy_compressed_checksum() { + let errors = read_file_batch_errors("datapage_v1-snappy-compressed-checksum.parquet"); + assert_eq!(errors, [Ok(()), Ok(()), Ok(()), Ok(()), 
Ok(())]); +} + +#[test] +fn test_plain_dict_uncompressed_checksum() { + let errors = read_file_batch_errors("plain-dict-uncompressed-checksum.parquet"); + assert_eq!(errors, [Ok(())]); +} +#[test] +fn test_rle_dict_snappy_checksum() { + let errors = read_file_batch_errors("rle-dict-snappy-checksum.parquet"); + assert_eq!(errors, [Ok(())]); +} + +/// Reads a file and returns a vector with one element per record batch. +/// The record batch data is replaced with () and errors are stringified. +fn read_file_batch_errors(name: &str) -> Vec<Result<(), String>> { + let path = PathBuf::from(parquet_test_data()).join(name); + println!("Reading file: {:?}", path); + let file = std::fs::File::open(&path).unwrap(); + let reader = ArrowReaderBuilder::try_new(file).unwrap().build().unwrap(); + reader + .map(|x| match x { + Ok(_) => Ok(()), + Err(e) => Err(e.to_string()), + }) + .collect() +} Review Comment: :+1: ########## parquet/src/file/serialized_reader.rs: ########## @@ -391,6 +391,15 @@ pub(crate) fn decode_page( physical_type: Type, decompressor: Option<&mut Box<dyn Codec>>, ) -> Result<Page> { + // Verify the 32-bit CRC checksum of the page + #[cfg(feature = "crc")] + if let Some(expected_crc) = page_header.crc { + let crc = crc32fast::hash(&buffer); + if crc != expected_crc as u32 { + return Err(general_err!("Page CRC checksum mismatch")); + } + } Review Comment: I wonder if there is any benefit to some more complex code that hangs on to a persistent Hasher instead of creating one anew for each data page (which is what crc32fast::hash does here) :thinking: I don't think this needs to be addressed before merging this PR, but it could be worth exploring in the future to see if it makes any difference on benchmarks. 
########## parquet/README.md: ########## @@ -60,6 +60,7 @@ The `parquet` crate provides the following features which may be enabled in your - `zstd` (default) - support for parquet using `zstd` compression - `snap` (default) - support for parquet using `snappy` compression - `cli` - parquet [CLI tools](https://github.com/apache/arrow-rs/tree/master/parquet/src/bin) +- `crc` - verifies checksums when reading data pages Review Comment: ```suggestion - `crc` - enables functionality to automatically verify checksums of each page (if present) when decoding ``` Just to clarify, this flag by itself enables the verification functionality, rather than merely providing the option to enable it -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected]
