alamb commented on code in PR #1:
URL: https://github.com/apache/arrow-datafusion-comet/pull/1#discussion_r1466735019


##########
common/src/main/java/org/apache/comet/parquet/AbstractColumnReader.java:
##########
@@ -0,0 +1,116 @@
+/*

Review Comment:
   I am fascinated to know (it can come later) why Comet needs its own Parquet
reader in Java -- maybe we can add any missing functionality upstream in
parquet-rs



##########
core/src/execution/mod.rs:
##########
@@ -0,0 +1,55 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+//! PoC of vectorized execution through JNI to Rust.
+pub mod datafusion;
+pub mod jni_api;
+
+pub mod kernels; // for benchmarking
+
+mod metrics;
+pub mod operators;
+pub mod serde;
+mod timezone;
+pub(crate) mod utils;
+
+// Include generated modules from .proto files.
+#[allow(missing_docs)]
+pub mod spark_expression {
+    include!(concat!("generated", "/spark.spark_expression.rs"));
+}
+
+// Include generated modules from .proto files.
+#[allow(missing_docs)]
+pub mod spark_partitioning {
+    include!(concat!("generated", "/spark.spark_partitioning.rs"));
+}
+
+// Include generated modules from .proto files.
+#[allow(missing_docs)]
+pub mod spark_operator {
+    include!(concat!("generated", "/spark.spark_operator.rs"));
+}
+
+#[cfg(test)]
+mod tests {

Review Comment:
   😆 



##########
common/src/main/java/org/apache/comet/parquet/BloomFilterReader.java:
##########
@@ -0,0 +1,253 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.comet.parquet;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.parquet.column.values.bloomfilter.BlockSplitBloomFilter;
+import org.apache.parquet.column.values.bloomfilter.BloomFilter;
+import org.apache.parquet.crypto.AesCipher;
+import org.apache.parquet.crypto.InternalColumnDecryptionSetup;
+import org.apache.parquet.crypto.InternalFileDecryptor;
+import org.apache.parquet.crypto.ModuleCipherFactory;
+import org.apache.parquet.crypto.ParquetCryptoRuntimeException;
+import org.apache.parquet.filter2.predicate.FilterPredicate;
+import org.apache.parquet.filter2.predicate.Operators;
+import org.apache.parquet.filter2.predicate.UserDefinedPredicate;
+import org.apache.parquet.format.BlockCipher;
+import org.apache.parquet.format.BloomFilterHeader;
+import org.apache.parquet.format.Util;
+import org.apache.parquet.hadoop.metadata.BlockMetaData;
+import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
+import org.apache.parquet.hadoop.metadata.ColumnPath;
+import org.apache.parquet.io.SeekableInputStream;
+
+public class BloomFilterReader implements FilterPredicate.Visitor<Boolean> {

Review Comment:
   FWIW DataFusion's Parquet reader handles bloom filters natively now, thanks
to @hengfeiyang:
https://github.com/apache/arrow-datafusion/blob/5e9c9a1f7cecabe6e6c40c8296adb517fac0da13/datafusion/core/src/datasource/physical_plan/parquet/row_groups.rs#L113
   
   Though I don't think it supports encrypted files
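   
   For reference, a minimal sketch (not from this PR) of exercising that pruning
through the DataFusion API. The config key is an assumption -- it has been
renamed across DataFusion versions -- and `data.parquet` stands in for a file
that was written with bloom filters enabled:

```rust
use datafusion::prelude::*;

#[tokio::main]
async fn main() -> datafusion::error::Result<()> {
    // Assumed config key; newer DataFusion releases call it
    // `datafusion.execution.parquet.bloom_filter_on_read`.
    let config = SessionConfig::new()
        .set_bool("datafusion.execution.parquet.bloom_filter_enabled", true);
    let ctx = SessionContext::new_with_config(config);

    // Placeholder file; pruning only helps if it was written with bloom filters.
    ctx.register_parquet("t", "data.parquet", ParquetReadOptions::default())
        .await?;

    // An equality predicate is what the bloom filter can use to skip whole
    // row groups before any pages are decoded.
    ctx.sql("SELECT * FROM t WHERE id = 42").await?.show().await?;
    Ok(())
}
```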



##########
core/src/execution/kernels/temporal.rs:
##########
@@ -0,0 +1,438 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+//! temporal kernels
+
+use chrono::{DateTime, Datelike, Duration, NaiveDateTime, Timelike, Utc};
+
+use std::sync::Arc;
+
+use arrow::{array::*, datatypes::DataType};
+use arrow_array::{
+    downcast_dictionary_array, downcast_temporal_array,
+    temporal_conversions::*,
+    timezone::Tz,
+    types::{ArrowTemporalType, Date32Type, TimestampMicrosecondType},
+    ArrowNumericType,
+};
+
+use arrow_schema::TimeUnit;
+
+use crate::errors::ExpressionError;
+
+// Copied from arrow_arith/temporal.rs
+macro_rules! return_compute_error_with {
+    ($msg:expr, $param:expr) => {
+        return {
+            Err(ExpressionError::ArrowError(format!(
+                "{}: {:?}",
+                $msg, $param
+            )))
+        }
+    };
+}
+
+// The number of days between the beginning of the proleptic Gregorian calendar (0001-01-01)
+// and the beginning of the Unix Epoch (1970-01-01)
+const DAYS_TO_UNIX_EPOCH: i32 = 719_163;
+const MICROS_TO_UNIX_EPOCH: i64 = 62_167_132_800 * 1_000_000;
+
+// Copied from arrow_arith/temporal.rs with modification to the output datatype
+// Transforms an array of NaiveDateTime to an array of Date32 after applying an operation
+fn as_datetime_with_op<A: ArrayAccessor<Item = T::Native>, T: ArrowTemporalType, F>(
+    iter: ArrayIter<A>,
+    mut builder: PrimitiveBuilder<Date32Type>,
+    op: F,
+) -> Date32Array
+where
+    F: Fn(NaiveDateTime) -> i32,
+    i64: From<T::Native>,
+{
+    iter.into_iter().for_each(|value| {
+        if let Some(value) = value {
+            match as_datetime::<T>(i64::from(value)) {
+                Some(dt) => builder.append_value(op(dt)),
+                None => builder.append_null(),
+            }
+        } else {
+            builder.append_null();
+        }
+    });
+
+    builder.finish()
+}
+
+// Based on arrow_arith/temporal.rs:extract_component_from_datetime_array
+// Transforms an array of DateTime<Tz> to an array of TimestampMicrosecond after applying an
+// operation
+fn as_timestamp_tz_with_op<A: ArrayAccessor<Item = T::Native>, T: ArrowTemporalType, F>(
+    iter: ArrayIter<A>,
+    mut builder: PrimitiveBuilder<TimestampMicrosecondType>,
+    tz: &str,
+    op: F,
+) -> Result<TimestampMicrosecondArray, ExpressionError>
+where
+    F: Fn(DateTime<Tz>) -> i64,
+    i64: From<T::Native>,
+{
+    let tz: Tz = tz.parse()?;
+    for value in iter {
+        match value {
+            Some(value) => match as_datetime_with_timezone::<T>(value.into(), tz) {
+                Some(time) => builder.append_value(op(time)),
+                _ => {
+                    return Err(ExpressionError::ArrowError(
+                        "Unable to read value as datetime".to_string(),
+                    ));
+                }
+            },
+            None => builder.append_null(),
+        }
+    }
+    Ok(builder.finish())
+}
+
+#[inline]
+fn as_days_from_unix_epoch(dt: Option<NaiveDateTime>) -> i32 {
+    dt.unwrap().num_days_from_ce() - DAYS_TO_UNIX_EPOCH
+}
+
+// Apply the Tz to the NaiveDateTime, convert to UTC, and return as microseconds since the Unix epoch
+#[inline]
+fn as_micros_from_unix_epoch_utc(dt: Option<DateTime<Tz>>) -> i64 {
+    dt.unwrap().with_timezone(&Utc).timestamp_micros()
+}
+
+#[inline]
+fn trunc_date_to_year<T: Datelike + Timelike>(dt: T) -> Option<T> {
+    Some(dt)
+        .and_then(|d| d.with_nanosecond(0))
+        .and_then(|d| d.with_second(0))
+        .and_then(|d| d.with_minute(0))
+        .and_then(|d| d.with_hour(0))
+        .and_then(|d| d.with_day0(0))
+        .and_then(|d| d.with_month0(0))
+}
+
+/// returns the month of the beginning of the quarter
+#[inline]
+fn quarter_month<T: Datelike>(dt: &T) -> u32 {
+    1 + 3 * ((dt.month() - 1) / 3)
+}
+
+#[inline]
+fn trunc_date_to_quarter<T: Datelike + Timelike>(dt: T) -> Option<T> {
+    Some(dt)
+        .and_then(|d| d.with_nanosecond(0))
+        .and_then(|d| d.with_second(0))
+        .and_then(|d| d.with_minute(0))
+        .and_then(|d| d.with_hour(0))
+        .and_then(|d| d.with_day0(0))
+        .and_then(|d| d.with_month(quarter_month(&d)))
+}
+
+#[inline]
+fn trunc_date_to_month<T: Datelike + Timelike>(dt: T) -> Option<T> {
+    Some(dt)
+        .and_then(|d| d.with_nanosecond(0))
+        .and_then(|d| d.with_second(0))
+        .and_then(|d| d.with_minute(0))
+        .and_then(|d| d.with_hour(0))
+        .and_then(|d| d.with_day0(0))
+}
+
+#[inline]
+fn trunc_date_to_week<T>(dt: T) -> Option<T>
+where
+    T: Datelike + Timelike + std::ops::Sub<Duration, Output = T> + Copy,
+{
+    Some(dt)
+        .map(|d| d - Duration::seconds(60 * 60 * 24 * d.weekday() as i64))
+        .and_then(|d| d.with_nanosecond(0))
+        .and_then(|d| d.with_second(0))
+        .and_then(|d| d.with_minute(0))
+        .and_then(|d| d.with_hour(0))
+}
+
+#[inline]
+fn trunc_date_to_day<T: Timelike>(dt: T) -> Option<T> {
+    Some(dt)
+        .and_then(|d| d.with_nanosecond(0))
+        .and_then(|d| d.with_second(0))
+        .and_then(|d| d.with_minute(0))
+        .and_then(|d| d.with_hour(0))
+}
+
+#[inline]
+fn trunc_date_to_hour<T: Timelike>(dt: T) -> Option<T> {
+    Some(dt)
+        .and_then(|d| d.with_nanosecond(0))
+        .and_then(|d| d.with_second(0))
+        .and_then(|d| d.with_minute(0))
+}
+
+#[inline]
+fn trunc_date_to_minute<T: Timelike>(dt: T) -> Option<T> {
+    Some(dt)
+        .and_then(|d| d.with_nanosecond(0))
+        .and_then(|d| d.with_second(0))
+}
+
+#[inline]
+fn trunc_date_to_second<T: Timelike>(dt: T) -> Option<T> {
+    Some(dt).and_then(|d| d.with_nanosecond(0))
+}
+
+#[inline]
+fn trunc_date_to_ms<T: Timelike>(dt: T) -> Option<T> {
+    Some(dt).and_then(|d| d.with_nanosecond(1_000_000 * (d.nanosecond() / 1_000_000)))
+}
+
+#[inline]
+fn trunc_date_to_microsec<T: Timelike>(dt: T) -> Option<T> {
+    Some(dt).and_then(|d| d.with_nanosecond(1_000 * (d.nanosecond() / 1_000)))
+}
+
+pub fn date_trunc_dyn(array: &dyn Array, format: String) -> Result<ArrayRef, ExpressionError> {

Review Comment:
   FWIW over time I hope we can move most functions like `date_trunc` out of
DataFusion's core and potentially have versions like this one, with
Spark-compatible behavior, available for others to use and help maintain



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
