jonathanc-n commented on code in PR #20460:
URL: https://github.com/apache/datafusion/pull/20460#discussion_r2839507105
##########
datafusion/functions-aggregate/src/count.rs:
##########
@@ -1038,4 +1212,82 @@ mod tests {
assert_eq!(merged.evaluate()?, ScalarValue::Int64(Some(3)));
Ok(())
}
+
+ #[test]
+ fn multi_column_accumulator_basic() -> Result<()> {
+ let mut acc = MultiColumnDistinctCountAccumulator::new(vec![
+ DataType::Int32,
+ DataType::Utf8,
+ ]);
+
+ // (1, a), (1, b), (1, a), (2, b), (3, b)
+ let col1 = Arc::new(Int32Array::from(vec![
+ Some(1),
+ Some(1),
+ Some(1),
+ Some(2),
+ Some(3),
+ ]));
+ let col2 = Arc::new(StringArray::from(vec![
+ Some("a"),
+ Some("b"),
+ Some("a"),
+ Some("b"),
+ Some("b"),
+ ])) as ArrayRef;
+
+ acc.update_batch(&[col1, col2])?;
+ // Expected (1, a), (1, b), (2, b), (3, b)
+ assert_eq!(acc.evaluate()?, ScalarValue::Int64(Some(4)));
+ Ok(())
+ }
+
+ #[test]
+ fn multi_column_accumulator_merge() -> Result<()> {
+ let mut acc1 = MultiColumnDistinctCountAccumulator::new(vec![
+ DataType::Int32,
+ DataType::Utf8,
+ ]);
+
+ // (1, a), (1, b)
+ let col1 = Arc::new(Int32Array::from(vec![Some(1), Some(1)]));
+ let col2 = Arc::new(StringArray::from(vec![Some("a"), Some("b")])) as
ArrayRef;
+
+ acc1.update_batch(&[col1, col2])?;
+
+ let mut acc2 = MultiColumnDistinctCountAccumulator::new(vec![
+ DataType::Int32,
+ DataType::Utf8,
+ ]);
+
+ // (1, a), (2, b), (3, b)
+ let col1 = Arc::new(Int32Array::from(vec![Some(1), Some(2), Some(3)]));
+ let col2 = Arc::new(StringArray::from(vec![Some("a"), Some("b"),
Some("b")]))
+ as ArrayRef;
+
+ acc2.update_batch(&[col1, col2])?;
+
+ let state_sv1 = acc1.state()?;
+ let state_sv2 = acc2.state()?;
+ let state_arr1: Vec<ArrayRef> = state_sv1
+ .into_iter()
+ .map(|sv| sv.to_array())
+ .collect::<Result<_>>()?;
+ let state_arr2: Vec<ArrayRef> = state_sv2
+ .into_iter()
+ .map(|sv| sv.to_array())
+ .collect::<Result<_>>()?;
+
+ let mut merged = MultiColumnDistinctCountAccumulator::new(vec![
+ DataType::Int32,
+ DataType::Utf8,
+ ]);
+ merged.merge_batch(&state_arr1)?;
+ merged.merge_batch(&state_arr2)?;
+
+ // Expected (1, a), (1, b), (1, a), (2, b), (3, b)
Review Comment:
This comment is incorrect — the tuple (1, a) appears twice across the two inputs, so after merging we only expect 4 distinct values: (1, a), (1, b), (2, b), (3, b).
##########
datafusion/functions-aggregate/src/count.rs:
##########
@@ -293,20 +293,37 @@ impl AggregateUDFImpl for Count {
fn state_fields(&self, args: StateFieldsArgs) -> Result<Vec<FieldRef>> {
if args.is_distinct {
- let dtype: DataType = match &args.input_fields[0].data_type() {
- DataType::Dictionary(_, values_type) =>
(**values_type).clone(),
- &dtype => dtype.clone(),
- };
+ if args.input_fields.len() > 1 {
+ Ok(args
+ .input_fields
+ .iter()
+ .map(|field| {
+ Arc::new(Field::new(
+ format_state_name(args.name, "count distinct"),
Review Comment:
Columns with identical names will produce indistinguishable state field names here. We should include the original field name or the column index in the state name to differentiate them.
##########
datafusion/functions-aggregate/src/count.rs:
##########
@@ -841,6 +865,156 @@ impl Accumulator for DistinctCountAccumulator {
}
}
+#[derive(Debug)]
+struct MultiColumnDistinctCountAccumulator {
+ values: HashSet<Vec<ScalarValue>, RandomState>,
+ state_data_types: Vec<DataType>,
+}
+
+impl MultiColumnDistinctCountAccumulator {
+ fn new(state_data_types: Vec<DataType>) -> Self {
+ Self {
+ values: HashSet::default(),
+ state_data_types,
+ }
+ }
+
+ fn update(&mut self, values: &[ScalarValue]) -> Result<()> {
+ if !values.iter().any(|v| v.is_null()) {
+ self.values.insert(values.to_vec());
+ }
+ Ok(())
+ }
+
+ fn fixed_size(&self) -> usize {
+ std::mem::size_of_val(self)
Review Comment:
Let's add `use std::mem::size_of_val;` at the top of the file and call `size_of_val(self)` here instead of the fully qualified path.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]