gparai commented on a change in pull request #729: Drill 1328: Support table statistics for Parquet
URL: https://github.com/apache/drill/pull/729#discussion_r257074535
 
 

 ##########
 File path: exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StatisticsAggrFunctions.java
 ##########
 @@ -0,0 +1,5516 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * This class is automatically generated from AggrTypeFunctions2.tdd using FreeMarker.
+ */
+
+package org.apache.drill.exec.expr.fn.impl;
+
+import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
+import org.apache.drill.exec.expr.DrillAggFunc;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import org.apache.drill.exec.expr.holders.BigIntHolder;
+import org.apache.drill.exec.expr.holders.BitHolder;
+import org.apache.drill.exec.expr.holders.DateHolder;
+import org.apache.drill.exec.expr.holders.Decimal18Holder;
+import org.apache.drill.exec.expr.holders.Decimal28DenseHolder;
+import org.apache.drill.exec.expr.holders.Decimal28SparseHolder;
+import org.apache.drill.exec.expr.holders.Decimal38DenseHolder;
+import org.apache.drill.exec.expr.holders.Decimal38SparseHolder;
+import org.apache.drill.exec.expr.holders.Decimal9Holder;
+import org.apache.drill.exec.expr.holders.Float4Holder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
+import org.apache.drill.exec.expr.holders.IntHolder;
+import org.apache.drill.exec.expr.holders.IntervalHolder;
+import org.apache.drill.exec.expr.holders.NullableBigIntHolder;
+import org.apache.drill.exec.expr.holders.NullableBitHolder;
+import org.apache.drill.exec.expr.holders.NullableDateHolder;
+import org.apache.drill.exec.expr.holders.NullableDecimal18Holder;
+import org.apache.drill.exec.expr.holders.NullableDecimal28DenseHolder;
+import org.apache.drill.exec.expr.holders.NullableDecimal28SparseHolder;
+import org.apache.drill.exec.expr.holders.NullableDecimal38DenseHolder;
+import org.apache.drill.exec.expr.holders.NullableDecimal38SparseHolder;
+import org.apache.drill.exec.expr.holders.NullableDecimal9Holder;
+import org.apache.drill.exec.expr.holders.NullableFloat4Holder;
+import org.apache.drill.exec.expr.holders.NullableFloat8Holder;
+import org.apache.drill.exec.expr.holders.NullableIntHolder;
+import org.apache.drill.exec.expr.holders.NullableIntervalHolder;
+import org.apache.drill.exec.expr.holders.NullableTimeHolder;
+import org.apache.drill.exec.expr.holders.NullableTimeStampHolder;
+import org.apache.drill.exec.expr.holders.NullableVar16CharHolder;
+import org.apache.drill.exec.expr.holders.NullableVarBinaryHolder;
+import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.ObjectHolder;
+import org.apache.drill.exec.expr.holders.TimeHolder;
+import org.apache.drill.exec.expr.holders.TimeStampHolder;
+import org.apache.drill.exec.expr.holders.Var16CharHolder;
+import org.apache.drill.exec.expr.holders.VarBinaryHolder;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+
+@SuppressWarnings("unused")
+public class StatisticsAggrFunctions {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StatisticsAggrFunctions.class);
+
+  /* IMPORTANT NOTE: Please make sure to create a new function for each datatype. See the examples below.
+   * This will result in more performant generated code. Use switch-case/if-else statements judiciously
+   * as it MAY cause the generated code to slow down considerably.
+   */
+  @FunctionTemplate(name = "rowcount", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
+  public static class RowCount implements DrillAggFunc {
+    @Param FieldReader in;
+    @Workspace BigIntHolder count;
+    @Output NullableBigIntHolder out;
+
+    @Override
+    public void setup() {
+      count = new BigIntHolder();
+    }
+
+    @Override
+    public void add() {
+      count.value++;
+    }
+
+    @Override
+    public void output() {
+      out.isSet = 1;
+      out.value = count.value;
+    }
+
+    @Override
+    public void reset() {
+      count.value = 0;
+    }
+  }
+
+  @FunctionTemplate(name = "nonnullrowcount", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
+  public static class NonNullRowCount implements DrillAggFunc {
+    @Param FieldReader in;
+    @Workspace BigIntHolder count;
+    @Output NullableBigIntHolder out;
+
+    @Override
+    public void setup() {
+      count = new BigIntHolder();
+    }
+
+    @Override
+    public void add() {
+      if (in.isSet()) {
+        count.value++;
+      }
+    }
+
+    @Override
+    public void output() {
+      out.isSet = 1;
+      out.value = count.value;
+    }
+
+    @Override
+    public void reset() {
+      count.value = 0;
+    }
+  }
+
+  /**
+   * The log2m parameter defines the accuracy of the counter.  The larger the
+   * log2m the better the accuracy.
+   * accuracy = 1.04/sqrt(2^log2m)
+   * where
+   * log2m - the number of bits to use as the basis for the HLL instance
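+   * For example, log2m = 10 gives accuracy = 1.04/sqrt(2^10) = 1.04/32, i.e. a relative error of roughly 3.25%.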
+   */
+  @FunctionTemplate(name = "hll", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
+  public static class HllFieldReader implements DrillAggFunc {
+    @Param FieldReader in;
+    @Workspace ObjectHolder work;
+    @Output NullableVarBinaryHolder out;
+    @Inject OptionManager options;
+    @Inject DrillBuf buffer;
+    @Workspace IntHolder hllAccuracy;
+
+    @Override
+    public void setup() {
+      work = new ObjectHolder();
+      hllAccuracy.value = (int) options.getLong(org.apache.drill.exec.ExecConstants.HLL_ACCURACY);
+      work.obj = new com.clearspring.analytics.stream.cardinality.HyperLogLog(hllAccuracy.value);
+    }
+
+    @Override
+    public void add() {
+      if (work.obj != null) {
+        com.clearspring.analytics.stream.cardinality.HyperLogLog hll =
+            (com.clearspring.analytics.stream.cardinality.HyperLogLog) work.obj;
+        int mode = in.getType().getMode().getNumber();
+        int type = in.getType().getMinorType().getNumber();
+
+        switch (mode) {
+          case org.apache.drill.common.types.TypeProtos.DataMode.OPTIONAL_VALUE:
+            if (!in.isSet()) {
+              hll.offer(null);
+              break;
+            }
+            // fall through //
+          case org.apache.drill.common.types.TypeProtos.DataMode.REQUIRED_VALUE:
+            switch (type) {
+              case org.apache.drill.common.types.TypeProtos.MinorType.VARCHAR_VALUE:
+                hll.offer(in.readText().toString());
+                break;
+              case org.apache.drill.common.types.TypeProtos.MinorType.BIGINT_VALUE:
+                hll.offer(in.readLong());
+                break;
+              default:
+                work.obj = null;
+            }
+            break;
+          default:
+            work.obj = null;
+        }
+      }
+    }
+
+    @Override
+    public void output() {
+      if (work.obj != null) {
+        com.clearspring.analytics.stream.cardinality.HyperLogLog hll =
+            (com.clearspring.analytics.stream.cardinality.HyperLogLog) work.obj;
+
+        try {
+          byte[] ba = hll.getBytes();
+          out.buffer = buffer.reallocIfNeeded(ba.length);
+          out.start = 0;
+          out.end = ba.length;
+          out.buffer.setBytes(0, ba);
+          out.isSet = 1;
+        } catch (java.io.IOException e) {
+          throw new org.apache.drill.common.exceptions.DrillRuntimeException("Failed to get HyperLogLog output", e);
+        }
+      } else {
+        out.isSet = 0;
+      }
+    }
+
+    @Override
+    public void reset() {
+      work.obj = new com.clearspring.analytics.stream.cardinality.HyperLogLog(hllAccuracy.value);
+    }
+  }
+
+  @FunctionTemplate(name = "ndv", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
 
 Review comment:
   Yes, you are right. I forgot that we agreed to only use parallel stats, so NDV will not be used. By HLL functions, I meant the HLL_MERGE and HLL_DECODE functions; the HLL functions are used in the first phase.
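   For illustration only, here is a minimal standalone sketch of that two-phase flow, built on the same ClearSpring HyperLogLog class that the hll() function above serializes. This is not code from this pull request; the class name HllTwoPhaseSketch and the sample keys are made up, and the real HLL_MERGE/HLL_DECODE implementations may differ in detail:

   // Hypothetical sketch: each "fragment" builds and serializes its own HLL sketch
   // (phase 1, what hll() does above), and the serialized sketches are later
   // deserialized, merged, and decoded into an NDV estimate (conceptually what
   // HLL_MERGE / HLL_DECODE provide in the second phase).
   import com.clearspring.analytics.stream.cardinality.HyperLogLog;

   public class HllTwoPhaseSketch {
     public static void main(String[] args) throws Exception {
       int log2m = 10;  // accuracy knob, analogous to ExecConstants.HLL_ACCURACY

       // Phase 1: two independent fragments offer their values into separate sketches.
       HyperLogLog fragment1 = new HyperLogLog(log2m);
       HyperLogLog fragment2 = new HyperLogLog(log2m);
       for (int i = 0; i < 1000; i++) {
         fragment1.offer("key-" + i);          // values seen by fragment 1
         fragment2.offer("key-" + (i + 500));  // overlapping values seen by fragment 2
       }
       byte[] sketch1 = fragment1.getBytes();  // serialized, as hll() writes to its VarBinary output
       byte[] sketch2 = fragment2.getBytes();

       // Phase 2: deserialize, merge, and decode the distinct-count estimate.
       HyperLogLog merged = HyperLogLog.Builder.build(sketch1);
       merged.addAll(HyperLogLog.Builder.build(sketch2));
       System.out.println("estimated NDV ~ " + merged.cardinality());  // expect roughly 1500
     }
   }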

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
