This is an automated email from the ASF dual-hosted git repository.

jackie pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/pinot.git


The following commit(s) were added to refs/heads/master by this push:
     new b345357  Add support for IS NULL and IS NOT NULL in transform 
functions (#8264)
b345357 is described below

commit b345357b2d1c8aad47c39e698c5cd32353430ead
Author: Kartik Khare <[email protected]>
AuthorDate: Wed Mar 23 00:07:54 2022 +0530

    Add support for IS NULL and IS NOT NULL in transform functions (#8264)
---
 .../common/function/TransformFunctionType.java     |   3 +
 .../function/IdentifierTransformFunction.java      |   4 +
 .../function/IsNotNullTransformFunction.java       |  92 ++++++++
 .../function/IsNullTransformFunction.java          |  91 ++++++++
 .../function/TransformFunctionFactory.java         |  13 +-
 .../NullHandlingTransformFunctionTest.java         | 235 +++++++++++++++++++++
 .../tests/NullHandlingIntegrationTest.java         |  14 ++
 7 files changed, 447 insertions(+), 5 deletions(-)

diff --git 
a/pinot-common/src/main/java/org/apache/pinot/common/function/TransformFunctionType.java
 
b/pinot-common/src/main/java/org/apache/pinot/common/function/TransformFunctionType.java
index 42f948f..b0955cf 100644
--- 
a/pinot-common/src/main/java/org/apache/pinot/common/function/TransformFunctionType.java
+++ 
b/pinot-common/src/main/java/org/apache/pinot/common/function/TransformFunctionType.java
@@ -56,6 +56,9 @@ public enum TransformFunctionType {
   LESS_THAN_OR_EQUAL("less_than_or_equal"),
   IN("in"),
 
+  IS_NULL("is_null"),
+  IS_NOT_NULL("is_not_null"),
+
   AND("and"),
   OR("or"),
 
diff --git 
a/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/IdentifierTransformFunction.java
 
b/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/IdentifierTransformFunction.java
index eb48436..11db593 100644
--- 
a/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/IdentifierTransformFunction.java
+++ 
b/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/IdentifierTransformFunction.java
@@ -45,6 +45,10 @@ public class IdentifierTransformFunction implements 
TransformFunction, PushDownT
         _dictionary != null);
   }
 
  /**
   * Returns the raw column name this identifier refers to, so callers (e.g. the
   * null-handling transform functions) can look up the column's data source.
   */
  public String getColumnName() {
    return _columnName;
  }
+
   @Override
   public String getName() {
     throw new UnsupportedOperationException();
diff --git 
a/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/IsNotNullTransformFunction.java
 
b/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/IsNotNullTransformFunction.java
new file mode 100644
index 0000000..0a5a902
--- /dev/null
+++ 
b/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/IsNotNullTransformFunction.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.operator.transform.function;
+
+import com.google.common.base.Preconditions;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import org.apache.pinot.common.function.TransformFunctionType;
+import org.apache.pinot.core.operator.blocks.ProjectionBlock;
+import org.apache.pinot.core.operator.transform.TransformResultMetadata;
+import org.apache.pinot.segment.spi.datasource.DataSource;
+import org.apache.pinot.segment.spi.index.reader.NullValueVectorReader;
+import org.roaringbitmap.PeekableIntIterator;
+
+
+public class IsNotNullTransformFunction extends BaseTransformFunction {
+
+  private int[] _results;
+  private PeekableIntIterator _nullValueVectorIterator;
+
+  @Override
+  public String getName() {
+    return TransformFunctionType.IS_NOT_NULL.getName();
+  }
+
+  @Override
+  public void init(List<TransformFunction> arguments, Map<String, DataSource> 
dataSourceMap) {
+    Preconditions.checkArgument(arguments.size() == 1,
+        "Exact 1 argument is required for IS_NOT_NULL operator function");
+    TransformFunction transformFunction = arguments.get(0);
+    if (!(transformFunction instanceof IdentifierTransformFunction)) {
+      throw new IllegalArgumentException(
+          "Only column names are supported in IS_NOT_NULL. Support for 
functions is planned for future release");
+    }
+    String columnName = ((IdentifierTransformFunction) 
transformFunction).getColumnName();
+    NullValueVectorReader nullValueVectorReader = 
dataSourceMap.get(columnName).getNullValueVector();
+    if (nullValueVectorReader != null) {
+      _nullValueVectorIterator = 
nullValueVectorReader.getNullBitmap().getIntIterator();
+    } else {
+      _nullValueVectorIterator = null;
+    }
+  }
+
+  @Override
+  public TransformResultMetadata getResultMetadata() {
+    return BOOLEAN_SV_NO_DICTIONARY_METADATA;
+  }
+
+  @Override
+  public int[] transformToIntValuesSV(ProjectionBlock projectionBlock) {
+    int length = projectionBlock.getNumDocs();
+    if (_results == null || _results.length < length) {
+      _results = new int[length];
+    }
+
+    int[] docIds = projectionBlock.getDocIds();
+
+    Arrays.fill(_results, 1);
+    if (_nullValueVectorIterator != null) {
+      int currentDocIdIndex = 0;
+      while (_nullValueVectorIterator.hasNext() & currentDocIdIndex < length) {
+        _nullValueVectorIterator.advanceIfNeeded(docIds[currentDocIdIndex]);
+        currentDocIdIndex = Arrays.binarySearch(docIds, currentDocIdIndex, 
length, _nullValueVectorIterator.next());
+        if (currentDocIdIndex >= 0) {
+          _results[currentDocIdIndex] = 0;
+          currentDocIdIndex++;
+        } else {
+          currentDocIdIndex = -currentDocIdIndex - 1;
+        }
+      }
+    }
+
+    return _results;
+  }
+}
diff --git 
a/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/IsNullTransformFunction.java
 
b/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/IsNullTransformFunction.java
new file mode 100644
index 0000000..16dcc3f
--- /dev/null
+++ 
b/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/IsNullTransformFunction.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.operator.transform.function;
+
+import com.google.common.base.Preconditions;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import org.apache.pinot.common.function.TransformFunctionType;
+import org.apache.pinot.core.operator.blocks.ProjectionBlock;
+import org.apache.pinot.core.operator.transform.TransformResultMetadata;
+import org.apache.pinot.segment.spi.datasource.DataSource;
+import org.apache.pinot.segment.spi.index.reader.NullValueVectorReader;
+import org.roaringbitmap.PeekableIntIterator;
+
+
+public class IsNullTransformFunction extends BaseTransformFunction {
+
+  private int[] _results;
+  private PeekableIntIterator _nullValueVectorIterator;
+
+  @Override
+  public String getName() {
+    return TransformFunctionType.IS_NULL.getName();
+  }
+
+  @Override
+  public void init(List<TransformFunction> arguments, Map<String, DataSource> 
dataSourceMap) {
+    Preconditions.checkArgument(arguments.size() == 1, "Exact 1 argument is 
required for IS_NULL operator function");
+    TransformFunction transformFunction = arguments.get(0);
+    if (!(transformFunction instanceof IdentifierTransformFunction)) {
+      throw new IllegalArgumentException(
+          "Only column names are supported in IS_NULL. Support for functions 
is planned for future release");
+    }
+    String columnName = ((IdentifierTransformFunction) 
transformFunction).getColumnName();
+    NullValueVectorReader nullValueVectorReader = 
dataSourceMap.get(columnName).getNullValueVector();
+    if (nullValueVectorReader != null) {
+      _nullValueVectorIterator = 
nullValueVectorReader.getNullBitmap().getIntIterator();
+    } else {
+      _nullValueVectorIterator = null;
+    }
+  }
+
+  @Override
+  public TransformResultMetadata getResultMetadata() {
+    return BOOLEAN_SV_NO_DICTIONARY_METADATA;
+  }
+
+  @Override
+  public int[] transformToIntValuesSV(ProjectionBlock projectionBlock) {
+    int length = projectionBlock.getNumDocs();
+    if (_results == null || _results.length < length) {
+      _results = new int[length];
+    }
+
+    int[] docIds = projectionBlock.getDocIds();
+
+    Arrays.fill(_results, 0);
+    if (_nullValueVectorIterator != null) {
+      int currentDocIdIndex = 0;
+      while (_nullValueVectorIterator.hasNext() & currentDocIdIndex < length) {
+        _nullValueVectorIterator.advanceIfNeeded(docIds[currentDocIdIndex]);
+        currentDocIdIndex = Arrays.binarySearch(docIds, currentDocIdIndex, 
length, _nullValueVectorIterator.next());
+        if (currentDocIdIndex >= 0) {
+          _results[currentDocIdIndex] = 1;
+          currentDocIdIndex++;
+        } else {
+          currentDocIdIndex = -currentDocIdIndex - 1;
+        }
+      }
+    }
+
+    return _results;
+  }
+}
diff --git 
a/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/TransformFunctionFactory.java
 
b/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/TransformFunctionFactory.java
index b518400..000879f 100644
--- 
a/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/TransformFunctionFactory.java
+++ 
b/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/TransformFunctionFactory.java
@@ -165,6 +165,11 @@ public class TransformFunctionFactory {
           // tuple selection
           
put(canonicalize(TransformFunctionType.LEAST.getName().toLowerCase()), 
LeastTransformFunction.class);
           
put(canonicalize(TransformFunctionType.GREATEST.getName().toLowerCase()), 
GreatestTransformFunction.class);
+
+          // null handling
+          
put(canonicalize(TransformFunctionType.IS_NULL.getName().toLowerCase()), 
IsNullTransformFunction.class);
+          
put(canonicalize(TransformFunctionType.IS_NOT_NULL.getName().toLowerCase()),
+              IsNotNullTransformFunction.class);
         }
       };
 
@@ -235,10 +240,9 @@ public class TransformFunctionFactory {
           if (functionInfo == null) {
             if (FunctionRegistry.containsFunction(functionName)) {
               throw new BadQueryRequestException(
-                String.format("Unsupported function: %s with %d parameters", 
functionName, numArguments));
+                  String.format("Unsupported function: %s with %d parameters", 
functionName, numArguments));
             } else {
-              throw new BadQueryRequestException(
-                String.format("Unsupported function: %s not found", 
functionName));
+              throw new BadQueryRequestException(String.format("Unsupported 
function: %s not found", functionName));
             }
           }
           transformFunction = new ScalarTransformFunctionWrapper(functionInfo);
@@ -259,8 +263,7 @@ public class TransformFunctionFactory {
         String columnName = expression.getIdentifier();
         return new IdentifierTransformFunction(columnName, 
dataSourceMap.get(columnName));
       case LITERAL:
-        return queryContext == null
-            ? new LiteralTransformFunction(expression.getLiteral())
+        return queryContext == null ? new 
LiteralTransformFunction(expression.getLiteral())
             : 
queryContext.getOrComputeSharedValue(LiteralTransformFunction.class, 
expression.getLiteral(),
                 LiteralTransformFunction::new);
       default:
diff --git 
a/pinot-core/src/test/java/org/apache/pinot/core/operator/transform/function/NullHandlingTransformFunctionTest.java
 
b/pinot-core/src/test/java/org/apache/pinot/core/operator/transform/function/NullHandlingTransformFunctionTest.java
new file mode 100644
index 0000000..358209f
--- /dev/null
+++ 
b/pinot-core/src/test/java/org/apache/pinot/core/operator/transform/function/NullHandlingTransformFunctionTest.java
@@ -0,0 +1,235 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.operator.transform.function;
+
+import java.io.File;
+import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.pinot.common.function.TransformFunctionType;
+import org.apache.pinot.common.request.context.ExpressionContext;
+import org.apache.pinot.common.request.context.RequestContextUtils;
+import org.apache.pinot.core.operator.DocIdSetOperator;
+import org.apache.pinot.core.operator.ProjectionOperator;
+import org.apache.pinot.core.operator.blocks.ProjectionBlock;
+import org.apache.pinot.core.operator.filter.MatchAllFilterOperator;
+import org.apache.pinot.core.plan.DocIdSetPlanNode;
+import 
org.apache.pinot.segment.local.indexsegment.immutable.ImmutableSegmentLoader;
+import 
org.apache.pinot.segment.local.segment.creator.impl.SegmentIndexCreationDriverImpl;
+import org.apache.pinot.segment.local.segment.readers.GenericRowRecordReader;
+import org.apache.pinot.segment.spi.IndexSegment;
+import org.apache.pinot.segment.spi.creator.SegmentGeneratorConfig;
+import org.apache.pinot.segment.spi.datasource.DataSource;
+import org.apache.pinot.spi.config.table.TableConfig;
+import org.apache.pinot.spi.config.table.TableType;
+import org.apache.pinot.spi.data.FieldSpec;
+import org.apache.pinot.spi.data.Schema;
+import org.apache.pinot.spi.data.TimeGranularitySpec;
+import org.apache.pinot.spi.data.readers.GenericRow;
+import org.apache.pinot.spi.utils.ReadMode;
+import org.apache.pinot.spi.utils.builder.TableConfigBuilder;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+
/**
 * Tests for the IS_NULL / IS_NOT_NULL transform functions against a segment
 * built with null handling enabled, where every NULL_VALUE_MOD-th row has all
 * value columns set to null.
 */
public class NullHandlingTransformFunctionTest {
  private static final String SEGMENT_NAME = "testSegmentWithNulls";
  private static final String INDEX_DIR_PATH = FileUtils.getTempDirectoryPath() + File.separator + SEGMENT_NAME;
  private static final Random RANDOM = new Random();

  protected static final int NUM_ROWS = 1000;
  protected static final String INT_SV_COLUMN = "intSV";
  protected static final String LONG_SV_COLUMN = "longSV";
  protected static final String FLOAT_SV_COLUMN = "floatSV";
  protected static final String DOUBLE_SV_COLUMN = "doubleSV";
  protected static final String STRING_SV_COLUMN = "stringSV";
  protected static final String BYTES_SV_COLUMN = "bytesSV";

  protected static final String TIMESTAMP_COLUMN = "timestampColumn";
  protected static final String TIME_COLUMN = "timeColumn";
  // Random epoch-millis values within roughly the last year (see setup()).
  protected final long[] _timeValues = new long[NUM_ROWS];

  // Generated per-row values; only rows where i % NULL_VALUE_MOD != 0 are
  // actually written into the segment, the rest are stored as null.
  protected final int[] _intSVValues = new int[NUM_ROWS];
  protected final long[] _longSVValues = new long[NUM_ROWS];
  protected final float[] _floatSVValues = new float[NUM_ROWS];
  protected final double[] _doubleSVValues = new double[NUM_ROWS];
  protected final String[] _stringSVValues = new String[NUM_ROWS];
  protected final byte[][] _bytesSVValues = new byte[NUM_ROWS][];

  protected Map<String, DataSource> _dataSourceMap;
  protected ProjectionBlock _projectionBlock;
  // Every NULL_VALUE_MOD-th row (i % NULL_VALUE_MOD == 0) carries null values.
  protected static final int NULL_VALUE_MOD = 10;

  /**
   * Builds an immutable segment with null handling enabled and captures a
   * projection block over all of its rows for the tests below.
   */
  @BeforeClass
  public void setup()
      throws Exception {
    FileUtils.deleteQuietly(new File(INDEX_DIR_PATH));
    DecimalFormat df = new DecimalFormat("0", DecimalFormatSymbols.getInstance(Locale.ENGLISH));
    df.setMaximumFractionDigits(340); // 340 = DecimalFormat.DOUBLE_FRACTION_DIGITS
    long currentTimeMs = System.currentTimeMillis();
    // Generate random values for every row.
    for (int i = 0; i < NUM_ROWS; i++) {
      _intSVValues[i] = RANDOM.nextInt();
      _longSVValues[i] = RANDOM.nextLong();
      _floatSVValues[i] = _intSVValues[i] * RANDOM.nextFloat();
      _doubleSVValues[i] = _intSVValues[i] * RANDOM.nextDouble();
      _stringSVValues[i] = df.format(_intSVValues[i] * RANDOM.nextDouble());
      _bytesSVValues[i] = RandomStringUtils.randomAlphanumeric(26).getBytes();

      _timeValues[i] = currentTimeMs - RANDOM.nextInt(365 * 24 * 3600) * 1000L;
    }

    // Build rows: every NULL_VALUE_MOD-th row gets nulls for all value columns;
    // the time columns are always populated.
    List<GenericRow> rows = new ArrayList<>(NUM_ROWS);
    for (int i = 0; i < NUM_ROWS; i++) {
      Map<String, Object> map = new HashMap<>();
      if (i % NULL_VALUE_MOD != 0) {
        map.put(INT_SV_COLUMN, _intSVValues[i]);
        map.put(LONG_SV_COLUMN, _longSVValues[i]);
        map.put(FLOAT_SV_COLUMN, _floatSVValues[i]);
        map.put(DOUBLE_SV_COLUMN, _doubleSVValues[i]);
        map.put(STRING_SV_COLUMN, _stringSVValues[i]);
        map.put(BYTES_SV_COLUMN, _bytesSVValues[i]);
      } else {
        map.put(INT_SV_COLUMN, null);
        map.put(LONG_SV_COLUMN, null);
        map.put(FLOAT_SV_COLUMN, null);
        map.put(DOUBLE_SV_COLUMN, null);
        map.put(STRING_SV_COLUMN, null);
        map.put(BYTES_SV_COLUMN, null);
      }
      map.put(TIMESTAMP_COLUMN, _timeValues[i]);
      map.put(TIME_COLUMN, _timeValues[i]);
      GenericRow row = new GenericRow();
      row.init(map);
      rows.add(row);
    }

    Schema schema = new Schema.SchemaBuilder().addSingleValueDimension(INT_SV_COLUMN, FieldSpec.DataType.INT)
        .addSingleValueDimension(LONG_SV_COLUMN, FieldSpec.DataType.LONG)
        .addSingleValueDimension(FLOAT_SV_COLUMN, FieldSpec.DataType.FLOAT)
        .addSingleValueDimension(DOUBLE_SV_COLUMN, FieldSpec.DataType.DOUBLE)
        .addSingleValueDimension(STRING_SV_COLUMN, FieldSpec.DataType.STRING)
        .addSingleValueDimension(BYTES_SV_COLUMN, FieldSpec.DataType.BYTES)
        .addDateTime(TIMESTAMP_COLUMN, FieldSpec.DataType.TIMESTAMP, "1:MILLISECONDS:EPOCH", "1:MILLISECONDS")
        .addTime(new TimeGranularitySpec(FieldSpec.DataType.LONG, TimeUnit.MILLISECONDS, TIME_COLUMN), null).build();
    // Null handling must be enabled for the segment to record null value vectors.
    TableConfig tableConfig =
        new TableConfigBuilder(TableType.OFFLINE).setTableName("testWithNulls").setNullHandlingEnabled(true)
            .setTimeColumnName(TIME_COLUMN).build();

    SegmentGeneratorConfig config = new SegmentGeneratorConfig(tableConfig, schema);
    config.setOutDir(INDEX_DIR_PATH);
    config.setSegmentName(SEGMENT_NAME);
    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    driver.init(config, new GenericRowRecordReader(rows));
    driver.build();

    IndexSegment indexSegment = ImmutableSegmentLoader.load(new File(INDEX_DIR_PATH, SEGMENT_NAME), ReadMode.heap);
    Set<String> columnNames = indexSegment.getPhysicalColumnNames();
    _dataSourceMap = new HashMap<>(columnNames.size());
    for (String columnName : columnNames) {
      _dataSourceMap.put(columnName, indexSegment.getDataSource(columnName));
    }

    // Single projection block covering all rows, used by every test.
    _projectionBlock = new ProjectionOperator(_dataSourceMap,
        new DocIdSetOperator(new MatchAllFilterOperator(NUM_ROWS), DocIdSetPlanNode.MAX_DOC_PER_CALL)).nextBlock();
  }

  @Test
  public void testIsNullTransformFunction()
      throws Exception {
    testIsNullTransformFunction(INT_SV_COLUMN);
    testIsNullTransformFunction(LONG_SV_COLUMN);
    testIsNullTransformFunction(FLOAT_SV_COLUMN);
    testIsNullTransformFunction(DOUBLE_SV_COLUMN);
    testIsNullTransformFunction(STRING_SV_COLUMN);
    testIsNullTransformFunction(BYTES_SV_COLUMN);
  }

  /**
   * Verifies that "columnName IS NULL" resolves to IsNullTransformFunction and
   * evaluates to 1 exactly on the rows that were written as null.
   */
  public void testIsNullTransformFunction(String columnName)
      throws Exception {
    ExpressionContext expression = RequestContextUtils.getExpressionFromSQL(String.format("%s IS NULL", columnName));
    TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    Assert.assertTrue(transformFunction instanceof IsNullTransformFunction);
    Assert.assertEquals(transformFunction.getName(), TransformFunctionType.IS_NULL.getName());
    int[] expectedValues = new int[NUM_ROWS];
    for (int i = 0; i < NUM_ROWS; i++) {
      if (i % NULL_VALUE_MOD == 0) {
        expectedValues[i] = 1;
      }
    }
    testTransformFunction(expression, expectedValues);
  }

  @Test
  public void testIsNotNullTransformFunction()
      throws Exception {
    testIsNotNullTransformFunction(INT_SV_COLUMN);
    testIsNotNullTransformFunction(LONG_SV_COLUMN);
    testIsNotNullTransformFunction(FLOAT_SV_COLUMN);
    testIsNotNullTransformFunction(DOUBLE_SV_COLUMN);
    testIsNotNullTransformFunction(STRING_SV_COLUMN);
    testIsNotNullTransformFunction(BYTES_SV_COLUMN);
  }

  /**
   * Verifies that "columnName IS NOT NULL" resolves to
   * IsNotNullTransformFunction and evaluates to 0 exactly on the null rows.
   */
  public void testIsNotNullTransformFunction(String columnName)
      throws Exception {
    ExpressionContext expression =
        RequestContextUtils.getExpressionFromSQL(String.format("%s IS NOT NULL", columnName));
    TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    Assert.assertTrue(transformFunction instanceof IsNotNullTransformFunction);
    Assert.assertEquals(transformFunction.getName(), TransformFunctionType.IS_NOT_NULL.getName());
    int[] expectedValues = new int[NUM_ROWS];
    Arrays.fill(expectedValues, 1);
    for (int i = 0; i < NUM_ROWS; i++) {
      if (i % NULL_VALUE_MOD == 0) {
        expectedValues[i] = 0;
      }
    }
    testTransformFunction(expression, expectedValues);
  }

  /**
   * Evaluates the expression through every SV accessor and checks the results
   * against expectedValues. A fresh transform function instance is created per
   * accessor call so each evaluation starts from a fresh iterator state.
   */
  protected void testTransformFunction(ExpressionContext expression, int[] expectedValues) throws Exception {
    int[] intValues = getTransformFunctionInstance(expression).transformToIntValuesSV(_projectionBlock);
    long[] longValues = getTransformFunctionInstance(expression).transformToLongValuesSV(_projectionBlock);
    float[] floatValues = getTransformFunctionInstance(expression).transformToFloatValuesSV(_projectionBlock);
    double[] doubleValues = getTransformFunctionInstance(expression).transformToDoubleValuesSV(_projectionBlock);
    String[] stringValues = getTransformFunctionInstance(expression).transformToStringValuesSV(_projectionBlock);
    for (int i = 0; i < NUM_ROWS; i++) {
      Assert.assertEquals(intValues[i], expectedValues[i]);
      Assert.assertEquals(longValues[i], expectedValues[i]);
      Assert.assertEquals(floatValues[i], (float) expectedValues[i]);
      Assert.assertEquals(doubleValues[i], (double) expectedValues[i]);
      Assert.assertEquals(stringValues[i], Integer.toString(expectedValues[i]));
    }
  }

  // Builds a new transform function for the expression against the segment's
  // data sources.
  private TransformFunction getTransformFunctionInstance(ExpressionContext expression) {
    return TransformFunctionFactory.get(expression, _dataSourceMap);
  }
}
diff --git 
a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/NullHandlingIntegrationTest.java
 
b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/NullHandlingIntegrationTest.java
index 77e1181..27d3cf2 100644
--- 
a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/NullHandlingIntegrationTest.java
+++ 
b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/NullHandlingIntegrationTest.java
@@ -155,4 +155,18 @@ public class NullHandlingIntegrationTest extends 
BaseClusterIntegrationTestSet {
     String query = "SELECT count(*) FROM " + getTableName() + " where 
description IS NOT NULL AND salary IS NOT NULL";
     testQuery(query, Collections.singletonList(query));
   }
+
+  @Test
+  public void testCaseWithNullSalary()
+      throws Exception {
+    String query = "SELECT CASE WHEN salary IS NULL THEN 1 ELSE 0 END FROM " + 
getTableName();
+    testSqlQuery(query, Collections.singletonList(query));
+  }
+
+  @Test
+  public void testCaseWithNotNullDescription()
+      throws Exception {
+    String query = "SELECT CASE WHEN description IS NOT NULL THEN 1 ELSE 0 END 
FROM " + getTableName();
+    testSqlQuery(query, Collections.singletonList(query));
+  }
 }

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to