Github user ueshin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21258#discussion_r192535941
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala ---
    @@ -235,6 +235,86 @@ case class CreateMap(children: Seq[Expression]) extends Expression {
       override def prettyName: String = "map"
     }
     
    +/**
    + * Returns a Catalyst map containing the elements of the two child array expressions as keys and values.
    + */
    +@ExpressionDescription(
    +  usage = """
    +    _FUNC_(keys, values) - Creates a map with a pair of the given key/value arrays. All elements
    +      in keys should not be null""",
    +  examples = """
    +    Examples:
    +      > SELECT _FUNC_([1.0, 3.0], ['2', '4']);
    +       {1.0:"2",3.0:"4"}
    +  """, since = "2.4.0")
    +case class CreateMapFromArrays(left: Expression, right: Expression)
    +    extends BinaryExpression with ExpectsInputTypes {
    +
    +  override def inputTypes: Seq[AbstractDataType] = Seq(ArrayType, ArrayType)
    +
    +  override def checkInputDataTypes(): TypeCheckResult = {
    +    (left.dataType, right.dataType) match {
    +      case (ArrayType(_, _), ArrayType(_, _)) =>
    +        TypeCheckResult.TypeCheckSuccess
    +      case _ =>
    +        TypeCheckResult.TypeCheckFailure("The given two arguments should be arrays")
    +    }
    +  }
    +
    +  override def dataType: DataType = {
    +    MapType(
    +      keyType = left.dataType.asInstanceOf[ArrayType].elementType,
    +      valueType = right.dataType.asInstanceOf[ArrayType].elementType,
    +      valueContainsNull = right.dataType.asInstanceOf[ArrayType].containsNull)
    +  }
    +
    +  override def nullable: Boolean = left.nullable || right.nullable
    +
    +  override def nullSafeEval(keyArray: Any, valueArray: Any): Any = {
    +    val keyArrayData = keyArray.asInstanceOf[ArrayData]
    +    val valueArrayData = valueArray.asInstanceOf[ArrayData]
    +    if (keyArrayData.numElements != valueArrayData.numElements) {
    +      throw new RuntimeException("The given two arrays should have the same length")
    +    }
    +    val leftArrayType = left.dataType.asInstanceOf[ArrayType]
    +    if (leftArrayType.containsNull) {
    +      if (keyArrayData.toArray(leftArrayType.elementType).contains(null)) {
    +        throw new RuntimeException("Cannot use null as map key!")
    +      }
    +    }
    +    new ArrayBasedMapData(keyArrayData.copy(), valueArrayData.copy())
    +  }
    +
    +  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    +    nullSafeCodeGen(ctx, ev, (keyArrayData, valueArrayData) => {
    +      val arrayBasedMapData = classOf[ArrayBasedMapData].getName
    +      val leftArrayType = left.dataType.asInstanceOf[ArrayType]
    +      val keyArrayElemNullCheck = if (!leftArrayType.containsNull) "" else {
    +        val leftArrayTypeTerm = ctx.addReferenceObj("leftArrayType", leftArrayType.elementType)
    +        val array = ctx.freshName("array")
    +        val i = ctx.freshName("i")
    +        s"""
     +           |Object[] $array = $keyArrayData.toObjectArray($leftArrayTypeTerm);
    +           |for (int $i = 0; $i < $array.length; $i++) {
    +           |  if ($array[$i] == null) {
    +           |    throw new RuntimeException("Cannot use null as map key!");
    +           |  }
    +           |}
    --- End diff ---
    
    We can null-check without converting to an object array.
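    
    For reference, a minimal sketch of what the generated check could look like if we iterate over the ArrayData directly and use isNullAt, instead of materializing an Object[]. It reuses the keyArrayData / leftArrayType / ctx names from the diff above and is only an illustration of the suggestion, not the final implementation:
    
        // Assumption: same codegen context as in doGenCode above.
        // Walk the ArrayData in place; no Object[] allocation is needed.
        val keyArrayElemNullCheck = if (!leftArrayType.containsNull) "" else {
          val i = ctx.freshName("i")
          s"""
             |for (int $i = 0; $i < $keyArrayData.numElements(); $i++) {
             |  if ($keyArrayData.isNullAt($i)) {
             |    throw new RuntimeException("Cannot use null as map key!");
             |  }
             |}
           """.stripMargin
        }
    
    This also skips the addReferenceObj call for the key element type, since the per-element null check no longer needs it.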

