[SPARK-22893][SQL] Unified the data type mismatch message

## What changes were proposed in this pull request?

We should use `dataType.simpleString` to unify the data type mismatch messages:
Before:
```
spark-sql> select cast(1 as binary);
Error in query: cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7;
```
After:
```
spark-sql> select cast(1 as binary);
Error in query: cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7;
```
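
For context, the difference comes from how a Catalyst `DataType` renders itself: plain string interpolation uses `toString`, which prints the type object's name (`IntegerType`), while `simpleString` returns the SQL-style name (`int`). A minimal sketch, e.g. in `spark-shell` (the `DecimalType` line is just an extra illustration):

```
import org.apache.spark.sql.types._

// Default rendering uses the Catalyst type object's name.
IntegerType.toString             // "IntegerType"

// simpleString returns the SQL-facing name used in the unified message.
IntegerType.simpleString         // "int"
BinaryType.simpleString          // "binary"

// Parameterized types keep their parameters.
DecimalType(10, 2).simpleString  // "decimal(10,2)"
```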

## How was this patch tested?

Existing tests.

Author: Yuming Wang <wgy...@gmail.com>

Closes #20064 from wangyum/SPARK-22893.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/33ae2437
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/33ae2437
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/33ae2437

Branch: refs/heads/master
Commit: 33ae2437ba4e634b510b0f96e914ad1ef4ccafd8
Parents: fba0313
Author: Yuming Wang <wgy...@gmail.com>
Authored: Mon Dec 25 01:14:09 2017 -0800
Committer: gatorsmile <gatorsm...@gmail.com>
Committed: Mon Dec 25 01:14:09 2017 -0800

----------------------------------------------------------------------
 .../spark/sql/catalyst/expressions/Cast.scala   |   2 +-
 .../aggregate/ApproximatePercentile.scala       |   4 +-
 .../expressions/conditionalExpressions.scala    |   3 +-
 .../sql/catalyst/expressions/generators.scala   |  10 +-
 .../sql/catalyst/expressions/predicates.scala   |   2 +-
 .../expressions/windowExpressions.scala         |   8 +-
 .../native/binaryComparison.sql.out             |  48 ++--
 .../typeCoercion/native/inConversion.sql.out    | 280 +++++++++----------
 .../native/windowFrameCoercion.sql.out          |   8 +-
 .../resources/sql-tests/results/window.sql.out  |   4 +-
 .../org/apache/spark/sql/DatasetSuite.scala     |   2 +-
 .../spark/sql/GeneratorFunctionSuite.scala      |   4 +-
 12 files changed, 189 insertions(+), 186 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/33ae2437/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 5279d41..274d881 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -181,7 +181,7 @@ case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String
       TypeCheckResult.TypeCheckSuccess
     } else {
       TypeCheckResult.TypeCheckFailure(
-        s"cannot cast ${child.dataType} to $dataType")
+        s"cannot cast ${child.dataType.simpleString} to 
${dataType.simpleString}")
     }
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/33ae2437/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproximatePercentile.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproximatePercentile.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproximatePercentile.scala
index 7facb9d..149ac26 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproximatePercentile.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproximatePercentile.scala
@@ -132,7 +132,7 @@ case class ApproximatePercentile(
         case TimestampType => value.asInstanceOf[Long].toDouble
         case n: NumericType => n.numeric.toDouble(value.asInstanceOf[n.InternalType])
         case other: DataType =>
-          throw new UnsupportedOperationException(s"Unexpected data type $other")
+          throw new UnsupportedOperationException(s"Unexpected data type ${other.simpleString}")
       }
       buffer.add(doubleValue)
     }
@@ -157,7 +157,7 @@ case class ApproximatePercentile(
       case DoubleType => doubleResult
       case _: DecimalType => doubleResult.map(Decimal(_))
       case other: DataType =>
-        throw new UnsupportedOperationException(s"Unexpected data type $other")
+        throw new UnsupportedOperationException(s"Unexpected data type ${other.simpleString}")
     }
     if (result.length == 0) {
       null

http://git-wip-us.apache.org/repos/asf/spark/blob/33ae2437/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
index 142dfb0..b444c3a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
@@ -40,7 +40,8 @@ case class If(predicate: Expression, trueValue: Expression, falseValue: Expressi
   override def checkInputDataTypes(): TypeCheckResult = {
     if (predicate.dataType != BooleanType) {
       TypeCheckResult.TypeCheckFailure(
-        s"type of predicate expression in If should be boolean, not 
${predicate.dataType}")
+        "type of predicate expression in If should be boolean, " +
+          s"not ${predicate.dataType.simpleString}")
     } else if (!trueValue.dataType.sameType(falseValue.dataType)) {
       TypeCheckResult.TypeCheckFailure(s"differing types in '$sql' " +
         s"(${trueValue.dataType.simpleString} and 
${falseValue.dataType.simpleString}).")

http://git-wip-us.apache.org/repos/asf/spark/blob/33ae2437/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
index 69af7a2..4f4d491 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
@@ -155,8 +155,8 @@ case class Stack(children: Seq[Expression]) extends Generator {
         val j = (i - 1) % numFields
         if (children(i).dataType != elementSchema.fields(j).dataType) {
           return TypeCheckResult.TypeCheckFailure(
-            s"Argument ${j + 1} (${elementSchema.fields(j).dataType}) != " +
-              s"Argument $i (${children(i).dataType})")
+            s"Argument ${j + 1} 
(${elementSchema.fields(j).dataType.simpleString}) != " +
+              s"Argument $i (${children(i).dataType.simpleString})")
         }
       }
       TypeCheckResult.TypeCheckSuccess
@@ -249,7 +249,8 @@ abstract class ExplodeBase extends UnaryExpression with CollectionGenerator with
       TypeCheckResult.TypeCheckSuccess
     case _ =>
       TypeCheckResult.TypeCheckFailure(
-        s"input to function explode should be array or map type, not 
${child.dataType}")
+        "input to function explode should be array or map type, " +
+          s"not ${child.dataType.simpleString}")
   }
 
   // hive-compatible default alias for explode function ("col" for array, "key", "value" for map)
@@ -378,7 +379,8 @@ case class Inline(child: Expression) extends UnaryExpression with CollectionGene
       TypeCheckResult.TypeCheckSuccess
     case _ =>
       TypeCheckResult.TypeCheckFailure(
-        s"input to function $prettyName should be array of struct type, not 
${child.dataType}")
+        s"input to function $prettyName should be array of struct type, " +
+          s"not ${child.dataType.simpleString}")
   }
 
   override def elementSchema: StructType = child.dataType match {

http://git-wip-us.apache.org/repos/asf/spark/blob/33ae2437/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index f4ee3d1..b469f5c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -195,7 +195,7 @@ case class In(value: Expression, list: Seq[Expression]) extends Predicate {
           }
         case _ =>
           TypeCheckResult.TypeCheckFailure(s"Arguments must be same type but were: " +
-            s"${value.dataType} != ${mismatchOpt.get.dataType}")
+            s"${value.dataType.simpleString} != ${mismatchOpt.get.dataType.simpleString}")
       }
     } else {
       TypeUtils.checkForOrderingExpr(value.dataType, s"function $prettyName")

http://git-wip-us.apache.org/repos/asf/spark/blob/33ae2437/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
index 220cc4f..dd13d9a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
@@ -70,9 +70,9 @@ case class WindowSpecDefinition(
       case f: SpecifiedWindowFrame if f.frameType == RangeFrame && f.isValueBound &&
           !isValidFrameType(f.valueBoundary.head.dataType) =>
         TypeCheckFailure(
-          s"The data type '${orderSpec.head.dataType}' used in the order 
specification does " +
-            s"not match the data type '${f.valueBoundary.head.dataType}' which 
is used in the " +
-            "range frame.")
+          s"The data type '${orderSpec.head.dataType.simpleString}' used in 
the order " +
+            "specification does not match the data type " +
+            s"'${f.valueBoundary.head.dataType.simpleString}' which is used in 
the range frame.")
       case _ => TypeCheckSuccess
     }
   }
@@ -251,7 +251,7 @@ case class SpecifiedWindowFrame(
       TypeCheckFailure(s"Window frame $location bound '$e' is not a literal.")
     case e: Expression if !frameType.inputType.acceptsType(e.dataType) =>
       TypeCheckFailure(
-        s"The data type of the $location bound '${e.dataType}' does not match 
" +
+        s"The data type of the $location bound '${e.dataType.simpleString}' 
does not match " +
           s"the expected data type '${frameType.inputType.simpleString}'.")
     case _ => TypeCheckSuccess
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/33ae2437/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/binaryComparison.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/binaryComparison.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/binaryComparison.sql.out
index fe7bde0..2914d60 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/binaryComparison.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/binaryComparison.sql.out
@@ -16,7 +16,7 @@ SELECT cast(1 as binary) = '1' FROM t
 struct<>
 -- !query 1 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 2
@@ -25,7 +25,7 @@ SELECT cast(1 as binary) > '2' FROM t
 struct<>
 -- !query 2 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 3
@@ -34,7 +34,7 @@ SELECT cast(1 as binary) >= '2' FROM t
 struct<>
 -- !query 3 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 4
@@ -43,7 +43,7 @@ SELECT cast(1 as binary) < '2' FROM t
 struct<>
 -- !query 4 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 5
@@ -52,7 +52,7 @@ SELECT cast(1 as binary) <= '2' FROM t
 struct<>
 -- !query 5 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 6
@@ -61,7 +61,7 @@ SELECT cast(1 as binary) <> '2' FROM t
 struct<>
 -- !query 6 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 7
@@ -70,7 +70,7 @@ SELECT cast(1 as binary) = cast(null as string) FROM t
 struct<>
 -- !query 7 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 8
@@ -79,7 +79,7 @@ SELECT cast(1 as binary) > cast(null as string) FROM t
 struct<>
 -- !query 8 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 9
@@ -88,7 +88,7 @@ SELECT cast(1 as binary) >= cast(null as string) FROM t
 struct<>
 -- !query 9 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 10
@@ -97,7 +97,7 @@ SELECT cast(1 as binary) < cast(null as string) FROM t
 struct<>
 -- !query 10 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 11
@@ -106,7 +106,7 @@ SELECT cast(1 as binary) <= cast(null as string) FROM t
 struct<>
 -- !query 11 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 12
@@ -115,7 +115,7 @@ SELECT cast(1 as binary) <> cast(null as string) FROM t
 struct<>
 -- !query 12 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 7
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 7
 
 
 -- !query 13
@@ -124,7 +124,7 @@ SELECT '1' = cast(1 as binary) FROM t
 struct<>
 -- !query 13 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 13
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 13
 
 
 -- !query 14
@@ -133,7 +133,7 @@ SELECT '2' > cast(1 as binary) FROM t
 struct<>
 -- !query 14 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 13
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 13
 
 
 -- !query 15
@@ -142,7 +142,7 @@ SELECT '2' >= cast(1 as binary) FROM t
 struct<>
 -- !query 15 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 14
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 14
 
 
 -- !query 16
@@ -151,7 +151,7 @@ SELECT '2' < cast(1 as binary) FROM t
 struct<>
 -- !query 16 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 13
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 13
 
 
 -- !query 17
@@ -160,7 +160,7 @@ SELECT '2' <= cast(1 as binary) FROM t
 struct<>
 -- !query 17 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 14
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 14
 
 
 -- !query 18
@@ -169,7 +169,7 @@ SELECT '2' <> cast(1 as binary) FROM t
 struct<>
 -- !query 18 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 14
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 14
 
 
 -- !query 19
@@ -178,7 +178,7 @@ SELECT cast(null as string) = cast(1 as binary) FROM t
 struct<>
 -- !query 19 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 30
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 30
 
 
 -- !query 20
@@ -187,7 +187,7 @@ SELECT cast(null as string) > cast(1 as binary) FROM t
 struct<>
 -- !query 20 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 30
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 30
 
 
 -- !query 21
@@ -196,7 +196,7 @@ SELECT cast(null as string) >= cast(1 as binary) FROM t
 struct<>
 -- !query 21 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 31
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 31
 
 
 -- !query 22
@@ -205,7 +205,7 @@ SELECT cast(null as string) < cast(1 as binary) FROM t
 struct<>
 -- !query 22 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 30
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 30
 
 
 -- !query 23
@@ -214,7 +214,7 @@ SELECT cast(null as string) <= cast(1 as binary) FROM t
 struct<>
 -- !query 23 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 31
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 31
 
 
 -- !query 24
@@ -223,7 +223,7 @@ SELECT cast(null as string) <> cast(1 as binary) FROM t
 struct<>
 -- !query 24 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast IntegerType to BinaryType; line 1 pos 31
+cannot resolve 'CAST(1 AS BINARY)' due to data type mismatch: cannot cast int to binary; line 1 pos 31
 
 
 -- !query 25

