This is an automated email from the ASF dual-hosted git repository.

jiayu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-sedona.git


The following commit(s) were added to refs/heads/master by this push:
     new ad76748  [SEDONA-14] Fix JSON representation of GeometryUDT schema (#508)
ad76748 is described below

commit ad767488ea2e6695ea9cf587e124811ceeeaaaac
Author: Michael Merg <[email protected]>
AuthorDate: Tue Feb 9 23:21:10 2021 +0100

    [SEDONA-14] Fix JSON representation of GeometryUDT schema (#508)
    
    * Fix JSON representation of GeometryUDT
    
    * Add schema parsing test
    
    * Move test to different package
    
    Co-authored-by: Michael Merg <[email protected]>
---
 .../spark/sql/sedona_sql/UDT/GeometryUDT.scala     |  9 ++++
 .../apache/sedona/sql/GeometryUdtTestScala.scala   | 63 ++++++++++++++++++++++
 2 files changed, 72 insertions(+)

diff --git a/sql/src/main/scala/org/apache/spark/sql/sedona_sql/UDT/GeometryUDT.scala b/sql/src/main/scala/org/apache/spark/sql/sedona_sql/UDT/GeometryUDT.scala
index b7c79d6..1fc0ee9 100644
--- a/sql/src/main/scala/org/apache/spark/sql/sedona_sql/UDT/GeometryUDT.scala
+++ b/sql/src/main/scala/org/apache/spark/sql/sedona_sql/UDT/GeometryUDT.scala
@@ -21,6 +21,8 @@ package org.apache.spark.sql.sedona_sql.UDT
 import org.apache.sedona.sql.utils.GeometrySerializer
 import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
 import org.apache.spark.sql.types._
+import org.json4s.JsonDSL._
+import org.json4s.JsonAST.JValue
 import org.locationtech.jts.geom.Geometry
 
 
@@ -40,6 +42,13 @@ class GeometryUDT extends UserDefinedType[Geometry] {
         GeometrySerializer.deserialize(values)
     }
   }
+
+  override private[sql] def jsonValue: JValue = {
+    super.jsonValue mapField {
+      case ("class", _) => "class" -> this.getClass.getName.stripSuffix("$")
+      case other: Any => other
+    }
+  }
 }
 
 case object GeometryUDT extends org.apache.spark.sql.sedona_sql.UDT.GeometryUDT with scala.Serializable
diff --git a/sql/src/test/scala/org/apache/sedona/sql/GeometryUdtTestScala.scala b/sql/src/test/scala/org/apache/sedona/sql/GeometryUdtTestScala.scala
new file mode 100644
index 0000000..8471be0
--- /dev/null
+++ b/sql/src/test/scala/org/apache/sedona/sql/GeometryUdtTestScala.scala
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.sedona.sql
+
+import org.apache.spark.sql.DataFrame
+import org.apache.spark.sql.functions.expr
+import org.apache.spark.sql.types.{DataType, StructType}
+import org.junit.rules.TemporaryFolder
+import org.locationtech.jts.geom.Geometry
+import org.locationtech.jts.io.WKTReader
+import org.scalatest.BeforeAndAfter
+
+class GeometryUdtTestScala extends TestBaseScala with BeforeAndAfter {
+
+  import sparkSession.implicits._
+
+  var tempFolder: TemporaryFolder = new TemporaryFolder
+  var dataFrame: DataFrame = _
+
+  before {
+    dataFrame = Seq(Tuple2(47.636, 9.389))
+      .toDF("latitude", "longitude")
+      .withColumn("point", expr("ST_Point(longitude, latitude)"))
+  }
+
+  describe("GeometryUDT Test") {
+    it("Should write dataframe with geometry in Parquet format") {
+      tempFolder.create()
+
+      dataFrame.write.parquet(tempFolder.getRoot.getPath + "/parquet")
+
+      val readDataFrame = sparkSession.read.parquet(tempFolder.getRoot.getPath + "/parquet")
+      val row = readDataFrame.collect()(0)
+      assert(row.getAs[Double]("latitude") == 47.636)
+      assert(row.getAs[Double]("longitude") == 9.389)
+      assert(row.getAs[Geometry]("point").equals(new WKTReader().read("POINT (9.389 47.636)")))
+    }
+
+    it("Should be able to render and parse JSON schema") {
+      assert(DataType.fromJson(dataFrame.schema.json).asInstanceOf[StructType].equals(dataFrame.schema))
+    }
+  }
+
+  after {
+    tempFolder.delete()
+  }
+}

Reply via email to