[ 
https://issues.apache.org/jira/browse/SEDONA-688?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17907108#comment-17907108
 ] 

Feng Zhang commented on SEDONA-688:
-----------------------------------

It looks like this error is caused by using 0.5 as the number of neighbors (k) 
in the SQL. We should verify k >= 1 instead of k > 0.

> running ST_KNN() error : java.lang.ArithmeticException: / by zero
> -----------------------------------------------------------------
>
>                 Key: SEDONA-688
>                 URL: https://issues.apache.org/jira/browse/SEDONA-688
>             Project: Apache Sedona
>          Issue Type: Bug
>            Reporter: Feng Zhang
>            Priority: Major
>
> This is converted from the following GitHub issue:
> [https://github.com/apache/sedona/issues/1732]
> h2. Steps to reproduce the problem
>  # ddl
> create table geo_test1 (
> id bigint,
> geom geometry
> );
> create table geo_test2 (
> id bigint,
> geom geometry
> );
> insert into geo_test1 values (1, ST_GeomFromText('POINT(1 1)')), (2, 
> ST_GeomFromText('POINT(2 2)')), (3, ST_GeomFromText('POINT(3 3)'));
> insert into geo_test2 values (1, ST_GeomFromText('POINT(2 1)')), (2, 
> ST_GeomFromText('POINT(3 2)')), (3, ST_GeomFromText('POINT(4 3)'));
>  # running knn query:
> select * from geo_test1 queries join geo_test2 objects on 
> ST_KNN(queries.geom, objects.geom, 0.5, false);
>  # ERROR:
> Error: org.apache.hive.service.cli.HiveSQLException: Error running query: 
> java.lang.ArithmeticException: / by zero
> at 
> org.apache.spark.sql.hive.thriftserver.HiveThriftServerErrors$.runningQueryError(HiveThriftServerErrors.scala:46)
> at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:264)
> at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.$anonfun$run$2(SparkExecuteStatementOperation.scala:168)
> at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
> at 
> org.apache.spark.sql.hive.thriftserver.SparkOperation.withLocalProperties(SparkOperation.scala:79)
> at 
> org.apache.spark.sql.hive.thriftserver.SparkOperation.withLocalProperties$(SparkOperation.scala:63)
> at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.withLocalProperties(SparkExecuteStatementOperation.scala:42)
> at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:168)
> at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:163)
> at java.security.AccessController.doPrivileged(Native Method)
> at javax.security.auth.Subject.doAs(Subject.java:422)
> at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
> at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2.run(SparkExecuteStatementOperation.scala:177)
> at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
> at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> at java.lang.Thread.run(Thread.java:750)
> Caused by: java.lang.ArithmeticException: / by zero
> at 
> org.apache.spark.sql.sedona_sql.strategy.join.TraitKNNJoinQueryExec.knnJoinPartitionNumOptimizer(TraitKNNJoinQueryExec.scala:163)
> at 
> org.apache.spark.sql.sedona_sql.strategy.join.TraitKNNJoinQueryExec.knnJoinPartitionNumOptimizer$(TraitKNNJoinQueryExec.scala:156)
> at 
> org.apache.spark.sql.sedona_sql.strategy.join.BroadcastQuerySideKNNJoinExec.knnJoinPartitionNumOptimizer(BroadcastQuerySideKNNJoinExec.scala:35)
> at 
> org.apache.spark.sql.sedona_sql.strategy.join.TraitKNNJoinQueryExec.executeKNNJoin(TraitKNNJoinQueryExec.scala:112)
> at 
> org.apache.spark.sql.sedona_sql.strategy.join.TraitKNNJoinQueryExec.doExecute(TraitKNNJoinQueryExec.scala:57)
> at 
> org.apache.spark.sql.sedona_sql.strategy.join.TraitKNNJoinQueryExec.doExecute$(TraitKNNJoinQueryExec.scala:55)
> at 
> org.apache.spark.sql.sedona_sql.strategy.join.BroadcastQuerySideKNNJoinExec.doExecute(BroadcastQuerySideKNNJoinExec.scala:35)
> at 
> org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:195)
> at 
> org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:246)
> at 
> org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
> at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:243)
> at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:191)
> at 
> org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:364)
> at 
> org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:445)
> at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:4333)
> at org.apache.spark.sql.Dataset.$anonfun$collect$1(Dataset.scala:3575)
> at org.apache.spark.sql.Dataset.$anonfun$withAction$2(Dataset.scala:4323)
> at 
> org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:546)
> at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:4321)
> at 
> org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:125)
> at 
> org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:201)
> at 
> org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:108)
> at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
> at 
> org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:66)
> at org.apache.spark.sql.Dataset.withAction(Dataset.scala:4321)
> at org.apache.spark.sql.Dataset.collect(Dataset.scala:3575)
> at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:238)
> ... 16 more (state=,code=0)
> h2. Settings
> Sedona version = 1.7.0
> Apache Spark version = 3.5.1
> Scala version = 2.12
> JRE version = 1.8
> Environment = Standalone
>  



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to