This is an automated email from the ASF dual-hosted git repository. dongjoon pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 19833d92f325 [SPARK-47886][SQL][DOCS][TESTS] Postgres: Add tests and doc for Postgres special numeric values 19833d92f325 is described below commit 19833d92f3258ea2b4dcf803217e7a7334ecd927 Author: Kent Yao <y...@apache.org> AuthorDate: Wed Apr 17 07:50:35 2024 -0700 [SPARK-47886][SQL][DOCS][TESTS] Postgres: Add tests and doc for Postgres special numeric values ### What changes were proposed in this pull request? This PR added tests and doc for Postgres special numeric values. Postgres supports special numeric values "NaN", "infinity", "-infinity" for both exact and inexact numbers, while we only support these for inexact ones. ### Why are the changes needed? test coverage and doc improvement ### Does this PR introduce _any_ user-facing change? no ### How was this patch tested? new test and doc build ![image](https://github.com/apache/spark/assets/8326978/4e46be31-981d-4625-91f2-f81c4d40abed) ### Was this patch authored or co-authored using generative AI tooling? no Closes #46102 from yaooqinn/SPARK-47886. 
Authored-by: Kent Yao <y...@apache.org> Signed-off-by: Dongjoon Hyun <dh...@apple.com> --- .../spark/sql/jdbc/PostgresIntegrationSuite.scala | 30 ++++++++++++++++++++-- docs/sql-data-sources-jdbc.md | 2 +- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala index 1cd8a77e8442..8c0a7c0a809f 100644 --- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala +++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala @@ -18,12 +18,13 @@ package org.apache.spark.sql.jdbc import java.math.{BigDecimal => JBigDecimal} -import java.sql.{Connection, Date, Timestamp} +import java.sql.{Connection, Date, SQLException, Timestamp} import java.text.SimpleDateFormat import java.time.LocalDateTime import java.util.Properties -import org.apache.spark.sql.{Column, Row} +import org.apache.spark.SparkException +import org.apache.spark.sql.{Column, DataFrame, Row} import org.apache.spark.sql.catalyst.expressions.Literal import org.apache.spark.sql.types._ import org.apache.spark.tags.DockerTest @@ -554,4 +555,29 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite { .option("query", "SELECT 1::oid, 'bar'::regclass, 'integer'::regtype").load() checkAnswer(df, Row(1, "bar", "integer")) } + + test("SPARK-47886: special number values") { + def toDF(qry: String): DataFrame = { + spark.read.format("jdbc") + .option("url", jdbcUrl) + .option("query", qry) + .load() + } + checkAnswer( + toDF("SELECT 'NaN'::float8 c1, 'infinity'::float8 c2, '-infinity'::float8 c3"), + Row(Double.NaN, Double.PositiveInfinity, Double.NegativeInfinity)) + checkAnswer( + toDF("SELECT 'NaN'::float4 c1, 'infinity'::float4 c2, '-infinity'::float4 c3"), + Row(Float.NaN, 
Float.PositiveInfinity, Float.NegativeInfinity) ) + + Seq("NaN", "infinity", "-infinity").foreach { v => + val df = toDF(s"SELECT '$v'::numeric c1") + val e = intercept[SparkException](df.collect()) + checkError(e, null) + val cause = e.getCause.asInstanceOf[SQLException] + assert(cause.getMessage.contains("Bad value for type BigDecimal")) + assert(cause.getSQLState === "22003") + } + } } diff --git a/docs/sql-data-sources-jdbc.md b/docs/sql-data-sources-jdbc.md index ef7a07a82c5f..637efc24113e 100644 --- a/docs/sql-data-sources-jdbc.md +++ b/docs/sql-data-sources-jdbc.md @@ -845,7 +845,7 @@ as the activated JDBC Driver. Note that, different JDBC drivers, or different ve <tr> <td>numeric, decimal</td> <td>DecimalType</td> - <td>Since PostgreSQL 15, 's' can be negative. If 's<0' it'll be adjusted to DecimalType(min(p-s, 38), 0); Otherwise, DecimalType(p, s), and if 'p>38', the fraction part will be truncated if exceeded. And if any value of this column have an actual precision greater 38 will fail with NUMERIC_VALUE_OUT_OF_RANGE.WITHOUT_SUGGESTION error</td> + <td><ul><li>Since PostgreSQL 15, 's' can be negative. If 's<0' it'll be adjusted to DecimalType(min(p-s, 38), 0); Otherwise, DecimalType(p, s)</li><li>If 'p>38', the fraction part will be truncated if exceeded. And if any value of this column has an actual precision greater than 38, it will fail with NUMERIC_VALUE_OUT_OF_RANGE.WITHOUT_SUGGESTION error.</li><li>Special numeric values, 'NaN', 'infinity' and '-infinity' are not supported</li></ul></td> </tr> <tr> <td>character varying(n), varchar(n)</td> --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org