This is an automated email from the ASF dual-hosted git repository. dongjoon pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new ea3104fa71b [SPARK-45325][BUILD][FOLLOWUP] Update docs and sbt ea3104fa71b is described below commit ea3104fa71b0d7b6ac5e74292c28e40acb1e6537 Author: Ismaël Mejía <ieme...@gmail.com> AuthorDate: Tue Sep 26 10:40:08 2023 -0700 [SPARK-45325][BUILD][FOLLOWUP] Update docs and sbt ### What changes were proposed in this pull request? This PR adds missing parts of the upgrade to Avro 1.11.3 ### Why are the changes needed? Because there are missing references to the version of the library ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Pass the CI + Verify docs ### Was this patch authored or co-authored using generative AI tooling? No Closes #43118 from iemejia/master. Authored-by: Ismaël Mejía <ieme...@gmail.com> Signed-off-by: Dongjoon Hyun <dh...@apple.com> --- .../avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala | 4 ++-- docs/sql-data-sources-avro.md | 4 ++-- project/SparkBuild.scala | 2 +- .../test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala index edaaa8835cc..a0db82f9871 100644 --- a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala +++ b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala @@ -81,14 +81,14 @@ private[sql] class AvroOptions( /** * Top level record name in write result, which is required in Avro spec. - * See https://avro.apache.org/docs/1.11.2/specification/#schema-record . + * See https://avro.apache.org/docs/1.11.3/specification/#schema-record . * Default value is "topLevelRecord" */ val recordName: String = parameters.getOrElse(RECORD_NAME, "topLevelRecord") /** * Record namespace in write result. Default value is "". 
- * See Avro spec for details: https://avro.apache.org/docs/1.11.2/specification/#schema-record . + * See Avro spec for details: https://avro.apache.org/docs/1.11.3/specification/#schema-record . */ val recordNamespace: String = parameters.getOrElse(RECORD_NAMESPACE, "") diff --git a/docs/sql-data-sources-avro.md b/docs/sql-data-sources-avro.md index b01174b9182..72741b0e9d1 100644 --- a/docs/sql-data-sources-avro.md +++ b/docs/sql-data-sources-avro.md @@ -417,7 +417,7 @@ applications. Read the [Advanced Dependency Management](https://spark.apache Submission Guide for more details. ## Supported types for Avro -> Spark SQL conversion -Currently Spark supports reading all [primitive types](https://avro.apache.org/docs/1.11.2/specification/#primitive-types) and [complex types](https://avro.apache.org/docs/1.11.2/specification/#complex-types) under records of Avro. +Currently Spark supports reading all [primitive types](https://avro.apache.org/docs/1.11.3/specification/#primitive-types) and [complex types](https://avro.apache.org/docs/1.11.3/specification/#complex-types) under records of Avro. <table class="table table-striped"> <thead><tr><th><b>Avro type</b></th><th><b>Spark SQL type</b></th></tr></thead> <tr> @@ -481,7 +481,7 @@ In addition to the types listed above, it supports reading `union` types. The fo 3. `union(something, null)`, where something is any supported Avro type. This will be mapped to the same Spark SQL type as that of something, with nullable set to true. All other union types are considered complex. They will be mapped to StructType where field names are member0, member1, etc., in accordance with members of the union. This is consistent with the behavior when converting between Avro and Parquet. 
-It also supports reading the following Avro [logical types](https://avro.apache.org/docs/1.11.2/specification/#logical-types): +It also supports reading the following Avro [logical types](https://avro.apache.org/docs/1.11.3/specification/#logical-types): <table class="table table-striped"> <thead><tr><th><b>Avro logical type</b></th><th><b>Avro type</b></th><th><b>Spark SQL type</b></th></tr></thead> diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index 400ee8c5f28..ad2b67c67c6 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -1071,7 +1071,7 @@ object DependencyOverrides { dependencyOverrides += "com.google.guava" % "guava" % guavaVersion, dependencyOverrides += "xerces" % "xercesImpl" % "2.12.2", dependencyOverrides += "jline" % "jline" % "2.14.6", - dependencyOverrides += "org.apache.avro" % "avro" % "1.11.2") + dependencyOverrides += "org.apache.avro" % "avro" % "1.11.3") } /** diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala index 452d9850b00..aceca829df8 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala @@ -905,7 +905,7 @@ class HiveClientSuite(version: String, allVersions: Seq[String]) test("Decimal support of Avro Hive serde") { val tableName = "tab1" // TODO: add the other logical types. For details, see the link: - // https://avro.apache.org/docs/1.11.2/specification/#logical-types + // https://avro.apache.org/docs/1.11.3/specification/#logical-types val avroSchema = """{ | "name": "test_record", --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org