This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new b6b19db  [SPARK-31414][SQL][DOCS][FOLLOWUP] Update default datetime pattern for json/csv APIs documentations
b6b19db is described below

commit b6b19db2098ddbeda8609e0609e839154a1bb483
Author: Kent Yao <yaooq...@hotmail.com>
AuthorDate: Tue Apr 14 10:25:37 2020 +0900

    [SPARK-31414][SQL][DOCS][FOLLOWUP] Update default datetime pattern for json/csv APIs documentations
    
    ### What changes were proposed in this pull request?
    
    Update the default datetime pattern from `yyyy-MM-dd'T'HH:mm:ss.SSSXXX` to `yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]` in the JSON/CSV API documentation (an illustrative usage sketch follows the file summary below).
    
    ### Why are the changes needed?
    
    Documentation-only fix: align the documented default with the actual default pattern.
    
    ### Does this PR introduce any user-facing change?
    
    Yes, the documentation will change
    
    ### How was this patch tested?
    
    Passing Jenkins
    
    Closes #28204 from yaooqinn/SPARK-31414-F.
    
    Authored-by: Kent Yao <yaooq...@hotmail.com>
    Signed-off-by: HyukjinKwon <gurwls...@apache.org>
    (cherry picked from commit 31b907748d21f056828a475ca6161aebf2bda536)
    Signed-off-by: HyukjinKwon <gurwls...@apache.org>
---
 python/pyspark/sql/readwriter.py                                  | 8 ++++----
 python/pyspark/sql/streaming.py                                   | 4 ++--
 .../src/main/scala/org/apache/spark/sql/DataFrameReader.scala     | 4 ++--
 .../src/main/scala/org/apache/spark/sql/DataFrameWriter.scala     | 4 ++--
 .../scala/org/apache/spark/sql/streaming/DataStreamReader.scala   | 4 ++--
 5 files changed, 12 insertions(+), 12 deletions(-)
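
As a rough illustration of what the corrected default means in practice (this sketch is not part of the commit; the object name and sample records are invented), the optional `[.SSS]` and `[XXX]` sections let the JSON/CSV readers accept timestamps with or without fractional seconds and a zone offset. The explicit `option("timestampFormat", ...)` call below merely restates the documented default:

```scala
import org.apache.spark.sql.SparkSession

// Illustrative only: exercises the documented default timestampFormat,
// yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX], against JSON records that include or omit
// the optional fractional seconds and zone offset. Sample data is invented.
object TimestampFormatSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("timestampFormat-default-sketch")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._

    val records = Seq(
      """{"ts": "2020-04-14T10:25:37"}""",          // no fraction, no offset
      """{"ts": "2020-04-14T10:25:37.123"}""",      // fraction, no offset
      """{"ts": "2020-04-14T10:25:37.123+09:00"}""" // fraction and offset
    ).toDS()

    // Passing the pattern explicitly is redundant here; it matches the default
    // that this commit documents. Swap in another pattern for non-ISO inputs.
    val df = spark.read
      .schema("ts TIMESTAMP")
      .option("timestampFormat", "yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]")
      .json(records)

    df.show(truncate = false)
    spark.stop()
  }
}
```

Timestamps that carry no offset are interpreted in the session time zone (`spark.sql.session.timeZone`).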

diff --git a/python/pyspark/sql/readwriter.py b/python/pyspark/sql/readwriter.py
index 92d36e7..6ad6377 100644
--- a/python/pyspark/sql/readwriter.py
+++ b/python/pyspark/sql/readwriter.py
@@ -245,7 +245,7 @@ class DataFrameReader(OptionUtils):
         :param timestampFormat: sets the string that indicates a timestamp format.
                                 Custom date formats follow the formats at `datetime pattern`_.
                                 This applies to timestamp type. If None is set, it uses the
-                                default value, ``yyyy-MM-dd'T'HH:mm:ss.SSSXXX``.
+                                default value, ``yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]``.
         :param multiLine: parse one record, which may span multiple lines, per file. If None is
                           set, it uses the default value, ``false``.
         :param allowUnquotedControlChars: allows JSON Strings to contain unquoted control
@@ -455,7 +455,7 @@ class DataFrameReader(OptionUtils):
         :param timestampFormat: sets the string that indicates a timestamp format.
                                 Custom date formats follow the formats at `datetime pattern`_.
                                 This applies to timestamp type. If None is set, it uses the
-                                default value, ``yyyy-MM-dd'T'HH:mm:ss.SSSXXX``.
+                                default value, ``yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]``.
         :param maxColumns: defines a hard limit of how many columns a record can have. If None is
                            set, it uses the default value, ``20480``.
         :param maxCharsPerColumn: defines the maximum number of characters allowed for any given
@@ -892,7 +892,7 @@ class DataFrameWriter(OptionUtils):
         :param timestampFormat: sets the string that indicates a timestamp format.
                                 Custom date formats follow the formats at `datetime pattern`_.
                                 This applies to timestamp type. If None is set, it uses the
-                                default value, ``yyyy-MM-dd'T'HH:mm:ss.SSSXXX``.
+                                default value, ``yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]``.
         :param encoding: specifies encoding (charset) of saved json files. If None is set,
                         the default UTF-8 charset will be used.
         :param lineSep: defines the line separator that should be used for writing. If None is
@@ -996,7 +996,7 @@ class DataFrameWriter(OptionUtils):
         :param timestampFormat: sets the string that indicates a timestamp format.
                                 Custom date formats follow the formats at `datetime pattern`_.
                                 This applies to timestamp type. If None is set, it uses the
-                                default value, ``yyyy-MM-dd'T'HH:mm:ss.SSSXXX``.
+                                default value, ``yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]``.
         :param ignoreLeadingWhiteSpace: a flag indicating whether or not leading whitespaces from
                                         values being written should be skipped. If None is set, it
                                         uses the default value, ``true``.
diff --git a/python/pyspark/sql/streaming.py b/python/pyspark/sql/streaming.py
index 4d36a04..05cf331 100644
--- a/python/pyspark/sql/streaming.py
+++ b/python/pyspark/sql/streaming.py
@@ -483,7 +483,7 @@ class DataStreamReader(OptionUtils):
         :param timestampFormat: sets the string that indicates a timestamp format.
                                 Custom date formats follow the formats at `datetime pattern`_.
                                 This applies to timestamp type. If None is set, it uses the
-                                default value, ``yyyy-MM-dd'T'HH:mm:ss.SSSXXX``.
+                                default value, ``yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]``.
         :param multiLine: parse one record, which may span multiple lines, per file. If None is
                           set, it uses the default value, ``false``.
         :param allowUnquotedControlChars: allows JSON Strings to contain unquoted control
@@ -696,7 +696,7 @@ class DataStreamReader(OptionUtils):
         :param timestampFormat: sets the string that indicates a timestamp format.
                                 Custom date formats follow the formats at `datetime pattern`_.
                                 This applies to timestamp type. If None is set, it uses the
-                                default value, ``yyyy-MM-dd'T'HH:mm:ss.SSSXXX``.
+                                default value, ``yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]``.
         :param maxColumns: defines a hard limit of how many columns a record can have. If None is
                            set, it uses the default value, ``20480``.
         :param maxCharsPerColumn: defines the maximum number of characters allowed for any given
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
index 83e5678..a7b3d08 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
@@ -428,7 +428,7 @@ class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
    * This applies to date type.</li>
-   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that
+   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]`): sets the string that
    * indicates a timestamp format. Custom date formats follow the formats at
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
@@ -657,7 +657,7 @@ class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
    * This applies to date type.</li>
-   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that
+   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]`): sets the string that
    * indicates a timestamp format. Custom date formats follow the formats at
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
index d11e4db..9aef382 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
@@ -790,7 +790,7 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
    * This applies to date type.</li>
-   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that
+   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]`): sets the string that
    * indicates a timestamp format. Custom date formats follow the formats at
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
@@ -915,7 +915,7 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
    * This applies to date type.</li>
-   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that
+   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]`): sets the string that
    * indicates a timestamp format. Custom date formats follow the formats at
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala b/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala
index a2eaed8..1d7e4d3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala
@@ -290,7 +290,7 @@ final class DataStreamReader private[sql](sparkSession: SparkSession) extends Lo
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
    * This applies to date type.</li>
-   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that
+   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]`): sets the string that
    * indicates a timestamp format. Custom date formats follow the formats at
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
@@ -360,7 +360,7 @@ final class DataStreamReader private[sql](sparkSession: SparkSession) extends Lo
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.
    * This applies to date type.</li>
-   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that
+   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss[.SSS][XXX]`): sets the string that
    * indicates a timestamp format. Custom date formats follow the formats at
    * <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">
    *   Datetime Patterns</a>.


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
