This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 60e324a  [SPARK-34688][PYTHON] Upgrade to Py4J 0.10.9.2
60e324a is described below

commit 60e324aa9f071260f130780fde7438911b0aecee
Author: wankunde <wanku...@163.com>
AuthorDate: Thu Mar 11 09:51:41 2021 -0600

    [SPARK-34688][PYTHON] Upgrade to Py4J 0.10.9.2
    
    ### What changes were proposed in this pull request?
    This PR upgrades Py4J from 0.10.9.1 to 0.10.9.2, which contains some bug fixes and improvements.
    
    * expose shell parameter in Popen inside launch_gateway. ([bartdag/py4j220efc3](https://github.com/bartdag/py4j/commit/220efc371620c3ffa1cfe75e6bdaa710e70d9d21))
    * fixed Flake8 errors ([bartdag/py4j6c6ee9a](https://github.com/bartdag/py4j/commit/6c6ee9aedcff23ea142306ca08912822426b5f3d))
    
    ### Why are the changes needed?
    To leverage fixes from the upstream in Py4J.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Jenkins build and GitHub Actions will test it out.
    
    Closes #31796 from wankunde/py4j.
    
    Authored-by: wankunde <wanku...@163.com>
    Signed-off-by: Sean Owen <sro...@gmail.com>
---
 bin/pyspark                                             |   2 +-
 bin/pyspark2.cmd                                        |   2 +-
 core/pom.xml                                            |   2 +-
 .../scala/org/apache/spark/api/python/PythonUtils.scala |   2 +-
 dev/deps/spark-deps-hadoop-2.7-hive-2.3                 |   2 +-
 dev/deps/spark-deps-hadoop-3.2-hive-2.3                 |   2 +-
 python/docs/Makefile                                    |   2 +-
 python/docs/make2.bat                                   |   2 +-
 python/docs/source/getting_started/install.rst          |   2 +-
 python/lib/py4j-0.10.9.1-src.zip                        | Bin 41589 -> 0 bytes
 python/lib/py4j-0.10.9.2-src.zip                        | Bin 0 -> 41839 bytes
 python/setup.py                                         |   2 +-
 sbin/spark-config.sh                                    |   2 +-
 13 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/bin/pyspark b/bin/pyspark
index 251bfef..38ebe51 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -50,7 +50,7 @@ export PYSPARK_DRIVER_PYTHON_OPTS
 
 # Add the PySpark classes to the Python path:
 export PYTHONPATH="${SPARK_HOME}/python/:$PYTHONPATH"
-export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.1-src.zip:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.2-src.zip:$PYTHONPATH"
 
 # Load the PySpark shell.py script when ./pyspark is used interactively:
 export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index 5741480..f5f9fad 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -30,7 +30,7 @@ if "x%PYSPARK_DRIVER_PYTHON%"=="x" (
 )
 
 set PYTHONPATH=%SPARK_HOME%\python;%PYTHONPATH%
-set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.10.9.1-src.zip;%PYTHONPATH%
+set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.10.9.2-src.zip;%PYTHONPATH%
 
 set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py
diff --git a/core/pom.xml b/core/pom.xml
index 30be9f7..ec80807 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -427,7 +427,7 @@
     <dependency>
       <groupId>net.sf.py4j</groupId>
       <artifactId>py4j</artifactId>
-      <version>0.10.9.1</version>
+      <version>0.10.9.2</version>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
index 717eb4d..549edc4 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
@@ -27,7 +27,7 @@ import org.apache.spark.SparkContext
 import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}
 
 private[spark] object PythonUtils {
-  val PY4J_ZIP_NAME = "py4j-0.10.9.1-src.zip"
+  val PY4J_ZIP_NAME = "py4j-0.10.9.2-src.zip"
 
   /** Get the PYTHONPATH for PySpark, either from SPARK_HOME, if it is set, or from our JAR */
   def sparkPythonPath: String = {
diff --git a/dev/deps/spark-deps-hadoop-2.7-hive-2.3 b/dev/deps/spark-deps-hadoop-2.7-hive-2.3
index 1d28b23..51e9875 100644
--- a/dev/deps/spark-deps-hadoop-2.7-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-2.7-hive-2.3
@@ -210,7 +210,7 @@ parquet-format-structures/1.11.1//parquet-format-structures-1.11.1.jar
 parquet-hadoop/1.11.1//parquet-hadoop-1.11.1.jar
 parquet-jackson/1.11.1//parquet-jackson-1.11.1.jar
 protobuf-java/2.5.0//protobuf-java-2.5.0.jar
-py4j/0.10.9.1//py4j-0.10.9.1.jar
+py4j/0.10.9.2//py4j-0.10.9.2.jar
 pyrolite/4.30//pyrolite-4.30.jar
 scala-collection-compat_2.12/2.1.1//scala-collection-compat_2.12-2.1.1.jar
 scala-compiler/2.12.10//scala-compiler-2.12.10.jar
diff --git a/dev/deps/spark-deps-hadoop-3.2-hive-2.3 b/dev/deps/spark-deps-hadoop-3.2-hive-2.3
index 3b65d678..e1e2a4e 100644
--- a/dev/deps/spark-deps-hadoop-3.2-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3.2-hive-2.3
@@ -181,7 +181,7 @@ parquet-format-structures/1.11.1//parquet-format-structures-1.11.1.jar
 parquet-hadoop/1.11.1//parquet-hadoop-1.11.1.jar
 parquet-jackson/1.11.1//parquet-jackson-1.11.1.jar
 protobuf-java/2.5.0//protobuf-java-2.5.0.jar
-py4j/0.10.9.1//py4j-0.10.9.1.jar
+py4j/0.10.9.2//py4j-0.10.9.2.jar
 pyrolite/4.30//pyrolite-4.30.jar
 scala-collection-compat_2.12/2.1.1//scala-collection-compat_2.12-2.1.1.jar
 scala-compiler/2.12.10//scala-compiler-2.12.10.jar
diff --git a/python/docs/Makefile b/python/docs/Makefile
index 090ad7d..86bce0f 100644
--- a/python/docs/Makefile
+++ b/python/docs/Makefile
@@ -6,7 +6,7 @@ SPHINXBUILD   ?= sphinx-build
 SOURCEDIR     ?= source
 BUILDDIR      ?= build
 
-export PYTHONPATH=$(realpath ..):$(realpath ../lib/py4j-0.10.9.1-src.zip)
+export PYTHONPATH=$(realpath ..):$(realpath ../lib/py4j-0.10.9.2-src.zip)
 
 # Put it first so that "make" without argument is like "make help".
 help:
diff --git a/python/docs/make2.bat b/python/docs/make2.bat
index 485b5ed..26364c1 100644
--- a/python/docs/make2.bat
+++ b/python/docs/make2.bat
@@ -8,7 +8,7 @@ if "%SPHINXBUILD%" == "" (
 set SOURCEDIR=source
 set BUILDDIR=build
 
-set PYTHONPATH=..;..\lib\py4j-0.10.9.1-src.zip
+set PYTHONPATH=..;..\lib\py4j-0.10.9.2-src.zip
 
 if "%1" == "" goto help
 
diff --git a/python/docs/source/getting_started/install.rst b/python/docs/source/getting_started/install.rst
index c548542..a14f2b8 100644
--- a/python/docs/source/getting_started/install.rst
+++ b/python/docs/source/getting_started/install.rst
@@ -158,7 +158,7 @@ Package       Minimum supported version Note
 `pandas`      0.23.2                    Optional for SQL
 `NumPy`       1.7                       Required for ML 
 `pyarrow`     1.0.0                     Optional for SQL
-`Py4J`        0.10.9                    Required
+`Py4J`        0.10.9.2                  Required
 ============= ========================= ================
 
 Note that PySpark requires Java 8 or later with ``JAVA_HOME`` properly set.  
diff --git a/python/lib/py4j-0.10.9.1-src.zip b/python/lib/py4j-0.10.9.1-src.zip
deleted file mode 100644
index 11eb331..0000000
Binary files a/python/lib/py4j-0.10.9.1-src.zip and /dev/null differ
diff --git a/python/lib/py4j-0.10.9.2-src.zip b/python/lib/py4j-0.10.9.2-src.zip
new file mode 100644
index 0000000..f82d64e
Binary files /dev/null and b/python/lib/py4j-0.10.9.2-src.zip differ
diff --git a/python/setup.py b/python/setup.py
index c7f195b..b32569b 100755
--- a/python/setup.py
+++ b/python/setup.py
@@ -250,7 +250,7 @@ try:
         license='http://www.apache.org/licenses/LICENSE-2.0',
         # Don't forget to update python/docs/source/getting_started/install.rst
         # if you're updating the versions or dependencies.
-        install_requires=['py4j==0.10.9.1'],
+        install_requires=['py4j==0.10.9.2'],
         extras_require={
             'ml': ['numpy>=1.7'],
             'mllib': ['numpy>=1.7'],
diff --git a/sbin/spark-config.sh b/sbin/spark-config.sh
index 7389416..ae8b223 100755
--- a/sbin/spark-config.sh
+++ b/sbin/spark-config.sh
@@ -28,6 +28,6 @@ export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"${SPARK_HOME}/conf"}"
 # Add the PySpark classes to the PYTHONPATH:
 if [ -z "${PYSPARK_PYTHONPATH_SET}" ]; then
   export PYTHONPATH="${SPARK_HOME}/python:${PYTHONPATH}"
-  export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.1-src.zip:${PYTHONPATH}"
+  export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.2-src.zip:${PYTHONPATH}"
   export PYSPARK_PYTHONPATH_SET=1
 fi


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to