[2/2] spark git commit: [PYSPARK] Update py4j to version 0.10.7.

2018-05-10 Thread vanzin
[PYSPARK] Update py4j to version 0.10.7.

(cherry picked from commit cc613b552e753d03cb62661591de59e1c8d82c74)
Signed-off-by: Marcelo Vanzin 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/323dc3ad
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/323dc3ad
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/323dc3ad

Branch: refs/heads/branch-2.3
Commit: 323dc3ad02e63a7c99b5bd6da618d6020657ecba
Parents: eab10f9
Author: Marcelo Vanzin 
Authored: Fri Apr 13 14:28:24 2018 -0700
Committer: Marcelo Vanzin 
Committed: Thu May 10 10:47:37 2018 -0700

--
 LICENSE |   2 +-
 bin/pyspark |   6 +-
 bin/pyspark2.cmd|   2 +-
 core/pom.xml|   2 +-
 .../org/apache/spark/SecurityManager.scala  |  11 +-
 .../spark/api/python/PythonGatewayServer.scala  |  50 ++---
 .../org/apache/spark/api/python/PythonRDD.scala |  29 --
 .../apache/spark/api/python/PythonUtils.scala   |   2 +-
 .../spark/api/python/PythonWorkerFactory.scala  |  21 ++--
 .../org/apache/spark/deploy/PythonRunner.scala  |  12 ++-
 .../apache/spark/internal/config/package.scala  |   5 +
 .../spark/security/SocketAuthHelper.scala   | 101 +++
 .../scala/org/apache/spark/util/Utils.scala |  12 +++
 .../spark/security/SocketAuthHelperSuite.scala  |  97 ++
 dev/deps/spark-deps-hadoop-2.6  |   2 +-
 dev/deps/spark-deps-hadoop-2.7  |   2 +-
 dev/run-pip-tests   |   2 +-
 python/README.md|   2 +-
 python/docs/Makefile|   2 +-
 python/lib/py4j-0.10.6-src.zip  | Bin 80352 -> 0 bytes
 python/lib/py4j-0.10.7-src.zip  | Bin 0 -> 42437 bytes
 python/pyspark/context.py   |   4 +-
 python/pyspark/daemon.py|  21 +++-
 python/pyspark/java_gateway.py  |  93 ++---
 python/pyspark/rdd.py   |  21 ++--
 python/pyspark/sql/dataframe.py |  12 +--
 python/pyspark/worker.py|   7 +-
 python/setup.py |   2 +-
 .../org/apache/spark/deploy/yarn/Client.scala   |   2 +-
 .../spark/deploy/yarn/YarnClusterSuite.scala|   2 +-
 sbin/spark-config.sh|   2 +-
 .../scala/org/apache/spark/sql/Dataset.scala|   6 +-
 32 files changed, 418 insertions(+), 116 deletions(-)
--
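
Beyond the version bump, the file list above shows new authentication
plumbing: a SocketAuthHelper class (plus a test suite) on the JVM side and
matching changes to PythonGatewayServer, java_gateway.py and daemon.py.
py4j 0.10.7 supports token-authenticated gateway connections. As a minimal
sketch only (the port and token below are placeholders; Spark generates and
passes its own values internally), a client presents the token through
py4j's GatewayParameters:

from py4j.java_gateway import JavaGateway, GatewayParameters

# Placeholder values: Spark generates the port and secret itself and
# hands them to the Python process when the gateway starts.
gateway = JavaGateway(
    gateway_parameters=GatewayParameters(
        port=25333,
        auth_token="placeholder-secret"))
jvm = gateway.jvm  # JVM proxies work once the token is accepted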


http://git-wip-us.apache.org/repos/asf/spark/blob/323dc3ad/LICENSE
--
diff --git a/LICENSE b/LICENSE
index c2b0d72..820f14d 100644
--- a/LICENSE
+++ b/LICENSE
@@ -263,7 +263,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
 (New BSD license) Protocol Buffer Java API (org.spark-project.protobuf:protobuf-java:2.4.1-shaded - http://code.google.com/p/protobuf)
 (The BSD License) Fortran to Java ARPACK (net.sourceforge.f2j:arpack_combined_all:0.1 - http://f2j.sourceforge.net)
 (The BSD License) xmlenc Library (xmlenc:xmlenc:0.52 - http://xmlenc.sourceforge.net)
- (The New BSD License) Py4J (net.sf.py4j:py4j:0.10.6 - http://py4j.sourceforge.net/)
+ (The New BSD License) Py4J (net.sf.py4j:py4j:0.10.7 - http://py4j.sourceforge.net/)
 (Two-clause BSD-style license) JUnit-Interface (com.novocode:junit-interface:0.10 - http://github.com/szeiger/junit-interface/)
 (BSD licence) sbt and sbt-launch-lib.bash
 (BSD 3 Clause) d3.min.js (https://github.com/mbostock/d3/blob/master/LICENSE)
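
The same version string is pinned in python/setup.py (see the file list
above). As a quick, illustrative check that the importable py4j matches the
LICENSE entry:

from py4j.version import __version__

# Sketch: the bundled py4j should match the LICENSE entry above.
assert __version__ == "0.10.7", __version__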

http://git-wip-us.apache.org/repos/asf/spark/blob/323dc3ad/bin/pyspark
--
diff --git a/bin/pyspark b/bin/pyspark
index dd28627..5d5affb 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -25,14 +25,14 @@ source "${SPARK_HOME}"/bin/load-spark-env.sh
 export _SPARK_CMD_USAGE="Usage: ./bin/pyspark [options]"
 
 # In Spark 2.0, IPYTHON and IPYTHON_OPTS are removed and pyspark fails to launch if either option
-# is set in the user's environment. Instead, users should set PYSPARK_DRIVER_PYTHON=ipython 
+# is set in the user's environment. Instead, users should set PYSPARK_DRIVER_PYTHON=ipython
 # to use IPython and set PYSPARK_DRIVER_PYTHON_OPTS to pass options when starting the Python driver
 # (e.g. PYSPARK_DRIVER_PYTHON_OPTS='notebook').  This supports full customization of the IPython
 # and executor Python executables.
 
 # Fail noisily if removed options are set
 if [[ -n "$IPYTHON" || -n "$IPYTHON_OPTS" ]]; then
-  echo "Error in pyspark startup:" 
+  echo "Error in pyspark startup:"

[2/2] spark git commit: [PYSPARK] Update py4j to version 0.10.7.

2018-05-09 Thread vanzin
[PYSPARK] Update py4j to version 0.10.7.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/cc613b55
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/cc613b55
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/cc613b55

Branch: refs/heads/master
Commit: cc613b552e753d03cb62661591de59e1c8d82c74
Parents: 94155d0
Author: Marcelo Vanzin 
Authored: Fri Apr 13 14:28:24 2018 -0700
Committer: Marcelo Vanzin 
Committed: Wed May 9 10:47:35 2018 -0700

--
 LICENSE |   2 +-
 bin/pyspark |   6 +-
 bin/pyspark2.cmd|   2 +-
 core/pom.xml|   2 +-
 .../org/apache/spark/SecurityManager.scala  |  12 +--
 .../spark/api/python/PythonGatewayServer.scala  |  50 ++---
 .../org/apache/spark/api/python/PythonRDD.scala |  29 --
 .../apache/spark/api/python/PythonUtils.scala   |   2 +-
 .../spark/api/python/PythonWorkerFactory.scala  |  20 ++--
 .../org/apache/spark/deploy/PythonRunner.scala  |  12 ++-
 .../apache/spark/internal/config/package.scala  |   5 +
 .../spark/security/SocketAuthHelper.scala   | 101 +++
 .../scala/org/apache/spark/util/Utils.scala |  13 ++-
 .../spark/security/SocketAuthHelperSuite.scala  |  97 ++
 dev/deps/spark-deps-hadoop-2.6  |   2 +-
 dev/deps/spark-deps-hadoop-2.7  |   2 +-
 dev/deps/spark-deps-hadoop-3.1  |   2 +-
 dev/run-pip-tests   |   2 +-
 python/README.md|   2 +-
 python/docs/Makefile|   2 +-
 python/lib/py4j-0.10.6-src.zip  | Bin 80352 -> 0 bytes
 python/lib/py4j-0.10.7-src.zip  | Bin 0 -> 42437 bytes
 python/pyspark/context.py   |   4 +-
 python/pyspark/daemon.py|  21 +++-
 python/pyspark/java_gateway.py  |  93 ++---
 python/pyspark/rdd.py   |  21 ++--
 python/pyspark/sql/dataframe.py |  12 +--
 python/pyspark/worker.py|   7 +-
 python/setup.py |   2 +-
 .../org/apache/spark/deploy/yarn/Client.scala   |   2 +-
 .../spark/deploy/yarn/YarnClusterSuite.scala|   2 +-
 sbin/spark-config.sh|   2 +-
 .../scala/org/apache/spark/sql/Dataset.scala|   6 +-
 33 files changed, 417 insertions(+), 120 deletions(-)
--
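
The new SocketAuthHelper.scala (101 lines, with a 97-line test suite)
points at a simple handshake: both ends hold a shared secret, and every
local socket connection must present it before any payload traffic. A
hypothetical Python-side sketch of that pattern (the function name, wire
format, and "ok" reply are illustrative, not Spark's actual protocol):

import socket

def authenticate(sock: socket.socket, secret: bytes) -> None:
    # Hypothetical handshake: send the shared secret and expect an
    # "ok" acknowledgement before any other traffic on the socket.
    sock.sendall(secret)
    if sock.recv(2) != b"ok":
        sock.close()
        raise RuntimeError("socket authentication failed")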


http://git-wip-us.apache.org/repos/asf/spark/blob/cc613b55/LICENSE
--
diff --git a/LICENSE b/LICENSE
index c2b0d72..820f14d 100644
--- a/LICENSE
+++ b/LICENSE
@@ -263,7 +263,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
 (New BSD license) Protocol Buffer Java API (org.spark-project.protobuf:protobuf-java:2.4.1-shaded - http://code.google.com/p/protobuf)
 (The BSD License) Fortran to Java ARPACK (net.sourceforge.f2j:arpack_combined_all:0.1 - http://f2j.sourceforge.net)
 (The BSD License) xmlenc Library (xmlenc:xmlenc:0.52 - http://xmlenc.sourceforge.net)
- (The New BSD License) Py4J (net.sf.py4j:py4j:0.10.6 - http://py4j.sourceforge.net/)
+ (The New BSD License) Py4J (net.sf.py4j:py4j:0.10.7 - http://py4j.sourceforge.net/)
 (Two-clause BSD-style license) JUnit-Interface (com.novocode:junit-interface:0.10 - http://github.com/szeiger/junit-interface/)
 (BSD licence) sbt and sbt-launch-lib.bash
 (BSD 3 Clause) d3.min.js (https://github.com/mbostock/d3/blob/master/LICENSE)

http://git-wip-us.apache.org/repos/asf/spark/blob/cc613b55/bin/pyspark
--
diff --git a/bin/pyspark b/bin/pyspark
index dd28627..5d5affb 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -25,14 +25,14 @@ source "${SPARK_HOME}"/bin/load-spark-env.sh
 export _SPARK_CMD_USAGE="Usage: ./bin/pyspark [options]"
 
 # In Spark 2.0, IPYTHON and IPYTHON_OPTS are removed and pyspark fails to launch if either option
-# is set in the user's environment. Instead, users should set PYSPARK_DRIVER_PYTHON=ipython 
+# is set in the user's environment. Instead, users should set PYSPARK_DRIVER_PYTHON=ipython
 # to use IPython and set PYSPARK_DRIVER_PYTHON_OPTS to pass options when starting the Python driver
 # (e.g. PYSPARK_DRIVER_PYTHON_OPTS='notebook').  This supports full customization of the IPython
 # and executor Python executables.
 
 # Fail noisily if removed options are set
 if [[ -n "$IPYTHON" || -n "$IPYTHON_OPTS" ]]; then
-  echo "Error in pyspark startup:" 
+  echo "Error in pyspark startup:"
   echo "IPYTHON