This is an automated email from the ASF dual-hosted git repository. gurwls223 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new de59caa83af [SPARK-44222][BUILD][PYTHON] Upgrade grpc to 1.56.0 with lower/upperbound de59caa83af is described below commit de59caa83af1e6b2febb24597d06c8ff8505d888 Author: Hyukjin Kwon <gurwls...@apache.org> AuthorDate: Fri Jul 14 14:06:48 2023 +0900 [SPARK-44222][BUILD][PYTHON] Upgrade grpc to 1.56.0 with lower/upperbound ### What changes were proposed in this pull request? This PR reverts the revert of https://github.com/apache/spark/pull/41767 while setting grpc lower bounds. ### Why are the changes needed? See https://github.com/apache/spark/pull/41767 ### Does this PR introduce _any_ user-facing change? See https://github.com/apache/spark/pull/41767 ### How was this patch tested? Manually tested with a Conda environment, with `pip install -r dev/requirements.txt` in Python 3.9, Python 3.10 and Python 3.11. Closes #41997 from HyukjinKwon/SPARK-44222. Authored-by: Hyukjin Kwon <gurwls...@apache.org> Signed-off-by: Hyukjin Kwon <gurwls...@apache.org> --- .github/workflows/build_and_test.yml | 4 ++-- connector/connect/common/src/main/buf.gen.yaml | 4 ++-- dev/create-release/spark-rm/Dockerfile | 2 +- dev/requirements.txt | 4 ++-- pom.xml | 2 +- project/SparkBuild.scala | 2 +- python/docs/source/getting_started/install.rst | 16 ++++++++-------- python/setup.py | 2 +- 8 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 4370e622cf4..0b184c6c248 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -256,7 +256,7 @@ jobs: - name: Install Python packages (Python 3.8) if: (contains(matrix.modules, 'sql') && !contains(matrix.modules, 'sql-')) run: | - python3.8 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'grpcio==1.48.1' 'protobuf==3.19.5' + python3.8 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 
'grpcio==1.56.0' 'protobuf==3.19.5' python3.8 -m pip list # Run the tests. - name: Run tests @@ -625,7 +625,7 @@ jobs: # Jinja2 3.0.0+ causes error when building with Sphinx. # See also https://issues.apache.org/jira/browse/SPARK-35375. python3.9 -m pip install 'flake8==3.9.0' pydata_sphinx_theme 'mypy==0.982' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' numpydoc 'jinja2<3.0.0' 'black==22.6.0' - python3.9 -m pip install 'pandas-stubs==1.2.0.53' ipython 'grpcio==1.48.1' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' + python3.9 -m pip install 'pandas-stubs==1.2.0.53' ipython 'grpcio==1.56.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' - name: Python linter run: PYTHON_EXECUTABLE=python3.9 ./dev/lint-python - name: Install dependencies for Python code generation check diff --git a/connector/connect/common/src/main/buf.gen.yaml b/connector/connect/common/src/main/buf.gen.yaml index c816f12398d..07edaa567b1 100644 --- a/connector/connect/common/src/main/buf.gen.yaml +++ b/connector/connect/common/src/main/buf.gen.yaml @@ -22,14 +22,14 @@ plugins: out: gen/proto/csharp - plugin: buf.build/protocolbuffers/java:v21.7 out: gen/proto/java - - remote: buf.build/grpc/plugins/ruby:v1.47.0-1 + - plugin: buf.build/grpc/ruby:v1.56.0 out: gen/proto/ruby - plugin: buf.build/protocolbuffers/ruby:v21.7 out: gen/proto/ruby # Building the Python build and building the mypy interfaces. 
- plugin: buf.build/protocolbuffers/python:v21.7 out: gen/proto/python - - remote: buf.build/grpc/plugins/python:v1.47.0-1 + - plugin: buf.build/grpc/python:v1.56.0 out: gen/proto/python - name: mypy out: gen/proto/python diff --git a/dev/create-release/spark-rm/Dockerfile b/dev/create-release/spark-rm/Dockerfile index 8f198a420bc..def8626d3be 100644 --- a/dev/create-release/spark-rm/Dockerfile +++ b/dev/create-release/spark-rm/Dockerfile @@ -42,7 +42,7 @@ ARG APT_INSTALL="apt-get install --no-install-recommends -y" # We should use the latest Sphinx version once this is fixed. # TODO(SPARK-35375): Jinja2 3.0.0+ causes error when building with Sphinx. # See also https://issues.apache.org/jira/browse/SPARK-35375. -ARG PIP_PKGS="sphinx==3.0.4 mkdocs==1.1.2 numpy==1.20.3 pydata_sphinx_theme==0.4.1 ipython==7.19.0 nbsphinx==0.8.0 numpydoc==1.1.0 jinja2==2.11.3 twine==3.4.1 sphinx-plotly-directive==0.1.3 pandas==1.5.3 pyarrow==3.0.0 plotly==5.4.0 markupsafe==2.0.1 docutils<0.17 grpcio==1.48.1 protobuf==4.21.6 grpcio-status==1.48.1 googleapis-common-protos==1.56.4" +ARG PIP_PKGS="sphinx==3.0.4 mkdocs==1.1.2 numpy==1.20.3 pydata_sphinx_theme==0.4.1 ipython==7.19.0 nbsphinx==0.8.0 numpydoc==1.1.0 jinja2==2.11.3 twine==3.4.1 sphinx-plotly-directive==0.1.3 pandas==1.5.3 pyarrow==3.0.0 plotly==5.4.0 markupsafe==2.0.1 docutils<0.17 grpcio==1.56.0 protobuf==4.21.6 grpcio-status==1.56.0 googleapis-common-protos==1.56.4" ARG GEM_PKGS="bundler:2.3.8" # Install extra needed repos and refresh. 
diff --git a/dev/requirements.txt b/dev/requirements.txt index 72da5dbe163..6efefaae8eb 100644 --- a/dev/requirements.txt +++ b/dev/requirements.txt @@ -50,8 +50,8 @@ black==22.6.0 py # Spark Connect (required) -grpcio==1.48.1 -grpcio-status==1.48.1 +grpcio>=1.48,<1.57 +grpcio-status>=1.48,<1.57 protobuf==3.19.5 googleapis-common-protos==1.56.4 diff --git a/pom.xml b/pom.xml index 4a682edddcf..fc2534c8d33 100644 --- a/pom.xml +++ b/pom.xml @@ -288,7 +288,7 @@ <!-- Version used in Connect --> <connect.guava.version>32.0.1-jre</connect.guava.version> <guava.failureaccess.version>1.0.1</guava.failureaccess.version> - <io.grpc.version>1.47.0</io.grpc.version> + <io.grpc.version>1.56.0</io.grpc.version> <mima.version>1.1.2</mima.version> <tomcat.annotations.api.version>6.0.53</tomcat.annotations.api.version> diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index dbde112239f..8f2f5d78787 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -91,7 +91,7 @@ object BuildCommons { // SPARK-41247: needs to be consistent with `protobuf.version` in `pom.xml`. val protoVersion = "3.23.2" // GRPC version used for Spark Connect. - val gprcVersion = "1.47.0" + val gprcVersion = "1.56.0" } object SparkBuild extends PomBuild { diff --git a/python/docs/source/getting_started/install.rst b/python/docs/source/getting_started/install.rst index eb296dc16d6..6822285e961 100644 --- a/python/docs/source/getting_started/install.rst +++ b/python/docs/source/getting_started/install.rst @@ -153,15 +153,15 @@ To install PySpark from source, refer to |building_spark|_. 
Dependencies ------------ ========================== ========================= ====================================================================================== -Package Minimum supported version Note +Package Supported version Note ========================== ========================= ====================================================================================== -`py4j` 0.10.9.7 Required -`pandas` 1.0.5 Required for pandas API on Spark and Spark Connect; Optional for Spark SQL -`pyarrow` 4.0.0 Required for pandas API on Spark and Spark Connect; Optional for Spark SQL -`numpy` 1.15 Required for pandas API on Spark and MLLib DataFrame-based API; Optional for Spark SQL -`grpc` 1.48.1 Required for Spark Connect -`grpcio-status` 1.48.1 Required for Spark Connect -`googleapis-common-protos` 1.56.4 Required for Spark Connect +`py4j` >=0.10.9.7 Required +`pandas` >=1.0.5 Required for pandas API on Spark and Spark Connect; Optional for Spark SQL +`pyarrow` >=4.0.0 Required for pandas API on Spark and Spark Connect; Optional for Spark SQL +`numpy` >=1.15 Required for pandas API on Spark and MLLib DataFrame-based API; Optional for Spark SQL +`grpcio` >=1.48,<1.57 Required for Spark Connect +`grpcio-status` >=1.48,<1.57 Required for Spark Connect +`googleapis-common-protos` ==1.56.4 Required for Spark Connect ========================== ========================= ====================================================================================== Note that PySpark requires Java 8 or later with ``JAVA_HOME`` properly set. diff --git a/python/setup.py b/python/setup.py index 42acd902d51..f190930b2a7 100755 --- a/python/setup.py +++ b/python/setup.py @@ -132,7 +132,7 @@ if in_spark: # Also don't forget to update python/docs/source/getting_started/install.rst. 
_minimum_pandas_version = "1.0.5" _minimum_pyarrow_version = "4.0.0" -_minimum_grpc_version = "1.48.1" +_minimum_grpc_version = "1.56.0" _minimum_googleapis_common_protos_version = "1.56.4" --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org