This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new a47bde28d9a Revert "[SPARK-44222][BUILD][PYTHON] Upgrade `grpc` to 1.56.0"
a47bde28d9a is described below

commit a47bde28d9a2f6d77fba66350b81f1b4ce00c6cc
Author: Hyukjin Kwon <gurwls...@apache.org>
AuthorDate: Fri Jul 14 13:26:59 2023 +0900

    Revert "[SPARK-44222][BUILD][PYTHON] Upgrade `grpc` to 1.56.0"
    
    This reverts commit f26bdb7bfde56e4856a2f962162f204ba2dbb1c1.
---
 .github/workflows/build_and_test.yml           | 4 ++--
 connector/connect/common/src/main/buf.gen.yaml | 4 ++--
 dev/create-release/spark-rm/Dockerfile         | 2 +-
 dev/requirements.txt                           | 4 ++--
 pom.xml                                        | 2 +-
 project/SparkBuild.scala                       | 2 +-
 python/docs/source/getting_started/install.rst | 4 ++--
 python/setup.py                                | 2 +-
 8 files changed, 12 insertions(+), 12 deletions(-)
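
To sanity-check a local Spark Connect dev environment against the pins this revert restores, here is a minimal sketch (illustrative only, not part of the commit) that compares installed distributions with the versions from the dev/requirements.txt hunk below; the package names and pins come from that hunk, everything else is an assumption:

    # Illustrative only: compare installed versions with the pins restored below.
    from importlib.metadata import PackageNotFoundError, version

    pins = {
        "grpcio": "1.48.1",
        "grpcio-status": "1.48.1",
        "protobuf": "3.19.5",
        "googleapis-common-protos": "1.56.4",
    }
    for name, want in pins.items():
        try:
            got = version(name)
        except PackageNotFoundError:
            got = "not installed"
        status = "ok" if got == want else "mismatch"
        print(f"{name}: want {want}, found {got} ({status})")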

diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 0b184c6c248..4370e622cf4 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -256,7 +256,7 @@ jobs:
     - name: Install Python packages (Python 3.8)
      if: (contains(matrix.modules, 'sql') && !contains(matrix.modules, 'sql-'))
       run: |
-        python3.8 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'grpcio==1.56.0' 'protobuf==3.19.5'
+        python3.8 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'grpcio==1.48.1' 'protobuf==3.19.5'
         python3.8 -m pip list
     # Run the tests.
     - name: Run tests
@@ -625,7 +625,7 @@ jobs:
         # Jinja2 3.0.0+ causes error when building with Sphinx.
         #   See also https://issues.apache.org/jira/browse/SPARK-35375.
         python3.9 -m pip install 'flake8==3.9.0' pydata_sphinx_theme 'mypy==0.982' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' numpydoc 'jinja2<3.0.0' 'black==22.6.0'
-        python3.9 -m pip install 'pandas-stubs==1.2.0.53' ipython 'grpcio==1.56.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0'
+        python3.9 -m pip install 'pandas-stubs==1.2.0.53' ipython 'grpcio==1.48.1' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0'
     - name: Python linter
       run: PYTHON_EXECUTABLE=python3.9 ./dev/lint-python
     - name: Install dependencies for Python code generation check
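
To reproduce the two runtime pins the workflow installs above (grpcio and protobuf), a quick interpreter check of the module-level version strings can help; this is purely illustrative and not part of the commit:

    # Illustrative only: print the runtime versions the CI job pins above.
    import grpc
    import google.protobuf

    print("grpcio:", grpc.__version__)               # expected 1.48.1 after this revert
    print("protobuf:", google.protobuf.__version__)  # expected 3.19.5 per the pin above
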
diff --git a/connector/connect/common/src/main/buf.gen.yaml b/connector/connect/common/src/main/buf.gen.yaml
index 07edaa567b1..c816f12398d 100644
--- a/connector/connect/common/src/main/buf.gen.yaml
+++ b/connector/connect/common/src/main/buf.gen.yaml
@@ -22,14 +22,14 @@ plugins:
     out: gen/proto/csharp
   - plugin: buf.build/protocolbuffers/java:v21.7
     out: gen/proto/java
-  - plugin: buf.build/grpc/ruby:v1.56.0
+  - remote: buf.build/grpc/plugins/ruby:v1.47.0-1
     out: gen/proto/ruby
   - plugin: buf.build/protocolbuffers/ruby:v21.7
     out: gen/proto/ruby
    # Building the Python build and building the mypy interfaces.
   - plugin: buf.build/protocolbuffers/python:v21.7
     out: gen/proto/python
-  - plugin: buf.build/grpc/python:v1.56.0
+  - remote: buf.build/grpc/plugins/python:v1.47.0-1
     out: gen/proto/python
   - name: mypy
     out: gen/proto/python
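
For orientation only: the Python plugins above emit *_pb2/*_pb2_grpc modules under gen/proto/python, which are consumed through grpcio's channel/stub API. The sketch below is illustrative; the commented-out module, stub class, and port are assumptions, not names defined in this diff:

    # Illustrative only: how buf-generated Python gRPC stubs are typically used.
    import grpc

    # Hypothetical names for the generated modules/stubs; adjust to the real output.
    # from spark.connect import base_pb2_grpc

    channel = grpc.insecure_channel("localhost:15002")  # address/port are illustrative
    # stub = base_pb2_grpc.SparkConnectServiceStub(channel)
    channel.close()
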
diff --git a/dev/create-release/spark-rm/Dockerfile b/dev/create-release/spark-rm/Dockerfile
index def8626d3be..8f198a420bc 100644
--- a/dev/create-release/spark-rm/Dockerfile
+++ b/dev/create-release/spark-rm/Dockerfile
@@ -42,7 +42,7 @@ ARG APT_INSTALL="apt-get install --no-install-recommends -y"
 #   We should use the latest Sphinx version once this is fixed.
 # TODO(SPARK-35375): Jinja2 3.0.0+ causes error when building with Sphinx.
 #   See also https://issues.apache.org/jira/browse/SPARK-35375.
-ARG PIP_PKGS="sphinx==3.0.4 mkdocs==1.1.2 numpy==1.20.3 pydata_sphinx_theme==0.4.1 ipython==7.19.0 nbsphinx==0.8.0 numpydoc==1.1.0 jinja2==2.11.3 twine==3.4.1 sphinx-plotly-directive==0.1.3 pandas==1.5.3 pyarrow==3.0.0 plotly==5.4.0 markupsafe==2.0.1 docutils<0.17 grpcio==1.56.0 protobuf==4.21.6 grpcio-status==1.56.0 googleapis-common-protos==1.56.4"
+ARG PIP_PKGS="sphinx==3.0.4 mkdocs==1.1.2 numpy==1.20.3 pydata_sphinx_theme==0.4.1 ipython==7.19.0 nbsphinx==0.8.0 numpydoc==1.1.0 jinja2==2.11.3 twine==3.4.1 sphinx-plotly-directive==0.1.3 pandas==1.5.3 pyarrow==3.0.0 plotly==5.4.0 markupsafe==2.0.1 docutils<0.17 grpcio==1.48.1 protobuf==4.21.6 grpcio-status==1.48.1 googleapis-common-protos==1.56.4"
 ARG GEM_PKGS="bundler:2.3.8"
 
 # Install extra needed repos and refresh.
diff --git a/dev/requirements.txt b/dev/requirements.txt
index eb482f3f8a2..72da5dbe163 100644
--- a/dev/requirements.txt
+++ b/dev/requirements.txt
@@ -50,8 +50,8 @@ black==22.6.0
 py
 
 # Spark Connect (required)
-grpcio==1.56.0
-grpcio-status==1.56.0
+grpcio==1.48.1
+grpcio-status==1.48.1
 protobuf==3.19.5
 googleapis-common-protos==1.56.4
 
diff --git a/pom.xml b/pom.xml
index fc2534c8d33..4a682edddcf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -288,7 +288,7 @@
     <!-- Version used in Connect -->
     <connect.guava.version>32.0.1-jre</connect.guava.version>
     <guava.failureaccess.version>1.0.1</guava.failureaccess.version>
-    <io.grpc.version>1.56.0</io.grpc.version>
+    <io.grpc.version>1.47.0</io.grpc.version>
     <mima.version>1.1.2</mima.version>
     <tomcat.annotations.api.version>6.0.53</tomcat.annotations.api.version>
 
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 8f2f5d78787..dbde112239f 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -91,7 +91,7 @@ object BuildCommons {
   // SPARK-41247: needs to be consistent with `protobuf.version` in `pom.xml`.
   val protoVersion = "3.23.2"
   // GRPC version used for Spark Connect.
-  val gprcVersion = "1.56.0"
+  val gprcVersion = "1.47.0"
 }
 
 object SparkBuild extends PomBuild {
diff --git a/python/docs/source/getting_started/install.rst b/python/docs/source/getting_started/install.rst
index 9599161c7d4..eb296dc16d6 100644
--- a/python/docs/source/getting_started/install.rst
+++ b/python/docs/source/getting_started/install.rst
@@ -159,8 +159,8 @@ Package                    Minimum supported version Note
 `pandas`                   1.0.5                     Required for pandas API on Spark and Spark Connect; Optional for Spark SQL
 `pyarrow`                  4.0.0                     Required for pandas API on Spark and Spark Connect; Optional for Spark SQL
 `numpy`                    1.15                      Required for pandas API on Spark and MLLib DataFrame-based API; Optional for Spark SQL
-`grpcio`                   1.56.0                    Required for Spark Connect
-`grpcio-status`            1.56.0                    Required for Spark Connect
+`grpc`                     1.48.1                    Required for Spark Connect
+`grpcio-status`            1.48.1                    Required for Spark Connect
 `googleapis-common-protos` 1.56.4                    Required for Spark Connect
 ========================== ========================= ======================================================================================
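
As a rough cross-check against the minimums documented in the table above, the following sketch is illustrative and not part of the commit; the pip distribution names and the use of the third-party packaging library are assumptions:

    # Illustrative only: assert installed versions meet the documented minimums.
    from importlib.metadata import version
    from packaging.version import Version  # requires the 'packaging' distribution

    minimums = {
        "grpcio": "1.48.1",                  # listed as `grpc` in the table above
        "grpcio-status": "1.48.1",
        "googleapis-common-protos": "1.56.4",
    }
    for name, floor in minimums.items():
        assert Version(version(name)) >= Version(floor), f"{name} older than {floor}"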
 
diff --git a/python/setup.py b/python/setup.py
index f190930b2a7..42acd902d51 100755
--- a/python/setup.py
+++ b/python/setup.py
@@ -132,7 +132,7 @@ if in_spark:
 # Also don't forget to update python/docs/source/getting_started/install.rst.
 _minimum_pandas_version = "1.0.5"
 _minimum_pyarrow_version = "4.0.0"
-_minimum_grpc_version = "1.56.0"
+_minimum_grpc_version = "1.48.1"
 _minimum_googleapis_common_protos_version = "1.56.4"
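
For context, constants like these usually feed lower bounds in a packaging extra; the sketch below is illustrative only, and the extra name, package name, and dependency list are assumptions rather than the actual contents of python/setup.py:

    # Illustrative sketch only; not the real python/setup.py.
    from setuptools import setup

    _minimum_grpc_version = "1.48.1"
    _minimum_googleapis_common_protos_version = "1.56.4"

    setup(
        name="example-connect-client",  # hypothetical package name
        extras_require={
            "connect": [
                "grpcio>=%s" % _minimum_grpc_version,
                "grpcio-status>=%s" % _minimum_grpc_version,
                "googleapis-common-protos>=%s" % _minimum_googleapis_common_protos_version,
            ],
        },
    )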
 
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
