This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch branch-0.10
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/branch-0.10 by this push:
     new be57acc  [ZEPPELIN-5525] Python vanilla interpreter doesn't work in Python 3.8
be57acc is described below

commit be57accef8452da5974b8a5529ffa8b52223921e
Author: Jeff Zhang <zjf...@apache.org>
AuthorDate: Thu Sep 16 16:41:46 2021 +0800

    [ZEPPELIN-5525] Python vanilla interpreter doesn't work in Python 3.8
    
    ### What is this PR for?
    
    The root cause is that the ast API changed in Python 3.8, see https://github.com/ipython/ipython/pull/11593.
    * This PR fixes the issue in `zeppelin_python.py` (a minimal sketch of the compatibility approach follows this list).
    * This PR adds tests for Python 3.8 for both the Python interpreter and the Spark interpreter (Spark 2.4 doesn't support Python 3.8, so there is no Python 3.8 test for Spark 2.4). `jupyter_client` 5 is used because `jupyter_client` 7 doesn't work (tracked in ZEPPELIN-5533).
    * The bokeh test is disabled for the Flink interpreter because pyflink's dependencies conflict with bokeh 2.
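
    For illustration only, here is a minimal standalone sketch of the compatibility shim idea; it is not the exact committed code (the real change is in the `zeppelin_python.py` hunk below), just the same approach of wrapping `ast.Module`:

    ```python
    import ast
    import sys

    # Since Python 3.8, ast.Module takes a second `type_ignores` argument
    # (see https://github.com/ipython/ipython/pull/11593). On older Pythons,
    # wrap the one-argument constructor so callers can pass it uniformly.
    if sys.version_info >= (3, 8):
        Module = ast.Module
    else:
        Module = lambda nodelist, type_ignores: ast.Module(nodelist)

    # Works the same way on both old and new interpreters.
    tree = ast.parse("print('hello from the shim')")
    mod = Module([tree.body[0]], [])
    exec(compile(mod, '<stdin>', 'exec'), {})
    ```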
    
    ### What type of PR is it?
    [Bug Fix]
    
    ### Todos
    * [ ] - Task
    
    ### What is the Jira issue?
    * https://issues.apache.org/jira/browse/ZEPPELIN-5525
    
    ### How should this be tested?
    * CI passes
    
    ### Screenshots (if appropriate)
    
    ### Questions:
    * Do the license files need to be updated? No
    * Are there breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: Jeff Zhang <zjf...@apache.org>
    
    Closes #4230 from zjffdu/ZEPPELIN-5525 and squashes the following commits:
    
    b816a530c0 [Jeff Zhang] [ZEPPELIN-5525] Python vanilla interpreter doesn't work in Python 3.8
    
    (cherry picked from commit fa1f72891f7c83911deefedef5275fa766b5b6ab)
    Signed-off-by: Jeff Zhang <zjf...@apache.org>
---
 .github/workflows/core.yml                         | 31 +++++++++++-------
 .../zeppelin/flink/IPyFlinkInterpreterTest.java    |  6 ++++
 .../src/main/resources/python/zeppelin_python.py   | 13 ++++++--
 .../zeppelin/python/IPythonInterpreterTest.java    | 24 ++++++++++----
 testing/env_python_3.7_with_R.yml                  | 33 +++++++++++++++++++
 testing/env_python_3.8_with_R.yml                  | 33 +++++++++++++++++++
 testing/env_python_3_with_R.yml                    | 37 +++++++++++-----------
 testing/env_python_3_with_R_and_tensorflow.yml     | 36 ++++++++++-----------
 testing/env_python_3_with_flink_110.yml            | 34 ++++++++++----------
 testing/env_python_3_with_flink_111.yml            | 34 ++++++++++----------
 testing/env_python_3_with_flink_112.yml            | 37 +++++++++++-----------
 testing/env_python_3_with_flink_113.yml            | 37 +++++++++++-----------
 12 files changed, 231 insertions(+), 124 deletions(-)

diff --git a/.github/workflows/core.yml b/.github/workflows/core.yml
index 81e0822..3f866fd 100644
--- a/.github/workflows/core.yml
+++ b/.github/workflows/core.yml
@@ -32,6 +32,7 @@ jobs:
       fail-fast: false
       matrix:
         hadoop: [hadoop2, hadoop3]
+        python: [3.7, 3.8]
     steps:
       - name: Checkout
         uses: actions/checkout@v2
@@ -51,12 +52,12 @@ jobs:
           key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
           restore-keys: |
             ${{ runner.os }}-zeppelin-
-      - name: Setup conda environment with python 3.7 and R
+      - name: Setup conda environment with python ${{ matrix.python }} and R
         uses: conda-incubator/setup-miniconda@v2
         with:
           activate-environment: python_3_with_R
-          environment-file: testing/env_python_3_with_R.yml
-          python-version: 3.7
+          environment-file: testing/env_python_${{ matrix.python }}_with_R.yml
+          python-version: ${{ matrix.python }}
           auto-activate-base: false
           channel-priority: strict
       - name: Make IRkernel available to Jupyter
@@ -298,6 +299,10 @@ jobs:
 
   spark-3-0-and-scala-2-12-and-other-interpreter:
     runs-on: ubuntu-20.04
+    strategy:
+      fail-fast: false
+      matrix:
+        python: [ 3.7, 3.8 ]
     steps:
       - name: Checkout
         uses: actions/checkout@v2
@@ -317,12 +322,12 @@ jobs:
           key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
           restore-keys: |
             ${{ runner.os }}-zeppelin-
-      - name: Setup conda environment with python 3.7 and R
+      - name: Setup conda environment with python ${{ matrix.python }} and R
         uses: conda-incubator/setup-miniconda@v2
         with:
           activate-environment: python_3_with_R
-          environment-file: testing/env_python_3_with_R.yml
-          python-version: 3.7
+          environment-file: testing/env_python_${{ matrix.python }}_with_R.yml
+          python-version: ${{ matrix.python }}
           auto-activate-base: false
       - name: Make IRkernel available to Jupyter
         run: |
@@ -330,11 +335,15 @@ jobs:
       - name: install environment
         run: |
          mvn install -DskipTests -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-3.0 -Pspark-scala-2.12 -Phadoop2 -B
-      - name: run tests
+      - name: run tests with ${{ matrix.python }}
        run: mvn test -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-3.0 -Pspark-scala-2.12 -Phadoop2 -B -Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false
 
   spark-3-1-and-scala-2-12-and-other-interpreter:
     runs-on: ubuntu-20.04
+    strategy:
+      fail-fast: false
+      matrix:
+        python: [ 3.7, 3.8 ]
     steps:
       - name: Checkout
         uses: actions/checkout@v2
@@ -354,19 +363,19 @@ jobs:
           key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
           restore-keys: |
             ${{ runner.os }}-zeppelin-
-      - name: Setup conda environment with python 3.7 and R
+      - name: Setup conda environment with python ${{ matrix.python }} and R
         uses: conda-incubator/setup-miniconda@v2
         with:
           activate-environment: python_3_with_R
-          environment-file: testing/env_python_3_with_R.yml
-          python-version: 3.7
+          environment-file: testing/env_python_${{ matrix.python }}_with_R.yml
+          python-version: ${{ matrix.python }}
           auto-activate-base: false
       - name: Make IRkernel available to Jupyter
         run: |
           R -e "IRkernel::installspec()"
       - name: install environment
        run: mvn install -DskipTests -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-3.1 -Pspark-scala-2.12 -Phadoop2 -B
-      - name: run tests
+      - name: run tests with ${{ matrix.python }}
        run: mvn test -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-3.1 -Pspark-scala-2.12 -Phadoop2 -B -Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false
   test-livy-0-5-with-spark-2-2-0-under-python3:
     runs-on: ubuntu-20.04
diff --git a/flink/flink-scala-parent/src/test/java/org/apache/zeppelin/flink/IPyFlinkInterpreterTest.java b/flink/flink-scala-parent/src/test/java/org/apache/zeppelin/flink/IPyFlinkInterpreterTest.java
index e1536bd..ac8d65b 100644
--- a/flink/flink-scala-parent/src/test/java/org/apache/zeppelin/flink/IPyFlinkInterpreterTest.java
+++ b/flink/flink-scala-parent/src/test/java/org/apache/zeppelin/flink/IPyFlinkInterpreterTest.java
@@ -62,6 +62,12 @@ public class IPyFlinkInterpreterTest extends IPythonInterpreterTest {
   private LazyOpenInterpreter flinkScalaInterpreter;
 
 
+  public IPyFlinkInterpreterTest() {
+    super();
+    // disable bokeh test, because its (bokeh2) dependencies conflicts with apache-flink,
+    this.enableBokehTest = false;
+  }
+
   protected Properties initIntpProperties() {
     Properties p = new Properties();
     p.setProperty("zeppelin.pyflink.python", "python");
diff --git a/python/src/main/resources/python/zeppelin_python.py b/python/src/main/resources/python/zeppelin_python.py
index db224e4..f3f9186 100644
--- a/python/src/main/resources/python/zeppelin_python.py
+++ b/python/src/main/resources/python/zeppelin_python.py
@@ -77,6 +77,15 @@ class PythonCompletion:
      result = json.dumps(list(filter(lambda x : not re.match("^__.*", x), list(completionList))))
       self.interpreter.setStatementsFinished(result, False)
 
+# ast api is changed after python 3.8, see https://github.com/ipython/ipython/pull/11593
+if sys.version_info > (3,8):
+  from ast import Module
+else :
+  # mock the new API, ignore second argument
+  # see https://github.com/ipython/ipython/issues/11590
+  from ast import Module as OriginalModule
+  Module = lambda nodelist, type_ignores: OriginalModule(nodelist)
+
 host = sys.argv[1]
 port = int(sys.argv[2])
 
@@ -148,7 +157,7 @@ while True :
                                   [code.body[-(nhooks + 1)]] if len(code.body) > nhooks else [])
       try:
         for node in to_run_exec:
-          mod = ast.Module([node])
+          mod = Module([node], [])
           code = compile(mod, '<stdin>', 'exec')
           exec(code, _zcUserQueryNameSpace)
 
@@ -158,7 +167,7 @@ while True :
           exec(code, _zcUserQueryNameSpace)
 
         for node in to_run_hooks:
-          mod = ast.Module([node])
+          mod = Module([node], [])
           code = compile(mod, '<stdin>', 'exec')
           exec(code, _zcUserQueryNameSpace)
 
diff --git a/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java b/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java
index 72ab25b..65e3786 100644
--- a/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java
+++ b/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java
@@ -29,6 +29,8 @@ import org.apache.zeppelin.interpreter.InterpreterResultMessage;
 import org.apache.zeppelin.interpreter.LazyOpenInterpreter;
 import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -49,6 +51,10 @@ import static org.junit.Assert.fail;
 
 public class IPythonInterpreterTest extends BasePythonInterpreterTest {
 
+  private static final Logger LOGGER = LoggerFactory.getLogger(IPythonInterpreterTest.class);
+
+  protected boolean enableBokehTest = true;
+
   protected Properties initIntpProperties() {
     Properties properties = new Properties();
     properties.setProperty("zeppelin.python.maxResult", "3");
@@ -215,14 +221,18 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
     assertTrue("No Image Output", hasImageOutput);
     assertTrue("No Line Text", hasLineText);
 
+    if (!enableBokehTest) {
+      LOGGER.info("Bokeh test is skipped");
+      return;
+    }
+
     // bokeh
     // bokeh initialization
     context = getInterpreterContext();
     result = interpreter.interpret("from bokeh.io import output_notebook, 
show\n" +
         "from bokeh.plotting import figure\n" +
-        "import bkzep\n" +
-        "output_notebook(notebook_type='zeppelin')", context);
-    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+        "output_notebook()", context);
+    assertEquals(context.out.toString(), InterpreterResult.Code.SUCCESS, result.code());
     interpreterResultMessages = context.out.toInterpreterResultMessage();
 
     if (interpreterResultMessages.size() == 3) {
@@ -299,14 +309,14 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
             InterpreterResult.Code.SUCCESS, result.code());
     interpreterResultMessages = context.out.toInterpreterResultMessage();
 
-    assertEquals(context.out.toString(), 5, interpreterResultMessages.size());
+    assertEquals(interpreterResultMessages.size() + ":" + context.out.toString(),
+            3, interpreterResultMessages.size());
     // the first message is the warning text message.
+    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(0).getType());
     assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(1).getType());
     assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(2).getType());
-    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(3).getType());
-    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(4).getType());
     // docs_json is the source data of plotting which bokeh would use to render the plotting.
-    assertTrue(interpreterResultMessages.get(4).getData().contains("docs_json"));
+    assertTrue(interpreterResultMessages.get(2).getData().contains("docs_json"));
   }
 
 
diff --git a/testing/env_python_3.7_with_R.yml b/testing/env_python_3.7_with_R.yml
new file mode 100644
index 0000000..b204a0b
--- /dev/null
+++ b/testing/env_python_3.7_with_R.yml
@@ -0,0 +1,33 @@
+name: python_3_with_R
+channels:
+  - conda-forge
+  - defaults
+dependencies:
+  - pycodestyle
+  - scipy
+  - numpy=1.19.5
+  - grpcio
+  - protobuf
+  - pandasql
+  - ipython
+  - ipykernel
+  - jupyter_client=5
+  - hvplot
+  - plotnine
+  - seaborn
+  - intake
+  - intake-parquet
+  - intake-xarray
+  - altair
+  - vega_datasets
+  - plotly
+  - pip
+  - r-base=3
+  - r-data.table
+  - r-evaluate
+  - r-base64enc
+  - r-knitr
+  - r-ggplot2
+  - r-irkernel
+  - r-shiny
+  - r-googlevis
diff --git a/testing/env_python_3.8_with_R.yml b/testing/env_python_3.8_with_R.yml
new file mode 100644
index 0000000..b204a0b
--- /dev/null
+++ b/testing/env_python_3.8_with_R.yml
@@ -0,0 +1,33 @@
+name: python_3_with_R
+channels:
+  - conda-forge
+  - defaults
+dependencies:
+  - pycodestyle
+  - scipy
+  - numpy=1.19.5
+  - grpcio
+  - protobuf
+  - pandasql
+  - ipython
+  - ipykernel
+  - jupyter_client=5
+  - hvplot
+  - plotnine
+  - seaborn
+  - intake
+  - intake-parquet
+  - intake-xarray
+  - altair
+  - vega_datasets
+  - plotly
+  - pip
+  - r-base=3
+  - r-data.table
+  - r-evaluate
+  - r-base64enc
+  - r-knitr
+  - r-ggplot2
+  - r-irkernel
+  - r-shiny
+  - r-googlevis
diff --git a/testing/env_python_3_with_R.yml b/testing/env_python_3_with_R.yml
index b0c77d2..d4acf16 100644
--- a/testing/env_python_3_with_R.yml
+++ b/testing/env_python_3_with_R.yml
@@ -4,30 +4,31 @@ channels:
   - defaults
 dependencies:
   - pycodestyle
-  - numpy=1
-  - pandas=0.25
-  - scipy=1
-  - grpcio=1.22.0
-  - hvplot=0.5.2
-  - protobuf=3
-  - pandasql=0.7.3
-  - ipython=7
-  - matplotlib=3
-  - ipykernel=5
+  - scipy
+  - numpy=1.19.5
+  - grpcio
+  - protobuf
+  - pandasql
+  - ipython
+  - ipykernel
   - jupyter_client=5
-  - bokeh=1.3.4
-  - panel=0.6.0
-  - holoviews=1.12.3
-  - pyyaml=3
+  - hvplot
+  - plotnine
+  - seaborn
+  - bokeh
+  - intake
+  - intake-parquet
+  - intake-xarray
+  - altair
+  - vega_datasets
+  - plotly
   - pip
-  - pip:
-    - bkzep==0.6.1
-
   - r-base=3
+  - r-data.table
   - r-evaluate
   - r-base64enc
   - r-knitr
   - r-ggplot2
   - r-irkernel
   - r-shiny
-  - r-googlevis
+  - r-googlevis
\ No newline at end of file
diff --git a/testing/env_python_3_with_R_and_tensorflow.yml b/testing/env_python_3_with_R_and_tensorflow.yml
index 8813cde..a7c8fff 100644
--- a/testing/env_python_3_with_R_and_tensorflow.yml
+++ b/testing/env_python_3_with_R_and_tensorflow.yml
@@ -4,26 +4,27 @@ channels:
   - defaults
 dependencies:
   - pycodestyle
-  - numpy=1
-  - pandas=0.25
-  - scipy=1
-  - grpcio=1.22.0
-  - hvplot=0.5.2
-  - protobuf=3
-  - pandasql=0.7.3
-  - ipython=7
-  - matplotlib=3
-  - ipykernel=5
+  - scipy
+  - numpy=1.19.5
+  - grpcio
+  - protobuf
+  - pandasql
+  - ipython
+  - ipykernel
   - jupyter_client=5
-  - bokeh=1.3.4
-  - panel=0.6.0
-  - holoviews=1.12.3
-  - pyyaml=3
+  - hvplot
+  - plotnine
+  - seaborn
+  - bokeh
+  - intake
+  - intake-parquet
+  - intake-xarray
+  - altair
+  - vega_datasets
+  - plotly
   - pip
-  - pip:
-    - bkzep==0.6.1
-
   - r-base=3
+  - r-data.table
   - r-evaluate
   - r-base64enc
   - r-knitr
@@ -31,5 +32,4 @@ dependencies:
   - r-irkernel
   - r-shiny
   - r-googlevis
-
   - tensorflow=1.13
diff --git a/testing/env_python_3_with_flink_110.yml b/testing/env_python_3_with_flink_110.yml
index 7deb648..9255e5c 100644
--- a/testing/env_python_3_with_flink_110.yml
+++ b/testing/env_python_3_with_flink_110.yml
@@ -3,22 +3,24 @@ channels:
   - conda-forge
   - defaults
 dependencies:
+  - pycodestyle
+  - scipy
+  - numpy=1.19.5
+  - grpcio
+  - protobuf
+  - pandasql
+  - ipython
+  - ipykernel
+  - jupyter_client=5
+  - hvplot
+  - plotnine
+  - seaborn
+  - intake
+  - intake-parquet
+  - intake-xarray
+  - altair
+  - vega_datasets
+  - plotly
   - pip
   - pip:
       - apache-flink==1.10.2
-      - bkzep==0.6.1
-  - numpy==1.17.3
-  - pandas==0.25.0
-  - scipy==1.3.1
-  - grpcio==1.34.1
-  - hvplot==0.5.2
-  - protobuf==3.10.0
-  - pandasql==0.7.3
-  - ipython==7.8.0
-  - matplotlib==3.0.3
-  - ipykernel==5.1.2
-  - jupyter_client==5.3.4
-  - bokeh==1.3.4
-  - panel==0.6.0
-  - holoviews==1.12.3
-  - pycodestyle==2.5.0
diff --git a/testing/env_python_3_with_flink_111.yml b/testing/env_python_3_with_flink_111.yml
index 653763f..33d5733 100644
--- a/testing/env_python_3_with_flink_111.yml
+++ b/testing/env_python_3_with_flink_111.yml
@@ -4,22 +4,24 @@ channels:
   - defaults
 dependencies:
   - pycodestyle
-  - numpy=1
-  - pandas=0.25
-  - scipy=1
-  - grpcio=1.22.0
-  - hvplot=0.5.2
-  - protobuf=3
-  - pandasql=0.7.3
-  - ipython=7
-  - matplotlib=3
-  - ipykernel=5
+  - scipy
+  - numpy=1.19.5
+  - grpcio
+  - protobuf
+  - pandasql
+  - ipython
+  - ipykernel
   - jupyter_client=5
-  - bokeh=1.3.4
-  - panel=0.6.0
-  - holoviews=1.12.3
-  - pyyaml=3
+  - hvplot
+  - plotnine
+  - seaborn
+  - intake
+  - intake-parquet
+  - intake-xarray
+  - altair
+  - vega_datasets
+  - plotly
   - pip
   - pip:
-    - bkzep==0.6.1
-    - apache-flink==1.11.1
+      - apache-flink==1.11.3
+
diff --git a/testing/env_python_3_with_flink_112.yml b/testing/env_python_3_with_flink_112.yml
index 35b4903..6d67eb0 100644
--- a/testing/env_python_3_with_flink_112.yml
+++ b/testing/env_python_3_with_flink_112.yml
@@ -3,24 +3,25 @@ channels:
   - conda-forge
   - defaults
 dependencies:
-  - pip
-  - pip:
-      - bkzep==0.6.1
-      - apache-flink==1.12.0
   - pycodestyle
-  - numpy=1
-  - pandas=0.25
-  - scipy=1
-  - grpcio=1.22.0
-  - hvplot=0.5.2
-  - protobuf=3
-  - pandasql=0.7.3
-  - ipython=7
-  - matplotlib=3
-  - ipykernel=5
+  - scipy
+  - numpy=1.19.5
+  - grpcio
+  - protobuf
+  - pandasql
+  - ipython
+  - ipykernel
   - jupyter_client=5
-  - bokeh=1.3.4
-  - panel=0.6.0
-  - holoviews=1.12.3
-  - pyyaml=3
+  - hvplot
+  - plotnine
+  - seaborn
+  - intake
+  - intake-parquet
+  - intake-xarray
+  - altair
+  - vega_datasets
+  - plotly
+  - pip
+  - pip:
+      - apache-flink==1.12.2
 
diff --git a/testing/env_python_3_with_flink_113.yml b/testing/env_python_3_with_flink_113.yml
index c15bf27..c1f352b 100644
--- a/testing/env_python_3_with_flink_113.yml
+++ b/testing/env_python_3_with_flink_113.yml
@@ -3,24 +3,25 @@ channels:
   - conda-forge
   - defaults
 dependencies:
-  - pip
-  - pip:
-      - bkzep==0.6.1
-      - apache-flink==1.13.0
   - pycodestyle
-  - numpy=1
-  - pandas=0.25
-  - scipy=1
-  - grpcio=1.22.0
-  - hvplot=0.5.2
-  - protobuf=3
-  - pandasql=0.7.3
-  - ipython=7
-  - matplotlib=3
-  - ipykernel=5
+  - scipy
+  - numpy=1.19.5
+  - grpcio
+  - protobuf
+  - pandasql
+  - ipython
+  - ipykernel
   - jupyter_client=5
-  - bokeh=1.3.4
-  - panel=0.6.0
-  - holoviews=1.12.3
-  - pyyaml=3
+  - hvplot
+  - plotnine
+  - seaborn
+  - intake
+  - intake-parquet
+  - intake-xarray
+  - altair
+  - vega_datasets
+  - plotly
+  - pip
+  - pip:
+      - apache-flink==1.13.1
 
