This is an automated email from the ASF dual-hosted git repository.

dmysakovets pushed a commit to branch DLAB-433
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git


The following commit(s) were added to refs/heads/DLAB-433 by this push:
     new d066bdc  [DLAB-623] Removed Spark installation and added it to Dockerfile_jupyter [bugfixing]
d066bdc is described below

commit d066bdc5a788a89f6a42a5ee6d04fe0b9dc31f20
Author: Dyoma33 <demian.mysakove...@gmail.com>
AuthorDate: Mon May 13 17:07:29 2019 +0300

    [DLAB-623] Removed Spark installation and added it to Dockerfile_jupyter [bugfixing]
---
 infrastructure-provisioning/src/general/lib/os/fab.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index 52441b7..fb21896 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -247,14 +247,14 @@ def ensure_jupyter_docker_files(os_user, jupyter_dir, spark_script, jupyter_conf
            sudo('touch {}'.format(spark_script))
            sudo('echo "#!/bin/bash" >> {}'.format(spark_script))
            sudo(
-               'echo "PYJ=`find /opt/spark/ -name \'*py4j*.zip\' | tr \'\\n\' \':\' | sed \'s|:$||g\'`; sed -i \'s|PY4J|'$PYJ\'|g\' /tmp/pyspark_local_template.json" >> {}'.format(
+               'echo "PYJ=`find /opt/spark/ -name \'*py4j*.zip\' | tr \'\\n\' \':\' | sed \'s|:$||g\'`; sed -i \'s|PY4J|\'$PYJ\'|g\' /tmp/pyspark_local_template.json" >> {}'.format(
                spark_script))
            sudo(
                'echo "sed -i 
\'14s/:",/:\/home\/dlab-user\/caffe\/python:\/home\/dlab-user\/pytorch\/build:",/\'
 /tmp/pyspark_local_template.json" >> {}'.format(
                    spark_script))
            sudo('echo \'sed -i "s|SP_VER|2.3.2|g" 
/tmp/pyspark_local_template.json\' >> {}'.format(spark_script))
            sudo(
-               'echo "PYJ=`find /opt/spark/ -name \'*py4j*.zip\' | tr \'\\n\' \':\' | sed \'s|:$||g\'`; sed -i \'s|PY4J|'$PYJ\'|g\' /tmp/py3spark_local_template.json" >> {}'.format(
+               'echo "PYJ=`find /opt/spark/ -name \'*py4j*.zip\' | tr \'\\n\' \':\' | sed \'s|:$||g\'`; sed -i \'s|PY4J|\'$PYJ\'|g\' /tmp/py3spark_local_template.json" >> {}'.format(
                spark_script))
            sudo(
                'echo "sed -i 
\'14s/:",/:\/home\/dlab-user\/caffe\/python:\/home\/dlab-user\/pytorch\/build:",/\'
 /tmp/py3spark_local_template.json" >> {}'.format(
@@ -280,7 +280,7 @@ def ensure_jupyter_docker_files(os_user, jupyter_dir, spark_script, jupyter_conf
             sys.exit(1)
 
 
-def ensure_pyspark_local_kernel(os_user, pyspark_local_path_dir, templates_dir, spark_version, jupyter_dir):
+def ensure_pyspark_local_kernel(os_user, pyspark_local_path_dir, templates_dir, spark_version):
     if not exists('/home/' + os_user + '/.ensure_dir/pyspark_local_kernel_ensured'):
         try:
             sudo('mkdir -p ' + pyspark_local_path_dir)
@@ -296,7 +296,7 @@ def ensure_pyspark_local_kernel(os_user, pyspark_local_path_dir, templates_dir,
             sys.exit(1)
 
 
-def ensure_py3spark_local_kernel(os_user, py3spark_local_path_dir, templates_dir, spark_version, jupyter_dir):
+def ensure_py3spark_local_kernel(os_user, py3spark_local_path_dir, templates_dir, spark_version):
     if not exists('/home/' + os_user + '/.ensure_dir/py3spark_local_kernel_ensured'):
         try:
             sudo('mkdir -p ' + py3spark_local_path_dir)

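For reference, the only functional change in the first hunk is the extra backslash in front of the quote before $PYJ: in the removed lines the unescaped quote appears to terminate the Python string literal early, leaving $PYJ outside any string, while in the added lines the literal stays balanced and the full sed expression is appended to the generated script. A minimal standalone sketch of the corrected string, assuming a hypothetical spark_script path (the real value is supplied by the caller and the command is executed through fabric's sudo()):

    # Sketch of the corrected quoting only; not the full fabric call.
    spark_script = '/tmp/spark_script.sh'  # hypothetical path, not from the patch

    # \'$PYJ\' keeps the Python literal balanced, so the sed expression reaches bash intact.
    cmd = ('echo "PYJ=`find /opt/spark/ -name \'*py4j*.zip\' | tr \'\\n\' \':\' '
           '| sed \'s|:$||g\'`; sed -i \'s|PY4J|\'$PYJ\'|g\' '
           '/tmp/pyspark_local_template.json" >> {}'.format(spark_script))

    print(cmd)
    # echo "PYJ=`find /opt/spark/ -name '*py4j*.zip' | tr '\n' ':' | sed 's|:$||g'`; sed -i 's|PY4J|'$PYJ'|g' /tmp/pyspark_local_template.json" >> /tmp/spark_script.sh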

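The last two hunks drop the unused jupyter_dir parameter from the two kernel helpers, so every call site now passes one argument fewer. A hypothetical call-site update under that assumption (the variable values and the dlab.fab import path are illustrative, not part of this patch):

    # Hypothetical call-site update; values are illustrative and the import path
    # assumes the library is packaged as dlab.fab, as in the provisioning images.
    from dlab.fab import ensure_pyspark_local_kernel, ensure_py3spark_local_kernel

    os_user = 'dlab-user'
    templates_dir = '/root/templates/'
    spark_version = '2.3.2'
    pyspark_local_path_dir = '/home/dlab-user/.local/share/jupyter/kernels/pyspark_local/'    # assumed path
    py3spark_local_path_dir = '/home/dlab-user/.local/share/jupyter/kernels/py3spark_local/'  # assumed path

    # Before this commit the helpers also took a trailing jupyter_dir argument:
    #   ensure_pyspark_local_kernel(os_user, pyspark_local_path_dir, templates_dir,
    #                               spark_version, jupyter_dir)

    # After this commit the parameter is gone:
    ensure_pyspark_local_kernel(os_user, pyspark_local_path_dir, templates_dir, spark_version)
    ensure_py3spark_local_kernel(os_user, py3spark_local_path_dir, templates_dir, spark_version)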
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@dlab.apache.org
For additional commands, e-mail: commits-h...@dlab.apache.org
