This is an automated email from the ASF dual-hosted git repository. mhladun pushed a commit to branch DATALAB-2610 in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit 596c8cde257dae4649cf85338fa6330f54939221 Author: Marian_Hladun <[email protected]> AuthorDate: Mon Nov 22 11:01:55 2021 +0200 DATALAB-2610 --- infrastructure-provisioning/src/general/lib/gcp/actions_lib.py | 4 ++-- infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py index a699155..c40ff49 100644 --- a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py +++ b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py @@ -1412,8 +1412,8 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'): datalab.fab.conn.sudo('''bash -c 'echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf' ''' .format(memory_type, spark_memory)) if not exists(datalab.fab.conn,'/opt/spark/conf/spark-env.sh'): - datalab.fab.conn.sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh') + datalab.fab.conn.sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh') if os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['conf_cloud_provider'] == 'gcp' and os.environ['application'] == 'deeplearning': java_home = '/usr/lib/jvm/adoptopenjdk-8-hotspot-amd64/jre' else: java_home = datalab.fab.conn.run( diff --git a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py index 5253c91..3f6ad1e 100644 --- a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py +++ b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py @@ -225,7 +225,7 @@ def ensure_jre_jdk(os_user): try: manage_pkg('-y install', 'remote', 'default-jre') manage_pkg('-y install', 'remote', 
'default-jdk') - if os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['conf_cloud_provider'] == 'gcp': + if os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['conf_cloud_provider'] == 'gcp' and os.environ['application'] == 'deeplearning': datalab.fab.conn.sudo( 'wget -qO - https://adoptopenjdk.jfrog.io/adoptopenjdk/api/gpg/key/public | sudo apt-key add -') datalab.fab.conn.sudo('add-apt-repository --yes https://adoptopenjdk.jfrog.io/adoptopenjdk/deb/') --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
