This is an automated email from the ASF dual-hosted git repository. lfrolov pushed a commit to branch DATALAB-2372 in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit b12120de11d44f1016b7776d27c633f7de887c4d Author: leonidfrolov <[email protected]> AuthorDate: Thu Aug 26 10:45:05 2021 +0300 [DATALAB-2372]: changed deeplearning image and how it is handled for azure --- .../files/azure/deeplearning_description.json | 2 +- .../src/general/lib/os/fab.py | 2 +- .../scripts/azure/common_prepare_notebook.py | 30 ++++++++++++++-------- .../service/impl/ExploratoryServiceImpl.java | 2 +- 4 files changed, 22 insertions(+), 14 deletions(-) diff --git a/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json b/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json index 2e6d612..ce864da 100644 --- a/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json +++ b/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json @@ -11,7 +11,7 @@ "template_name": "Data Science Virtual Machine - Ubuntu 18.04", "description": "Pre-configured with NVIDIA drivers, CUDA Toolkit, and cuDNN library for GPU workloads with the following highlights: Jupyter, JupyterLab, and JupyterHub; Deep learning with TensorFlow and PyTorch; Machine learning with xgboost, Vowpal Wabbit, and LightGBM; Julia; Azure SDKs and libraries; Azure Machine Learning SDKs and sample notebooks; R support; Spark. Uses Anaconda virtual environments, configured to keep the different framework installations separate and easy t [...] 
"environment_type": "exploratory", - "version": "microsoft-dsvm:ubuntu-1804:1804-gen2", + "version": "microsoft-dsvm:ubuntu-1804:1804", "vendor": "Azure" } ], diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py index ff1ea4d..4097882 100644 --- a/infrastructure-provisioning/src/general/lib/os/fab.py +++ b/infrastructure-provisioning/src/general/lib/os/fab.py @@ -261,7 +261,7 @@ def put_resource_status(resource, status, datalab_path, os_user, hostname): def configure_jupyter(os_user, jupyter_conf_file, templates_dir, jupyter_version, exploratory_name): if not exists(conn,'/home/' + os_user + '/.ensure_dir/jupyter_ensured'): try: - if os.environ['conf_deeplearning_cloud_ami'] == 'false' or os.environ['application'] != 'deeplearning': + if os.environ['conf_deeplearning_cloud_ami'] == 'false' or os.environ['application'] != 'deeplearning' or os.environ['conf_cloud_provider'] == 'azure': conn.sudo('pip3 install notebook=={} --no-cache-dir'.format(jupyter_version)) conn.sudo('pip3 install jupyter --no-cache-dir') conn.sudo('rm -rf {}'.format(jupyter_conf_file)) diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py index bdfc5ff..53b767a 100644 --- a/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py +++ b/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py @@ -86,7 +86,10 @@ if __name__ == "__main__": ssh_key_path = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name']) key = RSA.importKey(open(ssh_key_path, 'rb').read()) notebook_config['public_ssh_key'] = key.publickey().exportKey("OpenSSH").decode('UTF-8') - notebook_config['primary_disk_size'] = '32' + if os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['application'] == 'deeplearning': + notebook_config['primary_disk_size'] = '150' 
+ else: + notebook_config['primary_disk_size'] = '32' notebook_config['instance_storage_account_type'] = (lambda x: 'Standard_LRS' if x in ('deeplearning', 'tensor') else 'Premium_LRS')(os.environ['application']) if os.environ['conf_os_family'] == 'debian': @@ -109,19 +112,24 @@ if __name__ == "__main__": notebook_config['service_base_name'], notebook_config['endpoint_name'], notebook_config['application']) - notebook_config['notebook_image_name'] = (lambda x: '{0}-{1}-{2}-{3}-{4}'.format( - notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_name'], - os.environ['application'], os.environ['notebook_image_name']).replace('_', '-') if (x != 'None' and x != '') - else notebook_config['expected_image_name'])(str(os.environ.get('notebook_image_name'))) + print('Searching pre-configured images') notebook_config['image_name'] = os.environ['azure_{}_image_name'.format(os.environ['conf_os_family'])] - if AzureMeta.get_image(notebook_config['resource_group_name'], notebook_config['notebook_image_name']): - notebook_config['image_name'] = notebook_config['notebook_image_name'] - notebook_config['image_type'] = 'pre-configured' - print('Pre-configured image found. Using: {}'.format(notebook_config['notebook_image_name'])) + if os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['application'] == 'deeplearning': + notebook_config['image_name'] = os.environ['notebook_image_name'] + print('Pre-configured deeplearning image found. Using: {}'.format(notebook_config['image_name'])) else: - os.environ['notebook_image_name'] = notebook_config['image_name'] - print('No pre-configured image found. 
Using default one: {}'.format(notebook_config['image_name'])) + notebook_config['notebook_image_name'] = (lambda x: '{0}-{1}-{2}-{3}-{4}'.format( + notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_name'], + os.environ['application'], os.environ['notebook_image_name']).replace('_', '-') if (x != 'None' and x != '') + else notebook_config['expected_image_name'])(str(os.environ.get('notebook_image_name'))) + if AzureMeta.get_image(notebook_config['resource_group_name'], notebook_config['notebook_image_name']): + notebook_config['image_name'] = notebook_config['notebook_image_name'] + notebook_config['image_type'] = 'pre-configured' + print('Pre-configured image found. Using: {}'.format(notebook_config['notebook_image_name'])) + else: + os.environ['notebook_image_name'] = notebook_config['image_name'] + print('No pre-configured image found. Using default one: {}'.format(notebook_config['image_name'])) except Exception as err: print("Failed to generate variables dictionary.") datalab.fab.append_result("Failed to generate variables dictionary.", str(err)) diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/ExploratoryServiceImpl.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/ExploratoryServiceImpl.java index f5e148d..60d4c84 100644 --- a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/ExploratoryServiceImpl.java +++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/ExploratoryServiceImpl.java @@ -152,7 +152,7 @@ public class ExploratoryServiceImpl implements ExploratoryService { private boolean isDeepLearningOnAwsOrAzure(Exploratory exploratory, EndpointDTO endpointDTO) { return exploratory.getVersion().equals("Deep Learning AMI (Ubuntu 18.04) Version 42.1") || - exploratory.getVersion().equals("microsoft-dsvm:ubuntu-1804:1804-gen2"); + 
exploratory.getVersion().equals("microsoft-dsvm:ubuntu-1804:1804"); } @Override --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
