This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-1408
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git


The following commit(s) were added to refs/heads/DATALAB-1408 by this push:
     new b1cbe3004 [DATALAB-1408]: fixed some variables and imports
b1cbe3004 is described below

commit b1cbe3004d3013275199f53a891bb707b7529976
Author: leonidfrolov <[email protected]>
AuthorDate: Tue Sep 6 11:00:56 2022 +0300

    [DATALAB-1408]: fixed some variables and imports
---
 .../src/general/conf/datalab.ini                   |  2 --
 .../src/general/lib/gcp/meta_lib.py                |  2 +-
 .../scripts/azure/dataengine-service_prepare.py    | 25 +++++++---------------
 .../src/general/scripts/azure/ssn_configure.py     |  3 ++-
 4 files changed, 11 insertions(+), 21 deletions(-)

diff --git a/infrastructure-provisioning/src/general/conf/datalab.ini 
b/infrastructure-provisioning/src/general/conf/datalab.ini
index f4e80dc67..6a79fe1a5 100644
--- a/infrastructure-provisioning/src/general/conf/datalab.ini
+++ b/infrastructure-provisioning/src/general/conf/datalab.ini
@@ -218,8 +218,6 @@ locale = en-US
 datalake_enable = false
 ### Azure login application ID
 # application_id =
-### Azure HDinsight version
-hdinsight_version = 4.0
 
 [gcp]
 ### GCP project ID
diff --git a/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py 
b/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
index be5d17b0c..9af2170f7 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
@@ -758,7 +758,7 @@ class GCPMeta:
 
     def dataproc_waiter(self, labels):
         if os.path.exists(
-                '/response/.emr_creating_' + os.environ['exploratory_name']) 
or self.get_not_configured_dataproc(
+                '/response/.dataproc_creating_' + 
os.environ['exploratory_name']) or self.get_not_configured_dataproc(
                 os.environ['notebook_instance_name']):
             with hide('stderr', 'running', 'warnings'):
                 subprocess.run("echo 'Some Dataproc cluster is still being 
created/terminated, waiting..'", shell=True, check=True)
diff --git 
a/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_prepare.py
 
b/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_prepare.py
index 64b5a2857..2f47d74ca 100644
--- 
a/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_prepare.py
+++ 
b/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_prepare.py
@@ -21,24 +21,17 @@
 #
 # 
******************************************************************************
 
-import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import datalab.logger
 import multiprocessing
 import os
 import sys
 import traceback
 import subprocess
-import Crypto.PublicKey
-import fabric
-import azure.mgmt.hdinsight.models
-from azure.mgmt.hdinsight.models import *
-from azure.mgmt.core import *
-from azure.common import *
-from azure.core import *
-from datalab.actions_lib import *
+from Crypto.PublicKey import RSA
+from datalab.logger import logging
+from fabric import *
 
 if __name__ == "__main__":
     try:
@@ -53,7 +46,7 @@ if __name__ == "__main__":
             hdinsight_conf['computational_name'] = 
os.environ['computational_name'].replace('_', '-').lower()
         else:
             hdinsight_conf['computational_name'] = ''
-        hdinsight_conf['hdinsight_worker_count'] = 
int(os.environ['dataengine_instance_count']) - 2
+        hdinsight_conf['hdinsight_worker_count'] = 
int(os.environ['hdinsight_count']) - 2
         hdinsight_conf['service_base_name'] = 
(os.environ['conf_service_base_name'])
         hdinsight_conf['project_name'] = 
(os.environ['project_name']).replace('_', '-').lower()
         hdinsight_conf['endpoint_name'] = 
(os.environ['endpoint_name']).replace('_', '-').lower()
@@ -81,7 +74,7 @@ if __name__ == "__main__":
         if hdinsight_conf['custom_tag']:
             hdinsight_conf['cluster_tags']['custom_tag'] = 
hdinsight_conf['custom_tag']
 
-        hdinsight_conf['release_label'] = os.environ['azure_hdinsight_version']
+        hdinsight_conf['release_label'] = os.environ['hdinsight_version']
         key = RSA.importKey(open(hdinsight_conf['key_path'], 'rb').read())
         ssh_admin_pubkey = key.publickey().exportKey("OpenSSH").decode('UTF-8')
         hdinsight_conf['container_name'] = 
('{0}-{1}-{2}-{3}-bucket'.format(hdinsight_conf['service_base_name'],
@@ -133,10 +126,10 @@ if __name__ == "__main__":
                  "--master_instance_type {} --worker_instance_type {} " \
                  "--worker_count {} --storage_account_name {} " \
                  "--storage_account_key {} --container_name {} " \
-                 "--tags '{}' --public_key {}"\
+                 "--tags '{}' --public_key '{}'"\
             .format(hdinsight_conf['resource_group_name'], 
hdinsight_conf['cluster_name'],
                     hdinsight_conf['release_label'], hdinsight_conf['region'],
-                    os.environ['azure_dataengine_master_size'], 
os.environ['azure_dataengine_slave_size'],
+                    os.environ['hdinsight_master_instance_type'], 
os.environ['hdinsight_slave_instance_type'],
                     hdinsight_conf['hdinsight_worker_count'], 
hdinsight_conf['storage_account_name'],
                     hdinsight_conf['storage_account_key'], 
hdinsight_conf['container_name'],
                     json.dumps(hdinsight_conf['cluster_tags']), 
ssh_admin_pubkey)
@@ -147,9 +140,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
 
-        keyfile_name = "/root/keys/{}.pem".format(hdinsight_conf['key_name'])
-        subprocess.run('rm 
/response/.hdinsight_creating_{}'.format(os.environ['exploratory_name']), 
shell=True, check=True)
     except Exception as err:
         datalab.fab.append_result("Failed to create hdinsight Cluster.", 
str(err))
-        subprocess.run('rm 
/response/.hdinsight_creating_{}'.format(os.environ['exploratory_name']), 
shell=True, check=True)
+        #subprocess.run('rm 
/response/.hdinsight_creating_{}'.format(os.environ['exploratory_name']), 
shell=True, check=True)
         sys.exit(1)
diff --git 
a/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py 
b/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
index 2b448a1e0..b0d26cb0f 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
@@ -187,7 +187,8 @@ if __name__ == "__main__":
                              {"name": "zeppelin", "tag": "latest"},
                              {"name": "tensor", "tag": "latest"},
                              {"name": "deeplearning", "tag": "latest"},
-                             {"name": "dataengine", "tag": "latest"}]
+                             {"name": "dataengine", "tag": "latest"},
+                             {"name": "dataengine-service", "tag": "latest"}]
         params = "--hostname {} --keyfile {} --additional_config '{}' 
--os_family {} --os_user {} --datalab_path {} " \
                  "--cloud_provider {} --region 
{}".format(ssn_conf['instance_host'], ssn_conf['ssh_key_path'],
                                                           
json.dumps(additional_config), os.environ['conf_os_family'],


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to the commits mailing list.