This is an automated email from the ASF dual-hosted git repository.

omartushevskyi pushed a commit to branch DLAB-456
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git

commit 0858bba082403fab7644968aafb4d63bbcc61e77
Author: Oleh Martushevskyi <oleh_martushevs...@epam.com>
AuthorDate: Fri Apr 5 16:26:59 2019 +0300

    [DLAB-456]: fixed issue with header structure
---
 .../src/general/lib/aws/actions_lib.py             | 41 ++++++++++++----------
 .../src/general/lib/azure/actions_lib.py           | 41 ++++++++++++----------
 .../src/general/lib/gcp/actions_lib.py             | 41 ++++++++++++----------
 3 files changed, 69 insertions(+), 54 deletions(-)
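
The change below, applied identically to the AWS, Azure and GCP libraries, preserves the DLab-generated comment header when spark-defaults.conf is regenerated with user-supplied overrides: the header lines are captured from the template with grep "^#", comment lines in the existing config are skipped while merging properties, and the captured header is written back as the first content of the new file (instead of the empty line the old echo "" produced). As a minimal standalone sketch of that merge logic, the function below uses plain file I/O rather than the fabric sudo/local helpers in the actual code; the function name and file paths are illustrative only.

import ast


def merge_spark_defaults(template_path, conf_path, spark_configs):
    """Rewrite conf_path, keeping the '#' header taken from template_path.

    spark_configs is the literal the DLab code evaluates with ast.literal_eval,
    e.g. "[{'Classification': 'spark-defaults', 'Properties': {...}}]".
    """
    # Equivalent of: cat <template> | grep "^#" -- capture the comment header.
    with open(template_path) as f:
        header = [line.rstrip('\n') for line in f if line.startswith('#')]

    spark_configurations = ast.literal_eval(spark_configs)
    with open(conf_path) as f:
        current_spark_properties = [line.rstrip('\n') for line in f]

    new_spark_defaults = set()
    for param in current_spark_properties:
        # Skip comment lines so they are not merged as properties.
        if param.strip() and not param.startswith('#'):
            for config in spark_configurations:
                if config['Classification'] == 'spark-defaults':
                    for prop, value in config['Properties'].items():
                        if prop == param.split(' ')[0]:
                            param = prop + ' ' + value          # override existing key
                        else:
                            new_spark_defaults.add(prop + ' ' + value)  # carry new keys
            new_spark_defaults.add(param)

    # Write the preserved header first, then the merged properties.
    with open(conf_path, 'w') as f:
        f.write('\n'.join(header) + '\n')
        for entry in sorted(new_spark_defaults):
            f.write(entry + '\n')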

diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index f6f37d0..642d495 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -1588,21 +1588,23 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
             sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                                spark_memory))
         if 'spark_configurations' in os.environ:
+            dlab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
             new_spark_defaults = list()
             spark_defaults = sudo('cat /opt/spark/conf/spark-defaults.conf')
             current_spark_properties = spark_defaults.split('\n')
             for param in current_spark_properties:
-                for config in spark_configurations:
-                    if config['Classification'] == 'spark-defaults':
-                        for property in config['Properties']:
-                            if property == param.split(' ')[0]:
-                                param = property + ' ' + config['Properties'][property]
-                            else:
-                                new_spark_defaults.append(property + ' ' + config['Properties'][property])
-                new_spark_defaults.append(param)
+                if param.split(' ')[0] != '#':
+                    for config in spark_configurations:
+                        if config['Classification'] == 'spark-defaults':
+                            for property in config['Properties']:
+                                if property == param.split(' ')[0]:
+                                    param = property + ' ' + config['Properties'][property]
+                                else:
+                                    new_spark_defaults.append(property + ' ' + config['Properties'][property])
+                    new_spark_defaults.append(param)
             new_spark_defaults = set(new_spark_defaults)
-            sudo('echo "" > /opt/spark/conf/spark-defaults.conf')
+            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(dlab_header))
             for prop in new_spark_defaults:
                 prop = prop.rstrip()
                 sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
@@ -1755,21 +1757,24 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
     local('cp -f /tmp/{0}/notebook_spark-defaults_local.conf  {1}spark/conf/spark-defaults.conf'.format(cluster_name,
                                                                                                          cluster_dir))
     if spark_configs:
+        dlab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
+                            capture=True)
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
         spark_defaults = local('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture=True)
         current_spark_properties = spark_defaults.split('\n')
         for param in current_spark_properties:
-            for config in spark_configurations:
-                if config['Classification'] == 'spark-defaults':
-                    for property in config['Properties']:
-                        if property == param.split(' ')[0]:
-                            param = property + ' ' + config['Properties'][property]
-                        else:
-                            new_spark_defaults.append(property + ' ' + config['Properties'][property])
-            new_spark_defaults.append(param)
+            if param.split(' ')[0] != '#':
+                for config in spark_configurations:
+                    if config['Classification'] == 'spark-defaults':
+                        for property in config['Properties']:
+                            if property == param.split(' ')[0]:
+                                param = property + ' ' + config['Properties'][property]
+                            else:
+                                new_spark_defaults.append(property + ' ' + config['Properties'][property])
+                new_spark_defaults.append(param)
         new_spark_defaults = set(new_spark_defaults)
-        local('echo "" > {0}/spark/conf/spark-defaults.conf'.format(cluster_dir))
+        local("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(dlab_header, cluster_dir))
         for prop in new_spark_defaults:
             prop = prop.rstrip()
             local('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir))
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index a8f8774..a571a28 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -1123,21 +1123,23 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
             sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                                spark_memory))
         if 'spark_configurations' in os.environ:
+            dlab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
             new_spark_defaults = list()
             spark_defaults = sudo('cat /opt/spark/conf/spark-defaults.conf')
             current_spark_properties = spark_defaults.split('\n')
             for param in current_spark_properties:
-                for config in spark_configurations:
-                    if config['Classification'] == 'spark-defaults':
-                        for property in config['Properties']:
-                            if property == param.split(' ')[0]:
-                                param = property + ' ' + config['Properties'][property]
-                            else:
-                                new_spark_defaults.append(property + ' ' + config['Properties'][property])
-                new_spark_defaults.append(param)
+                if param.split(' ')[0] != '#':
+                    for config in spark_configurations:
+                        if config['Classification'] == 'spark-defaults':
+                            for property in config['Properties']:
+                                if property == param.split(' ')[0]:
+                                    param = property + ' ' + config['Properties'][property]
+                                else:
+                                    new_spark_defaults.append(property + ' ' + config['Properties'][property])
+                    new_spark_defaults.append(param)
             new_spark_defaults = set(new_spark_defaults)
-            sudo('echo "" > /opt/spark/conf/spark-defaults.conf')
+            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(dlab_header))
             for prop in new_spark_defaults:
                 prop = prop.rstrip()
                 sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
@@ -1166,21 +1168,24 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
     else:
         local('cp -f /opt/hadoop/etc/hadoop/core-site.xml {}hadoop/etc/hadoop/core-site.xml'.format(cluster_dir))
     if spark_configs:
+        dlab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
+                            capture=True)
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
         spark_defaults = local('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture=True)
         current_spark_properties = spark_defaults.split('\n')
         for param in current_spark_properties:
-            for config in spark_configurations:
-                if config['Classification'] == 'spark-defaults':
-                    for property in config['Properties']:
-                        if property == param.split(' ')[0]:
-                            param = property + ' ' + config['Properties'][property]
-                        else:
-                            new_spark_defaults.append(property + ' ' + config['Properties'][property])
-            new_spark_defaults.append(param)
+            if param.split(' ')[0] != '#':
+                for config in spark_configurations:
+                    if config['Classification'] == 'spark-defaults':
+                        for property in config['Properties']:
+                            if property == param.split(' ')[0]:
+                                param = property + ' ' + config['Properties'][property]
+                            else:
+                                new_spark_defaults.append(property + ' ' + config['Properties'][property])
+                new_spark_defaults.append(param)
         new_spark_defaults = set(new_spark_defaults)
-        local('echo "" > {0}/spark/conf/spark-defaults.conf'.format(cluster_dir))
+        local("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(dlab_header, cluster_dir))
         for prop in new_spark_defaults:
             prop = prop.rstrip()
             local('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir))
diff --git a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
index 5de8aa5..f32bae9 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
@@ -1284,21 +1284,23 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
             sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                                spark_memory))
         if 'spark_configurations' in os.environ:
+            dlab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
             new_spark_defaults = list()
             spark_defaults = sudo('cat /opt/spark/conf/spark-defaults.conf')
             current_spark_properties = spark_defaults.split('\n')
             for param in current_spark_properties:
-                for config in spark_configurations:
-                    if config['Classification'] == 'spark-defaults':
-                        for property in config['Properties']:
-                            if property == param.split(' ')[0]:
-                                param = property + ' ' + config['Properties'][property]
-                            else:
-                                new_spark_defaults.append(property + ' ' + config['Properties'][property])
-                new_spark_defaults.append(param)
+                if param.split(' ')[0] != '#':
+                    for config in spark_configurations:
+                        if config['Classification'] == 'spark-defaults':
+                            for property in config['Properties']:
+                                if property == param.split(' ')[0]:
+                                    param = property + ' ' + config['Properties'][property]
+                                else:
+                                    new_spark_defaults.append(property + ' ' + config['Properties'][property])
+                    new_spark_defaults.append(param)
             new_spark_defaults = set(new_spark_defaults)
-            sudo('echo "" > /opt/spark/conf/spark-defaults.conf')
+            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(dlab_header))
             for prop in new_spark_defaults:
                 prop = prop.rstrip()
                 sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
@@ -1392,21 +1394,24 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
                                                                                                          cluster_dir))
     local('cp -f /opt/spark/conf/core-site.xml {}spark/conf/'.format(cluster_dir))
     if spark_configs:
+        dlab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
+                            capture=True)
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
         spark_defaults = local('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture=True)
         current_spark_properties = spark_defaults.split('\n')
         for param in current_spark_properties:
-            for config in spark_configurations:
-                if config['Classification'] == 'spark-defaults':
-                    for property in config['Properties']:
-                        if property == param.split(' ')[0]:
-                            param = property + ' ' + config['Properties'][property]
-                        else:
-                            new_spark_defaults.append(property + ' ' + config['Properties'][property])
-            new_spark_defaults.append(param)
+            if param.split(' ')[0] != '#':
+                for config in spark_configurations:
+                    if config['Classification'] == 'spark-defaults':
+                        for property in config['Properties']:
+                            if property == param.split(' ')[0]:
+                                param = property + ' ' + config['Properties'][property]
+                            else:
+                                new_spark_defaults.append(property + ' ' + config['Properties'][property])
+                new_spark_defaults.append(param)
         new_spark_defaults = set(new_spark_defaults)
-        local('echo "" > {0}/spark/conf/spark-defaults.conf'.format(cluster_dir))
+        local("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(dlab_header, cluster_dir))
         for prop in new_spark_defaults:
             prop = prop.rstrip()
             local('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir))
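
For reference, the spark_configurations value that configure_local_spark reads from os.environ and configure_dataengine_spark receives as spark_configs is evaluated with ast.literal_eval and then walked as a list of classification dictionaries, so a value of the following shape satisfies the merge loops above. The property names here are examples only, not values taken from this commit.

spark_configurations = [
    {
        'Classification': 'spark-defaults',
        'Properties': {
            # Example properties only; keys a deployment passes are either
            # overridden in place or appended by the merge loop.
            'spark.executor.memory': '4g',
            'spark.serializer': 'org.apache.spark.serializer.KryoSerializer',
        },
    },
]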

