This is an automated email from the ASF dual-hosted git repository.

dyankiv pushed a commit to branch DATALAB-2645
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 1ae6687cdd1a946b39c0afa2fb8870de7b1c2780
Author: Denys Yankiv <[email protected]>
AuthorDate: Tue Jun 7 15:47:02 2022 +0300

    initial support for hdinsight
---
 .../computational/AzureComputationalResource.java  |  50 ++++++++++
 .../computational/ComputationalConfigAzure.java    |   5 +
 .../computational/ComputationalCreateAzure.java    |  19 ++++
 .../computational/UserComputationalResource.java   |   2 +
 .../azure/ComputationalResourceAzure.java          | 108 ++++++++++++++++++++-
 .../conf/SelfServiceApplicationConfiguration.java  |  17 ++++
 .../azure/ComputationalResourceAzure.java          |  64 +++++++++++-
 .../dto/azure/AzureComputationalCreateForm.java    |  32 ++++++
 .../dto/azure/AzureHDInsightConfiguration.java     |  19 ++++
 .../impl/InfrastructureTemplateServiceImpl.java    |  12 +++
 10 files changed, 326 insertions(+), 2 deletions(-)

diff --git 
a/services/datalab-model/src/main/java/com/epam/datalab/dto/azure/computational/AzureComputationalResource.java
 
b/services/datalab-model/src/main/java/com/epam/datalab/dto/azure/computational/AzureComputationalResource.java
new file mode 100644
index 000000000..71a8abe04
--- /dev/null
+++ 
b/services/datalab-model/src/main/java/com/epam/datalab/dto/azure/computational/AzureComputationalResource.java
@@ -0,0 +1,50 @@
+package com.epam.datalab.dto.azure.computational;
+
+import com.epam.datalab.dto.ResourceURL;
+import com.epam.datalab.dto.SchedulerJobDTO;
+import com.epam.datalab.dto.aws.computational.ClusterConfig;
+import com.epam.datalab.dto.computational.UserComputationalResource;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.Builder;
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import lombok.ToString;
+
+import java.time.LocalDateTime;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+
+@ToString(callSuper = true)
+@Getter
+@EqualsAndHashCode(callSuper = true)
+public class AzureComputationalResource extends UserComputationalResource {
+    @JsonProperty("instance_id")
+    private final String instanceId;
+    @JsonProperty("master_node_shape")
+    private final String masterShape;
+    @JsonProperty("slave_node_shape")
+    private final String slaveShape;
+    @JsonProperty("hdinsight_version")
+    private final String version;
+
+
+    @Builder
+    public AzureComputationalResource(String computationalName, String 
computationalId, String imageName,
+                                    String templateName, String status, Date 
uptime,
+                                    SchedulerJobDTO schedulerJobData, boolean 
reuploadKeyRequired,
+                                    String instanceId, String masterShape, 
String slaveShape, String version,
+                                    List<ResourceURL> resourceURL, 
LocalDateTime lastActivity,
+                                    List<ClusterConfig> config, Map<String, 
String> tags, int totalInstanceCount) {
+        super(computationalName, computationalId, imageName, templateName, 
status, uptime, schedulerJobData,
+                reuploadKeyRequired, resourceURL, lastActivity, tags, 
totalInstanceCount);
+        this.instanceId = instanceId;
+        this.masterShape = masterShape;
+        this.slaveShape = slaveShape;
+//        this.slaveSpot = slaveSpot;
+//        this.slaveSpotPctPrice = slaveSpotPctPrice;
+//        this.slaveNumber = slaveNumber;
+        this.version = version;
+        this.config = config;
+    }
+}
diff --git 
a/services/datalab-model/src/main/java/com/epam/datalab/dto/azure/computational/ComputationalConfigAzure.java
 
b/services/datalab-model/src/main/java/com/epam/datalab/dto/azure/computational/ComputationalConfigAzure.java
new file mode 100644
index 000000000..e28d83097
--- /dev/null
+++ 
b/services/datalab-model/src/main/java/com/epam/datalab/dto/azure/computational/ComputationalConfigAzure.java
@@ -0,0 +1,5 @@
+package com.epam.datalab.dto.azure.computational;
+
+public class ComputationalConfigAzure {
+    private String hdinsightversion;
+}
diff --git 
a/services/datalab-model/src/main/java/com/epam/datalab/dto/azure/computational/ComputationalCreateAzure.java
 
b/services/datalab-model/src/main/java/com/epam/datalab/dto/azure/computational/ComputationalCreateAzure.java
new file mode 100644
index 000000000..16449bb93
--- /dev/null
+++ 
b/services/datalab-model/src/main/java/com/epam/datalab/dto/azure/computational/ComputationalCreateAzure.java
@@ -0,0 +1,19 @@
+package com.epam.datalab.dto.azure.computational;
+
+import com.epam.datalab.dto.base.computational.ComputationalBase;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class ComputationalCreateAzure extends 
ComputationalBase<ComputationalCreateAzure> {
+    @JsonProperty("hdinsight_count")
+    private String count;
+    @JsonProperty("hdinsight_slave_count")
+    private String slaveInstanceCount;
+    @JsonProperty("hdinsight_master_instance_type")
+    private String masterInstanceType;
+    @JsonProperty("hdinsight_slave_instance_type")
+    private String slaveInstanceType;
+    @JsonProperty("hdinsight_version")
+    private String version;
+    @JsonProperty("conf_shared_image_enabled")
+    private String sharedImageEnabled;
+}
diff --git 
a/services/datalab-model/src/main/java/com/epam/datalab/dto/computational/UserComputationalResource.java
 
b/services/datalab-model/src/main/java/com/epam/datalab/dto/computational/UserComputationalResource.java
index dbb35e22b..17ac694e5 100644
--- 
a/services/datalab-model/src/main/java/com/epam/datalab/dto/computational/UserComputationalResource.java
+++ 
b/services/datalab-model/src/main/java/com/epam/datalab/dto/computational/UserComputationalResource.java
@@ -71,6 +71,8 @@ public class UserComputationalResource {
     private String gcpClusterVersion;
     @JsonProperty("emr_version")
     private String awsClusterVersion;
+    @JsonProperty("hdinsight_version")
+    private String azureClusterVersion;
     private int totalInstanceCount;
     protected List<ClusterConfig> config;
     private Map<String, String> tags;
diff --git 
a/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/resources/azure/ComputationalResourceAzure.java
 
b/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/resources/azure/ComputationalResourceAzure.java
index effed2dfb..5f8bc5676 100644
--- 
a/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/resources/azure/ComputationalResourceAzure.java
+++ 
b/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/resources/azure/ComputationalResourceAzure.java
@@ -20,13 +20,27 @@
 package com.epam.datalab.backendapi.resources.azure;
 
 import com.epam.datalab.auth.UserInfo;
+import com.epam.datalab.backendapi.core.Directories;
+import com.epam.datalab.backendapi.core.FileHandlerCallback;
+import com.epam.datalab.backendapi.core.commands.DockerAction;
+import com.epam.datalab.backendapi.core.commands.DockerCommands;
+import com.epam.datalab.backendapi.core.commands.RunDockerCommand;
+import 
com.epam.datalab.backendapi.core.response.handlers.ComputationalCallbackHandler;
+import 
com.epam.datalab.backendapi.core.response.handlers.ComputationalConfigure;
+import com.epam.datalab.backendapi.service.impl.DockerService;
 import com.epam.datalab.backendapi.service.impl.SparkClusterService;
+import com.epam.datalab.dto.azure.computational.ComputationalCreateAzure;
 import com.epam.datalab.dto.azure.computational.SparkComputationalCreateAzure;
+import com.epam.datalab.dto.base.DataEngineType;
+import com.epam.datalab.dto.base.computational.ComputationalBase;
 import com.epam.datalab.dto.computational.ComputationalClusterConfigDTO;
 import com.epam.datalab.dto.computational.ComputationalStartDTO;
 import com.epam.datalab.dto.computational.ComputationalStopDTO;
 import com.epam.datalab.dto.computational.ComputationalTerminateDTO;
+import com.epam.datalab.dto.gcp.computational.GcpComputationalTerminateDTO;
+import com.epam.datalab.exceptions.DatalabException;
 import com.epam.datalab.rest.contracts.ComputationalAPI;
+import com.fasterxml.jackson.core.JsonProcessingException;
 import com.google.inject.Inject;
 import io.dropwizard.auth.Auth;
 import lombok.extern.slf4j.Slf4j;
@@ -37,15 +51,94 @@ import javax.ws.rs.Path;
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 
+import static com.epam.datalab.backendapi.core.commands.DockerAction.CREATE;
+import static com.epam.datalab.backendapi.core.commands.DockerAction.TERMINATE;
+
 @Path("/")
 @Consumes(MediaType.APPLICATION_JSON)
 @Produces(MediaType.APPLICATION_JSON)
 @Slf4j
-public class ComputationalResourceAzure implements ComputationalAPI {
+//public class ComputationalResourceAzure implements ComputationalAPI {
+public class ComputationalResourceAzure extends DockerService implements 
DockerCommands {
+
+    @Inject
+    private ComputationalConfigure computationalConfigure;
 
     @Inject
     private SparkClusterService sparkClusterService;
 
+    @POST
+    @Path(ComputationalAPI.COMPUTATIONAL_CREATE_CLOUD_SPECIFIC)
+    public String create(@Auth UserInfo ui, ComputationalCreateAzure dto) {
+        log.debug("Create computational resources {} for user {}: {}", 
dto.getComputationalName(), ui.getName(), dto);
+        String uuid = DockerCommands.generateUUID();
+        folderListenerExecutor.start(configuration.getImagesDirectory(),
+                configuration.getResourceStatusPollTimeout(),
+                getFileHandlerCallback(CREATE, uuid, dto));
+        try {
+            commandExecutor.executeAsync(
+                    ui.getName(),
+                    uuid,
+                    commandBuilder.buildCommand(
+                            new RunDockerCommand()
+                                    .withInteractive()
+                                    
.withName(nameContainer(dto.getEdgeUserName(), CREATE,
+                                            dto.getExploratoryName(), 
dto.getComputationalName()))
+                                    
.withVolumeForRootKeys(configuration.getKeyDirectory())
+                                    
.withVolumeForResponse(configuration.getImagesDirectory())
+                                    
.withVolumeForLog(configuration.getDockerLogDirectory(),
+                                            
DataEngineType.CLOUD_SERVICE.getName())
+                                    
.withResource(DataEngineType.CLOUD_SERVICE.getName())
+                                    .withRequestId(uuid)
+                                    
.withConfKeyName(configuration.getAdminKey())
+                                    
.withActionCreate(DataEngineType.getDockerImageName(DataEngineType.CLOUD_SERVICE)),
+                            dto
+                    )
+            );
+        } catch (Exception t) {
+            throw new DatalabException("Could not create computational 
resource cluster", t);
+        }
+        return uuid;
+    }
+
+    @POST
+    @Path(ComputationalAPI.COMPUTATIONAL_TERMINATE_CLOUD_SPECIFIC)
+    public String terminate(@Auth UserInfo ui, GcpComputationalTerminateDTO 
dto) {
+
+        log.debug("Terminate computational resources {} for user {}: {}", 
dto.getComputationalName(), ui.getName(),
+                dto);
+        String uuid = DockerCommands.generateUUID();
+        folderListenerExecutor.start(configuration.getImagesDirectory(),
+                configuration.getResourceStatusPollTimeout(),
+                getFileHandlerCallback(TERMINATE, uuid, dto));
+        try {
+            commandExecutor.executeAsync(
+                    ui.getName(),
+                    uuid,
+                    commandBuilder.buildCommand(
+                            new RunDockerCommand()
+                                    .withInteractive()
+                                    
.withName(nameContainer(dto.getEdgeUserName(), TERMINATE,
+                                            dto.getExploratoryName(), 
dto.getComputationalName()))
+                                    
.withVolumeForRootKeys(configuration.getKeyDirectory())
+                                    
.withVolumeForResponse(configuration.getImagesDirectory())
+                                    
.withVolumeForLog(configuration.getDockerLogDirectory(), DataEngineType
+                                            .CLOUD_SERVICE.getName())
+                                    
.withResource(DataEngineType.CLOUD_SERVICE.getName())
+                                    .withRequestId(uuid)
+                                    
.withConfKeyName(configuration.getAdminKey())
+                                    
.withActionTerminate(DataEngineType.getDockerImageName(DataEngineType
+                                            .CLOUD_SERVICE)),
+                            dto
+                    )
+            );
+        } catch (JsonProcessingException t) {
+            throw new DatalabException("Could not terminate computational 
resources cluster", t);
+        }
+
+        return uuid;
+    }
+
     @POST
     @Path(ComputationalAPI.COMPUTATIONAL_CREATE_SPARK)
     public String create(@Auth UserInfo ui, SparkComputationalCreateAzure dto) 
{
@@ -91,4 +184,17 @@ public class ComputationalResourceAzure implements 
ComputationalAPI {
         return sparkClusterService.updateConfig(ui, config);
     }
 
+    private FileHandlerCallback getFileHandlerCallback(DockerAction action, 
String uuid, ComputationalBase<?> dto){
+        return new ComputationalCallbackHandler(computationalConfigure, 
selfService, action, uuid, dto);
+    }
+
+    private String nameContainer(String user, DockerAction action, String 
exploratoryName, String name) {
+        return nameContainer(user, action.toString(), "computational", 
exploratoryName, name);
+    }
+
+    @Override
+    public String getResourceType() {
+        return Directories.DATA_ENGINE_SERVICE_LOG_DIRECTORY;
+    }
+
 }
diff --git 
a/services/self-service/src/main/java/com/epam/datalab/backendapi/conf/SelfServiceApplicationConfiguration.java
 
b/services/self-service/src/main/java/com/epam/datalab/backendapi/conf/SelfServiceApplicationConfiguration.java
index 66ccb7224..6488c2cfd 100644
--- 
a/services/self-service/src/main/java/com/epam/datalab/backendapi/conf/SelfServiceApplicationConfiguration.java
+++ 
b/services/self-service/src/main/java/com/epam/datalab/backendapi/conf/SelfServiceApplicationConfiguration.java
@@ -55,6 +55,15 @@ public class SelfServiceApplicationConfiguration extends 
ServiceConfiguration {
     @JsonProperty
     private int maxEmrSpotInstanceBidPct;
 
+
+    @Min(value = 2)
+    @JsonProperty
+    private int minHDInsightInstanceCount;
+
+    @Max(value = 1000)
+    @JsonProperty
+    private int maxHDInsightInstanceCount;
+
     @Min(value = 2)
     @JsonProperty
     private int minSparkInstanceCount;
@@ -180,6 +189,14 @@ public class SelfServiceApplicationConfiguration extends 
ServiceConfiguration {
         return maxEmrSpotInstanceBidPct;
     }
 
+    public int getMinHDInsightInstanceCount() {
+        return minHDInsightInstanceCount;
+    }
+
+    public int getMaxHDInsightInstanceCount() {
+        return maxHDInsightInstanceCount;
+    }
+
     public int getMinSparkInstanceCount() {
         return minSparkInstanceCount;
     }
diff --git 
a/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/azure/ComputationalResourceAzure.java
 
b/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/azure/ComputationalResourceAzure.java
index f1582f23e..ad7886032 100644
--- 
a/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/azure/ComputationalResourceAzure.java
+++ 
b/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/azure/ComputationalResourceAzure.java
@@ -20,17 +20,22 @@
 package com.epam.datalab.backendapi.resources.azure;
 
 import com.epam.datalab.auth.UserInfo;
+import com.epam.datalab.backendapi.conf.SelfServiceApplicationConfiguration;
 import 
com.epam.datalab.backendapi.resources.dto.SparkStandaloneClusterCreateForm;
+import 
com.epam.datalab.backendapi.resources.dto.azure.AzureComputationalCreateForm;
 import com.epam.datalab.backendapi.roles.RoleType;
 import com.epam.datalab.backendapi.roles.UserRoles;
 import com.epam.datalab.backendapi.service.ComputationalService;
 import com.epam.datalab.dto.aws.computational.ClusterConfig;
+import com.epam.datalab.dto.azure.computational.AzureComputationalResource;
+import com.epam.datalab.dto.base.DataEngineType;
 import com.epam.datalab.exceptions.DatalabException;
 import com.google.inject.Inject;
 import io.dropwizard.auth.Auth;
 import io.swagger.v3.oas.annotations.Parameter;
 import lombok.extern.slf4j.Slf4j;
 
+import javax.management.relation.Role;
 import javax.validation.Valid;
 import javax.validation.constraints.NotNull;
 import javax.ws.rs.Consumes;
@@ -44,6 +49,7 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.util.List;
 
+import static com.epam.datalab.dto.UserInstanceStatus.CREATING;
 import static 
com.epam.datalab.rest.contracts.ComputationalAPI.AUDIT_COMPUTATIONAL_RECONFIGURE_MESSAGE;
 import static com.epam.datalab.rest.contracts.ComputationalAPI.AUDIT_MESSAGE;
 
@@ -55,11 +61,16 @@ import static 
com.epam.datalab.rest.contracts.ComputationalAPI.AUDIT_MESSAGE;
 @Produces(MediaType.APPLICATION_JSON)
 @Slf4j
 public class ComputationalResourceAzure {
+
+    private final SelfServiceApplicationConfiguration configuration;
+
     private final ComputationalService computationalService;
 
     @Inject
-    public ComputationalResourceAzure(ComputationalService 
computationalService) {
+    public ComputationalResourceAzure(ComputationalService 
computationalService,
+                                      SelfServiceApplicationConfiguration 
configuration) {
         this.computationalService = computationalService;
+        this.configuration = configuration;
     }
 
     @GET
@@ -69,6 +80,37 @@ public class ComputationalResourceAzure {
         return 
Response.ok(computationalService.getComputationalNamesAndTemplates(userInfo, 
project, endpoint)).build();
     }
 
+    @PUT
+    @Path("dataengine-service")
+    public Response createDataEngineService(@Auth @Parameter(hidden = true) 
UserInfo userInfo,
+                                            @Parameter @Valid @NotNull 
AzureComputationalCreateForm form) {
+        log.debug("Create computational resources for {} | form is {}", 
userInfo.getName(), form);
+
+        if (DataEngineType.CLOUD_SERVICE == 
DataEngineType.fromDockerImageName(form.getImage())) {
+
+            validate(userInfo, form);
+
+            AzureComputationalResource azureComputationalResource = 
AzureComputationalResource.builder()
+                    .computationalName(form.getName())
+                    .imageName(form.getImage())
+                    .templateName(form.getTemplateName())
+                    .status(CREATING.toString())
+                    .masterShape(form.getMasterInstanceType())
+                    .slaveShape(form.getSlaveInstanceType())
+                    .config(form.getConfig())
+                    .version(form.getVersion())
+                    
//.totalInstanceCount(Integer.parseInt(form.getInstanceCount()))
+                    .build();
+
+            boolean resourceAdded = 
computationalService.createDataEngineService(userInfo, form.getName(), form, 
azureComputationalResource
+                    , form.getProject(), getAuditInfo(form.getNotebookName()));
+            return resourceAdded ? Response.ok().build() : 
Response.status(Response.Status.FOUND).build();
+        }
+
+        throw new IllegalArgumentException("Malformed image " + 
form.getImage());
+    }
+
+
     /**
      * Asynchronously creates computational Spark cluster.
      *
@@ -175,6 +217,26 @@ public class ComputationalResourceAzure {
         return Response.ok(computationalService.getClusterConfig(userInfo, 
projectName, exploratoryName, computationalName)).build();
     }
 
+    private void validate(UserInfo userInfo, AzureComputationalCreateForm 
formDTO) {
+        if (!UserRoles.checkAccess(userInfo, RoleType.COMPUTATIONAL, 
formDTO.getImage(), userInfo.getRoles())) {
+            log.warn("Unauthorized attempt to create a {} by user {}", 
formDTO.getImage(), userInfo.getName());
+            throw new DatalabException("You do not have the privileges to 
create a " + formDTO.getTemplateName());
+        }
+
+        int slaveInstanceCount = 
Integer.parseInt(formDTO.getSlaveInstanceCount());
+        if (slaveInstanceCount < configuration.getMinHDInsightInstanceCount() 
|| slaveInstanceCount > configuration.getMaxHDInsightInstanceCount()) {
+
+            log.debug("Creating computational resource {} for user {} fail: 
Limit exceeded to creation slave " +
+                            "instances. Minimum is {}, maximum is {}",
+                    formDTO.getName(), userInfo.getName(), 
configuration.getMinHDInsightInstanceCount(),
+                    configuration.getMaxHDInsightInstanceCount());
+            throw new DatalabException("Limit exceeded to creation slave 
instances. Minimum is " +
+                    configuration.getMinHDInsightInstanceCount() + ", maximum 
is " + configuration.getMaxHDInsightInstanceCount() +
+                    ".");
+        }
+
+    }
+
     private String getAuditInfo(String exploratoryName) {
         return String.format(AUDIT_MESSAGE, exploratoryName);
     }
diff --git 
a/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/dto/azure/AzureComputationalCreateForm.java
 
b/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/dto/azure/AzureComputationalCreateForm.java
new file mode 100644
index 000000000..c65fcf938
--- /dev/null
+++ 
b/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/dto/azure/AzureComputationalCreateForm.java
@@ -0,0 +1,32 @@
+package com.epam.datalab.backendapi.resources.dto.azure;
+
+import com.epam.datalab.backendapi.resources.dto.ComputationalCreateFormDTO;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.ToString;
+import org.hibernate.validator.constraints.NotBlank;
+
+@Data
+@EqualsAndHashCode(callSuper = true)
+@ToString(callSuper = true)
+@JsonIgnoreProperties
+public class AzureComputationalCreateForm extends ComputationalCreateFormDTO {
+
+    @NotBlank
+    @JsonProperty("hdinsight_master_instance_type")
+    private String masterInstanceType;
+    @NotBlank
+    @JsonProperty("hdinsight_slave_instance_type")
+    private String slaveInstanceType;
+    @NotBlank
+    @JsonProperty("hdinsight_version")
+    private String version;
+
+    @NotBlank
+    @JsonProperty("hdinsight_slave_instance_count")
+    private String slaveInstanceCount;
+
+
+}
diff --git 
a/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/dto/azure/AzureHDInsightConfiguration.java
 
b/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/dto/azure/AzureHDInsightConfiguration.java
new file mode 100644
index 000000000..ee854adaf
--- /dev/null
+++ 
b/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/dto/azure/AzureHDInsightConfiguration.java
@@ -0,0 +1,19 @@
+package com.epam.datalab.backendapi.resources.dto.azure;
+
+import com.epam.datalab.backendapi.resources.dto.ComputationalCreateFormDTO;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.Builder;
+import lombok.Data;
+import org.hibernate.validator.constraints.NotBlank;
+
+@Data
+@Builder
+public class AzureHDInsightConfiguration  {
+    @NotBlank
+    @JsonProperty("min_hdinsight_instance_count")
+    private int minHdinsightInstanceCount;
+    @NotBlank
+    @JsonProperty("max_hdinsight_instance_count")
+    private int maxHdinsightInstanceCount;
+
+}
diff --git 
a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/InfrastructureTemplateServiceImpl.java
 
b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/InfrastructureTemplateServiceImpl.java
index da13cf1cb..3e2fcd33c 100644
--- 
a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/InfrastructureTemplateServiceImpl.java
+++ 
b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/InfrastructureTemplateServiceImpl.java
@@ -28,6 +28,7 @@ import com.epam.datalab.backendapi.dao.UserGroupDAO;
 import com.epam.datalab.backendapi.domain.EndpointDTO;
 import com.epam.datalab.backendapi.resources.dto.SparkStandaloneConfiguration;
 import com.epam.datalab.backendapi.resources.dto.aws.AwsEmrConfiguration;
+import 
com.epam.datalab.backendapi.resources.dto.azure.AzureHDInsightConfiguration;
 import com.epam.datalab.backendapi.resources.dto.gcp.GcpDataprocConfiguration;
 import com.epam.datalab.backendapi.roles.RoleType;
 import com.epam.datalab.backendapi.roles.UserRoles;
@@ -250,6 +251,17 @@ public class InfrastructureTemplateServiceImpl implements 
InfrastructureTemplate
         }
     }
 
+    private static class AzureFullComputationalTemplate extends 
FullComputationalTemplate {
+        @JsonProperty("limits")
+        private AzureHDInsightConfiguration azureHDInsightConfiguration;
+
+        AzureFullComputationalTemplate(ComputationalMetadataDTO metadataDTO,
+                                       AzureHDInsightConfiguration 
azureHDInsightConfiguration){
+            super(metadataDTO);
+            this.azureHDInsightConfiguration = azureHDInsightConfiguration;
+        }
+    }
+
     private static class GcpFullComputationalTemplate extends 
FullComputationalTemplate {
         @JsonProperty("limits")
         private GcpDataprocConfiguration gcpDataprocConfiguration;


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to