turcsanyip commented on code in PR #8359:
URL: https://github.com/apache/nifi/pull/8359#discussion_r1500389209


##########
nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/services/azure/storage/AzureDataLakeStorageFileResourceService.java:
##########
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.services.azure.storage;
+
+import com.azure.storage.file.datalake.DataLakeDirectoryClient;
+import com.azure.storage.file.datalake.DataLakeFileClient;
+import com.azure.storage.file.datalake.DataLakeFileSystemClient;
+import com.azure.storage.file.datalake.DataLakeServiceClient;
+import com.azure.storage.file.datalake.models.DataLakeStorageException;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.documentation.UseCase;
+import org.apache.nifi.annotation.lifecycle.OnDisabled;
+import org.apache.nifi.annotation.lifecycle.OnEnabled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.controller.AbstractControllerService;
+import org.apache.nifi.controller.ConfigurationContext;
+import org.apache.nifi.fileresource.service.api.FileResource;
+import org.apache.nifi.fileresource.service.api.FileResourceService;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processors.azure.storage.FetchAzureDataLakeStorage;
+import org.apache.nifi.processors.azure.storage.utils.DataLakeServiceClientFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.ADLS_CREDENTIALS_SERVICE;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.DIRECTORY;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.FILE;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.FILESYSTEM;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.getProxyOptions;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.validateDirectoryProperty;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.validateFileProperty;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.validateFileSystemProperty;
+
+@Tags({"azure", "microsoft", "cloud", "storage", "adlsgen2", "file", "resource", "datalake"})
+@SeeAlso({FetchAzureDataLakeStorage.class})
+@CapabilityDescription("Provides an Azure Data Lake Storage (ADLS) file resource for other components.")
+@UseCase(
+        description = "Fetch the specified file from Azure Data Lake Storage." +
+                " The service provides higher performance compared to fetch processors when the data should be moved between different storages without any transformation.",
+        configuration = """
+                "Filesystem Name" = "${azure.filesystem}"
+                "Directory Name" = "${azure.directory}"
+                "File Name" = "${azure.filename}"
+
+                The "ADLS Credentials" property should specify an instance of the ADLSCredentialsService in order to provide credentials for accessing the filesystem.
+                """
+)
+public class AzureDataLakeStorageFileResourceService extends AbstractControllerService implements FileResourceService {
+
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            ADLS_CREDENTIALS_SERVICE,
+            FILESYSTEM,
+            DIRECTORY,

Review Comment:
   Please add default values for these properties (`${azure.filesystem}` and `${azure.directory}`, respectively). The defaults should be the attributes emitted by `ListAzureDataLakeStorage` (as in the case of `ListAzureBlobStorage_v12` and `AzureBlobStorageFileResourceService`).
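   For example, something along these lines could work (a sketch only, mirroring the `CONTAINER`/`BLOB_NAME` pattern in `AzureBlobStorageFileResourceService`; the attribute names below are the ones already referenced in the `@UseCase` configuration, so please double-check them against the constants used by `ListAzureDataLakeStorage`):
   ```java
   public static final PropertyDescriptor FILESYSTEM = new PropertyDescriptor.Builder()
           .fromPropertyDescriptor(AzureStorageUtils.FILESYSTEM)
           .defaultValue("${azure.filesystem}")
           .build();

   public static final PropertyDescriptor DIRECTORY = new PropertyDescriptor.Builder()
           .fromPropertyDescriptor(AzureStorageUtils.DIRECTORY)
           .defaultValue("${azure.directory}")
           .build();
   ```
   The service would then list these local descriptors in `PROPERTIES` instead of the statically imported ones.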



##########
nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/services/azure/storage/AzureBlobStorageFileResourceService.java:
##########
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.services.azure.storage;
+
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.BlobServiceClient;
+import com.azure.storage.blob.models.BlobStorageException;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.documentation.UseCase;
+import org.apache.nifi.annotation.lifecycle.OnDisabled;
+import org.apache.nifi.annotation.lifecycle.OnEnabled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.controller.AbstractControllerService;
+import org.apache.nifi.controller.ConfigurationContext;
+import org.apache.nifi.fileresource.service.api.FileResource;
+import org.apache.nifi.fileresource.service.api.FileResourceService;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processors.azure.AbstractAzureBlobProcessor_v12;
+import org.apache.nifi.processors.azure.storage.FetchAzureBlobStorage_v12;
+import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;
+import org.apache.nifi.processors.azure.storage.utils.BlobServiceClientFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.getProxyOptions;
+import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_BLOBNAME;
+import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_CONTAINER;
+import static org.apache.nifi.util.StringUtils.isBlank;
+
+@Tags({"azure", "microsoft", "cloud", "storage", "file", "resource", "blob"})
+@SeeAlso({FetchAzureBlobStorage_v12.class})
+@CapabilityDescription("Provides an Azure Blob Storage file resource for other components.")
+@UseCase(
+        description = "Fetch a specific file from Azure Blob Storage." +
+                " The service provides higher performance compared to fetch processors when the data should be moved between different storages without any transformation.",
+        configuration = """
+                "Container Name" = "${azure.container}"
+                "Blob Name" = "${azure.blobname}"
+
+                The "Storage Credentials" property should specify an instance of the AzureStorageCredentialsService_v12 in order to provide credentials for accessing the storage container.
+                """
+)
+public class AzureBlobStorageFileResourceService extends AbstractControllerService implements FileResourceService {
+
+    public static final PropertyDescriptor CONTAINER = new PropertyDescriptor.Builder()
+            .fromPropertyDescriptor(AzureStorageUtils.CONTAINER)
+            .defaultValue(String.format("${%s}", ATTR_NAME_CONTAINER))
+            .build();
+
+    public static final PropertyDescriptor BLOB_NAME = new PropertyDescriptor.Builder()
+            .fromPropertyDescriptor(AbstractAzureBlobProcessor_v12.BLOB_NAME)
+            .defaultValue(String.format("${%s}", ATTR_NAME_BLOBNAME))
+            .build();
+
+    public static final PropertyDescriptor STORAGE_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder()
+            .name("storage-credentials-service")
+            .displayName("Storage Credentials")
+            .description("Controller Service used to obtain Azure Blob Storage Credentials.")
+            .identifiesControllerService(AzureStorageCredentialsService_v12.class)
+            .required(true)
+            .build();
+
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            CONTAINER,
+            BLOB_NAME,
+            STORAGE_CREDENTIALS_SERVICE
+    );

Review Comment:
   Please move the credential service property to the top (similar to 
`AzureDataLakeStorageFileResourceService`).
   ```suggestion
       private static final List<PropertyDescriptor> PROPERTIES = List.of(
               STORAGE_CREDENTIALS_SERVICE,
               CONTAINER,
               BLOB_NAME
       );
   ```



##########
nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/services/azure/storage/AzureDataLakeStorageFileResourceService.java:
##########
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.services.azure.storage;
+
+import com.azure.storage.file.datalake.DataLakeDirectoryClient;
+import com.azure.storage.file.datalake.DataLakeFileClient;
+import com.azure.storage.file.datalake.DataLakeFileSystemClient;
+import com.azure.storage.file.datalake.DataLakeServiceClient;
+import com.azure.storage.file.datalake.models.DataLakeStorageException;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.documentation.UseCase;
+import org.apache.nifi.annotation.lifecycle.OnDisabled;
+import org.apache.nifi.annotation.lifecycle.OnEnabled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.controller.AbstractControllerService;
+import org.apache.nifi.controller.ConfigurationContext;
+import org.apache.nifi.fileresource.service.api.FileResource;
+import org.apache.nifi.fileresource.service.api.FileResourceService;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processors.azure.storage.FetchAzureDataLakeStorage;
+import org.apache.nifi.processors.azure.storage.utils.DataLakeServiceClientFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.ADLS_CREDENTIALS_SERVICE;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.DIRECTORY;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.FILE;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.FILESYSTEM;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.getProxyOptions;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.validateDirectoryProperty;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.validateFileProperty;
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.validateFileSystemProperty;
+
+@Tags({"azure", "microsoft", "cloud", "storage", "adlsgen2", "file", "resource", "datalake"})
+@SeeAlso({FetchAzureDataLakeStorage.class})
+@CapabilityDescription("Provides an Azure Data Lake Storage (ADLS) file resource for other components.")
+@UseCase(
+        description = "Fetch the specified file from Azure Data Lake Storage." +
+                " The service provides higher performance compared to fetch processors when the data should be moved between different storages without any transformation.",
+        configuration = """
+                "Filesystem Name" = "${azure.filesystem}"
+                "Directory Name" = "${azure.directory}"
+                "File Name" = "${azure.filename}"
+
+                The "ADLS Credentials" property should specify an instance of the ADLSCredentialsService in order to provide credentials for accessing the filesystem.
+                """
+)
+public class AzureDataLakeStorageFileResourceService extends AbstractControllerService implements FileResourceService {
+
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            ADLS_CREDENTIALS_SERVICE,
+            FILESYSTEM,
+            DIRECTORY,
+            FILE
+    );
+
+    private volatile DataLakeServiceClientFactory clientFactory;
+    private volatile ConfigurationContext context;
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return PROPERTIES;
+    }
+
+    @OnEnabled
+    public void onEnabled(final ConfigurationContext context) {
+        this.clientFactory = new DataLakeServiceClientFactory(getLogger(), getProxyOptions(context));
+        this.context = context;
+    }
+
+    @OnDisabled
+    public void onDisabled() {
+        this.clientFactory = null;
+        this.context = null;
+    }
+
+    @Override
+    public FileResource getFileResource(Map<String, String> attributes) {
+        final DataLakeServiceClient client = getStorageClient(attributes);
+        try {
+            return fetchBlob(client, attributes);
+        } catch (final DataLakeStorageException | IOException e) {
+            throw new ProcessException("Failed to fetch file from ADLS Storage", e);
+        }
+    }
+
+    protected DataLakeServiceClient getStorageClient(Map<String, String> attributes) {
+        final ADLSCredentialsService credentialsService = context.getProperty(ADLS_CREDENTIALS_SERVICE)
+                .asControllerService(ADLSCredentialsService.class);
+        return clientFactory.getStorageClient(credentialsService.getCredentialsDetails(attributes));
+    }
+
+    /**
+     * Fetching file from the provided filesystem and directory in ADLS.
+     *
+     * @param storageClient azure data lake service client
+     * @param attributes configuration attributes
+     * @return fetched file as FileResource
+     * @throws IOException exception caused by missing parameters or blob not found
+     */
+    private FileResource fetchBlob(final DataLakeServiceClient storageClient, final Map<String, String> attributes) throws IOException {

Review Comment:
   The method should rather be called `fetchFile()`, as that is how its Javadoc refers to it.
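   For example (rename only; the `fetchBlob(client, attributes)` call site in `getFileResource()` above would need the same change):
   ```suggestion
    private FileResource fetchFile(final DataLakeServiceClient storageClient, final Map<String, String> attributes) throws IOException {
   ```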



##########
nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/utils/AzureStorageUtils.java:
##########
@@ -215,6 +258,67 @@ public static void validateProxySpec(ValidationContext context, Collection<Valid
         ProxyConfiguration.validateProxySpec(context, results, PROXY_SPECS);
     }
 
+    public static String validateFileSystemProperty(PropertyDescriptor propertyDescriptor, PropertyContext context, Map<String, String> attributeMap) {
+        final String fileSystem = getPropertyFromAttributeMap(propertyDescriptor, context, attributeMap);
+        return doValidateFileSystemValue(propertyDescriptor, fileSystem);
+    }
+
+    public static String validateFileSystemProperty(PropertyDescriptor propertyDescriptor, PropertyContext context, FlowFile flowFile) {
+        final String fileSystem = getPropertyFromFlowFile(propertyDescriptor, context, flowFile);
+        return doValidateFileSystemValue(propertyDescriptor, fileSystem);
+    }
+
+    public static String validateDirectoryProperty(PropertyDescriptor property, PropertyContext context, FlowFile flowFile) {
+        final String directory = getPropertyFromFlowFile(property, context, flowFile);
+        return doValidateDirectoryValue(property, directory);
+    }
+
+    public static String validateDirectoryProperty(PropertyDescriptor property, PropertyContext context, Map<String, String> attributes) {
+        final String directory = getPropertyFromAttributeMap(property, context, attributes);
+        return doValidateDirectoryValue(property, directory);
+    }
+
+    public static String validateFileProperty(PropertyContext context, FlowFile flowFile) {
+        final String fileName = getPropertyFromFlowFile(FILE, context, flowFile);
+        return doValidateFileValue(fileName);
+    }
+
+    public static String validateFileProperty(PropertyContext context, Map<String, String> attributes) {
+        final String fileName = getPropertyFromAttributeMap(FILE, context, attributes);
+        return doValidateFileValue(fileName);
+    }
+
+    private static String doValidateFileSystemValue(PropertyDescriptor property, String fileSystem) {
+        if (StringUtils.isBlank(fileSystem)) {
+            throw new ProcessException(String.format("'%1$s' property evaluated to blank string. '%s' must be specified as a non-blank string.", property.getDisplayName()));
+        }
+        return fileSystem;
+    }
+
+    private static String doValidateDirectoryValue(PropertyDescriptor property, String directory) {
+        if (directory.startsWith("/")) {
+            throw new ProcessException(String.format("'%1$s' starts with '/'. '%s' cannot contain a leading '/'.", property.getDisplayName()));
+        } else if (StringUtils.isNotEmpty(directory) && StringUtils.isWhitespace(directory)) {
+            throw new ProcessException(String.format("'%1$s' contains whitespace characters only.", property.getDisplayName()));
+        }
+        return directory;
+    }
+
+    private static String doValidateFileValue(String fileName) {
+        if (StringUtils.isBlank(fileName)) {
+            throw new ProcessException(String.format("'%1$s' property evaluated to blank string. '%s' must be specified as a non-blank string.", FILE.getDisplayName()));
+        }
+        return fileName;
+    }
+
+    private static String getPropertyFromFlowFile(PropertyDescriptor propertyDescriptor, PropertyContext context, FlowFile flowFile) {
+        return context.getProperty(propertyDescriptor).evaluateAttributeExpressions(flowFile).getValue();
+    }
+
+    private static String getPropertyFromAttributeMap(PropertyDescriptor propertyDescriptor, PropertyContext context, Map<String, String> attributeMap) {

Review Comment:
   ```suggestion
    private static String evaluateProperty(PropertyDescriptor propertyDescriptor, PropertyContext context, Map<String, String> attributeMap) {
   ```



##########
nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/utils/AzureStorageUtils.java:
##########
@@ -54,6 +70,33 @@ public final class AzureStorageUtils {
             .defaultValue(AzureStorageCredentialsType.SAS_TOKEN)
             .build();
 
+    public static final PropertyDescriptor FILESYSTEM = new PropertyDescriptor.Builder()
+            .name("filesystem-name").displayName("Filesystem Name")
+            .description("Name of the Azure Storage File System (also called Container). It is assumed to be already existing.")
+            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
+            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+            .required(true)
+            .build();
+
+    public static final PropertyDescriptor DIRECTORY = new PropertyDescriptor.Builder()
+            .name("directory-name")
+            .displayName("Directory Name")
+            .description("Name of the Azure Storage Directory. The Directory Name cannot contain a leading '/'. The root directory can be designated by the empty string value. " +
+                    "In case of the PutAzureDataLakeStorage processor, the directory will be created if not already existing.")
+            .addValidator(new AbstractAzureDataLakeStorageProcessor.DirectoryValidator())

Review Comment:
   I think `DirectoryValidator` should be moved here too or into an outer class. The util should not reference back to the abstract class.
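   A rough sketch of what the relocated validator could look like (rules taken from `doValidateDirectoryValue()` in this PR; the existing `DirectoryValidator` in `AbstractAzureDataLakeStorageProcessor` may differ in details, and the snippet assumes `org.apache.nifi.components.Validator`/`ValidationResult` and commons-lang3 `StringUtils` are available):
   ```java
   // Hypothetical static nested class inside AzureStorageUtils, so the util no longer
   // depends on AbstractAzureDataLakeStorageProcessor.
   public static class DirectoryValidator implements Validator {

       @Override
       public ValidationResult validate(final String subject, final String input, final ValidationContext context) {
           final ValidationResult.Builder builder = new ValidationResult.Builder()
                   .subject(subject)
                   .input(input);
           if (context.isExpressionLanguagePresent(input)) {
               // Defer validation until the expression is evaluated against attributes.
               return builder.valid(true).explanation("Expression Language Present").build();
           } else if (input.startsWith("/")) {
               return builder.valid(false).explanation("'" + subject + "' cannot contain a leading '/'").build();
           } else if (StringUtils.isNotEmpty(input) && StringUtils.isWhitespace(input)) {
               return builder.valid(false).explanation("'" + subject + "' cannot contain whitespace characters only").build();
           }
           return builder.valid(true).build();
       }
   }
   ```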



##########
nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/utils/AzureStorageUtils.java:
##########
@@ -215,6 +258,67 @@ public static void validateProxySpec(ValidationContext context, Collection<Valid
         ProxyConfiguration.validateProxySpec(context, results, PROXY_SPECS);
     }
 
+    public static String validateFileSystemProperty(PropertyDescriptor propertyDescriptor, PropertyContext context, Map<String, String> attributeMap) {
+        final String fileSystem = getPropertyFromAttributeMap(propertyDescriptor, context, attributeMap);
+        return doValidateFileSystemValue(propertyDescriptor, fileSystem);
+    }
+
+    public static String validateFileSystemProperty(PropertyDescriptor propertyDescriptor, PropertyContext context, FlowFile flowFile) {
+        final String fileSystem = getPropertyFromFlowFile(propertyDescriptor, context, flowFile);
+        return doValidateFileSystemValue(propertyDescriptor, fileSystem);
+    }

Review Comment:
   In my opinion, the following would be less verbose in these methods. 
`getPropertyFromFlowFile()` is also not needed in this case.
   ```suggestion
       public static String validateFileSystemProperty(PropertyDescriptor propertyDescriptor, PropertyContext context, FlowFile flowFile) {
           return validateFileSystemProperty(propertyDescriptor, context, flowFile.getAttributes());
       }
   ```



##########
nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/services/azure/storage/AzureBlobStorageFileResourceService.java:
##########
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.services.azure.storage;
+
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.BlobServiceClient;
+import com.azure.storage.blob.models.BlobStorageException;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.SeeAlso;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.documentation.UseCase;
+import org.apache.nifi.annotation.lifecycle.OnDisabled;
+import org.apache.nifi.annotation.lifecycle.OnEnabled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.controller.AbstractControllerService;
+import org.apache.nifi.controller.ConfigurationContext;
+import org.apache.nifi.fileresource.service.api.FileResource;
+import org.apache.nifi.fileresource.service.api.FileResourceService;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processors.azure.AbstractAzureBlobProcessor_v12;
+import org.apache.nifi.processors.azure.storage.FetchAzureBlobStorage_v12;
+import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;
+import org.apache.nifi.processors.azure.storage.utils.BlobServiceClientFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.getProxyOptions;
+import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_BLOBNAME;
+import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_CONTAINER;
+import static org.apache.nifi.util.StringUtils.isBlank;
+
+@Tags({"azure", "microsoft", "cloud", "storage", "file", "resource", "blob"})
+@SeeAlso({FetchAzureBlobStorage_v12.class})
+@CapabilityDescription("Provides an Azure Blob Storage file resource for other components.")
+@UseCase(
+        description = "Fetch a specific file from Azure Blob Storage." +
+                " The service provides higher performance compared to fetch processors when the data should be moved between different storages without any transformation.",
+        configuration = """
+                "Container Name" = "${azure.container}"
+                "Blob Name" = "${azure.blobname}"
+
+                The "Storage Credentials" property should specify an instance of the AzureStorageCredentialsService_v12 in order to provide credentials for accessing the storage container.
+                """
+)
+public class AzureBlobStorageFileResourceService extends AbstractControllerService implements FileResourceService {
+
+    public static final PropertyDescriptor CONTAINER = new PropertyDescriptor.Builder()
+            .fromPropertyDescriptor(AzureStorageUtils.CONTAINER)
+            .defaultValue(String.format("${%s}", ATTR_NAME_CONTAINER))
+            .build();
+
+    public static final PropertyDescriptor BLOB_NAME = new PropertyDescriptor.Builder()
+            .fromPropertyDescriptor(AbstractAzureBlobProcessor_v12.BLOB_NAME)
+            .defaultValue(String.format("${%s}", ATTR_NAME_BLOBNAME))
+            .build();
+
+    public static final PropertyDescriptor STORAGE_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder()
+            .name("storage-credentials-service")
+            .displayName("Storage Credentials")
+            .description("Controller Service used to obtain Azure Blob Storage Credentials.")
+            .identifiesControllerService(AzureStorageCredentialsService_v12.class)
+            .required(true)
+            .build();

Review Comment:
   This property is the same one used in the processors and defined in `AbstractAzureBlobProcessor_v12`. The property could be moved to `AzureStorageUtils` and reused everywhere.
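   For example (a sketch only: the descriptor below is copied verbatim from this class; only its home would change):
   ```java
   // In AzureStorageUtils: single shared definition for the processors and this service.
   public static final PropertyDescriptor STORAGE_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder()
           .name("storage-credentials-service")
           .displayName("Storage Credentials")
           .description("Controller Service used to obtain Azure Blob Storage Credentials.")
           .identifiesControllerService(AzureStorageCredentialsService_v12.class)
           .required(true)
           .build();
   ```
   `AbstractAzureBlobProcessor_v12` and this service would then reference `AzureStorageUtils.STORAGE_CREDENTIALS_SERVICE` (or extend it via `fromPropertyDescriptor()` where needed).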



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
