[ https://issues.apache.org/jira/browse/NIFI-1833?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15990926#comment-15990926 ]

ASF GitHub Bot commented on NIFI-1833:
--------------------------------------

Github user brosander commented on a diff in the pull request:

    https://github.com/apache/nifi/pull/1719#discussion_r114136630
  
    --- Diff: nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/storage/ListAzureBlobStorage.java ---
    @@ -0,0 +1,180 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.nifi.processors.azure.storage;
    +
    +import java.io.IOException;
    +import java.net.URISyntaxException;
    +import java.security.InvalidKeyException;
    +import java.util.ArrayList;
    +import java.util.Arrays;
    +import java.util.Collections;
    +import java.util.EnumSet;
    +import java.util.HashMap;
    +import java.util.List;
    +import java.util.Map;
    +
    +import org.apache.nifi.annotation.behavior.InputRequirement;
    +import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
    +import org.apache.nifi.annotation.behavior.Stateful;
    +import org.apache.nifi.annotation.behavior.TriggerSerially;
    +import org.apache.nifi.annotation.behavior.WritesAttribute;
    +import org.apache.nifi.annotation.behavior.WritesAttributes;
    +import org.apache.nifi.annotation.documentation.CapabilityDescription;
    +import org.apache.nifi.annotation.documentation.SeeAlso;
    +import org.apache.nifi.annotation.documentation.Tags;
    +import org.apache.nifi.components.PropertyDescriptor;
    +import org.apache.nifi.components.state.Scope;
    +import org.apache.nifi.processor.ProcessContext;
    +import org.apache.nifi.processor.util.StandardValidators;
    +import org.apache.nifi.processors.azure.AzureConstants;
    +import org.apache.nifi.processors.azure.storage.utils.BlobInfo;
    +import org.apache.nifi.processors.azure.storage.utils.BlobInfo.Builder;
    +import org.apache.nifi.processor.util.list.AbstractListProcessor;
    +
    +import com.microsoft.azure.storage.CloudStorageAccount;
    +import com.microsoft.azure.storage.StorageException;
    +import com.microsoft.azure.storage.StorageUri;
    +import com.microsoft.azure.storage.blob.BlobListingDetails;
    +import com.microsoft.azure.storage.blob.BlobProperties;
    +import com.microsoft.azure.storage.blob.CloudBlob;
    +import com.microsoft.azure.storage.blob.CloudBlobClient;
    +import com.microsoft.azure.storage.blob.CloudBlobContainer;
    +import com.microsoft.azure.storage.blob.CloudBlockBlob;
    +import com.microsoft.azure.storage.blob.ListBlobItem;
    +
    +@TriggerSerially
    +@Tags({ "azure", "microsoft", "cloud", "storage", "blob" })
    +@SeeAlso({ FetchAzureBlobStorage.class, PutAzureBlobStorage.class })
    +@CapabilityDescription("Lists blobs in an Azure Storage container. Listing details are attached to an empty FlowFile for use with FetchAzureBlobStorage")
    +@InputRequirement(Requirement.INPUT_FORBIDDEN)
    +@WritesAttributes({ @WritesAttribute(attribute = "azure.container", 
description = "The name of the Azure container"),
    +        @WritesAttribute(attribute = "azure.blobname", description = "The 
name of the Azure blob"),
    +        @WritesAttribute(attribute = "azure.primaryUri", description = 
"Primary location for blob content"),
    +        @WritesAttribute(attribute = "azure.secondaryUri", description = 
"Secondary location for blob content"),
    +        @WritesAttribute(attribute = "azure.etag", description = "Etag for 
the Azure blob"),
    +        @WritesAttribute(attribute = "azure.length", description = "Length 
of the blob"),
    +        @WritesAttribute(attribute = "azure.timestamp", description = "The 
timestamp in Azure for the blob"),
    +        @WritesAttribute(attribute = "mime.type", description = "MimeType 
of the content"),
    +        @WritesAttribute(attribute = "lang", description = "Language code 
for the content"),
    +        @WritesAttribute(attribute = "azure.blobtype", description = "This 
is the type of blob and can be either page or block type") })
    +@Stateful(scopes = { Scope.LOCAL, Scope.CLUSTER }, description = "After 
performing a listing of blobs, the timestamp of the newest blob is stored. "
    +        + "This allows the Processor to list only blobs that have been 
added or modified after " + "this date the next time that the Processor is 
run.")
    +public class ListAzureBlobStorage extends AbstractListProcessor<BlobInfo> {
    +
    +    private static final PropertyDescriptor PREFIX = new PropertyDescriptor.Builder().name("Prefix").description("Search prefix for listing").addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
    +            .expressionLanguageSupported(true).required(false).build();
    +
    +    private static final List<PropertyDescriptor> PROPERTIES = 
Collections.unmodifiableList(Arrays.asList(AzureConstants.ACCOUNT_NAME, 
AzureConstants.ACCOUNT_KEY, AzureConstants.CONTAINER, PREFIX));
    +
    +    @Override
    +    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    +        return PROPERTIES;
    +    }
    +
    +    @Override
    +    protected Map<String, String> createAttributes(BlobInfo entity, 
ProcessContext context) {
    +        final Map<String, String> attributes = new HashMap<>();
    +        attributes.put("azure.etag", entity.getEtag());
    +        attributes.put("azure.primaryUri", entity.getPrimaryUri());
    +        attributes.put("azure.secondaryUri", entity.getSecondaryUri());
    +        attributes.put("azure.blobname", entity.getName());
    +        attributes.put("azure.blobtype", entity.getBlobType());
    +        attributes.put("azure.length", String.valueOf(entity.getLength()));
    +        attributes.put("azure.timestamp", 
String.valueOf(entity.getTimestamp()));
    +        attributes.put("mime.type", entity.getContentType());
    +        attributes.put("lang", entity.getContentLanguage());
    +
    +        return attributes;
    +    }
    +
    +    @Override
    +    protected String getPath(final ProcessContext context) {
    +        return context.getProperty(AzureConstants.CONTAINER).evaluateAttributeExpressions().getValue();
    +    }
    +
    +    @Override
    +    protected boolean isListingResetNecessary(final PropertyDescriptor property) {
    +        // re-list if configuration changed, but not when security keys are rolled (not included in the condition)
    +        return PREFIX.equals(property)
    +                   || AzureConstants.ACCOUNT_NAME.equals(property)
    +                   || AzureConstants.CONTAINER.equals(property);
    +    }
    +
    +    @Override
    +    protected Scope getStateScope(final ProcessContext context) {
    +        return Scope.CLUSTER;
    --- End diff ---
    
    The annotations at the top of the class indicate that both local and cluster scope are supported, but getStateScope() always returns Scope.CLUSTER.
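    
    For illustration only, a minimal sketch of one way to reconcile the two, assuming the listing state is meant to be cluster-wide (which is how getStateScope() behaves today): narrow the @Stateful annotation rather than change the implementation. This is an excerpt-style sketch, not the committed change; the other overrides from the diff above are assumed unchanged and are omitted.
    
        import org.apache.nifi.annotation.behavior.Stateful;
        import org.apache.nifi.components.state.Scope;
        import org.apache.nifi.processor.ProcessContext;
        import org.apache.nifi.processor.util.list.AbstractListProcessor;
        import org.apache.nifi.processors.azure.storage.utils.BlobInfo;
    
        // Sketch only: the declared scopes now match what getStateScope() returns.
        @Stateful(scopes = { Scope.CLUSTER }, description = "After performing a listing of blobs, the timestamp of the newest blob is stored. "
                + "This allows the Processor to list only blobs that have been added or modified after this date the next time that the Processor is run.")
        public class ListAzureBlobStorage extends AbstractListProcessor<BlobInfo> {
    
            // ... remaining property descriptors and overrides as in the diff above ...
    
            @Override
            protected Scope getStateScope(final ProcessContext context) {
                // Cluster scope keeps the listing timestamp shared, so a primary-node
                // change does not trigger a full re-listing of the container.
                return Scope.CLUSTER;
            }
        }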


> Add support for Azure Blob Storage and Table Storage
> ----------------------------------------------------
>
>                 Key: NIFI-1833
>                 URL: https://issues.apache.org/jira/browse/NIFI-1833
>             Project: Apache NiFi
>          Issue Type: Improvement
>          Components: Extensions
>    Affects Versions: 0.6.1
>            Reporter: Simon Elliston Ball
>            Assignee: Jeff Storck
>            Priority: Minor
>             Fix For: 1.2.0
>
>
> It would be useful to have an Azure equivalent of the current S3 capability. 
> Azure also provides a Table storage mechanism, providing simple key-value 
> storage. Since the Azure SDKs are Apache-licensed, this should be reasonably 
> straightforward. A first cut is available as an addition to the existing 
> Azure bundle.



--
This message was sent by Atlassian JIRA
(v6.3.15#6346)
