This is an automated email from the ASF dual-hosted git repository.

lburgazzoli pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git


The following commit(s) were added to refs/heads/master by this push:
     new 6241a5c  CAMEL-15526: Camel-AWS2-S3: Consume Gzip file from S3 not working
6241a5c is described below

commit 6241a5cbe548b1ca855f91a3741acf06184a947e
Author: Luca Burgazzoli <lburgazz...@gmail.com>
AuthorDate: Fri Sep 25 15:59:51 2020 +0200

    CAMEL-15526: Camel-AWS2-S3: Consume Gzip file from S3 not working
---
 components/camel-aws2-s3/pom.xml                   |  5 ++
 .../camel/component/aws2/s3/AWS2S3Endpoint.java    | 24 +-----
 .../localstack/S3GzipOperationLocalstackTest.java  | 85 ++++++++++++++++++++++
 3 files changed, 94 insertions(+), 20 deletions(-)
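
A note outside the patch itself: the previous endpoint code read the S3 object body character by character as a UTF-8 String, which silently corrupts binary payloads such as gzip archives; the commit keeps the raw bytes instead so a data format can decompress them downstream. A minimal standalone Java sketch (class and variable names are illustrative, not from the commit) of why the String round trip destroys gzip data:

    import java.io.ByteArrayOutputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;
    import java.util.zip.GZIPOutputStream;

    // Illustration only, not part of the commit.
    public class GzipStringRoundTrip {
        public static void main(String[] args) throws Exception {
            // gzip a small payload
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            try (GZIPOutputStream gz = new GZIPOutputStream(bos)) {
                gz.write("hello".getBytes(StandardCharsets.UTF_8));
            }
            byte[] gzipped = bos.toByteArray();

            // old behaviour: decode the stream as UTF-8 text; invalid byte
            // sequences are replaced, so the compressed data is destroyed
            byte[] viaString = new String(gzipped, StandardCharsets.UTF_8)
                    .getBytes(StandardCharsets.UTF_8);
            System.out.println(Arrays.equals(gzipped, viaString)); // false

            // new behaviour: keep the raw byte[] body untouched
            byte[] viaBytes = Arrays.copyOf(gzipped, gzipped.length);
            System.out.println(Arrays.equals(gzipped, viaBytes));  // true
        }
    }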

diff --git a/components/camel-aws2-s3/pom.xml b/components/camel-aws2-s3/pom.xml
index 81d292b..500d556 100644
--- a/components/camel-aws2-s3/pom.xml
+++ b/components/camel-aws2-s3/pom.xml
@@ -67,6 +67,11 @@
             <artifactId>camel-testcontainers-junit5</artifactId>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.camel</groupId>
+            <artifactId>camel-zip-deflater</artifactId>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <profiles>
diff --git a/components/camel-aws2-s3/src/main/java/org/apache/camel/component/aws2/s3/AWS2S3Endpoint.java b/components/camel-aws2-s3/src/main/java/org/apache/camel/component/aws2/s3/AWS2S3Endpoint.java
index 7212715..c9e3b9c 100644
--- a/components/camel-aws2-s3/src/main/java/org/apache/camel/component/aws2/s3/AWS2S3Endpoint.java
+++ b/components/camel-aws2-s3/src/main/java/org/apache/camel/component/aws2/s3/AWS2S3Endpoint.java
@@ -16,12 +16,7 @@
  */
 package org.apache.camel.component.aws2.s3;
 
-import java.io.BufferedReader;
 import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.nio.charset.Charset;
-import java.nio.charset.StandardCharsets;
 
 import org.apache.camel.Category;
 import org.apache.camel.Component;
@@ -32,6 +27,7 @@ import org.apache.camel.ExtendedExchange;
 import org.apache.camel.Message;
 import org.apache.camel.Processor;
 import org.apache.camel.Producer;
+import org.apache.camel.RuntimeCamelException;
 import org.apache.camel.component.aws2.s3.client.AWS2S3ClientFactory;
 import org.apache.camel.spi.Metadata;
 import org.apache.camel.spi.UriEndpoint;
@@ -50,6 +46,7 @@ import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
 import software.amazon.awssdk.services.s3.model.GetObjectResponse;
 import software.amazon.awssdk.services.s3.model.ListObjectsRequest;
 import software.amazon.awssdk.services.s3.model.PutBucketPolicyRequest;
+import software.amazon.awssdk.utils.IoUtils;
 
 /**
  * Store and retrieve objects from AWS S3 Storage Service using AWS SDK version 2.x.
@@ -173,10 +170,9 @@ public class AWS2S3Endpoint extends ScheduledPollEndpoint {
 
         if (configuration.isIncludeBody()) {
             try {
-                message.setBody(readInputStream(s3Object));
+                message.setBody(IoUtils.toByteArray(s3Object));
             } catch (IOException e) {
-                // TODO Auto-generated catch block
-                e.printStackTrace();
+                throw new RuntimeCamelException(e);
             }
         } else {
             message.setBody(null);
@@ -258,16 +254,4 @@ public class AWS2S3Endpoint extends ScheduledPollEndpoint {
     public void setMaxConnections(int maxConnections) {
         this.maxConnections = maxConnections;
     }
-
-    private String readInputStream(ResponseInputStream<GetObjectResponse> s3Object) throws IOException {
-        StringBuilder textBuilder = new StringBuilder();
-        try (Reader reader
-                = new BufferedReader(new InputStreamReader(s3Object, Charset.forName(StandardCharsets.UTF_8.name())))) {
-            int c = 0;
-            while ((c = reader.read()) != -1) {
-                textBuilder.append((char) c);
-            }
-        }
-        return textBuilder.toString();
-    }
 }
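
Another illustrative sketch, not part of the patch: with includeBody=true the consumer now delivers the raw, still-compressed bytes, so a route can either call unmarshal().gzipDeflater() as the new test below does, or gunzip by hand in a processor. The bucket name and target endpoint here are placeholders.

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.GZIPInputStream;

    import org.apache.camel.builder.RouteBuilder;

    // Sketch only; endpoint URIs are illustrative.
    public class GunzipS3BodyRoute extends RouteBuilder {
        @Override
        public void configure() {
            from("aws2-s3://mycamel?autoCreateBucket=true&includeBody=true")
                    .process(exchange -> {
                        // body arrives as the unmodified, gzip-compressed byte[]
                        byte[] gzipped = exchange.getMessage().getBody(byte[].class);
                        try (GZIPInputStream gz = new GZIPInputStream(new ByteArrayInputStream(gzipped))) {
                            exchange.getMessage().setBody(new String(gz.readAllBytes(), StandardCharsets.UTF_8));
                        }
                    })
                    .to("log:decompressed");
        }
    }
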
diff --git a/components/camel-aws2-s3/src/test/java/org/apache/camel/component/aws2/s3/localstack/S3GzipOperationLocalstackTest.java b/components/camel-aws2-s3/src/test/java/org/apache/camel/component/aws2/s3/localstack/S3GzipOperationLocalstackTest.java
new file mode 100644
index 0000000..f35de30
--- /dev/null
+++ b/components/camel-aws2-s3/src/test/java/org/apache/camel/component/aws2/s3/localstack/S3GzipOperationLocalstackTest.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.aws2.s3.localstack;
+
+import java.util.UUID;
+
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.aws2.s3.AWS2S3Constants;
+import org.apache.camel.component.aws2.s3.AWS2S3Operations;
+import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class S3GzipOperationLocalstackTest extends Aws2S3BaseTest {
+    @Test
+    public void sendAndGet() throws Exception {
+        final String content = UUID.randomUUID().toString();
+        final MockEndpoint poll = getMockEndpoint("mock:poll");
+
+        poll.expectedBodiesReceived(content);
+        poll.expectedMessageCount(1);
+
+        // put a compressed element to the bucket
+        Object putResult = fluentTemplate()
+                .to("direct:putObject")
+                .withHeader(AWS2S3Constants.KEY, "hello.txt.gz")
+                .withBody(content)
+                .request(Object.class);
+
+        assertNotNull(putResult);
+
+        //retrieve it from a producer
+        Object getResult = fluentTemplate()
+                .to("direct:getObject")
+                .withHeader(AWS2S3Constants.KEY, "hello.txt.gz")
+                .withHeader(AWS2S3Constants.S3_OPERATION, AWS2S3Operations.getObject)
+                .request(String.class);
+
+        assertEquals(content, getResult);
+
+        //retrieve it from a polling consumer
+        poll.assertIsSatisfied();
+
+        // delete the content
+        fluentTemplate()
+                .to("aws2-s3://mycamel?autoCreateBucket=true")
+                .withHeader(AWS2S3Constants.KEY, "hello.txt.gz")
+                .withHeader(AWS2S3Constants.S3_OPERATION, AWS2S3Operations.deleteObject)
+                .request();
+    }
+
+    @Override
+    protected RouteBuilder createRouteBuilder() throws Exception {
+        return new RouteBuilder() {
+            @Override
+            public void configure() throws Exception {
+                from("direct:putObject")
+                        .marshal().gzipDeflater()
+                        .to("aws2-s3://mycamel?autoCreateBucket=true");
+                from("direct:getObject")
+                        .to("aws2-s3://mycamel?autoCreateBucket=true&deleteAfterRead=false&includeBody=true")
+                        .unmarshal().gzipDeflater();
+                from("aws2-s3://mycamel?autoCreateBucket=true&deleteAfterRead=false&includeBody=true&prefix=hello.txt.gz")
+                        .unmarshal().gzipDeflater()
+                        .to("mock:poll");
+            }
+        };
+    }
+}
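
A closing note on the test: the gzipDeflater() marshal/unmarshal calls come from the camel-zip-deflater data format, which appears to be why the pom.xml change above adds that artifact with test scope; the endpoint change itself only guarantees that the included body arrives as unmodified bytes and leaves decompression to the route.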
