steveloughran commented on a change in pull request #3109:
URL: https://github.com/apache/hadoop/pull/3109#discussion_r657412668



##########
File path: 
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
##########
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import javax.net.ssl.SSLException;
+import java.io.IOException;
+import java.net.SocketException;
+import java.nio.charset.Charset;
+
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.S3ObjectInputStream;
+import org.junit.Test;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.audit.impl.NoopSpan;
+import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets;
+import org.apache.hadoop.fs.s3a.impl.ChangeDetectionPolicy;
+
+import static java.lang.Math.min;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests S3AInputStream retry behavior on read failure.
+ * These tests are for validating expected behavior of retrying the 
S3AInputStream
+ * read() and read(b, off, len), it tests that the read should reopen the 
input stream and retry
+ * the read when IOException is thrown during the read process.
+ */
+public class TestS3AInputStreamRetry extends AbstractS3AMockTest {
+
+  String input = "ab";
+
+  @Test
+  public void testInputStreamReadRetryForException() throws IOException {
+    S3AInputStream s3AInputStream = getMockedS3AInputStream();
+
+    assertEquals("'a' from the test input stream 'ab' should be the first 
character being read",
+        input.charAt(0), s3AInputStream.read());
+    assertEquals("'b' from the test input stream 'ab' should be the second 
character being read",
+        input.charAt(1), s3AInputStream.read());
+  }
+
+  @Test
+  public void testInputStreamReadRetryLengthForException() throws IOException {
+    byte[] result = new byte[input.length()];
+    S3AInputStream s3AInputStream = getMockedS3AInputStream();
+    s3AInputStream.read(result, 0, input.length());
+
+    assertArrayEquals("The read result should equals to the test input stream 
content",
+        input.getBytes(), result);
+  }
+
+  private S3AInputStream getMockedS3AInputStream() {
+    Path path = new Path("test-path");
+    String eTag = "test-etag";
+    String versionId = "test-version-id";
+    String owner = "test-owner";
+
+    S3AFileStatus s3AFileStatus = new S3AFileStatus(
+        input.length(), 0, path, input.length(), owner, eTag, versionId);
+
+    S3ObjectAttributes s3ObjectAttributes = new S3ObjectAttributes(
+        fs.getBucket(), path, fs.pathToKey(path), 
fs.getServerSideEncryptionAlgorithm(),
+        new EncryptionSecrets().getEncryptionKey(), eTag, versionId, 
input.length());
+
+    S3AReadOpContext s3AReadOpContext = fs.createReadContext(s3AFileStatus, 
S3AInputPolicy.Normal,
+        ChangeDetectionPolicy.getPolicy(fs.getConf()), 100, NoopSpan.INSTANCE);
+
+    return new S3AInputStream(s3AReadOpContext, s3ObjectAttributes, 
getMockedInputStreamCallback());
+  }
+
+  // Get mocked InputStreamCallbacks where we return mocked S3Object
+  private S3AInputStream.InputStreamCallbacks getMockedInputStreamCallback() {
+    return new S3AInputStream.InputStreamCallbacks() {
+
+      final S3Object mockedS3Object = getMockedS3Object();
+
+      @Override
+      public S3Object getObject(GetObjectRequest request) {
+        // Set s3 client to return mocked s3object with already defined read 
behavior
+        return mockedS3Object;
+      }
+
+      @Override
+      public GetObjectRequest newGetRequest(String key) {
+        return new GetObjectRequest(fs.getBucket(), key);
+      }
+
+      @Override
+      public void close() {
+      }
+    };
+  }
+
+  // Get mocked S3Object where return bad input stream on the first couple of 
getObjectContent calls
+  private S3Object getMockedS3Object() {
+    S3ObjectInputStream objectInputStreamBad1 = getMockedInputStream(true);
+    S3ObjectInputStream objectInputStreamBad2 = getMockedInputStream(true);
+    S3ObjectInputStream objectInputStreamGood = getMockedInputStream(false);
+
+    return new S3Object() {
+      final S3ObjectInputStream[] inputStreams =
+          {objectInputStreamBad1, objectInputStreamBad2, 
objectInputStreamGood};
+
+      Integer inputStreamIndex = 0;
+
+      @Override
+      public S3ObjectInputStream getObjectContent() {
+        // Set getObjectContent behavior: returns bad stream twice, and good 
stream afterwards
+        inputStreamIndex++;
+        return inputStreams[min(inputStreamIndex, inputStreams.length) - 1];
+      }
+
+      @Override
+      public ObjectMetadata getObjectMetadata() {
+        // Set getObjectMetadata behavior: returns dummy metadata
+        ObjectMetadata metadata = new ObjectMetadata();
+        metadata.setHeader("ETag", "test-etag");
+        return metadata;
+      }
+    };
+  }
+
+  // Get mocked S3ObjectInputStream where we can trigger IOException to 
simulate the read failure

Review comment:
       Add this as a /** */ javadoc comment, and add a "." at the end of the sentence to stop
some javadoc versions from failing.

##########
File path: 
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
##########
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import javax.net.ssl.SSLException;
+import java.io.IOException;
+import java.net.SocketException;
+import java.nio.charset.Charset;
+
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.S3ObjectInputStream;
+import org.junit.Test;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.audit.impl.NoopSpan;
+import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets;
+import org.apache.hadoop.fs.s3a.impl.ChangeDetectionPolicy;
+
+import static java.lang.Math.min;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests S3AInputStream retry behavior on read failure.
+ * These tests are for validating expected behavior of retrying the 
S3AInputStream
+ * read() and read(b, off, len), it tests that the read should reopen the 
input stream and retry
+ * the read when IOException is thrown during the read process.
+ */
+public class TestS3AInputStreamRetry extends AbstractS3AMockTest {
+
+  String input = "ab";
+
+  @Test
+  public void testInputStreamReadRetryForException() throws IOException {
+    S3AInputStream s3AInputStream = getMockedS3AInputStream();
+
+    assertEquals("'a' from the test input stream 'ab' should be the first 
character being read",
+        input.charAt(0), s3AInputStream.read());
+    assertEquals("'b' from the test input stream 'ab' should be the second 
character being read",
+        input.charAt(1), s3AInputStream.read());
+  }
+
+  @Test
+  public void testInputStreamReadRetryLengthForException() throws IOException {

Review comment:
       Add a readFully() version of this test case too, for completeness.

##########
File path: 
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
##########
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import javax.net.ssl.SSLException;
+import java.io.IOException;
+import java.net.SocketException;
+import java.nio.charset.Charset;
+
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.S3ObjectInputStream;
+import org.junit.Test;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.audit.impl.NoopSpan;
+import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets;
+import org.apache.hadoop.fs.s3a.impl.ChangeDetectionPolicy;
+
+import static java.lang.Math.min;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests S3AInputStream retry behavior on read failure.
+ * These tests are for validating expected behavior of retrying the 
S3AInputStream
+ * read() and read(b, off, len), it tests that the read should reopen the 
input stream and retry
+ * the read when IOException is thrown during the read process.
+ */
+public class TestS3AInputStreamRetry extends AbstractS3AMockTest {
+
+  String input = "ab";
+
+  @Test
+  public void testInputStreamReadRetryForException() throws IOException {
+    S3AInputStream s3AInputStream = getMockedS3AInputStream();
+
+    assertEquals("'a' from the test input stream 'ab' should be the first 
character being read",
+        input.charAt(0), s3AInputStream.read());
+    assertEquals("'b' from the test input stream 'ab' should be the second 
character being read",
+        input.charAt(1), s3AInputStream.read());
+  }
+
+  @Test
+  public void testInputStreamReadRetryLengthForException() throws IOException {
+    byte[] result = new byte[input.length()];
+    S3AInputStream s3AInputStream = getMockedS3AInputStream();
+    s3AInputStream.read(result, 0, input.length());
+
+    assertArrayEquals("The read result should equals to the test input stream 
content",
+        input.getBytes(), result);
+  }
+
+  private S3AInputStream getMockedS3AInputStream() {
+    Path path = new Path("test-path");
+    String eTag = "test-etag";
+    String versionId = "test-version-id";
+    String owner = "test-owner";
+
+    S3AFileStatus s3AFileStatus = new S3AFileStatus(
+        input.length(), 0, path, input.length(), owner, eTag, versionId);
+
+    S3ObjectAttributes s3ObjectAttributes = new S3ObjectAttributes(
+        fs.getBucket(), path, fs.pathToKey(path), 
fs.getServerSideEncryptionAlgorithm(),
+        new EncryptionSecrets().getEncryptionKey(), eTag, versionId, 
input.length());
+
+    S3AReadOpContext s3AReadOpContext = fs.createReadContext(s3AFileStatus, 
S3AInputPolicy.Normal,
+        ChangeDetectionPolicy.getPolicy(fs.getConf()), 100, NoopSpan.INSTANCE);
+
+    return new S3AInputStream(s3AReadOpContext, s3ObjectAttributes, 
getMockedInputStreamCallback());
+  }
+
+  // Get mocked InputStreamCallbacks where we return mocked S3Object
+  private S3AInputStream.InputStreamCallbacks getMockedInputStreamCallback() {
+    return new S3AInputStream.InputStreamCallbacks() {
+
+      final S3Object mockedS3Object = getMockedS3Object();
+
+      @Override
+      public S3Object getObject(GetObjectRequest request) {
+        // Set s3 client to return mocked s3object with already defined read 
behavior
+        return mockedS3Object;
+      }
+
+      @Override
+      public GetObjectRequest newGetRequest(String key) {
+        return new GetObjectRequest(fs.getBucket(), key);
+      }
+
+      @Override
+      public void close() {
+      }
+    };
+  }
+
+  // Get mocked S3Object where return bad input stream on the first couple of 
getObjectContent calls

Review comment:
       Add this as a /** */ javadoc comment, and add a "." at the end of the sentence to stop
some javadoc versions from failing.

##########
File path: 
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
##########
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import javax.net.ssl.SSLException;
+import java.io.IOException;
+import java.net.SocketException;
+import java.nio.charset.Charset;
+
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.S3ObjectInputStream;
+import org.junit.Test;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.audit.impl.NoopSpan;
+import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets;
+import org.apache.hadoop.fs.s3a.impl.ChangeDetectionPolicy;
+
+import static java.lang.Math.min;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests S3AInputStream retry behavior on read failure.
+ * These tests are for validating expected behavior of retrying the 
S3AInputStream
+ * read() and read(b, off, len), it tests that the read should reopen the 
input stream and retry
+ * the read when IOException is thrown during the read process.
+ */
+public class TestS3AInputStreamRetry extends AbstractS3AMockTest {
+
+  String input = "ab";
+
+  @Test
+  public void testInputStreamReadRetryForException() throws IOException {
+    S3AInputStream s3AInputStream = getMockedS3AInputStream();
+
+    assertEquals("'a' from the test input stream 'ab' should be the first 
character being read",
+        input.charAt(0), s3AInputStream.read());
+    assertEquals("'b' from the test input stream 'ab' should be the second 
character being read",
+        input.charAt(1), s3AInputStream.read());
+  }
+
+  @Test
+  public void testInputStreamReadRetryLengthForException() throws IOException {
+    byte[] result = new byte[input.length()];
+    S3AInputStream s3AInputStream = getMockedS3AInputStream();
+    s3AInputStream.read(result, 0, input.length());
+
+    assertArrayEquals("The read result should equals to the test input stream 
content",
+        input.getBytes(), result);
+  }
+
+  private S3AInputStream getMockedS3AInputStream() {
+    Path path = new Path("test-path");
+    String eTag = "test-etag";
+    String versionId = "test-version-id";
+    String owner = "test-owner";
+
+    S3AFileStatus s3AFileStatus = new S3AFileStatus(
+        input.length(), 0, path, input.length(), owner, eTag, versionId);
+
+    S3ObjectAttributes s3ObjectAttributes = new S3ObjectAttributes(
+        fs.getBucket(), path, fs.pathToKey(path), 
fs.getServerSideEncryptionAlgorithm(),
+        new EncryptionSecrets().getEncryptionKey(), eTag, versionId, 
input.length());
+
+    S3AReadOpContext s3AReadOpContext = fs.createReadContext(s3AFileStatus, 
S3AInputPolicy.Normal,
+        ChangeDetectionPolicy.getPolicy(fs.getConf()), 100, NoopSpan.INSTANCE);
+
+    return new S3AInputStream(s3AReadOpContext, s3ObjectAttributes, 
getMockedInputStreamCallback());
+  }
+
+  // Get mocked InputStreamCallbacks where we return mocked S3Object

Review comment:
       Add this as a /** */ javadoc comment, and add a "." at the end of the sentence to stop
some javadoc versions from failing.

##########
File path: 
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
##########
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import javax.net.ssl.SSLException;
+import java.io.IOException;
+import java.net.SocketException;
+import java.nio.charset.Charset;
+
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.S3ObjectInputStream;
+import org.junit.Test;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.audit.impl.NoopSpan;
+import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets;
+import org.apache.hadoop.fs.s3a.impl.ChangeDetectionPolicy;
+
+import static java.lang.Math.min;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests S3AInputStream retry behavior on read failure.
+ * These tests are for validating expected behavior of retrying the 
S3AInputStream
+ * read() and read(b, off, len), it tests that the read should reopen the 
input stream and retry
+ * the read when IOException is thrown during the read process.
+ */
+public class TestS3AInputStreamRetry extends AbstractS3AMockTest {
+
+  String input = "ab";

Review comment:
       Make the field `private static final` and capitalise its name to `INPUT` (for checkstyle).




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: common-issues-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-issues-h...@hadoop.apache.org

Reply via email to