http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f9c346e/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/TestS3A.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/TestS3A.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/TestS3A.java
deleted file mode 100644
index a22dd28..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/TestS3A.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.fs.s3a.yarn;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CreateFlag;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileContext;
-import org.apache.hadoop.fs.FsStatus;
-import org.apache.hadoop.fs.Path;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.Timeout;
-
-import java.util.EnumSet;
-import org.apache.hadoop.fs.s3a.S3ATestUtils;
-
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-public class TestS3A {
-  private FileContext fc;
-
-  @Rule
-  public final Timeout testTimeout = new Timeout(90000);
-
-  @Before
-  public void setUp() throws Exception {
-    Configuration conf = new Configuration();
-    fc = S3ATestUtils.createTestFileContext(conf);
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    if (fc != null) {
-      fc.delete(getTestPath(), true);
-    }
-  }
-
-  protected Path getTestPath() {
-    return new Path("/tests3afc");
-  }
-
-  @Test
-  public void testS3AStatus() throws Exception {
-    FsStatus fsStatus = fc.getFsStatus(null);
-    assertNotNull(fsStatus);
-    assertTrue("Used capacity should not be negative: " + fsStatus.getUsed(),
-        fsStatus.getUsed() >= 0);
-    assertTrue("Remaining capacity should not be negative: " + fsStatus
-            .getRemaining(),
-        fsStatus.getRemaining() >= 0);
-    assertTrue("Capacity should not be negative: " + fsStatus.getCapacity(),
-        fsStatus.getCapacity() >= 0);
-  }
-
-  @Test
-  public void testS3ACreateFileInSubDir() throws Exception {
-    Path dirPath = getTestPath();
-    fc.mkdir(dirPath, FileContext.DIR_DEFAULT_PERM, true);
-    Path filePath = new Path(dirPath, "file");
-    try (FSDataOutputStream file = fc.create(filePath, EnumSet.of(CreateFlag
-        .CREATE))) {
-      file.write(666);
-    }
-  }
-}

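For context, S3ATestUtils itself is not part of this diff; by convention it resolves the test filesystem from the test.fs.s3a.name property and skips the test when no bucket is configured. A minimal standalone sketch of that lookup, with a hypothetical helper class name (an assumption about the utility's behavior, not its actual code):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileContext;
    import org.junit.Assume;

    public final class TestFileContextSketch {
      private TestFileContextSketch() {
      }

      // Resolve the test FileContext from configuration; skip the test
      // (via JUnit Assume) rather than fail when no bucket is configured.
      public static FileContext create(Configuration conf) throws Exception {
        String fsName = conf.getTrimmed("test.fs.s3a.name", "");
        Assume.assumeTrue("test.fs.s3a.name is unset", !fsName.isEmpty());
        return FileContext.getFileContext(URI.create(fsName), conf);
      }
    }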
http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f9c346e/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/TestS3AMiniYarnCluster.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/TestS3AMiniYarnCluster.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/TestS3AMiniYarnCluster.java
deleted file mode 100644
index 990d79f..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/TestS3AMiniYarnCluster.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.fs.s3a.yarn;
-
-import java.io.IOException;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.examples.WordCount;
-import org.apache.hadoop.fs.CreateFlag;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileContext;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.s3a.S3AFileSystem;
-import org.apache.hadoop.fs.s3a.S3ATestUtils;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.MiniYARNCluster;
-
-import org.junit.After;
-import static org.junit.Assert.assertTrue;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-
-public class TestS3AMiniYarnCluster {
-
-  private final Configuration conf = new YarnConfiguration();
-  private S3AFileSystem fs;
-  private MiniYARNCluster yarnCluster;
-  private final String rootPath = "/tests/MiniClusterWordCount/";
-
-  @Before
-  public void beforeTest() throws IOException {
-    fs = S3ATestUtils.createTestFileSystem(conf);
-    fs.mkdirs(new Path(rootPath + "input/"));
-
-    yarnCluster = new MiniYARNCluster("MiniClusterWordCount", // testName
-            1, // number of node managers
-            1, // number of local dirs per node manager
-            1); // number of log dirs per node manager
-    yarnCluster.init(conf);
-    yarnCluster.start();
-  }
-
-  @After
-  public void afterTest() throws IOException {
-    fs.delete(new Path(rootPath), true);
-    yarnCluster.stop();
-  }
-
-  @Test
-  public void testWithMiniCluster() throws Exception {
-    Path input = new Path(rootPath + "input/in.txt");
-    input = input.makeQualified(fs.getUri(), fs.getWorkingDirectory());
-    Path output = new Path(rootPath + "output/");
-    output = output.makeQualified(fs.getUri(), fs.getWorkingDirectory());
-
-    writeStringToFile(input, "first line\nsecond line\nthird line");
-
-    Job job = Job.getInstance(conf, "word count");
-    job.setJarByClass(WordCount.class);
-    job.setMapperClass(WordCount.TokenizerMapper.class);
-    job.setCombinerClass(WordCount.IntSumReducer.class);
-    job.setReducerClass(WordCount.IntSumReducer.class);
-    job.setOutputKeyClass(Text.class);
-    job.setOutputValueClass(IntWritable.class);
-    FileInputFormat.addInputPath(job, input);
-    FileOutputFormat.setOutputPath(job, output);
-
-    int exitCode = (job.waitForCompletion(true) ? 0 : 1);
-    assertEquals("Returned error code.", 0, exitCode);
-
-    assertTrue(fs.exists(new Path(output, "_SUCCESS")));
-    String outputAsStr = readStringFromFile(new Path(output, "part-r-00000"));
-    Map<String, Integer> resAsMap = getResultAsMap(outputAsStr);
-
-    assertEquals(4, resAsMap.size());
-    assertEquals(1, (int) resAsMap.get("first"));
-    assertEquals(1, (int) resAsMap.get("second"));
-    assertEquals(1, (int) resAsMap.get("third"));
-    assertEquals(3, (int) resAsMap.get("line"));
-  }
-
-  /**
-   * helper method
-   */
-  private Map<String, Integer> getResultAsMap(String outputAsStr) throws IOException {
-    Map<String, Integer> result = new HashMap<>();
-    for (String line : outputAsStr.split("\n")) {
-      String[] tokens = line.split("\t");
-      result.put(tokens[0], Integer.parseInt(tokens[1]));
-    }
-    return result;
-  }
-
-  /**
-   * helper method
-   */
-  private void writeStringToFile(Path path, String string) throws IOException {
-    FileContext fc = S3ATestUtils.createTestFileContext(conf);
-    try (FSDataOutputStream file = fc.create(path,
-            EnumSet.of(CreateFlag.CREATE))) {
-      file.write(string.getBytes());
-    }
-  }
-
-  /**
-   * helper method
-   */
-  private String readStringFromFile(Path path) {
-    try (FSDataInputStream in = fs.open(path)) {
-      long bytesLen = fs.getFileStatus(path).getLen();
-      byte[] buffer = new byte[(int) bytesLen];
-      IOUtils.readFully(in, buffer, 0, buffer.length);
-      return new String(buffer);
-    } catch (IOException e) {
-      throw new RuntimeException("Failed to read from [" + path + "]", e);
-    }
-  }
-
-}

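The getResultAsMap() helper above depends on WordCount's reducer output format: TextOutputFormat writes one tab-separated "word<TAB>count" pair per line. A self-contained sketch of that parse, using a sample string assumed to match what part-r-00000 should contain for the test input (illustrative, not taken from a real job run):

    import java.util.HashMap;
    import java.util.Map;

    public final class WordCountOutputParseSketch {
      public static void main(String[] args) {
        // Expected reducer output for the input
        // "first line\nsecond line\nthird line".
        String partFile = "first\t1\nline\t3\nsecond\t1\nthird\t1";
        Map<String, Integer> counts = new HashMap<>();
        for (String line : partFile.split("\n")) {
          String[] kv = line.split("\t", 2);
          counts.put(kv[0], Integer.parseInt(kv[1]));
        }
        // Mirrors the assertions in testWithMiniCluster().
        System.out.println(counts.size() == 4 && counts.get("line") == 3);
      }
    }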
http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f9c346e/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestInMemoryNativeS3FileSystemContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestInMemoryNativeS3FileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestInMemoryNativeS3FileSystemContract.java
new file mode 100644
index 0000000..adbf950
--- /dev/null
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestInMemoryNativeS3FileSystemContract.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3native;
+
+import java.io.IOException;
+
+/**
+ * S3N basic contract tests through mock in-memory S3 implementation.
+ */
+public class ITestInMemoryNativeS3FileSystemContract
+    extends NativeS3FileSystemContractBaseTest {
+
+  @Override
+  NativeFileSystemStore getNativeFileSystemStore() throws IOException {
+    return new InMemoryNativeFileSystemStore();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f9c346e/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeFileSystemStore.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeFileSystemStore.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeFileSystemStore.java
new file mode 100644
index 0000000..cfe622c
--- /dev/null
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeFileSystemStore.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3native;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+
+import static org.junit.Assert.*;
+import static org.junit.Assume.*;
+
+import org.junit.Before;
+import org.junit.After;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.security.DigestInputStream;
+import java.security.DigestOutputStream;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+
+/**
+ * S3N tests through live S3 service.
+ */
+public class ITestJets3tNativeFileSystemStore {
+  private Configuration conf;
+  private Jets3tNativeFileSystemStore store;
+  private NativeS3FileSystem fs;
+
+  @Before
+  public void setUp() throws Exception {
+    conf = new Configuration();
+    store = new Jets3tNativeFileSystemStore();
+    fs = new NativeS3FileSystem(store);
+    conf.setBoolean("fs.s3n.multipart.uploads.enabled", true);
+    conf.setLong("fs.s3n.multipart.uploads.block.size", 64 * 1024 * 1024);
+    fs.initialize(URI.create(conf.get("test.fs.s3n.name")), conf);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    try {
+      store.purge("test");
+    } catch (Exception e) {}
+  }
+
+  @BeforeClass
+  public static void checkSettings() throws Exception {
+    Configuration conf = new Configuration();
+    assumeNotNull(conf.get("fs.s3n.awsAccessKeyId"));
+    assumeNotNull(conf.get("fs.s3n.awsSecretAccessKey"));
+    assumeNotNull(conf.get("test.fs.s3n.name"));
+  }
+
+  protected void writeRenameReadCompare(Path path, long len)
+      throws IOException, NoSuchAlgorithmException {
+    // If len > fs.s3n.multipart.uploads.block.size,
+    // we'll use a multipart upload copy
+    MessageDigest digest = MessageDigest.getInstance("MD5");
+    OutputStream out = new BufferedOutputStream(
+        new DigestOutputStream(fs.create(path, false), digest));
+    for (long i = 0; i < len; i++) {
+      out.write('Q');
+    }
+    out.flush();
+    out.close();
+
+    assertTrue("Exists", fs.exists(path));
+
+    // Depending on if this file is over 5 GB or not,
+    // rename will cause a multipart upload copy
+    Path copyPath = path.suffix(".copy");
+    fs.rename(path, copyPath);
+
+    assertTrue("Copy exists", fs.exists(copyPath));
+
+    // Download file from S3 and compare the digest against the original
+    MessageDigest digest2 = MessageDigest.getInstance("MD5");
+    InputStream in = new BufferedInputStream(
+        new DigestInputStream(fs.open(copyPath), digest2));
+    long copyLen = 0;
+    while (in.read() != -1) {
+      copyLen++;
+    }
+    in.close();
+
+    assertEquals("Copy length matches original", len, copyLen);
+    assertArrayEquals("Digests match", digest.digest(), digest2.digest());
+  }
+
+  @Test
+  public void testSmallUpload() throws IOException, NoSuchAlgorithmException {
+    // Regular upload, regular copy
+    writeRenameReadCompare(new Path("/test/small"), 16384);
+  }
+
+  @Test
+  public void testMediumUpload() throws IOException, NoSuchAlgorithmException {
+    // Multipart upload, regular copy
+    writeRenameReadCompare(new Path("/test/medium"), 33554432);    // 32 MB
+  }
+
+  /*
+  Enable Multipart upload to run this test
+  @Test
+  public void testExtraLargeUpload()
+      throws IOException, NoSuchAlgorithmException {
+    // Multipart upload, multipart copy
+    writeRenameReadCompare(new Path("/test/xlarge"), 5368709121L); // 5GB+1byte
+  }
+  */
+}
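The writeRenameReadCompare() helper above hashes the data twice, once while writing and once while reading the renamed copy back, then compares lengths and MD5 digests. The same round-trip check can be exercised with only JDK classes; a minimal local-file sketch (illustrative, not part of this patch):

    import java.io.*;
    import java.security.DigestInputStream;
    import java.security.DigestOutputStream;
    import java.security.MessageDigest;
    import java.util.Arrays;

    public final class DigestRoundTripSketch {
      public static void main(String[] args) throws Exception {
        File f = File.createTempFile("digest-sketch", ".dat");
        f.deleteOnExit();

        // Write 16 KB of 'Q' bytes, hashing as we go.
        MessageDigest writeDigest = MessageDigest.getInstance("MD5");
        try (OutputStream out = new BufferedOutputStream(
            new DigestOutputStream(new FileOutputStream(f), writeDigest))) {
          for (int i = 0; i < 16384; i++) {
            out.write('Q');
          }
        }

        // Read back byte by byte, hashing again.
        MessageDigest readDigest = MessageDigest.getInstance("MD5");
        long len = 0;
        try (InputStream in = new BufferedInputStream(
            new DigestInputStream(new FileInputStream(f), readDigest))) {
          while (in.read() != -1) {
            len++;
          }
        }

        System.out.println("bytes read: " + len);
        System.out.println("digests match: "
            + Arrays.equals(writeDigest.digest(), readDigest.digest()));
      }
    }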

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f9c346e/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeS3FileSystemContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeS3FileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeS3FileSystemContract.java
new file mode 100644
index 0000000..e51eaf6
--- /dev/null
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/ITestJets3tNativeS3FileSystemContract.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3native;
+
+import java.io.IOException;
+
+/**
+ * S3N basic contract tests through live S3 service.
+ */
+public class ITestJets3tNativeS3FileSystemContract
+    extends NativeS3FileSystemContractBaseTest {
+
+  @Override
+  NativeFileSystemStore getNativeFileSystemStore() throws IOException {
+    return new Jets3tNativeFileSystemStore();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f9c346e/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestInMemoryNativeS3FileSystemContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestInMemoryNativeS3FileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestInMemoryNativeS3FileSystemContract.java
deleted file mode 100644
index 664d39e..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestInMemoryNativeS3FileSystemContract.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3native;
-
-import java.io.IOException;
-
-public class TestInMemoryNativeS3FileSystemContract
-  extends NativeS3FileSystemContractBaseTest {
-
-  @Override
-  NativeFileSystemStore getNativeFileSystemStore() throws IOException {
-    return new InMemoryNativeFileSystemStore();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f9c346e/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestJets3tNativeFileSystemStore.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestJets3tNativeFileSystemStore.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestJets3tNativeFileSystemStore.java
deleted file mode 100644
index dbd476e..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestJets3tNativeFileSystemStore.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3native;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-
-import static org.junit.Assert.*;
-import static org.junit.Assume.*;
-
-import org.junit.Before;
-import org.junit.After;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.net.URI;
-import java.security.DigestInputStream;
-import java.security.DigestOutputStream;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-
-
-public class TestJets3tNativeFileSystemStore {
-  private Configuration conf;
-  private Jets3tNativeFileSystemStore store;
-  private NativeS3FileSystem fs;
-
-  @Before
-  public void setUp() throws Exception {
-    conf = new Configuration();
-    store = new Jets3tNativeFileSystemStore();
-    fs = new NativeS3FileSystem(store);
-    conf.setBoolean("fs.s3n.multipart.uploads.enabled", true);
-    conf.setLong("fs.s3n.multipart.uploads.block.size", 64 * 1024 * 1024);
-    fs.initialize(URI.create(conf.get("test.fs.s3n.name")), conf);
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    try {
-      store.purge("test");
-    } catch (Exception e) {}
-  }
-
-  @BeforeClass
-  public static void checkSettings() throws Exception {
-    Configuration conf = new Configuration();
-    assumeNotNull(conf.get("fs.s3n.awsAccessKeyId"));
-    assumeNotNull(conf.get("fs.s3n.awsSecretAccessKey"));
-    assumeNotNull(conf.get("test.fs.s3n.name"));
-  }
-
-  protected void writeRenameReadCompare(Path path, long len)
-      throws IOException, NoSuchAlgorithmException {
-    // If len > fs.s3n.multipart.uploads.block.size,
-    // we'll use a multipart upload copy
-    MessageDigest digest = MessageDigest.getInstance("MD5");
-    OutputStream out = new BufferedOutputStream(
-        new DigestOutputStream(fs.create(path, false), digest));
-    for (long i = 0; i < len; i++) {
-      out.write('Q');
-    }
-    out.flush();
-    out.close();
-
-    assertTrue("Exists", fs.exists(path));
-
-    // Depending on if this file is over 5 GB or not,
-    // rename will cause a multipart upload copy
-    Path copyPath = path.suffix(".copy");
-    fs.rename(path, copyPath);
-
-    assertTrue("Copy exists", fs.exists(copyPath));
-
-    // Download file from S3 and compare the digest against the original
-    MessageDigest digest2 = MessageDigest.getInstance("MD5");
-    InputStream in = new BufferedInputStream(
-        new DigestInputStream(fs.open(copyPath), digest2));
-    long copyLen = 0;
-    while (in.read() != -1) {copyLen++;}
-    in.close();
-
-    assertEquals("Copy length matches original", len, copyLen);
-    assertArrayEquals("Digests match", digest.digest(), digest2.digest());
-  }
-
-  @Test
-  public void testSmallUpload() throws IOException, NoSuchAlgorithmException {
-    // Regular upload, regular copy
-    writeRenameReadCompare(new Path("/test/small"), 16384);
-  }
-
-  @Test
-  public void testMediumUpload() throws IOException, NoSuchAlgorithmException {
-    // Multipart upload, regular copy
-    writeRenameReadCompare(new Path("/test/medium"), 33554432);    // 32 MB
-  }
-
-  /*
-  Enable Multipart upload to run this test
-  @Test
-  public void testExtraLargeUpload()
-      throws IOException, NoSuchAlgorithmException {
-    // Multipart upload, multipart copy
-    writeRenameReadCompare(new Path("/test/xlarge"), 5368709121L); // 5GB+1byte
-  }
-  */
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f9c346e/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestJets3tNativeS3FileSystemContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestJets3tNativeS3FileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestJets3tNativeS3FileSystemContract.java
deleted file mode 100644
index 42d6f06..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestJets3tNativeS3FileSystemContract.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3native;
-
-import java.io.IOException;
-
-public class TestJets3tNativeS3FileSystemContract
-  extends NativeS3FileSystemContractBaseTest {
-
-  @Override
-  NativeFileSystemStore getNativeFileSystemStore() throws IOException {
-    return new Jets3tNativeFileSystemStore();
-  }
-}

