Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java?rev=1544664&r1=1544663&r2=1544664&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java Fri Nov 22 20:07:10 2013
@@ -17,12 +17,33 @@
  */
 package org.apache.hadoop.hdfs.server.common;
 
-import com.google.common.base.Strings;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.net.InetSocketAddress;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.jsp.JspWriter;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
@@ -48,20 +69,7 @@ import org.mockito.stubbing.Answer;
 import org.xml.sax.InputSource;
 import org.xml.sax.SAXException;
 
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.jsp.JspWriter;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import java.io.IOException;
-import java.io.StringReader;
-import java.net.InetSocketAddress;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import com.google.common.base.Strings;
 
 
 public class TestJspHelper {
@@ -459,8 +467,8 @@ public class TestJspHelper {
     DatanodeDescriptor dnDesc2 = new DatanodeDescriptor(dnId2, "rack2");
 
     // Update the DatanodeDescriptors with their attached storages.
-    dnDesc1.updateStorage(new DatanodeStorage("dnStorage1"));
-    dnDesc2.updateStorage(new DatanodeStorage("dnStorage2"));
+    BlockManagerTestUtil.updateStorage(dnDesc1, new DatanodeStorage("dnStorage1"));
+    BlockManagerTestUtil.updateStorage(dnDesc2, new DatanodeStorage("dnStorage2"));
 
     StorageReport[] report1 = new StorageReport[] {
         new StorageReport("dnStorage1", false, 1024, 100, 924, 100)

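The change above replaces the direct dnDesc.updateStorage(...) calls with a static helper, which points to DatanodeDescriptor.updateStorage no longer being public outside its package. A minimal sketch of that test-utility pattern, assuming the helper simply forwards to the now package-private method and that the return type is DatanodeStorageInfo (neither is shown in this diff):

    // Hypothetical sketch; the real BlockManagerTestUtil may differ.
    package org.apache.hadoop.hdfs.server.blockmanagement;

    import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;

    public class BlockManagerTestUtil {
      // Lives in the same package as DatanodeDescriptor, so it can reach a
      // method that is assumed to be package-private after this change.
      public static DatanodeStorageInfo updateStorage(
          DatanodeDescriptor dn, DatanodeStorage s) {
        return dn.updateStorage(s);
      }
    }

Keeping the helper inside org.apache.hadoop.hdfs.server.blockmanagement is what lets tests in other packages, such as TestJspHelper here, keep driving the method.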
Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java?rev=1544664&r1=1544663&r2=1544664&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java Fri Nov 22 20:07:10 2013
@@ -324,7 +324,7 @@ public class SimulatedFSDataset implemen
   private static class SimulatedStorage {
     private Map<String, SimulatedBPStorage> map = 
       new HashMap<String, SimulatedBPStorage>();
-    private final String storageUuid = "SimulatedStroage-" + DatanodeStorage.newStorageID();
+    private final String storageUuid = "SimulatedStroage-" + DatanodeStorage.generateUuid();
 
     private final long capacity;  // in bytes
     
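The rename from newStorageID() to generateUuid() suggests storage IDs are now plain UUIDs behind a fixed prefix; the method body is not part of this diff, so the following is only a guess at its shape:

    import java.util.UUID;

    class GenerateUuidSketch {
      // Assumed shape of DatanodeStorage.generateUuid(); the "DS-" prefix
      // is a guess, not confirmed by this diff.
      static String generateUuid() {
        return "DS-" + UUID.randomUUID();
      }

      public static void main(String[] args) {
        System.out.println(generateUuid());
      }
    }
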
@@ -470,8 +470,7 @@ public class SimulatedFSDataset implemen
     }
   }
 
-  @Override
-  public synchronized BlockListAsLongs getBlockReport(String bpid) {
+  synchronized BlockListAsLongs getBlockReport(String bpid) {
     final List<Block> blocks = new ArrayList<Block>();
     final Map<Block, BInfo> map = blockMap.get(bpid);
     if (map != null) {
@@ -684,7 +683,7 @@ public class SimulatedFSDataset implemen
   }
 
   @Override // FsDatasetSpi
-  public Replica recoverClose(ExtendedBlock b, long newGS, long expectedBlockLen)
+  public String recoverClose(ExtendedBlock b, long newGS, long expectedBlockLen)
       throws IOException {
     final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
     BInfo binfo = map.get(b.getLocalBlock());
@@ -698,7 +697,7 @@ public class SimulatedFSDataset implemen
     map.remove(b.getLocalBlock());
     binfo.theBlock.setGenerationStamp(newGS);
     map.put(binfo.theBlock, binfo);
-    return binfo;
+    return binfo.getStorageUuid();
   }
   
   @Override // FsDatasetSpi

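The recoverClose change above swaps the Replica return value for the replica's storage UUID, in line with the per-storage reporting that HDFS-2832 introduces. A hedged caller-side sketch, with the dataset interface stubbed down to the one method from the diff (the stub types and values are illustrative only):

    import java.io.IOException;

    // Trimmed-down stand-ins; the real FsDatasetSpi and ExtendedBlock are richer.
    class ExtendedBlockStub { }

    interface RecoverCloseApi {
      // Post-change signature from the diff: returns the storage UUID.
      String recoverClose(ExtendedBlockStub b, long newGS, long expectedBlockLen)
          throws IOException;
    }

    class RecoverCloseCaller {
      public static void main(String[] args) throws IOException {
        RecoverCloseApi dataset = (b, newGS, len) -> "dnStorage1"; // stub
        String storageUuid =
            dataset.recoverClose(new ExtendedBlockStub(), 1001L, 1024L);
        // The caller learns which storage holds the recovered replica by
        // UUID, instead of inspecting a Replica object as before.
        System.out.println(storageUuid);
      }
    }

Returning only the UUID keeps callers from depending on Replica internals; presumably they just hand the storage identity back to the NameNode.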
Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java?rev=1544664&r1=1544663&r2=1544664&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java Fri Nov 22 20:07:10 2013
@@ -35,7 +35,6 @@ import static org.mockito.Mockito.when;
 import java.io.File;
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -46,9 +45,18 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
-import org.apache.hadoop.hdfs.*;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSNNTopology;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
@@ -73,7 +81,10 @@ import org.apache.hadoop.test.GenericTes
 import org.apache.hadoop.util.Daemon;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.log4j.Level;
-import org.junit.*;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
@@ -125,7 +136,7 @@ public class TestBlockRecovery {
     File dataDir = new File(DATA_DIR);
     FileUtil.fullyDelete(dataDir);
     dataDir.mkdirs();
-    StorageLocation location = new StorageLocation(new URI(dataDir.getPath()));
+    StorageLocation location = StorageLocation.parse(dataDir.getPath());
     locations.add(location);
     final DatanodeProtocolClientSideTranslatorPB namenode =
       mock(DatanodeProtocolClientSideTranslatorPB.class);

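StorageLocation.parse(dataDir.getPath()) replaces the URI-based constructor, presumably because a bare filesystem path is not a well-formed URI and because the configured value may carry a [storageType] prefix (see the TestDataDirs changes below). A minimal sketch of such a parser; the regex and the file-URI normalization are assumptions, not the real implementation:

    import java.io.File;
    import java.net.URI;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    class StorageLocationParseSketch {
      // Assumed format: optional "[type]" prefix followed by a path.
      private static final Pattern REGEX = Pattern.compile("^\\[(\\w*)\\](.+)$");

      static URI parse(String rawLocation) {
        Matcher m = REGEX.matcher(rawLocation);
        String path = m.matches() ? m.group(2) : rawLocation;
        // Normalize a bare path to a file: URI, which a plain
        // new URI(path) constructor would not do.
        return new File(path).toURI();
      }

      public static void main(String[] args) {
        System.out.println(parse("/dir0"));      // bare path, no prefix
        System.out.println(parse("[sSd]/dir2")); // prefix stripped here
      }
    }
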
Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java?rev=1544664&r1=1544663&r2=1544664&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java Fri Nov 22 20:07:10 2013
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hdfs.server.datanode;
 
 import java.io.*;
-import java.net.URI;
 import java.util.*;
 
 import org.apache.hadoop.conf.Configuration;
@@ -40,7 +39,7 @@ public class TestDataDirs {
   @Test (timeout = 30000)
   public void testDataDirParsing() throws Throwable {
     Configuration conf = new Configuration();
-    ArrayList<StorageLocation> locations;
+    List<StorageLocation> locations;
     File dir0 = new File("/dir0");
     File dir1 = new File("/dir1");
     File dir2 = new File("/dir2");
@@ -50,7 +49,7 @@ public class TestDataDirs {
     // type is not case-sensitive
     String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3";
     conf.set(DFS_DATANODE_DATA_DIR_KEY, locations1);
-    locations = new ArrayList<StorageLocation>(DataNode.getStorageLocations(conf));
+    locations = DataNode.getStorageLocations(conf);
     assertThat(locations.size(), is(4));
     assertThat(locations.get(0).getStorageType(), is(StorageType.DISK));
     assertThat(locations.get(0).getUri(), is(dir0.toURI()));
@@ -61,23 +60,21 @@ public class TestDataDirs {
     assertThat(locations.get(3).getStorageType(), is(StorageType.DISK));
     assertThat(locations.get(3).getUri(), is(dir3.toURI()));
 
-    // Verify that an unrecognized storage type is ignored.
+    // Verify that an unrecognized storage type results in an exception.
     String locations2 = "[BadMediaType]/dir0,[ssd]/dir1,[disk]/dir2";
     conf.set(DFS_DATANODE_DATA_DIR_KEY, locations2);
-    locations = new ArrayList<StorageLocation>(DataNode.getStorageLocations(conf));
-    assertThat(locations.size(), is(3));
-    assertThat(locations.get(0).getStorageType(), is(StorageType.DISK));
-    assertThat(locations.get(0).getUri(), is(dir0.toURI()));
-    assertThat(locations.get(1).getStorageType(), is(StorageType.SSD));
-    assertThat(locations.get(1).getUri(), is(dir1.toURI()));
-    assertThat(locations.get(2).getStorageType(), is(StorageType.DISK));
-    assertThat(locations.get(2).getUri(), is(dir2.toURI()));
+    try {
+      locations = DataNode.getStorageLocations(conf);
+      fail();
+    } catch(IllegalArgumentException iae) {
+      DataNode.LOG.info("The exception is expected.", iae);
+    }
 
     // Assert that a string with no storage type specified is
     // correctly parsed and the default storage type is picked up.
     String locations3 = "/dir0,/dir1";
     conf.set(DFS_DATANODE_DATA_DIR_KEY, locations3);
-    locations = new ArrayList<StorageLocation>(DataNode.getStorageLocations(conf));
+    locations = DataNode.getStorageLocations(conf);
     assertThat(locations.size(), is(2));
     assertThat(locations.get(0).getStorageType(), is(StorageType.DISK));
     assertThat(locations.get(0).getUri(), is(dir0.toURI()));
@@ -94,11 +91,11 @@ public class TestDataDirs {
     LocalFileSystem fs = mock(LocalFileSystem.class);
     AbstractList<StorageLocation> locations = new ArrayList<StorageLocation>();
 
-    locations.add(new StorageLocation(new URI("file:/p1/")));
-    locations.add(new StorageLocation(new URI("file:/p2/")));
-    locations.add(new StorageLocation(new URI("file:/p3/")));
+    locations.add(StorageLocation.parse("file:/p1/"));
+    locations.add(StorageLocation.parse("file:/p2/"));
+    locations.add(StorageLocation.parse("file:/p3/"));
 
-    ArrayList<StorageLocation> checkedLocations =
+    List<StorageLocation> checkedLocations =
         DataNode.checkStorageLocations(locations, fs, diskChecker);
     assertEquals("number of valid data dirs", 1, checkedLocations.size());
     String validDir = checkedLocations.iterator().next().getFile().getPath();

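The rewritten assertion in testDataDirParsing expects IllegalArgumentException for [BadMediaType], which is exactly what an enum lookup on the bracketed prefix would produce. A small self-contained illustration of that assumption, with the StorageType constants abbreviated:

    // Hypothetical reduction of the parsing behavior tested above.
    enum StorageTypeSketch { DISK, SSD }

    class BadMediaTypeDemo {
      public static void main(String[] args) {
        try {
          // "[BadMediaType]" from the test, upper-cased for the enum lookup.
          StorageTypeSketch t = StorageTypeSketch.valueOf("BADMEDIATYPE");
          System.out.println(t);
        } catch (IllegalArgumentException iae) {
          // After this change the DataNode propagates the failure rather
          // than silently skipping the directory.
          System.out.println("rejected: " + iae.getMessage());
        }
      }
    }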
