Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java?rev=1601992&r1=1601991&r2=1601992&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java Wed Jun 11 19:25:17 2014
@@ -22,16 +22,22 @@ import static org.apache.hadoop.fs.permi
 import static org.apache.hadoop.fs.permission.FsAction.*;
 import static org.apache.hadoop.hdfs.server.namenode.AclTestHelpers.*;
 
+import java.io.IOException;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.XAttr;
+import org.apache.hadoop.fs.XAttrCodec;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.server.namenode.INodeId;
@@ -186,6 +192,48 @@ public class TestJsonUtil {
         JsonUtil.toJsonString(aclStatusBuilder.build()));
   }
 
+
+  @Test
+  public void testToJsonFromXAttrs() throws IOException {
+    String jsonString =
+        "{\"XAttrs\":[{\"name\":\"user.a1\",\"value\":\"0x313233\"}," +
+        "{\"name\":\"user.a2\",\"value\":\"0x313131\"}]}";
+    XAttr xAttr1 = (new XAttr.Builder()).setNameSpace(XAttr.NameSpace.USER).
+        setName("a1").setValue(XAttrCodec.decodeValue("0x313233")).build();
+    XAttr xAttr2 = (new XAttr.Builder()).setNameSpace(XAttr.NameSpace.USER).
+        setName("a2").setValue(XAttrCodec.decodeValue("0x313131")).build();
+    List<XAttr> xAttrs = Lists.newArrayList();
+    xAttrs.add(xAttr1);
+    xAttrs.add(xAttr2);
+
+    Assert.assertEquals(jsonString, JsonUtil.toJsonString(xAttrs,
+        XAttrCodec.HEX));
+  }
+
+  @Test
+  public void testToXAttrMap() throws IOException {
+    String jsonString =
+        "{\"XAttrs\":[{\"name\":\"user.a1\",\"value\":\"0x313233\"}," +
+        "{\"name\":\"user.a2\",\"value\":\"0x313131\"}]}";
+    Map<?, ?> json = (Map<?, ?>)JSON.parse(jsonString);
+    XAttr xAttr1 = (new XAttr.Builder()).setNameSpace(XAttr.NameSpace.USER).
+        setName("a1").setValue(XAttrCodec.decodeValue("0x313233")).build();
+    XAttr xAttr2 = (new XAttr.Builder()).setNameSpace(XAttr.NameSpace.USER).
+        setName("a2").setValue(XAttrCodec.decodeValue("0x313131")).build();
+    List<XAttr> xAttrs = Lists.newArrayList();
+    xAttrs.add(xAttr1);
+    xAttrs.add(xAttr2);
+    Map<String, byte[]> xAttrMap = XAttrHelper.buildXAttrMap(xAttrs);
+    Map<String, byte[]> parsedXAttrMap = JsonUtil.toXAttrs(json);
+
+    Assert.assertEquals(xAttrMap.size(), parsedXAttrMap.size());
+    Iterator<Entry<String, byte[]>> iter = xAttrMap.entrySet().iterator();
+    while(iter.hasNext()) {
+      Entry<String, byte[]> entry = iter.next();
+      Assert.assertArrayEquals(entry.getValue(),
+          parsedXAttrMap.get(entry.getKey()));
+    }
+  }
 
   private void checkDecodeFailure(Map<String, Object> map) {
     try {
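
[Editorial note: the tests above round-trip xattr values through the HEX codec, where "0x313233" is the hex encoding of the ASCII bytes for "123". The following is a minimal standalone sketch of that encoding, using plain JDK calls only; HexXAttrValueDemo, decodeHex, and encodeHex are illustrative names, not the Hadoop XAttrCodec implementation.]

    import java.nio.charset.StandardCharsets;

    public class HexXAttrValueDemo {
      // Decode a "0x..."-prefixed hex string into raw bytes.
      static byte[] decodeHex(String encoded) {
        String hex = encoded.substring(2); // drop the "0x" prefix
        byte[] out = new byte[hex.length() / 2];
        for (int i = 0; i < out.length; i++) {
          out[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
        }
        return out;
      }

      // Encode bytes back into the "0x..." form used in the JSON above.
      static String encodeHex(byte[] value) {
        StringBuilder sb = new StringBuilder("0x");
        for (byte b : value) {
          sb.append(String.format("%02x", b & 0xff));
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        byte[] v = decodeHex("0x313233");
        System.out.println(new String(v, StandardCharsets.US_ASCII)); // prints "123"
        System.out.println(encodeHex(v));                             // prints "0x313233"
      }
    }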
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java?rev=1601992&r1=1601991&r2=1601992&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java Wed Jun 11 19:25:17 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.web.resou
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 
+import java.io.IOException;
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.List;
@@ -30,6 +31,8 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.XAttrCodec;
+import org.apache.hadoop.fs.XAttrSetFlag;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -349,6 +352,43 @@ public class TestParam {
   }
 
   @Test
+  public void testXAttrNameParam() {
+    final XAttrNameParam p = new XAttrNameParam("user.a1");
+    Assert.assertEquals(p.getXAttrName(), "user.a1");
+    try {
+      new XAttrNameParam("a1");
+      Assert.fail();
+    } catch (IllegalArgumentException e) {
+      LOG.info("EXPECTED: " + e);
+    }
+  }
+
+  @Test
+  public void testXAttrValueParam() throws IOException {
+    final XAttrValueParam p = new XAttrValueParam("0x313233");
+    Assert.assertArrayEquals(p.getXAttrValue(),
+        XAttrCodec.decodeValue("0x313233"));
+  }
+
+  @Test
+  public void testXAttrEncodingParam() {
+    final XAttrEncodingParam p = new XAttrEncodingParam(XAttrCodec.BASE64);
+    Assert.assertEquals(p.getEncoding(), XAttrCodec.BASE64);
+    final XAttrEncodingParam p1 = new XAttrEncodingParam(p.getValueString());
+    Assert.assertEquals(p1.getEncoding(), XAttrCodec.BASE64);
+  }
+
+  @Test
+  public void testXAttrSetFlagParam() {
+    EnumSet<XAttrSetFlag> flag = EnumSet.of(
+        XAttrSetFlag.CREATE, XAttrSetFlag.REPLACE);
+    final XAttrSetFlagParam p = new XAttrSetFlagParam(flag);
+    Assert.assertEquals(p.getFlag(), flag);
+    final XAttrSetFlagParam p1 = new XAttrSetFlagParam(p.getValueString());
+    Assert.assertEquals(p1.getFlag(), flag);
+  }
+
+  @Test
   public void testRenameOptionSetParam() {
     final RenameOptionSetParam p = new RenameOptionSetParam(
         Options.Rename.OVERWRITE, Options.Rename.NONE);

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored?rev=1601992&r1=1601991&r2=1601992&view=diff
==============================================================================
Binary files - no diff available.
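
[Editorial note: testXAttrNameParam above expects "user.a1" to be accepted and the prefix-less "a1" to be rejected with IllegalArgumentException. The sketch below mirrors that behavior in isolation; XAttrNameParamDemo and its NAME_PATTERN regex are assumptions for illustration, not the actual WebHDFS XAttrNameParam validation code.]

    import java.util.regex.Pattern;

    public class XAttrNameParamDemo {
      // Assumed pattern: one of the HDFS xattr namespaces, a dot, then a name.
      private static final Pattern NAME_PATTERN =
          Pattern.compile("^(user|trusted|security|system)\\..+");

      private final String name;

      XAttrNameParamDemo(String name) {
        if (!NAME_PATTERN.matcher(name).matches()) {
          throw new IllegalArgumentException("Invalid xattr name: " + name);
        }
        this.name = name;
      }

      public static void main(String[] args) {
        new XAttrNameParamDemo("user.a1"); // accepted
        try {
          new XAttrNameParamDemo("a1");    // rejected: no namespace prefix
        } catch (IllegalArgumentException e) {
          System.out.println("EXPECTED: " + e);
        }
      }
    }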
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored.xml?rev=1601992&r1=1601991&r2=1601992&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored.xml (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored.xml Wed Jun 11 19:25:17 2014
@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <EDITS>
-  <EDITS_VERSION>-56</EDITS_VERSION>
+  <EDITS_VERSION>-57</EDITS_VERSION>
   <RECORD>
     <OPCODE>OP_START_LOG_SEGMENT</OPCODE>
     <DATA>
@@ -938,9 +938,34 @@
     </DATA>
   </RECORD>
   <RECORD>
-    <OPCODE>OP_END_LOG_SEGMENT</OPCODE>
+    <OPCODE>OP_SET_XATTR</OPCODE>
     <DATA>
       <TXID>75</TXID>
+      <SRC>/file_concat_target</SRC>
+      <XATTR>
+        <NAMESPACE>USER</NAMESPACE>
+        <NAME>a1</NAME>
+        <VALUE>0x313233</VALUE>
+      </XATTR>
+      <RPC_CLIENTID>9b85a845-bbfa-42f6-8a16-c433614b8eb9</RPC_CLIENTID>
+      <RPC_CALLID>80</RPC_CALLID>
+    </DATA>
+  </RECORD>
+  <RECORD>
+    <OPCODE>OP_REMOVE_XATTR</OPCODE>
+    <DATA>
+      <TXID>76</TXID>
+      <SRC>/file_concat_target</SRC>
+      <XATTR>
+        <NAMESPACE>USER</NAMESPACE>
+        <NAME>a1</NAME>
+      </XATTR>
+    </DATA>
+  </RECORD>
+  <RECORD>
+    <OPCODE>OP_END_LOG_SEGMENT</OPCODE>
+    <DATA>
+      <TXID>77</TXID>
     </DATA>
   </RECORD>
 </EDITS>
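
[Editorial note: the new OP_SET_XATTR record above carries the xattr as separate NAMESPACE/NAME/VALUE elements, and the full name is the lower-cased namespace joined with the short name ("user.a1"). The following standalone sketch reads those fields back out of such a record with plain JDK DOM; SetXAttrRecordDemo is an illustrative name, not the OfflineEditsViewer code path.]

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Document;
    import org.w3c.dom.Element;

    public class SetXAttrRecordDemo {
      public static void main(String[] args) throws Exception {
        // A record shaped like the OP_SET_XATTR entry in editsStored.xml above.
        String record =
            "<RECORD><OPCODE>OP_SET_XATTR</OPCODE><DATA>"
            + "<TXID>75</TXID><SRC>/file_concat_target</SRC>"
            + "<XATTR><NAMESPACE>USER</NAMESPACE><NAME>a1</NAME>"
            + "<VALUE>0x313233</VALUE></XATTR></DATA></RECORD>";
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
            .parse(new ByteArrayInputStream(record.getBytes(StandardCharsets.UTF_8)));
        Element xattr = (Element) doc.getElementsByTagName("XATTR").item(0);
        // Full xattr name is namespace (lower-cased) + "." + short name.
        String name = xattr.getElementsByTagName("NAMESPACE").item(0).getTextContent()
            .toLowerCase() + "."
            + xattr.getElementsByTagName("NAME").item(0).getTextContent();
        String value = xattr.getElementsByTagName("VALUE").item(0).getTextContent();
        System.out.println(name + " = " + value); // prints "user.a1 = 0x313233"
      }
    }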