Author: jing9
Date: Thu Jan 23 19:10:45 2014
New Revision: 1560789

URL: http://svn.apache.org/r1560789
Log:
HDFS-5824. Add a Type field in Snapshot DiffEntry's protobuf definition. 
Contributed by Jing Zhao.
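
For context, the change makes the kind of each snapshot diff explicit on disk:
the saver now tags every SnapshotDiffSection.DiffEntry with a Type, so the
loader can dispatch on that field instead of checking whether the in-memory
inode is a file or a directory. A minimal sketch of building and writing such
an entry with the generated protobuf classes from this branch (the inode id
and the output stream are illustrative only; this is not the actual saver
code):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry;
    import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type;

    public class DiffEntryTypeSketch {
      public static void main(String[] args) throws IOException {
        // A file inode with two snapshot diffs; Type is the new required field.
        DiffEntry entry = DiffEntry.newBuilder()
            .setType(Type.FILEDIFF)
            .setInodeId(16392L)   // illustrative inode id
            .setNumOfDiff(2)
            .build();

        // Entries are written length-delimited; in the real image each entry is
        // followed by the corresponding FileDiff (or DirectoryDiff) records.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        entry.writeDelimitedTo(out);
        System.out.println("serialized DiffEntry: " + out.size() + " bytes");
      }
    }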

Modified:
    hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5698.txt
    hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
    hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
    hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto

Modified: hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5698.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5698.txt?rev=1560789&r1=1560788&r2=1560789&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5698.txt (original)
+++ hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5698.txt Thu Jan 23 19:10:45 2014
@@ -19,3 +19,6 @@ HDFS-5698 subtasks
 
     HDFS-5744. Serialize information for token managers in protobuf. (Haohui Mai
     via jing9)
+
+    HDFS-5824. Add a Type field in Snapshot DiffEntry's protobuf definition.
+    (jing9)

Modified: hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java?rev=1560789&r1=1560788&r2=1560789&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java (original)
+++ hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java Thu Jan 23 19:10:45 2014
@@ -52,14 +52,13 @@ import org.apache.hadoop.hdfs.security.t
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.FSImageFormatPBSnapshot;
 import org.apache.hadoop.hdfs.util.MD5FileUtils;
-import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressorStream;
-import org.apache.hadoop.security.UserGroupInformation;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -221,7 +220,7 @@ public final class FSImageFormatProtobuf
           inodeLoader.loadFilesUnderConstructionSection(in);
           break;
         case SNAPSHOT:
-          snapshotLoader.loadSnapshotsSection(in);
+          snapshotLoader.loadSnapshotSection(in);
           break;
         case SNAPSHOT_DIFF:
           snapshotLoader.loadSnapshotDiffSection(in);
@@ -365,7 +364,7 @@ public final class FSImageFormatProtobuf
       FSImageFormatPBSnapshot.Saver snapshotSaver =
           new FSImageFormatPBSnapshot.Saver(this, summary,
               context.getSourceNamesystem());
-      snapshotSaver.serializeSnapshotsSection(sectionOutputStream);
+      snapshotSaver.serializeSnapshotSection(sectionOutputStream);
       snapshotSaver.serializeSnapshotDiffSection(sectionOutputStream);
     }
 

Modified: hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java?rev=1560789&r1=1560788&r2=1560789&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java (original)
+++ hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java Thu Jan 23 19:10:45 2014
@@ -45,7 +45,8 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry;
-import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotsSection;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection;
 import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectoryAttributes;
@@ -87,9 +88,9 @@ public class FSImageFormatPBSnapshot {
      *
      * @return A map containing all the snapshots loaded from the fsimage.
      */
-    public void loadSnapshotsSection(InputStream in) throws IOException {
+    public void loadSnapshotSection(InputStream in) throws IOException {
       SnapshotManager sm = fsn.getSnapshotManager();
-      SnapshotsSection section = SnapshotsSection.parseDelimitedFrom(in);
+      SnapshotSection section = SnapshotSection.parseDelimitedFrom(in);
       int snum = section.getNumSnapshots();
       sm.setNumSnapshots(snum);
       sm.setSnapshotCounter(section.getSnapshotCounter());
@@ -111,7 +112,7 @@ public class FSImageFormatPBSnapshot {
 
     private void loadSnapshots(InputStream in, int size) throws IOException {
       for (int i = 0; i < size; i++) {
-        SnapshotsSection.Snapshot pbs = SnapshotsSection.Snapshot
+        SnapshotSection.Snapshot pbs = SnapshotSection.Snapshot
             .parseDelimitedFrom(in);
         INodeDirectory root = loadINodeDirectory(pbs.getRoot(),
             parent.getStringTable());
@@ -138,10 +139,14 @@ public class FSImageFormatPBSnapshot {
         }
         long inodeId = entry.getInodeId();
         INode inode = fsDir.getInode(inodeId);
-        if (inode.isFile()) {
+        SnapshotDiffSection.DiffEntry.Type type = entry.getType();
+        switch (type) {
+        case FILEDIFF:
           loadFileDiffList(in, inode.asFile(), entry.getNumOfDiff());
-        } else if (inode.isDirectory()) {
+          break;
+        case DIRECTORYDIFF:
           loadDirectoryDiffList(in, inode.asDirectory(), entry.getNumOfDiff());
+          break;
         }
       }
     }
@@ -198,8 +203,8 @@ public class FSImageFormatPBSnapshot {
      *        inodes' ids are directly recorded in protobuf
      */
     private List<INode> loadDeletedList(InputStream in, INodeDirectory dir,
-        int totalSize, List<Long> deletedNodes) throws IOException {
-      List<INode> dlist = new ArrayList<INode>(totalSize);
+        int refNum, List<Long> deletedNodes) throws IOException {
+      List<INode> dlist = new ArrayList<INode>(refNum + deletedNodes.size());
       // load non-reference inodes
       for (long deletedId : deletedNodes) {
         INode deleted = fsDir.getInode(deletedId);
@@ -207,7 +212,6 @@ public class FSImageFormatPBSnapshot {
         addToDeletedList(deleted, dir);
       }
       // load reference nodes in the deleted list
-      int refNum = totalSize - deletedNodes.size();
       for (int r = 0; r < refNum; r++) {
         INodeSection.INodeReference ref = INodeSection.INodeReference
             .parseDelimitedFrom(in);
@@ -256,10 +260,11 @@ public class FSImageFormatPBSnapshot {
                   modTime, dirCopyInPb.getNsQuota(), dirCopyInPb.getDsQuota());
         }
         // load created list
-        List<INode> clist = loadCreatedList(in, dir, diffInPb.getClistSize());
+        List<INode> clist = loadCreatedList(in, dir,
+            diffInPb.getCreatedListSize());
         // load deleted list
-        List<INode> dlist = loadDeletedList(in, dir, diffInPb.getDlistSize(),
-            diffInPb.getDeletedINodeList());
+        List<INode> dlist = loadDeletedList(in, dir,
+            diffInPb.getNumOfDeletedRef(), diffInPb.getDeletedINodeList());
         // create the directory diff
         DirectoryDiff diff = new DirectoryDiff(snapshotId, copy, null,
             childrenSize, clist, dlist, useRoot);
@@ -286,9 +291,9 @@ public class FSImageFormatPBSnapshot {
     /**
      * save all the snapshottable directories and snapshots to fsimage
      */
-    public void serializeSnapshotsSection(OutputStream out) throws IOException {
+    public void serializeSnapshotSection(OutputStream out) throws IOException {
       SnapshotManager sm = fsn.getSnapshotManager();
-      SnapshotsSection.Builder b = SnapshotsSection.newBuilder()
+      SnapshotSection.Builder b = SnapshotSection.newBuilder()
           .setSnapshotCounter(sm.getSnapshotCounter())
           .setNumSnapshots(sm.getNumSnapshots());
 
@@ -301,7 +306,7 @@ public class FSImageFormatPBSnapshot {
       for(INodeDirectorySnapshottable sdir : snapshottables) {
         for(Snapshot s : sdir.getSnapshotsByNames()) {
           Root sroot = s.getRoot();
-          SnapshotsSection.Snapshot.Builder sb = SnapshotsSection.Snapshot
+          SnapshotSection.Snapshot.Builder sb = SnapshotSection.Snapshot
               .newBuilder().setSnapshotId(s.getId());
           INodeSection.INodeDirectory.Builder db = buildINodeDirectory(sroot,
               parent.getStringMap());
@@ -343,7 +348,7 @@ public class FSImageFormatPBSnapshot {
       if (sf != null) {
         List<FileDiff> diffList = sf.getDiffs().asList();
         SnapshotDiffSection.DiffEntry entry = SnapshotDiffSection.DiffEntry
-            .newBuilder().setInodeId(file.getId())
+            .newBuilder().setInodeId(file.getId()).setType(Type.FILEDIFF)
             .setNumOfDiff(diffList.size()).build();
         entry.writeDelimitedTo(out);
         for (int i = diffList.size() - 1; i >= 0; i--) {
@@ -382,7 +387,7 @@ public class FSImageFormatPBSnapshot {
       if (sf != null) {
         List<DirectoryDiff> diffList = sf.getDiffs().asList();
         SnapshotDiffSection.DiffEntry entry = SnapshotDiffSection.DiffEntry
-            .newBuilder().setInodeId(dir.getId())
+            .newBuilder().setInodeId(dir.getId()).setType(Type.DIRECTORYDIFF)
             .setNumOfDiff(diffList.size()).build();
         entry.writeDelimitedTo(out);
         for (int i = diffList.size() - 1; i >= 0; i--) { // reverse order!
@@ -400,9 +405,8 @@ public class FSImageFormatPBSnapshot {
           // process created list and deleted list
           List<INode> created = diff.getChildrenDiff()
               .getList(ListType.CREATED);
-          db.setClistSize(created.size());
+          db.setCreatedListSize(created.size());
          List<INode> deleted = diff.getChildrenDiff().getList(ListType.DELETED);
-          db.setDlistSize(deleted.size());
           List<INodeReference> refs = new ArrayList<INodeReference>();
           for (INode d : deleted) {
             if (d.isReference()) {
@@ -411,6 +415,7 @@ public class FSImageFormatPBSnapshot {
               db.addDeletedINode(d.getId());
             }
           }
+          db.setNumOfDeletedRef(refs.size());
           db.build().writeDelimitedTo(out);
           saveCreatedDeletedList(created, refs, out);
         }

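A note on the DirectoryDiff change above: the serialized record no longer
carries the total size of the deleted list. Non-reference deletions are stored
inline as inode ids (deletedINode), while the new numOfDeletedRef field counts
only the INodeReference records that follow, and the loader rebuilds the full
list from both. A hedged sketch of that size arithmetic (the helper class and
method name are hypothetical, assuming the generated
SnapshotDiffSection.DirectoryDiff class from this branch):

    import java.util.List;

    import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DirectoryDiff;

    // Illustrative helper, not part of the patch: how many entries the
    // reconstructed deleted list of one serialized DirectoryDiff will hold.
    final class DeletedListSizing {
      static int deletedListSize(DirectoryDiff diffInPb) {
        // Non-reference deletions are recorded inline as inode ids...
        List<Long> deletedIds = diffInPb.getDeletedINodeList();
        // ...while reference deletions follow as delimited INodeReference
        // records, counted by the new numOfDeletedRef field.
        return deletedIds.size() + diffInPb.getNumOfDeletedRef();
      }
    }
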
Modified: hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto?rev=1560789&r1=1560788&r2=1560789&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto (original)
+++ hadoop/common/branches/HDFS-5698/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto Thu Jan 23 19:10:45 2014
@@ -174,7 +174,7 @@ message INodeDirectorySection {
  * This section records the information about snapshot
  * NAME: SNAPSHOT
  */
-message SnapshotsSection {
+message SnapshotSection {
   message Snapshot {
     optional uint32 snapshotId = 1;
     // Snapshot root
@@ -185,7 +185,7 @@ message SnapshotsSection {
   repeated uint64 snapshottableDir = 2;
   // total number of snapshots
   optional uint32 numSnapshots = 3;
-  // repeated Snapshots...
+  // repeated Snapshot...
 }
 
 /**
@@ -203,11 +203,11 @@ message SnapshotDiffSection {
     optional bool isSnapshotRoot = 3;
     optional bytes name = 4;
     optional INodeSection.INodeDirectory snapshotCopy = 5;
-    optional uint32 clistSize = 6;
-    optional uint32 dlistSize = 7;
+    optional uint32 createdListSize = 6;
+    optional uint32 numOfDeletedRef = 7; // number of reference nodes in deleted list
     repeated uint64 deletedINode = 8; // id of deleted inode
-    // repeated CreatedListEntry
-    // repeated INodeReference (number of ref: dlistSize - dlist.size)
+    // repeated CreatedListEntry (size is specified by createdListSize)
+    // repeated INodeReference (reference inodes in deleted list)
   }
 
   message FileDiff {
@@ -218,8 +218,14 @@ message SnapshotDiffSection {
   }
 
   message DiffEntry {
-    optional uint64 inodeId = 1;
-    optional uint32 numOfDiff = 2;
+    enum Type {
+      FILEDIFF = 1;
+      DIRECTORYDIFF = 2;
+    }
+    required Type type = 1;
+    optional uint64 inodeId = 2;
+    optional uint32 numOfDiff = 3;
+
     // repeated DirectoryDiff or FileDiff
   }
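
Putting the new proto definitions together, the SNAPSHOT_DIFF section is a
sequence of length-delimited DiffEntry records, each followed by numOfDiff
FileDiff or DirectoryDiff records according to the new type field, and each
DirectoryDiff is in turn followed by its created-list entries and the
reference part of its deleted list. A minimal, hedged sketch of walking that
layout (entryCount is assumed to be known from elsewhere, nothing is applied
to the namesystem, and this is not the real loader):

    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
    import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection;
    import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry;

    // Illustrative skeleton of the on-disk layout only; the real loader
    // attaches the parsed diffs to inodes and handles snapshot copies.
    final class SnapshotDiffWalker {
      static void walk(InputStream in, int entryCount) throws IOException {
        for (int i = 0; i < entryCount; i++) {
          DiffEntry entry = DiffEntry.parseDelimitedFrom(in);
          switch (entry.getType()) {
          case FILEDIFF:
            for (int d = 0; d < entry.getNumOfDiff(); d++) {
              SnapshotDiffSection.FileDiff.parseDelimitedFrom(in);
            }
            break;
          case DIRECTORYDIFF:
            for (int d = 0; d < entry.getNumOfDiff(); d++) {
              SnapshotDiffSection.DirectoryDiff dd =
                  SnapshotDiffSection.DirectoryDiff.parseDelimitedFrom(in);
              // createdListSize delimited CreatedListEntry records follow.
              for (int c = 0; c < dd.getCreatedListSize(); c++) {
                SnapshotDiffSection.CreatedListEntry.parseDelimitedFrom(in);
              }
              // Then numOfDeletedRef delimited INodeReference records; the
              // non-reference deletions are already inline in
              // dd.getDeletedINodeList().
              for (int r = 0; r < dd.getNumOfDeletedRef(); r++) {
                INodeSection.INodeReference.parseDelimitedFrom(in);
              }
            }
            break;
          }
        }
      }
    }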
 

