Author: jing9
Date: Thu Feb 13 00:00:42 2014
New Revision: 1567812

URL: http://svn.apache.org/r1567812
Log:
HDFS-5847. Consolidate INodeReference into a separate section. Contributed by Jing Zhao.
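For orientation before the per-file diffs: this patch stops writing INodeReference records inline (previously serialized after each DirEntry and inside each deleted list) and instead serializes one global, ordered reference list as its own fsimage section; every other section now records only an integer position into that list. A minimal sketch of the indexing scheme, using simplified stand-in types rather than the real HDFS classes:

// Sketch only: stand-in types, not the actual INodeReference/DirEntry classes.
import java.util.ArrayList;
import java.util.List;

class RefIndexingSketch {
  static final class INodeRef {
    final long referredId;
    INodeRef(long referredId) { this.referredId = referredId; }
  }

  public static void main(String[] args) {
    List<INodeRef> refList = new ArrayList<>();            // becomes the INODE_REFRENCE section
    List<Integer> dirEntryRefChildren = new ArrayList<>(); // what DirEntry.refChildren now holds

    // Saver side: append the reference once, record only its index.
    refList.add(new INodeRef(16386L));
    dirEntryRefChildren.add(refList.size() - 1);

    // Loader side: resolve the index back through the shared list.
    INodeRef resolved = refList.get(dirEntryRefChildren.get(0));
    System.out.println("resolved referredId = " + resolved.referredId); // 16386
  }
}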
Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1567812&r1=1567811&r2=1567812&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Thu Feb 13 00:00:42 2014
@@ -338,6 +338,8 @@ Trunk (Unreleased)
     HDFS-5915. Refactor FSImageFormatProtobuf to simplify cross section
     reads. (Haohui Mai via cnauroth)
 
+    HDFS-5847. Consolidate INodeReference into a separate section. (jing9)
+
 Release 2.4.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java?rev=1567812&r1=1567811&r2=1567812&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java Thu Feb 13 00:00:42 2014
@@ -43,9 +43,6 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
-import org.apache.hadoop.hdfs.server.namenode.INodeReference.DstReference;
-import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount;
-import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithName;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
 
@@ -69,25 +66,6 @@ public final class FSImageFormatPBINode
           new FsPermission(perm));
     }
 
-    public static INodeReference loadINodeReference(
-        INodeSection.INodeReference r, FSDirectory dir) throws IOException {
-      long referredId = r.getReferredId();
-      INode referred = dir.getInode(referredId);
-      WithCount withCount = (WithCount) referred.getParentReference();
-      if (withCount == null) {
-        withCount = new INodeReference.WithCount(null, referred);
-      }
-      final INodeReference ref;
-      if (r.hasDstSnapshotId()) { // DstReference
-        ref = new INodeReference.DstReference(null, withCount,
-            r.getDstSnapshotId());
-      } else {
-        ref = new INodeReference.WithName(null, withCount, r.getName()
-            .toByteArray(), r.getLastSnapshotId());
-      }
-      return ref;
-    }
-
     public static INodeDirectory loadINodeDirectory(INodeSection.INode n,
         final String[] stringTable) {
       assert n.getType() == INodeSection.INode.Type.DIRECTORY;
@@ -126,6 +104,8 @@
     }
 
     void loadINodeDirectorySection(InputStream in) throws IOException {
+      final List<INodeReference> refList = parent.getLoaderContext()
+          .getRefList();
       while (true) {
         INodeDirectorySection.DirEntry e = INodeDirectorySection.DirEntry
             .parseDelimitedFrom(in);
@@ -138,20 +118,13 @@
           INode child = dir.getInode(id);
           addToParent(p, child);
         }
-        for (int i = 0; i < e.getNumOfRef(); i++) {
-          INodeReference ref = loadINodeReference(in);
+        for (int refId : e.getRefChildrenList()) {
+          INodeReference ref = refList.get(refId);
           addToParent(p, ref);
         }
       }
     }
 
-    private INodeReference loadINodeReference(InputStream in)
-        throws IOException {
-      INodeSection.INodeReference ref = INodeSection.INodeReference
-          .parseDelimitedFrom(in);
-      return loadINodeReference(ref, dir);
-    }
-
     void loadINodeSection(InputStream in) throws IOException {
       INodeSection s = INodeSection.parseDelimitedFrom(in);
       fsn.resetLastInodeId(s.getLastInodeId());
@@ -306,19 +279,6 @@
       return b;
     }
 
-    public static INodeSection.INodeReference.Builder buildINodeReference(
-        INodeReference ref) throws IOException {
-      INodeSection.INodeReference.Builder rb = INodeSection.INodeReference
-          .newBuilder().setReferredId(ref.getId());
-      if (ref instanceof WithName) {
-        rb.setLastSnapshotId(((WithName) ref).getLastSnapshotId()).setName(
-            ByteString.copyFrom(ref.getLocalNameBytes()));
-      } else if (ref instanceof DstReference) {
-        rb.setDstSnapshotId(((DstReference) ref).getDstSnapshotId());
-      }
-      return rb;
-    }
-
     private final FSNamesystem fsn;
     private final FileSummary.Builder summary;
     private final SaveNamespaceContext context;
@@ -334,6 +294,8 @@
     void serializeINodeDirectorySection(OutputStream out) throws IOException {
       Iterator<INodeWithAdditionalFields> iter = fsn.getFSDirectory()
           .getINodeMap().getMapIterator();
+      final ArrayList<INodeReference> refList = parent.getSaverContext()
+          .getRefList();
       int i = 0;
       while (iter.hasNext()) {
         INodeWithAdditionalFields n = iter.next();
@@ -346,21 +308,16 @@
         if (children.size() > 0) {
           INodeDirectorySection.DirEntry.Builder b = INodeDirectorySection.
              DirEntry.newBuilder().setParent(n.getId());
-          List<INodeReference> refs = new ArrayList<INodeReference>();
           for (INode inode : children) {
             if (!inode.isReference()) {
               b.addChildren(inode.getId());
             } else {
-              refs.add(inode.asReference());
+              refList.add(inode.asReference());
+              b.addRefChildren(refList.size() - 1);
             }
           }
-          b.setNumOfRef(refs.size());
           INodeDirectorySection.DirEntry e = b.build();
           e.writeDelimitedTo(out);
-          for (INodeReference ref : refs) {
-            INodeSection.INodeReference.Builder rb = buildINodeReference(ref);
-            rb.build().writeDelimitedTo(out);
-          }
         }
         ++i;
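Both loader loops above rely on protobuf's length-delimited framing: parseDelimitedFrom returns null once the stream is exhausted, which is what terminates each while (true) section loop. A self-contained illustration of that pattern; com.google.protobuf.Any is used purely as a stand-in for the generated FsImageProto message types, which expose the same writeDelimitedTo/parseDelimitedFrom API:

// Sketch only: Any stands in for FsImageProto's generated messages.
import com.google.protobuf.Any;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

class DelimitedLoopSketch {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    for (int i = 0; i < 3; i++) {
      Any.newBuilder().setTypeUrl("example/entry-" + i).build()
          .writeDelimitedTo(bos);           // varint length prefix + message bytes
    }

    InputStream in = new ByteArrayInputStream(bos.toByteArray());
    while (true) {
      Any e = Any.parseDelimitedFrom(in);   // null once the stream is exhausted
      if (e == null) {
        break;
      }
      System.out.println(e.getTypeUrl());
    }
  }
}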
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java?rev=1567812&r1=1567811&r2=1567812&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java Thu Feb 13 00:00:42 2014
@@ -75,10 +75,15 @@ public final class FSImageFormatProtobuf
 
   public static final class LoaderContext {
     private String[] stringTable;
+    private final ArrayList<INodeReference> refList = Lists.newArrayList();
 
     public String[] getStringTable() {
       return stringTable;
     }
+
+    public ArrayList<INodeReference> getRefList() {
+      return refList;
+    }
   }
 
   public static final class SaverContext {
@@ -112,10 +117,15 @@
       }
     }
     private final DeduplicationMap<String> stringMap = DeduplicationMap.newMap();
+    private final ArrayList<INodeReference> refList = Lists.newArrayList();
 
     public DeduplicationMap<String> getStringMap() {
       return stringMap;
     }
+
+    public ArrayList<INodeReference> getRefList() {
+      return refList;
+    }
   }
 
   public static final class Loader implements FSImageFormat.AbstractLoader {
@@ -123,7 +133,6 @@
     private final Configuration conf;
     private final FSNamesystem fsn;
     private final LoaderContext ctx;
-
     /** The MD5 sum of the loaded file */
     private MD5Hash imgDigest;
     /** The transaction ID of the last edit represented by the loaded file */
@@ -226,6 +235,9 @@
             inodeLoader.loadINodeSection(in);
           }
           break;
+        case INODE_REFRENCE:
+          snapshotLoader.loadINodeReferenceSection(in);
+          break;
         case INODE_DIR:
           inodeLoader.loadINodeDirectorySection(in);
           break;
@@ -313,9 +325,10 @@
   }
 
   public static final class Saver {
+    public static final int CHECK_CANCEL_INTERVAL = 4096;
+
     private final SaveNamespaceContext context;
     private final SaverContext saverContext;
-
     private long currentOffset = FSImageUtil.MAGIC_HEADER.length;
     private MD5Hash savedDigest;
 
@@ -324,7 +337,6 @@
     private OutputStream sectionOutputStream;
     private CompressionCodec codec;
     private OutputStream underlyingOutputStream;
-    public static final int CHECK_CANCEL_INTERVAL = 4096;
 
     Saver(SaveNamespaceContext context) {
       this.context = context;
@@ -400,6 +412,7 @@
 
       snapshotSaver.serializeSnapshotSection(sectionOutputStream);
       snapshotSaver.serializeSnapshotDiffSection(sectionOutputStream);
+      snapshotSaver.serializeINodeReferenceSection(sectionOutputStream);
     }
 
     private void saveInternal(FileOutputStream fout,
@@ -535,6 +548,7 @@
     NS_INFO("NS_INFO"),
     STRING_TABLE("STRING_TABLE"),
     INODE("INODE"),
+    INODE_REFRENCE("INODE_REFRENCE"),
     SNAPSHOT("SNAPSHOT"),
     INODE_DIR("INODE_DIR"),
     FILES_UNDERCONSTRUCTION("FILES_UNDERCONSTRUCTION"),
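Note the ordering in the save path above: serializeINodeReferenceSection runs after both the directory and snapshot-diff serializers, because those two passes are what populate the shared refList held in SaverContext. A toy sketch of that dependency, with hypothetical names rather than the real serializers:

// Toy model of the save-order dependency; hypothetical names, not HDFS code.
import java.util.ArrayList;
import java.util.List;

class SaveOrderSketch {
  private final List<String> refList = new ArrayList<>(); // shared saver context

  void serializeDirSection(StringBuilder img) {
    refList.add("ref-from-dir-entry");        // directory pass registers a ref...
    img.append("DirEntry.refChildren: 0\n");  // ...and stores only its index
  }

  void serializeSnapshotDiffSection(StringBuilder img) {
    refList.add("ref-from-deleted-list");
    img.append("DirectoryDiff.deletedINodeRef: 1\n");
  }

  void serializeINodeReferenceSection(StringBuilder img) {
    // Only correct when called last: the earlier passes fill refList.
    for (String ref : refList) {
      img.append("INodeReference: ").append(ref).append('\n');
    }
  }

  public static void main(String[] args) {
    SaveOrderSketch saver = new SaveOrderSketch();
    StringBuilder image = new StringBuilder();
    saver.serializeDirSection(image);
    saver.serializeSnapshotDiffSection(image);
    saver.serializeINodeReferenceSection(image); // must come after both
    System.out.print(image);
  }
}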
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java?rev=1567812&r1=1567811&r2=1567812&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java Thu Feb 13 00:00:42 2014
@@ -18,12 +18,10 @@
 package org.apache.hadoop.hdfs.server.namenode.snapshot;
 
 import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Loader.loadINodeDirectory;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Loader.loadINodeReference;
 import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Loader.loadPermission;
 import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Loader.updateBlocksMap;
 import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Saver.buildINodeDirectory;
 import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Saver.buildINodeFile;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Saver.buildINodeReference;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -40,8 +38,10 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf;
+import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry;
@@ -54,6 +54,9 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.INodeFileAttributes;
 import org.apache.hadoop.hdfs.server.namenode.INodeMap;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference;
+import org.apache.hadoop.hdfs.server.namenode.INodeReference.DstReference;
+import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount;
+import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithName;
 import org.apache.hadoop.hdfs.server.namenode.INodeWithAdditionalFields;
 import org.apache.hadoop.hdfs.server.namenode.SaveNamespaceContext;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
@@ -75,7 +78,6 @@ public class FSImageFormatPBSnapshot {
     private final FSImageFormatProtobuf.Loader parent;
     private final Map<Integer, Snapshot> snapshotMap;
-
     public Loader(FSNamesystem fsn, FSImageFormatProtobuf.Loader parent) {
       this.fsn = fsn;
       this.fsDir = fsn.getFSDirectory();
@@ -84,6 +86,43 @@
     }
 
     /**
+     * The sequence of the ref nodes in refList must be strictly the same as
+     * the sequence in the fsimage.
+     */
+    public void loadINodeReferenceSection(InputStream in) throws IOException {
+      final List<INodeReference> refList = parent.getLoaderContext()
+          .getRefList();
+      while (true) {
+        INodeReferenceSection.INodeReference e = INodeReferenceSection
+            .INodeReference.parseDelimitedFrom(in);
+        if (e == null) {
+          break;
+        }
+        INodeReference ref = loadINodeReference(e);
+        refList.add(ref);
+      }
+    }
+
+    private INodeReference loadINodeReference(
+        INodeReferenceSection.INodeReference r) throws IOException {
+      long referredId = r.getReferredId();
+      INode referred = fsDir.getInode(referredId);
+      WithCount withCount = (WithCount) referred.getParentReference();
+      if (withCount == null) {
+        withCount = new INodeReference.WithCount(null, referred);
+      }
+      final INodeReference ref;
+      if (r.hasDstSnapshotId()) { // DstReference
+        ref = new INodeReference.DstReference(null, withCount,
+            r.getDstSnapshotId());
+      } else {
+        ref = new INodeReference.WithName(null, withCount, r.getName()
+            .toByteArray(), r.getLastSnapshotId());
+      }
+      return ref;
+    }
+
+    /**
      * Load the snapshots section from fsimage. Also convert snapshottable
      * directories into {@link INodeDirectorySnapshottable}.
      *
@@ -131,6 +170,8 @@
      * Load the snapshot diff section from fsimage.
      */
     public void loadSnapshotDiffSection(InputStream in) throws IOException {
+      final List<INodeReference> refList = parent.getLoaderContext()
+          .getRefList();
       while (true) {
         SnapshotDiffSection.DiffEntry entry = SnapshotDiffSection.DiffEntry
             .parseDelimitedFrom(in);
@@ -145,7 +186,8 @@
           loadFileDiffList(in, inode.asFile(), entry.getNumOfDiff());
           break;
         case DIRECTORYDIFF:
-          loadDirectoryDiffList(in, inode.asDirectory(), entry.getNumOfDiff());
+          loadDirectoryDiffList(in, inode.asDirectory(), entry.getNumOfDiff(),
+              refList);
           break;
         }
       }
@@ -199,13 +241,13 @@
 
     /**
      * Load the deleted list in a DirectoryDiff
-     * @param totalSize the total size of the deleted list
-     * @param deletedNodes non-reference inodes in the deleted list. These
-     *        inodes' ids are directly recorded in protobuf
      */
-    private List<INode> loadDeletedList(InputStream in, INodeDirectory dir,
-        int refNum, List<Long> deletedNodes) throws IOException {
-      List<INode> dlist = new ArrayList<INode>(refNum + deletedNodes.size());
+    private List<INode> loadDeletedList(final List<INodeReference> refList,
+        InputStream in, INodeDirectory dir, List<Long> deletedNodes,
+        List<Integer> deletedRefNodes)
+        throws IOException {
+      List<INode> dlist = new ArrayList<INode>(deletedRefNodes.size()
+          + deletedNodes.size());
       // load non-reference inodes
       for (long deletedId : deletedNodes) {
         INode deleted = fsDir.getInode(deletedId);
@@ -213,13 +255,12 @@
         addToDeletedList(deleted, dir);
       }
       // load reference nodes in the deleted list
-      for (int r = 0; r < refNum; r++) {
-        INodeSection.INodeReference ref = INodeSection.INodeReference
-            .parseDelimitedFrom(in);
-        INodeReference refNode = loadINodeReference(ref, fsDir);
-        dlist.add(refNode);
-        addToDeletedList(refNode, dir);
+      for (int refId : deletedRefNodes) {
+        INodeReference deletedRef = refList.get(refId);
+        dlist.add(deletedRef);
+        addToDeletedList(deletedRef, dir);
       }
+
       Collections.sort(dlist, new Comparator<INode>() {
         @Override
         public int compare(INode n1, INode n2) {
@@ -231,7 +272,7 @@
 
     /** Load DirectoryDiff list for a directory with snapshot feature */
     private void loadDirectoryDiffList(InputStream in, INodeDirectory dir,
-        int size) throws IOException {
+        int size, final List<INodeReference> refList) throws IOException {
       if (!dir.isWithSnapshot()) {
         dir.addSnapshotFeature(null);
       }
@@ -247,7 +288,7 @@
       INodeDirectoryAttributes copy = null;
       if (useRoot) {
         copy = snapshot.getRoot();
-      }else if (diffInPb.hasSnapshotCopy()) {
+      } else if (diffInPb.hasSnapshotCopy()) {
         INodeSection.INodeDirectory dirCopyInPb = diffInPb.getSnapshotCopy();
         final byte[] name = diffInPb.getName().toByteArray();
         PermissionStatus permission = loadPermission(
@@ -265,8 +306,8 @@
         List<INode> clist = loadCreatedList(in, dir,
             diffInPb.getCreatedListSize());
         // load deleted list
-        List<INode> dlist = loadDeletedList(in, dir,
-            diffInPb.getNumOfDeletedRef(), diffInPb.getDeletedINodeList());
+        List<INode> dlist = loadDeletedList(refList, in, dir,
+            diffInPb.getDeletedINodeList(), diffInPb.getDeletedINodeRefList());
         // create the directory diff
         DirectoryDiff diff = new DirectoryDiff(snapshotId, copy, null,
             childrenSize, clist, dlist, useRoot);
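One subtlety in loadINodeReference above: all references to the same referred inode must share a single WithCount wrapper, so the loader reuses the referred inode's existing parent reference when one exists. A simplified sketch of that sharing rule, using stand-in types rather than the real INodeReference classes:

// Sketch only: stand-in types showing the WithCount sharing rule.
class WithCountSketch {
  static final class Inode {
    final long id;
    WithCount parentRef;               // set once a reference exists
    Inode(long id) { this.id = id; }
  }

  static final class WithCount {
    final Inode referred;
    int referenceCount;
    WithCount(Inode referred) { this.referred = referred; }
  }

  static WithCount refer(Inode referred) {
    WithCount wc = referred.parentRef; // reuse the existing wrapper if any
    if (wc == null) {
      wc = new WithCount(referred);
      referred.parentRef = wc;
    }
    wc.referenceCount++;
    return wc;
  }

  public static void main(String[] args) {
    Inode file = new Inode(16386L);
    WithCount first = refer(file);
    WithCount second = refer(file);
    System.out.println("same wrapper: " + (first == second)
        + ", count: " + first.referenceCount); // same wrapper: true, count: 2
  }
}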
@@ -285,7 +326,8 @@
     private final SaveNamespaceContext context;
 
     public Saver(FSImageFormatProtobuf.Saver parent,
-        FileSummary.Builder headers, SaveNamespaceContext context, FSNamesystem fsn) {
+        FileSummary.Builder headers, SaveNamespaceContext context,
+        FSNamesystem fsn) {
       this.parent = parent;
       this.headers = headers;
       this.context = context;
@@ -331,11 +373,41 @@
     }
 
     /**
+     * This can only be called after serializing both INode_Dir and SnapshotDiff
+     */
+    public void serializeINodeReferenceSection(OutputStream out)
+        throws IOException {
+      final List<INodeReference> refList = parent.getSaverContext()
+          .getRefList();
+      for (INodeReference ref : refList) {
+        INodeReferenceSection.INodeReference.Builder rb = buildINodeReference(ref);
+        rb.build().writeDelimitedTo(out);
+      }
+      parent.commitSection(headers, SectionName.INODE_REFRENCE);
+    }
+
+    private INodeReferenceSection.INodeReference.Builder buildINodeReference(
+        INodeReference ref) throws IOException {
+      INodeReferenceSection.INodeReference.Builder rb =
+          INodeReferenceSection.INodeReference.newBuilder().
+          setReferredId(ref.getId());
+      if (ref instanceof WithName) {
+        rb.setLastSnapshotId(((WithName) ref).getLastSnapshotId()).setName(
+            ByteString.copyFrom(ref.getLocalNameBytes()));
+      } else if (ref instanceof DstReference) {
+        rb.setDstSnapshotId(((DstReference) ref).getDstSnapshotId());
+      }
+      return rb;
+    }
+
+    /**
      * save all the snapshot diff to fsimage
      */
     public void serializeSnapshotDiffSection(OutputStream out)
         throws IOException {
       INodeMap inodesMap = fsn.getFSDirectory().getINodeMap();
+      final List<INodeReference> refList = parent.getSaverContext()
+          .getRefList();
       int i = 0;
       Iterator<INodeWithAdditionalFields> iter = inodesMap.getMapIterator();
       while (iter.hasNext()) {
@@ -343,7 +415,7 @@
         if (inode.isFile()) {
           serializeFileDiffList(inode.asFile(), out);
         } else if (inode.isDirectory()) {
-          serializeDirDiffList(inode.asDirectory(), out);
+          serializeDirDiffList(inode.asDirectory(), refList, out);
         }
         ++i;
         if (i % FSImageFormatProtobuf.Saver.CHECK_CANCEL_INTERVAL == 0) {
@@ -378,22 +450,18 @@
       }
     }
 
-    private void saveCreatedDeletedList(List<INode> created,
-        List<INodeReference> deletedRefs, OutputStream out) throws IOException {
+    private void saveCreatedList(List<INode> created, OutputStream out)
+        throws IOException {
       // local names of the created list member
       for (INode c : created) {
         SnapshotDiffSection.CreatedListEntry.newBuilder()
             .setName(ByteString.copyFrom(c.getLocalNameBytes())).build()
             .writeDelimitedTo(out);
       }
-      // reference nodes in deleted list
-      for (INodeReference ref : deletedRefs) {
-        INodeSection.INodeReference.Builder rb = buildINodeReference(ref);
-        rb.build().writeDelimitedTo(out);
-      }
     }
 
-    private void serializeDirDiffList(INodeDirectory dir, OutputStream out)
+    private void serializeDirDiffList(INodeDirectory dir,
+        final List<INodeReference> refList, OutputStream out)
         throws IOException {
       DirectoryWithSnapshotFeature sf = dir.getDirectoryWithSnapshotFeature();
       if (sf != null) {
@@ -419,17 +487,16 @@
               .getList(ListType.CREATED);
           db.setCreatedListSize(created.size());
           List<INode> deleted = diff.getChildrenDiff().getList(ListType.DELETED);
-          List<INodeReference> refs = new ArrayList<INodeReference>();
           for (INode d : deleted) {
            if (d.isReference()) {
-              refs.add(d.asReference());
+              refList.add(d.asReference());
+              db.addDeletedINodeRef(refList.size() - 1);
             } else {
               db.addDeletedINode(d.getId());
             }
           }
-          db.setNumOfDeletedRef(refs.size());
           db.build().writeDelimitedTo(out);
-          saveCreatedDeletedList(created, refs, out);
+          saveCreatedList(created, out);
         }
       }
     }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java?rev=1567812&r1=1567811&r2=1567812&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java Thu Feb 13 00:00:42 2014
@@ -36,6 +36,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.FSImageUtil;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory;
@@ -79,6 +80,7 @@ final class LsrPBImage {
   private String[] stringTable;
   private HashMap<Long, INodeSection.INode> inodes = Maps.newHashMap();
   private HashMap<Long, long[]> dirmap = Maps.newHashMap();
+  private ArrayList<INodeReferenceSection.INodeReference> refList = Lists.newArrayList();
 
   public LsrPBImage(Configuration conf, PrintWriter out) {
     this.conf = conf;
@@ -125,6 +127,9 @@
       case INODE:
         loadINodeSection(is);
         break;
+      case INODE_REFRENCE:
+        loadINodeReferenceSection(is);
+        break;
       case INODE_DIR:
         loadINodeDirectorySection(is);
         break;
@@ -202,14 +207,26 @@
       if (e == null) {
         break;
       }
-      long[] l = new long[e.getChildrenCount()];
-      for (int i = 0; i < l.length; ++i) {
+      long[] l = new long[e.getChildrenCount() + e.getRefChildrenCount()];
+      for (int i = 0; i < e.getChildrenCount(); ++i) {
         l[i] = e.getChildren(i);
       }
+      for (int i = e.getChildrenCount(); i < l.length; i++) {
+        int refId = e.getRefChildren(i - e.getChildrenCount());
+        l[i] = refList.get(refId).getReferredId();
+      }
       dirmap.put(e.getParent(), l);
-      for (int i = 0; i < e.getNumOfRef(); i++) {
-        INodeSection.INodeReference.parseDelimitedFrom(in);
+    }
+  }
+
+  private void loadINodeReferenceSection(InputStream in) throws IOException {
+    while (true) {
+      INodeReferenceSection.INodeReference e = INodeReferenceSection
+          .INodeReference.parseDelimitedFrom(in);
+      if (e == null) {
+        break;
       }
+      refList.add(e);
     }
   }
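LsrPBImage now flattens a DirEntry's two child lists into one array of inode ids: plain children contribute their ids directly, while reference children are resolved through the previously loaded reference list. A small standalone sketch of that merge, with hard-coded stand-in data in place of the protobuf messages:

// Sketch only: hard-coded stand-in data for a DirEntry and the loaded refs.
import java.util.Arrays;

class DirMapSketch {
  public static void main(String[] args) {
    long[] children = {1001L, 1002L};  // DirEntry.children (inode ids)
    int[] refChildren = {0};           // DirEntry.refChildren (indices)
    long[] refReferredIds = {2001L};   // referredId of each loaded INodeReference

    long[] all = new long[children.length + refChildren.length];
    for (int i = 0; i < children.length; i++) {
      all[i] = children[i];
    }
    for (int i = children.length; i < all.length; i++) {
      all[i] = refReferredIds[refChildren[i - children.length]];
    }
    System.out.println(Arrays.toString(all)); // [1001, 1002, 2001]
  }
}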
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java?rev=1567812&r1=1567811&r2=1567812&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java Thu Feb 13 00:00:42 2014
@@ -43,6 +43,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection;
@@ -132,6 +133,9 @@
       case INODE:
         dumpINodeSection(is);
         break;
+      case INODE_REFRENCE:
+        dumpINodeReferenceSection(is);
+        break;
       case INODE_DIR:
         dumpINodeDirectorySection(is);
         break;
@@ -227,18 +231,27 @@
       for (long id : e.getChildrenList()) {
         o("inode", id);
       }
-      for (int i = 0; i < e.getNumOfRef(); i++) {
-        INodeSection.INodeReference r = INodeSection.INodeReference
-            .parseDelimitedFrom(in);
-        dumpINodeReference(r);
-
+      for (int refId : e.getRefChildrenList()) {
+        o("inodereference-index", refId);
       }
       out.print("</directory>\n");
     }
     out.print("</INodeDirectorySection>\n");
   }
 
-  private void dumpINodeReference(INodeSection.INodeReference r) {
+  private void dumpINodeReferenceSection(InputStream in) throws IOException {
+    out.print("<INodeReferenceSection>");
+    while (true) {
+      INodeReferenceSection.INodeReference e = INodeReferenceSection
+          .INodeReference.parseDelimitedFrom(in);
+      if (e == null) {
+        break;
+      }
+      dumpINodeReference(e);
+    }
+  }
+
+  private void dumpINodeReference(INodeReferenceSection.INodeReference r) {
     out.print("<ref>");
     o("referredId", r.getReferredId()).o("name", r.getName().toStringUtf8())
         .o("dstSnapshotId", r.getDstSnapshotId())
@@ -362,10 +375,15 @@
         o("name", ce.getName().toStringUtf8());
         out.print("</created>\n");
       }
-      for (int j = 0; j < d.getNumOfDeletedRef(); ++j) {
-        INodeSection.INodeReference r = INodeSection.INodeReference
-            .parseDelimitedFrom(in);
-        dumpINodeReference(r);
+      for (long did : d.getDeletedINodeList()) {
+        out.print("<deleted>");
+        o("inode", did);
+        out.print("</deleted>\n");
+      }
+      for (int dRefid : d.getDeletedINodeRefList()) {
+        out.print("<deleted>");
+        o("inodereference-index", dRefid);
+        out.print("</deleted>\n");
      }
      out.print("</dirdiff>\n");
    }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto?rev=1567812&r1=1567811&r2=1567812&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto Thu Feb 13 00:00:42 2014
@@ -112,17 +112,6 @@ message INodeSection {
     optional bytes target = 2;
   }
 
-  message INodeReference {
-    // id of the referred inode
-    optional uint64 referredId = 1;
-    // local name recorded in WithName
-    optional bytes name = 2;
-    // recorded in DstReference
-    optional uint32 dstSnapshotId = 3;
-    // recorded in WithName
-    optional uint32 lastSnapshotId = 4;
-  }
-
   message INode {
     enum Type {
       FILE = 1;
@@ -163,13 +152,28 @@ message FilesUnderConstructionSection {
 message INodeDirectorySection {
   message DirEntry {
     optional uint64 parent = 1;
+    // children that are not reference nodes
     repeated uint64 children = 2 [packed = true];
-    optional uint64 numOfRef = 3;
-    // repeated INodeReference...
+    // children that are reference nodes, each element is a reference node id
+    repeated uint32 refChildren = 3 [packed = true];
   }
   // repeated DirEntry, ended at the boundary of the section.
 }
 
+message INodeReferenceSection {
+  message INodeReference {
+    // id of the referred inode
+    optional uint64 referredId = 1;
+    // local name recorded in WithName
+    optional bytes name = 2;
+    // recorded in DstReference
+    optional uint32 dstSnapshotId = 3;
+    // recorded in WithName
+    optional uint32 lastSnapshotId = 4;
+  }
+  // repeated INodeReference...
+}
+
 /**
  * This section records the information about snapshot
  * NAME: SNAPSHOT
@@ -204,10 +208,10 @@
     optional bytes name = 4;
     optional INodeSection.INodeDirectory snapshotCopy = 5;
     optional uint32 createdListSize = 6;
-    optional uint32 numOfDeletedRef = 7; // number of reference nodes in deleted list
-    repeated uint64 deletedINode = 8 [packed = true]; // id of deleted inode
+    repeated uint64 deletedINode = 7 [packed = true]; // id of deleted inodes
+    // id of reference nodes in the deleted list
+    repeated uint32 deletedINodeRef = 8 [packed = true];
     // repeated CreatedListEntry (size is specified by createdListSize)
-    // repeated INodeReference (reference inodes in deleted list)
   }

  message FileDiff {
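A side note on the [packed = true] annotation carried by the new refChildren and deletedINodeRef fields: packed repeated scalars are written as a single tag plus one length-prefixed run of varints, rather than one tag per element. A sketch of what that looks like at the wire level, assuming protobuf-java's CodedOutputStream is available (field number 3 mirrors refChildren; the data values are made up):

// Sketch only: hand-encodes a packed repeated uint32 field the way
// "repeated uint32 refChildren = 3 [packed = true]" is laid out on the wire.
import com.google.protobuf.CodedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

class PackedFieldSketch {
  public static void main(String[] args) throws IOException {
    int[] refChildren = {0, 1, 2, 300};

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    CodedOutputStream out = CodedOutputStream.newInstance(bos);

    int payloadSize = 0;
    for (int v : refChildren) {
      payloadSize += CodedOutputStream.computeUInt32SizeNoTag(v);
    }
    out.writeTag(3, 2);                // one tag: field 3, wire type 2 (length-delimited)
    out.writeUInt32NoTag(payloadSize); // one length prefix for the whole run
    for (int v : refChildren) {
      out.writeUInt32NoTag(v);         // raw varints, no per-element tags
    }
    out.flush();

    System.out.println("packed encoding uses " + bos.size() + " bytes");
  }
}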