HDFS-6249. Output AclEntry in PBImageXmlWriter. Contributed by surendra singh lilhore.

Akira Ajisaka 2015-06-17 17:41:10 -07:00
parent 1a169a26bc
commit cc432885ad
3 changed files with 44 additions and 3 deletions


@@ -653,6 +653,9 @@ Release 2.8.0 - UNRELEASED
    HDFS-8589. Fix unused imports in BPServiceActor and BlockReportLeaseManager
    (cmccabe)

    HDFS-6249. Output AclEntry in PBImageXmlWriter.
    (surendra singh lilhore via aajisaka)

  OPTIMIZATIONS

    HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than


@@ -29,6 +29,7 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto;
@@ -41,6 +42,7 @@
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection.FileUnderConstructionEntry;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.AclFeatureProto;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection;
@@ -51,7 +53,7 @@
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection;
import org.apache.hadoop.hdfs.util.XMLUtils;
import org.apache.hadoop.util.LimitInputStream;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;

/**
@@ -188,7 +190,7 @@ private void dumpFileUnderConstructionSection(InputStream in)
  private void dumpINodeDirectory(INodeDirectory d) {
    o("mtime", d.getModificationTime()).o("permission",
        dumpPermission(d.getPermission()));
    dumpAcls(d.getAcl());
    if (d.hasDsQuota() && d.hasNsQuota()) {
      o("nsquota", d.getNsQuota()).o("dsquota", d.getDsQuota());
    }
@@ -242,7 +244,7 @@ private void dumpINodeFile(INodeSection.INodeFile f) {
        .o("atime", f.getAccessTime())
        .o("perferredBlockSize", f.getPreferredBlockSize())
        .o("permission", dumpPermission(f.getPermission()));
    dumpAcls(f.getAcl());
    if (f.getBlocksCount() > 0) {
      out.print("<blocks>");
      for (BlockProto b : f.getBlocksList()) {
@@ -263,6 +265,18 @@ private void dumpINodeFile(INodeSection.INodeFile f) {
    }
  }

  private void dumpAcls(AclFeatureProto aclFeatureProto) {
    ImmutableList<AclEntry> aclEntryList = FSImageFormatPBINode.Loader
        .loadAclEntries(aclFeatureProto, stringTable);
    if (aclEntryList.size() > 0) {
      out.print("<acls>");
      for (AclEntry aclEntry : aclEntryList) {
        o("acl", aclEntry.toString());
      }
      out.print("</acls>");
    }
  }

  private void dumpINodeSection(InputStream in) throws IOException {
    INodeSection s = INodeSection.parseDelimitedFrom(in);
    out.print("<INodeSection>");
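Note: the new dumpAcls helper wraps an inode's ACL entries in an <acls> element, emitting one <acl> child per entry via AclEntry.toString(). Purely as an illustration (the exact entries depend on the ACLs set on the inode, and the writer does not insert the whitespace shown here), a file with a named-user access entry might be rendered as:

    <acls><acl>user:foo:rw-</acl><acl>group::r--</acl></acls>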


@@ -17,14 +17,23 @@
 */
package org.apache.hadoop.hdfs.tools.offlineImageViewer;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.io.RandomAccessFile;
import java.io.StringReader;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.HashMap;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -42,6 +51,9 @@
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;

import static org.apache.hadoop.fs.permission.AclEntryScope.ACCESS;
import static org.apache.hadoop.fs.permission.AclEntryScope.DEFAULT;
@@ -202,4 +214,16 @@ public void testWebImageViewerForAcl() throws Exception {
      viewer.close();
    }
  }

  @Test
  public void testPBImageXmlWriterForAcl() throws Exception{
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    PrintStream o = new PrintStream(output);
    PBImageXmlWriter v = new PBImageXmlWriter(new Configuration(), o);
    v.visit(new RandomAccessFile(originalFsimage, "r"));
    SAXParserFactory spf = SAXParserFactory.newInstance();
    SAXParser parser = spf.newSAXParser();
    final String xml = output.toString();
    parser.parse(new InputSource(new StringReader(xml)), new DefaultHandler());
  }
}
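For a quick end-to-end check beyond the well-formedness test above, the XML processor of the offline image viewer can be run against a saved fsimage; after this change the output should include <acls> elements for inodes that carry ACLs. The fsimage path below is only a placeholder:

    hdfs oiv -p XML -i /path/to/fsimage_0000000000000000042 -o fsimage.xml
    grep "<acls>" fsimage.xml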