HDFS-10983. OIV tool should make an EC file explicit. Contributed by Manoj Govindassamy.

Committed by Andrew Wang on 2017-03-08 15:36:19 -08:00
parent d7762a5511
commit 5ca6ef0c26
5 changed files with 112 additions and 8 deletions
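Before this change, the OIV XML writer marked an erasure-coded file only indirectly: blockType was printed for any non-CONTIGUOUS file, and replication was dumped straight from the protobuf field, which is not meaningful for striped files. This patch makes EC files explicit end to end: the XML writer emits an erasureCodingPolicyId element and reports replication as INodeFile.DEFAULT_REPL_FOR_STRIPED_BLOCKS for striped inodes, the OIV reconstructor parses the new element back into the fsimage protobuf, and TestOfflineImageViewer gains a SAX handler that verifies both fields.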

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java

@@ -332,6 +332,7 @@ private INodeFile loadINodeFile(INodeSection.INode n) {
     BlockType blockType = PBHelperClient.convert(f.getBlockType());
     LoaderContext state = parent.getLoaderContext();
     boolean isStriped = f.hasErasureCodingPolicyID();
+    assert ((!isStriped) || (isStriped && !f.hasReplication()));
     Short replication = (!isStriped ? (short) f.getReplication() : null);
     ErasureCodingPolicy ecPolicy = isStriped ?
         ErasureCodingPolicyManager.getPolicyByPolicyID(
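The added assert states the invariant the rest of the patch relies on: a striped inode never carries a replication field in the fsimage protobuf, so the loader leaves replication null here and derives the file's layout from the erasure coding policy ID instead.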

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java

@@ -647,6 +647,10 @@ private INodeSection.INodeFile.Builder createINodeFileBuilder(Node node)
       break;
     case "STRIPED":
       bld.setBlockType(HdfsProtos.BlockTypeProto.STRIPED);
+      ival = node.removeChildInt(INODE_SECTION_EC_POLICY_ID);
+      if (ival != null) {
+        bld.setErasureCodingPolicyID(ival);
+      }
       break;
     default:
       throw new IOException("INode XML found with unknown <blocktype> " +
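This is the reconstruction half of the round trip: when an inode's XML carries <blockType>STRIPED</blockType>, the reconstructor now also consumes the erasureCodingPolicyId child element (defined in PBImageXmlWriter below) and writes it back into the inode's protobuf builder, so an fsimage rebuilt from XML keeps the file's EC policy.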

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java

@ -40,7 +40,6 @@
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTypeProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName;
@@ -59,6 +58,7 @@
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection;
+import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.hdfs.util.XMLUtils;
 import org.apache.hadoop.util.LimitInputStream;
 import com.google.common.collect.ImmutableList;
@@ -132,6 +132,8 @@ public final class PBImageXmlWriter {
   public static final String INODE_SECTION_STORAGE_POLICY_ID =
       "storagePolicyId";
   public static final String INODE_SECTION_BLOCK_TYPE = "blockType";
+  public static final String INODE_SECTION_EC_POLICY_ID =
+      "erasureCodingPolicyId";
   public static final String INODE_SECTION_NS_QUOTA = "nsquota";
   public static final String INODE_SECTION_DS_QUOTA = "dsquota";
   public static final String INODE_SECTION_TYPE_QUOTA = "typeQuota";
@@ -472,8 +474,12 @@ private void dumpINodeReference(INodeReferenceSection.INodeReference r) {
   }
 
   private void dumpINodeFile(INodeSection.INodeFile f) {
-    o(SECTION_REPLICATION, f.getReplication())
-        .o(INODE_SECTION_MTIME, f.getModificationTime())
+    if (f.hasErasureCodingPolicyID()) {
+      o(SECTION_REPLICATION, INodeFile.DEFAULT_REPL_FOR_STRIPED_BLOCKS);
+    } else {
+      o(SECTION_REPLICATION, f.getReplication());
+    }
+    o(INODE_SECTION_MTIME, f.getModificationTime())
         .o(INODE_SECTION_ATIME, f.getAccessTime())
         .o(INODE_SECTION_PREFERRED_BLOCK_SIZE, f.getPreferredBlockSize())
         .o(INODE_SECTION_PERMISSION, dumpPermission(f.getPermission()));
@@ -495,8 +501,9 @@ private void dumpINodeFile(INodeSection.INodeFile f) {
     if (f.hasStoragePolicyID()) {
       o(INODE_SECTION_STORAGE_POLICY_ID, f.getStoragePolicyID());
     }
-    if (f.getBlockType() != BlockTypeProto.CONTIGUOUS) {
+    if (f.hasErasureCodingPolicyID()) {
       o(INODE_SECTION_BLOCK_TYPE, f.getBlockType().name());
+      o(INODE_SECTION_EC_POLICY_ID, f.getErasureCodingPolicyID());
     }
 
     if (f.hasFileUC()) {
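Taken together, the two writer changes make an erasure-coded file explicit in the OIV XML output. A striped inode would now render along these lines; this is an illustrative sketch, not output captured from the patch, and it assumes DEFAULT_REPL_FOR_STRIPED_BLOCKS is 1 and uses a made-up policy id value:

    <inode>
      <id>16386</id>
      <type>FILE</type>
      <name>striped-file</name>
      <replication>1</replication>  <!-- assumed INodeFile.DEFAULT_REPL_FOR_STRIPED_BLOCKS -->
      <mtime>...</mtime>
      ...
      <blockType>STRIPED</blockType>
      <erasureCodingPolicyId>4</erasureCodingPolicyId>  <!-- illustrative id value -->
    </inode>

Note that replication no longer exposes the raw protobuf field for striped files, and erasureCodingPolicyId is emitted only when the inode actually carries one.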

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImage.java

@@ -223,6 +223,7 @@ private void testSaveAndLoadStripedINodeFile(FSNamesystem fsn, Configuration con
     // blocks to/from legacy fsimage
     assertEquals(3, fileByLoaded.getBlocks().length);
     assertEquals(preferredBlockSize, fileByLoaded.getPreferredBlockSize());
+    assertEquals(file.getFileReplication(), fileByLoaded.getFileReplication());
     if (isUC) {
       assertEquals(client,

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java

@@ -76,11 +76,13 @@
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.protocol.BlockType;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.server.namenode.ErasureCodingPolicyManager;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
+import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.io.IOUtils;
@@ -91,9 +93,8 @@
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.xml.sax.Attributes;
 import org.xml.sax.InputSource;
 import org.xml.sax.SAXException;
 import org.xml.sax.helpers.DefaultHandler;
@@ -112,7 +113,6 @@ public class TestOfflineImageViewer {
   // namespace as written to dfs, to be compared with viewer's output
   final static HashMap<String, FileStatus> writtenFiles = Maps.newHashMap();
   static int dirCount = 0;
-  private static File tempDir;
 
   // Create a populated namespace for later testing. Save its contents to a
@@ -358,6 +358,96 @@ public void testFileDistributionCalculatorWithOptions() throws Exception {
     assertEquals(0, status);
   }
 
+  /**
+   * SAX handler to verify EC Files and their policies.
+   */
+  class ECXMLHandler extends DefaultHandler {
+    private boolean isInode = false;
+    private boolean isAttrRepl = false;
+    private boolean isAttrName = false;
+    private boolean isXAttrs = false;
+    private boolean isAttrECPolicy = false;
+    private boolean isAttrBlockType = false;
+    private String currentInodeName;
+    private String currentECPolicy;
+    private String currentBlockType;
+    private String currentRepl;
+
+    @Override
+    public void startElement(String uri, String localName, String qName,
+        Attributes attributes) throws SAXException {
+      super.startElement(uri, localName, qName, attributes);
+      if (qName.equalsIgnoreCase(PBImageXmlWriter.INODE_SECTION_INODE)) {
+        isInode = true;
+      } else if (isInode && !isXAttrs && qName.equalsIgnoreCase(
+          PBImageXmlWriter.SECTION_NAME)) {
+        isAttrName = true;
+      } else if (isInode && qName.equalsIgnoreCase(
+          PBImageXmlWriter.SECTION_REPLICATION)) {
+        isAttrRepl = true;
+      } else if (isInode && qName.equalsIgnoreCase(
+          PBImageXmlWriter.INODE_SECTION_EC_POLICY_ID)) {
+        isAttrECPolicy = true;
+      } else if (isInode && qName.equalsIgnoreCase(
+          PBImageXmlWriter.INODE_SECTION_BLOCK_TYPE)) {
+        isAttrBlockType = true;
+      } else if (isInode && qName.equalsIgnoreCase(
+          PBImageXmlWriter.INODE_SECTION_XATTRS)) {
+        isXAttrs = true;
+      }
+    }
+
+    @Override
+    public void endElement(String uri, String localName, String qName)
+        throws SAXException {
+      super.endElement(uri, localName, qName);
+      if (qName.equalsIgnoreCase(PBImageXmlWriter.INODE_SECTION_INODE)) {
+        if (currentInodeName != null && currentInodeName.length() > 0) {
+          if (currentBlockType != null && currentBlockType.equalsIgnoreCase(
+              BlockType.STRIPED.name())) {
+            Assert.assertEquals("INode '"
+                + currentInodeName + "' has unexpected EC Policy!",
+                Byte.parseByte(currentECPolicy),
+                ErasureCodingPolicyManager.getPolicyByPolicyID(
+                    HdfsConstants.XOR_2_1_POLICY_ID).getId());
+            Assert.assertEquals("INode '"
+                + currentInodeName + "' has unexpected replication!",
+                currentRepl,
+                Short.toString(INodeFile.DEFAULT_REPL_FOR_STRIPED_BLOCKS));
+          }
+        }
+        isInode = false;
+        currentInodeName = "";
+        currentECPolicy = "";
+        currentRepl = "";
+      } else if (qName.equalsIgnoreCase(
+          PBImageXmlWriter.INODE_SECTION_XATTRS)) {
+        isXAttrs = false;
+      }
+    }
+
+    @Override
+    public void characters(char[] ch, int start, int length)
+        throws SAXException {
+      super.characters(ch, start, length);
+      String value = new String(ch, start, length);
+      if (isAttrName) {
+        currentInodeName = value;
+        isAttrName = false;
+      } else if (isAttrRepl) {
+        currentRepl = value;
+        isAttrRepl = false;
+      } else if (isAttrECPolicy) {
+        currentECPolicy = value;
+        isAttrECPolicy = false;
+      } else if (isAttrBlockType) {
+        currentBlockType = value;
+        isAttrBlockType = false;
+      }
+    }
+  }
+
   @Test
   public void testPBImageXmlWriter() throws IOException, SAXException,
       ParserConfigurationException {
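The handler expects every striped inode in the fixture image to report the XOR-2-1 policy id and the striped default replication, which presumes the namespace created for the test contains such a file. A hedged sketch of what that setup could look like inside the existing image-creation code, assuming a DistributedFileSystem named hdfs; the path, the DFSClient.setErasureCodingPolicy call shape, and the pathToFileEntry helper are assumptions of this sketch, not part of the hunks shown:

  // Illustrative only: apply the XOR-2-1 policy to a directory and write one
  // file into it, so the saved fsimage contains a STRIPED inode for the
  // SAX handler above to check.
  Path ecDir = new Path("/ec");
  hdfs.mkdirs(ecDir);
  hdfs.getClient().setErasureCodingPolicy(ecDir.toString(),
      ErasureCodingPolicyManager.getPolicyByPolicyID(
          HdfsConstants.XOR_2_1_POLICY_ID));
  DFSTestUtil.createFile(hdfs, new Path(ecDir, "file"), 1024, (short) 1, 0);
  writtenFiles.put("/ec/file", pathToFileEntry(hdfs, "/ec/file"));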
@@ -368,7 +458,8 @@ public void testPBImageXmlWriter() throws IOException, SAXException,
     SAXParserFactory spf = SAXParserFactory.newInstance();
     SAXParser parser = spf.newSAXParser();
     final String xml = output.toString();
-    parser.parse(new InputSource(new StringReader(xml)), new DefaultHandler());
+    ECXMLHandler ecxmlHandler = new ECXMLHandler();
+    parser.parse(new InputSource(new StringReader(xml)), ecxmlHandler);
   }
 
   @Test