HDFS-17190. EC: Fix bug of OIV processing XAttr. (#6067). Contributed by Shuyan Zhang.
Signed-off-by: He Xiaoqiao <hexiaoqiao@apache.org>
commit 9e489b9ab5 (parent 120620c1b7)
@@ -72,7 +72,7 @@ public static String getName(int record) {
     return SerialNumberManager.XATTR.getString(nid);
   }
 
-  static int toInt(XAttr a) {
+  public static int toInt(XAttr a) {
     int nid = SerialNumberManager.XATTR.getSerialNumber(a.getName());
     int nsOrd = a.getNameSpace().ordinal();
     long value = NS.BITS.combine(nsOrd & NS_MASK, 0L);
@@ -52,6 +52,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.permission.PermissionStatus;
+import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
 import org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode;
@@ -514,6 +515,8 @@ public long getParentId(long id) throws IOException {
   private File filename;
   private int numThreads;
   private String parallelOutputFile;
+  private final XAttr ecXAttr =
+      XAttrHelper.buildXAttr(XATTR_ERASURECODING_POLICY);
 
   /**
    * Construct a PB FsImage writer to generate text file.
@@ -1040,7 +1043,7 @@ public static void mergeFiles(String[] srcPaths, String resultPath)
       List<XAttr> xattrs =
           FSImageFormatPBINode.Loader.loadXAttrs(xattrFeatureProto, stringTable);
       for (XAttr xattr : xattrs) {
-        if (XATTR_ERASURECODING_POLICY.contains(xattr.getName())){
+        if (xattr.equalsIgnoreValue(ecXAttr)){
          try{
            ByteArrayInputStream bIn = new ByteArrayInputStream(xattr.getValue());
            DataInputStream dIn = new DataInputStream(bIn);
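Note on the fix above: the old condition only checked whether the XAttr's name is a substring of XATTR_ERASURECODING_POLICY, so an unrelated attribute such as "system.hdfs" (raw name "hdfs") was mistaken for the erasure coding policy XAttr and the OIV text writer then tried to decode its value as an EC policy. The new condition compares the whole XAttr (namespace plus name) against the precomputed ecXAttr via equalsIgnoreValue. A minimal standalone sketch of the difference follows; the constant's value and the sample attribute name are assumptions used only for illustration, not code from this patch:

public class EcXAttrCheckSketch {
  public static void main(String[] args) {
    // Assumed value of the XATTR_ERASURECODING_POLICY constant.
    final String XATTR_ERASURECODING_POLICY = "system.hdfs.erasurecoding.policy";

    // Raw name of a hypothetical user-defined attribute "system.hdfs".
    String userXAttrName = "hdfs";

    // Old check: substring match on the name alone. This prints "true",
    // so an ordinary attribute is wrongly treated as the EC policy XAttr.
    System.out.println(XATTR_ERASURECODING_POLICY.contains(userXAttrName));

    // New check (conceptually): xattr.equalsIgnoreValue(ecXAttr) compares
    // namespace and name exactly; only the genuine EC policy attribute
    // name matches. This prints "false" for the user attribute.
    String ecPolicyName = "hdfs.erasurecoding.policy";
    System.out.println(ecPolicyName.equals(userXAttrName));
  }
}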
@@ -29,6 +29,7 @@
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
@@ -66,6 +67,7 @@
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.SafeModeAction;
+import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
@@ -73,20 +75,26 @@
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.BlockType;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
+import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto;
 import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;
+import org.apache.hadoop.hdfs.server.namenode.XAttrFormat;
 import org.apache.hadoop.hdfs.util.MD5FileUtils;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.MD5Hash;
+import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
 import org.apache.hadoop.net.NetUtils;
@@ -719,7 +727,13 @@ private FsImageProto.INodeSection.INode createSampleFileInode() {
         .build();
   }
 
-  private FsImageProto.INodeSection.INode createSampleDirInode() {
+  private FsImageProto.INodeSection.INode createSampleDirInode()
+      throws IOException {
+    return createSampleDirInode(false);
+  }
+
+  private FsImageProto.INodeSection.INode createSampleDirInode(
+      boolean builXAttr) throws IOException {
     FsImageProto.INodeSection.AclFeatureProto.Builder acl =
         FsImageProto.INodeSection.AclFeatureProto.newBuilder()
         .addEntries(2);
@@ -729,6 +743,19 @@ private FsImageProto.INodeSection.INode createSampleDirInode() {
         .setNsQuota(700)
         .setModificationTime(SAMPLE_TIMESTAMP)
         .setAcl(acl);
+    if (builXAttr) {
+      ByteArrayOutputStream bOut = new ByteArrayOutputStream();
+      DataOutputStream dOut = new DataOutputStream(bOut);
+      WritableUtils.writeString(dOut, "test-value");
+      XAttr a = XAttrHelper.buildXAttr("system.hdfs", bOut.toByteArray());
+      XAttrFeatureProto.Builder b = XAttrFeatureProto.newBuilder();
+      XAttrCompactProto.Builder xAttrCompactBuilder = XAttrCompactProto.newBuilder();
+      int v = XAttrFormat.toInt(a);
+      xAttrCompactBuilder.setName(v);
+      xAttrCompactBuilder.setValue(PBHelperClient.getByteString(a.getValue()));
+      b.addXAttrs(xAttrCompactBuilder.build());
+      directory.setXAttrs(b);
+    }
 
     return FsImageProto.INodeSection.INode.newBuilder()
         .setType(FsImageProto.INodeSection.INode.Type.DIRECTORY)
@@ -754,6 +781,11 @@ private FsImageProto.INodeSection.INode createSampleSymlink() {
 
   private PBImageDelimitedTextWriter createDelimitedWriterSpy()
       throws IOException {
+    return createDelimitedWriterSpy(false);
+  }
+
+  private PBImageDelimitedTextWriter createDelimitedWriterSpy(boolean printECPolicy)
+      throws IOException {
     FsPermission fsPermission = new FsPermission(
         FsAction.ALL,
         FsAction.WRITE_EXECUTE,
@@ -764,7 +796,9 @@ private PBImageDelimitedTextWriter createDelimitedWriterSpy()
         fsPermission);
 
     PBImageDelimitedTextWriter writer = new
-        PBImageDelimitedTextWriter(null, ",", "");
+        PBImageDelimitedTextWriter(null, ",", "", false,
+        printECPolicy, 1, "-", new Configuration());
+
     PBImageDelimitedTextWriter writerSpy = spy(writer);
     when(writerSpy.getPermission(anyLong())).thenReturn(permStatus);
     return writerSpy;
@@ -786,6 +820,14 @@ public void testWriterOutputEntryBuilderForDirectory() throws IOException {
         createSampleDirInode()));
   }
 
+  @Test
+  public void testECXAttr() throws IOException {
+    assertEquals("/path/dir,0,2000-01-01 00:00,1970-01-01 00:00" +
+            ",0,0,0,700,1000,drwx-wx-w-+,user_1,group_1,-",
+        createDelimitedWriterSpy(true).getEntry("/path/",
+            createSampleDirInode(true)));
+  }
+
   @Test
   public void testWriterOutputEntryBuilderForSymlink() throws IOException {
     assertEquals("/path/sym,0,2000-01-01 00:00,2000-01-01 00:00" +
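The new testECXAttr case feeds the delimited writer a sample directory inode carrying the user-style "system.hdfs" attribute and asserts that the trailing EC policy column is rendered as "-" (no policy), rather than the attribute being misread as an erasure coding policy. To run just this case, a standard single-test Maven invocation from the hadoop-hdfs module should work (module path assumed): mvn test -Dtest=TestOfflineImageViewer#testECXAttr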