From 9e489b9ab5367b431849f957d0fe1ffadc43cd47 Mon Sep 17 00:00:00 2001 From: zhangshuyan <81411509+zhangshuyan0@users.noreply.github.com> Date: Mon, 18 Sep 2023 10:35:13 +0800 Subject: [PATCH] HDFS-17190. EC: Fix bug of OIV processing XAttr. (#6067). Contributed by Shuyan Zhang. Signed-off-by: He Xiaoqiao --- .../hdfs/server/namenode/XAttrFormat.java | 2 +- .../offlineImageViewer/PBImageTextWriter.java | 5 +- .../TestOfflineImageViewer.java | 46 ++++++++++++++++++- 3 files changed, 49 insertions(+), 4 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFormat.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFormat.java index 4d46e691df..69f21c176d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFormat.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFormat.java @@ -72,7 +72,7 @@ public static String getName(int record) { return SerialNumberManager.XATTR.getString(nid); } - static int toInt(XAttr a) { + public static int toInt(XAttr a) { int nid = SerialNumberManager.XATTR.getSerialNumber(a.getName()); int nsOrd = a.getNameSpace().ordinal(); long value = NS.BITS.combine(nsOrd & NS_MASK, 0L); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java index bd6c860ccf..f2b329fa2f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java @@ -52,6 +52,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.XAttr; import 
org.apache.hadoop.fs.permission.PermissionStatus; +import org.apache.hadoop.hdfs.XAttrHelper; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite; import org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode; @@ -514,6 +515,8 @@ public long getParentId(long id) throws IOException { private File filename; private int numThreads; private String parallelOutputFile; + private final XAttr ecXAttr = + XAttrHelper.buildXAttr(XATTR_ERASURECODING_POLICY); /** * Construct a PB FsImage writer to generate text file. @@ -1040,7 +1043,7 @@ public static void mergeFiles(String[] srcPaths, String resultPath) List xattrs = FSImageFormatPBINode.Loader.loadXAttrs(xattrFeatureProto, stringTable); for (XAttr xattr : xattrs) { - if (XATTR_ERASURECODING_POLICY.contains(xattr.getName())){ + if (xattr.equalsIgnoreValue(ecXAttr)){ try{ ByteArrayInputStream bIn = new ByteArrayInputStream(xattr.getValue()); DataInputStream dIn = new DataInputStream(bIn); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java index b4ba775732..c24c9132cb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java @@ -29,6 +29,7 @@ import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -66,6 +67,7 @@ import org.apache.hadoop.fs.FileSystemTestHelper; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.SafeModeAction; +import 
org.apache.hadoop.fs.XAttr; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.PermissionStatus; @@ -73,20 +75,26 @@ import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.MiniDFSCluster; +import org.apache.hadoop.hdfs.XAttrHelper; import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse; import org.apache.hadoop.hdfs.protocol.BlockType; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState; import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos; +import org.apache.hadoop.hdfs.protocolPB.PBHelperClient; import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil; import org.apache.hadoop.hdfs.server.namenode.FsImageProto; +import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrCompactProto; +import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.XAttrFeatureProto; import org.apache.hadoop.hdfs.server.namenode.INodeFile; import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion; +import org.apache.hadoop.hdfs.server.namenode.XAttrFormat; import org.apache.hadoop.hdfs.util.MD5FileUtils; import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.MD5Hash; +import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.io.erasurecode.ECSchema; import org.apache.hadoop.io.erasurecode.ErasureCodeConstants; import org.apache.hadoop.net.NetUtils; @@ -719,7 +727,13 @@ private FsImageProto.INodeSection.INode createSampleFileInode() { .build(); } - private FsImageProto.INodeSection.INode createSampleDirInode() { + private FsImageProto.INodeSection.INode createSampleDirInode() + throws IOException { + return createSampleDirInode(false); + } + + private 
FsImageProto.INodeSection.INode createSampleDirInode( + boolean buildXAttr) throws IOException { FsImageProto.INodeSection.AclFeatureProto.Builder acl = FsImageProto.INodeSection.AclFeatureProto.newBuilder() .addEntries(2); @@ -729,6 +743,19 @@ private FsImageProto.INodeSection.INode createSampleDirInode() { .setNsQuota(700) .setModificationTime(SAMPLE_TIMESTAMP) .setAcl(acl); + if (buildXAttr) { + ByteArrayOutputStream bOut = new ByteArrayOutputStream(); + DataOutputStream dOut = new DataOutputStream(bOut); + WritableUtils.writeString(dOut, "test-value"); + XAttr a = XAttrHelper.buildXAttr("system.hdfs", bOut.toByteArray()); + XAttrFeatureProto.Builder b = XAttrFeatureProto.newBuilder(); + XAttrCompactProto.Builder xAttrCompactBuilder = XAttrCompactProto.newBuilder(); + int v = XAttrFormat.toInt(a); + xAttrCompactBuilder.setName(v); + xAttrCompactBuilder.setValue(PBHelperClient.getByteString(a.getValue())); + b.addXAttrs(xAttrCompactBuilder.build()); + directory.setXAttrs(b); + } return FsImageProto.INodeSection.INode.newBuilder() .setType(FsImageProto.INodeSection.INode.Type.DIRECTORY) @@ -754,6 +781,11 @@ private FsImageProto.INodeSection.INode createSampleSymlink() { private PBImageDelimitedTextWriter createDelimitedWriterSpy() throws IOException { + return createDelimitedWriterSpy(false); + } + + private PBImageDelimitedTextWriter createDelimitedWriterSpy(boolean printECPolicy) + throws IOException { FsPermission fsPermission = new FsPermission( FsAction.ALL, FsAction.WRITE_EXECUTE, @@ -764,7 +796,9 @@ private PBImageDelimitedTextWriter createDelimitedWriterSpy() fsPermission); PBImageDelimitedTextWriter writer = new - PBImageDelimitedTextWriter(null, ",", ""); + PBImageDelimitedTextWriter(null, ",", "", false, + printECPolicy, 1, "-", new Configuration()); + PBImageDelimitedTextWriter writerSpy = spy(writer); when(writerSpy.getPermission(anyLong())).thenReturn(permStatus); return writerSpy; @@ -786,6 +820,14 @@ public void
testWriterOutputEntryBuilderForDirectory() throws IOException { createSampleDirInode())); } + @Test + public void testECXAttr() throws IOException { + assertEquals("/path/dir,0,2000-01-01 00:00,1970-01-01 00:00" + + ",0,0,0,700,1000,drwx-wx-w-+,user_1,group_1,-", + createDelimitedWriterSpy(true).getEntry("/path/", + createSampleDirInode(true))); + } + @Test public void testWriterOutputEntryBuilderForSymlink() throws IOException { assertEquals("/path/sym,0,2000-01-01 00:00,2000-01-01 00:00" +