diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java
index 66ce9ee52e..433d9b7d5f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java
@@ -160,6 +160,10 @@ public HAServiceState getState() {
     return state;
   }
 
+  public void setClusterID(String clusterID) {
+    this.clusterID = clusterID;
+  }
+
   @Override
   public String toString(){
     return super.toString() + ";bpid=" + blockPoolID;
diff --git a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileSystemImage.java b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileSystemImage.java
index 2e57c9f747..b66c8309a5 100644
--- a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileSystemImage.java
+++ b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileSystemImage.java
@@ -68,6 +68,7 @@ static Options options() {
     options.addOption("b", "blockclass", true, "Block output class");
     options.addOption("i", "blockidclass", true, "Block resolver class");
     options.addOption("c", "cachedirs", true, "Max active dirents");
+    options.addOption("cid", "clusterID", true, "Cluster ID");
     options.addOption("h", "help", false, "Print usage");
     return options;
   }
@@ -112,6 +113,9 @@ public int run(String[] argv) throws Exception {
       case "c":
         opts.cache(Integer.parseInt(o.getValue()));
         break;
+      case "cid":
+        opts.clusterID(o.getValue());
+        break;
       default:
         throw new UnsupportedOperationException("Internal error");
       }
diff --git a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java
index 390bb393d8..9bd8852007 100644
--- a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java
+++ b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java
@@ -126,13 +126,16 @@ public ImageWriter(Options opts) throws IOException {
           throw new IllegalStateException("Incompatible layout " +
               info.getLayoutVersion() + " (expected " + LAYOUT_VERSION);
         }
+        // set the cluster id, if given
+        if (opts.clusterID.length() > 0) {
+          info.setClusterID(opts.clusterID);
+        }
         stor.format(info);
         blockPoolID = info.getBlockPoolID();
       }
       outdir = new Path(tmp, "current");
       out = outfs.create(new Path(outdir, "fsimage_0000000000000000000"));
     } else {
-      // XXX necessary? writing a NNStorage now...
       outdir = null;
       outfs = null;
       out = opts.outStream;
@@ -517,6 +520,7 @@ public static class Options implements Configurable {
     private UGIResolver ugis;
     private Class<? extends UGIResolver> ugisClass;
     private BlockAliasMap<FileRegion> blocks;
+    private String clusterID;
 
     @SuppressWarnings("rawtypes")
     private Class<? extends BlockAliasMap> aliasMap;
@@ -543,6 +547,7 @@ public void setConf(Configuration conf) {
           NullBlockAliasMap.class, BlockAliasMap.class);
       blockIdsClass = conf.getClass(BLOCK_RESOLVER_CLASS,
           FixedBlockResolver.class, BlockResolver.class);
+      clusterID = "";
     }
 
     @Override
@@ -601,6 +606,10 @@ public Options blocks(Class<? extends BlockAliasMap> blocksClass) {
       return this;
     }
 
+    public Options clusterID(String clusterID) {
+      this.clusterID = clusterID;
+      return this;
+    }
   }
 }
diff --git a/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeProvidedImplementation.java b/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeProvidedImplementation.java
index 1f6aebbc09..22f00aa72f 100644
--- a/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeProvidedImplementation.java
+++ b/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeProvidedImplementation.java
@@ -155,11 +155,18 @@ public void shutdown() throws Exception {
 
   void createImage(TreeWalk t, Path out,
       Class<? extends BlockResolver> blockIdsClass) throws Exception {
+    createImage(t, out, blockIdsClass, "");
+  }
+
+  void createImage(TreeWalk t, Path out,
+      Class<? extends BlockResolver> blockIdsClass, String clusterID)
+      throws Exception {
     ImageWriter.Options opts = ImageWriter.defaults();
     opts.setConf(conf);
     opts.output(out.toString())
         .blocks(TextFileRegionAliasMap.class)
-        .blockIds(blockIdsClass);
+        .blockIds(blockIdsClass)
+        .clusterID(clusterID);
     try (ImageWriter w = new ImageWriter(opts)) {
       for (TreePath e : t) {
         w.accept(e);
@@ -562,4 +569,19 @@ private void verifyFileLocation(int fileIndex)
           dnInfos[0].getDatanodeUuid());
     }
   }
+
+  @Test
+  public void testSetClusterID() throws Exception {
+    String clusterID = "PROVIDED-CLUSTER";
+    createImage(new FSTreeWalk(NAMEPATH, conf), NNDIRPATH,
+        FixedBlockResolver.class, clusterID);
+    // 2 Datanodes, 1 PROVIDED and other DISK
+    startCluster(NNDIRPATH, 2, null,
+        new StorageType[][] {
+            {StorageType.PROVIDED},
+            {StorageType.DISK}},
+        false);
+    NameNode nn = cluster.getNameNode();
+    assertEquals(clusterID, nn.getNamesystem().getClusterId());
+  }
 }