HDDS-1473. DataNode ID file should be human readable. (#781)

Siddharth 2019-05-02 13:59:15 -07:00 committed by Hanisha Koneru
parent 7a3188d054
commit 1df679985b
3 changed files with 207 additions and 29 deletions


@@ -18,41 +18,33 @@
 package org.apache.hadoop.ozone.container.common.helpers;
 
-import com.google.common.base.Preconditions;
+import static org.apache.commons.io.FilenameUtils.removeExtension;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.CONTAINER_CHECKSUM_ERROR;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.NO_SUCH_ALGORITHM;
+import static org.apache.hadoop.ozone.container.common.impl.ContainerData.CHARSET_ENCODING;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Paths;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
+
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
-    .ContainerCommandRequestProto;
-import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
-    .ContainerCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandResponseProto;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-import org.apache.hadoop.hdds.scm.container.common.helpers
-    .StorageContainerException;
+import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.impl.ContainerData;
 import org.apache.hadoop.ozone.container.common.impl.ContainerDataYaml;
 import org.apache.hadoop.ozone.container.common.impl.ContainerSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.nio.file.Paths;
 import org.yaml.snakeyaml.Yaml;
-import static org.apache.commons.io.FilenameUtils.removeExtension;
-import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
-    .Result.CONTAINER_CHECKSUM_ERROR;
-import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
-    .Result.NO_SUCH_ALGORITHM;
-import static org.apache.hadoop.ozone.container.common.impl.ContainerData
-    .CHARSET_ENCODING;
+
+import com.google.common.base.Preconditions;
 
 /**
  * A set of helper functions to create proper responses.
@@ -211,11 +203,7 @@ public synchronized static void writeDatanodeDetailsTo(
         throw new IOException("Unable to create datanode ID directories.");
       }
     }
-    try (FileOutputStream out = new FileOutputStream(path)) {
-      HddsProtos.DatanodeDetailsProto proto =
-          datanodeDetails.getProtoBufMessage();
-      proto.writeTo(out);
-    }
+    DatanodeIdYaml.createDatanodeIdFile(datanodeDetails, path);
   }
 
   /**
@@ -230,9 +218,8 @@ public synchronized static DatanodeDetails readDatanodeDetailsFrom(File path)
     if (!path.exists()) {
      throw new IOException("Datanode ID file not found.");
     }
-    try(FileInputStream in = new FileInputStream(path)) {
-      return DatanodeDetails.getFromProtoBuf(
-          HddsProtos.DatanodeDetailsProto.parseFrom(in));
+    try {
+      return DatanodeIdYaml.readDatanodeIdFile(path);
     } catch (IOException e) {
       throw new IOException("Failed to parse DatanodeDetails from "
           + path.getAbsolutePath(), e);
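The caller-facing ContainerUtils API is unchanged by these hunks; only the on-disk format of the datanode ID file moves from serialized protobuf to YAML. A minimal round-trip sketch, assuming an illustrative file path and made-up datanode values (none of these are taken from the patch):

import java.io.File;
import java.util.UUID;

import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;

public class DatanodeIdRoundTrip {
  public static void main(String[] args) throws Exception {
    // Illustrative path; a real datanode takes this location from its configuration.
    File idFile = new File("/tmp/datanode.id");
    DatanodeDetails details = DatanodeDetails.newBuilder()
        .setUuid(UUID.randomUUID().toString())
        .setIpAddress("127.0.0.1")
        .setHostName("localhost")
        .build();
    // After this patch the file written here is YAML text instead of binary protobuf.
    ContainerUtils.writeDatanodeDetailsTo(details, idFile);
    // Reading now goes through DatanodeIdYaml.readDatanodeIdFile() internally.
    DatanodeDetails readBack = ContainerUtils.readDatanodeDetailsFrom(idFile);
    System.out.println(readBack.getUuid() + " @ " + readBack.getIpAddress());
  }
}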


@@ -0,0 +1,182 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.container.common.helpers;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
/**
* Class for creating datanode.id file in yaml format.
*/
public final class DatanodeIdYaml {
private DatanodeIdYaml() {
// static helper methods only, no state.
}
/**
* Creates a yaml file using DatanodeDetails. This method expects the path
* validation to be performed by the caller.
*
* @param datanodeDetails {@link DatanodeDetails}
* @param path Path to datanode.id file
*/
public static void createDatanodeIdFile(DatanodeDetails datanodeDetails,
File path) throws IOException {
DumperOptions options = new DumperOptions();
options.setPrettyFlow(true);
options.setDefaultFlowStyle(DumperOptions.FlowStyle.FLOW);
Yaml yaml = new Yaml(options);
try (Writer writer = new OutputStreamWriter(
new FileOutputStream(path), "UTF-8")) {
yaml.dump(getDatanodeDetailsYaml(datanodeDetails), writer);
}
}
/**
* Read datanode.id from file.
*/
public static DatanodeDetails readDatanodeIdFile(File path)
throws IOException {
DatanodeDetails datanodeDetails;
try (FileInputStream inputFileStream = new FileInputStream(path)) {
Yaml yaml = new Yaml();
DatanodeDetailsYaml datanodeDetailsYaml;
try {
datanodeDetailsYaml =
yaml.loadAs(inputFileStream, DatanodeDetailsYaml.class);
} catch (Exception e) {
throw new IOException("Unable to parse yaml file.", e);
}
DatanodeDetails.Builder builder = DatanodeDetails.newBuilder();
builder.setUuid(datanodeDetailsYaml.getUuid())
.setIpAddress(datanodeDetailsYaml.getIpAddress())
.setHostName(datanodeDetailsYaml.getHostName())
.setCertSerialId(datanodeDetailsYaml.getCertSerialId());
if (!MapUtils.isEmpty(datanodeDetailsYaml.getPortDetails())) {
for (Map.Entry<String, Integer> portEntry :
datanodeDetailsYaml.getPortDetails().entrySet()) {
builder.addPort(DatanodeDetails.newPort(
DatanodeDetails.Port.Name.valueOf(portEntry.getKey()),
portEntry.getValue()));
}
}
datanodeDetails = builder.build();
}
return datanodeDetails;
}
/**
* Datanode details bean to be written to the yaml file.
*/
public static class DatanodeDetailsYaml {
private String uuid;
private String ipAddress;
private String hostName;
private String certSerialId;
private Map<String, Integer> portDetails;
public DatanodeDetailsYaml() {
// Needed for snake-yaml introspection.
}
private DatanodeDetailsYaml(String uuid, String ipAddress,
String hostName, String certSerialId,
Map<String, Integer> portDetails) {
this.uuid = uuid;
this.ipAddress = ipAddress;
this.hostName = hostName;
this.certSerialId = certSerialId;
this.portDetails = portDetails;
}
public String getUuid() {
return uuid;
}
public String getIpAddress() {
return ipAddress;
}
public String getHostName() {
return hostName;
}
public String getCertSerialId() {
return certSerialId;
}
public Map<String, Integer> getPortDetails() {
return portDetails;
}
public void setUuid(String uuid) {
this.uuid = uuid;
}
public void setIpAddress(String ipAddress) {
this.ipAddress = ipAddress;
}
public void setHostName(String hostName) {
this.hostName = hostName;
}
public void setCertSerialId(String certSerialId) {
this.certSerialId = certSerialId;
}
public void setPortDetails(Map<String, Integer> portDetails) {
this.portDetails = portDetails;
}
}
private static DatanodeDetailsYaml getDatanodeDetailsYaml(
DatanodeDetails datanodeDetails) {
Map<String, Integer> portDetails = new LinkedHashMap<>();
if (!CollectionUtils.isEmpty(datanodeDetails.getPorts())) {
for (DatanodeDetails.Port port : datanodeDetails.getPorts()) {
portDetails.put(port.getName().toString(), port.getValue());
}
}
return new DatanodeDetailsYaml(
datanodeDetails.getUuid().toString(),
datanodeDetails.getIpAddress(),
datanodeDetails.getHostName(),
datanodeDetails.getCertSerialId(),
portDetails);
}
}
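A short usage sketch of the new helper on its own; the temporary file location is illustrative and error handling is elided. Note the design choice above: DatanodeDetailsYaml keeps a public no-arg constructor plus setters because SnakeYAML's loadAs() instantiates the bean reflectively and populates it through those setters.

import java.io.File;
import java.util.UUID;

import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.ozone.container.common.helpers.DatanodeIdYaml;

public class DatanodeIdYamlExample {
  public static void main(String[] args) throws Exception {
    // Illustrative temp file; createDatanodeIdFile expects the caller to have validated the path.
    File idFile = File.createTempFile("datanode", ".id");
    DatanodeDetails original = DatanodeDetails.newBuilder()
        .setUuid(UUID.randomUUID().toString())
        .setIpAddress("10.0.0.1")
        .setHostName("dn1.example.com")
        .build();
    // Dumps the details as human-readable YAML.
    DatanodeIdYaml.createDatanodeIdFile(original, idFile);
    // Parses the YAML back into DatanodeDetails via the DatanodeDetailsYaml bean.
    DatanodeDetails reloaded = DatanodeIdYaml.readDatanodeIdFile(idFile);
    System.out.println(reloaded.getHostName());
  }
}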


@@ -42,9 +42,11 @@
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.yaml.snakeyaml.Yaml;
 
 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.FileReader;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashSet;
@@ -132,6 +134,13 @@ public void testDatanodeIDPersistent() throws Exception {
     File validIdsFile = new File(WRITE_TMP, "valid-values.id");
     validIdsFile.delete();
     ContainerUtils.writeDatanodeDetailsTo(id1, validIdsFile);
+    // Validate using yaml parser
+    Yaml yaml = new Yaml();
+    try {
+      yaml.load(new FileReader(validIdsFile));
+    } catch (Exception e) {
+      Assert.fail("Failed parsing datanode id yaml.");
+    }
+
     DatanodeDetails validId = ContainerUtils.readDatanodeDetailsFrom(
         validIdsFile);