HDFS-6987. Move CipherSuite xattr information up to the encryption zone root. Contributed by Zhe Zhang.

This commit is contained in:
Andrew Wang 2014-09-21 21:28:14 -07:00
parent c50fc92502
commit 1737950d0f
13 changed files with 277 additions and 108 deletions

View File

@ -34,6 +34,7 @@ public class FileEncryptionInfo {
private final CipherSuite cipherSuite; private final CipherSuite cipherSuite;
private final byte[] edek; private final byte[] edek;
private final byte[] iv; private final byte[] iv;
private final String keyName;
private final String ezKeyVersionName; private final String ezKeyVersionName;
/** /**
@ -42,14 +43,16 @@ public class FileEncryptionInfo {
* @param suite CipherSuite used to encrypt the file * @param suite CipherSuite used to encrypt the file
* @param edek encrypted data encryption key (EDEK) of the file * @param edek encrypted data encryption key (EDEK) of the file
* @param iv initialization vector (IV) used to encrypt the file * @param iv initialization vector (IV) used to encrypt the file
* @param keyName name of the key used for the encryption zone
* @param ezKeyVersionName name of the KeyVersion used to encrypt the * @param ezKeyVersionName name of the KeyVersion used to encrypt the
* encrypted data encryption key. * encrypted data encryption key.
*/ */
public FileEncryptionInfo(final CipherSuite suite, final byte[] edek, public FileEncryptionInfo(final CipherSuite suite, final byte[] edek,
final byte[] iv, final String ezKeyVersionName) { final byte[] iv, final String keyName, final String ezKeyVersionName) {
checkNotNull(suite); checkNotNull(suite);
checkNotNull(edek); checkNotNull(edek);
checkNotNull(iv); checkNotNull(iv);
checkNotNull(keyName);
checkNotNull(ezKeyVersionName); checkNotNull(ezKeyVersionName);
checkArgument(edek.length == suite.getAlgorithmBlockSize(), checkArgument(edek.length == suite.getAlgorithmBlockSize(),
"Unexpected key length"); "Unexpected key length");
@ -58,6 +61,7 @@ public FileEncryptionInfo(final CipherSuite suite, final byte[] edek,
this.cipherSuite = suite; this.cipherSuite = suite;
this.edek = edek; this.edek = edek;
this.iv = iv; this.iv = iv;
this.keyName = keyName;
this.ezKeyVersionName = ezKeyVersionName; this.ezKeyVersionName = ezKeyVersionName;
} }
@ -83,6 +87,11 @@ public byte[] getIV() {
return iv; return iv;
} }
/**
* @return name of the encryption zone key.
*/
public String getKeyName() { return keyName; }
/** /**
* @return name of the encryption zone KeyVersion used to encrypt the * @return name of the encryption zone KeyVersion used to encrypt the
* encrypted data encryption key (EDEK). * encrypted data encryption key (EDEK).
@ -95,6 +104,7 @@ public String toString() {
builder.append("cipherSuite: " + cipherSuite); builder.append("cipherSuite: " + cipherSuite);
builder.append(", edek: " + Hex.encodeHexString(edek)); builder.append(", edek: " + Hex.encodeHexString(edek));
builder.append(", iv: " + Hex.encodeHexString(iv)); builder.append(", iv: " + Hex.encodeHexString(iv));
builder.append(", keyName: " + keyName);
builder.append(", ezKeyVersionName: " + ezKeyVersionName); builder.append(", ezKeyVersionName: " + ezKeyVersionName);
builder.append("}"); builder.append("}");
return builder.toString(); return builder.toString();

View File

@ -169,7 +169,6 @@ public void start() throws Exception {
kms.set(KMSConfiguration.KEY_PROVIDER_URI, kms.set(KMSConfiguration.KEY_PROVIDER_URI,
"jceks://file@" + new Path(kmsConfDir, "kms.keystore").toUri()); "jceks://file@" + new Path(kmsConfDir, "kms.keystore").toUri());
kms.set("hadoop.kms.authentication.type", "simple"); kms.set("hadoop.kms.authentication.type", "simple");
kms.setBoolean(KMSConfiguration.KEY_AUTHORIZATION_ENABLE, false);
Writer writer = new FileWriter(kmsFile); Writer writer = new FileWriter(kmsFile);
kms.writeXml(writer); kms.writeXml(writer);
writer.close(); writer.close();

View File

@ -133,6 +133,9 @@ Trunk (Unreleased)
HDFS-6609. Use DirectorySnapshottableFeature to represent a snapshottable HDFS-6609. Use DirectorySnapshottableFeature to represent a snapshottable
directory. (Jing Zhao via wheat9) directory. (Jing Zhao via wheat9)
HDFS-6987. Move CipherSuite xattr information up to the encryption zone
root. (Zhe Zhang via wang)
OPTIMIZATIONS OPTIMIZATIONS
BUG FIXES BUG FIXES

View File

@ -1319,8 +1319,7 @@ private KeyVersion decryptEncryptedDataEncryptionKey(FileEncryptionInfo
" an encrypted file"); " an encrypted file");
} }
EncryptedKeyVersion ekv = EncryptedKeyVersion.createForDecryption( EncryptedKeyVersion ekv = EncryptedKeyVersion.createForDecryption(
//TODO: here we have to put the keyName to be provided by HDFS-6987 feInfo.getKeyName(), feInfo.getEzKeyVersionName(), feInfo.getIV(),
null, feInfo.getEzKeyVersionName(), feInfo.getIV(),
feInfo.getEncryptedDataEncryptionKey()); feInfo.getEncryptedDataEncryptionKey());
try { try {
return provider.decryptEncryptedKey(ekv); return provider.decryptEncryptedKey(ekv);

View File

@ -21,6 +21,7 @@
import org.apache.commons.lang.builder.HashCodeBuilder; import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.crypto.CipherSuite;
/** /**
* A simple class for representing an encryption zone. Presently an encryption * A simple class for representing an encryption zone. Presently an encryption
@ -31,32 +32,40 @@
@InterfaceStability.Evolving @InterfaceStability.Evolving
public class EncryptionZone { public class EncryptionZone {
private final String path;
private final String keyName;
private final long id; private final long id;
private final String path;
private final CipherSuite suite;
private final String keyName;
public EncryptionZone(String path, String keyName, long id) { public EncryptionZone(long id, String path,
this.path = path; CipherSuite suite, String keyName) {
this.keyName = keyName;
this.id = id; this.id = id;
} this.path = path;
this.suite = suite;
public String getPath() { this.keyName = keyName;
return path;
}
public String getKeyName() {
return keyName;
} }
public long getId() { public long getId() {
return id; return id;
} }
public String getPath() {
return path;
}
public CipherSuite getSuite() {
return suite;
}
public String getKeyName() {
return keyName;
}
@Override @Override
public int hashCode() { public int hashCode() {
return new HashCodeBuilder(13, 31). return new HashCodeBuilder(13, 31).
append(path).append(keyName).append(id). append(id).append(path).
append(suite).append(keyName).
toHashCode(); toHashCode();
} }
@ -74,16 +83,18 @@ public boolean equals(Object obj) {
EncryptionZone rhs = (EncryptionZone) obj; EncryptionZone rhs = (EncryptionZone) obj;
return new EqualsBuilder(). return new EqualsBuilder().
append(path, rhs.path).
append(keyName, rhs.keyName).
append(id, rhs.id). append(id, rhs.id).
append(path, rhs.path).
append(suite, rhs.suite).
append(keyName, rhs.keyName).
isEquals(); isEquals();
} }
@Override @Override
public String toString() { public String toString() {
return "EncryptionZone [path=" + path + return "EncryptionZone [id=" + id +
", keyName=" + keyName + ", path=" + path +
", id=" + id + "]"; ", suite=" + suite +
", keyName=" + keyName + "]";
} }
} }

View File

@ -177,7 +177,6 @@
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
import org.apache.hadoop.hdfs.server.common.Storage;
import org.apache.hadoop.hdfs.server.common.StorageInfo; import org.apache.hadoop.hdfs.server.common.StorageInfo;
import org.apache.hadoop.hdfs.server.namenode.CheckpointSignature; import org.apache.hadoop.hdfs.server.namenode.CheckpointSignature;
import org.apache.hadoop.hdfs.server.namenode.INodeId; import org.apache.hadoop.hdfs.server.namenode.INodeId;
@ -2332,12 +2331,14 @@ public static EncryptionZoneProto convert(EncryptionZone zone) {
return EncryptionZoneProto.newBuilder() return EncryptionZoneProto.newBuilder()
.setId(zone.getId()) .setId(zone.getId())
.setKeyName(zone.getKeyName()) .setKeyName(zone.getKeyName())
.setPath(zone.getPath()).build(); .setPath(zone.getPath())
.setSuite(convert(zone.getSuite()))
.build();
} }
public static EncryptionZone convert(EncryptionZoneProto proto) { public static EncryptionZone convert(EncryptionZoneProto proto) {
return new EncryptionZone(proto.getPath(), proto.getKeyName(), return new EncryptionZone(proto.getId(), proto.getPath(),
proto.getId()); convert(proto.getSuite()), proto.getKeyName());
} }
public static ShortCircuitShmSlotProto convert(SlotId slotId) { public static ShortCircuitShmSlotProto convert(SlotId slotId) {
@ -2662,6 +2663,30 @@ public static HdfsProtos.FileEncryptionInfoProto convert(
.setKey(getByteString(info.getEncryptedDataEncryptionKey())) .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
.setIv(getByteString(info.getIV())) .setIv(getByteString(info.getIV()))
.setEzKeyVersionName(info.getEzKeyVersionName()) .setEzKeyVersionName(info.getEzKeyVersionName())
.setKeyName(info.getKeyName())
.build();
}
public static HdfsProtos.PerFileEncryptionInfoProto convertPerFileEncInfo(
FileEncryptionInfo info) {
if (info == null) {
return null;
}
return HdfsProtos.PerFileEncryptionInfoProto.newBuilder()
.setKey(getByteString(info.getEncryptedDataEncryptionKey()))
.setIv(getByteString(info.getIV()))
.setEzKeyVersionName(info.getEzKeyVersionName())
.build();
}
public static HdfsProtos.ZoneEncryptionInfoProto convert(
CipherSuite suite, String keyName) {
if (suite == null || keyName == null) {
return null;
}
return HdfsProtos.ZoneEncryptionInfoProto.newBuilder()
.setSuite(convert(suite))
.setKeyName(keyName)
.build(); .build();
} }
@ -2674,7 +2699,20 @@ public static FileEncryptionInfo convert(
byte[] key = proto.getKey().toByteArray(); byte[] key = proto.getKey().toByteArray();
byte[] iv = proto.getIv().toByteArray(); byte[] iv = proto.getIv().toByteArray();
String ezKeyVersionName = proto.getEzKeyVersionName(); String ezKeyVersionName = proto.getEzKeyVersionName();
return new FileEncryptionInfo(suite, key, iv, ezKeyVersionName); String keyName = proto.getKeyName();
return new FileEncryptionInfo(suite, key, iv, keyName, ezKeyVersionName);
}
public static FileEncryptionInfo convert(
HdfsProtos.PerFileEncryptionInfoProto fileProto,
CipherSuite suite, String keyName) {
if (fileProto == null || suite == null || keyName == null) {
return null;
}
byte[] key = fileProto.getKey().toByteArray();
byte[] iv = fileProto.getIv().toByteArray();
String ezKeyVersionName = fileProto.getEzKeyVersionName();
return new FileEncryptionInfo(suite, key, iv, keyName, ezKeyVersionName);
} }
} }

View File

@ -26,6 +26,7 @@
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.fs.XAttr; import org.apache.hadoop.fs.XAttr;
import org.apache.hadoop.fs.XAttrSetFlag; import org.apache.hadoop.fs.XAttrSetFlag;
@ -33,6 +34,8 @@
import org.apache.hadoop.hdfs.XAttrHelper; import org.apache.hadoop.hdfs.XAttrHelper;
import org.apache.hadoop.hdfs.protocol.EncryptionZone; import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException; import org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
import org.apache.hadoop.hdfs.protocolPB.PBHelper;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -53,8 +56,8 @@ public class EncryptionZoneManager {
public static Logger LOG = LoggerFactory.getLogger(EncryptionZoneManager public static Logger LOG = LoggerFactory.getLogger(EncryptionZoneManager
.class); .class);
private static final EncryptionZone NULL_EZ = public static final EncryptionZone NULL_EZ =
new EncryptionZone("", "", -1); new EncryptionZone(-1, "", CipherSuite.UNKNOWN, "");
/** /**
* EncryptionZoneInt is the internal representation of an encryption zone. The * EncryptionZoneInt is the internal representation of an encryption zone. The
@ -62,21 +65,27 @@ public class EncryptionZoneManager {
* contains the EZ's pathname. * contains the EZ's pathname.
*/ */
private static class EncryptionZoneInt { private static class EncryptionZoneInt {
private final String keyName;
private final long inodeId; private final long inodeId;
private final CipherSuite suite;
private final String keyName;
EncryptionZoneInt(long inodeId, String keyName) { EncryptionZoneInt(long inodeId, CipherSuite suite, String keyName) {
this.keyName = keyName;
this.inodeId = inodeId; this.inodeId = inodeId;
} this.suite = suite;
this.keyName = keyName;
String getKeyName() {
return keyName;
} }
long getINodeId() { long getINodeId() {
return inodeId; return inodeId;
} }
CipherSuite getSuite() {
return suite;
}
String getKeyName() {
return keyName;
}
} }
private final TreeMap<Long, EncryptionZoneInt> encryptionZones; private final TreeMap<Long, EncryptionZoneInt> encryptionZones;
@ -109,9 +118,9 @@ public EncryptionZoneManager(FSDirectory dir, Configuration conf) {
* @param inodeId of the encryption zone * @param inodeId of the encryption zone
* @param keyName encryption zone key name * @param keyName encryption zone key name
*/ */
void addEncryptionZone(Long inodeId, String keyName) { void addEncryptionZone(Long inodeId, CipherSuite suite, String keyName) {
assert dir.hasWriteLock(); assert dir.hasWriteLock();
unprotectedAddEncryptionZone(inodeId, keyName); unprotectedAddEncryptionZone(inodeId, suite, keyName);
} }
/** /**
@ -122,8 +131,10 @@ void addEncryptionZone(Long inodeId, String keyName) {
* @param inodeId of the encryption zone * @param inodeId of the encryption zone
* @param keyName encryption zone key name * @param keyName encryption zone key name
*/ */
void unprotectedAddEncryptionZone(Long inodeId, String keyName) { void unprotectedAddEncryptionZone(Long inodeId,
final EncryptionZoneInt ez = new EncryptionZoneInt(inodeId, keyName); CipherSuite suite, String keyName) {
final EncryptionZoneInt ez = new EncryptionZoneInt(
inodeId, suite, keyName);
encryptionZones.put(inodeId, ez); encryptionZones.put(inodeId, ez);
} }
@ -207,8 +218,8 @@ EncryptionZone getEZINodeForPath(INodesInPath iip) {
if (ezi == null) { if (ezi == null) {
return NULL_EZ; return NULL_EZ;
} else { } else {
return new EncryptionZone(getFullPathName(ezi), ezi.getKeyName(), return new EncryptionZone(ezi.getINodeId(), getFullPathName(ezi),
ezi.getINodeId()); ezi.getSuite(), ezi.getKeyName());
} }
} }
@ -264,7 +275,7 @@ void checkMoveValidity(INodesInPath srcIIP, INodesInPath dstIIP, String src)
* <p/> * <p/>
* Called while holding the FSDirectory lock. * Called while holding the FSDirectory lock.
*/ */
XAttr createEncryptionZone(String src, String keyName) XAttr createEncryptionZone(String src, CipherSuite suite, String keyName)
throws IOException { throws IOException {
assert dir.hasWriteLock(); assert dir.hasWriteLock();
if (dir.isNonEmptyDirectory(src)) { if (dir.isNonEmptyDirectory(src)) {
@ -284,8 +295,10 @@ XAttr createEncryptionZone(String src, String keyName)
"encryption zone. (" + getFullPathName(ezi) + ")"); "encryption zone. (" + getFullPathName(ezi) + ")");
} }
final HdfsProtos.ZoneEncryptionInfoProto proto =
PBHelper.convert(suite, keyName);
final XAttr ezXAttr = XAttrHelper final XAttr ezXAttr = XAttrHelper
.buildXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE, keyName.getBytes()); .buildXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE, proto.toByteArray());
final List<XAttr> xattrs = Lists.newArrayListWithCapacity(1); final List<XAttr> xattrs = Lists.newArrayListWithCapacity(1);
xattrs.add(ezXAttr); xattrs.add(ezXAttr);
@ -327,8 +340,8 @@ BatchedListEntries<EncryptionZone> listEncryptionZones(long prevId)
continue; continue;
} }
// Add the EZ to the result list // Add the EZ to the result list
zones.add(new EncryptionZone(pathName, zones.add(new EncryptionZone(ezi.getINodeId(), pathName,
ezi.getKeyName(), ezi.getINodeId())); ezi.getSuite(), ezi.getKeyName()));
count++; count++;
if (count >= numResponses) { if (count >= numResponses) {
break; break;

View File

@ -37,6 +37,7 @@
import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileEncryptionInfo; import org.apache.hadoop.fs.FileEncryptionInfo;
@ -1402,9 +1403,10 @@ DirectoryListing getListing(String src, byte[] startAfter,
if (srcs.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR)) { if (srcs.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR)) {
return getSnapshotsListing(srcs, startAfter); return getSnapshotsListing(srcs, startAfter);
} }
final INodesInPath inodesInPath = getLastINodeInPath(srcs); final INodesInPath inodesInPath = getINodesInPath(srcs, true);
final INode[] inodes = inodesInPath.getINodes();
final int snapshot = inodesInPath.getPathSnapshotId(); final int snapshot = inodesInPath.getPathSnapshotId();
final INode targetNode = inodesInPath.getLastINode(); final INode targetNode = inodes[inodes.length - 1];
if (targetNode == null) if (targetNode == null)
return null; return null;
byte parentStoragePolicy = isSuperUser ? byte parentStoragePolicy = isSuperUser ?
@ -1414,7 +1416,7 @@ DirectoryListing getListing(String src, byte[] startAfter,
return new DirectoryListing( return new DirectoryListing(
new HdfsFileStatus[]{createFileStatus(HdfsFileStatus.EMPTY_NAME, new HdfsFileStatus[]{createFileStatus(HdfsFileStatus.EMPTY_NAME,
targetNode, needLocation, parentStoragePolicy, snapshot, targetNode, needLocation, parentStoragePolicy, snapshot,
isRawPath)}, 0); isRawPath, inodesInPath)}, 0);
} }
final INodeDirectory dirInode = targetNode.asDirectory(); final INodeDirectory dirInode = targetNode.asDirectory();
@ -1431,7 +1433,7 @@ DirectoryListing getListing(String src, byte[] startAfter,
cur.getLocalStoragePolicyID(): BlockStoragePolicy.ID_UNSPECIFIED; cur.getLocalStoragePolicyID(): BlockStoragePolicy.ID_UNSPECIFIED;
listing[i] = createFileStatus(cur.getLocalNameBytes(), cur, needLocation, listing[i] = createFileStatus(cur.getLocalNameBytes(), cur, needLocation,
getStoragePolicyID(curPolicy, parentStoragePolicy), snapshot, getStoragePolicyID(curPolicy, parentStoragePolicy), snapshot,
isRawPath); isRawPath, inodesInPath);
listingCnt++; listingCnt++;
if (needLocation) { if (needLocation) {
// Once we hit lsLimit locations, stop. // Once we hit lsLimit locations, stop.
@ -1482,7 +1484,8 @@ private DirectoryListing getSnapshotsListing(String src, byte[] startAfter)
for (int i = 0; i < numOfListing; i++) { for (int i = 0; i < numOfListing; i++) {
Root sRoot = snapshots.get(i + skipSize).getRoot(); Root sRoot = snapshots.get(i + skipSize).getRoot();
listing[i] = createFileStatus(sRoot.getLocalNameBytes(), sRoot, listing[i] = createFileStatus(sRoot.getLocalNameBytes(), sRoot,
BlockStoragePolicy.ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID, false); BlockStoragePolicy.ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID,
false, null);
} }
return new DirectoryListing( return new DirectoryListing(
listing, snapshots.size() - skipSize - numOfListing); listing, snapshots.size() - skipSize - numOfListing);
@ -1505,12 +1508,14 @@ HdfsFileStatus getFileInfo(String src, boolean resolveLink,
if (srcs.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR)) { if (srcs.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR)) {
return getFileInfo4DotSnapshot(srcs); return getFileInfo4DotSnapshot(srcs);
} }
final INodesInPath inodesInPath = getLastINodeInPath(srcs, resolveLink); final INodesInPath inodesInPath = getINodesInPath(srcs, resolveLink);
final INode i = inodesInPath.getINode(0); final INode[] inodes = inodesInPath.getINodes();
final INode i = inodes[inodes.length - 1];
byte policyId = includeStoragePolicy && i != null && !i.isSymlink() ? byte policyId = includeStoragePolicy && i != null && !i.isSymlink() ?
i.getStoragePolicyID() : BlockStoragePolicy.ID_UNSPECIFIED; i.getStoragePolicyID() : BlockStoragePolicy.ID_UNSPECIFIED;
return i == null ? null : createFileStatus(HdfsFileStatus.EMPTY_NAME, i, return i == null ? null : createFileStatus(HdfsFileStatus.EMPTY_NAME, i,
policyId, inodesInPath.getPathSnapshotId(), isRawPath); policyId, inodesInPath.getPathSnapshotId(), isRawPath,
inodesInPath);
} finally { } finally {
readUnlock(); readUnlock();
} }
@ -2162,8 +2167,17 @@ public final void addToInodeMap(INode inode) {
for (XAttr xattr : xattrs) { for (XAttr xattr : xattrs) {
final String xaName = XAttrHelper.getPrefixName(xattr); final String xaName = XAttrHelper.getPrefixName(xattr);
if (CRYPTO_XATTR_ENCRYPTION_ZONE.equals(xaName)) { if (CRYPTO_XATTR_ENCRYPTION_ZONE.equals(xaName)) {
try {
final HdfsProtos.ZoneEncryptionInfoProto ezProto =
HdfsProtos.ZoneEncryptionInfoProto.parseFrom(
xattr.getValue());
ezManager.unprotectedAddEncryptionZone(inode.getId(), ezManager.unprotectedAddEncryptionZone(inode.getId(),
new String(xattr.getValue())); PBHelper.convert(ezProto.getSuite()),
ezProto.getKeyName());
} catch (InvalidProtocolBufferException e) {
NameNode.LOG.warn("Error parsing protocol buffer of " +
"EZ XAttr " + xattr.getName());
}
} }
} }
} }
@ -2355,12 +2369,15 @@ void reset() {
* @throws IOException if any error occurs * @throws IOException if any error occurs
*/ */
private HdfsFileStatus createFileStatus(byte[] path, INode node, private HdfsFileStatus createFileStatus(byte[] path, INode node,
boolean needLocation, byte storagePolicy, int snapshot, boolean isRawPath) boolean needLocation, byte storagePolicy, int snapshot,
boolean isRawPath, INodesInPath iip)
throws IOException { throws IOException {
if (needLocation) { if (needLocation) {
return createLocatedFileStatus(path, node, storagePolicy, snapshot, isRawPath); return createLocatedFileStatus(path, node, storagePolicy, snapshot,
isRawPath, iip);
} else { } else {
return createFileStatus(path, node, storagePolicy, snapshot, isRawPath); return createFileStatus(path, node, storagePolicy, snapshot,
isRawPath, iip);
} }
} }
@ -2368,14 +2385,14 @@ private HdfsFileStatus createFileStatus(byte[] path, INode node,
* Create FileStatus by file INode * Create FileStatus by file INode
*/ */
HdfsFileStatus createFileStatus(byte[] path, INode node, byte storagePolicy, HdfsFileStatus createFileStatus(byte[] path, INode node, byte storagePolicy,
int snapshot, boolean isRawPath) throws IOException { int snapshot, boolean isRawPath, INodesInPath iip) throws IOException {
long size = 0; // length is zero for directories long size = 0; // length is zero for directories
short replication = 0; short replication = 0;
long blocksize = 0; long blocksize = 0;
final boolean isEncrypted; final boolean isEncrypted;
final FileEncryptionInfo feInfo = isRawPath ? null : final FileEncryptionInfo feInfo = isRawPath ? null :
getFileEncryptionInfo(node, snapshot); getFileEncryptionInfo(node, snapshot, iip);
if (node.isFile()) { if (node.isFile()) {
final INodeFile fileNode = node.asFile(); final INodeFile fileNode = node.asFile();
@ -2413,7 +2430,8 @@ HdfsFileStatus createFileStatus(byte[] path, INode node, byte storagePolicy,
* Create FileStatus with location info by file INode * Create FileStatus with location info by file INode
*/ */
private HdfsLocatedFileStatus createLocatedFileStatus(byte[] path, INode node, private HdfsLocatedFileStatus createLocatedFileStatus(byte[] path, INode node,
byte storagePolicy, int snapshot, boolean isRawPath) throws IOException { byte storagePolicy, int snapshot, boolean isRawPath,
INodesInPath iip) throws IOException {
assert hasReadLock(); assert hasReadLock();
long size = 0; // length is zero for directories long size = 0; // length is zero for directories
short replication = 0; short replication = 0;
@ -2421,7 +2439,7 @@ private HdfsLocatedFileStatus createLocatedFileStatus(byte[] path, INode node,
LocatedBlocks loc = null; LocatedBlocks loc = null;
final boolean isEncrypted; final boolean isEncrypted;
final FileEncryptionInfo feInfo = isRawPath ? null : final FileEncryptionInfo feInfo = isRawPath ? null :
getFileEncryptionInfo(node, snapshot); getFileEncryptionInfo(node, snapshot, iip);
if (node.isFile()) { if (node.isFile()) {
final INodeFile fileNode = node.asFile(); final INodeFile fileNode = node.asFile();
size = fileNode.computeFileSize(snapshot); size = fileNode.computeFileSize(snapshot);
@ -2746,11 +2764,11 @@ String getKeyName(INodesInPath iip) {
} }
} }
XAttr createEncryptionZone(String src, String keyName) XAttr createEncryptionZone(String src, CipherSuite suite, String keyName)
throws IOException { throws IOException {
writeLock(); writeLock();
try { try {
return ezManager.createEncryptionZone(src, keyName); return ezManager.createEncryptionZone(src, suite, keyName);
} finally { } finally {
writeUnlock(); writeUnlock();
} }
@ -2781,7 +2799,8 @@ BatchedListEntries<EncryptionZone> listEncryptionZones(long prevId)
void setFileEncryptionInfo(String src, FileEncryptionInfo info) void setFileEncryptionInfo(String src, FileEncryptionInfo info)
throws IOException { throws IOException {
// Make the PB for the xattr // Make the PB for the xattr
final HdfsProtos.FileEncryptionInfoProto proto = PBHelper.convert(info); final HdfsProtos.PerFileEncryptionInfoProto proto =
PBHelper.convertPerFileEncInfo(info);
final byte[] protoBytes = proto.toByteArray(); final byte[] protoBytes = proto.toByteArray();
final XAttr fileEncryptionAttr = final XAttr fileEncryptionAttr =
XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes); XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
@ -2797,35 +2816,64 @@ void setFileEncryptionInfo(String src, FileEncryptionInfo info)
} }
/** /**
* Return the FileEncryptionInfo for an INode, or null if the INode is not * This function combines the per-file encryption info (obtained
* an encrypted file. * from the inode's XAttrs), and the encryption info from its zone, and
* returns a consolidated FileEncryptionInfo instance. Null is returned
* for non-encrypted files.
*
* @param inode inode of the file
* @param snapshotId ID of the snapshot that
* we want to get encryption info from
* @param iip inodes in the path containing the file, passed in to
* avoid obtaining the list of inodes again; if iip is
* null then the list of inodes will be obtained again
* @return consolidated file encryption info; null for non-encrypted files
*/ */
FileEncryptionInfo getFileEncryptionInfo(INode inode, int snapshotId) FileEncryptionInfo getFileEncryptionInfo(INode inode, int snapshotId,
throws IOException { INodesInPath iip) throws IOException {
if (!inode.isFile()) { if (!inode.isFile()) {
return null; return null;
} }
readLock(); readLock();
try { try {
List<XAttr> xAttrs = XAttrStorage.readINodeXAttrs(inode, snapshotId); if (iip == null) {
if (xAttrs == null) { iip = getINodesInPath(inode.getFullPathName(), true);
}
EncryptionZone encryptionZone = getEZForPath(iip);
if (encryptionZone == null ||
encryptionZone.equals(EncryptionZoneManager.NULL_EZ)) {
// not an encrypted file
return null;
} else if(encryptionZone.getPath() == null
|| encryptionZone.getPath().isEmpty()) {
if (NameNode.LOG.isDebugEnabled()) {
NameNode.LOG.debug("Encryption zone " +
encryptionZone.getPath() + " does not have a valid path.");
}
}
CipherSuite suite = encryptionZone.getSuite();
String keyName = encryptionZone.getKeyName();
XAttr fileXAttr = unprotectedGetXAttrByName(inode, snapshotId,
CRYPTO_XATTR_FILE_ENCRYPTION_INFO);
if (fileXAttr == null) {
NameNode.LOG.warn("Could not find encryption XAttr for file " +
inode.getFullPathName() + " in encryption zone " +
encryptionZone.getPath());
return null; return null;
} }
for (XAttr x : xAttrs) {
if (XAttrHelper.getPrefixName(x)
.equals(CRYPTO_XATTR_FILE_ENCRYPTION_INFO)) {
try { try {
HdfsProtos.FileEncryptionInfoProto proto = HdfsProtos.PerFileEncryptionInfoProto fileProto =
HdfsProtos.FileEncryptionInfoProto.parseFrom(x.getValue()); HdfsProtos.PerFileEncryptionInfoProto.parseFrom(
FileEncryptionInfo feInfo = PBHelper.convert(proto); fileXAttr.getValue());
return feInfo; return PBHelper.convert(fileProto, suite, keyName);
} catch (InvalidProtocolBufferException e) { } catch (InvalidProtocolBufferException e) {
throw new IOException("Could not parse file encryption info for " + throw new IOException("Could not parse file encryption info for " +
"inode " + inode, e); "inode " + inode, e);
} }
}
}
return null;
} finally { } finally {
readUnlock(); readUnlock();
} }
@ -2860,7 +2908,11 @@ INode unprotectedSetXAttrs(final String src, final List<XAttr> xAttrs,
* of encryption zones. * of encryption zones.
*/ */
if (CRYPTO_XATTR_ENCRYPTION_ZONE.equals(xaName)) { if (CRYPTO_XATTR_ENCRYPTION_ZONE.equals(xaName)) {
ezManager.addEncryptionZone(inode.getId(), new String(xattr.getValue())); final HdfsProtos.ZoneEncryptionInfoProto ezProto =
HdfsProtos.ZoneEncryptionInfoProto.parseFrom(xattr.getValue());
ezManager.addEncryptionZone(inode.getId(),
PBHelper.convert(ezProto.getSuite()),
ezProto.getKeyName());
} }
if (!isFile && SECURITY_XATTR_UNREADABLE_BY_SUPERUSER.equals(xaName)) { if (!isFile && SECURITY_XATTR_UNREADABLE_BY_SUPERUSER.equals(xaName)) {
@ -2977,6 +3029,22 @@ private List<XAttr> unprotectedGetXAttrs(INode inode, int snapshotId)
return XAttrStorage.readINodeXAttrs(inode, snapshotId); return XAttrStorage.readINodeXAttrs(inode, snapshotId);
} }
private XAttr unprotectedGetXAttrByName(INode inode, int snapshotId,
String xAttrName)
throws IOException {
List<XAttr> xAttrs = XAttrStorage.readINodeXAttrs(inode, snapshotId);
if (xAttrs == null) {
return null;
}
for (XAttr x : xAttrs) {
if (XAttrHelper.getPrefixName(x)
.equals(xAttrName)) {
return x;
}
}
return null;
}
private static INode resolveLastINode(String src, INodesInPath iip) private static INode resolveLastINode(String src, INodesInPath iip)
throws FileNotFoundException { throws FileNotFoundException {
INode inode = iip.getLastINode(); INode inode = iip.getLastINode();

View File

@ -341,8 +341,10 @@ private long applyEditLogOp(FSEditLogOp op, FSDirectory fsDir,
// 3. OP_ADD to open file for append // 3. OP_ADD to open file for append
// See if the file already exists (persistBlocks call) // See if the file already exists (persistBlocks call)
final INodesInPath iip = fsDir.getLastINodeInPath(path); final INodesInPath iip = fsDir.getINodesInPath(path, true);
INodeFile oldFile = INodeFile.valueOf(iip.getINode(0), path, true); final INode[] inodes = iip.getINodes();
INodeFile oldFile = INodeFile.valueOf(
inodes[inodes.length - 1], path, true);
if (oldFile != null && addCloseOp.overwrite) { if (oldFile != null && addCloseOp.overwrite) {
// This is OP_ADD with overwrite // This is OP_ADD with overwrite
fsDir.unprotectedDelete(path, addCloseOp.mtime); fsDir.unprotectedDelete(path, addCloseOp.mtime);
@ -372,7 +374,7 @@ private long applyEditLogOp(FSEditLogOp op, FSDirectory fsDir,
HdfsFileStatus stat = fsNamesys.dir.createFileStatus( HdfsFileStatus stat = fsNamesys.dir.createFileStatus(
HdfsFileStatus.EMPTY_NAME, newFile, HdfsFileStatus.EMPTY_NAME, newFile,
BlockStoragePolicy.ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID, BlockStoragePolicy.ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID,
false); false, iip);
fsNamesys.addCacheEntryWithPayload(addCloseOp.rpcClientId, fsNamesys.addCacheEntryWithPayload(addCloseOp.rpcClientId,
addCloseOp.rpcCallId, stat); addCloseOp.rpcCallId, stat);
} }

View File

@ -17,7 +17,6 @@
*/ */
package org.apache.hadoop.hdfs.server.namenode; package org.apache.hadoop.hdfs.server.namenode;
import static org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
import static org.apache.hadoop.crypto.key.KeyProviderCryptoExtension import static org.apache.hadoop.crypto.key.KeyProviderCryptoExtension
.EncryptedKeyVersion; .EncryptedKeyVersion;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT;
@ -134,6 +133,7 @@
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CipherSuite; import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension; import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries; import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
import org.apache.hadoop.fs.CacheFlag; import org.apache.hadoop.fs.CacheFlag;
@ -165,7 +165,6 @@
import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HAUtil; import org.apache.hadoop.hdfs.HAUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.StorageType;
import org.apache.hadoop.hdfs.XAttrHelper; import org.apache.hadoop.hdfs.XAttrHelper;
import org.apache.hadoop.hdfs.UnknownCipherSuiteException; import org.apache.hadoop.hdfs.UnknownCipherSuiteException;
import org.apache.hadoop.hdfs.protocol.AclException; import org.apache.hadoop.hdfs.protocol.AclException;
@ -1833,8 +1832,10 @@ private LocatedBlocks getBlockLocationsUpdateTimes(final String srcArg,
doAccessTime = false; doAccessTime = false;
} }
final INodesInPath iip = dir.getLastINodeInPath(src); final INodesInPath iip = dir.getINodesInPath(src, true);
final INodeFile inode = INodeFile.valueOf(iip.getLastINode(), src); final INode[] inodes = iip.getINodes();
final INodeFile inode = INodeFile.valueOf(
inodes[inodes.length - 1], src);
if (isPermissionEnabled) { if (isPermissionEnabled) {
checkUnreadableBySuperuser(pc, inode, iip.getPathSnapshotId()); checkUnreadableBySuperuser(pc, inode, iip.getPathSnapshotId());
} }
@ -1867,7 +1868,8 @@ && doAccessTime && isAccessTimeSupported()) {
final FileEncryptionInfo feInfo = final FileEncryptionInfo feInfo =
FSDirectory.isReservedRawName(srcArg) ? FSDirectory.isReservedRawName(srcArg) ?
null : dir.getFileEncryptionInfo(inode, iip.getPathSnapshotId()); null : dir.getFileEncryptionInfo(inode, iip.getPathSnapshotId(),
iip);
final LocatedBlocks blocks = final LocatedBlocks blocks =
blockManager.createLocatedBlocks(inode.getBlocks(), fileSize, blockManager.createLocatedBlocks(inode.getBlocks(), fileSize,
@ -2599,7 +2601,7 @@ private BlocksMapUpdateInfo startFileInternal(FSPermissionChecker pc,
feInfo = new FileEncryptionInfo(suite, feInfo = new FileEncryptionInfo(suite,
edek.getEncryptedKeyVersion().getMaterial(), edek.getEncryptedKeyVersion().getMaterial(),
edek.getEncryptedKeyIv(), edek.getEncryptedKeyIv(),
edek.getEncryptionKeyVersionName()); ezKeyName, edek.getEncryptionKeyVersionName());
Preconditions.checkNotNull(feInfo); Preconditions.checkNotNull(feInfo);
} }
@ -8619,8 +8621,8 @@ void createEncryptionZone(final String src, final String keyName)
throw new IOException("Must specify a key name when creating an " + throw new IOException("Must specify a key name when creating an " +
"encryption zone"); "encryption zone");
} }
KeyVersion keyVersion = provider.getCurrentKey(keyName); KeyProvider.Metadata metadata = provider.getMetadata(keyName);
if (keyVersion == null) { if (metadata == null) {
/* /*
* It would be nice if we threw something more specific than * It would be nice if we threw something more specific than
* IOException when the key is not found, but the KeyProvider API * IOException when the key is not found, but the KeyProvider API
@ -8631,7 +8633,8 @@ void createEncryptionZone(final String src, final String keyName)
*/ */
throw new IOException("Key " + keyName + " doesn't exist."); throw new IOException("Key " + keyName + " doesn't exist.");
} }
createEncryptionZoneInt(src, keyName, cacheEntry != null); createEncryptionZoneInt(src, metadata.getCipher(),
keyName, cacheEntry != null);
success = true; success = true;
} catch (AccessControlException e) { } catch (AccessControlException e) {
logAuditEvent(false, "createEncryptionZone", src); logAuditEvent(false, "createEncryptionZone", src);
@ -8641,8 +8644,8 @@ void createEncryptionZone(final String src, final String keyName)
} }
} }
private void createEncryptionZoneInt(final String srcArg, String keyName, private void createEncryptionZoneInt(final String srcArg, String cipher,
final boolean logRetryCache) throws IOException { String keyName, final boolean logRetryCache) throws IOException {
String src = srcArg; String src = srcArg;
HdfsFileStatus resultingStat = null; HdfsFileStatus resultingStat = null;
checkSuperuserPrivilege(); checkSuperuserPrivilege();
@ -8656,7 +8659,8 @@ private void createEncryptionZoneInt(final String srcArg, String keyName,
checkNameNodeSafeMode("Cannot create encryption zone on " + src); checkNameNodeSafeMode("Cannot create encryption zone on " + src);
src = resolvePath(src, pathComponents); src = resolvePath(src, pathComponents);
final XAttr ezXAttr = dir.createEncryptionZone(src, keyName); final CipherSuite suite = CipherSuite.convert(cipher);
final XAttr ezXAttr = dir.createEncryptionZone(src, suite, keyName);
List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1); List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
xAttrs.add(ezXAttr); xAttrs.add(ezXAttr);
getEditLog().logSetXAttrs(src, xAttrs, logRetryCache); getEditLog().logSetXAttrs(src, xAttrs, logRetryCache);

View File

@ -133,6 +133,7 @@ static INodesInPath resolve(final INodeDirectory startingDir,
* be thrown when the path refers to a symbolic link. * be thrown when the path refers to a symbolic link.
* @return the specified number of existing INodes in the path * @return the specified number of existing INodes in the path
*/ */
// TODO: Eliminate null elements from inodes (to be provided by HDFS-7104)
static INodesInPath resolve(final INodeDirectory startingDir, static INodesInPath resolve(final INodeDirectory startingDir,
final byte[][] components, final int numOfINodes, final byte[][] components, final int numOfINodes,
final boolean resolveLink) throws UnresolvedLinkException { final boolean resolveLink) throws UnresolvedLinkException {
@ -311,7 +312,7 @@ private void updateLatestSnapshotId(int sid) {
} }
/** /**
* @return the whole inodes array including the null elements. * @return the inodes array excluding the null elements.
*/ */
INode[] getINodes() { INode[] getINodes() {
if (capacity < inodes.length) { if (capacity < inodes.length) {

View File

@ -46,9 +46,10 @@ message ListEncryptionZonesRequestProto {
} }
message EncryptionZoneProto { message EncryptionZoneProto {
required string path = 1; required int64 id = 1;
required string keyName = 2; required string path = 2;
required int64 id = 3; required CipherSuite suite = 3;
required string keyName = 4;
} }
message ListEncryptionZonesResponseProto { message ListEncryptionZonesResponseProto {

View File

@ -216,7 +216,27 @@ message FileEncryptionInfoProto {
required CipherSuite suite = 1; required CipherSuite suite = 1;
required bytes key = 2; required bytes key = 2;
required bytes iv = 3; required bytes iv = 3;
required string ezKeyVersionName = 4; required string keyName = 4;
required string ezKeyVersionName = 5;
}
/**
 * Encryption information unique to an individual file within an
 * encryption zone. Zone-wide fields (CipherSuite, key name) live in
 * ZoneEncryptionInfoProto on the zone root instead.
 */
message PerFileEncryptionInfoProto {
  // Encrypted data encryption key (EDEK) for this file.
  required bytes key = 1;
  // Initialization vector (IV) used to encrypt this file's data.
  required bytes iv = 2;
  // Name of the zone KeyVersion that was used to encrypt the EDEK.
  required string ezKeyVersionName = 3;
}
/**
 * Encryption information shared by every file in an encryption zone,
 * stored as an xattr on the zone root directory.
 */
message ZoneEncryptionInfoProto {
  // CipherSuite used for all files in this zone.
  required CipherSuite suite = 1;
  // Name of the encryption-zone key in the key provider.
  required string keyName = 2;
}
/** /**