+ VALIDATE_CRL_DISTRIBUTION_POINTS = (e, b) -> TRUE;
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/DefaultProfile.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/DefaultProfile.java
new file mode 100644
index 0000000000..a9f55596c9
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/DefaultProfile.java
@@ -0,0 +1,333 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.validator.routines.DomainValidator;
+import org.bouncycastle.asn1.ASN1ObjectIdentifier;
+import org.bouncycastle.asn1.x500.RDN;
+import org.bouncycastle.asn1.x509.ExtendedKeyUsage;
+import org.bouncycastle.asn1.x509.Extension;
+import org.bouncycastle.asn1.x509.GeneralName;
+import org.bouncycastle.asn1.x509.GeneralNames;
+import org.bouncycastle.asn1.x509.KeyPurposeId;
+import org.bouncycastle.asn1.x509.KeyUsage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.bind.DatatypeConverter;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.AbstractMap.SimpleEntry;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.BiFunction;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static java.lang.Boolean.TRUE;
+import static org.bouncycastle.asn1.x509.KeyPurposeId.id_kp_clientAuth;
+import static org.bouncycastle.asn1.x509.KeyPurposeId.id_kp_serverAuth;
+
+/**
+ * Ozone PKI profile.
+ *
+ * This PKI profile is invoked by SCM CA to make sure that certificates issued
+ * by SCM CA are properly constrained.
+ */
+public class DefaultProfile implements PKIProfile {
+ static final BiFunction<Extension, PKIProfile, Boolean>
+ VALIDATE_KEY_USAGE = DefaultProfile::validateKeyUsage;
+ static final BiFunction<Extension, PKIProfile, Boolean>
+ VALIDATE_AUTHORITY_KEY_IDENTIFIER = (e, b) -> TRUE;
+ static final BiFunction<Extension, PKIProfile, Boolean>
+ VALIDATE_LOGO_TYPE = (e, b) -> TRUE;
+ private static final Logger LOG =
+ LoggerFactory.getLogger(DefaultProfile.class);
+ static final BiFunction<Extension, PKIProfile, Boolean>
+ VALIDATE_SAN = DefaultProfile::validateSubjectAlternativeName;
+ static final BiFunction<Extension, PKIProfile, Boolean>
+ VALIDATE_EXTENDED_KEY_USAGE = DefaultProfile::validateExtendedKeyUsage;
+ // If we decide to add more General Names, we should add those here and
+ // also update the logic in validateGeneralName function.
+ private static final int[] GENERAL_NAMES = {
+ GeneralName.dNSName,
+ GeneralName.iPAddress,
+ };
+ // Map that handles all the Extensions lookup and validations.
+ private static final Map<ASN1ObjectIdentifier,
+ BiFunction<Extension, PKIProfile, Boolean>> EXTENSIONS_MAP = Stream.of(
+ new SimpleEntry<>(Extension.keyUsage, VALIDATE_KEY_USAGE),
+ new SimpleEntry<>(Extension.subjectAlternativeName, VALIDATE_SAN),
+ new SimpleEntry<>(Extension.authorityKeyIdentifier,
+ VALIDATE_AUTHORITY_KEY_IDENTIFIER),
+ new SimpleEntry<>(Extension.extendedKeyUsage,
+ VALIDATE_EXTENDED_KEY_USAGE),
+ // Ozone certs are issued only for the use of Ozone.
+ // However, some users will discover that this is a full-scale CA
+ // and decide to misuse these certs for other purposes.
+ // To discourage usage of these certs for other purposes, we can leave
+ // the Ozone Logo inside these certs. So if a browser is used to
+ // connect, these logos will show up.
+ // https://www.ietf.org/rfc/rfc3709.txt
+ new SimpleEntry<>(Extension.logoType, VALIDATE_LOGO_TYPE))
+ .collect(Collectors.toMap(SimpleEntry::getKey,
+ SimpleEntry::getValue));
+ // If we decide to support more Extended Key usages, we should add those
+ // here and also update the logic in validateExtendedKeyUsage function.
+ private static final KeyPurposeId[] EXTENDED_KEY_USAGE = {
+ id_kp_serverAuth, // TLS Web server authentication
+ id_kp_clientAuth, // TLS Web client authentication
+
+ };
+ private final Set<KeyPurposeId> extendKeyPurposeSet;
+ private Set<Integer> generalNameSet;
+
+ /**
+ * Construct DefaultProfile.
+ */
+ public DefaultProfile() {
+ generalNameSet = new HashSet<>();
+ for (int val : GENERAL_NAMES) {
+ generalNameSet.add(val);
+ }
+ extendKeyPurposeSet =
+ new HashSet<>(Arrays.asList(EXTENDED_KEY_USAGE));
+
+ }
+
+ /**
+ * This function validates that the KeyUsage Bits are subset of the Bits
+ * permitted by the ozone profile.
+ *
+ * @param ext - KeyUsage Extension.
+ * @param profile - PKI Profile - In this case this profile.
+ * @return True, if the request key usage is a subset, false otherwise.
+ */
+ private static Boolean validateKeyUsage(Extension ext, PKIProfile profile) {
+ KeyUsage keyUsage = profile.getKeyUsage();
+ KeyUsage requestedUsage = KeyUsage.getInstance(ext.getParsedValue());
+ BitSet profileBitSet = BitSet.valueOf(keyUsage.getBytes());
+ BitSet requestBitSet = BitSet.valueOf(requestedUsage.getBytes());
+ // Check if the requestBitSet is a subset of profileBitSet:
+ // (p & r) == r holds exactly when r is a subset of p.
+ profileBitSet.and(requestBitSet);
+ return profileBitSet.equals(requestBitSet);
+ }
+
+ /**
+ * Validates the SubjectAlternative names in the Certificate.
+ *
+ * @param ext - Extension - SAN, which allows us to get the SAN names.
+ * @param profile - This profile.
+ * @return - True if the request contains only SAN General Names that we
+ * support, false otherwise.
+ */
+ private static Boolean validateSubjectAlternativeName(Extension ext,
+ PKIProfile profile) {
+ if (ext.isCritical()) {
+ // SAN extensions should not be marked as critical under ozone profile.
+ LOG.error("SAN extension marked as critical in the Extension. {}",
+ GeneralNames.getInstance(ext.getParsedValue()).toString());
+ return false;
+ }
+ GeneralNames generalNames = GeneralNames.getInstance(ext.getParsedValue());
+ for (GeneralName name : generalNames.getNames()) {
+ try {
+ if (!profile.validateGeneralName(name.getTagNo(),
+ name.getName().toString())) {
+ return false;
+ }
+ } catch (UnknownHostException e) {
+ LOG.error("IP address validation failed."
+ + name.getName().toString(), e);
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * This function validates that the requested Extended Key Usage ids are a
+ * subset of the ids permitted by the ozone profile.
+ *
+ * @param ext - Extended Key Usage Extension.
+ * @param profile - PKI Profile - In this case this profile.
+ * @return True, if the requested extended key usage is a subset, false
+ * otherwise.
+ */
+ private static Boolean validateExtendedKeyUsage(Extension ext,
+ PKIProfile profile) {
+ if (ext.isCritical()) {
+ // https://tools.ietf.org/html/rfc5280#section-4.2.1.12
+ // Ozone profile opts to mark this extension as non-critical.
+ LOG.error("Extended Key usage marked as critical.");
+ return false;
+ }
+ ExtendedKeyUsage extendedKeyUsage =
+ ExtendedKeyUsage.getInstance(ext.getParsedValue());
+ for (KeyPurposeId id : extendedKeyUsage.getUsages()) {
+ if (!profile.validateExtendedKeyUsage(id)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int[] getGeneralNames() {
+ return Arrays.copyOfRange(GENERAL_NAMES, 0, GENERAL_NAMES.length);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean isSupportedGeneralName(int generalName) {
+ return generalNameSet.contains(generalName);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean validateGeneralName(int type, String value) {
+ // TODO : We should add more validation for IP address, for example
+ // that it matches the local network, and that the domain matches where
+ // the cluster exists.
+ if (!isSupportedGeneralName(type)) {
+ return false;
+ }
+ switch (type) {
+ case GeneralName.iPAddress:
+
+ // We need DatatypeConverter conversion, since the original CSR encodes
+ // an IP address in a Hex String, for example 8.8.8.8 is encoded as
+ // #08080808. The value string is always preceded by "#", which we strip
+ // before passing it on.
+
+ // The getByAddress call converts the IP address to hostname/ipAddress
+ // format; if the hostname cannot be determined then it will be /ipAddress.
+
+ // TODO: Fail? if we cannot resolve the Hostname?
+ try {
+ final InetAddress byAddress = InetAddress.getByAddress(
+ DatatypeConverter.parseHexBinary(value.substring(1)));
+ LOG.debug("Host Name/IP Address : {}", byAddress.toString());
+ return true;
+ } catch (UnknownHostException e) {
+ return false;
+ }
+ case GeneralName.dNSName:
+ return DomainValidator.getInstance().isValid(value);
+ default:
+ // This should not happen, since it is guarded by isSupportedGeneralName.
+ LOG.error("Unexpected type in General Name (int value) : " + type);
+ return false;
+ }
+ }
+
+ @Override
+ public boolean validateExtendedKeyUsage(KeyPurposeId id) {
+ return extendKeyPurposeSet.contains(id);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public ASN1ObjectIdentifier[] getSupportedExtensions() {
+ return EXTENSIONS_MAP.keySet().toArray(new ASN1ObjectIdentifier[0]);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean isSupportedExtension(Extension extension) {
+ return EXTENSIONS_MAP.containsKey(extension.getExtnId());
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean validateExtension(Extension extension) {
+ Preconditions.checkNotNull(extension, "Extension cannot be null");
+
+ if (!isSupportedExtension(extension)) {
+ LOG.error("Unsupported Extension found: {} ",
+ extension.getExtnId().getId());
+ return false;
+ }
+
+ BiFunction<Extension, PKIProfile, Boolean> func =
+ EXTENSIONS_MAP.get(extension.getExtnId());
+
+ if (func != null) {
+ return func.apply(extension, this);
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public KeyUsage getKeyUsage() {
+ return new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyEncipherment
+ | KeyUsage.dataEncipherment | KeyUsage.keyAgreement);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public RDN[] getRDNs() {
+ return new RDN[0];
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean isValidRDN(RDN distinguishedName) {
+ // TODO: Right now we just approve all strings.
+ return true;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean validateRDN(RDN name) {
+ return true;
+ }
+
+ @Override
+ public boolean isCA() {
+ return false;
+ }
+}
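The subset test in validateKeyUsage above can be exercised in isolation. A minimal, standalone sketch, assuming only BouncyCastle's KeyUsage (already used by this patch) and java.util.BitSet; the class name is illustrative and not part of the patch:

import java.util.BitSet;
import org.bouncycastle.asn1.x509.KeyUsage;

public final class KeyUsageSubsetDemo {
  public static void main(String[] args) {
    // Profile mask, same bits that DefaultProfile.getKeyUsage() allows.
    KeyUsage profile = new KeyUsage(KeyUsage.digitalSignature
        | KeyUsage.keyEncipherment | KeyUsage.dataEncipherment
        | KeyUsage.keyAgreement);
    // A request asking for fewer bits than the profile allows.
    KeyUsage requested = new KeyUsage(
        KeyUsage.digitalSignature | KeyUsage.keyEncipherment);

    BitSet p = BitSet.valueOf(profile.getBytes());
    BitSet r = BitSet.valueOf(requested.getBytes());
    p.and(r);                        // p := p & r
    System.out.println(p.equals(r)); // true: the requested bits are a subset
  }
}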
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/PKIProfile.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/PKIProfile.java
new file mode 100644
index 0000000000..c3ff198cd7
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/PKIProfile.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles;
+
+import org.bouncycastle.asn1.ASN1ObjectIdentifier;
+import org.bouncycastle.asn1.x500.RDN;
+import org.bouncycastle.asn1.x509.Extension;
+import org.bouncycastle.asn1.x509.KeyPurposeId;
+import org.bouncycastle.asn1.x509.KeyUsage;
+
+import java.net.UnknownHostException;
+
+/**
+ * Base class for profile rules. Generally profiles are documents that define
+ * the PKI policy. In the HDDS/Ozone world, we have chosen to make PKI
+ * profiles executable code. So if end-users want to use a custom profile, or
+ * one modeled on an existing profile like those listed below, they are free
+ * to implement one.
+ *
+ * PKIX - Internet PKI profile.
+ * FPKI - (US) Federal PKI profile.
+ * MISSI - US DoD profile.
+ * ISO 15782 - Banking - Certificate Management Part 1: Public Key
+ * Certificates.
+ * TeleTrust/MailTrusT - German MailTrusT profile for TeleTrusT (it really
+ * is capitalised that way).
+ * German SigG Profile - Profile to implement the German digital
+ * signature law.
+ * ISIS Profile - Another German profile.
+ * Australian Profile - Profile for the Australian PKAF
+ * SS 61 43 31 Electronic ID Certificate - Swedish profile.
+ * FINEID S3 - Finnish profile.
+ * ANX Profile - Automotive Network Exchange profile.
+ * Microsoft Profile - This isn't a real profile, but windows uses this.
+ */
+public interface PKIProfile {
+
+ /**
+ * Returns the list of General Names supported by this profile.
+ * @return - an Array of General Names supported by this certificate profile.
+ */
+ int[] getGeneralNames();
+
+ /**
+ * Checks if a given General Name is permitted in this profile.
+ * @param generalName - General name.
+ * @return true if it is allowed, false otherwise.
+ */
+ boolean isSupportedGeneralName(int generalName);
+
+ /**
+ * Allows the profile to dictate what value ranges are valid.
+ * @param type - Type of the General Name.
+ * @param value - Value of the General Name.
+ * @return - true if the value is permitted, false otherwise.
+ * @throws UnknownHostException - on Error in IP validation.
+ */
+ boolean validateGeneralName(int type, String value)
+ throws UnknownHostException;
+
+ /**
+ * Returns an array of Object identifiers for extensions supported by this
+ * profile.
+ * @return an Array of ASN1ObjectIdentifier for the supported extensions.
+ */
+ ASN1ObjectIdentifier[] getSupportedExtensions();
+
+ /**
+ * Checks if this extension is permitted in this profile.
+ * @param extension - Extension to check for.
+ * @return - true if this extension is supported, false otherwise.
+ */
+ boolean isSupportedExtension(Extension extension);
+
+ /**
+ * Checks if the extension has the value which this profile approves.
+ * @param extension - Extension to validate.
+ * @return - True if the extension is acceptable, false otherwise.
+ */
+ boolean validateExtension(Extension extension);
+
+ /**
+ * Validate the Extended Key Usage.
+ * @param id - KeyPurpose ID
+ * @return true, if this is a supported Purpose, false otherwise.
+ */
+ boolean validateExtendedKeyUsage(KeyPurposeId id);
+
+ /**
+ * Returns the permitted Key usage mask while using this profile.
+ * @return KeyUsage
+ */
+ KeyUsage getKeyUsage();
+
+ /**
+ * Gets the list of RDNs supported by this profile.
+ * @return Array of RDNs.
+ */
+ RDN[] getRDNs();
+
+ /**
+ * Returns true if this Relative Distinguished Name component is allowed in
+ * this profile.
+ * @param distinguishedName - RDN to check.
+ * @return boolean, True if this RDN is allowed, false otherwise.
+ */
+ boolean isValidRDN(RDN distinguishedName);
+
+ /**
+ * Allows the profile to control the value set of the RDN. The profile can
+ * reject an RDN if needed.
+ * @param name - RDN.
+ * @return true if the name is acceptable to this profile, false otherwise.
+ */
+ boolean validateRDN(RDN name);
+
+ /**
+ * True if the profile we are checking is for issuing a CA certificate.
+ * @return True, if the profile used is for CA, false otherwise.
+ */
+ boolean isCA();
+}
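Because profiles are executable code, a deployment can supply its own. A hedged sketch of a hypothetical profile that tightens only RDN validation by extending DefaultProfile from this patch; the class name and the policy are illustrative, not part of the change:

import org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultProfile;
import org.bouncycastle.asn1.x500.RDN;

public class StrictRDNProfile extends DefaultProfile {
  @Override
  public boolean validateRDN(RDN name) {
    // Example policy: reject empty RDN values instead of approving everything.
    return name.getFirst() != null
        && !name.getFirst().getValue().toString().isEmpty();
  }
}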
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/package-info.java
new file mode 100644
index 0000000000..36c885d310
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/package-info.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/**
+ * The PKIProfiles package supports different kinds of profiles that
+ * certificates can conform to. If you are not familiar with PKI profiles,
+ * there is an excellent introduction at
+ *
+ * https://www.cs.auckland.ac.nz/~pgut001/pubs/x509guide.txt
+ *
+ * At a high level, the profiles in this directory define what kinds of
+ * Extensions, General Names, Key usage and critical extensions are
+ * permitted when the CA is operational.
+ *
+ * The Ozone profile is an excellent reference if you would like to create
+ * your own profiles.
+ */
+package org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles;
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificates/utils/CertificateSignRequest.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificates/utils/CertificateSignRequest.java
index 3624b32d79..7e9cf4dd02 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificates/utils/CertificateSignRequest.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificates/utils/CertificateSignRequest.java
@@ -34,14 +34,19 @@
import org.bouncycastle.asn1.x509.GeneralName;
import org.bouncycastle.asn1.x509.GeneralNames;
import org.bouncycastle.asn1.x509.KeyUsage;
+import org.bouncycastle.openssl.jcajce.JcaPEMWriter;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;
import org.bouncycastle.pkcs.PKCS10CertificationRequestBuilder;
import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder;
+import org.bouncycastle.util.io.pem.PemObject;
+import org.bouncycastle.util.io.pem.PemReader;
import java.io.IOException;
+import java.io.StringReader;
+import java.io.StringWriter;
import java.security.KeyPair;
import java.util.ArrayList;
import java.util.List;
@@ -70,7 +75,7 @@ public final class CertificateSignRequest {
* @param extensions - CSR extensions
*/
private CertificateSignRequest(String subject, String scmID, String clusterID,
- KeyPair keyPair, SecurityConfig config, Extensions extensions) {
+ KeyPair keyPair, SecurityConfig config, Extensions extensions) {
this.subject = subject;
this.clusterID = clusterID;
this.scmID = scmID;
@@ -97,6 +102,35 @@ private PKCS10CertificationRequest generateCSR() throws
}
return p10Builder.build(contentSigner);
}
+ public static String getEncodedString(PKCS10CertificationRequest request)
+ throws IOException {
+ PemObject pemObject =
+ new PemObject("CERTIFICATE REQUEST", request.getEncoded());
+ StringWriter str = new StringWriter();
+ try (JcaPEMWriter pemWriter = new JcaPEMWriter(str)) {
+ pemWriter.writeObject(pemObject);
+ }
+ return str.toString();
+ }
+
+
+ /**
+ * Gets a CertificateRequest Object from PEM encoded CSR.
+ *
+ * @param csr - PEM Encoded Certificate Request String.
+ * @return PKCS10CertificationRequest
+ * @throws IOException - On Error.
+ */
+ public static PKCS10CertificationRequest getCertificationRequest(String csr)
+ throws IOException {
+ try (PemReader reader = new PemReader(new StringReader(csr))) {
+ PemObject pemObject = reader.readPemObject();
+ if (pemObject.getContent() == null) {
+ throw new SCMSecurityException("Invalid Certificate signing request");
+ }
+ return new PKCS10CertificationRequest(pemObject.getContent());
+ }
+ }
/**
* Builder class for Certificate Sign Request.
@@ -144,12 +178,6 @@ public CertificateSignRequest.Builder addDnsName(String dnsName) {
return this;
}
- public CertificateSignRequest.Builder addRfc822Name(String name) {
- Preconditions.checkNotNull(name, "Rfc822Name cannot be null");
- this.addAltName(GeneralName.rfc822Name, name);
- return this;
- }
-
// IP address is subject to change which is optional for now.
public CertificateSignRequest.Builder addIpAddress(String ip) {
Preconditions.checkNotNull(ip, "Ip address cannot be null");
@@ -186,7 +214,7 @@ private Optional<Extension> getSubjectAltNameExtension() throws
IOException {
if (altNames != null) {
return Optional.of(new Extension(Extension.subjectAlternativeName,
- true, new DEROctetString(new GeneralNames(
+ false, new DEROctetString(new GeneralNames(
altNames.toArray(new GeneralName[altNames.size()])))));
}
return Optional.empty();
@@ -202,7 +230,9 @@ private Extensions createExtensions() throws IOException {
List<Extension> extensions = new ArrayList<>();
// Add basic extension
- extensions.add(getBasicExtension());
+ if (ca) {
+ extensions.add(getBasicExtension());
+ }
// Add key usage extension
extensions.add(getKeyUsageExtension());
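A hedged usage sketch of the two new PEM helpers added above (getEncodedString and getCertificationRequest); the builder arguments mirror the ones used in the tests later in this patch, and conf/keyPair are assumed to be set up the same way:

PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
    .addDnsName("hadoop.apache.org")
    .setCA(false)
    .setClusterID("ClusterID")
    .setScmID("SCMID")
    .setSubject("Ozone Cluster")
    .setConfiguration(conf)
    .setKey(keyPair)
    .build();
// Encode for the wire, then decode back on the CA side.
String pem = CertificateSignRequest.getEncodedString(csr);
PKCS10CertificationRequest decoded =
    CertificateSignRequest.getCertificationRequest(pem);
assert java.util.Arrays.equals(csr.getEncoded(), decoded.getEncoded());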
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/KeyCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/KeyCodec.java
index 1d45ef12fe..efd8d74765 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/KeyCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/KeyCodec.java
@@ -47,6 +47,7 @@
import java.security.PublicKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
+import java.security.spec.X509EncodedKeySpec;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;
@@ -209,10 +210,22 @@ public PrivateKey readPrivateKey(Path basePath, String privateKeyFileName)
throws InvalidKeySpecException, NoSuchAlgorithmException, IOException {
PKCS8EncodedKeySpec encodedKeySpec = readKey(basePath, privateKeyFileName);
final KeyFactory keyFactory =
- KeyFactory.getInstance(securityConfig.getProvider());
- final PrivateKey privateKey =
+ KeyFactory.getInstance(securityConfig.getKeyAlgo());
+ return
keyFactory.generatePrivate(encodedKeySpec);
- return privateKey;
+ }
+
+ /**
+ * Read the Public Key using defaults.
+ * @return PublicKey.
+ * @throws InvalidKeySpecException - On Error.
+ * @throws NoSuchAlgorithmException - On Error.
+ * @throws IOException - On Error.
+ */
+ public PublicKey readPublicKey() throws InvalidKeySpecException,
+ NoSuchAlgorithmException, IOException {
+ return readPublicKey(this.location.toAbsolutePath(),
+ securityConfig.getPublicKeyFileName());
}
/**
@@ -229,12 +242,28 @@ public PublicKey readPublicKey(Path basePath, String publicKeyFileName)
throws NoSuchAlgorithmException, InvalidKeySpecException, IOException {
PKCS8EncodedKeySpec encodedKeySpec = readKey(basePath, publicKeyFileName);
final KeyFactory keyFactory =
- KeyFactory.getInstance(securityConfig.getProvider());
- final PublicKey publicKey =
- keyFactory.generatePublic(encodedKeySpec);
- return publicKey;
+ KeyFactory.getInstance(securityConfig.getKeyAlgo());
+ return
+ keyFactory.generatePublic(
+ new X509EncodedKeySpec(encodedKeySpec.getEncoded()));
+
}
+
+ /**
+ * Returns the private key using defaults.
+ * @return PrivateKey.
+ * @throws InvalidKeySpecException - On Error.
+ * @throws NoSuchAlgorithmException - On Error.
+ * @throws IOException - On Error.
+ */
+ public PrivateKey readPrivateKey() throws InvalidKeySpecException,
+ NoSuchAlgorithmException, IOException {
+ return readPrivateKey(this.location.toAbsolutePath(),
+ securityConfig.getPrivateKeyFileName());
+ }
+
+
/**
* Helper function that actually writes data to the files.
*
@@ -246,7 +275,7 @@ public PublicKey readPublicKey(Path basePath, String publicKeyFileName)
* @throws IOException - On I/O failure.
*/
private synchronized void writeKey(Path basePath, KeyPair keyPair,
- String privateKeyFileName, String publicKeyFileName, boolean force)
+ String privateKeyFileName, String publicKeyFileName, boolean force)
throws IOException {
checkPreconditions(basePath);
@@ -282,7 +311,7 @@ private synchronized void writeKey(Path basePath, KeyPair keyPair,
* @throws IOException - On I/O failure.
*/
private void checkKeyFile(File privateKeyFile, boolean force,
- File publicKeyFile) throws IOException {
+ File publicKeyFile) throws IOException {
if (privateKeyFile.exists() && force) {
if (!privateKeyFile.delete()) {
throw new IOException("Unable to delete private key file.");
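A hedged sketch of the new default-location readers added to KeyCodec above; it assumes the key pair was previously written with writeKey(KeyPair), as in the new TestKeyCodec test at the end of this patch, and that the file names come from SecurityConfig defaults:

KeyPair kp = new HDDSKeyGenerator(configuration).generateKey();
KeyCodec codec = new KeyCodec(configuration);
codec.writeKey(kp);                              // writes to the default location
PublicKey publicKey = codec.readPublicKey();     // default public key file name
PrivateKey privateKey = codec.readPrivateKey();  // default private key file name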
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java
index 89d5d51b83..0a327ae28d 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java
@@ -19,7 +19,81 @@
/**
- * This package contains common routines used in creating an x509 based
- * identity framework for HDDS.
+ * This package contains common routines used in creating an x509 based identity
+ * framework for HDDS.
*/
package org.apache.hadoop.hdds.security.x509;
+/*
+
+Architecture of Certificate Infrastructure for SCM.
+====================================================
+
+The certificate infrastructure has two main parts, the certificate server or
+the Certificate authority and the clients who want certificates. The CA is
+responsible for issuing certificates to participating entities.
+
+To issue a certificate the CA has to verify the identity and the assertions
+in the certificate. The client starts off making a request to CA for a
+certificate. This request is called Certificate Signing Request or CSR
+(PKCS#10).
+
+When a CSR arrives on the CA, CA will decode the CSR and verify that all the
+fields in the CSR are in line with what the system expects. Since there are
+lots of possible ways to construct an X.509 certificate, we rely on PKI
+profiles.
+
+Generally, PKI profiles are policy documents or general guidelines that are
+followed by the requester and the CA. However, most of the commonly available
+PKI profiles are general purpose and offer too much surface area.
+
+SCM CA infrastructure supports the notion of a PKI profile class which can
+codify the RDNs, Extensions and other certificate policies. The CA when
+issuing a certificate will invoke a certificate approver class, based on the
+authentication method used. For example, out of the box, we support manual,
+Kerberos, trusted network and testing authentication mechanisms.
+
+If there is no authentication mechanism in place, then when the CA receives
+the CSR, it runs the standard PKI profile over it to verify that all the
+fields are in the expected ranges. Once that is done, the signing request is
+sent for human review and approval. This form of certificate approval is
+called Manual. Of all the certificate approval processes, this is the
+** most secure **. This approval needs to be done once for each data node.
+
+For existing clusters, where data nodes already have a Kerberos keytab, we
+can leverage the Kerberos identity mechanism to identify the data node that
+is requesting the certificate. In this case, users can configure the system
+to leverage Kerberos while issuing certificates and SCM CA will be able to
+verify the data node's identity and issue certificates automatically.
+
+In environments like Kubernetes, we can leverage the base system services to
+pass on a shared secret securely. In this model too, we can rely on these
+secrets to make sure that it is the right data node that is talking to us. This
+kind of approval is called a Trusted network approval. In this process, each
+data node not only sends the CSR but signs the request with a shared secret
+with SCM. SCM then can issue a certificate without the intervention of a
+human administrator.
+
+The last method, TESTING, which should never be used other than in development
+and testing clusters, is merely a mechanism to bypass all identity checks. If
+this flag is set up, then the CA will sign the CSR as long as the base profile
+approves all fields.
+
+ * Please do not use this mechanism(TESTING) for any purpose other than
+ * testing.
+
+CA - Certificate Approval and Code Layout (as of Dec 1st, 2018)
+=================================================================
+The CA implementation (as of now it is called DefaultCA) receives a CSR from
+the network layer. The network layer also tells the system what approver type
+to use; that is, if the Kerberos or Shared secrets mechanism is used, it
+reports that to the Default CA.
+
+The default CA instantiates the approver based on the type of the approver
+indicated by the network layer. This approver creates an instance of the PKI
+profile and passes each field of the certificate signing request to it. The
+PKI profile (as of today, Dec 1st, 2018, we have one profile, called the
+Ozone profile) verifies that each field in the CSR meets the approved set of
+values.
+
+Once the PKI Profile validates the request, it is either auto-approved or
+queued for manual review.
+
+ */
\ No newline at end of file
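Condensed into code, the flow described above looks roughly like the following sketch; it mirrors the new testRequestCertificate test further down in this patch and uses the TESTING approval type, so no identity check is performed. The clusterId and scmId strings are placeholders:

PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
    .addDnsName("hadoop.apache.org")
    .setCA(false)
    .setClusterID("ClusterID")
    .setScmID("SCMID")
    .setSubject("Ozone Cluster")
    .setConfiguration(conf)
    .setKey(keyPair)
    .build();
String csrString = CertificateSignRequest.getEncodedString(csr);

CertificateServer ca = new DefaultCAServer("testCA", clusterId, scmId);
ca.init(new SecurityConfig(conf), CertificateServer.CAType.SELF_SIGNED_CA);
Future<X509CertificateHolder> certHolder = ca.requestCertificate(csrString,
    CertificateApprover.ApprovalType.TESTING_AUTOMATIC);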
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/MockApprover.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/MockApprover.java
new file mode 100644
index 0000000000..972ef4417a
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/MockApprover.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority;
+
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.PKIProfile;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
+
+import java.io.IOException;
+import java.security.PrivateKey;
+import java.util.Date;
+import java.util.concurrent.CompletableFuture;
+
+/**
+ * A test approver class that makes testing easier.
+ */
+public class MockApprover extends BaseApprover {
+
+ public MockApprover(PKIProfile pkiProfile, SecurityConfig config) {
+ super(pkiProfile, config);
+ }
+
+ @Override
+ public CompletableFuture<X509CertificateHolder>
+ approve(PKCS10CertificationRequest csr) {
+ return super.approve(csr);
+ }
+
+ @Override
+ public X509CertificateHolder sign(SecurityConfig config, PrivateKey caPrivate,
+ X509CertificateHolder caCertificate, Date validFrom,
+ Date validTill, PKCS10CertificationRequest certificationRequest)
+ throws IOException, OperatorCreationException {
+ return null;
+ }
+
+}
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java
index 0e98ba7154..6b913717af 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java
@@ -23,14 +23,22 @@
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest;
+import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.IOException;
+import java.security.KeyPair;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
import java.security.cert.CertificateException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
import java.util.function.Consumer;
import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
@@ -113,6 +121,49 @@ public void testMissingKey() {
// exception.
assertTrue(e.toString().contains("Missing Keys"));
}
-
}
+
+ /**
+ * The most important test of this test suite. This tests that we are able
+ * to create a Test CA, that it creates its own self-signed CA certificate,
+ * and that it can then issue a certificate based on a CSR.
+ * @throws SCMSecurityException - on ERROR.
+ * @throws ExecutionException - on ERROR.
+ * @throws InterruptedException - on ERROR.
+ * @throws NoSuchProviderException - on ERROR.
+ * @throws NoSuchAlgorithmException - on ERROR.
+ */
+ @Test
+ public void testRequestCertificate() throws IOException,
+ ExecutionException, InterruptedException,
+ NoSuchProviderException, NoSuchAlgorithmException {
+ KeyPair keyPair =
+ new HDDSKeyGenerator(conf).generateKey();
+ PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
+ .addDnsName("hadoop.apache.org")
+ .addIpAddress("8.8.8.8")
+ .setCA(false)
+ .setClusterID("ClusterID")
+ .setScmID("SCMID")
+ .setSubject("Ozone Cluster")
+ .setConfiguration(conf)
+ .setKey(keyPair)
+ .build();
+
+ // Let us convert this to a string to mimic the common use case.
+ String csrString = CertificateSignRequest.getEncodedString(csr);
+
+ CertificateServer testCA = new DefaultCAServer("testCA",
+ RandomStringUtils.randomAlphabetic(4),
+ RandomStringUtils.randomAlphabetic(4));
+ testCA.init(new SecurityConfig(conf),
+ CertificateServer.CAType.SELF_SIGNED_CA);
+
+ Future<X509CertificateHolder> holder = testCA.requestCertificate(csrString,
+ CertificateApprover.ApprovalType.TESTING_AUTOMATIC);
+ // Right now our calls are synchronous. Eventually this will have to wait.
+ assertTrue(holder.isDone());
+ assertNotNull(holder.get());
+ }
+
}
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultProfile.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultProfile.java
new file mode 100644
index 0000000000..b5a6b35dc6
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultProfile.java
@@ -0,0 +1,361 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority;
+
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultProfile;
+import org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest;
+import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
+import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
+import org.bouncycastle.asn1.x500.X500Name;
+import org.bouncycastle.asn1.x500.X500NameBuilder;
+import org.bouncycastle.asn1.x500.style.BCStyle;
+import org.bouncycastle.asn1.x509.ExtendedKeyUsage;
+import org.bouncycastle.asn1.x509.Extension;
+import org.bouncycastle.asn1.x509.Extensions;
+import org.bouncycastle.asn1.x509.ExtensionsGenerator;
+import org.bouncycastle.asn1.x509.GeneralName;
+import org.bouncycastle.asn1.x509.GeneralNames;
+import org.bouncycastle.asn1.x509.KeyPurposeId;
+import org.bouncycastle.operator.ContentSigner;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
+import org.bouncycastle.pkcs.PKCS10CertificationRequestBuilder;
+import org.bouncycastle.pkcs.PKCSException;
+import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.IOException;
+import java.security.KeyPair;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+
+import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests for the default PKI Profile.
+ */
+public class TestDefaultProfile {
+ @Rule
+ public TemporaryFolder temporaryFolder = new TemporaryFolder();
+
+ private OzoneConfiguration configuration;
+ private SecurityConfig securityConfig;
+ private DefaultProfile defaultProfile;
+ private MockApprover testApprover;
+ private KeyPair keyPair;
+
+ @Before
+ public void setUp() throws Exception {
+ configuration = new OzoneConfiguration();
+ configuration.set(OZONE_METADATA_DIRS,
+ temporaryFolder.newFolder().toString());
+ securityConfig = new SecurityConfig(configuration);
+ defaultProfile = new DefaultProfile();
+ testApprover = new MockApprover(defaultProfile,
+ securityConfig);
+ keyPair = new HDDSKeyGenerator(securityConfig).generateKey();
+ }
+
+ /**
+ * Tests the General Names that we support. The default profile supports only
+ * two names right now.
+ */
+ @Test
+ public void testisSupportedGeneralName() {
+// Positive tests
+ assertTrue(defaultProfile.isSupportedGeneralName(GeneralName.iPAddress));
+ assertTrue(defaultProfile.isSupportedGeneralName(GeneralName.dNSName));
+// Negative Tests
+ assertFalse(defaultProfile.isSupportedGeneralName(GeneralName.directoryName));
+ assertFalse(defaultProfile.isSupportedGeneralName(GeneralName.rfc822Name));
+ assertFalse(defaultProfile.isSupportedGeneralName(GeneralName.otherName));
+ }
+
+ /**
+ * Test valid keys are validated correctly.
+ *
+ * @throws SCMSecurityException - on Error.
+ * @throws PKCSException - on Error.
+ * @throws OperatorCreationException - on Error.
+ */
+ @Test
+ public void testVerifyCertificate() throws SCMSecurityException,
+ PKCSException, OperatorCreationException {
+ PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
+ .addDnsName("hadoop.apache.org")
+ .addIpAddress("8.8.8.8")
+ .setCA(false)
+ .setClusterID("ClusterID")
+ .setScmID("SCMID")
+ .setSubject("Ozone Cluster")
+ .setConfiguration(configuration)
+ .setKey(keyPair)
+ .build();
+ assertTrue(testApprover.verifyPkcs10Request(csr));
+ }
+
+
+
+
+ /**
+ * Test invalid keys fail in the validation.
+ *
+ * @throws SCMSecurityException - on Error.
+ * @throws PKCSException - on Error.
+ * @throws OperatorCreationException - on Error.
+ * @throws NoSuchProviderException - on Error.
+ * @throws NoSuchAlgorithmException - on Error.
+ */
+ @Test
+ public void testVerifyCertificateInvalidKeys() throws SCMSecurityException,
+ PKCSException, OperatorCreationException,
+ NoSuchProviderException, NoSuchAlgorithmException {
+ KeyPair newKeyPair = new HDDSKeyGenerator(securityConfig).generateKey();
+ KeyPair wrongKey = new KeyPair(keyPair.getPublic(),
+ newKeyPair.getPrivate());
+ PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
+ .addDnsName("hadoop.apache.org")
+ .addIpAddress("8.8.8.8")
+ .setCA(false)
+ .setClusterID("ClusterID")
+ .setScmID("SCMID")
+ .setSubject("Ozone Cluster")
+ .setConfiguration(configuration)
+ .setKey(wrongKey)
+ .build();
+ // Signature verification should fail here, since the public/private key
+ // does not match.
+ assertFalse(testApprover.verifyPkcs10Request(csr));
+ }
+
+ /**
+ * Tests that normal valid extensions work with the default profile.
+ *
+ * @throws SCMSecurityException - on Error.
+ * @throws PKCSException - on Error.
+ * @throws OperatorCreationException - on Error.
+ */
+ @Test
+ public void testExtensions() throws SCMSecurityException {
+ PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
+ .addDnsName("hadoop.apache.org")
+ .addIpAddress("192.10.234.6")
+ .setCA(false)
+ .setClusterID("ClusterID")
+ .setScmID("SCMID")
+ .setSubject("Ozone Cluster")
+ .setConfiguration(configuration)
+ .setKey(keyPair)
+ .build();
+ assertTrue(testApprover.verfiyExtensions(csr));
+ }
+
+ /**
+ * Tests that invalid extensions cause a failure in validation. We will fail
+ * if CA extension is enabled.
+ *
+ * @throws SCMSecurityException - on Error.
+ */
+
+ @Test
+ public void testInvalidExtensionsWithCA() throws SCMSecurityException {
+ PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
+ .addDnsName("hadoop.apache.org")
+ .addIpAddress("192.10.234.6")
+ .setCA(true)
+ .setClusterID("ClusterID")
+ .setScmID("SCMID")
+ .setSubject("Ozone Cluster")
+ .setConfiguration(configuration)
+ .setKey(keyPair)
+ .build();
+ assertFalse(testApprover.verfiyExtensions(csr));
+ }
+
+ /**
+ * Tests that invalid extensions cause a failure in validation. We will fail
+ * if rfc822 type names are added. We also add the extension as both
+ * critical and non-critical to verify that we catch both cases.
+ *
+ * @throws SCMSecurityException - on Error.
+ */
+
+ @Test
+ public void testInvalidExtensionsWithEmail() throws IOException,
+ OperatorCreationException {
+ Extensions emailExtension = getSANExtension(GeneralName.rfc822Name,"bilbo" +
+ "@apache.org", false);
+ PKCS10CertificationRequest csr = getInvalidCSR(keyPair, emailExtension);
+ assertFalse(testApprover.verfiyExtensions(csr));
+
+ emailExtension = getSANExtension(GeneralName.rfc822Name,"bilbo" +
+ "@apache.org", true);
+ csr = getInvalidCSR(keyPair, emailExtension);
+ assertFalse(testApprover.verfiyExtensions(csr));
+
+ }
+
+ /**
+ * Same test for URI.
+ * @throws IOException - On Error.
+ * @throws OperatorCreationException- on Error.
+ */
+ @Test
+ public void testInvalidExtensionsWithURI() throws IOException,
+ OperatorCreationException {
+ Extensions oExtension = getSANExtension(
+ GeneralName.uniformResourceIdentifier,"s3g.ozone.org", false);
+ PKCS10CertificationRequest csr = getInvalidCSR(keyPair, oExtension);
+ assertFalse(testApprover.verfiyExtensions(csr));
+ oExtension = getSANExtension(GeneralName.uniformResourceIdentifier,
+ "s3g.ozone.org", false);
+ csr = getInvalidCSR(keyPair, oExtension);
+ assertFalse(testApprover.verfiyExtensions(csr));
+ }
+
+ /**
+ * Assert that if DNS is marked critical our PKI profile will reject it.
+ * @throws IOException - on Error.
+ * @throws OperatorCreationException - on Error.
+ */
+ @Test
+ public void testInvalidExtensionsWithCriticalDNS() throws IOException,
+ OperatorCreationException {
+ Extensions dnsExtension = getSANExtension(GeneralName.dNSName,
+ "ozone.hadoop.org",
+ true);
+ PKCS10CertificationRequest csr = getInvalidCSR(keyPair, dnsExtension);
+ assertFalse(testApprover.verfiyExtensions(csr));
+ // This test should pass, hence the assertTrue.
+ dnsExtension = getSANExtension(GeneralName.dNSName,
+ "ozone.hadoop.org",
+ false);
+ csr = getInvalidCSR(keyPair, dnsExtension);
+ assertTrue(testApprover.verfiyExtensions(csr));
+ }
+
+
+ /**
+ * Verify that valid Extended Key usage works as expected.
+ * @throws IOException - on Error.
+ * @throws OperatorCreationException - on Error.
+ */
+ @Test
+ public void testValidExtendedKeyUsage() throws IOException,
+ OperatorCreationException {
+ Extensions extendedExtension =
+ getKeyUsageExtension(KeyPurposeId.id_kp_clientAuth, false);
+ PKCS10CertificationRequest csr = getInvalidCSR(keyPair, extendedExtension);
+ assertTrue(testApprover.verfiyExtensions(csr));
+
+ extendedExtension = getKeyUsageExtension(KeyPurposeId.id_kp_serverAuth, false);
+ csr = getInvalidCSR(keyPair, extendedExtension);
+ assertTrue(testApprover.verfiyExtensions(csr));
+ }
+
+
+ /**
+ * Verify that invalid Extended Key usage is rejected as expected.
+ * @throws IOException - on Error.
+ * @throws OperatorCreationException - on Error.
+ */
+ @Test
+ public void testInValidExtendedKeyUsage() throws IOException,
+ OperatorCreationException {
+ Extensions extendedExtension =
+ getKeyUsageExtension(KeyPurposeId.id_kp_clientAuth, true);
+ PKCS10CertificationRequest csr = getInvalidCSR(keyPair, extendedExtension);
+ assertFalse(testApprover.verfiyExtensions(csr));
+
+ extendedExtension = getKeyUsageExtension(KeyPurposeId.id_kp_OCSPSigning, false);
+ csr = getInvalidCSR(keyPair, extendedExtension);
+ assertFalse(testApprover.verfiyExtensions(csr));
+ }
+
+
+
+ /**
+ * Generates a CSR with the specified extensions.
+ * This function is used to get an invalid CSR and test that the PKI profile
+ * rejects these invalid extensions; hence the function name. By itself it
+ * is a well-formed CSR, but our PKI profile will treat it as an invalid CSR.
+ *
+ * @param keyPair - Key Pair.
+ * @param extensions - Extensions to add to the CSR.
+ * @return CSR - PKCS10CertificationRequest
+ * @throws OperatorCreationException - on Error.
+ */
+ private PKCS10CertificationRequest getInvalidCSR(KeyPair keyPair,
+ Extensions extensions) throws OperatorCreationException {
+ X500NameBuilder namebuilder =
+ new X500NameBuilder(X500Name.getDefaultStyle());
+ namebuilder.addRDN(BCStyle.CN, "invalidCert");
+ PKCS10CertificationRequestBuilder p10Builder =
+ new JcaPKCS10CertificationRequestBuilder(namebuilder.build(),
+ keyPair.getPublic());
+ p10Builder.addAttribute(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest,
+ extensions);
+ JcaContentSignerBuilder csBuilder =
+ new JcaContentSignerBuilder(this.securityConfig.getSignatureAlgo());
+ ContentSigner signer = csBuilder.build(keyPair.getPrivate());
+ return p10Builder.build(signer);
+ }
+
+ /**
+ * Generates a SubjectAlternativeName extension with the given general name.
+ * @param extensionCode - General Name type (for example, rfc822Name).
+ * @param value - value to be added to the certificate.
+ * @param critical - boolean value that marks the extension as critical.
+ * @return - An Extensions object containing the SAN extension.
+ * @throws IOException - on Error.
+ */
+ private Extensions getSANExtension(int extensionCode, String value,
+ boolean critical) throws IOException {
+ GeneralName extn = new GeneralName(extensionCode,
+ value);
+ ExtensionsGenerator extensionsGenerator = new ExtensionsGenerator();
+ extensionsGenerator.addExtension(Extension.subjectAlternativeName, critical,
+ new GeneralNames(extn));
+ return extensionsGenerator.generate();
+ }
+
+ /**
+ * Returns an extension with Extended Key usage.
+ * @param purposeId - Usage that we want to encode.
+ * @param critical - makes the extension critical.
+ * @return Extensions.
+ */
+ private Extensions getKeyUsageExtension(KeyPurposeId purposeId,
+ boolean critical) throws IOException {
+ ExtendedKeyUsage extendedKeyUsage = new ExtendedKeyUsage(purposeId);
+ ExtensionsGenerator extensionsGenerator = new ExtensionsGenerator();
+ extensionsGenerator.addExtension(
+ Extension.extendedKeyUsage,critical, extendedKeyUsage);
+ return extensionsGenerator.generate();
+ }
+}
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java
index d234b6602f..a6c826bf1c 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java
@@ -99,11 +99,6 @@ public void testGenerateCSR() throws NoSuchProviderException,
Assert.assertEquals(1, csr.getAttributes().length);
Extensions extensions = SecurityUtil.getPkcs9Extensions(csr);
- // Verify basic constraints extension
- Extension basicExt = extensions.getExtension(Extension
- .basicConstraints);
- Assert.assertEquals(true, basicExt.isCritical());
-
// Verify key usage extension
Extension keyUsageExt = extensions.getExtension(Extension.keyUsage);
Assert.assertEquals(true, keyUsageExt.isCritical());
@@ -144,7 +139,6 @@ public void testGenerateCSRwithSan() throws NoSuchProviderException,
builder.addIpAddress("192.168.2.1");
builder.addDnsName("dn1.abc.com");
- builder.addRfc822Name("test@abc.com");
PKCS10CertificationRequest csr = builder.build();
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestKeyCodec.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestKeyCodec.java
index f0973f7f7d..d3e13d2383 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestKeyCodec.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestKeyCodec.java
@@ -20,6 +20,7 @@
package org.apache.hadoop.hdds.security.x509.keys;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_METADATA_DIR_NAME;
+import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -127,7 +128,7 @@ public void testWriteKey()
byte[] keyBytes = Base64.decodeBase64(privateKeydata);
PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
PrivateKey privateKeyDecoded = kf.generatePrivate(spec);
- Assert.assertNotNull("Private Key should not be null",
+ assertNotNull("Private Key should not be null",
privateKeyDecoded);
// Let us decode the public key and veriy that we can parse it back into
@@ -140,7 +141,7 @@ public void testWriteKey()
keyBytes = Base64.decodeBase64(publicKeydata);
X509EncodedKeySpec pubKeyspec = new X509EncodedKeySpec(keyBytes);
PublicKey publicKeyDecoded = kf.generatePublic(pubKeyspec);
- Assert.assertNotNull("Public Key should not be null",
+ assertNotNull("Public Key should not be null",
publicKeyDecoded);
// Now let us assert the permissions on the Directories and files are as
@@ -213,4 +214,18 @@ public void testWriteKeyInNonPosixFS()
.intercept(IOException.class, "Unsupported File System for pem file.",
() -> pemWriter.writeKey(kp));
}
+
+ @Test
+ public void testReadWritePublicKeywithoutArgs()
+ throws NoSuchProviderException, NoSuchAlgorithmException, IOException,
+ InvalidKeySpecException {
+
+ KeyPair kp = keyGenerator.generateKey();
+ KeyCodec keycodec = new KeyCodec(configuration);
+ keycodec.writeKey(kp);
+
+ PublicKey pubKey = keycodec.readPublicKey();
+ assertNotNull(pubKey);
+
+ }
}
\ No newline at end of file