diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
index df783f16ed..a7bc789174 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
@@ -147,6 +147,10 @@ protected final String getPathAsString() {
 
   protected abstract String getSchemeName();
 
+  protected abstract String getKeyStoreType();
+
+  protected abstract String getAlgorithm();
+
   protected abstract OutputStream getOutputStreamForKeystore()
       throws IOException;
 
@@ -264,8 +268,8 @@ CredentialEntry innerSetCredential(String alias, char[] material)
     writeLock.lock();
     try {
       keyStore.setKeyEntry(alias,
-          new SecretKeySpec(new String(material).getBytes("UTF-8"), "AES"),
-          password, null);
+          new SecretKeySpec(new String(material).getBytes("UTF-8"),
+              getAlgorithm()), password, null);
     } catch (KeyStoreException e) {
       throw new IOException("Can't store credential " + alias + " in "
           + this, e);
@@ -315,7 +319,7 @@ private void locateKeystore() throws IOException {
       password = CREDENTIAL_PASSWORD_DEFAULT.toCharArray();
     }
     KeyStore ks;
-    ks = KeyStore.getInstance("jceks");
+    ks = KeyStore.getInstance(getKeyStoreType());
     if (keystoreExists()) {
       stashOriginalFilePermissions();
       try (InputStream in = getInputStreamForFile()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/BouncyCastleFipsKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/BouncyCastleFipsKeyStoreProvider.java
new file mode 100644
index 0000000000..7c7c2c6cee
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/BouncyCastleFipsKeyStoreProvider.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.security.alias;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+
+import java.io.IOException;
+import java.net.URI;
+
+/**
+ * CredentialProvider based on Bouncy Castle FIPS KeyStore file format.
+ * The file may be stored in any Hadoop FileSystem using the following + * name mangling: + * bcfks://hdfs@nn1.example.com/my/creds.bcfks {@literal ->} + * hdfs://nn1.example.com/my/creds.bcfks bcfks://file/home/larry/creds.bcfks + * {@literal ->} file:///home/user1/creds.bcfks + */ +@InterfaceAudience.Private +public final class BouncyCastleFipsKeyStoreProvider extends KeyStoreProvider { + public static final String SCHEME_NAME = "bcfks"; + public static final String KEYSTORE_TYPE = "bcfks"; + public static final String ALGORITHM = "HMACSHA512"; + + private BouncyCastleFipsKeyStoreProvider(URI uri, Configuration conf) + throws IOException { + super(uri, conf); + } + + @Override + protected String getSchemeName() { + return SCHEME_NAME; + } + + @Override + protected String getKeyStoreType() { + return KEYSTORE_TYPE; + } + + @Override + protected String getAlgorithm() { + return ALGORITHM; + } + + /** + * The factory to create JksProviders, which is used by the ServiceLoader. + */ + public static class Factory extends CredentialProviderFactory { + @Override + public CredentialProvider createProvider(URI providerName, + Configuration conf) throws IOException { + if (SCHEME_NAME.equals(providerName.getScheme())) { + return new BouncyCastleFipsKeyStoreProvider(providerName, conf); + } + return null; + } + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java index 5028482dfc..f3b721f50b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java @@ -20,14 +20,8 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.permission.FsPermission; import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; import java.net.URI; /** @@ -38,11 +32,10 @@ * {@literal ->} file:///home/larry/creds.jceks */ @InterfaceAudience.Private -public class JavaKeyStoreProvider extends AbstractJavaKeyStoreProvider { +public final class JavaKeyStoreProvider extends KeyStoreProvider { public static final String SCHEME_NAME = "jceks"; - - private FileSystem fs; - private FsPermission permissions; + public static final String KEYSTORE_TYPE = "jceks"; + public static final String ALGORITHM = "AES"; private JavaKeyStoreProvider(URI uri, Configuration conf) throws IOException { @@ -55,38 +48,13 @@ protected String getSchemeName() { } @Override - protected OutputStream getOutputStreamForKeystore() throws IOException { - FSDataOutputStream out = FileSystem.create(fs, getPath(), permissions); - return out; + protected String getKeyStoreType() { + return KEYSTORE_TYPE; } @Override - protected boolean keystoreExists() throws IOException { - return fs.exists(getPath()); - } - - @Override - protected InputStream getInputStreamForFile() throws IOException { - return fs.open(getPath()); - } - - @Override - protected void createPermissions(String perms) { - permissions = new FsPermission(perms); - } - - @Override - protected void stashOriginalFilePermissions() throws IOException { - // save off permissions in case we need to - // rewrite the 
keystore in flush() - FileStatus s = fs.getFileStatus(getPath()); - permissions = s.getPermission(); - } - - protected void initFileSystem(URI uri) - throws IOException { - super.initFileSystem(uri); - fs = getPath().getFileSystem(getConf()); + protected String getAlgorithm() { + return ALGORITHM; } /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/KeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/KeyStoreProvider.java new file mode 100644 index 0000000000..6909b07161 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/KeyStoreProvider.java @@ -0,0 +1,86 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.security.alias; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.permission.FsPermission; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.URI; + +/** + * CredentialProvider based on Java Key Store API. 
+ * The file may be stored in any Hadoop FileSystem using the following + * name mangling: + * bcfks://hdfs@nn1.example.com/my/creds.bcfks {@literal ->} + * hdfs://nn1.example.com/my/creds.bcfks bcfks://file/home/larry/creds.bcfks + * {@literal ->} file:///home/user1/creds.bcfks + */ +@InterfaceAudience.Private +public abstract class KeyStoreProvider extends AbstractJavaKeyStoreProvider { + + private FileSystem fs; + private FsPermission permissions; + + protected KeyStoreProvider(URI uri, Configuration conf) + throws IOException { + super(uri, conf); + } + + @Override + protected OutputStream getOutputStreamForKeystore() throws IOException { + FSDataOutputStream out = FileSystem.create(fs, getPath(), permissions); + return out; + } + + @Override + protected boolean keystoreExists() throws IOException { + return fs.exists(getPath()); + } + + @Override + protected InputStream getInputStreamForFile() throws IOException { + return fs.open(getPath()); + } + + @Override + protected void createPermissions(String perms) { + permissions = new FsPermission(perms); + } + + @Override + protected void stashOriginalFilePermissions() throws IOException { + // save off permissions in case we need to + // rewrite the keystore in flush() + FileStatus s = fs.getFileStatus(getPath()); + permissions = s.getPermission(); + } + + protected void initFileSystem(URI uri) + throws IOException { + super.initFileSystem(uri); + fs = getPath().getFileSystem(getConf()); + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalBouncyCastleFipsKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalBouncyCastleFipsKeyStoreProvider.java new file mode 100644 index 0000000000..1aef63a90d --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalBouncyCastleFipsKeyStoreProvider.java @@ -0,0 +1,75 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.security.alias; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; + +import java.io.IOException; +import java.net.URI; + +/** + * CredentialProvider based on bouncy castle FIPS KeyStore file format. 
+ * The file may be stored only on the local filesystem using the + * following name mangling: + * localbcfks://file/home/larry/creds.bcfks {@literal ->} + * file:///home/larry/creds.bcfks + */ +@InterfaceAudience.Private +public final class LocalBouncyCastleFipsKeyStoreProvider extends + LocalKeyStoreProvider { + public static final String SCHEME_NAME = "localbcfks"; + public static final String KEYSTORE_TYPE = "bcfks"; + public static final String ALGORITHM = "HMACSHA512"; + + private LocalBouncyCastleFipsKeyStoreProvider(URI uri, Configuration conf) + throws IOException { + super(uri, conf); + } + + @Override + protected String getSchemeName() { + return SCHEME_NAME; + } + + @Override + protected String getKeyStoreType() { + return KEYSTORE_TYPE; + } + + @Override + protected String getAlgorithm() { + return ALGORITHM; + } + + /** + * The factory to create KeyStore Providers, which is used by the + * ServiceLoader. + */ + public static class Factory extends CredentialProviderFactory { + @Override + public CredentialProvider createProvider(URI providerName, + Configuration conf) throws IOException { + if (SCHEME_NAME.equals(providerName.getScheme())) { + return new LocalBouncyCastleFipsKeyStoreProvider(providerName, conf); + } + return null; + } + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java index c44e246b9d..dd92241289 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java @@ -20,24 +20,9 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.util.Shell; -import java.io.File; import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.attribute.PosixFilePermission; -import java.nio.file.attribute.PosixFilePermissions; -import java.util.Set; -import java.util.StringTokenizer; -import java.util.EnumSet; /** * CredentialProvider based on Java's KeyStore file format. The file may be @@ -47,10 +32,10 @@ */ @InterfaceAudience.Private public final class LocalJavaKeyStoreProvider extends - AbstractJavaKeyStoreProvider { + LocalKeyStoreProvider { public static final String SCHEME_NAME = "localjceks"; - private File file; - private Set permissions; + public static final String KEYSTORE_TYPE = "jceks"; + public static final String ALGORITHM = "AES"; private LocalJavaKeyStoreProvider(URI uri, Configuration conf) throws IOException { @@ -63,106 +48,13 @@ protected String getSchemeName() { } @Override - protected OutputStream getOutputStreamForKeystore() throws IOException { - if (LOG.isDebugEnabled()) { - LOG.debug("using '" + file + "' for output stream."); - } - OutputStream out = Files.newOutputStream(file.toPath()); - return out; + protected String getKeyStoreType() { + return KEYSTORE_TYPE; } @Override - protected boolean keystoreExists() throws IOException { - /* The keystore loader doesn't handle zero length files. 
*/ - return file.exists() && (file.length() > 0); - } - - @Override - protected InputStream getInputStreamForFile() throws IOException { - InputStream is = Files.newInputStream(file.toPath()); - return is; - } - - @Override - protected void createPermissions(String perms) throws IOException { - int mode = 700; - try { - mode = Integer.parseInt(perms, 8); - } catch (NumberFormatException nfe) { - throw new IOException("Invalid permissions mode provided while " - + "trying to createPermissions", nfe); - } - permissions = modeToPosixFilePermission(mode); - } - - @Override - protected void stashOriginalFilePermissions() throws IOException { - // save off permissions in case we need to - // rewrite the keystore in flush() - if (!Shell.WINDOWS) { - Path path = Paths.get(file.getCanonicalPath()); - permissions = Files.getPosixFilePermissions(path); - } else { - // On Windows, the JDK does not support the POSIX file permission APIs. - // Instead, we can do a winutils call and translate. - String[] cmd = Shell.getGetPermissionCommand(); - String[] args = new String[cmd.length + 1]; - System.arraycopy(cmd, 0, args, 0, cmd.length); - args[cmd.length] = file.getCanonicalPath(); - String out = Shell.execCommand(args); - StringTokenizer t = new StringTokenizer(out, Shell.TOKEN_SEPARATOR_REGEX); - // The winutils output consists of 10 characters because of the leading - // directory indicator, i.e. "drwx------". The JDK parsing method expects - // a 9-character string, so remove the leading character. - String permString = t.nextToken().substring(1); - permissions = PosixFilePermissions.fromString(permString); - } - } - - @Override - protected void initFileSystem(URI uri) - throws IOException { - super.initFileSystem(uri); - try { - file = new File(new URI(getPath().toString())); - if (LOG.isDebugEnabled()) { - LOG.debug("initialized local file as '" + file + "'."); - if (file.exists()) { - LOG.debug("the local file exists and is size " + file.length()); - if (LOG.isTraceEnabled()) { - if (file.canRead()) { - LOG.trace("we can read the local file."); - } - if (file.canWrite()) { - LOG.trace("we can write the local file."); - } - } - } else { - LOG.debug("the local file does not exist."); - } - } - } catch (URISyntaxException e) { - throw new IOException(e); - } - } - - @Override - public void flush() throws IOException { - super.flush(); - if (LOG.isDebugEnabled()) { - LOG.debug("Resetting permissions to '" + permissions + "'"); - } - if (!Shell.WINDOWS) { - Files.setPosixFilePermissions(Paths.get(file.getCanonicalPath()), - permissions); - } else { - // FsPermission expects a 10-character string because of the leading - // directory indicator, i.e. "drwx------". The JDK toString method returns - // a 9-character string, so prepend a leading character. 
- FsPermission fsPermission = FsPermission.valueOf( - "-" + PosixFilePermissions.toString(permissions)); - FileUtil.setPermission(file, fsPermission); - } + protected String getAlgorithm() { + return ALGORITHM; } /** @@ -178,37 +70,4 @@ public CredentialProvider createProvider(URI providerName, return null; } } - - private static Set modeToPosixFilePermission( - int mode) { - Set perms = EnumSet.noneOf(PosixFilePermission.class); - if ((mode & 0001) != 0) { - perms.add(PosixFilePermission.OTHERS_EXECUTE); - } - if ((mode & 0002) != 0) { - perms.add(PosixFilePermission.OTHERS_WRITE); - } - if ((mode & 0004) != 0) { - perms.add(PosixFilePermission.OTHERS_READ); - } - if ((mode & 0010) != 0) { - perms.add(PosixFilePermission.GROUP_EXECUTE); - } - if ((mode & 0020) != 0) { - perms.add(PosixFilePermission.GROUP_WRITE); - } - if ((mode & 0040) != 0) { - perms.add(PosixFilePermission.GROUP_READ); - } - if ((mode & 0100) != 0) { - perms.add(PosixFilePermission.OWNER_EXECUTE); - } - if ((mode & 0200) != 0) { - perms.add(PosixFilePermission.OWNER_WRITE); - } - if ((mode & 0400) != 0) { - perms.add(PosixFilePermission.OWNER_READ); - } - return perms; - } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalKeyStoreProvider.java new file mode 100644 index 0000000000..b355bbc9cd --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalKeyStoreProvider.java @@ -0,0 +1,194 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.security.alias; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.util.Shell; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.EnumSet; +import java.util.Set; +import java.util.StringTokenizer; + +/** + * CredentialProvider based on Java's KeyStore file format. 
The file may be + * stored only on the local filesystem using the following name mangling: + * localjceks://file/home/larry/creds.jceks {@literal ->} + * file:///home/larry/creds.jceks + */ +@InterfaceAudience.Private +public abstract class LocalKeyStoreProvider extends + AbstractJavaKeyStoreProvider { + private File file; + private Set permissions; + + protected LocalKeyStoreProvider(URI uri, Configuration conf) + throws IOException { + super(uri, conf); + } + + @Override + protected OutputStream getOutputStreamForKeystore() throws IOException { + if (LOG.isDebugEnabled()) { + LOG.debug("using '" + file + "' for output stream."); + } + OutputStream out = Files.newOutputStream(file.toPath()); + return out; + } + + @Override + protected boolean keystoreExists() throws IOException { + /* The keystore loader doesn't handle zero length files. */ + return file.exists() && (file.length() > 0); + } + + @Override + protected InputStream getInputStreamForFile() throws IOException { + InputStream is = Files.newInputStream(file.toPath()); + return is; + } + + @Override + protected void createPermissions(String perms) throws IOException { + int mode = 700; + try { + mode = Integer.parseInt(perms, 8); + } catch (NumberFormatException nfe) { + throw new IOException("Invalid permissions mode provided while " + + "trying to createPermissions", nfe); + } + permissions = modeToPosixFilePermission(mode); + } + + @Override + protected void stashOriginalFilePermissions() throws IOException { + // save off permissions in case we need to + // rewrite the keystore in flush() + if (!Shell.WINDOWS) { + Path path = Paths.get(file.getCanonicalPath()); + permissions = Files.getPosixFilePermissions(path); + } else { + // On Windows, the JDK does not support the POSIX file permission APIs. + // Instead, we can do a winutils call and translate. + String[] cmd = Shell.getGetPermissionCommand(); + String[] args = new String[cmd.length + 1]; + System.arraycopy(cmd, 0, args, 0, cmd.length); + args[cmd.length] = file.getCanonicalPath(); + String out = Shell.execCommand(args); + StringTokenizer t = new StringTokenizer(out, Shell.TOKEN_SEPARATOR_REGEX); + // The winutils output consists of 10 characters because of the leading + // directory indicator, i.e. "drwx------". The JDK parsing method expects + // a 9-character string, so remove the leading character. + String permString = t.nextToken().substring(1); + permissions = PosixFilePermissions.fromString(permString); + } + } + + @Override + protected void initFileSystem(URI uri) + throws IOException { + super.initFileSystem(uri); + try { + file = new File(new URI(getPath().toString())); + if (LOG.isDebugEnabled()) { + LOG.debug("initialized local file as '" + file + "'."); + if (file.exists()) { + LOG.debug("the local file exists and is size " + file.length()); + if (LOG.isTraceEnabled()) { + if (file.canRead()) { + LOG.trace("we can read the local file."); + } + if (file.canWrite()) { + LOG.trace("we can write the local file."); + } + } + } else { + LOG.debug("the local file does not exist."); + } + } + } catch (URISyntaxException e) { + throw new IOException(e); + } + } + + @Override + public void flush() throws IOException { + super.flush(); + if (LOG.isDebugEnabled()) { + LOG.debug("Resetting permissions to '" + permissions + "'"); + } + if (!Shell.WINDOWS) { + Files.setPosixFilePermissions(Paths.get(file.getCanonicalPath()), + permissions); + } else { + // FsPermission expects a 10-character string because of the leading + // directory indicator, i.e. "drwx------". 
The JDK toString method returns + // a 9-character string, so prepend a leading character. + FsPermission fsPermission = FsPermission.valueOf( + "-" + PosixFilePermissions.toString(permissions)); + FileUtil.setPermission(file, fsPermission); + } + } + + private static Set modeToPosixFilePermission( + int mode) { + Set perms = EnumSet.noneOf(PosixFilePermission.class); + if ((mode & 0001) != 0) { + perms.add(PosixFilePermission.OTHERS_EXECUTE); + } + if ((mode & 0002) != 0) { + perms.add(PosixFilePermission.OTHERS_WRITE); + } + if ((mode & 0004) != 0) { + perms.add(PosixFilePermission.OTHERS_READ); + } + if ((mode & 0010) != 0) { + perms.add(PosixFilePermission.GROUP_EXECUTE); + } + if ((mode & 0020) != 0) { + perms.add(PosixFilePermission.GROUP_WRITE); + } + if ((mode & 0040) != 0) { + perms.add(PosixFilePermission.GROUP_READ); + } + if ((mode & 0100) != 0) { + perms.add(PosixFilePermission.OWNER_EXECUTE); + } + if ((mode & 0200) != 0) { + perms.add(PosixFilePermission.OWNER_WRITE); + } + if ((mode & 0400) != 0) { + perms.add(PosixFilePermission.OWNER_READ); + } + return perms; + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/package-info.java new file mode 100644 index 0000000000..d05e3cb9f2 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Provides the hadoop credential provider API. 
+ */
+package org.apache.hadoop.security.alias;
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.security.alias.CredentialProviderFactory b/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.security.alias.CredentialProviderFactory
index f673cf4cae..1c6fc74d33 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.security.alias.CredentialProviderFactory
+++ b/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.security.alias.CredentialProviderFactory
@@ -16,3 +16,5 @@
 org.apache.hadoop.security.alias.JavaKeyStoreProvider$Factory
 org.apache.hadoop.security.alias.LocalJavaKeyStoreProvider$Factory
 org.apache.hadoop.security.alias.UserProvider$Factory
+org.apache.hadoop.security.alias.BouncyCastleFipsKeyStoreProvider$Factory
+org.apache.hadoop.security.alias.LocalBouncyCastleFipsKeyStoreProvider$Factory
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/CredentialProviderAPI.md b/hadoop-common-project/hadoop-common/src/site/markdown/CredentialProviderAPI.md
index 0c5f4861c1..0de09250b5 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/CredentialProviderAPI.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/CredentialProviderAPI.md
@@ -133,18 +133,21 @@ In order to indicate a particular provider type and location, the user must prov
 1. The `UserProvider`, which is represented by the provider URI `user:///`, is used to retrieve credentials from a user's Credentials file. This file is used to store various tokens, secrets and passwords that are needed by executing jobs and applications.
 2. The `JavaKeyStoreProvider`, which is represented by the provider URI `jceks://SCHEME/path-to-keystore`, is used to retrieve credentials from a Java keystore file in a filesystem ``
    The underlying use of the Hadoop filesystem API allows credentials to be stored on the local filesystem or within cluster stores.
-3. The `LocalJavaKeyStoreProvider`, which is represented by the provider URI `localjceks://file/path-to-keystore`, is used to access credentials from a Java keystore that is must be stored on the local filesystem. This is needed for credentials that would result in a recursive dependency on accessing HDFS. Anytime that your credential is required to gain access to HDFS we can't depend on getting a credential out of HDFS to do so.
+3. The `LocalJavaKeyStoreProvider`, which is represented by the provider URI `localjceks://file/path-to-keystore`, is used to access credentials from a Java keystore that must be stored on the local filesystem. This is needed for credentials that would result in a recursive dependency on accessing HDFS. Anytime that your credential is required to gain access to HDFS we can't depend on getting a credential out of HDFS to do so.
+4. The `BouncyCastleFipsKeyStoreProvider`, which is represented by the provider URI `bcfks://SCHEME/path-to-keystore`, is used to retrieve credentials from a Bouncy Castle FIPS keystore file in a file system ``
+   The underlying use of the Hadoop filesystem API allows credentials to be stored on the local filesystem or within cluster stores.
+5. The `LocalBouncyCastleFipsKeyStoreProvider`, which is represented by the provider URI `localbcfks://file/path-to-keystore`, is used to access credentials from a Bouncy Castle FIPS keystore that must be stored on the local filesystem. This is needed for credentials that would result in a recursive dependency on accessing HDFS. Anytime that your credential is required to gain access to HDFS we can't depend on getting a credential out of HDFS to do so.
 
 When credentials are stored in a filesystem, the following rules apply:
 
-* Credentials stored in local `localjceks://` files are loaded in the process reading in the configuration.
+* Credentials stored in local `localjceks://` or `localbcfks://` files are loaded in the process reading in the configuration.
   For use in a YARN application, this means that they must be visible across the entire cluster, in the local filesystems of the hosts.
 
-* Credentials stored with the `jceks://` provider can be stored in the cluster filesystem,
+* Credentials stored with the `jceks://` or `bcfks://` provider can be stored in the cluster filesystem,
   and so visible across the cluster —but not in the filesystem which requires the specific
   credentials for their access.
 
-To wrap filesystem URIs with a `jceks` URI follow these steps:
+To wrap filesystem URIs with a `jceks` URI follow these steps. The Bouncy Castle FIPS providers follow the same steps with `jceks` replaced by `bcfks`, and additionally require a FIPS provider to be configured at the OS/JDK level.
 
 1. Take a filesystem URI such as `hdfs://namenode:9001/users/alice/secrets.jceks`
 1. Place `jceks://` in front of the URL: `jceks://hdfs://namenode:9001/users/alice/secrets.jceks`
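As a concrete companion to the provider URIs documented above, the sketch below shows how an application resolves a secret once `hadoop.security.credential.provider.path` points at one of these keystores. It is an illustrative sketch only: the alias name and the provider paths are made up, and the `bcfks`/`localbcfks` schemes additionally assume that a Bouncy Castle FIPS provider has been registered with the JVM.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.alias.CredentialProviderFactory;

    public class CredentialLookupSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Same configuration key the documentation above describes; multiple
        // providers may be listed and are consulted in order.
        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
            "jceks://hdfs@nn1.example.com/my/creds.jceks,"
                + "localbcfks://file/home/user1/creds.bcfks");
        // getPassword() asks the configured providers for the alias before
        // falling back to any clear-text value in the configuration itself.
        char[] secret = conf.getPassword("my.example.password.alias");
        if (secret != null) {
          System.out.println("resolved a credential of length " + secret.length);
        }
      }
    }

Entries are typically created beforehand with the `hadoop credential create <alias> -provider <provider-uri>` command, which writes through the same provider implementations registered in the service file above.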
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
index ee7e42cb1d..f7bb8ec4a9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
@@ -36,6 +36,7 @@
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -51,6 +52,9 @@ public class TestCredentialProviderFactory {
   @Rule
   public final TestName test = new TestName();
 
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
   @Before
   public void announce() {
     LOG.info("Running test " + test.getMethodName());
@@ -245,6 +249,21 @@ public void testLocalJksProvider() throws Exception {
     checkPermissionRetention(conf, ourUrl, path);
   }
 
+  @Test
+  public void testLocalBCFKSProvider() throws Exception {
+    Configuration conf = new Configuration();
+    final Path ksPath = new Path(tmpDir.toString(), "test.bcfks");
+    final String ourUrl = LocalBouncyCastleFipsKeyStoreProvider.SCHEME_NAME +
+        "://file" + ksPath.toUri();
+    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
+
+    exception.expect(IOException.class);
+    exception.expectMessage("Can't create keystore");
+    List<CredentialProvider> providers =
+        CredentialProviderFactory.getProviders(conf);
+    assertTrue("BCFKS needs additional JDK setup", providers.isEmpty());
+  }
+
   public void checkPermissionRetention(Configuration conf, String ourUrl,
       Path path) throws Exception {
     CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
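The expected IOException in testLocalBCFKSProvider reflects that a stock JDK ships no JCE provider backing the BCFKS keystore type, which is what the "BCFKS needs additional JDK setup" assertion message hints at. A small standalone probe along the lines of the sketch below, using only standard JCE calls, can confirm whether the runtime has been prepared before the bcfks:// or localbcfks:// schemes are configured; the class name is hypothetical.

    import java.security.KeyStore;
    import java.security.KeyStoreException;

    public class BcfksAvailabilityProbe {
      public static void main(String[] args) {
        try {
          // Succeeds only when a registered JCE provider (for example the
          // Bouncy Castle FIPS provider) supplies the BCFKS keystore type.
          KeyStore ks = KeyStore.getInstance("BCFKS");
          System.out.println("BCFKS keystores available via provider "
              + ks.getProvider().getName());
        } catch (KeyStoreException e) {
          System.out.println("BCFKS keystore type not available; bcfks:// and "
              + "localbcfks:// credential providers will fail to load.");
        }
      }
    }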