diff --git a/hadoop-ozone/acceptance-test/src/test/compose/compose-secure/.env b/hadoop-dist/src/main/compose/ozonesecure/.env
similarity index 92%
rename from hadoop-ozone/acceptance-test/src/test/compose/compose-secure/.env
rename to hadoop-dist/src/main/compose/ozonesecure/.env
index 199d52ce30..a494004af2 100644
--- a/hadoop-ozone/acceptance-test/src/test/compose/compose-secure/.env
+++ b/hadoop-dist/src/main/compose/ozonesecure/.env
@@ -14,4 +14,5 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-OZONEDIR=../../../../../../hadoop-dist/target/ozone-0.3.0-SNAPSHOT
+HDDS_VERSION=${hdds.version}
+SRC_VOLUME=../../
diff --git a/hadoop-ozone/acceptance-test/src/test/compose/compose-secure/docker-compose.yaml b/hadoop-dist/src/main/compose/ozonesecure/docker-compose.yaml
similarity index 68%
rename from hadoop-ozone/acceptance-test/src/test/compose/compose-secure/docker-compose.yaml
rename to hadoop-dist/src/main/compose/ozonesecure/docker-compose.yaml
index db211bc3c5..42ab05ee3d 100644
--- a/hadoop-ozone/acceptance-test/src/test/compose/compose-secure/docker-compose.yaml
+++ b/hadoop-dist/src/main/compose/ozonesecure/docker-compose.yaml
@@ -16,47 +16,38 @@
version: "3"
services:
- ozone.kdc:
+ kdc:
image: ahadoop/kdc:v1
- namenode:
- image: ahadoop/ozone:v1
- hostname: namenode
+ hostname: kdc
volumes:
- - ${OZONEDIR}:/opt/hadoop
- ports:
- - 9000:9000
- environment:
- ENSURE_NAMENODE_DIR: /data/namenode
- env_file:
- - ./docker-config
- command: ["/opt/hadoop/bin/hdfs","namenode"]
+ - $SRC_VOLUME:/opt/hadoop
datanode:
- image: ahadoop/ozone:v1
- hostname: datanode
+ image: ahadoop/runner:latest
volumes:
- - ${OZONEDIR}:/opt/hadoop
+ - $SRC_VOLUME:/opt/hadoop
+ hostname: datanode
ports:
- - 9874
+ - 9864
+ command: ["/opt/hadoop/bin/ozone","datanode"]
env_file:
- ./docker-config
- command: ["/opt/hadoop/bin/ozone","datanode"]
- om:
- image: ahadoop/ozone:v1
+ ozoneManager:
+ image: ahadoop/runner:latest
hostname: om
volumes:
- - ${OZONEDIR}:/opt/hadoop
+ - $SRC_VOLUME:/opt/hadoop
ports:
- 9874:9874
environment:
- ENSURE_KSM_INITIALIZED: /data/metadata/om/current/VERSION
+ ENSURE_OM_INITIALIZED: /data/metadata/ozoneManager/current/VERSION
env_file:
- ./docker-config
- command: ["/opt/hadoop/bin/ozone","ksm"]
+ command: ["/opt/hadoop/bin/ozone","om"]
scm:
- image: ahadoop/ozone:v1
+ image: ahadoop/runner:latest
hostname: scm
volumes:
- - ${OZONEDIR}:/opt/hadoop
+ - $SRC_VOLUME:/opt/hadoop
ports:
- 9876:9876
env_file:
diff --git a/hadoop-ozone/acceptance-test/src/test/compose/compose-secure/docker-config b/hadoop-dist/src/main/compose/ozonesecure/docker-config
similarity index 82%
rename from hadoop-ozone/acceptance-test/src/test/compose/compose-secure/docker-config
rename to hadoop-dist/src/main/compose/ozonesecure/docker-config
index cdf72ced54..704dc7bd52 100644
--- a/hadoop-ozone/acceptance-test/src/test/compose/compose-secure/docker-config
+++ b/hadoop-dist/src/main/compose/ozonesecure/docker-config
@@ -14,25 +14,26 @@
# See the License for the specific language governing permissions and
# limitations under the License.
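+#
+# Entries of the form FILE.FORMAT_key=value are rendered into the matching
+# configuration file (e.g. ozone-site.xml) by envtoconf.py in the runner image.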
-OZONE-SITE.XML_ozone.ksm.address=om
+OZONE-SITE.XML_ozone.om.address=om
+OZONE-SITE.XML_ozone.om.http-address=om:9874
OZONE-SITE.XML_ozone.scm.names=scm
OZONE-SITE.XML_ozone.enabled=True
-OZONE-SITE.XML_hdds.scm.datanode.id=/data/datanode.id
-OZONE-SITE.XML_hdds.scm.block.client.address=scm
+OZONE-SITE.XML_ozone.scm.datanode.id=/data/datanode.id
+OZONE-SITE.XML_ozone.scm.block.client.address=scm
OZONE-SITE.XML_ozone.metadata.dirs=/data/metadata
OZONE-SITE.XML_ozone.handler.type=distributed
-OZONE-SITE.XML_hdds.scm.client.address=scm
+OZONE-SITE.XML_ozone.scm.client.address=scm
OZONE-SITE.XML_ozone.replication=1
OZONE-SITE.XML_hdds.scm.kerberos.principal=scm/scm@EXAMPLE.COM
OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
OZONE-SITE.XML_ozone.om.kerberos.principal=om/om@EXAMPLE.COM
OZONE-SITE.XML_ozone.om.kerberos.keytab.file=/etc/security/keytabs/om.keytab
OZONE-SITE.XML_ozone.security.enabled=true
-OZONE-SITE.XML_hdds.scm.web.authentication.kerberos.principal=HTTP/scm@EXAMPLE.COM
-OZONE-SITE.XML_hdds.scm.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
-OZONE-SITE.XML_ozone.om.web.authentication.kerberos.principal=HTTP/om@EXAMPLE.COM
-OZONE-SITE.XML_ozone.om.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
-HDFS-SITE.XML_dfs.datanode.kerberos.principal=dn/datanode@EXAMPLE.COM
+OZONE-SITE.XML_hdds.scm.http.kerberos.principal=HTTP/scm@EXAMPLE.COM
+OZONE-SITE.XML_hdds.scm.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_ozone.om.http.kerberos.principal=HTTP/om@EXAMPLE.COM
+OZONE-SITE.XML_ozone.om.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+HDFS-SITE.XML_dfs.datanode.kerberos.principal=dn/_HOST@EXAMPLE.COM
HDFS-SITE.XML_dfs.datanode.keytab.file=/etc/security/keytabs/dn.keytab
HDFS-SITE.XML_dfs.web.authentication.kerberos.principal=HTTP/_HOST@EXAMPLE.COM
HDFS-SITE.XML_dfs.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
@@ -41,11 +42,15 @@ HDFS-SITE.XML_dfs.datanode.http.address=0.0.0.0:1012
CORE-SITE.XML_dfs.data.transfer.protection=authentication
CORE-SITE.XML_hadoop.security.authentication=kerberos
CORE-SITE.XML_hadoop.security.auth_to_local=RULE:[2:$1@$0](.*)s/.*/root/
+HDFS-SITE.XML_rpc.metrics.quantile.enable=true
+HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300
LOG4J.PROPERTIES_log4j.rootLogger=INFO, stdout
LOG4J.PROPERTIES_log4j.appender.stdout=org.apache.log4j.ConsoleAppender
LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
LOG4J.PROPERTIES_log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
-
+LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
+LOG4J.PROPERTIES_log4j.logger.org.apache.ratis.conf.ConfUtils=WARN
+LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.security.ShellBasedUnixGroupsMapping=ERROR
#Enable this variable to print out all hadoop rpc traffic to the stdout. See http://byteman.jboss.org/ to define your own instrumentation.
#BYTEMAN_SCRIPT_URL=https://raw.githubusercontent.com/apache/hadoop/trunk/dev-support/byteman/hadooprpc.btm
@@ -87,13 +92,12 @@ LOG4J2.PROPERTIES_rootLogger.level=INFO
LOG4J2.PROPERTIES_rootLogger.appenderRefs=stdout
LOG4J2.PROPERTIES_rootLogger.appenderRef.stdout.ref=STDOUT
-
OZONE_DATANODE_SECURE_USER=root
CONF_DIR=/etc/security/keytabs
KERBEROS_KEYTABS=dn om scm HTTP testuser
KERBEROS_KEYSTORES=hadoop
-KERBEROS_SERVER=ozone.kdc
+KERBEROS_SERVER=kdc
JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/
JSVC_HOME=/usr/bin
-SLEEP_SECONDS=10
+SLEEP_SECONDS=5
KERBEROS_ENABLED=true
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile
new file mode 100644
index 0000000000..efda03e08d
--- /dev/null
+++ b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM openjdk:8-jdk
+RUN apt-get update && apt-get install -y jq curl python sudo && apt-get clean
+
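+# dumb-init runs as PID 1 so that signals are forwarded to the started
+# process and orphaned children are reaped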
+RUN wget -O /usr/local/bin/dumb-init https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64
+RUN chmod +x /usr/local/bin/dumb-init
+RUN mkdir -p /etc/security/keytabs && chmod -R a+wr /etc/security/keytabs
+ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/
+ENV PATH $PATH:/opt/hadoop/bin
+
+RUN apt-get install -y jsvc
+ENV JSVC_HOME=/usr/bin
+ADD scripts/krb5.conf /etc/
+RUN apt-get install -y krb5-user
+
+RUN addgroup --gid 1000 hadoop
+RUN adduser --disabled-password --gecos "" --uid 1000 hadoop --gid 1000 --home /opt/hadoop
+RUN echo "hadoop ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
+ADD scripts /opt/
+
+WORKDIR /opt/hadoop
+
+VOLUME /data
+ENTRYPOINT ["/usr/local/bin/dumb-init", "--", "/opt/starter.sh"]
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/build.sh b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/build.sh
new file mode 100755
index 0000000000..abbdebe185
--- /dev/null
+++ b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/build.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+set -e
+mkdir -p "$DIR/build"
+if [ ! -d "$DIR/build/apache-rat-0.12" ]; then
+  wget "http://xenia.sote.hu/ftp/mirrors/www.apache.org/creadur/apache-rat-0.12/apache-rat-0.12-bin.tar.gz" -O "$DIR/build/apache-rat.tar.gz"
+  cd "$DIR"/build || exit
+  tar zvxf apache-rat.tar.gz
+fi
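+# run Apache RAT to check license headers, then build the runner image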
+java -jar "$DIR"/build/apache-rat-0.12/apache-rat-0.12.jar "$DIR" -e public -e apache-rat-0.12 -e .git -e .gitignore
+docker build -t ahadoop/runner .
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py
new file mode 100755
index 0000000000..ad2e176570
--- /dev/null
+++ b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py
@@ -0,0 +1,115 @@
+#!/usr/bin/python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""convert environment variables to config"""
+
+import os
+import re
+
+import argparse
+
+import sys
+import transformation
+
+class Simple(object):
+  """Simple conversion"""
+  def __init__(self, args):
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--destination", help="Destination directory", required=True)
+    self.args = parser.parse_args(args=args)
+    # copy the default files to file.raw in destination directory
+
+    self.known_formats = ['xml', 'properties', 'yaml', 'yml', 'env', 'sh', 'cfg', 'conf']
+    self.output_dir = self.args.destination
+
+    self.configurables = {}
+
+  def destination_file_path(self, name, extension):
+    """destination file path"""
+    return os.path.join(self.output_dir, "{}.{}".format(name, extension))
+
+  def write_env_var(self, name, extension, key, value):
+    """Write environment variables"""
+    with open(self.destination_file_path(name, extension) + ".raw", "a") as myfile:
+      myfile.write("{}: {}\n".format(key, value))
+
+  def process_envs(self):
+    """Process environment variables"""
+    for key in os.environ.keys():
+      pattern = re.compile("[_\\.]")
+      parts = pattern.split(key)
+      extension = None
+      name = parts[0].lower()
+      if len(parts) > 1:
+        extension = parts[1].lower()
+        config_key = key[len(name) + len(extension) + 2:].strip()
+      if extension and "!" in extension:
+        splitted = extension.split("!")
+        extension = splitted[0]
+        fmt = splitted[1]
+        config_key = key[len(name) + len(extension) + len(fmt) + 3:].strip()
+      else:
+        fmt = extension
+
+      if extension and extension in self.known_formats:
+        if name not in self.configurables.keys():
+          with open(self.destination_file_path(name, extension) + ".raw", "w") as myfile:
+            myfile.write("")
+          self.configurables[name] = (extension, fmt)
+        self.write_env_var(name, extension, config_key, os.environ[key])
+      else:
+        for configurable_name in self.configurables:
+          if key.lower().startswith(configurable_name.lower()):
+            # configurables stores (extension, fmt) tuples; pass only the extension
+            self.write_env_var(configurable_name,
+                               self.configurables[configurable_name][0],
+                               key[len(configurable_name) + 1:],
+                               os.environ[key])
+
+  def transform(self):
+    """transform"""
+    for configurable_name in self.configurables:
+      name = configurable_name
+      extension, fmt = self.configurables[name]
+
+      destination_path = self.destination_file_path(name, extension)
+
+      with open(destination_path + ".raw", "r") as myfile:
+        content = myfile.read()
+        transformer_func = getattr(transformation, "to_" + fmt)
+        content = transformer_func(content)
+        with open(destination_path, "w") as myfile:
+          myfile.write(content)
+
+  def main(self):
+    """main"""
+
+    # process the environment variables into .raw intermediate files
+    self.process_envs()
+
+    # copy file.ext.raw to file.ext in the destination directory, and
+    # transform to the right format (eg. key: value ===> XML)
+    self.transform()
+
+
+def main():
+  """main"""
+  Simple(sys.argv[1:]).main()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf
new file mode 100644
index 0000000000..82ae73aa80
--- /dev/null
+++ b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+[logging]
+ default = FILE:/var/log/krb5libs.log
+ kdc = FILE:/var/log/krb5kdc.log
+ admin_server = FILE:/var/log/kadmind.log
+
+[libdefaults]
+ dns_canonicalize_hostname = false
+ dns_lookup_realm = false
+ ticket_lifetime = 24h
+ renew_lifetime = 7d
+ forwardable = true
+ rdns = false
+ default_realm = EXAMPLE.COM
+
+[realms]
+ EXAMPLE.COM = {
+ kdc = SERVER
+ admin_server = SERVER
+ }
+
+[domain_realm]
+ .example.com = EXAMPLE.COM
+ example.com = EXAMPLE.COM
\ No newline at end of file
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh
new file mode 100755
index 0000000000..04cd49d12f
--- /dev/null
+++ b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh
@@ -0,0 +1,100 @@
+#!/usr/bin/env bash
+##
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+set -e
+
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+echo "Setting up enviorment!!"
+
+if [ -n "$KERBEROS_ENABLED" ]; then
+ echo "Setting up kerberos!!"
+ KERBEROS_SERVER=${KERBEROS_SERVER:-krb5}
+ ISSUER_SERVER=${ISSUER_SERVER:-$KERBEROS_SERVER\:8081}
+
+ echo "KDC ISSUER_SERVER => $ISSUER_SERVER"
+
+ if [ -n "$SLEEP_SECONDS" ]; then
+ echo "Sleeping for $(SLEEP_SECONDS) seconds"
+ sleep "$SLEEP_SECONDS"
+ fi
+
+
+  while true
+  do
+    STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://"$ISSUER_SERVER"/keytab/test/test)
+    if [ "$STATUS" -eq 200 ]; then
+      echo "Got 200, KDC service ready!!"
+      break
+    else
+      echo "Got $STATUS :( KDC service not ready yet..."
+    fi
+    sleep 5
+  done
+
+  HOST_NAME=$(hostname -f)
+  export HOST_NAME
+  for NAME in ${KERBEROS_KEYTABS}; do
+    echo "Download $NAME/$HOST_NAME@EXAMPLE.COM keytab file to $CONF_DIR/$NAME.keytab"
+    wget "http://$ISSUER_SERVER/keytab/$HOST_NAME/$NAME" -O "$CONF_DIR/$NAME.keytab"
+    klist -kt "$CONF_DIR/$NAME.keytab"
+    KERBEROS_ENABLED=true
+  done
+
+  sed "s/SERVER/$KERBEROS_SERVER/g" "$DIR"/krb5.conf | sudo tee /etc/krb5.conf
+fi
+
+#To avoid docker volume permission problems
+sudo chmod o+rwx /data
+
+"$DIR"/envtoconf.py --destination /opt/hadoop/etc/hadoop
+
+if [ -n "$ENSURE_NAMENODE_DIR" ]; then
+ CLUSTERID_OPTS=""
+ if [ -n "$ENSURE_NAMENODE_CLUSTERID" ]; then
+ CLUSTERID_OPTS="-clusterid $ENSURE_NAMENODE_CLUSTERID"
+ fi
+ if [ ! -d "$ENSURE_NAMENODE_DIR" ]; then
+ /opt/hadoop/bin/hdfs namenode -format -force "$CLUSTERID_OPTS"
+ fi
+fi
+
+if [ -n "$ENSURE_STANDBY_NAMENODE_DIR" ]; then
+ if [ ! -d "$ENSURE_STANDBY_NAMENODE_DIR" ]; then
+ /opt/hadoop/bin/hdfs namenode -bootstrapStandby
+ fi
+fi
+
+if [ -n "$ENSURE_SCM_INITIALIZED" ]; then
+ if [ ! -f "$ENSURE_SCM_INITIALIZED" ]; then
+ /opt/hadoop/bin/ozone scm -init
+ fi
+fi
+
+if [ -n "$ENSURE_OM_INITIALIZED" ]; then
+ if [ ! -f "$ENSURE_OM_INITIALIZED" ]; then
+ #To make sure SCM is running in dockerized environment we will sleep
+ # Could be removed after HDFS-13203
+ echo "Waiting 15 seconds for SCM startup"
+ sleep 15
+ /opt/hadoop/bin/ozone om -createObjectStore
+ fi
+fi
+
+echo 'setup finished'
+"$@"
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py
new file mode 100755
index 0000000000..5e708ce2b6
--- /dev/null
+++ b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py
@@ -0,0 +1,150 @@
+#!/usr/bin/python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""This module transform properties into different format"""
+def render_yaml(yaml_root, prefix=""):
+ """render yaml"""
+ result = ""
+ if isinstance(yaml_root, dict):
+ if prefix:
+ result += "\n"
+ for key in yaml_root:
+ result += "{}{}: {}".format(prefix, key, render_yaml(
+ yaml_root[key], prefix + " "))
+ elif isinstance(yaml_root, list):
+ result += "\n"
+ for item in yaml_root:
+ result += prefix + " - " + render_yaml(item, prefix + " ")
+ else:
+ result += "{}\n".format(yaml_root)
+ return result
+
+
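+# Dotted property keys are expanded into nested structures before rendering:
+# e.g. "a.b.0: x" becomes {a: {b: [x]}}, with numeric path segments turned
+# into list indices.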
+def to_yaml(content):
+  """transform to yaml"""
+  props = process_properties(content)
+
+  keys = props.keys()
+  yaml_props = {}
+  for key in keys:
+    parts = key.split(".")
+    node = yaml_props
+    prev_part = None
+    parent_node = {}
+    for part in parts[:-1]:
+      if part.isdigit():
+        if isinstance(node, dict):
+          parent_node[prev_part] = []
+          node = parent_node[prev_part]
+        while len(node) <= int(part):
+          node.append({})
+        parent_node = node
+        node = node[int(part)]
+      else:
+        if part not in node:
+          node[part] = {}
+        parent_node = node
+        node = node[part]
+      prev_part = part
+    if parts[-1].isdigit():
+      if isinstance(node, dict):
+        parent_node[prev_part] = []
+        node = parent_node[prev_part]
+      node.append(props[key])
+    else:
+      node[parts[-1]] = props[key]
+
+  return render_yaml(yaml_props)
+
+
+def to_yml(content):
+  """transform to yml"""
+  return to_yaml(content)
+
+
+def to_properties(content):
+  """transform to properties"""
+  result = ""
+  props = process_properties(content)
+  for key, val in props.items():
+    result += "{}: {}\n".format(key, val)
+  return result
+
+
+def to_env(content):
+  """transform to environment variables"""
+  result = ""
+  props = process_properties(content)
+  for key, val in props.items():
+    result += "{}={}\n".format(key, val)
+  return result
+
+
+def to_sh(content):
+  """transform to shell"""
+  result = ""
+  props = process_properties(content)
+  for key, val in props.items():
+    result += "export {}=\"{}\"\n".format(key, val)
+  return result
+
+
+def to_cfg(content):
+  """transform to config"""
+  result = ""
+  props = process_properties(content)
+  for key, val in props.items():
+    result += "{}={}\n".format(key, val)
+  return result
+
+
+def to_conf(content):
+  """transform to configuration"""
+  result = ""
+  props = process_properties(content)
+  for key, val in props.items():
+    result += "export {}={}\n".format(key, val)
+  return result
+
+
+def to_xml(content):
+  """transform to xml"""
+  result = "<configuration>\n"
+  props = process_properties(content)
+  for key in props:
+    result += "<property><name>{0}</name><value>{1}</value></property>\n". \
+        format(key, props[key])
+  result += "</configuration>"
+  return result
+
+
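+# Parses "key: value" lines from the intermediate ".raw" files; everything
+# after the first separator belongs to the value, and surrounding double
+# quotes are stripped.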
+def process_properties(content, sep=': ', comment_char='#'):
+  """
+  Read the file passed as parameter as a properties file.
+  """
+  props = {}
+  for line in content.split("\n"):
+    sline = line.strip()
+    if sline and not sline.startswith(comment_char):
+      key_value = sline.split(sep)
+      key = key_value[0].strip()
+      value = sep.join(key_value[1:]).strip().strip('"')
+      props[key] = value
+
+  return props
diff --git a/hadoop-hdds/common/pom.xml b/hadoop-hdds/common/pom.xml
index 15ae307841..30dcc58b76 100644
--- a/hadoop-hdds/common/pom.xml
+++ b/hadoop-hdds/common/pom.xml
@@ -118,6 +118,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
       <artifactId>bcprov-jdk15on</artifactId>
       <version>1.54</version>
     </dependency>
+
+    <dependency>
+      <groupId>org.bouncycastle</groupId>
+      <artifactId>bcpkix-jdk15on</artifactId>
+      <version>1.54</version>
+    </dependency>
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java
index ca22f48244..9fb7954094 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java
@@ -327,11 +327,11 @@ public final class ScmConfigKeys {
"10m";
public static final String
- HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY =
- "hdds.scm.web.authentication.kerberos.principal";
+ HDDS_SCM_HTTP_KERBEROS_PRINCIPAL_KEY =
+ "hdds.scm.http.kerberos.principal";
public static final String
- HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY =
- "hdds.scm.web.authentication.kerberos.keytab";
+ HDDS_SCM_HTTP_KERBEROS_KEYTAB_FILE_KEY =
+ "hdds.scm.http.kerberos.keytab";
/**
* Never constructed.
*/
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/SecurityConfig.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/SecurityConfig.java
index 9231c8a107..9e0231a74a 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/SecurityConfig.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/SecurityConfig.java
@@ -48,7 +48,7 @@
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_MAX_DURATION_DEFAULT;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_SIGNATURE_ALGO;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_SIGNATURE_ALGO_DEFAULT;
-import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
+import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
/**
* A class that deals with all Security related configs in HDDS.
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
index 0f101ac755..e9a52f8aae 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
@@ -329,9 +329,6 @@ public final class OzoneConfigKeys {
DFS_RATIS_SERVER_FAILURE_DURATION_DEFAULT =
ScmConfigKeys.DFS_RATIS_SERVER_FAILURE_DURATION_DEFAULT;
- public static final String OZONE_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL =
- "ozone.web.authentication.kerberos.principal";
-
public static final String HDDS_DATANODE_PLUGINS_KEY =
"hdds.datanode.plugins";
diff --git a/hadoop-hdds/common/src/main/resources/ozone-default.xml b/hadoop-hdds/common/src/main/resources/ozone-default.xml
index 83c3c9fad4..a60ad38a50 100644
--- a/hadoop-hdds/common/src/main/resources/ozone-default.xml
+++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml
@@ -1629,11 +1629,11 @@
-    <name>hdds.scm.web.authentication.kerberos.principal</name>
+    <name>hdds.scm.http.kerberos.principal</name>
     <value>HTTP/_HOST@EXAMPLE.COM</value>
-    <name>hdds.scm.web.authentication.kerberos.keytab</name>
+    <name>hdds.scm.http.kerberos.keytab</name>
     <value>/etc/security/keytabs/HTTP.keytab</value>
@@ -1645,7 +1645,7 @@
-    <name>ozone.om.http.kerberos.keytab.file</name>
+    <name>ozone.om.http.kerberos.keytab</name>
     <value>/etc/security/keytabs/HTTP.keytab</value>
     <description>OzoneManager http server kerberos keytab.</description>
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java
index 3a00ca05b5..5f99e2c35c 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java
@@ -46,7 +46,7 @@
import java.util.Date;
import java.util.UUID;
-import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
+import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
/**
* Test Class for Root Certificate generation.
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestHDDSKeyGenerator.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestHDDSKeyGenerator.java
index f9541a22b6..08761f48e8 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestHDDSKeyGenerator.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestHDDSKeyGenerator.java
@@ -19,7 +19,7 @@
package org.apache.hadoop.hdds.security.x509.keys;
-import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
+import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
import java.security.KeyPair;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
index fb2eba3980..b514063224 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
@@ -30,6 +30,10 @@
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.util.ServicePlugin;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
@@ -147,12 +151,38 @@ public void start(Object service) {
datanodeDetails = initializeDatanodeDetails();
datanodeDetails.setHostName(hostname);
datanodeDetails.setIpAddress(ip);
+ LOG.info("HddsDatanodeService host:{} ip:{}", hostname, ip);
+ // Authenticate Hdds Datanode service if security is enabled
+ if (conf.getBoolean(OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY,
+ true)) {
+ if (SecurityUtil.getAuthenticationMethod(conf).equals(
+ UserGroupInformation.AuthenticationMethod.KERBEROS)) {
+ LOG.debug("Ozone security is enabled. Attempting login for Hdds " +
+ "Datanode user. "
+ + "Principal: {},keytab: {}", conf.get(
+ DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY),
+ conf.get(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY));
+
+ UserGroupInformation.setConfiguration(conf);
+
+ SecurityUtil.login(conf, DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,
+ DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hostname);
+ } else {
+ throw new AuthenticationException(SecurityUtil.
+ getAuthenticationMethod(conf) + " authentication method not " +
+ "supported. Datanode user" + " login " + "failed.");
+ }
+ LOG.info("Hdds Datanode login successful.");
+ }
datanodeStateMachine = new DatanodeStateMachine(datanodeDetails, conf);
startPlugins();
// Starting HDDS Daemons
datanodeStateMachine.startDaemon();
} catch (IOException e) {
throw new RuntimeException("Can't start the HDDS datanode plugin", e);
+ } catch (AuthenticationException ex) {
+ throw new RuntimeException("Fail to authentication when starting" +
+ " HDDS datanode plugin", ex);
}
}
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
index 41dd89a909..dce2a45e87 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
@@ -62,11 +62,11 @@ public StorageContainerManagerHttpServer(Configuration conf)
}
@Override protected String getKeytabFile() {
- return ScmConfigKeys.HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY;
+ return ScmConfigKeys.HDDS_SCM_HTTP_KERBEROS_KEYTAB_FILE_KEY;
}
@Override protected String getSpnegoPrincipal() {
- return ScmConfigKeys.HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
+ return ScmConfigKeys.HDDS_SCM_HTTP_KERBEROS_PRINCIPAL_KEY;
}
@Override protected String getEnabledKey() {
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/OMConfigKeys.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/OMConfigKeys.java
index 12beb83695..5917a11494 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/OMConfigKeys.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/OMConfigKeys.java
@@ -180,8 +180,8 @@ private OMConfigKeys() {
+ "kerberos.keytab.file";
public static final String OZONE_OM_KERBEROS_PRINCIPAL_KEY = "ozone.om"
+ ".kerberos.principal";
- public static final String OZONE_OM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE =
+ public static final String OZONE_OM_HTTP_KERBEROS_KEYTAB_FILE =
"ozone.om.http.kerberos.keytab.file";
- public static final String OZONE_OM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY
+ public static final String OZONE_OM_HTTP_KERBEROS_PRINCIPAL_KEY
= "ozone.om.http.kerberos.principal";
}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
index 809a354917..a0119b21fe 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
@@ -119,9 +119,9 @@ private void createCredentialsInKDC(Configuration conf, MiniKdc miniKdc)
conf.get(ScmConfigKeys.HDDS_SCM_KERBEROS_PRINCIPAL_KEY));
createPrincipal(spnegoKeytab,
conf.get(ScmConfigKeys
- .HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY));
+ .HDDS_SCM_HTTP_KERBEROS_PRINCIPAL_KEY));
conf.get(OMConfigKeys
- .OZONE_OM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
+ .OZONE_OM_HTTP_KERBEROS_PRINCIPAL_KEY);
createPrincipal(omKeyTab,
conf.get(OMConfigKeys.OZONE_OM_KERBEROS_PRINCIPAL_KEY));
}
@@ -155,12 +155,12 @@ private void setSecureConfig(Configuration conf) throws IOException {
conf.set(ScmConfigKeys.HDDS_SCM_KERBEROS_PRINCIPAL_KEY,
"scm/" + host + "@" + realm);
- conf.set(ScmConfigKeys.HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+ conf.set(ScmConfigKeys.HDDS_SCM_HTTP_KERBEROS_PRINCIPAL_KEY,
"HTTP_SCM/" + host + "@" + realm);
conf.set(OMConfigKeys.OZONE_OM_KERBEROS_PRINCIPAL_KEY,
"om/" + host + "@" + realm);
- conf.set(OMConfigKeys.OZONE_OM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+ conf.set(OMConfigKeys.OZONE_OM_HTTP_KERBEROS_PRINCIPAL_KEY,
"HTTP_OM/" + host + "@" + realm);
scmKeytab = new File(workDir, "scm.keytab");
@@ -169,7 +169,8 @@ private void setSecureConfig(Configuration conf) throws IOException {
conf.set(ScmConfigKeys.HDDS_SCM_KERBEROS_KEYTAB_FILE_KEY,
scmKeytab.getAbsolutePath());
- conf.set(ScmConfigKeys.HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY,
+ conf.set(
+ ScmConfigKeys.HDDS_SCM_HTTP_KERBEROS_KEYTAB_FILE_KEY,
spnegoKeytab.getAbsolutePath());
conf.set(OMConfigKeys.OZONE_OM_KERBEROS_KEYTAB_FILE_KEY,
omKeyTab.getAbsolutePath());
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java
index be12fa8897..8f7f058e24 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerHttpServer.java
@@ -64,11 +64,11 @@ public OzoneManagerHttpServer(Configuration conf, OzoneManager om)
}
@Override protected String getKeytabFile() {
- return OMConfigKeys.OZONE_OM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE;
+ return OMConfigKeys.OZONE_OM_HTTP_KERBEROS_KEYTAB_FILE;
}
@Override protected String getSpnegoPrincipal() {
- return OMConfigKeys.OZONE_OM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
+ return OMConfigKeys.OZONE_OM_HTTP_KERBEROS_PRINCIPAL_KEY;
}
@Override protected String getEnabledKey() {